Example #1
0
 def test_json(self):
     """to_json must match json.dumps for both a plain dict and an object's __dict__."""
     sample = {
         "firstName": "Jane",
         "lastName": "Doe",
         "hobbies": ["running", "sky diving", "singing"],
         "age": 35,
         "children": [
             {"firstName": "Alice", "age": 6},
             {"firstName": "Bob", "age": 8},
         ],
     }
     person = Person("Dima", 25)
     # An object serializes like its attribute dictionary.
     self.assertEqual(to_json(person), json.dumps(person.__dict__))
     # A plain dict serializes exactly like json.dumps would.
     self.assertEqual(to_json(sample), json.dumps(sample))
Example #2
0
    def computeHist(self):
        """Run the RMS analysis and save a 2x2 grid of histograms.

        Side effects: writes ``<fileOutput>.png`` plus four JSON dumps
        (``*_t1p0.json`` ... ``*_t2p1.json``). One panel is drawn per
        (tune, polarization) combination.
        """
        self.rmsAnalysis = rmsIterator(self.fileInput, self.framesPerChunk)
        self.rmsAnalysis.iterate(self.howManyChunks)
        print("<br>I'm trying to compute.")
        fig = plt.figure()
        fig.set_size_inches(8, 8)
        fig.subplots_adjust(wspace=0.5)
        fig.subplots_adjust(hspace=0.35)

        rms = self.rmsAnalysis
        # (subplot position, data series, tune in Hz, polarization index)
        panels = [
            (221, rms.returnDataT1P0, rms.tune1, 0),
            (222, rms.returnDataT1P1, rms.tune1, 1),
            (223, rms.returnDataT2P0, rms.tune2, 0),
            (224, rms.returnDataT2P1, rms.tune2, 1),
        ]
        for position, data, tune, pol in panels:
            # Label format: B<beam>:T<tune in MHz>:P<polarization>
            label = ('B' + str(rms.beam) + ':T'
                     + ("%.1f" % (tune / 1e6)) + ':P' + str(pol))
            self._add_hist_panel(fig, position, data, label)

        fig.savefig(self.fileOutput + ".png")
        to_json(rms.jsonT1P0, self.fileOutput + "_t1p0.json")
        to_json(rms.jsonT1P1, self.fileOutput + "_t1p1.json")
        to_json(rms.jsonT2P0, self.fileOutput + "_t2p0.json")
        to_json(rms.jsonT2P1, self.fileOutput + "_t2p1.json")

    def _add_hist_panel(self, fig, position, data, label):
        """Draw one normalized 8-bin step histogram into subplot *position*."""
        ax = fig.add_subplot(position)
        # NOTE(review): `normed=True` is the legacy matplotlib argument
        # (replaced by `density=True` in newer versions) — kept as-is.
        _, bins, _ = plt.hist(data[:], bins=8, histtype='step', normed=True,
                              label=label)
        # Put a tick at every bin edge, formatted to two decimals.
        ax.set_xticks(bins)
        ax.xaxis.set_major_formatter(FormatStrFormatter('%0.2f'))
        plt.setp(ax.get_xticklabels(), rotation=90)
        ax.set_xlabel('Step')
        ax.set_ylabel('Count')
        ax.legend(loc='best')
Example #3
0
    def compute(self):
        """Run the RMS analysis and save a 2x2 grid of time-series plots.

        Side effects: writes the figure to ``fileOutput`` plus four JSON
        dumps (``*_t1p0.json`` ... ``*_t2p1.json``). One panel is drawn per
        (tune, polarization) combination.
        """
        self.rmsAnalysis = rmsIterator(self.fileInput, self.framesPerChunk)
        self.rmsAnalysis.iterate(self.howManyChunks)
        print("<br>I'm trying to compute.")
        fig = plt.figure()
        fig.set_size_inches(8, 8)
        fig.subplots_adjust(wspace=0.5)
        fig.subplots_adjust(hspace=0.35)

        rms = self.rmsAnalysis
        # Total capture time: 4096 samples per frame at a 19.6 MHz rate.
        # Hoisted out of the loop — it is identical for all four panels.
        xlabel = ('Time ('
                  + ("%.6f" % (self.framesPerChunk * self.howManyChunks
                               * 4096 / 19.6e6))
                  + ' Seconds total)')
        # (subplot position, data series, tune in Hz, polarization index)
        panels = [
            (221, rms.returnDataT1P0, rms.tune1, 0),
            (222, rms.returnDataT1P1, rms.tune1, 1),
            (223, rms.returnDataT2P0, rms.tune2, 0),
            (224, rms.returnDataT2P1, rms.tune2, 1),
        ]
        for position, data, tune, pol in panels:
            ax = fig.add_subplot(position)
            ax.set_xlabel(xlabel)
            ax.set_ylabel('ADC Magnitude')
            # Label format: B<beam>:T<tune in MHz>:P<polarization>
            label = ('B' + str(rms.beam) + ':T'
                     + ("%.1f" % (tune / 1e6)) + ':P' + str(pol))
            ax.plot(data[:], label=label)
            ax.legend(loc='best')

        fig.savefig(self.fileOutput)
        to_json(rms.jsonT1P0, self.fileOutput + "_t1p0.json")
        to_json(rms.jsonT1P1, self.fileOutput + "_t1p1.json")
        to_json(rms.jsonT2P0, self.fileOutput + "_t2p0.json")
        to_json(rms.jsonT2P1, self.fileOutput + "_t2p1.json")
 def onResultsReady(self):
     """Serialize the collected results as JSON to self.resultsPath."""
     payload = {"data": self.results}
     with open(self.resultsPath, 'w') as out:
         out.write(to_json(payload))
def main(bagfilename, motiontopic, maptopic, output_file):
    """Convert a ROS bag's motion-state and map topics into one JSON file.

    Reads every message on *motiontopic* into a per-timestamp state dict,
    augments each entry with the feature map from *maptopic* at the same
    timestamp, and writes the time-sorted list of entries to *output_file*.
    """
    bagfile = rosbag.Bag(bagfilename, mode='r')

    # Maps nanosecond timestamp -> dict of state for that timestep.
    alltimesteps = {}

    # Parse fullstate topic.
    for _, msg, _ in bagfile.read_messages(topics=motiontopic):
        # Timestamp in integer nanoseconds.
        ts = msg.header.stamp.secs * 1000000000 + msg.header.stamp.nsecs

        entry = {}
        entry['group'] = msg.group

        entry['ImagePath'] = ''
        entry['Timestamp'] = ts
        entry['Tsb_XYZ'] = rosvec_to_list(msg.gsb.translation)
        entry['qsb_WXYZ'] = rosquat_to_list(msg.gsb.rotation)
        entry['Tbc_XYZ'] = rosvec_to_list(msg.gbc.translation)
        entry['qbc_WXYZ'] = rosquat_to_list(msg.gbc.rotation)
        entry['Tsc_XYZ'] = rosvec_to_list(msg.gsc.translation)
        entry['qsc_WXYZ'] = rosquat_to_list(msg.gsc.rotation)
        entry['Vsb_XYZ'] = rosvec_to_list(msg.Vsb)

        # State covariance stored as (size, flattened values).
        entry['Pstate'] = [msg.MotionStateSize, list(msg.covariance)]

        entry['MeasurementUpdateInitialized'] = bool(
            msg.MeasurementUpdateInitialized)
        entry['inn_Tsb'] = rosvec_to_list(msg.inn_Tsb)
        entry['inn_Wsb'] = rosvec_to_list(msg.inn_Wsb)
        entry['inn_Vsb'] = rosvec_to_list(msg.inn_Vsb)

        entry['bg'] = rosvec_to_list(msg.bg)
        entry['ba'] = rosvec_to_list(msg.ba)
        entry['qg_WXYZ'] = rosquat_to_list(msg.qg)
        entry['td'] = msg.td
        entry['Ca'] = list(msg.Ca)
        entry['Cg'] = list(msg.Cg)

        alltimesteps[ts] = entry

    # Parse map topic: each message augments the motion entry with the same
    # timestamp (a map message without a motion entry raises KeyError,
    # matching the original behavior).
    for _, msg, _ in bagfile.read_messages(topics=maptopic):
        ts = msg.header.stamp.secs * 1000000000 + msg.header.stamp.nsecs
        entry = alltimesteps[ts]

        feature_positions = []
        feature_ids = []
        feature_covariances = []

        for feature in msg.features:
            feature_ids.append(feature.id)
            feature_positions.extend(rosvec_to_list(feature.Xs))

            # Keep only the upper triangle of the (symmetric) 3x3 covariance.
            cov = roslist_to_list(feature.covariance, 9)
            feature_covariances.extend(
                [cov[0], cov[1], cov[2], cov[4], cov[5], cov[8]])

        entry['num_instate_features'] = msg.num_features
        entry['feature_positions'] = feature_positions
        entry['feature_covs'] = feature_covariances
        entry['feature_ids'] = feature_ids

    # Release the bag handle once both topics have been consumed.
    bagfile.close()

    # Emit entries in ascending timestamp order. sorted() works on both
    # Python 2 and 3; the original `.keys()` + `.sort()` fails on Python 3
    # because dict views have no sort() method.
    final_list = [alltimesteps[ts] for ts in sorted(alltimesteps)]

    json_string = to_json.to_json(final_list)
    with open(output_file, 'w') as fid:
        fid.write(json_string)
 def write_json(self, filename):
     """Serialize the estimator results as JSON into *filename*."""
     with open(filename, 'w') as out:
         out.write(to_json(self.estimator_results))
if args.to_json:
    # Strip suspected-parent labels from every member of the generation at
    # `clear_index` so they are not carried into the serialized output.
    num_generations = population.num_generations
    clear_index = max(num_generations - args.gen_back, 0)
    for node in population.generations[clear_index].members:
        node.suspected_mother = None
        node.suspected_mother_id = None
        node.suspected_father = None
        node.suspected_father_id = None
    # Candidate nodes come from the last three generations.
    unlabeled_nodes = set(chain.from_iterable(generation.members
                                              for generation
                                              in population.generations[-3:]))
    related_nodes = related_pairs(unlabeled_nodes, labeled_nodes, population,
                                  args.gen_back)
    # Renamed from `json`: that name shadowed the stdlib json module.
    json_output = to_json(population, labeled_nodes, related_nodes)
    with open(args.to_json, "w") as json_file:
        json_file.write(json_output)
    exit()

# Load per-chromosome recombination rates and build a genome generator
# sized from the male chromosome lengths.
print("Loading recombination data.")
recombinators = recombinators_from_directory("../data/recombination_rates/")
# NOTE(review): reaches into the private `_num_bases` attribute of the
# recombinator — presumably a per-chromosome base-count mapping; confirm.
chrom_sizes = recombinators[Sex.Male]._num_bases
genome_generator = RecombGenomeGenerator(chrom_sizes)


print("Populating length classifier.")

# Rebuild (clobber) classifier state only when we are neither recovering
# from a previous run nor running zero iterations.
clobber = not (args.recover or args.num_iterations == 0)
classifier = generate_classifier(population, labeled_nodes,
Example #8
0
 def test2(self):
     """to_json.to_json must agree with json.dumps for obj1."""
     expected = json.dumps(self.obj1)
     self.assertEqual(to_json.to_json(self.obj1), expected)
Example #9
0
 def test_to_json(self):
     """Serializing an empty list must produce a string."""
     result = to_json.to_json([])
     self.assertIsInstance(result, str)