def main(self):
    """Build an orientation-heatmap star file with preferred orientations removed."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    print("Making orientation heatmap from star file...")
    input_md = MetaData(args.i)
    # Drop over-represented orientations using the given sigma cutoff.
    kept_particles = list(self.removePrefOrient(self.get_particles(input_md), args.sd))
    output_md = MetaData()
    if input_md.version == "3.1":
        # RELION 3.1 files carry a separate optics table that is copied through.
        output_md.version = "3.1"
        output_md.addDataTable("data_optics")
        output_md.addLabels("data_optics", input_md.getLabels("data_optics"))
        output_md.addData("data_optics", getattr(input_md, "data_optics"))
        table_name = "data_particles"
    else:
        table_name = "data_"
    output_md.addDataTable(table_name)
    output_md.addLabels(table_name, input_md.getLabels(table_name))
    output_md.addData(table_name, kept_particles)
    output_md.write(args.o)
    print("File %s was created..." % args.o)
    print("Finished. Have fun!")
def main(self):
    """Rotate/shift all particles in a star file and write the result to a new file."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    print("Performing rotation of particles from star file...")
    md = MetaData(args.i)
    new_particles = []
    particles = self.get_particles(md)
    # NOTE(review): rotValue, tiltValue, psiValue, xValue, yValue, zValue are
    # not defined anywhere in this method — presumably they should come from
    # args or be returned by self.validate(args). As written this raises
    # NameError at runtime. TODO confirm and fix.
    new_particles.extend(
        self.rotateParticles(particles, rotValue, tiltValue, psiValue, xValue,
                             yValue, zValue, md.version))
    mdOut = MetaData()
    # RELION 3.1 star files carry a separate data_optics table that must be
    # copied through to the output.
    if md.version == "3.1":
        mdOut.version = "3.1"
        mdOut.addDataTable("data_optics")
        mdOut.addLabels("data_optics", md.getLabels("data_optics"))
        mdOut.addData("data_optics", getattr(md, "data_optics"))
        particleTableName = "data_particles"
    else:
        particleTableName = "data_"
    mdOut.addDataTable(particleTableName)
    mdOut.addLabels(particleTableName, md.getLabels(particleTableName))
    mdOut.addData(particleTableName, new_particles)
    mdOut.write(args.o)
    print("New star file %s created. Have fun!" % args.o)
def main(self):
    """Add an rlnBeamTiltClass value to every particle and write a new star file."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    star = MetaData(args.i)
    star.addLabels("data_particles", "rlnBeamTiltClass")
    print("Reading in input star file.....")
    particles = self.get_particles(star)
    print("Total %s particles in input star file. \nAdding rlnBeamTiltClass."
          % str(len(particles)))
    self.addBeamTiltClass(particles)
    result = MetaData()
    result.addDataTable("data_")
    result.addLabels("data_", star.getLabels("data_"))
    result.addData("data_", particles)
    result.write(args.o)
    print("New star file %s created. Have fun!" % args.o)
def main(self):
    """Rename micrographs to the micXXX convention and write an updated star file."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    star = MetaData(args.i)
    print("Reading in input star file.....")
    micrographs = self.get_micrographs(star)
    print("Total %s micrographs in input star file. \nRenaming to micXXX convention."
          % str(len(micrographs)))
    self.renameMicrographs(micrographs, args.mic_dir)
    result = MetaData()
    if star.version == "3.1":
        # Copy the 3.1 optics table through to the output.
        result.version = "3.1"
        result.addDataTable("data_optics")
        result.addLabels("data_optics", star.getLabels("data_optics"))
        result.addData("data_optics", getattr(star, "data_optics"))
        table_name = "data_particles"
    else:
        table_name = "data_"
    result.addDataTable(table_name)
    result.addLabels(table_name, star.getLabels(table_name))
    result.addData(table_name, micrographs)
    result.write(args.o)
    print("New star file %s created. Have fun!" % args.o)
def get_flatted_representation(self, meta_data=None):
    """
    Flatten this sequence of transitions into explicit state/transition lists.

    Transitions between a Q-Start and Q-End marker are rewritten as
    self-transitions on their resulting state:

    Trans 1:          A -> B
    Trans 2:          B -> C
    Trans 3 Q-Start:  C -> D
    Trans 4:          C -> D
    Trans 5:          C -> D
    Trans 6 Q-End:    C -> D
    Trans 7:          D -> E

    :param meta_data: optional MetaData whose entry for this element (if any)
        is copied onto every flattened state and transition.
    :returns: states = [A, B, C, D, D, D, D, E]
              transitions = [A -> B, B -> C, C -> D, D -> D, D -> D, D -> D, D -> E]
              and the flattened MetaData.
    """
    # Bug fix: the default used to be `meta_data=MetaData()`, a mutable default
    # argument shared (and potentially mutated) across all calls. Use None and
    # create a fresh instance per call instead.
    if meta_data is None:
        meta_data = MetaData()
    transitions = []
    states = []
    flatted_meta_data = MetaData()
    last_res_s = None
    processing_queue = False
    for trans in self.transitions:
        src_s = trans.source_state_o
        res_s = trans.resulting_state_o
        if processing_queue:
            # Inside a queue: rewrite the transition as a self-transition on
            # its resulting state (D -> D), preserving all other fields.
            states.append(res_s)
            new_trans = Transition(
                trans.resulting_state, trans.resulting_state_o, trans.action,
                trans.interacted_widget, trans.interacted_widget_o,
                trans.resulting_state, trans.resulting_state_o,
                trans.start_time, trans.end_time, trans.successful,
                trans.exception, trans.data, trans.action_id,
                trans.has_result_screen, trans.custom)
            transitions.append(new_trans)
            # We copy the data of the sequence element within the original meta
            # data and apply it to the flattened states and transitions.
            if self in meta_data.data:
                flatted_meta_data.put(res_s, meta_data.get(self))
                flatted_meta_data.put(new_trans, meta_data.get(self))
        else:
            if last_res_s != src_s:
                states.append(src_s)
                if self in meta_data.data:
                    flatted_meta_data.put(src_s, meta_data.get(self))
            states.append(res_s)
            transitions.append(trans)
            # We copy the data of the sequence element within the origin meta
            # data and apply it to the flattened states and transitions.
            if self in meta_data.data:
                flatted_meta_data.put(res_s, meta_data.get(self))
                flatted_meta_data.put(trans, meta_data.get(self))
        last_res_s = res_s
        if droidmateutil.is_queue_start(trans):
            processing_queue = True
        if droidmateutil.is_queue_end(trans):
            processing_queue = False
    return states, transitions, flatted_meta_data
def append_terminate(self, path: Path, uid_state_map, resulting_end_state):
    """Append a terminate transition from the path's last node to the end state.

    :param path: Path to extend; must contain at least one node.
    :param uid_state_map: mapping from node uid to its state object.
    :param resulting_end_state: uid of the state the terminate action leads to.
    :returns: the same path, extended in place (distance is not counted).
    """
    assert path.nodes
    last_node = path.nodes[-1]
    last_node_o = uid_state_map[last_node]
    # Bug fix: the message previously interpolated last_node_o, which is None
    # exactly when this assertion fires — report the uid that was looked up.
    assert last_node_o is not None, f"Did not find an object for node {last_node}"
    resulting_end_state_o = uid_state_map[resulting_end_state]
    assert resulting_end_state_o is not None, f"Did not find an object for node {resulting_end_state}"
    trans = Transition.construct_terminate_transition(
        source_state_o=last_node_o, resulting_state_o=resulting_end_state_o)
    # Meta data carries the transition on the first hop; the end node gets none.
    meta_data = [MetaData(data={metadata.META_DATA_TRANSITION: trans}), MetaData()]
    path.append_path(nodes=[last_node, resulting_end_state],
                     meta_data_elements=meta_data, count_dist=False)
    return path
def main(self):
    """Filter particles/micrographs by astigmatism and (optionally) resolution."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    print("Selecting particles/micrographs from star file...")
    star = MetaData(args.i)
    table_name = "data_particles" if star.version == "3.1" else "data_"
    labels = star.getLabels(table_name)
    if ("rlnDefocusU" not in labels) or ("rlnDefocusV" not in labels):
        self.error(
            "No labels rlnDefocusU or rlnDefocusV found in Input file.")
    if ("rlnFinalResolution" not in labels) and (args.res > 0):
        # Resolution filtering requires rlnFinalResolution; disable it if absent.
        print(
            "No label rlnFinalResolution found in input file. Switching off resolution filtering..."
        )
        args.res = 0
    selected = list(self.selParticles(self.get_particles(star), args.astg, args.res))
    result = MetaData()
    if star.version == "3.1":
        result.version = "3.1"
        result.addDataTable("data_optics")
        result.addLabels("data_optics", star.getLabels("data_optics"))
        result.addData("data_optics", getattr(star, "data_optics"))
    result.addDataTable(table_name)
    result.addLabels(table_name, star.getLabels(table_name))
    result.addData(table_name, selected)
    result.write(args.o)
    print("New star file %s created. Have fun!" % args.o)
def main(self):
    """Apply a math operation to a particle label, optionally on a selected subset."""
    self.define_parser()
    args = self.parser.parse_args()
    compValue, selValue, rangeHi, rangeLo, rangeSel = self.validate(args)
    if args.sellb == "None":
        print("Performing math on all particles from star file...")
    elif rangeSel:
        print(
            "Performing math on particles where %s is in range <%s, %s>."
            % (args.sellb, rangeLo, rangeHi))
    else:
        print("Performing math on particles where %s is %s %s." %
              (args.sellb, args.selop, selValue))
    star = MetaData(args.i)
    table_name = "data_particles" if star.version == "3.1" else "data_"
    if args.lb not in star.getLabels(table_name):
        self.error("No label " + args.lb + " found in Input file.")
    modified = list(
        self.mathParticles(self.get_particles(star), args.lb, args.op,
                           compValue, args.selop, args.sellb, selValue,
                           rangeHi, rangeLo, rangeSel))
    result = MetaData()
    if star.version == "3.1":
        result.version = "3.1"
        result.addDataTable("data_optics")
        result.addLabels("data_optics", star.getLabels("data_optics"))
        result.addData("data_optics", getattr(star, "data_optics"))
    result.addDataTable(table_name)
    result.addLabels(table_name, star.getLabels(table_name))
    result.addData(table_name, modified)
    result.write(args.o)
    print("New star file %s created. Have fun!" % args.o)
def __init__(self, file_name=None, file_type=None):
    """Create an empty container and optionally load data from file_name."""
    # Start with an empty dataset plus fresh metadata/header; load() fills them.
    self.__header = {}
    self.__metadata = MetaData()
    self.__dataset = np.ndarray((0, 0), dtype=float)
    if file_name is not None:
        self.load(file_name, file_type)
def main():
    """Query sensor readings from the IDEAL database and store them locally.

    Selects electric sensors belonging to gold homes, stores their readings
    through IdealCSV2Hdf5, then reads them back as a sanity check and logs the
    total number of readings.
    """
    # Set up logging to a dedicated file.
    logger = logging.getLogger('store_data_locally')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler('store_data_locally.log')
    fh.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # Get arguments.
    parser = argparse.ArgumentParser(
        description='Query sensor readings from the IDEAL database'
        'and store locally.')
    parser.add_argument('--dataset_path',
                        help='directory of the original IDEAL dataset')
    parser.add_argument('--data_path', default=LOCAL_DATA_DIR,
                        help='directory to store data')
    args = parser.parse_args()

    # Store metadata locally.
    converter = IdealCSV2Hdf5(args.dataset_path, data_dir=args.data_path)
    converter.store_metadata()
    with MetaDataStore(data_dir=args.data_path) as s:
        metadata = MetaData(s)

    # Get relevant sensorids: electric sensors in gold homes.
    sensors = metadata.sensor_merged()
    indices = pd.Series([False] * sensors.shape[0], index=sensors.index.copy())
    indices = indices | sensors.sensorid.isin(metadata.electric_sensors())
    indices = indices & sensors.homeid.astype(int).isin(metadata.gold_homes())
    sensorids = sensors.sensorid[indices]
    sensorids_to_store = sensorids
    print('Query and store readings from {0} sensors'.format(
        len(sensorids_to_store)))
    # Fix: reuse the converter created above instead of constructing a new
    # IdealCSV2Hdf5 on every loop iteration (the arguments never change).
    for idx, sensorid in enumerate(sensorids_to_store):
        logger.info('({0}/{1}) Sensorid: {2}'.format(idx + 1,
                                                     len(sensorids_to_store),
                                                     sensorid))
        converter.store_readings(sensorid)

    # Try and read stored data back; log the total reading count.
    readings_store = ReadingDataStore(data_dir=args.data_path)
    readings_count = 0
    for idx, sensorid in enumerate(sensorids):
        readings = readings_store.get_sensor_readings(sensorid)
        readings_count += len(readings)
    logger.info('Total readings : {0}'.format(readings_count))
def test_is_equation_valid(equation, is_expected_valid):
    """Validate an equation string after stripping the legal white spaces."""
    metadata = MetaData(OPERATORS_DICTIONARY)
    # The validator expects an equation string with legal whitespace removed.
    metadata.equation_string = remove_white_spaces(equation, LEGAL_WHITE_SPACES)
    validator = StringValidator(metadata)
    assert validator.is_equation_valid() == is_expected_valid
def main(self):
    """Rebin particles by a factor, correcting origins/pixel size when present."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    print("Binning correct input star file. Using binning factor %s." %
          str(args.bin_factor))
    star = MetaData(args.i)
    particles = self.get_particles(star)
    # Only correct particle shifts / pixel size when the labels exist.
    correctOrigin = hasattr(particles[0], 'rlnOriginX') and hasattr(particles[0], 'rlnOriginY')
    if not correctOrigin:
        print("Note: rlnOriginX or rlnOriginY not found in input star file. Not correcting for particle shift.")
    correctApix = hasattr(particles[0], 'rlnDetectorPixelSize')
    if not correctApix:
        print("Note: rlnDetectorPixelSize not found in input star file. Not correcting for pixel size.")
    binned = list(
        self.binParticles(particles, args.bin_factor, correctOrigin,
                          correctApix, args.suf_orig, args.suf_new))
    result = MetaData()
    if star.version == "3.1":
        result.version = "3.1"
        result.addDataTable("data_optics")
        result.addLabels("data_optics", star.getLabels("data_optics"))
        result.addData("data_optics", getattr(star, "data_optics"))
        table_name = "data_particles"
    else:
        table_name = "data_"
    result.addDataTable(table_name)
    result.addLabels(table_name, star.getLabels(table_name))
    result.addData(table_name, binned)
    result.write(args.o)
    print("New star file %s created. Have fun!" % args.o)
def main(self):
    """Report the apparent Cs and corrected pixel size for every optics group.

    Requires a RELION 3.1 star file with rlnEvenZernike (i.e. after a 4th-order
    aberration CTF refinement). Prints one table row per optics group plus the
    averages.
    """
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    md = MetaData(args.i)
    if md.version == "3.1":
        iLabels = md.getLabels("data_optics")
    else:
        self.error("Relion 3.1 star file is needed as input.")
    if "rlnEvenZernike" not in iLabels:
        self.error(
            "Zernike 4th order polynomials are not present in the STAR file. Please do a 4th order aberration CTF refinement first."
        )
    # create output header
    print("| Optics group | Apparent Cs [mm] | realApix [A] |")
    print("|------------------------------------------------|")
    apixSum = 0.0
    csSum = 0.0
    opticsGroupNr = 0
    for optic_group in md.data_optics:
        # Z(4,0) is the 7th value of the comma-separated rlnEvenZernike list.
        z40 = float(optic_group.rlnEvenZernike.split(",")[6])
        csTrue = optic_group.rlnSphericalAberration
        nomPixSize = optic_group.rlnImagePixelSize
        # note wavelength is for relativistically corrected accelerating voltage (i.e. 388.06 kV, 239.14 kV and 109.78 kV)
        if optic_group.rlnVoltage == 300:
            waveLength = 0.019687
        elif optic_group.rlnVoltage == 200:
            waveLength = 0.025079
        elif optic_group.rlnVoltage == 100:
            waveLength = 0.037014
        else:
            self.error(
                "Only 100, 200 and 300 kV acceleration voltages are supported."
            )
        # Apparent Cs from the true Cs plus the Z(4,0) contribution
        # (the 1e-7 factor presumably converts the Zernike coefficient's
        # units to mm — TODO confirm against the derivation).
        csApparent = csTrue + (12 * z40 * 1e-7) / (pi * waveLength**3)
        # Corrected pixel size scales with the 4th root of the Cs ratio.
        realPixSize = nomPixSize * (csTrue / csApparent)**0.25
        print("| %2d | %0.2f | %0.3f |" %
              (optic_group.rlnOpticsGroup, csApparent, realPixSize))
        opticsGroupNr += 1
        apixSum += realPixSize
        csSum += csApparent
    # Show average values
    print("|------------------------------------------------|")
    print("| Average | %0.2f | %0.3f |" %
          (csSum / opticsGroupNr, apixSum / opticsGroupNr))
    print("|------------------------------------------------|")
def main(self):
    """Convert a RELION 3.1 star file back to RELION 3.0 format."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    star = MetaData(args.i)
    if star.version != "3.1":
        self.error("Input file '%s' is not RELION 3.1 format." % args.i)
    print("Reading in input star file.....")
    particles = self.get_particles(star)
    optic_groups = self.get_optic_groups(star)
    print("Total %s particles in input star file." % str(len(particles)))
    print("Total %s optic groups found in input star file." %
          str(len(optic_groups)))
    converted = list(self.rel30format(particles, optic_groups))
    result = MetaData()
    table_name = "data_"
    result.addDataTable(table_name)
    result.addLabels(table_name, star.getLabels("data_particles"))
    # RELION 3.0 stores optics values per particle, so add the legacy labels.
    result.addLabels(table_name, [
        'rlnVoltage', 'rlnSphericalAberration', 'rlnAmplitudeContrast',
        'rlnMagnification', 'rlnDetectorPixelSize', 'rlnOriginX',
        'rlnOriginY', 'rlnBeamTiltClass'
    ])
    # Drop the 3.1-only labels.
    result.removeLabels(
        "data_", ['rlnOpticsGroup', 'rlnOriginXAngst', 'rlnOriginYAngst'])
    result.addData(table_name, converted)
    result.write(args.o)
    print("New star file %s in RELION 3.0 format created. Have fun!" % args.o)
def main(self):
    """Add the labels required for helical refinement to a star file."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    print("Modifying star file to be compatible with helix refinement.")
    star = MetaData(args.i)
    table_name = "data_particles" if star.version == "3.1" else "data_"
    existing = star.getLabels(table_name)
    # Add each helix-specific label only if it is not already present.
    for label in ('rlnAnglePsiFlipRatio', 'rlnHelicalTubeID',
                  'rlnHelicalTrackLength'):
        if label not in existing:
            star.addLabels([label])
    result = MetaData()
    if star.version == "3.1":
        result.version = "3.1"
        result.addDataTable("data_optics")
        result.addLabels("data_optics", star.getLabels("data_optics"))
        result.addData("data_optics", getattr(star, "data_optics"))
    result.addDataTable(table_name)
    result.addLabels(table_name, star.getLabels(table_name))
    particles = self.get_particles(star)
    self.helixParticles(particles)
    result.addData(table_name, particles)
    result.write(args.o)
    print("New star file %s created. Have fun!" % args.o)
def main(self):
    """Split particles into optics groups derived from their file names."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    star = MetaData(args.i)
    # Create the output star file (always 3.1 format).
    result = MetaData()
    result.version = "3.1"
    result.addDataTable("data_optics")
    result.addLabels("data_optics", star.getLabels("data_optics"))
    result.addDataTable("data_particles")
    result.addLabels("data_particles", star.getLabels("data_particles"))
    print("Reading in input star file.....")
    particles = self.get_particles(star)
    print("Total %s particles in input star file. \nAdding rlnOpticsGroup." %
          str(len(particles)))
    new_particles, group_names = self.addOpticGroupsToParticles(
        particles, args.word_count)
    # Clone the first existing optics group as a template for each new group.
    template = star.data_optics[0]
    optics_groups = []
    for group_nr, group_name in enumerate(group_names, start=1):
        group = deepcopy(template)
        group.rlnOpticsGroup = group_nr
        group.rlnOpticsGroupName = "opticsGroup_" + str(group_name)
        optics_groups.append(group)
    result.addData("data_optics", optics_groups)
    result.addData("data_particles", new_particles)
    result.write(args.o)
    print("New star file %s created. Have fun!" % args.o)
def main():
    """List available OODT workflow events and build a sample metadata object."""
    # Create instance pointed at the local workflow manager.
    oodt = OODTWorkFlowWrapper("http://localhost:9200")
    # Get event info.
    events = oodt.getEventNames()
    # Create metadata object to invoke an event.
    met = MetaData()
    met.addMetaData("hello", "world")
    # Bug fix: this was a Python 2 print statement (`print 'available
    # events:', events`), a SyntaxError under Python 3, which the rest of
    # the file uses. Converted to a print() call with identical output.
    print('available events:', events)
def main(self):
    """Select particles whose label value matches a comparison, range, or percentile."""
    self.define_parser()
    args = self.parser.parse_args()
    compValue, rangeHi, rangeLo, rangeSel, prctl_l, prctl_h = self.validate(
        args)
    if rangeSel:
        # Fix: removed the duplicated word ("particles particles") from both
        # user-facing messages.
        print(
            "Selecting particles where %s is in range <%s, %s>."
            % (args.lb, rangeLo, rangeHi))
    elif args.prctl_l == "-1" and args.prctl_h == "-1":
        print("Selecting particles where %s is %s %s." %
              (args.lb, args.op, compValue))
    md = MetaData(args.i)
    new_particles = []
    particles = self.get_particles(md)
    new_particles.extend(
        self.selParticles(particles, args.lb, args.op, compValue, rangeHi,
                          rangeLo, rangeSel, prctl_l, prctl_h))
    mdOut = MetaData()
    # RELION 3.1 files carry a separate optics table that is copied through.
    if md.version == "3.1":
        mdOut.version = "3.1"
        mdOut.addDataTable("data_optics")
        mdOut.addLabels("data_optics", md.getLabels("data_optics"))
        mdOut.addData("data_optics", getattr(md, "data_optics"))
        particleTableName = "data_particles"
    else:
        particleTableName = "data_"
    mdOut.addDataTable(particleTableName)
    mdOut.addLabels(particleTableName, md.getLabels(particleTableName))
    mdOut.addData(particleTableName, new_particles)
    mdOut.write(args.o)
    print("New star file %s created. Have fun!" % args.o)
def lambda_handler(event, contex):
    """AWS Lambda entry point: build metadata for the event and return it as a string.

    Returns None when the event is empty/falsy; otherwise guarantees the
    returned dict-string contains a truthy 'output' entry.
    """
    if not event:
        return
    meta = MetaData(event=event)
    meta.metadata()
    response = json.loads(meta.to_json())
    # Ensure an 'output' marker is present before returning.
    if not response.get('output'):
        response['output'] = 1
    return str(response)
def __init__(self, nodes=None, meta_data_elements: List[MetaData] = None):
    """Initialize a path from nodes and a parallel list of meta data elements.

    Either both nodes and meta_data_elements are given (same length), or
    neither is.
    """
    if nodes is None:
        # No nodes given: meta data must not be given either.
        assert meta_data_elements is None
        nodes = []
        meta_data_elements = [MetaData() for _ in nodes]
    else:
        assert meta_data_elements is not None
        assert len(nodes) == len(meta_data_elements)
    self.nodes: List[str] = []
    self.actual_length = 0
    self.meta_data: List[MetaData] = []
    self.append_path(nodes=nodes, meta_data_elements=meta_data_elements)
    self.trace = None
def prepend_path(self, nodes: List[str], meta_data_elements: List[MetaData] = None):
    """Prepend nodes (and their meta data) in front of this path.

    The last prepended node must equal the current first node, so the overlap
    node is not duplicated. The distance counter is intentionally not updated.
    """
    assert self.nodes
    assert self.nodes[0] == nodes[-1]
    # Keep everything except the overlapping last node.
    overlap = len(nodes) - 1
    self.nodes = nodes[:overlap] + self.nodes
    if meta_data_elements is None:
        meta_data_elements = [MetaData() for _ in nodes]
    assert len(nodes) == len(meta_data_elements)
    self.meta_data = meta_data_elements[:overlap] + self.meta_data
def main(self):
    """Pick one random symmetry copy per particle and write a new star file."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    star = MetaData(args.i)
    print("Reading in input star file.....")
    particles = self.get_particles(star)
    print(
        "Total %s particles in input star file. \nSelecting random particles from their symmetry copies."
        % str(len(particles)))
    selected = list(self.randParticles(particles))
    result = MetaData()
    if star.version == "3.1":
        result.version = "3.1"
        result.addDataTable("data_optics")
        result.addLabels("data_optics", star.getLabels("data_optics"))
        result.addData("data_optics", getattr(star, "data_optics"))
        table_name = "data_particles"
    else:
        table_name = "data_"
    result.addDataTable(table_name)
    result.addLabels(table_name, star.getLabels(table_name))
    result.addData(table_name, selected)
    result.write(args.o)
    print("New star file %s created. Have fun!" % args.o)
def main(self):
    """Select particles whose Euler angles fall within the given ranges."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    print("Selecting particles from star file...")
    star = MetaData(args.i)
    selected = list(
        self.selParticles(self.get_particles(star), args.rot_min, args.rot_max,
                          args.tilt_min, args.tilt_max, args.psi_min,
                          args.psi_max))
    result = MetaData()
    if star.version == "3.1":
        result.version = "3.1"
        result.addDataTable("data_optics")
        result.addLabels("data_optics", star.getLabels("data_optics"))
        result.addData("data_optics", getattr(star, "data_optics"))
        table_name = "data_particles"
    else:
        table_name = "data_"
    result.addDataTable(table_name)
    result.addLabels(table_name, star.getLabels(table_name))
    result.addData(table_name, selected)
    result.write(args.o)
    print("New star file %s created. Have fun!" % args.o)
def test_create_postfix_list_convertor(equation, expected):
    """Check infix-to-postfix conversion on a tokenized equation."""
    metadata = MetaData(OPERATORS_DICTIONARY)
    # Tokenize the raw equation string first.
    metadata.equation_list = StringToListConverter().convert_string_to_list(equation)
    convertor = InfixToPostfixConvertor(metadata.equation_list,
                                        OPERATORS_DICTIONARY)
    assert convertor.infix_to_postfix() == expected, "failed item: " + equation
def test_create_organize_list_convertor(equation, expected):
    """Check list organization on a tokenized equation."""
    metadata = MetaData(OPERATORS_DICTIONARY)
    # Tokenize the raw equation string first.
    metadata.equation_list = StringToListConverter().convert_string_to_list(equation)
    convertor = ConvertListToOrganizedList(metadata)
    assert convertor.create_organize_list() == expected, \
        "failed to check item: " + equation
def get_flatted_representation(self, meta_data=None):
    """
    Flatten every sequence element into combined state/transition lists.

    Example (queued transitions become self-transitions):

    Trans 1:          A -> B
    Trans 2:          B -> C
    Trans 3 Q-Start:  C -> D
    Trans 4:          C -> D
    Trans 5:          C -> D
    Trans 6 Q-End:    C -> D
    Trans 7:          D -> E

    :param meta_data: optional MetaData forwarded to each sequence element.
    :returns: states = [A, B, C, D, D, D, D, E]
              transitions = [A -> B, B -> C, C -> D, D -> D, D -> D, D -> D, D -> E]
              and the merged flattened MetaData.
    """
    # Bug fix: the default used to be `meta_data=MetaData()`, a mutable default
    # argument shared (and potentially mutated) across all calls. Use None and
    # create a fresh instance per call instead.
    if meta_data is None:
        meta_data = MetaData()
    transitions = []
    states = []
    flatted_meta_data = MetaData()
    for seq_elem in self.seq_elements:
        tmp_states, tmp_transitions, tmp_meta_data = seq_elem.get_flatted_representation(
            meta_data=meta_data)
        states.extend(tmp_states)
        transitions.extend(tmp_transitions)
        flatted_meta_data.merge(tmp_meta_data)
    return states, transitions, flatted_meta_data
def __init__(self, file_name=None, file_type=None):
    """Initialize a BData instance.

    Parameters
    ----------
    file_name : str, optional
        File which contains BData (default: None)
    file_type : {'Npy', 'Matlab', 'HDF5'}
        File type (default: None)
    """
    # Start with an empty dataset and fresh metadata; load() populates them.
    self.metaData = MetaData()
    self.dataSet = np.ndarray((0, 0), dtype=float)
    if file_name is not None:
        self.load(file_name, file_type)
def test_solve_equation(equation, expected):
    """End-to-end: clean, tokenize, and solve an equation."""
    metadata = MetaData(OPERATORS_DICTIONARY)
    # Strip the legal white spaces before tokenizing.
    cleaned = remove_white_spaces(equation, LEGAL_WHITE_SPACES)
    metadata.equation_list = StringToListConverter().convert_string_to_list(cleaned)
    assert Model().solve_equation(metadata).result == expected, \
        "failed to check item: " + equation
def main(self):
    """Print statistics for the requested particle labels (or all labels)."""
    self.define_parser()
    args = self.parser.parse_args()
    self.validate(args)
    star = MetaData(args.i)
    if args.lb == "ALL":
        table_name = "data_particles" if star.version == "3.1" else "data_"
        labels = star.getLabels(table_name)
    else:
        # A space-separated list of labels was given on the command line.
        labels = args.lb.split(" ")
    self.statsOnParticles(self.get_particles(star), labels)
def generate_cache_data(data_name, algorithm_name):
    """Precompute perturbation data for a graph/algorithm pair and cache it as JSON.

    Writes one detail file per perturbation plus a single overview file into
    ../cached_data/.
    """
    print(f'Preprocessing the data {data_name} - {algorithm_name}...')
    print("graph data loading... [0/6]")
    graph_object, label_dict_set, labels = load_data_from_text(
        data_name=data_name)
    print("done.")
    # Idiom: consistent f-strings instead of mixed "+"/str() concatenation
    # (output text is unchanged).
    print(f"The node size {len(graph_object.nodes)}")
    print(f"The edge size {len(graph_object.edges)}")
    meta_data = MetaData(graph_object=graph_object,
                         label_dict_set=label_dict_set,
                         algorithm_name=algorithm_name,
                         labels=labels)
    # Hoist the common output-path prefix used by every cache file.
    prefix = f"../cached_data/{data_name}_{algorithm_name}"
    ###################################################################
    # overview data file format
    ###################################################################
    overview_data = []
    print("data caching... [6/6]")
    for perturbation in meta_data.perturbations:
        # Keep only the summary fields in the overview file.
        overview_data.append({
            "remove_id": perturbation["remove_id"],
            "vul_percentile": perturbation["vul_percentile"],
            "rank": perturbation["rank"],
            "node_influence": perturbation["node_influence"],
            "label": perturbation["label"],
            "label_influence": perturbation["label_influence"],
        })
        # One detail file per perturbation.
        with open(f'{prefix}_detail_{perturbation["remove_id"]}.json',
                  "w") as jf:
            json.dump(perturbation, jf, cls=MetaDataEncoder)
    with open(f"{prefix}_overview.json", "w") as jf:
        json.dump({"perturbations": overview_data,
                   "nodes": meta_data.nodes,
                   "labels": meta_data.labels,
                   "labelNames": meta_data.labelNames,
                   "vulnerabilityList": meta_data.vulnerabilityList,
                   "perturbationSummary": meta_data.perturbationSummary},
                  jf, cls=MetaDataEncoder)
    print("data cached.")