def __call__(self, machine_graph, file_path):
    """ Convert a machine graph to JSON and write it to *file_path*.

    :param machine_graph: The graph to convert
    :param file_path: Where to write the JSON
    :return: the file path written, a map from JSON id to vertex, and a
        map from JSON id to partition
    """
    progress = ProgressBar(
        machine_graph.n_vertices + 1, "Converting to JSON graph")

    # Top-level JSON structure: per-vertex resources, then edges.
    json_graph = OrderedDict()
    vertices_obj = OrderedDict()
    json_graph["vertices_resources"] = vertices_obj
    edges_obj = OrderedDict()
    json_graph["edges"] = edges_obj

    # Lookups from generated JSON identifiers back to graph objects;
    # filled in by _convert_vertex and handed back to the caller.
    vertex_by_id = OrderedDict()
    partition_by_id = OrderedDict()

    for vertex in progress.over(machine_graph.vertices, False):
        self._convert_vertex(
            vertex, vertex_by_id, vertices_obj, edges_obj, machine_graph,
            partition_by_id)

    with open(file_path, "w") as file_to_write:
        json.dump(json_graph, file_to_write)
    progress.update()

    # Check the produced structure against the schema.
    file_format_schemas.validate(json_graph, "machine_graph.json")
    progress.end()

    return file_path, vertex_by_id, partition_by_id
def write_json(machine, json_folder, progress=None):
    """ Runs the code to write the machine in Java readable JSON.

    :param ~spinn_machine.Machine machine: Machine to convert
    :param str json_folder: the folder to which the JSON are being written
    :param progress: Progress Bar if one used
    :type progress: ~spinn_utilities.progress_bar.ProgressBar or None
    :return: the name of the generated file
    :rtype: str
    """
    file_path = os.path.join(json_folder, MACHINE_FILENAME)
    # Only generate the file if it is not already there; all work that
    # touches json_obj must stay inside this guard, otherwise validate()
    # and dump() raise NameError when the file already exists.
    if not os.path.exists(file_path):
        json_obj = to_json(machine)
        if progress:
            progress.update()

        # validate the schema before writing the file
        file_format_schemas.validate(json_obj, MACHINE_FILENAME)
        if progress:
            # was progress.end(): ending here finished the bar too early
            # and caused end() to be called twice
            progress.update()

        # dump to json file
        with open(file_path, "w") as f:
            json.dump(json_obj, f)

    # complete progress bar exactly once
    if progress:
        progress.end()
    return file_path
def __call__(self, placements, file_path):
    """ Write the memory placements out as JSON.

    :param placements: the memory placements object
    :param file_path: the file path for the placements.json
    :return: file path for the placements.json and a map from JSON
        vertex id to vertex
    """
    progress = ProgressBar(
        placements.n_placements + 1, "converting to JSON placements")

    placements_json = dict()
    vertex_by_id = dict()
    for placement in progress.over(placements, False):
        # Key each placement on the stable identifier of its vertex.
        key = ident(placement.vertex)
        vertex_by_id[key] = placement.vertex
        placements_json[key] = [placement.x, placement.y]

    # dump dict into json file
    with open(file_path, "w") as file_to_write:
        json.dump(placements_json, file_to_write)
    progress.update()

    # validate the schema
    file_format_schemas.validate(placements_json, "placements.json")
    progress.end()

    # return the file format
    return file_path, vertex_by_id
def do_convert(machine, file_path, progress=None):
    """ Runs the code to write the machine in Java readable JSON.

    :param machine: Machine to convert
    :type machine: :py:class:`spinn_machine.machine.Machine`
    :param file_path: Location to write file to. Warning will overwrite!
    :type file_path: str
    :return: the path of the written file
    :rtype: str
    """
    json_obj = JsonMachine.to_json(machine)
    if progress:
        progress.update()

    # validate the schema
    file_format_schemas.validate(json_obj, "jmachine.json")
    if progress:
        # was progress.end(): end/update were swapped, ending the bar
        # before the dump and then updating an already-ended bar
        progress.update()

    # dump to json file
    with open(file_path, "w") as f:
        json.dump(json_obj, f)

    # complete progress bar
    if progress:
        progress.end()
    return file_path
def __call__(self, machine_graph, machine, file_path):
    """ Generate the JSON constraints file for a machine graph.

    :param machine_graph: the machine graph
    :param machine: the machine
    :param file_path: where to write the constraints JSON
    :return: the file path written and a map from JSON id to vertex
    """
    progress = ProgressBar(
        machine_graph.n_vertices + 2, "creating JSON constraints")

    constraints = []
    # Reserve the monitor core first, then any extra monitor cores.
    self._add_monitor_core_reserve(constraints)
    progress.update()
    self._add_extra_monitor_cores(constraints, machine)
    progress.update()

    # Walk the graph and collect per-vertex placement constraints.
    vertex_by_id = self._search_graph_for_placement_constraints(
        constraints, machine_graph, machine, progress)

    with open(file_path, "w") as f:
        json.dump(constraints, f)

    # validate the schema
    file_format_schemas.validate(constraints, "constraints.json")

    # complete progress bar
    progress.end()
    return file_path, vertex_by_id
def __call__(self, placements, file_path):
    """ Convert memory placements to a JSON file.

    :param placements: the memory placements object
    :param file_path: the file path for the placements.json
    :return: file path for the placements.json and a map from JSON
        vertex id to vertex
    """
    progress = ProgressBar(
        placements.n_placements + 1, "converting to JSON placements")

    # One JSON entry per placement, keyed by the vertex identifier;
    # keep a reverse lookup so callers can map ids back to vertices.
    json_obj = dict()
    vertex_by_id = dict()
    for placement in progress.over(placements, False):
        vid = ident(placement.vertex)
        vertex_by_id[vid] = placement.vertex
        json_obj[vid] = [placement.x, placement.y]

    # write the JSON out
    with open(file_path, "w") as file_to_write:
        json.dump(json_obj, file_to_write)
    progress.update()

    # check against the placements schema
    file_format_schemas.validate(json_obj, "placements.json")
    progress.end()

    return file_path, vertex_by_id
def __call__(self, placements, file_path):
    """ Convert placements into a JSON core allocations file.

    :param placements: the placements to convert
    :param file_path: where to write the core allocations JSON
    :return: the file path written and a map from JSON id to vertex
    """
    progress = ProgressBar(
        len(placements) + 1, "Converting to JSON core allocations")

    # The allocations object declares its type up front, then one entry
    # per placement (added by _convert_placement).
    allocations = OrderedDict()
    allocations['type'] = "cores"
    vertex_by_id = OrderedDict()
    for placement in progress.over(placements, False):
        self._convert_placement(placement, vertex_by_id, allocations)

    # dump dict into json file
    with open(file_path, "w") as f:
        json.dump(allocations, f)
    progress.update()

    # validate the schema
    file_format_schemas.validate(allocations, "core_allocations.json")

    # complete progress bar
    progress.end()

    # return the file format
    return file_path, vertex_by_id
def test_new_multicast_routing_tables(self):
    """ Build two routing tables, check chip lookup, and round-trip
        them through JSON. """
    key_combo = 0xff35
    mask = 0xffff
    # Idiomatic construction instead of manual append loops.
    proc_ids = list(range(18))
    link_ids = list(range(6))
    multicast_entries1 = MulticastRoutingEntry(
        key_combo, mask, proc_ids, link_ids, True)
    multicast_entries2 = MulticastRoutingEntry(
        key_combo - 1, mask - 1, proc_ids, link_ids, True)
    t1 = UnCompressedMulticastRoutingTable(0, 0, [multicast_entries1])
    t2 = UnCompressedMulticastRoutingTable(1, 0, [multicast_entries2])
    mrt = [t1, t2]
    tables = MulticastRoutingTables(mrt)
    retrieved_tables = tables.routing_tables
    self.assertEqual(len(retrieved_tables), len(mrt))
    for tab in retrieved_tables:
        self.assertIn(tab, mrt)

    # Lookup by chip coordinates; unknown chip yields None.
    self.assertEqual(tables.get_routing_table_for_chip(0, 0), t1)
    self.assertEqual(tables.get_routing_table_for_chip(1, 0), t2)
    self.assertIsNone(tables.get_routing_table_for_chip(2, 0))

    # Round-trip through JSON and verify the lookups survive.
    json_obj = to_json(tables)
    file_format_schemas.validate(json_obj, "routing_tables.json")
    new_tables = from_json(json_obj)
    self.assertEqual(new_tables.get_routing_table_for_chip(0, 0), t1)
    self.assertEqual(new_tables.get_routing_table_for_chip(1, 0), t2)
    self.assertIsNone(new_tables.get_routing_table_for_chip(2, 0))
def __call__(self, machine_graph, machine, file_path):
    """ Write the JSON constraints derived from a machine graph.

    :param machine_graph: the machine graph
    :param machine: the machine
    :param file_path: path of the constraints JSON file to write
    :return: the file path written and a map from JSON id to vertex
    """
    progress = ProgressBar(
        machine_graph.n_vertices + 2, "creating JSON constraints")

    json_obj = list()
    # Monitor core reservation comes first.
    self._add_monitor_core_reserve(json_obj)
    progress.update()
    # Then reservations for any extra monitor cores on the machine.
    self._add_extra_monitor_cores(json_obj, machine)
    progress.update()
    # Finally the per-vertex placement constraints from the graph.
    vertex_by_id = self._search_graph_for_placement_constraints(
        json_obj, machine_graph, machine, progress)

    with open(file_path, "w") as constraints_file:
        json.dump(json_obj, constraints_file)

    # schema check of what was produced
    file_format_schemas.validate(json_obj, "constraints.json")
    progress.end()
    return file_path, vertex_by_id
def do_convert(router_tables, json_folder, progress=None):
    """ Runs the code to write the routing tables in Java readable JSON.

    .. note::
        The docstring previously said "write the machine" — a copy-paste
        from the machine writer; this function writes routing tables.

    :param MulticastRoutingTables router_tables: Routing Tables to convert
    :param str json_folder: the folder to which the JSON files are being
        written
    :param progress: The progress bar, if any
    :type progress: ~spinn_utilities.progress_bar.ProgressBar or None
    :return: the name of the generated file
    :rtype: str
    """
    file_path = os.path.join(json_folder, ROUTING_TABLES_FILENAME)
    json_obj = to_json(router_tables)
    if progress:
        progress.update()

    # validate the schema
    file_format_schemas.validate(json_obj, ROUTING_TABLES_FILENAME)

    # update and complete progress bar
    if progress:
        progress.update()

    # dump to json file
    with open(file_path, "w") as f:
        json.dump(json_obj, f)
    if progress:
        progress.end()
    return file_path
def graph_there_and_back(self, there):
    """ Round-trip a graph through JSON and check it survives intact.

    :param there: the graph to convert to JSON and back
    """
    j_object = graph_to_json(there)
    # removed leftover debug print(j_object): it spammed test output
    file_format_schemas.validate(j_object, MACHINE_GRAPH_FILENAME)
    back = graph_from_json(j_object)
    self.assertEqual(there.n_vertices, back.n_vertices)
    for vertex in there.vertices:
        b_vertex = back.vertex_by_label(vertex.label)
        self._compare_vertex(vertex, b_vertex)
def __call__(self, machine, file_path):
    """ Convert a machine to JSON and write it to *file_path*.

    :param machine: the machine to convert
    :param file_path: where to write the machine JSON
    :return: the file path written
    """
    width = machine.max_chip_x + 1
    height = machine.max_chip_y + 1
    progress = ProgressBar(
        width * height + 2, "Converting to JSON machine")

    # write basic stuff: homogeneous per-chip resources, with dead
    # chips/links and per-chip exceptions filled in below
    json_obj = {
        "width": width,
        "height": height,
        "chip_resources": {
            "cores": CHIP_HOMOGENEOUS_CORES,
            "sdram": CHIP_HOMOGENEOUS_SDRAM,
            "sram": CHIP_HOMOGENEOUS_SRAM,
            "router_entries": ROUTER_HOMOGENEOUS_ENTRIES,
            "tags": CHIP_HOMOGENEOUS_TAGS},
        "dead_chips": [],
        "dead_links": []}

    # handle exceptions (dead chips)
    exceptions = defaultdict(dict)
    for x in range(width):
        for y in progress.over(range(height), False):
            self._add_exceptions(json_obj, machine, x, y, exceptions)
    json_obj["chip_resource_exceptions"] = [
        [x, y, exceptions[x, y]] for x, y in exceptions]
    progress.update()

    # validate the schema BEFORE writing, so a failed validation does not
    # leave an invalid file on disk (matches the other JSON writers)
    file_format_schemas.validate(json_obj, "machine.json")
    progress.update()

    # dump to json file
    with open(file_path, "w") as f:
        json.dump(json_obj, f)

    # update and complete progress bar
    progress.end()
    return file_path
def __call__(self, machine, file_path):
    """ Write a machine out as a JSON file.

    :param machine: the machine to convert
    :param file_path: where to write the machine JSON
    :return: the file path written
    """
    width = machine.max_chip_x + 1
    height = machine.max_chip_y + 1
    progress = ProgressBar(
        width * height + 2, "Converting to JSON machine")

    # Homogeneous resource description; deviations are recorded as
    # per-chip exceptions below.
    json_obj = {
        "width": width,
        "height": height,
        "chip_resources": {
            "cores": CHIP_HOMOGENEOUS_CORES,
            "sdram": CHIP_HOMOGENEOUS_SDRAM,
            "sram": CHIP_HOMOGENEOUS_SRAM,
            "router_entries": ROUTER_HOMOGENEOUS_ENTRIES,
            "tags": CHIP_HOMOGENEOUS_TAGS},
        "dead_chips": [],
        "dead_links": []}

    # Collect dead chips and other per-chip deviations.
    exceptions = defaultdict(dict)
    for x in range(width):
        for y in progress.over(range(height), False):
            self._add_exceptions(json_obj, machine, x, y, exceptions)
    json_obj["chip_resource_exceptions"] = [
        [x, y, exceptions[x, y]] for x, y in exceptions]
    progress.update()

    # write out, then check against the machine schema
    with open(file_path, "w") as machine_file:
        json.dump(json_obj, machine_file)
    progress.update()

    file_format_schemas.validate(json_obj, "machine.json")
    progress.end()
    return file_path
def write_json(machine_graph, json_folder, progress=None):
    """ Runs the code to write the machine graph in Java readable JSON.

    :param MachineGraph machine_graph: The machine_graph to place
    :param str json_folder: The folder to which the JSON are being written

        .. warning::
            Will overwrite existing file in this folder!

    :param ~spinn_utilities.progress_bar.ProgressBar progress:
    :return: the name of the generated file
    :rtype: str
    """
    file_path = os.path.join(json_folder, MACHINE_GRAPH_FILENAME)
    graph_json = graph_to_json(machine_graph)
    if progress:
        progress.update()

    # Schema check; a failure is logged rather than raised so the file
    # still gets written.
    try:
        file_format_schemas.validate(graph_json, MACHINE_GRAPH_FILENAME)
    except ValidationError as ex:
        logger.error("JSON validation exception: {}\n{}",
                     ex.message, ex.instance)
    if progress:
        progress.update()

    # NumpyEncoder handles any numpy scalars in the graph data.
    with open(file_path, "w") as graph_file:
        json.dump(graph_json, graph_file, cls=NumpyEncoder)
    if progress:
        progress.end()
    return file_path
def write_json(partition_to_n_keys_map, json_folder, progress=None):
    """ Runs the code to write the n-keys map in Java readable JSON.

    .. note::
        The docstring previously said "write the machine" — a copy-paste
        from the machine writer; this function writes the n-keys map.

    :param AbstractMachinePartitionNKeysMap partition_to_n_keys_map:
        The number of keys needed for each partition.
    :param str json_folder: the folder to which the JSON are being written
    :param progress: Progress Bar if one used
    :type progress: ~spinn_utilities.progress_bar.ProgressBar or None
    :return: the name of the generated file
    :rtype: str
    """
    file_path = os.path.join(json_folder, N_KEYS_MAP_FILENAME)
    json_obj = partition_to_n_keys_map_to_json(partition_to_n_keys_map)
    if progress:
        progress.update()

    # validate the schema; log but carry on if invalid
    try:
        file_format_schemas.validate(json_obj, N_KEYS_MAP_FILENAME)
    except ValidationError as ex:
        logger.error("JSON validation exception: {}\n{}",
                     ex.message, ex.instance)

    # update and complete progress bar
    if progress:
        progress.update()

    # dump to json file
    with open(file_path, "w") as f:
        json.dump(json_obj, f)
    if progress:
        progress.end()
    return file_path
def __call__(self, machine_graph, plan_n_timesteps, file_path):
    """ Convert a machine graph to JSON and write it to *file_path*.

    :param machine_graph: The graph to convert
    :param plan_n_timesteps: number of timesteps to plan for
    :type plan_n_timesteps: int
    :param file_path: Where to write the JSON
    :return: the file path written, a map from JSON id to vertex, and a
        map from JSON id to partition
    """
    progress = ProgressBar(
        machine_graph.n_vertices + 1, "Converting to JSON graph")

    # write basic stuff
    json_graph = dict()

    # write vertices data
    vertices_resources = dict()
    json_graph["vertices_resources"] = vertices_resources
    # was defaultdict() with no factory, which behaves exactly like a
    # plain dict but misleadingly suggests default-value behaviour
    edges_resources = dict()
    json_graph["edges"] = edges_resources

    vertex_by_id = dict()
    partition_by_id = dict()
    for vertex in progress.over(machine_graph.vertices, False):
        self._convert_vertex(
            vertex, vertex_by_id, vertices_resources, edges_resources,
            machine_graph, plan_n_timesteps, partition_by_id)

    with open(file_path, "w") as f:
        json.dump(json_graph, f)
    progress.update()

    file_format_schemas.validate(json_graph, "machine_graph.json")
    progress.end()
    return file_path, vertex_by_id, partition_by_id
def write_json(placements, json_folder, progress=None):
    """ Runs the code to write the placements in Java readable JSON.

    :param Placements placements: The placements to write
    :param str json_folder: the folder to which the JSON are being written
    :param progress: Progress Bar if one used
    :type progress: ~spinn_utilities.progress_bar.ProgressBar or None
    :return: the name of the generated file
    :rtype: str
    """
    file_path = os.path.join(json_folder, PLACEMENTS_FILENAME)
    placements_json = placements_to_json(placements)
    if progress:
        progress.update()

    # Schema check; a failure is logged rather than raised so the file
    # still gets written.
    try:
        file_format_schemas.validate(placements_json, PLACEMENTS_FILENAME)
    except ValidationError as ex:
        logger.error("JSON validation exception: {}\n{}",
                     ex.message, ex.instance)
    if progress:
        progress.update()

    with open(file_path, "w") as placements_file:
        json.dump(placements_json, placements_file)
    if progress:
        progress.end()
    return file_path
def _validate_file_read_data(
        placements_obj, allocations_obj, constraints_obj):
    """ Check each parsed JSON object against its schema. """
    # verify that the files meet the schemas
    for json_obj, schema_name in (
            (placements_obj, "placements.json"),
            (allocations_obj, "core_allocations.json"),
            (constraints_obj, "constraints.json")):
        validate(json_obj, schema_name)
def _validate_file_read_data(placements_obj, allocations_obj, constraints_obj):
    # Check each parsed JSON object against its schema; validate raises
    # on mismatch, so reaching the end means all three files are valid.
    # verify that the files meet the schemas.
    validate(placements_obj, "placements.json")
    validate(allocations_obj, "core_allocations.json")
    validate(constraints_obj, "constraints.json")