def __call__(cls, *args, **kwargs):
    """Return a (shallow-copied) representation instance for (graph, platform).

    Caches one underlying representation object per
    (representation class, graph, platform) combination and hands out
    shallow copies of it, so callers can attach their own graph,
    platform, and mapper objects without mutating the shared cache
    entry.  ``init_time`` records how long this call took.
    """
    time = timeit.default_timer()
    graph = args[0]
    platform = args[1]
    # Graph and platform objects themselves may not hash usefully;
    # gen_hash derives hashable name-based keys for both.
    graph_names, platform_names = MappingRepresentation.gen_hash(
        graph, platform
    )
    key = (cls, graph_names, platform_names)

    # BUG FIX: the cache was previously probed with the raw
    # (cls, graph, platform) tuple but populated under the hashed
    # (cls, graph_names, platform_names) key, so lookups never hit,
    # changed_parameters() was never consulted, and every call
    # re-initialized the representation from scratch.  Use one
    # consistent key for probe and store.
    different = False
    if key in cls._instances:
        # Re-initialize only if the extra constructor parameters differ
        # from the cached instance's.
        different = cls._instances[key].changed_parameters(*args[2:])
    if key not in cls._instances or different:
        cls._instances[key] = super(MappingRepresentation, cls).__call__(
            *args, **kwargs
        )
        log.info(
            f"Initializing representation {cls} of graph with processes: "
            f"{graph_names} on platform with cores {platform_names}"
        )

    # Hand out a copy so per-caller attributes don't leak into the cache.
    instance = copy(cls._instances[key])
    instance.graph = graph
    instance.platform = platform
    com_mapper = ComFullMapper(graph, platform)
    instance.list_mapper = ProcPartialMapper(graph, platform, com_mapper)
    instance.init_time = timeit.default_timer() - time
    return instance
def __init__(
    self,
    graph,
    platform,
    norm_p,
    extra_dimensions=True,
    extra_dimensions_factor=3,
    ignore_channels=True,
    target_distortion=1.1,
    jlt_tries=10,
    verbose=False,
    disable_embedding_test=False,
):
    """Initialize a metric-space-embedding representation of mappings.

    Builds a finite metric space from the platform's core distance
    matrix and embeds it via ``MetricSpaceEmbedding``; only the
    Euclidean norm (``norm_p == 2``) is supported.

    :param graph: application graph whose processes (and, with
        ``extra_dimensions``, channels) span the embedding dimensions
    :param platform: target platform; may carry a precomputed embedding
        matrix in ``platform.embedding_json``
    :param norm_p: p of the L_p norm; must be 2 (only logged otherwise)
    :param extra_dimensions: add channel dimensions and heterogeneity
        information to the metric
    :param extra_dimensions_factor: scaling factor for those extra
        dimensions
    :param ignore_channels: ignore channels when approximating mappings
        (disabling this may yield invalid mappings, see warning below)
    :param target_distortion: distortion bound passed to the embedding
    :param jlt_tries: attempts for the Johnson-Lindenstrauss transform
    :param verbose: forwarded to the embedding
    :param disable_embedding_test: skip validation of a precomputed
        embedding matrix
    """
    # todo: make sure the correspondence of cores is correct!
    M_matrix, self._arch_nc, self._arch_nc_inv = arch_to_distance_metric(
        platform, heterogeneity=extra_dimensions
    )
    self._M = FiniteMetricSpace(M_matrix)
    self.graph = graph
    self.platform = platform
    self.extra_dims = extra_dimensions
    self.jlt_tries = jlt_tries
    self.target_distortion = target_distortion
    self.ignore_channels = ignore_channels
    self.verbose = verbose
    # Prefer a platform-provided precomputed embedding matrix, if any.
    if hasattr(platform, "embedding_json"):
        self.embedding_matrix_path = platform.embedding_json
    else:
        self.embedding_matrix_path = None
    if not self.ignore_channels:
        log.warning(
            "Not ignoring channels might lead"
            " to invalid mappings when approximating."
        )
    self.extra_dims_factor = extra_dimensions_factor
    self._d = len(graph.processes())
    if self.extra_dims:
        # Remember where process dimensions end (_split_d) and the
        # processor count (_split_k) before appending channel dims.
        self._split_d = self._d
        self._split_k = len(platform.processors())
        self._d += len(graph.channels())
    self.p = norm_p
    com_mapper = ComFullMapper(graph, platform)
    self.list_mapper = ProcPartialMapper(graph, platform, com_mapper)
    init_app_ncs(self, graph)
    if self.p != 2:
        # NOTE(review): this only logs an error; initialization still
        # proceeds with the unsupported norm — confirm intended.
        log.error(
            f"Metric space embeddings only supports p = 2."
            f" For p = 1, for example, finding such an embedding"
            f" is NP-hard (See Matousek, J., Lectures on Discrete"
            f" Geometry, Chap. 15.5)"
        )
    MetricSpaceEmbedding.__init__(
        self,
        self._M,
        self._d,
        jlt_tries=self.jlt_tries,
        embedding_matrix_path=self.embedding_matrix_path,
        target_distortion=self.target_distortion,
        verbose=verbose,
        disable_embedding_test=disable_embedding_test,
    )
    log.info(f"Found embedding with distortion: {self.distortion}")
def mapping(graph, platform):
    """Fixture: a mapping placing process "a" on PE 0 and "b" on PE 1."""
    full_mapper = ComFullMapper(graph, platform)
    partial_mapper = ProcPartialMapper(graph, platform, full_mapper)
    # PE index per process, in process order: "a" -> 0, "b" -> 1
    pe_assignment = [0, 1]
    return partial_mapper.generate_mapping(pe_assignment)
def test_allEquivalent(platform, graph):
    """Both symmetry backends (mpsym and pure python) must enumerate the
    same number of equivalent mappings."""
    com_mapper = ComFullMapper(graph, platform)
    mapper = ProcPartialMapper(graph, platform, com_mapper)
    mapping = mapper.generate_mapping([0, 1])

    def check(representation):
        # 12 equivalent mappings overall, 6 when restricted to support
        equivalents = list(representation.allEquivalent(mapping))
        assert len(equivalents) == 12
        support_only = list(
            representation.allEquivalent(mapping, only_support=True)
        )
        assert len(support_only) == 6

    check(SymmetryRepresentation(graph, platform))
    check(SymmetryRepresentation(graph, platform, disable_mpsym=True))
def __init__(
    self,
    graph,
    platform,
    channels=False,
    periodic_boundary_conditions=False,
    norm_p=2,
):
    """Initialize the representation for the given graph and platform.

    :param graph: application graph to be mapped
    :param platform: target platform
    :param channels: whether channels are included in the representation
    :param periodic_boundary_conditions: use periodic boundary conditions
    :param norm_p: p of the L_p norm used by distance computations
    """
    self.graph = graph
    self.platform = platform
    self.p = norm_p
    self.channels = channels
    self.boundary_conditions = periodic_boundary_conditions
    # number of processes in the application graph
    self.num_procs = len(list(self.graph._processes.keys()))
    full_mapper = ComFullMapper(graph, platform)
    self.list_mapper = ProcPartialMapper(graph, platform, full_mapper)
def pareto_mappings(platform, graph):
    """Fixture: five mappings annotated with (exec_time, energy) metadata."""
    full_mapper = ComFullMapper(graph, platform)
    mapper = ProcPartialMapper(graph, platform, full_mapper)
    # (PE assignment, execution time, energy) for each mapping
    annotated = [
        ([0, 0], 10.2, 21.45),
        ([0, 5], 5.2, 31.15),
        ([0, 1], 9.7, 23.45),
        ([4, 4], 6.0, 35.45),
        ([4, 5], 4.32, 39.1),
    ]
    mappings = []
    for pe_list, exec_time, energy in annotated:
        mapping = mapper.generate_mapping(pe_list)
        mapping.metadata.exec_time = exec_time
        mapping.metadata.energy = energy
        mappings.append(mapping)
    return mappings
def test_mapping_table_writer_with(platform, graph, tmpdir, expected_csv):
    """The context-manager form of MappingTableWriter must produce a CSV
    byte-identical to the reference file."""
    output_file = Path(tmpdir).joinpath("output_table.csv")
    com_mapper = ComFullMapper(graph, platform)
    mapper = ProcPartialMapper(graph, platform, com_mapper)

    # (PE assignment, execution time, energy) per mapping, in write order
    annotated = [
        ([0, 0], 10.2, 21.45),
        ([0, 5], 5.2, 31.15),
    ]
    mappings = []
    for pe_list, exec_time, energy in annotated:
        mapping = mapper.generate_mapping(pe_list)
        mapping.metadata.exec_time = exec_time
        mapping.metadata.energy = energy
        mappings.append(mapping)

    attributes = {"num_resources": num_resources}
    with MappingTableWriter(
        platform, graph, output_file, attributes=attributes
    ) as writer:
        writer.write_header()
        for mapping in mappings:
            writer.write_mapping(mapping)

    assert filecmp.cmp(output_file, expected_csv, shallow=False)
def __init__(
    self,
    platform,
    graph,
    path,
    process_prefix="t_",
    process_suffix="",
    metadata_exec_time="executionTime",
    metadata_energy="dynamicEnergy",
    attributes=None,
):
    """Read a mapping table CSV and prepare mapping reconstruction.

    :param platform: target platform the mappings refer to
    :param graph: application graph the mappings refer to
    :param path: path of the CSV file to read
    :param process_prefix: prefix of per-process columns in the table
    :param process_suffix: suffix of per-process columns in the table
    :param metadata_exec_time: column name for execution time metadata
    :param metadata_energy: column name for energy metadata
    :param attributes: extra attribute columns to read (default: none)
    """
    self.platform = platform
    self.graph = graph
    self.path = path
    self._process_prefix = process_prefix
    self._process_suffix = process_suffix
    self._metadata_exec_time = metadata_exec_time
    self._metadata_energy = metadata_energy
    # avoid sharing a mutable default across instances
    self._attributes = [] if attributes is None else attributes
    # raw rows parsed from the CSV
    self._data = []
    # mappings reconstructed from the parsed rows (filled later)
    self.mappings = None
    self.com_mapper = ComFullMapper(graph, platform)
    self.mapper = ProcPartialMapper(graph, platform, self.com_mapper)
    self._process_names = [p.name for p in self.graph.processes()]
    # processor name -> index, following platform iteration order
    self._processor_numbers = {
        pe.name: i for i, pe in enumerate(self.platform.processors())
    }
    self._read_csv()
def __init__(
    self,
    graph,
    platform,
    channels=False,
    periodic_boundary_conditions=False,
    norm_p=2,
    canonical_operations=True,
    disable_mpsym=False,
    disable_symmetries_test=False,
):
    """Initialize the symmetry-based representation of mappings.

    Obtains the automorphism group of the architecture graph, trying in
    order: a platform-provided group (``platform.ag``), a JSON file
    (``platform.ag_json``), computation via pynauty wrapped in mpsym,
    and finally a pure-python ``PermutationGroup`` when mpsym is not
    importable or ``disable_mpsym`` is set.

    :param graph: application graph to be mapped
    :param platform: target platform; may provide ``ag`` or ``ag_json``
    :param channels: whether channels are part of the representation
    :param periodic_boundary_conditions: use periodic boundary conditions
    :param norm_p: p of the L_p norm used by distance computations
    :param canonical_operations: use canonical operations on mappings
    :param disable_mpsym: force the pure-python symmetry backend
    :param disable_symmetries_test: trust a symmetries JSON file without
        validating it against the platform
    """
    self._topologyGraph = platform.to_adjacency_dict(
        include_proc_type_labels=True)
    self.graph = graph
    self.platform = platform
    self._d = len(graph.processes())
    init_app_ncs(self, graph)
    self._arch_nc_inv = {}
    self.channels = channels
    self.boundary_conditions = periodic_boundary_conditions
    self.p = norm_p
    com_mapper = ComFullMapper(graph, platform)
    self.list_mapper = ProcPartialMapper(graph, platform, com_mapper)
    self.canonical_operations = canonical_operations
    # number of processors; used for python permutation group size
    n = len(self.platform.processors())
    correct = None
    if disable_mpsym:
        self.sym_library = False
    else:
        try:
            # Probe whether the optional mpsym module was imported.
            mpsym
        except NameError:
            self.sym_library = False
        else:
            self.sym_library = True
            if hasattr(platform, "ag"):
                # Platform generator already supplies the arch graph group.
                self._ag = platform.ag
                log.info(
                    "Symmetries initialized with mpsym: Platform Generator."
                )
            elif hasattr(platform, "ag_json"):
                if exists(platform.ag_json):
                    self._ag = mpsym.ArchGraphSystem.from_json_file(
                        platform.ag_json)
                    if disable_symmetries_test:
                        log.warning(
                            "Using symmetries JSON without testing.")
                        correct = True
                    else:
                        try:
                            # Validate the loaded group against the
                            # platform's actual adjacency structure.
                            correct = checkSymmetries(
                                platform.to_adjacency_dict(),
                                self._ag.automorphisms(),
                            )
                        except Exception as e:
                            log.warning(
                                "An unknown error occurred while reading "
                                "the embedding JSON file. Did you provide "
                                "the correct file for the given platform? "
                                f"({e})")
                            correct = False
                    if not correct:
                        # Discard the mismatched group so it is
                        # recomputed below.
                        log.warning(
                            "Symmetries json does not fit platform.")
                        del self._ag
                    else:
                        log.info(
                            "Symmetries initialized with mpsym: JSON file."
                        )
                else:
                    log.warning(
                        "Invalid symmetries JSON path (file does not exist)."
                    )
            if not hasattr(self, "_ag"):
                # only calculate this if not already present
                log.info("No pre-comupted mpsym symmetry group available."
                         " Initalizing architecture graph...")
                # Build a labeled edge graph and compute its
                # automorphisms with pynauty, then lift them back to
                # node permutations for mpsym.
                (
                    adjacency_dict,
                    num_vertices,
                    coloring,
                    self._arch_nc,
                ) = to_labeled_edge_graph(self._topologyGraph)
                nautygraph = pynauty.Graph(num_vertices, True,
                                           adjacency_dict, coloring)
                log.info("Architecture graph initialized. Calculating "
                         "automorphism group using Nauty...")
                autgrp_edges = pynauty.autgrp(nautygraph)
                autgrp, _ = edge_to_node_autgrp(autgrp_edges[0],
                                                self._arch_nc)
                self._ag = mpsym.ArchGraphAutomorphisms(
                    [mpsym.Perm(g) for g in autgrp])
                for node in self._arch_nc:
                    self._arch_nc_inv[self._arch_nc[node]] = node
                # TODO: ensure that nodes_correspondence fits simpleVec
    if not self.sym_library:
        # Fallback: compute the automorphism group with pynauty and
        # represent it as a pure-python PermutationGroup.
        log.info(
            "Using python symmetries: Initalizing architecture graph...")
        (
            adjacency_dict,
            num_vertices,
            coloring,
            self._arch_nc,
        ) = to_labeled_edge_graph(self._topologyGraph)
        nautygraph = pynauty.Graph(num_vertices, True, adjacency_dict,
                                   coloring)
        log.info("Architecture graph initialized. Calculating "
                 "automorphism group using Nauty...")
        autgrp_edges = pynauty.autgrp(nautygraph)
        autgrp, _ = edge_to_node_autgrp(autgrp_edges[0], self._arch_nc)
        permutations_lists = map(list_to_tuple_permutation, autgrp)
        permutations = [
            Permutation.fromLists(p, n=n) for p in permutations_lists
        ]
        self._G = PermutationGroup(permutations)
        log.info("Initialized automorphism group with internal symmetries")
def mapper(graph, platform_odroid):
    """Fixture: a process-level partial mapper for the odroid platform."""
    full_mapper = ComFullMapper(graph, platform_odroid)
    partial_mapper = ProcPartialMapper(graph, platform_odroid, full_mapper)
    return partial_mapper