def test_embedding_to_one_node(self):
    """an embedding that maps everything to one node should result in a singleton graph"""
    target_adj = nx.barbell_graph(16, 7)

    embedding = {'a': set(target_adj)}  # all map to 'a'
    source_adj = eutil.target_to_source(target_adj, embedding)
    self.assertEqual(source_adj, {'a': set()})

    embedding = {'a': {0, 1}}  # not every node is assigned to a chain
    source_adj = eutil.target_to_source(target_adj, embedding)
    self.assertEqual(source_adj, {'a': set()})
def test_embedding_overlap(self):
    """overlapping embeddings should raise an error"""
    target_adj = nx.complete_graph(5)
    embedding = {'a': {0, 1}, 'b': {1, 2}}  # overlap

    with self.assertRaises(ValueError):
        eutil.target_to_source(target_adj, embedding)
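# For reference, a minimal sketch of the behaviour the two tests above pin
# down, assuming `target_to_source` inverts the embedding: chains must be
# disjoint, target nodes not assigned to any chain are ignored, and edges
# between distinct chains become source edges. This is an illustration,
# not the library's actual implementation.
def _target_to_source_sketch(target_adjacency, embedding):
    reverse = {}  # target node -> source variable
    for v, chain in embedding.items():
        for t in chain:
            if t in reverse:
                raise ValueError("embedding contains overlapping chains")
            reverse[t] = v

    source_adjacency = {v: set() for v in embedding}
    for t in reverse:
        # works for both a networkx graph and a dict-of-sets adjacency
        for n in target_adjacency[t]:
            if n in reverse and reverse[n] != reverse[t]:
                source_adjacency[reverse[t]].add(reverse[n])
    return source_adjacency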
def load_embedding(target_nodelist, target_edgelist, embedding, embedding_tag):
    # build the adjacency of the target graph from its node and edge lists
    target_adjacency = {v: set() for v in target_nodelist}
    for u, v in target_edgelist:
        target_adjacency[u].add(v)
        target_adjacency[v].add(u)

    # derive the source graph induced by the embedding
    source_adjacency = embutil.target_to_source(target_adjacency, embedding)
    source_nodelist = sorted(source_adjacency)
    source_edgelist = sorted(
        sorted(edge) for edge in _adjacency_to_edges(source_adjacency))

    # store the embedding, keyed by tag, in the cache
    with cache_connect() as cur:
        insert_embedding(cur, source_nodelist, source_edgelist,
                         target_nodelist, target_edgelist,
                         embedding, embedding_tag)
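# `_adjacency_to_edges` is used above but not shown in this section. A
# minimal sketch consistent with how it is called (yield each undirected
# edge of a dict-of-sets adjacency exactly once); the real helper may
# differ in detail.
def _adjacency_to_edges_sketch(adjacency):
    seen = set()
    for u in adjacency:
        for v in adjacency[u]:
            if (v, u) not in seen:
                seen.add((u, v))
                yield (u, v)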
def test_self_embedding(self):
    """a 1-to-1 embedding should not change the adjacency"""
    target_adj = dnx.chimera_graph(4)
    embedding = {v: {v} for v in target_adj}

    source_adj = eutil.target_to_source(target_adj, embedding)

    # test that the adjacencies are equal (source_adj is a dict and
    # target_adj is a networkx graph)
    for v in target_adj:
        self.assertIn(v, source_adj)
        for u in target_adj[v]:
            self.assertIn(u, source_adj[v])
    for v in source_adj:
        self.assertIn(v, target_adj)
        for u in source_adj[v]:
            self.assertIn(u, target_adj[v])
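# The loops above compare the two adjacency representations element by
# element. A hypothetical helper (not part of the source) that normalises
# either a networkx graph or a plain dict into a dict-of-sets would let
# the test collapse into a single assertion:
def _as_adjacency_dict(graph_like):
    return {v: set(graph_like[v]) for v in graph_like}

# e.g. self.assertEqual(_as_adjacency_dict(source_adj),
#                       _as_adjacency_dict(target_adj))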
def __init__(self, sampler, embedding, chain_strength=None, flux_biases=None,
             flux_bias_num_reads=1000, flux_bias_max_age=3600):
    self.children = [sampler]

    self.parameters = parameters = {'apply_flux_bias_offsets': []}
    parameters.update(sampler.parameters)

    self.properties = {'child_properties': sampler.properties.copy()}

    #
    # Get the adjacency of the child sampler (this is the target for our embedding)
    #
    try:
        target_nodelist, target_edgelist, target_adjacency = sampler.structure
    except AttributeError:  # todo: better exception catching
        raise

    #
    # Validate the chain strength, or obtain it from the J-range if no chain
    # strength is provided.
    #
    self.chain_strength = self._validate_chain_strength(chain_strength)

    #
    # We want to track the persistent embedding so that we can map input problems
    # to the child sampler.
    #
    if isinstance(embedding, str):
        embedding = get_embedding_from_tag(embedding, target_nodelist, target_edgelist)
    elif not isinstance(embedding, dict):
        raise TypeError("expected input `embedding` to be a dict.")
    self.embedding = embedding

    #
    # Derive the structure of our composed sampler from the target graph and the embedding
    #
    source_adjacency = embutil.target_to_source(target_adjacency, embedding)
    try:
        nodelist = sorted(source_adjacency)
        edgelist = sorted(_adjacency_to_edges(source_adjacency))
    except TypeError:
        # python3 does not allow sorting of unlike types, so if nodes have
        # different type names just choose an arbitrary order
        nodelist = list(source_adjacency)
        edgelist = list(_adjacency_to_edges(source_adjacency))
    self.nodelist = nodelist
    self.edgelist = edgelist
    self.adjacency = source_adjacency

    #
    # If the sampler accepts flux bias offsets, we'll want to set them
    #
    if flux_biases is None and FLUX_BIAS_KWARG in sampler.parameters:
        # nothing was provided, so either get the offsets from the cache or
        # generate them
        flux_biases = get_flux_biases(sampler, embedding,
                                      num_reads=flux_bias_num_reads,
                                      max_age=flux_bias_max_age)
    elif flux_biases:
        # something was provided, so error-check it
        if FLUX_BIAS_KWARG not in sampler.parameters:
            raise ValueError(
                "given child sampler does not accept flux_biases.")
        if not isinstance(flux_biases, list):
            flux_biases = list(flux_biases)  # cast to a list
    else:
        # disabled, empty, or not available for this sampler, so do nothing
        flux_biases = None
    self.flux_biases = flux_biases
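# A hedged usage sketch for the constructor above. The composite class
# name, the child sampler, and the embedding below are placeholders, not
# part of the source:
#
#     child = SomeStructuredSampler()            # must expose .structure
#     composite = SomeEmbeddingComposite(
#         child,
#         embedding={'a': {0, 4}, 'b': {1, 5}},  # source variable -> chain
#         chain_strength=2.0,
#     )
#
# Passing a string for `embedding` instead looks the embedding up by tag
# via `get_embedding_from_tag`.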