Example #1
    def __init__(self, sampler, embedding,
                 chain_strength=None,
                 flux_biases=None,
                 flux_bias_num_reads=1000,
                 flux_bias_max_age=3600):

        child, = self.children = [FixedEmbeddingComposite(sampler, embedding)]
        self.nodelist, self.edgelist, self.adjacency = child.structure
        self.embedding = child.embedding

        self.parameters = parameters = {'apply_flux_bias_offsets': []}
        parameters.update(child.parameters)

        self.properties = child.properties.copy()  # shallow copy

        # Validate the chain strength, or obtain it from the J-range if none is provided.
        self.chain_strength = _validate_chain_strength(sampler, chain_strength)

        if flux_biases is False:  # compare with 'is': an explicit False disables them, the default None means "compute them"
            # flux biases are explicitly disabled, so we are done
            self.flux_biases = None
            return

        if FLUX_BIAS_KWARG not in sampler.parameters:
            raise ValueError("Given child sampler does not accept flux_biases.")

        # flux biases come back as a dict mapping qubits to offsets
        flux_biases = get_flux_biases(sampler, embedding,
                                      num_reads=flux_bias_num_reads,
                                      chain_strength=self.chain_strength,
                                      max_age=flux_bias_max_age)

        self.flux_biases = [flux_biases.get(v, 0.0) for v in range(sampler.properties['num_qubits'])]

        return
Example #2
    def __init__(self, sampler, embedding,
                 chain_strength=None,
                 flux_biases=None,
                 flux_bias_num_reads=1000,
                 flux_bias_max_age=3600):

        super(VirtualGraphComposite, self).__init__(sampler, embedding)
        self.parameters.update(apply_flux_bias_offsets=[])

        # Validate the chain strength, or obtain it from the J-range if none is provided.
        self.chain_strength = _validate_chain_strength(sampler, chain_strength)

        if flux_biases is False:  # compare with 'is': an explicit False disables them, the default None means "compute them"
            # flux biases are explicitly disabled, so we are done
            self.flux_biases = None
            return

        if FLUX_BIAS_KWARG not in sampler.parameters:
            raise ValueError("Given child sampler does not accept flux_biases.")

        # flux biases come back as a dict mapping qubits to offsets
        flux_biases = get_flux_biases(sampler, embedding,
                                      num_reads=flux_bias_num_reads,
                                      chain_strength=self.chain_strength,
                                      max_age=flux_bias_max_age)

        self.flux_biases = [flux_biases.get(v, 0.0) for v in range(sampler.properties['num_qubits'])]

        return
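
A minimal usage sketch for the constructor above, assuming access to a D-Wave solver through dwave-system's DWaveSampler; the embedding and chain_strength values are illustrative, and a real embedding must use chains of qubits that are actually coupled on the target QPU:

from dwave.system import DWaveSampler
from dwave.system.composites import VirtualGraphComposite

qpu = DWaveSampler()

# Illustrative two-variable embedding: each logical variable maps to a chain
# of physical qubits (the chains must exist in the QPU's working graph).
embedding = {'a': [0, 4], 'b': [1, 5]}

# Flux-bias offsets are computed (or read from the cache) here, at construction
# time, and then applied on later sample() calls.
sampler = VirtualGraphComposite(qpu, embedding, chain_strength=2.0)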
Example #3
    def step2(self, sampler, embedding):
        """Get flux biases from the cache."""
        return fb.get_flux_biases(sampler, embedding, chain_strength=1)
Example #4
    def step1(self, sampler, embedding):
        """Compute new flux biases (max_age=0 bypasses the cache)."""
        return fb.get_flux_biases(sampler,
                                  embedding,
                                  chain_strength=1,
                                  max_age=0)
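
Examples #3 and #4 differ only in max_age: max_age=0 forces get_flux_biases to recompute the offsets, while a generous max_age lets a previously cached result be reused. A rough sketch of that behavior, assuming fb is an alias for dwave.system.flux_bias_offsets and that a D-Wave solver is available:

from dwave.system import DWaveSampler
import dwave.system.flux_bias_offsets as fb

sampler = DWaveSampler()
embedding = {'a': [0, 4], 'b': [1, 5]}   # illustrative; must be valid for this QPU

# max_age=0 forces a fresh calculation and refreshes the cache entry.
fresh = fb.get_flux_biases(sampler, embedding, chain_strength=1, max_age=0)

# With max_age=3600, a result cached within the last hour may be returned
# instead of spending more QPU time.
cached = fb.get_flux_biases(sampler, embedding, chain_strength=1, max_age=3600)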
Example #5
    def __init__(self, sampler, embedding,
                 chain_strength=None,
                 flux_biases=None, flux_bias_num_reads=1000, flux_bias_max_age=3600):
        self.children = [sampler]

        self.parameters = parameters = {'apply_flux_bias_offsets': []}
        parameters.update(sampler.parameters)

        self.properties = {'child_properties': sampler.properties.copy()}

        #
        # Get the adjacency of the child sampler (this is the target for our embedding)
        #
        try:
            target_nodelist, target_edgelist, target_adjacency = sampler.structure
        except AttributeError:
            # the child sampler must be structured
            raise ValueError("given child sampler must be structured "
                             "(expose nodelist, edgelist and adjacency via `.structure`)")

        #
        # Validate the chain strength, or obtain it from the J-range if none is provided.
        #
        self.chain_strength = self._validate_chain_strength(chain_strength)

        #
        # We want to track the persistent embedding so that we can map input problems
        # to the child sampler.
        #
        if isinstance(embedding, str):
            embedding = get_embedding_from_tag(embedding, target_nodelist, target_edgelist)
        elif not isinstance(embedding, dict):
            raise TypeError("expected input `embedding` to be a dict.")
        self.embedding = embedding

        #
        # Derive the structure of our composed sampler from the target graph and the embedding
        #
        source_adjacency = dimod.embedding.target_to_source(target_adjacency, embedding)
        try:
            nodelist = sorted(source_adjacency)
            edgelist = sorted(_adjacency_to_edges(source_adjacency))
        except TypeError:
            # Python 3 does not allow sorting of unlike types, so if the nodes
            # have mixed types just use an arbitrary order
            nodelist = list(source_adjacency)
            edgelist = list(_adjacency_to_edges(source_adjacency))
        self.nodelist = nodelist
        self.edgelist = edgelist
        self.adjacency = source_adjacency

        #
        # If the sampler accepts flux bias offsets, we'll want to set them
        #
        if flux_biases is None and FLUX_BIAS_KWARG in sampler.parameters:
            # If nothing is provided, then we either get them from the cache or generate them
            flux_biases = get_flux_biases(sampler, embedding, num_reads=flux_bias_num_reads,
                                          max_age=flux_bias_max_age)
        elif flux_biases:
            if FLUX_BIAS_KWARG not in sampler.parameters:
                raise ValueError("Given child sampler does not accept flux_biases.")
            # flux biases were provided explicitly; make sure they are a list
            if not isinstance(flux_biases, list):
                flux_biases = list(flux_biases)  # cast to a list
        else:
            # disabled, empty, or not accepted by this sampler, so do nothing
            flux_biases = None
        self.flux_biases = flux_biases
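
The structure derivation in Example #5 relies on dimod.embedding.target_to_source to turn the target (hardware) adjacency plus the embedding into the logical source adjacency. A self-contained, pure-Python sketch of the same idea (not the dimod implementation) may make the mapping clearer:

def target_to_source_sketch(target_adjacency, embedding):
    """Rebuild the logical (source) adjacency from a target adjacency and an embedding.

    Two source variables are adjacent whenever any pair of qubits from their
    chains share an edge in the target graph. Pure-Python sketch only; the
    composite above uses dimod's implementation.
    """
    # Map each physical qubit back to the source variable whose chain contains it.
    reverse = {q: v for v, chain in embedding.items() for q in chain}

    source_adjacency = {v: set() for v in embedding}
    for q, neighbours in target_adjacency.items():
        v = reverse.get(q)
        if v is None:
            continue
        for p in neighbours:
            u = reverse.get(p)
            if u is not None and u != v:
                source_adjacency[v].add(u)
                source_adjacency[u].add(v)
    return source_adjacency

# Example: a 4-cycle target graph with two 2-qubit chains yields two adjacent
# source variables.
target = {0: {1, 3}, 1: {0, 2}, 2: {1, 3}, 3: {0, 2}}
emb = {'a': [0, 1], 'b': [2, 3]}
print(target_to_source_sketch(target, emb))   # {'a': {'b'}, 'b': {'a'}}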