# Population annealing (PA) seeded with QPU samples.

# Number of generations, i.e. temperatures to progress through.
num_iter = 20
# Population size.
num_samples = 20

# QPU initial sampling: limits the PA workflow to QPU-sized problems.
# The decompose/compose identities pass the full problem to the QPU sampler;
# AggregatedSamples(False) expands aggregated reads into individual samples.
qpu_init = (
    hybrid.IdentityDecomposer()
    | hybrid.QPUSubproblemAutoEmbeddingSampler(num_reads=num_samples)
    | hybrid.IdentityComposer()
) | hybrid.AggregatedSamples(False)

# One PA generation: advance beta along the schedule, sample at that fixed
# temperature, then resample the population weighted by energy.
pa_generation = (ProgressBetaAlongSchedule()
                 | FixedTemperatureSampler(num_sweeps=num_sweeps)
                 | EnergyWeightedResampler())

# PA workflow: after initial beta schedule estimation, run `num_iter`
# generations (one per beta/temperature).
workflow = (qpu_init
            | CalculateAnnealingBetaSchedule(length=num_iter)
            | hybrid.Loop(pa_generation, max_iter=num_iter))

# Run the workflow on the problem BQM.
state = hybrid.State.from_problem(bqm)
solution = workflow.run(state).result()

# Show execution profile.
hybrid.profiling.print_counters(workflow)

# Show results.
print("Solution: sample={0.samples.first}, energy={0.samples.first.energy}".
      format(solution))
# Parallel tempering (PT) with the QPU acting as the hottest-temperature sampler.

# One replica state per temperature, each a fresh copy of the initial state.
replicas = hybrid.States(*(state.updated() for _ in range(n_replicas)))

# Get a reasonable beta range for the problem...
beta_hot, beta_cold = neal.default_beta_range(bqm)
# ...and spread betas geometrically over it, one per branch/replica.
betas = np.geomspace(beta_hot, beta_cold, n_replicas)

# QPU branch: limits the PT workflow to QPU-sized problems.
qpu = (hybrid.IdentityDecomposer()
       | hybrid.QPUSubproblemAutoEmbeddingSampler()
       | hybrid.IdentityComposer())

# Use the QPU as the hottest-temperature sampler, plus `n_replicas-1`
# fixed-temperature samplers for the remaining (colder) betas.
classical_samplers = [FixedTemperatureSampler(beta=beta, num_sweeps=n_sweeps)
                      for beta in betas[1:]]
update = hybrid.Branches(qpu, *classical_samplers)

# Swap step: `n_replicas-1` pairwise potential swaps between neighboring replicas.
swap = SwapReplicasDownsweep(betas=betas)

# Run the update/swap sequence for `n_iterations`, then merge all samples.
workflow = (hybrid.Loop(update | swap, max_iter=n_iterations)
            | hybrid.MergeSamples(aggregate=True))

# Execute the workflow.
solution = workflow.run(replicas).result()

# Show execution profile.
hybrid.profiling.print_counters(workflow)
# Purely classical parallel tempering over geometrically spaced temperatures.

n_replicas = 10
n_iterations = 10

# States are randomly initialized from the problem BQM.
state = hybrid.State.from_problem(bqm)

# Get a reasonable beta range for the problem...
beta_hot, beta_cold = neal.default_beta_range(bqm)
# ...and generate a geometric progression of betas (inverse temperatures),
# one per replica.
betas = np.geomspace(beta_hot, beta_cold, n_replicas)

# Create `n_replicas` copies of the state, each tagged with its own beta.
replicas = hybrid.States(*(state.updated(beta=beta) for beta in betas))

# Each iteration: sample every replica at its fixed temperature in parallel,
# then attempt `n_replicas-1` pairwise neighbor swaps.
update = hybrid.Map(FixedTemperatureSampler(num_sweeps=n_sweeps))
swap = SwapReplicasDownsweep()

# Run the update/swap sequence for `n_iterations`, then merge all samples.
workflow = (hybrid.Loop(update | swap, max_iter=n_iterations)
            | hybrid.MergeSamples(aggregate=True))

solution = workflow.run(replicas).result()

# Show execution profile.
hybrid.profiling.print_counters(workflow)

# Show results.
print("Solution: sample={0.samples.first}, energy={0.samples.first.energy}".
      format(solution))
# Purely classical population annealing (PA).

# Report the problem size and density up front.
print("BQM: {} nodes, {} edges, {:.2f} density".format(
    len(bqm), len(bqm.quadratic), hybrid.bqm_density(bqm)))

# Sweeps per fixed-temperature sampling step.
num_sweeps = 1000
# Number of generations, i.e. temperatures to progress through.
num_iter = 20
# Population size.
num_samples = 20

# One PA generation: advance beta along the schedule, sample the population
# at that fixed temperature, then resample weighted by energy.
pa_generation = (ProgressBetaAlongSchedule()
                 | FixedTemperatureSampler(num_sweeps=num_sweeps,
                                           num_reads=num_samples)
                 | EnergyWeightedResampler())

# PA workflow: after initial beta schedule estimation, run `num_iter`
# generations (one per beta/temperature).
workflow = (CalculateAnnealingBetaSchedule(length=num_iter)
            | hybrid.Loop(pa_generation, max_iter=num_iter))

# Run the workflow on the problem BQM.
state = hybrid.State.from_problem(bqm)
solution = workflow.run(state).result()

# Show execution profile.
hybrid.profiling.print_counters(workflow)

# Show results.
print("Solution: sample={0.samples.first}, energy={0.samples.first.energy}".
      format(solution))