def __call__(self, objective: Objective,
             variables: typing.List[Variable] = None,
             initial_values: typing.Dict[Variable, numbers.Real] = None,
             gradient: typing.Dict[Variable, Objective] = None,
             hessian: typing.Dict[typing.Tuple[Variable, Variable], Objective] = None,
             reset_history: bool = True,
             *args, **kwargs) -> SciPyResults:
    """
    Perform optimization using scipy optimizers.

    Parameters
    ----------
    objective: Objective:
        the objective to optimize.
    variables: list, optional:
        the variables of objective to optimize. If None: optimize all.
    initial_values: dict, optional:
        a starting point from which to begin optimization. Will be generated if None.
    gradient: optional:
        information or object used to calculate the gradient of objective. Defaults to None: get analytically.
    hessian: optional:
        information or object used to calculate the hessian of objective. Defaults to None: get analytically.
    reset_history: bool: Default = True:
        whether or not to reset all history before optimizing.
    args
    kwargs

    Returns
    -------
    SciPyResults:
        the results of optimization.
    """
    objective = objective.contract()
    infostring = "{:15} : {}\n".format("Method", self.method)
    infostring += "{:15} : {} expectationvalues\n".format("Objective", objective.count_expectationvalues())

    if gradient is not None:
        infostring += "{:15} : {}\n".format("grad instr", gradient)
    if hessian is not None:
        infostring += "{:15} : {}\n".format("hess_instr", hessian)

    if self.save_history and reset_history:
        self.reset_history()

    active_angles, passive_angles, variables = self.initialize_variables(objective, initial_values, variables)

    # transform the initial value dictionary into (ordered) arrays
    param_keys, param_values = zip(*active_angles.items())
    param_values = numpy.array(param_values)

    # process and initialize scipy bounds
    bounds = None
    if self.method_bounds is not None:
        bounds = {k: None for k in active_angles}
        for k, v in self.method_bounds.items():
            if k in bounds:
                bounds[k] = v
        infostring += "{:15} : {}\n".format("bounds", self.method_bounds)
        names, bounds = zip(*bounds.items())
        assert names == param_keys  # make sure the bounds are not shuffled

    # do the compilation here to avoid costly recompilation during the optimization
    compiled_objective = self.compile_objective(objective=objective, *args, **kwargs)

    E = _EvalContainer(objective=compiled_objective,
                       param_keys=param_keys,
                       samples=self.samples,
                       passive_angles=passive_angles,
                       save_history=self.save_history,
                       print_level=self.print_level)

    compile_gradient = self.method in (self.gradient_based_methods + self.hessian_based_methods)
    compile_hessian = self.method in self.hessian_based_methods

    dE = None
    ddE = None

    # detect if numerical gradients shall be used and switch off compiling if so
    if isinstance(gradient, str):
        if gradient.lower() == 'qng':
            compile_gradient = False
            if compile_hessian:
                raise TequilaException('Sorry, QNG and hessian not yet tested together.')
            combos = get_qng_combos(objective, initial_values=initial_values, backend=self.backend,
                                    samples=self.samples, noise=self.noise)
            dE = _QngContainer(combos=combos, param_keys=param_keys, passive_angles=passive_angles)
            infostring += "{:15} : QNG {}\n".format("gradient", dE)
        else:
            # pass the string (e.g. '2-point') through to scipy's numerical differentiation
            dE = gradient
            compile_gradient = False
            if compile_hessian:
                compile_hessian = False
                if hessian is None:
                    hessian = gradient
            infostring += "{:15} : scipy numerical {}\n".format("gradient", dE)
            infostring += "{:15} : scipy numerical {}\n".format("hessian", ddE)

    if isinstance(gradient, dict):
        if gradient['method'] == 'qng':
            func = gradient['function']
            compile_gradient = False
            if compile_hessian:
                raise TequilaException('Sorry, QNG and hessian not yet tested together.')
            combos = get_qng_combos(objective, func=func, initial_values=initial_values, backend=self.backend,
                                    samples=self.samples, noise=self.noise)
            dE = _QngContainer(combos=combos, param_keys=param_keys, passive_angles=passive_angles)
            infostring += "{:15} : QNG {}\n".format("gradient", dE)

    if isinstance(hessian, str):
        ddE = hessian
        compile_hessian = False

    if compile_gradient:
        grad_obj, comp_grad_obj = self.compile_gradient(objective=objective, variables=variables,
                                                        gradient=gradient, *args, **kwargs)
        expvals = sum([o.count_expectationvalues() for o in comp_grad_obj.values()])
        infostring += "{:15} : {} expectationvalues\n".format("gradient", expvals)
        dE = _GradContainer(objective=comp_grad_obj,
                            param_keys=param_keys,
                            samples=self.samples,
                            passive_angles=passive_angles,
                            save_history=self.save_history,
                            print_level=self.print_level)

    if compile_hessian:
        hess_obj, comp_hess_obj = self.compile_hessian(variables=variables,
                                                       hessian=hessian,
                                                       grad_obj=grad_obj,
                                                       comp_grad_obj=comp_grad_obj,
                                                       *args, **kwargs)
        expvals = sum([o.count_expectationvalues() for o in comp_hess_obj.values()])
        infostring += "{:15} : {} expectationvalues\n".format("hessian", expvals)
        ddE = _HessContainer(objective=comp_hess_obj,
                             param_keys=param_keys,
                             samples=self.samples,
                             passive_angles=passive_angles,
                             save_history=self.save_history,
                             print_level=self.print_level)

    if self.print_level > 0:
        print(self)
        print(infostring)
        print("{:15} : {}\n".format("active variables", len(active_angles)))

    Es = []

    # capture self so the callback below can reach the optimizer's kwargs
    optimizer_instance = self

    class SciPyCallback:
        energies = []
        gradients = []
        hessians = []
        angles = []
        real_iterations = 0

        def __call__(self, *args, **kwargs):
            self.energies.append(E.history[-1])
            self.angles.append(E.history_angles[-1])
            if dE is not None and not isinstance(dE, str):
                self.gradients.append(dE.history[-1])
            if ddE is not None and not isinstance(ddE, str):
                self.hessians.append(ddE.history[-1])
            self.real_iterations += 1
            if 'callback' in optimizer_instance.kwargs:
                optimizer_instance.kwargs['callback'](E.history_angles[-1])

    callback = SciPyCallback()

    res = scipy.optimize.minimize(E, x0=param_values, jac=dE, hess=ddE,
                                  args=(Es,),
                                  method=self.method, tol=self.tol,
                                  bounds=bounds,
                                  constraints=self.method_constraints,
                                  options=self.method_options,
                                  callback=callback)

    # failsafe since callback is not implemented everywhere
    if callback.real_iterations == 0:
        real_iterations = range(len(E.history))

    if self.save_history:
        self.history.energies = callback.energies
        self.history.energy_evaluations = E.history
        self.history.angles = callback.angles
        self.history.angles_evaluations = E.history_angles
        self.history.gradients = callback.gradients
        self.history.hessians = callback.hessians
        if dE is not None and not isinstance(dE, str):
            self.history.gradients_evaluations = dE.history
        if ddE is not None and not isinstance(ddE, str):
            self.history.hessians_evaluations = ddE.history

        # some methods like "cobyla" do not support callback functions
        if len(self.history.energies) == 0:
            self.history.energies = E.history
            self.history.angles = E.history_angles

    # some scipy methods (e.g. cobyla) return the last evaluated point rather
    # than the minimum, so pick the best recorded evaluation instead
    ea = sorted(zip(E.history, E.history_angles), key=lambda x: x[0])
    E_final = ea[0][0]
    angles_final = ea[0][1]
    angles_final = {**angles_final, **passive_angles}

    return SciPyResults(energy=E_final, history=self.history,
                        variables=format_variable_dictionary(angles_final),
                        scipy_result=res)
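
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the optimizer): one plausible way to
# drive __call__ above. The ansatz, Hamiltonian, variable name, and the
# OptimizerSciPy constructor argument are assumptions made for this example;
# only the __call__ signature and the SciPyResults fields (energy, variables)
# come from this module.
def _example_scipy_optimization():
    import tequila as tq
    a = tq.Variable("a")
    U = tq.gates.Ry(angle=a, target=0)         # hypothetical one-qubit ansatz
    H = tq.paulis.Z(0)                         # hypothetical Hamiltonian
    objective = tq.ExpectationValue(U=U, H=H)
    optimizer = OptimizerSciPy(method="BFGS")  # assumed constructor signature
    result = optimizer(objective=objective, initial_values={"a": 0.5})
    print(result.energy, result.variables)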
def prepare(self, objective: Objective, initial_values: dict = None,
            variables: list = None, gradient=None):
    """
    Perform all initialization for an objective, register it with lookup tables, and return it compiled.
    MUST be called before step is used.

    Parameters
    ----------
    objective: Objective:
        the objective to ready for optimization.
    initial_values: dict, optional:
        the initial values to prepare the optimizer with. Default: choose randomly.
    variables: list, optional:
        which variables to optimize over, and hence prepare gradients for. Default value: optimize over all variables in objective.
    gradient: optional:
        extra keyword; information used to compile alternate gradients. Default: prepare the standard, analytical gradient.

    Returns
    -------
    Objective:
        compiled version of objective.
    """
    objective = objective.contract()
    active_angles, passive_angles, variables = self.initialize_variables(objective, initial_values, variables)
    comp = self.compile_objective(objective=objective)
    for arg in comp.args:
        if hasattr(arg, 'U'):
            if arg.U.device is not None:
                # don't retrieve the device 100 times; pyquil errors out if this happens!
                self.device = arg.U.device
                break

    compile_gradient = True
    dE = None

    if isinstance(gradient, str):
        if gradient.lower() == 'qng':
            compile_gradient = False
            combos = get_qng_combos(objective, initial_values=initial_values, backend=self.backend,
                                    device=self.device, samples=self.samples, noise=self.noise)
            dE = QNGVector(combos)
        else:
            gradient = {"method": gradient, "stepsize": 1.e-4}

    elif isinstance(gradient, dict):
        if gradient['method'] == 'qng':
            func = gradient['function']
            compile_gradient = False
            combos = get_qng_combos(objective, func=func, initial_values=initial_values, backend=self.backend,
                                    device=self.device, samples=self.samples, noise=self.noise)
            dE = QNGVector(combos)

    if compile_gradient:
        grad_obj, comp_grad_obj = self.compile_gradient(objective=objective, variables=variables, gradient=gradient)
        dE = CallableVector([comp_grad_obj[k] for k in comp_grad_obj.keys()])

    ostring = id(comp)
    if not self.silent:
        print(self)
        print("{:15} : {} expectationvalues".format("Objective", objective.count_expectationvalues()))
        if compile_gradient:
            counts = [x.count_expectationvalues() for x in comp_grad_obj.values()]
            print("{:15} : {} expectationvalues".format("Gradient", sum(counts)))
            print("{:15} : {}".format("gradient instr", gradient))
        print("{:15} : {}".format("active variables", len(active_angles)))

    # initialize first and second moments to zero and register everything
    # for this objective under its id
    vec_len = len(active_angles)
    first = numpy.zeros(vec_len)
    second = numpy.zeros(vec_len)
    self.gradient_lookup[ostring] = dE
    self.active_key_lookup[ostring] = active_angles.keys()
    self.moments_lookup[ostring] = (first, second)
    self.moments_trajectory[ostring] = [(first, second)]
    self.step_lookup[ostring] = 0

    return comp
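
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the optimizer): prepare() must run
# before the step-based interface is used. The step() call below is an
# assumption about the companion method implied by step_lookup above; its
# exact signature and return value are hypothetical.
def _example_prepare_then_step(optimizer, objective, initial_values, maxiter=10):
    # compile once; prepare() registers the gradient, active keys, and moments
    # for this objective in the optimizer's lookup tables
    comp = optimizer.prepare(objective=objective, initial_values=initial_values)
    v = initial_values
    for _ in range(maxiter):
        # hypothetical step signature: consume the compiled objective and the
        # current variable values, return the energy and the updated values
        e, v = optimizer.step(comp, v)
    return v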