Example #1
def get_gradients(objective: Objective, compile_args: dict):
    """
    get the gradients of the Objective and compile them all.
    Parameters
    ----------
    objective: Objective:
        an Objective
    compile_args: dict:
        compilation arguments for compiling the gradient once it is constructed.

    Returns
    -------
    Dict:
        the gradient, compiled for use.

    """
    compile_args = check_compiler_args(compile_args)
    grads = grad(objective)
    back = {}
    for k, v in grads.items():
        new = []
        # a gradient entry is either a single Objective or a list of Objectives
        if isinstance(v, Objective):
            new.append(compile(v, **compile_args))
        else:
            for o in v:
                new.append(compile(o, **compile_args))
        back[k] = new

    return back
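
A minimal usage sketch for the above (assumptions: tequila's public API is imported as tq, and check_compiler_args accepts None and fills in defaults, as the preamble example below suggests):

import tequila as tq

# a one-parameter expectation value: <0| Ry(a)^dag Z Ry(a) |0>
U = tq.gates.Ry(angle="a", target=0)
E = tq.ExpectationValue(H=tq.paulis.Z(0), U=U)

# each dict entry holds the compiled gradient components of one variable
grads = get_gradients(E, compile_args=None)
dE_da = grads[tq.Variable("a")][0]
print(dE_da(variables={"a": 0.1}))  # evaluates d<E>/da at a = 0.1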
Example #2
    def __call__(self, objective: Objective,
                 initial_values: typing.Dict[Variable, numbers.Real] = None,
                 variables: typing.List[typing.Hashable] = None,
                 method: str = 'lbfgs', *args, **kwargs) -> GPyOptReturnType:

        active_angles, passive_angles, variables = self.initialize_variables(objective, initial_values, variables)
        dom = self.get_domain(objective, passive_angles)

        O = compile(objective=objective, variables=initial_values, backend=self.backend,
                    noise=self.noise, samples=self.samples,
                    backend_options=self.backend_options)

        if not self.silent:
            print(self)
            print("{:15} : {}".format("method", method))
            print("{:15} : {} expectationvalues".format("Objective", O.count_expectationvalues()))

        f = self.construct_function(O, passive_angles)
        opt = self.get_object(f, dom, method)
        opt.run_optimization(self.maxiter, verbosity=not self.silent)
        if self.save_history:
            self.history.energies = opt.get_evaluations()[1].flatten()
            self.history.angles = [self.redictify(v, objective, passive_angles) for v in opt.get_evaluations()[0]]
        return GPyOptReturnType(energy=opt.fx_opt, angles=self.redictify(opt.x_opt, objective, passive_angles),
                                history=self.history, object=opt)
Example #3
 def __call__(self, objective: Objective,
              maxiter: int,
              passives: typing.Dict[Variable, numbers.Real] = None,
              samples: int = None,
              backend: str = None,
              noise=None,
              method: str = 'lbfgs') -> GPyOptReturnType:
     if samples is None:
         samples = self.samples
     dom = self.get_domain(objective, passives)
     init = {v: np.random.uniform(0, 2 * np.pi) for v in objective.extract_variables()}
     ### O is broken, not using it right now
     O = compile(objective=objective, variables=init, backend=backend, noise=noise, samples=samples)
     f = self.construct_function(O, backend, passives, samples, noise_model=noise)
     opt = self.get_object(f, dom, method)
     opt.run_optimization(maxiter)
     if self.save_history:
         self.history.energies = opt.get_evaluations()[1].flatten()
         self.history.angles = [self.redictify(v, objective, passives) for v in opt.get_evaluations()[0]]
     return GPyOptReturnType(energy=opt.fx_opt, angles=self.redictify(opt.x_opt, objective, passives),
                             history=self.history, opt=opt)
Example #4
 def compile_objective(self, objective: Objective, *args, **kwargs):
     return compile(objective=objective,
                    samples=self.samples,
                    backend=self.backend,
                    backend_options=self.backend_options,
                    noise=self.noise,
                    *args, **kwargs)
Example #5
def preamble(objective: Objective,
             compile_args: dict = None,
             input_vars: list = None):
    """
    Helper function for interfaces to ml backends.
    Parameters
    ----------
    objective: Objective:
        the objective to manipulate and compile.
    compile_args: dict, optional:
        a dictionary of args that can be passed as kwargs to tq.compile.
    input_vars: list, optional:
        a list of variables of the objective to specify as input, rather than internal weights.

    Returns
    -------
    tuple
        the compiled objective, its compile arguments, its input and weight variables, dicts for the input and
        weight gradients, and two dictionaries that link positions in an array to each variable (for parsing
        parameter arrays).
    """
    def var_sorter(e):
        return hash(e.name)

    all_vars = objective.extract_variables()
    all_vars.sort(key=var_sorter)
    compile_args = check_compiler_args(compile_args)

    weight_vars = []
    if input_vars is None:
        input_vars = []
        weight_vars = all_vars
    else:
        input_vars = [assign_variable(v) for v in input_vars]
        for var in all_vars:
            if var not in input_vars:
                weight_vars.append(assign_variable(var))

    init_vals = compile_args['initial_values']
    if init_vals is not None:
        for k in init_vals.keys():
            if assign_variable(k) in input_vars:
                raise TequilaMLException(
                    'initial_values contained key {},'
                    'which is meant to be an input variable.'.format(k))
        compile_args['initial_values'] = format_variable_dictionary(init_vals)

    comped = compile(objective, **compile_args)
    gradients = get_gradients(objective, compile_args)
    w_grad, i_grad = separate_gradients(gradients,
                                        weight_vars=weight_vars,
                                        input_vars=input_vars)
    first, second = get_variable_orders(weight_vars, input_vars)
    return comped, compile_args, input_vars, weight_vars, i_grad, w_grad, first, second
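
A sketch of how an ml interface might consume preamble; the unpacking order follows the return statement above, and the two-variable circuit is purely illustrative:

import tequila as tq

# circuit with two variables: 'phi' will act as the network input,
# 'theta' as an internal (trainable) weight
U = tq.gates.Ry(angle="theta", target=0) + tq.gates.Rz(angle="phi", target=0)
E = tq.ExpectationValue(H=tq.paulis.Z(0), U=U)

comped, compile_args, input_vars, weight_vars, i_grad, w_grad, first, second = \
    preamble(E, compile_args=None, input_vars=["phi"])

print(weight_vars)  # only 'theta', since 'phi' was declared an input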
Example #6
    def compile_objective(self, objective: Objective, *args, **kwargs):
        """
        Convenience function that wraps compile; for use by inheritors.
        Parameters
        ----------
        objective: Objective:
            an objective to compile.
        args
        kwargs

        Returns
        -------
        Objective:
            a compiled Objective. Types vary.
        """
        return compile(objective=objective,
                       samples=self.samples,
                       backend=self.backend,
                       device=self.device,
                       noise=self.noise,
                       *args,
                       **kwargs)
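
Since this is a method on an optimizer base class, a runnable sketch needs a stand-in for the inherited attributes; StandInOptimizer below is hypothetical and only mirrors the attributes the method reads:

import tequila as tq

class StandInOptimizer:
    # hypothetical stand-in providing the attributes used above
    def __init__(self, backend=None, device=None, samples=None, noise=None):
        self.backend, self.device = backend, device
        self.samples, self.noise = samples, noise

    def compile_objective(self, objective, *args, **kwargs):
        return tq.compile(objective=objective, samples=self.samples,
                          backend=self.backend, device=self.device,
                          noise=self.noise, *args, **kwargs)

E = tq.ExpectationValue(H=tq.paulis.Z(0), U=tq.gates.Ry(angle="a", target=0))
compiled = StandInOptimizer().compile_objective(E)
print(compiled(variables={"a": 0.5}))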
Example #7
def __grad_inner(arg, variable):
    '''
    a modified loop over __grad_objective which takes derivatives
    all the way down to variables, returning 1.0 or 0.0 when a variable is
    (is not) identical to var.
    :param arg: a transform or variable object, to be differentiated
    :param variable: the Variable with respect to which arg should be differentiated.
    '''

    assert isinstance(variable, Variable)
    if isinstance(arg, Variable):
        if arg == variable:
            return 1.0
        else:
            return 0.0
    elif isinstance(arg, ExpectationValueImpl):
        return __grad_expectationvalue(arg, variable=variable)
    elif hasattr(arg, "abstract_expectationvalue"):
        E = arg.abstract_expectationvalue
        dE = __grad_expectationvalue(E, variable=variable)
        return compile(dE, **arg._input_args)
    else:
        return __grad_objective(objective=arg, variable=variable)
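
The variable base case is observable through the public grad API; a sketch (multiplying by a bare Variable forces __grad_inner to return 1.0 for the matching variable via the product rule):

import tequila as tq

a = tq.Variable("a")
E = tq.ExpectationValue(H=tq.paulis.Z(0), U=tq.gates.Ry(angle=a, target=0))

# product rule: d(a*E)/da = 1.0 * E + a * dE/da, where the 1.0 comes from
# __grad_inner hitting the bare Variable 'a'
dE = tq.grad(a * E, "a")
print(tq.simulate(dE, variables={"a": 0.5}))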
Example #8
    def __call__(self, objective: Objective,
                 initial_values: typing.Dict[Variable, numbers.Real],
                 variables: typing.List[Variable],
                 gradient: typing.Dict[Variable, Objective] = None,
                 qng: bool = False,
                 hessian: typing.Dict[typing.Tuple[Variable, Variable], Objective] = None,
                 samples: int = None,
                 backend: str = None,
                 backend_options: dict = None,
                 noise: NoiseModel = None,
                 reset_history: bool = True,
                 *args,
                 **kwargs) -> SciPyReturnType:
        """
        Optimizes with scipy and gives back the optimized angles
        Get the optimized energies over the history
        :param objective: The tequila Objective to minimize
        :param initial_valuesxx: initial values for the objective
        :param return_scipy_output: chose if the full scipy output shall be returned
        :param reset_history: reset the history before optimization starts (has no effect if self.save_history is False)
        :return: tuple of optimized energy ,optimized angles and scipy output
        """

        infostring = "Starting {method} optimization\n".format(method=self.method)
        infostring += "Objective: {} expectationvalues\n".format(objective.count_expectationvalues())

        if self.save_history and reset_history:
            self.reset_history()

        active_angles = {}
        for v in variables:
            active_angles[v] = initial_values[v]

        passive_angles = {}
        for k, v in initial_values.items():
            if k not in active_angles.keys():
                passive_angles[k] = v

        # Transform the initial value dictionary into (ordered) arrays
        param_keys, param_values = zip(*active_angles.items())
        param_values = numpy.array(param_values)

        bounds = None
        if self.method_bounds is not None:
            bounds = {k: None for k in active_angles}
            for k, v in self.method_bounds.items():
                if k in bounds:
                    bounds[k] = v
            infostring += "bounds : {}\n".format(self.method_bounds)
            names, bounds = zip(*bounds.items())
            assert (names == param_keys)  # make sure the bounds are not shuffled

        # do the compilation here to avoid costly recompilation during the optimization
        compiled_objective = compile(objective=objective, variables=initial_values, backend=backend, noise=noise,
                                     samples=samples, *args, **kwargs)

        E = _EvalContainer(objective=compiled_objective,
                           param_keys=param_keys,
                           samples=samples,
                           passive_angles=passive_angles,
                           save_history=self.save_history,
                           backend_options=backend_options,
                           silent=self.silent)

        # compile gradients
        if self.method in self.gradient_based_methods + self.hessian_based_methods and not isinstance(gradient, str):
            compiled_grad_objectives = dict()
            if gradient is None:
                gradient = {assign_variable(k): grad(objective=objective, variable=k) for k in active_angles.keys()}
            else:
                gradient = {assign_variable(k): v for k, v in gradient.items()}

            grad_exval = []
            for k in active_angles.keys():
                if k not in gradient:
                    raise Exception("No gradient for variable {}".format(k))
                grad_exval.append(gradient[k].count_expectationvalues())
                compiled_grad_objectives[k] = compile(objective=gradient[k], variables=initial_values,
                                                      samples=samples, noise=noise, backend=backend, *args, **kwargs)

            if qng:
                combos = get_qng_combos(objective, samples=samples, backend=backend,
                                        noise=noise, initial_values=initial_values)

                dE = _QngContainer(combos=combos,
                                   param_keys=param_keys,
                                   samples=samples,
                                   passive_angles=passive_angles,
                                   save_history=self.save_history,
                                   silent=self.silent,
                                   backend_options=backend_options)
            else:

                dE = _GradContainer(objective=compiled_grad_objectives,
                                    param_keys=param_keys,
                                    samples=samples,
                                    passive_angles=passive_angles,
                                    save_history=self.save_history,
                                    silent=self.silent,
                                    backend_options=backend_options)

                infostring += "Gradients: {} expectationvalues (min={}, max={})\n".format(sum(grad_exval),
                                                                                          min(grad_exval),
                                                                                          max(grad_exval))
        else:
            # use numerical gradient
            dE = gradient
            infostring += "Gradients: {}\n".format(gradient)

        # compile hessian

        if self.method in self.hessian_based_methods and not isinstance(hessian, str):

            if isinstance(gradient, str):
                raise TequilaScipyException("Can not use numerical gradients for Hessian based methods")
            if qng is True:
                raise TequilaScipyException('Quantum Natural Hessian not yet well-defined, sorry!')
            compiled_hess_objectives = dict()
            hess_exval = []
            for i, k in enumerate(active_angles.keys()):
                for j, l in enumerate(active_angles.keys()):
                    if j > i: continue
                    hess = grad(gradient[k], l)
                    compiled_hess = compile(objective=hess, variables=initial_values, samples=samples,
                                            noise=noise,
                                            backend=backend, *args, **kwargs)
                    compiled_hess_objectives[(k, l)] = compiled_hess
                    compiled_hess_objectives[(l, k)] = compiled_hess
                    hess_exval.append(compiled_hess.count_expectationvalues())

            ddE = _HessContainer(objective=compiled_hess_objectives,
                                 param_keys=param_keys,
                                 samples=samples,
                                 passive_angles=passive_angles,
                                 save_history=self.save_history,
                                 silent=self.silent)

            infostring += "Hessian: {} expectationvalues (min={}, max={})\n".format(sum(hess_exval), min(hess_exval),
                                                                                    max(hess_exval))

        else:
            infostring += "Hessian: {}\n".format(hessian)
            if self.method != "TRUST-CONSTR" and hessian is not None:
                raise TequilaScipyException("numerical hessians only for trust-constr method")
            ddE = hessian

        if not self.silent:
            print("ObjectiveType is {}".format(type(compiled_objective)))
            print(infostring)
            print("backend: {}".format(compiled_objective.backend))
            print("samples: {}".format(samples))
            print("{} active variables".format(len(active_angles)))

        # get the number of real scipy iterations for better histories
        real_iterations = []

        Es = []
        callback = lambda x, *args: real_iterations.append(len(E.history) - 1)
        res = scipy.optimize.minimize(E, x0=param_values, jac=dE, hess=ddE,
                                      args=(Es,),
                                      method=self.method, tol=self.tol,
                                      bounds=bounds,
                                      constraints=self.method_constraints,
                                      options=self.method_options,
                                      callback=callback)

        # failsafe since callback is not implemented everywhere
        if len(real_iterations) == 0:
            real_iterations = range(len(E.history))
        else:
            real_iterations = [0] + real_iterations
        if self.save_history:
            self.history.energies = [E.history[i] for i in real_iterations]
            self.history.energy_evaluations = E.history
            self.history.angles = [E.history_angles[i] for i in real_iterations]
            self.history.angles_evaluations = E.history_angles
            if dE is not None and not isinstance(dE, str):
                # can currently only save gradients if explicitly evaluated
                # and will fail for hessian based approaches
                # need better callback functions
                try:
                    if self.method not in self.hessian_based_methods:
                        self.history.gradients = [dE.history[i] for i in real_iterations]
                except Exception:
                    print("WARNING: History could not assign the stored gradients")
                self.history.gradients_evaluations = dE.history
            if ddE is not None and not isinstance(ddE, str):
                # hessians are not evaluated in the same frequencies as energies
                # therefore we can not store the "real" iterations currently
                self.history.hessians_evaluations = ddE.history

        E_final = res.fun
        angles_final = dict((param_keys[i], res.x[i]) for i in range(len(param_keys)))
        angles_final = {**angles_final, **passive_angles}

        return SciPyReturnType(energy=E_final, angles=format_variable_dictionary(angles_final), history=self.history,
                               scipy_output=res)
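
In user code this optimizer is normally reached through the top-level entry point rather than called directly; a sketch, assuming a scipy method string such as 'bfgs':

import tequila as tq

U = tq.gates.Ry(angle="a", target=0)
E = tq.ExpectationValue(H=tq.paulis.Z(0), U=U)

# dispatches to the scipy optimizer above; gradients are compiled once
# up front, exactly as in the __call__ body
result = tq.minimize(objective=E, method="bfgs",
                     initial_values={"a": 0.5}, silent=True)
print(result.energy, result.angles)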
Example #9
    def __call__(self,
                 objective: Objective,
                 initial_values: typing.Dict[Variable, numbers.Real] = None,
                 variables: typing.List[typing.Hashable] = None,
                 method: str = 'lbfgs',
                 *args,
                 **kwargs) -> GPyOptResults:
        """
        Perform optimization of an objective via GPyOpt.

        Parameters
        ----------
        objective: Objective:
            the objective to optimize.
        initial_values: dict, optional:
            a starting point for optimization.
            Default: generate at random.
        variables: list, optional:
            which variables to optimize over.
            If None: optimize over all variables.
        method: str, optional:
            which method to use for the acquisition function of the Bayesian optimization.
            Default: 'lbfgs'.
        args
        kwargs

        Returns
        -------
        GPyOptResults.
            Results of the optimization.
        """

        active_angles, passive_angles, variables = self.initialize_variables(
            objective, initial_values, variables)
        dom = self.get_domain(objective, passive_angles)

        O = compile(objective=objective,
                    variables=initial_values,
                    backend=self.backend,
                    noise=self.noise,
                    samples=self.samples,
                    device=self.device)

        if not self.silent:
            print(self)
            print("{:15} : {}".format("method", method))
            print("{:15} : {} expectationvalues".format(
                "Objective", O.count_expectationvalues()))

        f = self.construct_function(O, passive_angles)
        opt = self.get_object(f, dom, method)
        opt.run_optimization(self.maxiter, verbosity=not self.silent)
        if self.save_history:
            self.history.energies = opt.get_evaluations()[1].flatten()
            self.history.angles = [
                self.redictify(v, objective, passive_angles)
                for v in opt.get_evaluations()[0]
            ]
        return GPyOptResults(energy=opt.fx_opt,
                             variables=self.redictify(opt.x_opt, objective,
                                                      passive_angles),
                             history=self.history,
                             gpyopt_instance=opt)
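
A usage sketch; that the gpyopt plugin is installed and that tq.minimize dispatches on a 'gpyopt-lbfgs' method string are assumptions here:

import tequila as tq

E = tq.ExpectationValue(H=tq.paulis.Z(0),
                        U=tq.gates.Ry(angle="a", target=0))

# 'lbfgs' after the dash selects the acquisition-function method above
result = tq.minimize(objective=E, method="gpyopt-lbfgs", maxiter=20)
print(result.energy)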
Example #10
    def __call__(self,
                 objective: Objective,
                 maxiter,
                 lr: float = .01,
                 method: str = 'sgd',
                 qng: bool = False,
                 stop_count: int = None,
                 initial_values: typing.Dict[Variable, numbers.Real] = None,
                 variables: typing.List[Variable] = None,
                 samples: int = None,
                 backend: str = None,
                 noise: NoiseModel = None,
                 reset_history: bool = True,
                 *args,
                 **kwargs) -> GDReturnType:
        """
        Optimizes with a variation of gradient descent and gives back the optimized angles
        Get the optimized energies over the history
        :param objective: The tequila Objective to minimize
        :param maxiter: how many iterations to run, at maximum.
        :param method: what method to optimize via.
        :param qng: whether or not to use the QNG to calculate gradients.
        :param stop_count: how many steps after which to abort if no improvement occurs.
        :param initial_values: initial values for the objective
        :param variables: which variables to optimize over. Default None: all the variables of the objective.
        :param samples: the number of samples to use. Default None: Wavefunction simulation used instead.
        :param backend: which simulation backend to use. Default None: let Tequila Pick!
        :param noise: the NoiseModel to apply to sampling. Default None. Affects chosen simulator.
        :param reset_history: reset the history before optimization starts (has no effect if self.save_history is False)
        :return: tuple of optimized energy ,optimized angles and scipy output
        """

        if self.save_history and reset_history:
            self.reset_history()

        active_angles = {}
        for v in variables:
            active_angles[v] = initial_values[v]

        passive_angles = {}
        for k, v in initial_values.items():
            if k not in active_angles.keys():
                passive_angles[k] = v

        # Transform the initial value dictionary into (ordered) arrays

        comp = compile(objective=objective,
                       variables=initial_values,
                       backend=backend,
                       noise=noise,
                       samples=samples)

        if not qng:
            g_list = []
            for k in active_angles.keys():
                g = grad(objective, k)
                g_comp = compile(objective=g,
                                 variables=initial_values,
                                 backend=backend,
                                 noise=noise,
                                 samples=samples)
                g_list.append(g_comp)

            gradients = CallableVector(g_list)
        else:
            if method.lower() == 'adagrad':
                print('Warning! You have chosen to use QNG with adagrad; '
                      'convergence is not likely.')
            gradients = QNGVector(
                get_qng_combos(objective=objective,
                               initial_values=initial_values,
                               backend=backend,
                               noise=noise,
                               samples=samples))

        if not self.silent:
            print("backend: {}".format(comp.backend))
            print("samples: {}".format(samples))
            print("{} active variables".format(len(active_angles)))
            print("qng: {}".format(str(qng)))

        ### prefactor. Early stopping, initialization, etc. handled here

        if maxiter is None:
            maxiter = self.maxiter
        if stop_count is None:
            stop_count = maxiter

        ### the actual algorithm acts here:

        f = self.method_dict[method.lower()]
        v = initial_values
        vec_len = len(active_angles)
        best = None
        best_angles = None
        first = numpy.zeros(vec_len)
        second = numpy.zeros(vec_len)
        moments = [first, second]
        all_moments = [moments]
        tally = 0
        for step in range(maxiter):
            e = comp(v, samples=samples)
            self.history.energies.append(e)
            self.history.angles.append(v)

            ### saving best performance and counting the stop tally.
            if step == 0:
                best = e
                best_angles = v
                tally = 0
            else:
                if e < best:
                    best = e
                    best_angles = v
                    tally = 0
                else:
                    tally += 1

            if not self.silent:
                string = "Iteration: {} , Energy: {}, angles: {}".format(
                    str(step), str(e), v)
                print(string)

            ### check if it's time to stop!
            if tally == stop_count:
                if not self.silent:
                    print(
                        'no improvement after {} epochs. Stopping optimization.'
                        .format(str(stop_count)))
                break

            new, moments, grads = f(lr=lr,
                                    step=step,
                                    gradients=gradients,
                                    v=v,
                                    moments=moments,
                                    active_angles=active_angles,
                                    samples=samples,
                                    **kwargs)
            save_grad = {}
            if passive_angles is not None:
                v = {**new, **passive_angles}
            else:
                v = new
            for i, k in enumerate(active_angles.keys()):
                save_grad[k] = grads[i]
            self.history.gradients.append(save_grad)
            all_moments.append(moments)
        E_final, angles_final = best, best_angles
        angles_final = {**angles_final, **passive_angles}
        return GDReturnType(energy=E_final,
                            angles=format_variable_dictionary(angles_final),
                            history=self.history,
                            moments=all_moments)
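
A usage sketch for the descent loop; that 'adam' appears in method_dict and that lr and stop_count pass through tq.minimize as kwargs are assumptions:

import tequila as tq

E = tq.ExpectationValue(H=tq.paulis.Z(0),
                        U=tq.gates.Ry(angle="a", target=0))

# stop_count enables the early-stopping tally in the loop above
result = tq.minimize(objective=E, method="adam", lr=0.05, maxiter=100,
                     stop_count=20, initial_values={"a": 0.5}, silent=True)
print(result.energy, result.angles)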