Code Example #1
File: optimizer_base.py Project: mfdgroot/tequila
    def initialize_variables(self, objective, initial_values, variables):
        # bring into right format
        variables = format_variable_list(variables)
        initial_values = format_variable_dictionary(initial_values)
        all_variables = objective.extract_variables()
        if variables is None:
            variables = all_variables
        if initial_values is None:
            initial_values = {k: numpy.random.uniform(0, 2 * numpy.pi) for k in all_variables}
        else:
            # autocomplete missing initial values and warn if any were added
            detected = False
            for k in all_variables:
                if k not in initial_values:
                    initial_values[k] = numpy.random.uniform(0, 2 * numpy.pi)
                    detected = True
            if detected and not self.silent:
                print("WARNING: initial_variables given but not complete: Autocomplete with random number")

        active_angles = {}
        for v in variables:
            active_angles[v] = initial_values[v]

        passive_angles = {}
        for k, v in initial_values.items():
            if k not in active_angles:
                passive_angles[k] = v
        return active_angles, passive_angles, variables
Code Example #2
File: optimizer_base.py Project: shukob/tequila
    def initialize_variables(self, objective, initial_values, variables):
        """
        Convenience function to format the variables of some objective received in calls to optimizers.

        Parameters
        ----------
        objective: Objective:
            the objective being optimized.
        initial_values: dict:
            initial values for the variables of objective, as a dictionary.
        variables: list:
            the variables being optimized over.

        Returns
        -------
        tuple:
            active_angles, a dict of those variables being optimized.
            passive_angles, a dict of those variables NOT being optimized.
            variables: formatted list of the variables being optimized.
        """
        # bring into right format
        variables = format_variable_list(variables)
        initial_values = format_variable_dictionary(initial_values)
        all_variables = objective.extract_variables()
        if variables is None:
            variables = all_variables
        if initial_values is None:
            initial_values = {k: numpy.random.uniform(0, 2 * numpy.pi) for k in all_variables}
        else:
            # autocomplete missing initial values and warn if any were added
            detected = False
            for k in all_variables:
                if k not in initial_values:
                    initial_values[k] = numpy.random.uniform(0, 2 * numpy.pi)
                    detected = True
            if detected and not self.silent:
                print("WARNING: initial_variables given but not complete: Autocomplete with random number")

        active_angles = {}
        for v in variables:
            active_angles[v] = initial_values[v]

        passive_angles = {}
        for k, v in initial_values.items():
            if k not in active_angles:
                passive_angles[k] = v
        return active_angles, passive_angles, variables
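
For illustration, here is a minimal, self-contained sketch of the active/passive split the function above performs. The variable names and values are hypothetical; in tequila the keys would be Variable objects produced by format_variable_list.

# Hypothetical inputs: values for all variables of an objective, and the
# subset that should actually be optimized.
initial_values = {"a": 0.5, "b": 1.0, "c": 2.0}
variables = ["a", "b"]

# Variables listed in `variables` become active; everything else is passive.
active_angles = {v: initial_values[v] for v in variables}
passive_angles = {k: v for k, v in initial_values.items() if k not in active_angles}

assert active_angles == {"a": 0.5, "b": 1.0}
assert passive_angles == {"c": 2.0}
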
Code Example #3
File: optimizer_scipy.py Project: akpc/margarita
def minimize(objective: Objective,
             gradient: typing.Union[str, typing.Dict[Variable, Objective]] = None,
             hessian: typing.Union[str, typing.Dict[typing.Tuple[Variable, Variable], Objective]] = None,
             qng: bool = None,
             initial_values: typing.Dict[typing.Hashable, numbers.Real] = None,
             variables: typing.List[typing.Hashable] = None,
             samples: int = None,
             maxiter: int = 100,
             backend: str = None,
             backend_options: dict = None,
             noise: NoiseModel = None,
             method: str = "BFGS",
             tol: float = 1.e-3,
             method_options: dict = None,
             method_bounds: typing.Dict[typing.Hashable, numbers.Real] = None,
             method_constraints=None,
             silent: bool = False,
             save_history: bool = True,
             *args,
             **kwargs) -> SciPyReturnType:
    """

    Parameters
    ----------
    objective: Objective :
        The tequila objective to optimize
    gradient: typing.Union[str, typing.Dict[Variable, Objective], None] : (Default value = None) :
        '2-point', 'cs' or '3-point' for numerical gradient evaluation (does not work in combination with all optimizers),
        dictionary of variables and tequila objectives to define a custom gradient,
        None for automatic construction (default)
    hessian: typing.Union[str, typing.Dict[typing.Tuple[Variable, Variable], Objective], None] : (Default value = None) :
        '2-point', 'cs' or '3-point' for numerical hessian evaluation (does not work in combination with all optimizers),
        dictionary (keys: tuple of variables, values: tequila objective) to define a custom hessian,
        None for automatic construction (default)
    qng: bool : (Default value = None) :
        whether, if a gradient-based method is used, the quantum natural gradient (qng) should be employed
        instead of the standard gradient. NOTE: raises an error for anything but a single expectation value
        with no passive angles.
    initial_values: typing.Dict[typing.Hashable, numbers.Real]: (Default value = None):
        Initial values as a dictionary of Hashable types (variable keys) and floating point numbers. If None, they will all be set to random values in [0, 2pi).
    variables: typing.List[typing.Hashable] :
         (Default value = None)
         List of Variables to optimize
    samples: int :
         (Default value = None)
         samples/shots to take in every run of the quantum circuits (None activates full wavefunction simulation)
    maxiter: int :
         (Default value = 100)
    backend: str :
         (Default value = None)
         Simulator backend, will be automatically chosen if set to None
    backend_options: dict:
         (Default value = None)
         Additional options for the backend
         Will be unpacked and passed to the compiled objective in every call
    noise: NoiseModel:
         (Default value = None)
         a NoiseModel to apply to all expectation values in the objective.
    method: str :
         (Default value = "BFGS")
         Optimization method (see scipy documentation, or 'available methods')
    tol: float :
         (Default value = 1.e-3)
         Convergence tolerance for optimization (see scipy documentation)
    method_options: dict :
         (Default value = None)
         Dictionary of options
         (see scipy documentation)
    method_bounds: typing.Dict[typing.Hashable, typing.Tuple[float, float]]:
        (Default value = None)
        bounds for the variables (see scipy documentation)
    method_constraints :
         (Default value = None)
         (see scipy documentation)
    silent: bool :
         (Default value = False)
         No printout if True
    save_history: bool:
        (Default value = True)
        Save the history throughout the optimization

    Returns
    -------
    SciPyReturnType:
        the results of the optimization.

    """

    # bring into right format
    variables = format_variable_list(variables)
    initial_values = format_variable_dictionary(initial_values)
    if isinstance(gradient, dict) or hasattr(gradient, "items"):
        gradient = format_variable_dictionary(gradient)
    if isinstance(hessian, dict) or hasattr(hessian, "items"):
        hessian = {(assign_variable(k[0]), assign_variable(k[1])): v for k, v in hessian.items()}
    method_bounds = format_variable_dictionary(method_bounds)

    # set defaults
    all_variables = objective.extract_variables()
    if variables is None:
        variables = all_variables
    if initial_values is None:
        initial_values = {k: numpy.random.uniform(0, 2 * numpy.pi) for k in all_variables}
    else:
        # autocomplete missing initial values and warn if any were added
        detected = False
        for k in all_variables:
            if k not in initial_values:
                initial_values[k] = numpy.random.uniform(0, 2 * numpy.pi)
                detected = True
        if detected and not silent:
            print("WARNING: initial_variables given but not complete: Autocomplete with random number")

    optimizer = OptimizerSciPy(save_history=save_history,
                               maxiter=maxiter,
                               method=method,
                               method_options=method_options,
                               method_bounds=method_bounds,
                               method_constraints=method_constraints,
                               silent=silent,
                               tol=tol)
    if initial_values is not None:
        initial_values = {assign_variable(k): v for k, v in initial_values.items()}
    return optimizer(objective=objective, qng=qng,
                     backend=backend, backend_options=backend_options, gradient=gradient, hessian=hessian, initial_values=initial_values,
                     variables=variables, noise=noise,
                     samples=samples, *args, **kwargs)
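
A hedged usage sketch for this minimize entry point, assuming a standard tequila installation where it is exposed as tq.minimize; the one-qubit ansatz and Hamiltonian below are illustrative, not taken from the source.

import tequila as tq

U = tq.gates.Ry(angle="a", target=0)   # one-parameter ansatz
H = tq.paulis.Z(0)                     # measure <Z> on qubit 0
E = tq.ExpectationValue(H=H, U=U)      # objective with a single variable "a"

# BFGS is the default method; initial_values seeds the variable dictionary.
result = tq.minimize(objective=E, method="BFGS",
                     initial_values={"a": 0.1}, silent=True)
print(result.energy, result.angles)    # optimized value and final angles
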
Code Example #4
File: optimizer_base.py Project: silky/tequila
    def initialize_variables(self, objective, initial_values, variables):
        """
        Convenience function to format the variables of some objective received in calls to optimizers.

        Parameters
        ----------
        objective: Objective:
            the objective being optimized.
        initial_values: dict, str, callable, or number:
            initial values for the variables of objective.
            if dict: a mapping from variables to values; missing entries are autocompleted with random numbers.
            if str: `zero` (all angles 0.0) or `random` (uniform in [0, 2pi)).
            if callable: a function that returns an initial value when passed a variable key.
            if number: every variable is set to that value.
            if None: random initialization between 0 and 2pi (not recommended).
        variables: list:
            the variables being optimized over.

        Returns
        -------
        tuple:
            active_angles, a dict of those variables being optimized.
            passive_angles, a dict of those variables NOT being optimized.
            variables: formatted list of the variables being optimized.
        """
        # bring into right format
        variables = format_variable_list(variables)
        initial_values = format_variable_dictionary(initial_values)
        all_variables = objective.extract_variables()
        if variables is None:
            variables = all_variables
        if initial_values is None:
            initial_values = {
                k: numpy.random.uniform(0, 2 * numpy.pi)
                for k in all_variables
            }
        elif hasattr(initial_values, "lower"):
            if initial_values.lower() == "zero":
                initial_values = {k: 0.0 for k in all_variables}
            elif initial_values.lower() == "random":
                initial_values = {
                    k: numpy.random.uniform(0, 2 * numpy.pi)
                    for k in all_variables
                }
            else:
                raise TequilaOptimizerException(
                    "unknown initialization instruction: {}".format(
                        initial_values))
        elif callable(initial_values):
            initial_values = {k: initial_values(k) for k in all_variables}
        elif isinstance(initial_values, numbers.Number):
            initial_values = {k: initial_values for k in all_variables}
        else:
            # autocomplete missing initial values and warn if any were added
            detected = False
            for k in all_variables:
                if k not in initial_values:
                    initial_values[k] = numpy.random.uniform(0, 2 * numpy.pi)
                    detected = True
            if detected and not self.silent:
                warnings.warn(
                    "initial_variables given but not complete: Autocompleted with random numbers",
                    TequilaWarning)

        active_angles = {}
        for v in variables:
            active_angles[v] = initial_values[v]

        passive_angles = {}
        for k, v in initial_values.items():
            if k not in active_angles:
                passive_angles[k] = v
        return active_angles, passive_angles, variables
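
For illustration, here is a runnable, standalone condensation of the branching above; init_values is a hypothetical helper written for this sketch, not part of tequila.

import numbers
import numpy

def init_values(all_variables, initial_values):
    # Standalone re-implementation of the branching above, for illustration only.
    if initial_values is None:
        return {k: numpy.random.uniform(0, 2 * numpy.pi) for k in all_variables}
    if hasattr(initial_values, "lower"):  # string instruction
        if initial_values.lower() == "zero":
            return {k: 0.0 for k in all_variables}
        if initial_values.lower() == "random":
            return {k: numpy.random.uniform(0, 2 * numpy.pi) for k in all_variables}
        raise ValueError("unknown initialization instruction: {}".format(initial_values))
    if callable(initial_values):  # custom per-key function
        return {k: initial_values(k) for k in all_variables}
    if isinstance(initial_values, numbers.Number):  # one value for every variable
        return {k: initial_values for k in all_variables}
    return initial_values  # assume a (possibly incomplete) dict

print(init_values(["a", "b"], "zero"))         # {'a': 0.0, 'b': 0.0}
print(init_values(["a", "b"], 0.1))            # {'a': 0.1, 'b': 0.1}
print(init_values(["a", "b"], lambda k: 1.0))  # {'a': 1.0, 'b': 1.0}
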
Code Example #5
def minimize(objective: Objective,
             lr=0.01,
             method='sgd',
             qng: bool = False,
             stop_count=None,
             initial_values: typing.Dict[typing.Hashable, numbers.Real] = None,
             variables: typing.List[typing.Hashable] = None,
             samples: int = None,
             maxiter: int = 100,
             backend: str = None,
             noise: NoiseModel = None,
             silent: bool = False,
             save_history: bool = True,
             *args,
             **kwargs) -> GDReturnType:
    """

    Parameters
    ----------
    objective: Objective :
        The tequila objective to optimize
    lr: float > 0 : (Default value = 0.01) :
        the learning rate.
    method: string:
        which variation of gradient descent to use. Options include 'sgd', 'adam', 'nesterov', 'adagrad', and 'rmsprop'.
    qng: bool:
        whether the calculated gradient should be the quantum natural gradient. Defaults to False.
    stop_count: int: (Default value = None):
        number of steps without improvement after which training stops. If None, training runs until maxiter is reached.
    initial_values: typing.Dict[typing.Hashable, numbers.Real]: (Default value = None):
        Initial values as a dictionary of Hashable types (variable keys) and floating point numbers. If None, they will all be set to random values in [0, 2pi).
    variables: typing.List[typing.Hashable] :
         (Default value = None)
         List of Variables to optimize
    samples: int :
         (Default value = None)
         samples/shots to take in every run of the quantum circuits (None activates full wavefunction simulation)
    maxiter: int :
         (Default value = 100)
    backend: str :
         (Default value = None)
         Simulator backend, will be automatically chosen if set to None
    noise: NoiseModel:
         (Default value = None)
         a NoiseModel to apply to all expectation values in the objective.
    silent: bool :
         (Default value = False)
         No printout if True
    save_history: bool:
        (Default value = True)
        Save the history throughout the optimization


    Optional kwargs may include beta, beta2, and rho: parameters that tune the individual method
    algorithms but typically do not need to be altered.
    Returns
    -------
    GDReturnType:
        the results of the optimization.

    """

    # bring into right format
    variables = format_variable_list(variables)
    initial_values = format_variable_dictionary(initial_values)

    # set defaults
    all_variables = objective.extract_variables()
    if variables is None:
        variables = all_variables
    if initial_values is None:
        initial_values = {
            k: numpy.random.uniform(0, 2 * numpy.pi)
            for k in all_variables
        }
    else:
        # autocomplete missing initial values and warn if any were added
        detected = False
        for k in all_variables:
            if k not in initial_values:
                initial_values[k] = numpy.random.uniform(0, 2 * numpy.pi)
                detected = True
        if detected and not silent:
            print(
                "WARNING: initial_values given but incomplete: autocompleting missing entries with random numbers"
            )

    optimizer = OptimizerGD(save_history=save_history,
                            maxiter=maxiter,
                            silent=silent)
    if initial_values is not None:
        initial_values = {
            assign_variable(k): v
            for k, v in initial_values.items()
        }
    return optimizer(objective=objective,
                     maxiter=maxiter,
                     lr=lr,
                     method=method,
                     qng=qng,
                     stop_count=stop_count,
                     backend=backend,
                     initial_values=initial_values,
                     variables=variables,
                     noise=noise,
                     samples=samples,
                     *args,
                     **kwargs)
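
A hedged usage sketch for the gradient-descent entry point, again assuming it is reachable as tq.minimize with one of the methods listed in the docstring; the circuit and learning rate below are illustrative.

import tequila as tq

U = tq.gates.Ry(angle="a", target=0)   # one-parameter ansatz
H = tq.paulis.Z(0)                     # measure <Z> on qubit 0
E = tq.ExpectationValue(H=H, U=U)      # objective with a single variable "a"

# 'adam' is one of the gradient-descent variants listed above; lr is the learning rate.
result = tq.minimize(objective=E, method="adam", lr=0.1,
                     maxiter=50, initial_values={"a": 0.1}, silent=True)
print(result.energy, result.angles)    # optimized value and final angles
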