Example #1
def compile(objective: typing.Union['Objective', 'QCircuit'],
            variables: Dict[Union['Variable', Hashable], RealNumber] = None,
            samples: int = None,
            backend: str = None,
            noise_model=None,
            *args,
            **kwargs) -> typing.Union['BackendCircuit', 'Objective']:
    """Compile a tequila objective or circuit to a backend

    Parameters
    ----------
    objective : Objective or QCircuit:
        tequila objective or circuit
    variables : Dict[Union[Variable, Hashable], RealNumber]:
        The variables of the objective given as a dictionary
        with tequila Variables (or hashable types) as keys and the corresponding real numbers as values
    samples : int : (Default value = None) :
        if None a full wavefunction simulation is performed, otherwise a fixed number of samples is simulated
    backend : str : (Default value = None) :
        specify the backend or give None for automatic assignment
    noise_model : NoiseModel : (Default value = None) :
        the noise model to apply to the objective or QCircuit.

    Returns
    -------
    BackendCircuit or Objective
        the compiled circuit or objective, ready to be simulated/sampled on the chosen backend

    """

    backend = pick_backend(backend=backend,
                           noise=noise_model is not None,
                           samples=samples)

    if variables is None and not (len(objective.extract_variables()) == 0):
        variables = {key: 0.0 for key in objective.extract_variables()}
    elif variables is not None:
        # allow hashable types as keys; cast them to tequila Variables here
        variables = {assign_variable(k): v for k, v in variables.items()}

    if isinstance(objective, Objective) or hasattr(objective, "args"):
        return compile_objective(objective=objective,
                                 variables=variables,
                                 backend=backend,
                                 noise_model=noise_model)
    elif hasattr(objective, "gates") or hasattr(objective, "abstract_circuit"):
        return compile_circuit(abstract_circuit=objective,
                               variables=variables,
                               backend=backend,
                               noise_model=noise_model,
                               *args,
                               **kwargs)
    else:
        raise TequilaException(
            "Don't know how to compile object of type: {type}, \n{object}".
            format(type=type(objective), object=objective))
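A minimal usage sketch for compile. It assumes the top-level tequila API (tq.gates.Ry, tq.paulis.Z, tq.ExpectationValue) and at least one installed simulator backend; the variable name "a" is illustrative only.

import tequila as tq

# build a small parametrized circuit and an expectation value (toy example)
U = tq.gates.Ry(angle="a", target=0)
H = tq.paulis.Z(0)
E = tq.ExpectationValue(U=U, H=H)

# compile once (backend picked automatically), then evaluate repeatedly
compiled = tq.compile(E)
# string keys are assumed to be accepted here; otherwise use tq.Variable("a") as key
print(compiled(variables={"a": 0.5}))
print(compiled(variables={"a": 1.5}))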
Example #2
def simulate(objective: typing.Union['Objective', 'QCircuit'],
             variables: Dict[Union[Variable, Hashable], RealNumber] = None,
             samples: int = None,
             backend: str = None,
             noise: NoiseModel = None,
             device: str = None,
             *args,
             **kwargs) -> Union[RealNumber, 'QubitWaveFunction']:
    """Simulate a tequila objective or circuit

    Parameters
    ----------
    objective: Objective:
        tequila objective or circuit
    variables: Dict:
        The variables of the objective given as dictionary
        with keys as tequila Variables/hashable types and values the corresponding real numbers
    samples : int, optional:
        if None a full wavefunction simulation is performed, otherwise a fixed number of samples is simulated
    backend : str, optional:
        specify the backend or give None for automatic assignment
    noise: NoiseModel, optional:
        specify a noise model to apply to simulation/sampling
    device:
        a device upon which (or in emulation of which) to sample
    *args :

    **kwargs :
        read_out_qubits = list[int] (define the qubits which shall be measured; only has an effect on pure QCircuit simulation with samples)

    Returns
    -------
    float or QubitWaveFunction
        the result of simulation.
    """

    variables = format_variable_dictionary(variables)

    if variables is None and not (len(objective.extract_variables()) == 0):
        raise TequilaException(
            "You called simulate for a parametrized type but forgot to pass down the variables: {}"
            .format(objective.extract_variables()))

    compiled_objective = compile(objective=objective,
                                 samples=samples,
                                 variables=variables,
                                 backend=backend,
                                 noise=noise,
                                 device=device,
                                 *args,
                                 **kwargs)

    return compiled_objective(variables=variables,
                              samples=samples,
                              *args,
                              **kwargs)
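For comparison, a short sketch of calling simulate directly on the same kind of toy objective and on a bare circuit (same assumptions about the public tequila API as in the sketch above).

import tequila as tq

U = tq.gates.Ry(angle="a", target=0)
E = tq.ExpectationValue(U=U, H=tq.paulis.Z(0))

# exact, wavefunction-based evaluation of the objective -> a real number
exact = tq.simulate(E, variables={"a": 0.5})

# finite-shot estimate of the same objective (backend chosen automatically)
estimate = tq.simulate(E, variables={"a": 0.5}, samples=1000)

# simulating the bare circuit returns a QubitWaveFunction instead of a number
wfn = tq.simulate(U, variables={"a": 0.5})
print(exact, estimate, wfn)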
Example #3
def simulate(objective: typing.Union['Objective', 'QCircuit'],
             variables: Dict[Union[Variable, Hashable], RealNumber] = None,
             samples: int = None,
             backend: str = None,
             noise_model=None,
             *args,
             **kwargs) -> Union[RealNumber, 'QubitWaveFunction']:
    """Simulate a tequila objective or circuit

    Parameters
    ----------
    objective :
        tequila objective or circuit
    variables :
        The variables of the objective given as dictionary
        with keys as tequila Variables/hashable types and values the corresponding real numbers
    samples : int : (Default value = None)
        if None a full wavefunction simulation is performed, otherwise a fixed number of samples is simulated
    backend : str : (Default value = None)
        specify the backend or give None for automatic assignment
    noise_model: NoiseModel :
        specify a noise model to apply to simulation/sampling

    *args :

    **kwargs :


    Returns
    -------
    float or QubitWaveFunction
        simulated/sampled objective or simulated/sampled wavefunction

    """

    variables = format_variable_dictionary(variables)

    if variables is None and not (len(objective.extract_variables()) == 0):
        raise TequilaException(
            "You called simulate for a parametrized type but forgot to pass down the variables: {}"
            .format(objective.extract_variables()))

    compiled_objective = compile(objective=objective,
                                 samples=samples,
                                 variables=variables,
                                 backend=backend,
                                 noise_model=noise_model,
                                 *args,
                                 **kwargs)

    return compiled_objective(variables=variables,
                              samples=samples,
                              *args,
                              **kwargs)
Example #4
def check_compiler_args(c_args: dict) -> typing.Dict:
    """
    Check if a dictionary's keys are exclusively arguments accepted by tq.compile.
    If the dictionary is correct, it is returned.
    Parameters
    ----------
    c_args: dict:
        a dictionary whose validity as kwargs for compilation is to be established.

    Returns
    -------
    dict:
        a dictionary that can serve as kwargs for tq.compile

    See Also
    --------
    tequila.simulators.simulator_api.compile
    """

    valid_keys = ['backend', 'samples', 'noise', 'device', 'initial_values']
    if c_args is None:
        return {k: None for k in valid_keys}
    for k in c_args.keys():
        if k not in valid_keys:
            raise TequilaException(
                'improper keyword {} found in compilation kwargs dict; please try again.'
                .format(k))
    for k in valid_keys:
        if k not in c_args:
            c_args[k] = None

    return c_args
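A brief illustration of the validation behaviour described above; the dictionaries are made up for the example.

# missing valid keys are filled in with None
print(check_compiler_args({'backend': 'qulacs', 'samples': 1000}))

# None yields a dictionary with every valid key set to None
print(check_compiler_args(None))

# an unknown key raises a TequilaException
try:
    check_compiler_args({'shots': 1000})
except TequilaException as error:
    print(error)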
Example #5
def pick_backend(backend: str = None, samples: int = None, noise: NoiseModel = None, device=None,
                 exclude_symbolic: bool = True) -> str:

    """
    choose, or verify, a backend for the user.
    Parameters
    ----------
    backend: str, optional:
        what backend to choose or verify. if None: choose for the user.
    samples: int, optional:
        if int and not None, choose (verify) a simulator which supports sampling.
    noise: str or NoiseModel, optional:
        if not None, choose (verify) a simulator that supports the specified noise.
    device: optional:
        verify that a given backend supports the specified device. MUST specify backend, if not None.
        if None: do not emulate or use real device.
    exclude_symbolic: bool, optional:
        whether or not to exclude the tequila debugging simulator from the available simulators, when choosing.

    Returns
    -------
    str:
        the name of the chosen (or verified) backend.
    """

    if len(INSTALLED_SIMULATORS) == 0:
        raise TequilaException("No simulators installed on your system")

    if backend is None and device is not None:
        raise TequilaException('device use requires backend specification!')

    if backend is None:
        if noise is None:
            if samples is None:
                for f in SUPPORTED_BACKENDS:
                    if f in INSTALLED_SIMULATORS:
                        return f
            else:
                for f in INSTALLED_SAMPLERS.keys():
                    return f
        else:
            if samples is None:
                raise TequilaException(
                    "Noise requires sampling; please provide a positive, integer value for samples")
            if noise == 'device':
                raise TequilaException('device noise requires a device, which requires a named backend!')
            for f in SUPPORTED_NOISE_BACKENDS:
                if f in INSTALLED_NOISE_SAMPLERS:
                    return f
            raise TequilaException(
                'Could not find any installed noise sampler!')


    if hasattr(backend, "lower"):
        backend = backend.lower()

    if backend == "random":
        if device is not None:
            raise TequilaException('cannot ask for a random backend and a specific device!')
        from numpy import random as random
        import time
        state = random.RandomState(int(str(time.process_time()).split('.')[-1]) % 2 ** 32)
        if samples is None:
            backend = state.choice(list(INSTALLED_SIMULATORS.keys()), 1)[0]
        else:
            backend = state.choice(list(INSTALLED_SAMPLERS.keys()), 1)[0]

        if exclude_symbolic:
            while (backend == "symbolic"):
                backend = state.choice(list(INSTALLED_SIMULATORS.keys()), 1)[0]
        return backend

    if device is not None and samples is None:
        raise TequilaException('Use of a device requires sampling!')
    if noise == 'device' and device is None:
        raise TequilaException('Use of device noise requires a device!')

    if backend not in SUPPORTED_BACKENDS:
        raise TequilaException("Backend {backend} not supported ".format(backend=backend))

    elif noise is None and samples is None and backend not in INSTALLED_SIMULATORS.keys():
        raise TequilaException("Backend {backend} not installed ".format(backend=backend))
    elif noise is None and samples is not None and backend not in INSTALLED_SAMPLERS.keys():
        raise TequilaException("Backend {backend} not installed or sampling not supported".format(backend=backend))
    elif noise is not None and samples is not None and backend not in INSTALLED_NOISE_SAMPLERS.keys():
        raise TequilaException(
            "Backend {backend} not installed or else Noise has not been implemented".format(backend=backend))

    return backend
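A few example calls against this version of pick_backend; the backend actually returned depends on what is installed locally, and 'fake_athens' is just a placeholder device name.

# automatic choice of an installed statevector simulator
print(pick_backend())

# automatic choice of a backend that supports sampling
print(pick_backend(samples=1000))

# verify an explicit choice; raises if 'qiskit' is not installed or cannot sample
print(pick_backend(backend='qiskit', samples=1000))

# asking for a device without naming a backend is rejected
try:
    pick_backend(device='fake_athens', samples=1000)
except TequilaException as error:
    print(error)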
Example #6
    def __call__(self,
                 objective: Objective,
                 variables: typing.List[Variable] = None,
                 initial_values: typing.Dict[Variable, numbers.Real] = None,
                 gradient: typing.Dict[Variable, Objective] = None,
                 hessian: typing.Dict[typing.Tuple[Variable, Variable],
                                      Objective] = None,
                 reset_history: bool = True,
                 *args,
                 **kwargs) -> SciPyResults:
        """
        Perform optimization using scipy optimizers.

        Parameters
        ----------
        objective: Objective:
            the objective to optimize.
        variables: list, optional:
            the variables of objective to optimize. If None: optimize all.
        initial_values: dict, optional:
            a starting point from which to begin optimization. Will be generated if None.
        gradient: optional:
            Information or object used to calculate the gradient of objective. Defaults to None: get analytically.
        hessian: optional:
            Information or object used to calculate the hessian of objective. Defaults to None: get analytically.
        reset_history: bool: Default = True:
            whether or not to reset all history before optimizing.
        args
        kwargs

        Returns
        -------
        SciPyResults:
            the results of optimization.
        """

        objective = objective.contract()
        infostring = "{:15} : {}\n".format("Method", self.method)
        infostring += "{:15} : {} expectationvalues\n".format(
            "Objective", objective.count_expectationvalues())

        if gradient is not None:
            infostring += "{:15} : {}\n".format("grad instr", gradient)
        if hessian is not None:
            infostring += "{:15} : {}\n".format("hess_instr", hessian)

        if self.save_history and reset_history:
            self.reset_history()

        active_angles, passive_angles, variables = self.initialize_variables(
            objective, initial_values, variables)

        # Transform the initial value dictionary into (ordered) arrays
        param_keys, param_values = zip(*active_angles.items())
        param_values = numpy.array(param_values)

        # process and initialize scipy bounds
        bounds = None
        if self.method_bounds is not None:
            bounds = {k: None for k in active_angles}
            for k, v in self.method_bounds.items():
                if k in bounds:
                    bounds[k] = v
            infostring += "{:15} : {}\n".format("bounds", self.method_bounds)
            names, bounds = zip(*bounds.items())
            assert (names == param_keys
                    )  # make sure the bounds are not shuffled

        # do the compilation here to avoid costly recompilation during the optimization
        compiled_objective = self.compile_objective(objective=objective,
                                                    *args,
                                                    **kwargs)
        E = _EvalContainer(objective=compiled_objective,
                           param_keys=param_keys,
                           samples=self.samples,
                           passive_angles=passive_angles,
                           save_history=self.save_history,
                           print_level=self.print_level)

        compile_gradient = self.method in (self.gradient_based_methods +
                                           self.hessian_based_methods)
        compile_hessian = self.method in self.hessian_based_methods

        dE = None
        ddE = None
        # detect if numerical gradients shall be used
        # switch off compiling if so
        if isinstance(gradient, str):
            if gradient.lower() == 'qng':
                compile_gradient = False
                if compile_hessian:
                    raise TequilaException(
                        'Sorry, QNG and hessian not yet tested together.')

                combos = get_qng_combos(objective,
                                        initial_values=initial_values,
                                        backend=self.backend,
                                        samples=self.samples,
                                        noise=self.noise)
                dE = _QngContainer(combos=combos,
                                   param_keys=param_keys,
                                   passive_angles=passive_angles)
                infostring += "{:15} : QNG {}\n".format("gradient", dE)
            else:
                dE = gradient
                compile_gradient = False
                if compile_hessian:
                    compile_hessian = False
                    if hessian is None:
                        hessian = gradient
                infostring += "{:15} : scipy numerical {}\n".format(
                    "gradient", dE)
                infostring += "{:15} : scipy numerical {}\n".format(
                    "hessian", ddE)

        if isinstance(gradient, dict):
            if gradient['method'] == 'qng':
                func = gradient['function']
                compile_gradient = False
                if compile_hessian:
                    raise TequilaException(
                        'Sorry, QNG and hessian not yet tested together.')

                combos = get_qng_combos(objective,
                                        func=func,
                                        initial_values=initial_values,
                                        backend=self.backend,
                                        samples=self.samples,
                                        noise=self.noise)
                dE = _QngContainer(combos=combos,
                                   param_keys=param_keys,
                                   passive_angles=passive_angles)
                infostring += "{:15} : QNG {}\n".format("gradient", dE)

        if isinstance(hessian, str):
            ddE = hessian
            compile_hessian = False

        if compile_gradient:
            grad_obj, comp_grad_obj = self.compile_gradient(
                objective=objective,
                variables=variables,
                gradient=gradient,
                *args,
                **kwargs)
            expvals = sum(
                [o.count_expectationvalues() for o in comp_grad_obj.values()])
            infostring += "{:15} : {} expectationvalues\n".format(
                "gradient", expvals)
            dE = _GradContainer(objective=comp_grad_obj,
                                param_keys=param_keys,
                                samples=self.samples,
                                passive_angles=passive_angles,
                                save_history=self.save_history,
                                print_level=self.print_level)
        if compile_hessian:
            hess_obj, comp_hess_obj = self.compile_hessian(
                variables=variables,
                hessian=hessian,
                grad_obj=grad_obj,
                comp_grad_obj=comp_grad_obj,
                *args,
                **kwargs)
            expvals = sum(
                [o.count_expectationvalues() for o in comp_hess_obj.values()])
            infostring += "{:15} : {} expectationvalues\n".format(
                "hessian", expvals)
            ddE = _HessContainer(objective=comp_hess_obj,
                                 param_keys=param_keys,
                                 samples=self.samples,
                                 passive_angles=passive_angles,
                                 save_history=self.save_history,
                                 print_level=self.print_level)
        if self.print_level > 0:
            print(self)
            print(infostring)
            print("{:15} : {}\n".format("active variables",
                                        len(active_angles)))

        Es = []

        optimizer_instance = self

        class SciPyCallback:
            energies = []
            gradients = []
            hessians = []
            angles = []
            real_iterations = 0

            def __call__(self, *args, **kwargs):
                self.energies.append(E.history[-1])
                self.angles.append(E.history_angles[-1])
                if dE is not None and not isinstance(dE, str):
                    self.gradients.append(dE.history[-1])
                if ddE is not None and not isinstance(ddE, str):
                    self.hessians.append(ddE.history[-1])
                self.real_iterations += 1
                if 'callback' in optimizer_instance.kwargs:
                    optimizer_instance.kwargs['callback'](E.history_angles[-1])

        callback = SciPyCallback()
        res = scipy.optimize.minimize(E,
                                      x0=param_values,
                                      jac=dE,
                                      hess=ddE,
                                      args=(Es, ),
                                      method=self.method,
                                      tol=self.tol,
                                      bounds=bounds,
                                      constraints=self.method_constraints,
                                      options=self.method_options,
                                      callback=callback)

        # failsafe since callback is not implemented everywhere
        if callback.real_iterations == 0:
            real_iterations = range(len(E.history))

        if self.save_history:
            self.history.energies = callback.energies
            self.history.energy_evaluations = E.history
            self.history.angles = callback.angles
            self.history.angles_evaluations = E.history_angles
            self.history.gradients = callback.gradients
            self.history.hessians = callback.hessians
            if dE is not None and not isinstance(dE, str):
                self.history.gradients_evaluations = dE.history
            if ddE is not None and not isinstance(ddE, str):
                self.history.hessians_evaluations = ddE.history

            # some methods like "cobyla" do not support callback functions
            if len(self.history.energies) == 0:
                self.history.energies = E.history
                self.history.angles = E.history_angles

        # some scipy methods always give back the last value and not the minimum (e.g. cobyla)
        ea = sorted(zip(E.history, E.history_angles), key=lambda x: x[0])
        E_final = ea[0][0]
        angles_final = ea[0][
            1]  #dict((param_keys[i], res.x[i]) for i in range(len(param_keys)))
        angles_final = {**angles_final, **passive_angles}

        return SciPyResults(energy=E_final,
                            history=self.history,
                            variables=format_variable_dictionary(angles_final),
                            scipy_result=res)
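As the string-gradient branch above suggests, a numerical gradient can be requested instead of compiling analytical gradients. A sketch, assuming this __call__ is reached through the top-level tq.minimize and reusing the toy objective from the earlier sketches:

import tequila as tq

U = tq.gates.Ry(angle="a", target=0)
E = tq.ExpectationValue(U=U, H=tq.paulis.Z(0))

# '2-point' disables analytical gradient compilation and lets scipy estimate the
# gradient by finite differences (handled in the isinstance(gradient, str) branch)
result = tq.minimize(objective=E, method="bfgs",
                     initial_values={"a": 0.1},
                     gradient='2-point')

print(result.energy)     # best energy found over the evaluation history
print(result.variables)  # corresponding angles (including passive angles)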
Example #7
    def __call__(self,
                 objective: Objective,
                 variables: typing.List[Variable] = None,
                 initial_values: typing.Dict[Variable, numbers.Real] = None,
                 gradient: typing.Dict[Variable, Objective] = None,
                 hessian: typing.Dict[typing.Tuple[Variable, Variable],
                                      Objective] = None,
                 reset_history: bool = True,
                 *args,
                 **kwargs) -> SciPyReturnType:
        """
        Optimizes with scipy and gives back the optimized angles
        Get the optimized energies over the history
        :param objective: The tequila Objective to minimize
        :param initial_values: initial values for the objective
        :param return_scipy_output: chose if the full scipy output shall be returned
        :param reset_history: reset the history before optimization starts (has no effect if self.save_history is False)
        :return: tuple of optimized energy ,optimized angles and scipy output
        """

        infostring = "{:15} : {}\n".format("Method", self.method)
        infostring += "{:15} : {} expectationvalues\n".format(
            "Objective", objective.count_expectationvalues())

        if gradient is not None:
            infostring += "{:15} : {}\n".format("grad instr", gradient)
        if hessian is not None:
            infostring += "{:15} : {}\n".format("hess_instr", hessian)

        if self.save_history and reset_history:
            self.reset_history()

        active_angles, passive_angles, variables = self.initialize_variables(
            objective, initial_values, variables)

        # Transform the initial value dictionary into (ordered) arrays
        param_keys, param_values = zip(*active_angles.items())
        param_values = numpy.array(param_values)

        # process and initialize scipy bounds
        bounds = None
        if self.method_bounds is not None:
            bounds = {k: None for k in active_angles}
            for k, v in self.method_bounds.items():
                if k in bounds:
                    bounds[k] = v
            infostring += "{:15} : {}\n".format("bounds", self.method_bounds)
            names, bounds = zip(*bounds.items())
            assert (names == param_keys
                    )  # make sure the bounds are not shuffled

        # do the compilation here to avoid costly recompilation during the optimization
        compiled_objective = self.compile_objective(objective=objective)
        E = _EvalContainer(objective=compiled_objective,
                           param_keys=param_keys,
                           samples=self.samples,
                           passive_angles=passive_angles,
                           save_history=self.save_history,
                           backend_options=self.backend_options,
                           print_level=self.print_level)

        compile_gradient = self.method in (self.gradient_based_methods +
                                           self.hessian_based_methods)
        compile_hessian = self.method in self.hessian_based_methods

        dE = None
        ddE = None
        # detect if numerical gradients shall be used
        # switch off compiling if so
        if isinstance(gradient, str):
            if gradient.lower() == 'qng':
                compile_gradient = False
                if compile_hessian:
                    raise TequilaException(
                        'Sorry, QNG and hessian not yet tested together.')

                combos = get_qng_combos(objective,
                                        initial_values=initial_values,
                                        backend=self.backend,
                                        samples=self.samples,
                                        noise=self.noise,
                                        backend_options=self.backend_options)
                dE = _QngContainer(combos=combos,
                                   param_keys=param_keys,
                                   passive_angles=passive_angles)
                infostring += "{:15} : QNG {}\n".format("gradient", dE)
            else:
                dE = gradient
                compile_gradient = False
                if compile_hessian:
                    compile_hessian = False
                    if hessian is None:
                        hessian = gradient
                infostring += "{:15} : scipy numerical {}\n".format(
                    "gradient", dE)
                infostring += "{:15} : scipy numerical {}\n".format(
                    "hessian", ddE)

        if isinstance(hessian, str):
            ddE = hessian
            compile_hessian = False

        if compile_gradient:
            grad_obj, comp_grad_obj = self.compile_gradient(
                objective=objective, variables=variables, gradient=gradient)
            expvals = sum(
                [o.count_expectationvalues() for o in comp_grad_obj.values()])
            infostring += "{:15} : {} expectationvalues\n".format(
                "gradient", expvals)
            dE = _GradContainer(objective=comp_grad_obj,
                                param_keys=param_keys,
                                samples=self.samples,
                                passive_angles=passive_angles,
                                save_history=self.save_history,
                                print_level=self.print_level,
                                backend_options=self.backend_options)

        if compile_hessian:
            hess_obj, comp_hess_obj = self.compile_hessian(
                variables=variables,
                hessian=hessian,
                grad_obj=grad_obj,
                comp_grad_obj=comp_grad_obj)
            expvals = sum(
                [o.count_expectationvalues() for o in comp_hess_obj.values()])
            infostring += "{:15} : {} expectationvalues\n".format(
                "hessian", expvals)
            ddE = _HessContainer(objective=comp_hess_obj,
                                 param_keys=param_keys,
                                 samples=self.samples,
                                 passive_angles=passive_angles,
                                 save_history=self.save_history,
                                 print_level=self.print_level,
                                 backend_options=self.backend_options)

        if self.print_level > 0:
            print(self)
            print(infostring)
            print("{:15} : {}\n".format("active variables",
                                        len(active_angles)))

        Es = []

        class SciPyCallback:
            energies = []
            gradients = []
            hessians = []
            angles = []
            real_iterations = 0

            def __call__(self, *args, **kwargs):
                self.energies.append(E.history[-1])
                self.angles.append(E.history_angles[-1])
                if dE is not None and not isinstance(dE, str):
                    self.gradients.append(dE.history[-1])
                if ddE is not None and not isinstance(ddE, str):
                    self.hessians.append(ddE.history[-1])
                self.real_iterations += 1

        callback = SciPyCallback()
        res = scipy.optimize.minimize(E,
                                      x0=param_values,
                                      jac=dE,
                                      hess=ddE,
                                      args=(Es, ),
                                      method=self.method,
                                      tol=self.tol,
                                      bounds=bounds,
                                      constraints=self.method_constraints,
                                      options=self.method_options,
                                      callback=callback)

        # failsafe since callback is not implemented everywhere
        if callback.real_iterations == 0:
            real_iterations = range(len(E.history))

        if self.save_history:
            self.history.energies = callback.energies
            self.history.energy_evaluations = E.history
            self.history.angles = callback.angles
            self.history.angles_evaluations = E.history_angles
            self.history.gradients = callback.gradients
            self.history.hessians = callback.hessians
            if dE is not None and not isinstance(dE, str):
                self.history.gradients_evaluations = dE.history
            if ddE is not None and not isinstance(ddE, str):
                self.history.hessians_evaluations = ddE.history

            # some methods like "cobyla" do not support callback functions
            if len(self.history.energies) == 0:
                self.history.energies = E.history
                self.history.angles = E.history_angles

        E_final = res.fun
        angles_final = dict(
            (param_keys[i], res.x[i]) for i in range(len(param_keys)))
        angles_final = {**angles_final, **passive_angles}

        return SciPyReturnType(energy=E_final,
                               angles=format_variable_dictionary(angles_final),
                               history=self.history,
                               scipy_output=res)
Example #8
def pick_backend(backend: str = None,
                 samples: int = None,
                 noise: bool = False,
                 exclude_symbolic: bool = True) -> str:
    """
    verifies if the backend is installed and picks one automatically if set to None
    :param backend: the demanded backend
    :param samples: if not None the simulator needs to be able to sample wavefunctions
    :param noise: if True, the simulator needs to support noisy sampling
    :param exclude_symbolic: exclude the symbolic debugging simulator when picking at random
    :return: An installed backend as string
    """

    if len(INSTALLED_SIMULATORS) == 0:
        raise TequilaException("No simulators installed on your system")

    if backend is None:
        if noise is False:
            for f in SUPPORTED_BACKENDS:
                if samples is None:
                    if f in INSTALLED_SIMULATORS:
                        return f
                else:
                    if f in INSTALLED_SAMPLERS:
                        return f
        else:
            for f in SUPPORTED_NOISE_BACKENDS:
                if samples is None:
                    raise TequilaException(
                        "Noise requires sampling; please provide a positive, integer value for samples"
                    )
                else:
                    if f in INSTALLED_NOISE_SAMPLERS:
                        return f

    if hasattr(backend, "lower"):
        backend = backend.lower()

    if backend == "random":
        from numpy import random as random
        import time
        state = random.RandomState(
            int(str(time.process_time()).split('.')[-1]) % 2**32)
        if samples is None:
            backend = state.choice(list(INSTALLED_SIMULATORS.keys()), 1)[0]
        else:
            backend = state.choice(list(INSTALLED_SAMPLERS.keys()), 1)[0]

        if exclude_symbolic:
            while (backend == "symbolic"):
                backend = state.choice(list(INSTALLED_SIMULATORS.keys()), 1)[0]
        return backend

    if backend not in SUPPORTED_BACKENDS:
        raise TequilaException(
            "Backend {backend} not supported ".format(backend=backend))

    if noise is False and samples is None and backend not in INSTALLED_SIMULATORS:
        raise TequilaException(
            "Backend {backend} not installed ".format(backend=backend))
    elif noise is False and samples is not None and backend not in INSTALLED_SAMPLERS:
        raise TequilaException(
            "Backend {backend} not installed ".format(backend=backend))
    elif noise is not False and samples is not None and backend not in INSTALLED_NOISE_SAMPLERS:
        raise TequilaException(
            "Backend {backend} not installed or else Noise has not been implemented"
            .format(backend=backend))

    return backend
Example #9
# make sure to use the jax/autograd numpy
from tequila.utils.exceptions import TequilaException

__AUTOGRAD__BACKEND__ = None
try:
    import jax
    from jax import numpy

    __AUTOGRAD__BACKEND__ = "jax"
except ImportError:
    try:
        import autograd as jax
        from autograd import numpy

        __AUTOGRAD__BACKEND__ = "autograd"
    except ImportError:
        raise TequilaException("Neither jax nor autograd found on your system")
Example #10
def minimize(objective,
             method: str = "bfgs",
             variables: list = None,
             initial_values: typing.Union[dict, numbers.Number,
                                          typing.Callable] = 0.0,
             maxiter: int = None,
             *args,
             **kwargs):
    """

    Parameters
    ----------
    method: str:
       The optimization method (e.g. bfgs, cobyla, nelder-mead, ...)
       see 'tq.optimizers.show_available_methods()' for an overview
    objective: tq.Objective:
       The abstract tequila objective to be optimized
    variables: list of names:
       The variables which shall be optimized given as list
       Can be passed as list of names or list of tq variables
    initial_values: dict:
       Initial values for the optimization, passed as dictionary
       with the variable names as keys.
       Alternatively `zero`, `random` or a single number are accepted
    maxiter:
       maximum number of iterations
    kwargs:
       further keyword arguments for the actual minimization functions
       can also be called directly as tq.minimize_modulename
       e.g. tq.minimize_scipy
       See their documentation for more details

       example: gradient keyword:
       gradient (Default Value: None):
       instructions for gradient compilation
       can be a dictionary of tequila objectives representing the gradients
       or a string/dictionary giving instructions for numerical gradients
       examples are
            gradient = '2-point'
            gradient = {'method':'2-point', 'stepsize': 1.e-4}
            gradient = {'method':Callable, 'stepsize': 1.e-4}
            see optimizer_base.py for method examples

        gradient = None: analytical gradients are compiled


    Returns
    -------
    the result object of the chosen optimizer (e.g. SciPyResults), containing the
    optimized energy, the optimized variables and the optimization history.

    """
    for k, v in INSTALLED_OPTIMIZERS.items():
        if method.lower() in v.methods or method.upper() in v.methods:
            if initial_values is None or (hasattr(initial_values, "lower") and
                                          initial_values.lower() == "zero"):
                initial_values = {
                    assign_variable(k): 0.0
                    for k in objective.extract_variables()
                }
            elif isinstance(initial_values, numbers.Number):
                initial_values = {
                    assign_variable(k): initial_values
                    for k in objective.extract_variables()
                }
            elif hasattr(initial_values,
                         "lower") and initial_values.lower() == "random":
                initial_values = {
                    assign_variable(k): float(
                        numpy.random.uniform(-2.0 * numpy.pi, 2.0 * numpy.pi,
                                             1))
                    for k in objective.extract_variables()
                }
            elif hasattr(initial_values,
                         "lower") and initial_values.lower() not in [
                             "random", "zero"
                         ]:
                raise TequilaException(
                    "Initial values needs to be Dict[hashable variable name, float] or a single float,'zero','random'. You gave\n{}={}"
                    .format(type(initial_values), initial_values))
            elif callable(initial_values):
                initial_values = {
                    k: initial_values(k)
                    for k in objective.extract_variables()
                }
            else:
                initial_values = {
                    assign_variable(k): float(v)
                    for k, v in initial_values.items()
                }
            return v.minimize(objective=objective,
                              method=method,
                              variables=variables,
                              initial_values=initial_values,
                              maxiter=maxiter,
                              *args,
                              **kwargs)

    raise TequilaOptimizerException(
        "Could not find optimization method {} in tequila optimizers. You might be missing dependencies"
        .format(method))
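Finally, an end-to-end sketch: build a small two-parameter objective and hand it to minimize with one of the scipy methods. The circuit, Hamiltonian and iteration count are illustrative only.

import tequila as tq

# two-parameter toy objective
U = tq.gates.Ry(angle="a", target=0) + tq.gates.Ry(angle="b", target=1)
H = tq.paulis.Z(0) + tq.paulis.Z(1)
E = tq.ExpectationValue(U=U, H=H)

# 'random' draws starting angles uniformly from [-2*pi, 2*pi] (handled above)
result = tq.minimize(objective=E, method="cobyla",
                     initial_values="random",
                     maxiter=100)
print(result.energy)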