Example #1
def get_internal_first_derivative(
    func, params, constraints=None, func_kwargs=None, numdiff_options=None
):
    """Get the first_derivative of func with respect to internal parameters.

    If there are no constraints, we simply call the first_derivative function.

    Args:
        func (callable): Function to take the derivative of.
        params (pandas.DataFrame): Data frame with external parameters. See
            :ref:`params`.
        constraints (list): Constraints that define how to convert between internal
            and external parameters.
        func_kwargs (dict): Additional keyword arguments for func.
        numdiff_options (dict): Additional options for first_derivative.

    Returns:
        dict: See ``first_derivative`` for details. The only difference is that the
            "derivative" entry is always a numpy array instead of a DataFrame.

    """
    numdiff_options = {} if numdiff_options is None else numdiff_options
    func_kwargs = {} if func_kwargs is None else func_kwargs
    _func = functools.partial(func, **func_kwargs)

    if constraints is None:
        out = first_derivative(
            func=_func,
            params=params,
            **numdiff_options,
        )
        out["has_transforming_constraints"] = False
    else:

        lower_bounds, upper_bounds = get_internal_bounds(params, constraints)

        _internal_func = numpy_interface(
            func=_func, params=params, constraints=constraints
        )

        _to_internal, _ = get_reparametrize_functions(params, constraints)

        _x = _to_internal(params)

        out = first_derivative(
            _internal_func,
            _x,
            lower_bounds=lower_bounds,
            upper_bounds=upper_bounds,
            **numdiff_options,
        )

        if isinstance(out["derivative"], (pd.DataFrame, pd.Series)):
            out["derivative"] = out["derivative"].to_numpy()

    return out
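
A minimal usage sketch (not from the original code base; the exact params columns required may differ). With no constraints the helper reduces to a plain first_derivative call on the params DataFrame:

import pandas as pd

params = pd.DataFrame({"value": [1.0, 2.0, 3.0]})

def sphere(p):
    return (p["value"] ** 2).sum()

res = get_internal_first_derivative(sphere, params)
# res["derivative"] holds the gradient with respect to params, approximately
# [2.0, 4.0, 6.0]; res["has_transforming_constraints"] is False in this branch.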
Example #2
def _compute_testable_estimagic_and_jax_derivatives(func,
                                                    params,
                                                    func_jax=None):
    """

    Computes first and second derivative using estimagic and jax. Then converts leaves
    of jax output to numpy so that we can use numpy.testing. For higher dimensional
    output we need to define two function, one with numpy array output and one with
    jax.numpy array output.

    """
    func_jax = func if func_jax is None else func_jax

    estimagic_jac = first_derivative(func, params)["derivative"]
    # Convert the leaves of the jax results to numpy so that numpy.testing works.
    jax_jac = jax.tree_util.tree_map(np.asarray, jax.jacobian(func_jax)(params))

    estimagic_hess = second_derivative(func, params)["derivative"]
    jax_hess = jax.tree_util.tree_map(np.asarray, jax.hessian(func_jax)(params))

    out = {
        "jac": {
            "estimagic": estimagic_jac,
            "jax": jax_jac
        },
        "hess": {
            "estimagic": estimagic_hess,
            "jax": jax_hess
        },
    }
    return out
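
A minimal usage sketch of the helper above, assuming jax and numpy are importable in the test module; sphere and sphere_jax are hypothetical test functions:

import jax.numpy as jnp
import numpy as np

def sphere(x):
    return np.sum(x ** 2)

def sphere_jax(x):
    return jnp.sum(x ** 2)

params = np.arange(3.0)
out = _compute_testable_estimagic_and_jax_derivatives(sphere, params, func_jax=sphere_jax)

np.testing.assert_array_almost_equal(out["jac"]["estimagic"], out["jac"]["jax"], decimal=5)
np.testing.assert_array_almost_equal(out["hess"]["estimagic"], out["hess"]["jax"], decimal=3)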
Example #3
def test_first_derivative_scalar(method):
    def f(x):
        return x ** 2

    calculated = first_derivative(f, 3.0, n_cores=1)
    expected = 6.0
    assert calculated["derivative"] == expected
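
The method argument in these scalar tests is presumably supplied by a pytest fixture or parametrization that is not shown. A sketch of such a parametrized version (the decorator and method list are assumptions, not from the original module):

import pytest

@pytest.mark.parametrize("method", ["forward", "backward", "central"])
def test_first_derivative_scalar_all_methods(method):
    def f(x):
        return x ** 2

    calculated = first_derivative(f, 3.0, method=method, n_cores=1)
    assert calculated["derivative"] == pytest.approx(6.0)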
Example #4
def test_first_derivative_scalar_with_return_func_value(method):
    def f(x):
        return x ** 2

    calculated = first_derivative(f, 3.0, return_func_value=True, n_cores=1)
    assert calculated["derivative"] == 6.0
    assert calculated["func_value"] == 9.0
Example #5
def statsmodels_fixture():
    """These fixtures are taken from the statsmodels test battery and adapted towards
    a random test."""
    fix = {}
    num_obs = 100
    num_params = 3
    max_range = 10
    x = np.linspace(0, max_range, num_obs)
    x = sm.add_constant(np.column_stack((x, x**2)), prepend=False)
    beta = np.random.rand(num_params) * max_range
    y = np.dot(x, beta) + np.random.normal(size=num_obs)

    results = sm.OLS(y, x).fit()

    fix["stats_cov"] = results.cov_HC0

    params_df = pd.DataFrame({"value": results.params})

    moment_cond = np.zeros((num_obs, num_params))
    moment_jac = np.zeros((num_obs, num_params, num_params))
    for i in range(num_obs):
        moment_cond[i, :] = calc_moment_condition(params_df, x[i, :], y[i])
        moment_jac[i, :, :] = first_derivative(
            calc_moment_condition,
            params_df,
            func_kwargs={"x_t": x[i, :], "y_t": y[i]},
        )["derivative"]
    fix["mom_cond"] = moment_cond
    fix["mom_cond_jacob"] = moment_jac
    fix["weighting_matrix"] = np.eye(num_params)
    return fix
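
calc_moment_condition is not shown in the fixture above. A plausible sketch (an assumption, not the original implementation) is the per-observation score-type condition x_t * (y_t - x_t @ beta) for the linear model:

def calc_moment_condition(params, x_t, y_t):
    # params is the DataFrame with a "value" column used in the fixture.
    beta = params["value"].to_numpy()
    residual = y_t - x_t @ beta
    return x_t * residual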
Example #6
def flexible_sos_ls_derivative(params):
    deriv_dict = first_derivative(
        flexible_sos_ls,
        params,
        key="root_contributions",
    )

    return deriv_dict["derivative"]
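
flexible_sos_ls is not shown above. A plausible sketch of a criterion with least-squares structure (hypothetical; the real function may handle more params formats), where key="root_contributions" selects the residual vector so that first_derivative returns its Jacobian:

import numpy as np

def flexible_sos_ls(params):
    residuals = np.asarray(params, dtype=float)
    return {
        "value": residuals @ residuals,
        "contributions": residuals ** 2,
        "root_contributions": residuals,
    }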
Example #7
def test_first_derivative_jacobian_works_at_defaults(binary_choice_inputs):
    fix = binary_choice_inputs
    func = partial(logit_loglikeobs, y=fix["y"], x=fix["x"])
    calculated = first_derivative(func=func,
                                  params=fix["params_np"],
                                  n_cores=1)
    expected = logit_loglikeobs_jacobian(fix["params_np"], fix["y"], fix["x"])
    aaae(calculated["derivative"], expected, decimal=6)
Example #8
def test_probability_to_internal_jacobian(dim, seed):
    external = get_external_probability(dim)

    func = partial(kt.probability_to_internal, **{"constr": None})
    numerical_deriv = first_derivative(func, external)
    deriv = kt.probability_to_internal_jacobian(external, None)

    aaae(deriv, numerical_deriv["derivative"], decimal=3)
Example #9
def test_sdcorr_from_internal_jacobian(dim, seed):
    internal = get_internal_cholesky(dim)

    func = partial(kt.sdcorr_from_internal, **{"constr": None})
    numerical_deriv = first_derivative(func, internal)
    deriv = kt.sdcorr_from_internal_jacobian(internal, None)

    aaae(deriv, numerical_deriv["derivative"], decimal=3)
Example #10
def test_covariance_to_internal_jacobian(dim, seed):
    external = get_external_covariance(dim)

    func = partial(kt.covariance_to_internal, **{"constr": None})
    numerical_deriv = first_derivative(func, external)
    deriv = kt.covariance_to_internal_jacobian(external, None)

    aaae(deriv, numerical_deriv["derivative"], decimal=3)
Example #11
def calculate_scaling_factor_and_offset(
    params,
    constraints,
    criterion,
    method="start_values",
    clipping_value=0.1,
    magnitude=1,
    numdiff_options=None,
    processed_params=None,
    processed_constraints=None,
):
    numdiff_options = {} if numdiff_options is None else numdiff_options
    to_internal, from_internal = get_reparametrize_functions(
        params=params,
        constraints=constraints,
        processed_params=processed_params,
        processed_constraints=processed_constraints,
    )

    x = to_internal(params["value"].to_numpy())

    if method in ("bounds", "gradient"):
        lower_bounds, upper_bounds = get_internal_bounds(
            params, constraints, processed_params=processed_params)

    if method == "start_values":
        raw_factor = np.clip(np.abs(x), clipping_value, np.inf)
        scaling_offset = None
    elif method == "bounds":
        raw_factor = upper_bounds - lower_bounds
        scaling_offset = lower_bounds
    elif method == "gradient":
        default_numdiff_options = {
            "scaling_factor": 100,
            "lower_bounds": lower_bounds,
            "upper_bounds": upper_bounds,
            "error_handling": "raise",
        }

        numdiff_options = {**default_numdiff_options, **numdiff_options}

        def func(x):
            p = params.copy(deep=True)
            p["value"] = from_internal(x)
            crit = criterion(p)
            if isinstance(crit, dict):
                crit = crit["value"]
            return crit

        gradient = first_derivative(func, x, **numdiff_options)["derivative"]

        raw_factor = np.clip(np.abs(gradient), clipping_value, np.inf)
        scaling_offset = None

    scaling_factor = raw_factor / magnitude

    return scaling_factor, scaling_offset
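
A minimal usage sketch for the "start_values" method (hypothetical inputs; get_reparametrize_functions may require additional params columns in practice). The factor is the absolute value of the start parameters, clipped from below at clipping_value, and the offset is None:

import pandas as pd

params = pd.DataFrame({"value": [0.05, 2.0, -5.0]})

def criterion(p):
    return (p["value"] ** 2).sum()

factor, offset = calculate_scaling_factor_and_offset(
    params=params,
    constraints=[],
    criterion=criterion,
    method="start_values",
)
# factor is roughly [0.1, 2.0, 5.0]; offset is None.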
Example #12
def jac(params, func_kwargs):
    derivative_dict = first_derivative(
        func=simulate_aggregated_moments,
        params=params,
        func_kwargs=func_kwargs,
    )

    g = derivative_dict["derivative"]
    return g.to_numpy()
Example #13
def test_first_derivative_jacobian_richardson(example_function_jacobian_fixtures):
    f = example_function_jacobian_fixtures["func"]
    fprime = example_function_jacobian_fixtures["func_prime"]

    true_fprime = fprime(np.ones(3))
    scipy_fprime = approx_derivative(f, np.ones(3))
    our_fprime = first_derivative(f, np.ones(3), n_steps=3, method="central", n_cores=1)

    aaae(scipy_fprime, our_fprime["derivative"])
    aaae(true_fprime, our_fprime["derivative"])
Example #14
def test_derivative_plot(func_and_params, n_steps):
    func, params = func_and_params
    derivative = first_derivative(
        func,
        params,
        n_steps=n_steps,
        return_func_value=True,
        return_info=True,
    )
    fig = derivative_plot(derivative)
    fig.clf()
Example #15
def test_derivative_plot(func_and_params, n_steps, grid):
    func, params = func_and_params
    derivative = first_derivative(
        func,
        params,
        n_steps=n_steps,
        return_func_value=True,
        return_info=True,
    )

    derivative_plot(derivative, combine_plots_in_grid=grid)
Example #16
def test_first_derivative_scalar_with_return_func_value(method):
    def f(x):
        return x**2

    calculated = first_derivative(f,
                                  3.0,
                                  return_func_value=True,
                                  return_info=False,
                                  n_cores=1)
    expected = {"derivative": 6.0, "func_value": 9.0}
    assert calculated == expected
Example #17
def test_first_derivative_gradient(binary_choice_inputs, method):
    fix = binary_choice_inputs
    func = partial(logit_loglike, y=fix["y"], x=fix["x"])

    calculated = first_derivative(
        func=func,
        method=method,
        params=fix["params_np"],
        n_steps=1,
        f0=func(fix["params_np"]),
        n_cores=1,
    )

    expected = logit_loglike_gradient(fix["params_np"], fix["y"], fix["x"])

    aaae(calculated["derivative"], expected, decimal=4)
Example #18
def test_penalty_derivatives(func, deriv):
    np.random.seed(1234)
    x = np.random.uniform(size=5)
    x0 = np.random.uniform(size=5)
    slope = 0.3
    constant = 3
    dim_out = 8

    calculated = deriv(x, constant, slope, x0, dim_out)

    partialed = functools.partial(func,
                                  constant=constant,
                                  slope=slope,
                                  x0=x0,
                                  dim_out=dim_out)
    expected = first_derivative(partialed, x)

    aaae(calculated, expected["derivative"])
Example #19
def test_first_derivative_jacobian(binary_choice_inputs, method):
    fix = binary_choice_inputs
    func = partial(logit_loglikeobs, y=fix["y"], x=fix["x"])

    calculated = first_derivative(
        func=func,
        method=method,
        params=fix["params_np"],
        n_steps=1,
        base_steps=None,
        lower_bounds=np.full(fix["params_np"].shape, -np.inf),
        upper_bounds=np.full(fix["params_np"].shape, np.inf),
        min_steps=1e-8,
        step_ratio=2.0,
        f0=func(fix["params_np"]),
        n_cores=1,
    )

    expected = logit_loglikeobs_jacobian(fix["params_np"], fix["y"], fix["x"])

    aaae(calculated["derivative"], expected, decimal=6)
Example #20
def estimate_ml(
    loglike,
    params,
    optimize_options,
    *,
    lower_bounds=None,
    upper_bounds=None,
    constraints=None,
    logging=False,
    log_options=None,
    loglike_kwargs=None,
    numdiff_options=None,
    jacobian=None,
    jacobian_kwargs=None,
    hessian=None,
    hessian_kwargs=None,
    design_info=None,
):
    """Do a maximum likelihood (ml) estimation.

    This is a high level interface of our lower level functions for maximization,
    numerical differentiation and inference. It does the full workflow for maximum
    likelihood estimation with just one function call.

    While we have good defaults, you can still configure each aspect of each step
    via the optional arguments of this function. If you find it easier to do the
    maximization separately, you can do so and just provide the optimal parameters as
    ``params`` and set ``optimize_options=False``.

    Args:
        loglike (callable): Likelihood function that takes a params (and potentially
            other keyword arguments) and returns a dictionary that has at least the
            entries "value" (a scalar float) and "contributions" (a 1d numpy array or
            pytree) with the log likelihood contribution per individual.
        params (pytree): A pytree containing the estimated or start parameters of the
            likelihood model. If the supplied parameters are estimated parameters, set
            optimize_options to False. Pytrees can be a numpy array, a pandas Series, a
            DataFrame with "value" column, a float and any kind of (nested) dictionary
            or list containing these elements. See :ref:`params` for examples.
        optimize_options (dict, str or False): Keyword arguments that govern the
            numerical optimization. Valid entries are all arguments of
            :func:`~estimagic.optimization.optimize.minimize` except for those that are
            passed explicitly to ``estimate_ml``. If you pass False as optimize_options
            you signal that ``params`` are already the optimal parameters and no
            numerical optimization is needed. If you pass a str as optimize_options it
            is used as the ``algorithm`` option.
        lower_bounds (pytree): A pytree with the same structure as params with lower
            bounds for the parameters. Can be ``-np.inf`` for parameters with no lower
            bound.
        upper_bounds (pytree): As lower_bounds. Can be ``np.inf`` for parameters with
            no upper bound.
        constraints (list, dict): List with constraint dictionaries or single dict.
            See :ref:`constraints`.
        logging (pathlib.Path, str or False): Path to sqlite3 file (which typically has
            the file extension ``.db``). If the file does not exist, it will be created.
            The dashboard can only be used when logging is used.
        log_options (dict): Additional keyword arguments to configure the logging.

            - "fast_logging" (bool):
                A boolean that determines if "unsafe" settings are used to speed up
                write processes to the database. This should only be used for very
                short running criterion functions where the main purpose of the log is
                a real-time dashboard and it would not be catastrophic to get a
                corrupted database in case of a sudden system shutdown. If one
                evaluation of the criterion function (and gradient if applicable)
                takes more than 100 ms, the logging overhead is negligible.
            - "if_table_exists" (str):
                One of "extend", "replace", "raise". What to do if the tables we want
                to write to already exist. Default "extend".
            - "if_database_exists" (str):
                One of "extend", "replace", "raise". What to do if the database we
                want to write to already exists. Default "extend".
        loglike_kwargs (dict): Additional keyword arguments for loglike.
        numdiff_options (dict): Keyword arguments for the calculation of numerical
            derivatives for the calculation of standard errors. See
            :ref:`first_derivative` for details.
        jacobian (callable or None): A function that takes ``params`` and potentially
            other keyword arguments and returns the jacobian of loglike["contributions"]
            with respect to the params. Note that you only need to pass a Jacobian
            function if you have a closed form Jacobian. If you pass None, a numerical
            Jacobian will be calculated.
        jacobian_kwargs (dict): Additional keyword arguments for the Jacobian function.
        hessian (callable or None or False): A function that takes ``params`` and
            potentially other keyword arguments and returns the Hessian of
            loglike["value"] with respect to the params.  If you pass None, a numerical
            Hessian will be calculated. If you pass ``False``, you signal that no
            Hessian should be calculated. Thus, no result that requires the Hessian will
            be calculated.
        hessian_kwargs (dict): Additional keyword arguments for the Hessian function.
        design_info (pandas.DataFrame): DataFrame with one row per observation that
            contains some or all of the variables "psu" (primary sampling unit),
            "strata" and "fpc" (finite population corrector). See
            :ref:`robust_likelihood_inference` for details.

    Returns:
        LikelihoodResult: A LikelihoodResult object.

    """
    # ==================================================================================
    # Check and process inputs
    # ==================================================================================
    is_optimized = optimize_options is False

    if not is_optimized:
        if isinstance(optimize_options, str):
            optimize_options = {"algorithm": optimize_options}

        check_optimization_options(
            optimize_options,
            usage="estimate_ml",
            algorithm_mandatory=True,
        )

    jac_case = get_derivative_case(jacobian)
    hess_case = get_derivative_case(hessian)

    check_numdiff_options(numdiff_options, "estimate_ml")
    numdiff_options = {} if numdiff_options in (None, False) else numdiff_options
    loglike_kwargs = {} if loglike_kwargs is None else loglike_kwargs
    constraints = [] if constraints is None else constraints
    jacobian_kwargs = {} if jacobian_kwargs is None else jacobian_kwargs
    hessian_kwargs = {} if hessian_kwargs is None else hessian_kwargs

    # ==================================================================================
    # Calculate estimates via maximization (if necessary)
    # ==================================================================================

    if is_optimized:
        estimates = params
        opt_res = None
    else:
        opt_res = maximize(
            criterion=loglike,
            criterion_kwargs=loglike_kwargs,
            params=params,
            lower_bounds=lower_bounds,
            upper_bounds=upper_bounds,
            constraints=constraints,
            logging=logging,
            log_options=log_options,
            **optimize_options,
        )
        estimates = opt_res.params

    # ==================================================================================
    # Do first function evaluations at estimated parameters
    # ==================================================================================

    try:
        loglike_eval = loglike(estimates, **loglike_kwargs)
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception as e:
        msg = "Error while evaluating loglike at estimated params."
        raise InvalidFunctionError(msg) from e

    if callable(jacobian):
        try:
            jacobian_eval = jacobian(estimates, **jacobian_kwargs)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            msg = "Error while evaluating closed form jacobian at estimated params."
            raise InvalidFunctionError(msg) from e
    else:
        jacobian_eval = None

    if callable(hessian):
        try:
            hessian_eval = hessian(estimates, **hessian_kwargs)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            msg = "Error while evaluating closed form hessian at estimated params."
            raise InvalidFunctionError(msg) from e
    else:
        hessian_eval = None

    # ==================================================================================
    # Get the converter for params and function outputs
    # ==================================================================================

    converter, internal_estimates = get_converter(
        params=estimates,
        constraints=constraints,
        lower_bounds=lower_bounds,
        upper_bounds=upper_bounds,
        func_eval=loglike_eval,
        primary_key="contributions",
        scaling=False,
        scaling_options=None,
        derivative_eval=jacobian_eval,
    )

    # ==================================================================================
    # Calculate internal jacobian
    # ==================================================================================

    if jac_case == "closed-form":
        int_jac = converter.derivative_to_internal(jacobian_eval,
                                                   internal_estimates.values)
    elif jac_case == "numerical":

        def func(x):
            p = converter.params_from_internal(x)
            loglike_eval = loglike(p, **loglike_kwargs)["contributions"]
            out = converter.func_to_internal(loglike_eval)
            return out

        jac_res = first_derivative(
            func=func,
            params=internal_estimates.values,
            lower_bounds=internal_estimates.lower_bounds,
            upper_bounds=internal_estimates.upper_bounds,
            **numdiff_options,
        )

        int_jac = jac_res["derivative"]
    else:
        int_jac = None

    if constraints in [None, []] and jacobian_eval is None and int_jac is not None:
        loglike_contribs = loglike_eval
        if isinstance(loglike_contribs, dict) and "contributions" in loglike_contribs:
            loglike_contribs = loglike_contribs["contributions"]

        jacobian_eval = matrix_to_block_tree(
            int_jac,
            outer_tree=loglike_contribs,
            inner_tree=estimates,
        )

    if jacobian_eval is None:
        _no_jac_reason = (
            "no closed form jacobian was provided and there are constraints")
    else:
        _no_jac_reason = None
    # ==================================================================================
    # Calculate internal Hessian
    # ==================================================================================

    if hess_case == "skip":
        int_hess = None
    elif hess_case == "numerical":

        def func(x):
            p = converter.params_from_internal(x)
            loglike_eval = loglike(p, **loglike_kwargs)["value"]
            out = converter.func_to_internal(loglike_eval)
            return out

        hess_res = second_derivative(
            func=func,
            params=internal_estimates.values,
            lower_bounds=internal_estimates.lower_bounds,
            upper_bounds=internal_estimates.upper_bounds,
            **numdiff_options,
        )
        int_hess = hess_res["derivative"]
    elif hess_case == "closed-form" and constraints:
        raise NotImplementedError(
            "Closed-form Hessians are not yet compatible with constraints.")
    elif hess_case == "closed-form":
        int_hess = block_tree_to_matrix(
            hessian_eval,
            outer_tree=params,
            inner_tree=params,
        )
    else:
        raise ValueError()

    if constraints in [None, []] and hessian_eval is None and int_hess is not None:
        hessian_eval = matrix_to_block_tree(
            int_hess,
            outer_tree=params,
            inner_tree=params,
        )

    if hessian_eval is None:
        if hess_case == "skip":
            _no_hess_reason = "the hessian calculation was explicitly skipped."
        else:
            _no_hess_reason = (
                "no closed form hessian was provided and there are constraints"
            )
    else:
        _no_hess_reason = None

    # ==================================================================================
    # create a LikelihoodResult object
    # ==================================================================================

    free_estimates = calculate_free_estimates(estimates, internal_estimates)

    res = LikelihoodResult(
        _params=estimates,
        _converter=converter,
        _optimize_result=opt_res,
        _jacobian=jacobian_eval,
        _no_jacobian_reason=_no_jac_reason,
        _hessian=hessian_eval,
        _no_hessian_reason=_no_hess_reason,
        _internal_jacobian=int_jac,
        _internal_hessian=int_hess,
        _design_info=design_info,
        _internal_estimates=internal_estimates,
        _free_estimates=free_estimates,
        _has_constraints=constraints not in [None, []],
    )

    return res
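
A minimal usage sketch (not taken from the original code base; it assumes scipy is installed and that "scipy_lbfgsb" is an available algorithm): estimating the mean and standard deviation of a normal sample by maximum likelihood.

import numpy as np
from scipy import stats

rng = np.random.default_rng(0)
data = rng.normal(loc=1.0, scale=2.0, size=500)

def loglike(params):
    # Per-observation log likelihood contributions plus their sum, as required above.
    contribs = stats.norm.logpdf(data, loc=params["mean"], scale=params["sd"])
    return {"contributions": contribs, "value": contribs.sum()}

res = estimate_ml(
    loglike=loglike,
    params={"mean": 0.0, "sd": 1.0},
    optimize_options="scipy_lbfgsb",
    lower_bounds={"mean": -np.inf, "sd": 1e-6},
)
# res is a LikelihoodResult; the point estimates should be close to 1.0 and 2.0.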
Example #21
def estimate_msm(
    simulate_moments,
    empirical_moments,
    moments_cov,
    params,
    optimize_options,
    *,
    lower_bounds=None,
    upper_bounds=None,
    constraints=None,
    logging=False,
    log_options=None,
    simulate_moments_kwargs=None,
    weights="diagonal",
    numdiff_options=None,
    jacobian=None,
    jacobian_kwargs=None,
):
    """Do a method of simulated moments or indirect inference estimation.

    This is a high level interface for our lower level functions for minimization,
    numerical differentiation, inference and sensitivity analysis. It does the full
    workflow for MSM or indirect inference estimation with just one function call.

    While we have good defaults, you can still configure each aspect of each step
    via the optional arguments of this function. If you find it easier to do the
    minimization separately, you can do so and just provide the optimal parameters as
    ``params`` and set ``optimize_options=False``.

    Args:
        simulate_moments (callable): Function that takes params and potentially other
            keyword arguments and returns a pytree with simulated moments. If the
            function returns a dict containing the key ``"simulated_moments"`` we only
            use the value corresponding to that key. Other entries are stored in the
            log database if you use logging.

        empirical_moments (pytree): A pytree with the same structure as the
            result of ``simulate_moments``.
        moments_cov (pytree): A block-pytree containing the covariance
            matrix of the empirical moments. This is typically calculated with
            our ``get_moments_cov`` function.
        params (pytree): A pytree containing the estimated or start parameters of the
            model. If the supplied parameters are estimated parameters, set
            optimize_options to False. Pytrees can be a numpy array, a pandas Series, a
            DataFrame with "value" column, a float and any kind of (nested) dictionary
            or list containing these elements. See :ref:`params` for examples.
        optimize_options (dict, str or False): Keyword arguments that govern the
            numerical optimization. Valid entries are all arguments of
            :func:`~estimagic.optimization.optimize.minimize` except for those that can
            be passed explicitly to ``estimate_msm``.  If you pass False as
            ``optimize_options`` you signal that ``params`` are already
            the optimal parameters and no numerical optimization is needed. If you pass
            a str as optimize_options it is used as the ``algorithm`` option.
        lower_bounds (pytree): A pytree with the same structure as params with lower
            bounds for the parameters. Can be ``-np.inf`` for parameters with no lower
            bound.
        upper_bounds (pytree): As lower_bounds. Can be ``np.inf`` for parameters with
            no upper bound.
        simulate_moments_kwargs (dict): Additional keyword arguments for
            ``simulate_moments``.
        weights (str): One of "diagonal" (default), "identity" or "optimal".
            Note that "optimal" refers to the asymptotically optimal weighting matrix
            and is often not a good choice due to large finite sample bias.
        constraints (list, dict): List with constraint dictionaries or single dict.
            See :ref:`constraints`.
        logging (pathlib.Path, str or False): Path to sqlite3 file (which typically has
            the file extension ``.db``). If the file does not exist, it will be created.
            The dashboard can only be used when logging is used.
        log_options (dict): Additional keyword arguments to configure the logging.

            - "fast_logging" (bool):
                A boolean that determines if "unsafe" settings are used to speed up
                write processes to the database. This should only be used for very short
                running criterion functions where the main purpose of the log is a
                real-time dashboard and it would not be catastrophic to get a corrupted
                database in case of a sudden system shutdown. If one evaluation of the
                criterion function (and gradient if applicable) takes more than 100 ms,
                the logging overhead is negligible.
            - "if_table_exists" (str):
                One of "extend", "replace", "raise". What to do if the tables we want to
                write to already exist. Default "extend".
            - "if_database_exists" (str):
                One of "extend", "replace", "raise". What to do if the database we want
                to write to already exists. Default "extend".
        numdiff_options (dict): Keyword arguments for the calculation of numerical
            derivatives for the calculation of standard errors. See
            :ref:`first_derivative` for details. Note that by default we increase the
            step_size by a factor of 2 compared to the rule of thumb for optimal
            step sizes. This is because many msm criterion functions are slightly noisy.
        jacobian (callable): A function that takes ``params`` and
            potentially other keyword arguments and returns the jacobian of
            simulate_moments with respect to the params.
        jacobian_kwargs (dict): Additional keyword arguments for the jacobian function.

    Returns:
        MomentsResult: A MomentsResult object with the estimated parameters, standard
            errors, sensitivity measures and the covariance matrix of the parameters.

    """
    # ==================================================================================
    # Check and process inputs
    # ==================================================================================

    if weights not in ["diagonal", "optimal"]:
        raise NotImplementedError(
            "Custom weighting matrices are not yet implemented.")

    is_optimized = optimize_options is False

    if not is_optimized:
        if isinstance(optimize_options, str):
            optimize_options = {"algorithm": optimize_options}

        check_optimization_options(
            optimize_options,
            usage="estimate_msm",
            algorithm_mandatory=True,
        )

    jac_case = get_derivative_case(jacobian)

    check_numdiff_options(numdiff_options, "estimate_msm")

    numdiff_options = {} if numdiff_options in (
        None, False) else numdiff_options.copy()
    if "scaling_factor" not in numdiff_options:
        numdiff_options["scaling_factor"] = 2

    weights, internal_weights = get_weighting_matrix(
        moments_cov=moments_cov,
        method=weights,
        empirical_moments=empirical_moments,
        return_type="pytree_and_array",
    )

    internal_moments_cov = block_tree_to_matrix(
        moments_cov,
        outer_tree=empirical_moments,
        inner_tree=empirical_moments,
    )

    constraints = [] if constraints is None else constraints
    jacobian_kwargs = {} if jacobian_kwargs is None else jacobian_kwargs
    simulate_moments_kwargs = ({} if simulate_moments_kwargs is None else
                               simulate_moments_kwargs)

    # ==================================================================================
    # Calculate estimates via minimization (if necessary)
    # ==================================================================================

    if is_optimized:
        estimates = params
        opt_res = None
    else:
        funcs = get_msm_optimization_functions(
            simulate_moments=simulate_moments,
            empirical_moments=empirical_moments,
            weights=weights,
            simulate_moments_kwargs=simulate_moments_kwargs,
            # Always pass None because we do not support closed form jacobians during
            # optimization yet. Otherwise we would get a NotImplementedError
            jacobian=None,
            jacobian_kwargs=jacobian_kwargs,
        )

        opt_res = minimize(
            lower_bounds=lower_bounds,
            upper_bounds=upper_bounds,
            constraints=constraints,
            logging=logging,
            log_options=log_options,
            params=params,
            **funcs,  # contains the criterion func and possibly more
            **optimize_options,
        )

        estimates = opt_res.params

    # ==================================================================================
    # do first function evaluations
    # ==================================================================================

    try:
        sim_mom_eval = simulate_moments(estimates, **simulate_moments_kwargs)
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception as e:
        msg = "Error while evaluating simulate_moments at estimated params."
        raise InvalidFunctionError(msg) from e

    if callable(jacobian):
        try:
            jacobian_eval = jacobian(estimates, **jacobian_kwargs)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            msg = "Error while evaluating derivative at estimated params."
            raise InvalidFunctionError(msg) from e

    else:
        jacobian_eval = None

    # ==================================================================================
    # get converter for params and function outputs
    # ==================================================================================

    def helper(params):
        raw = simulate_moments(params, **simulate_moments_kwargs)
        if isinstance(raw, dict) and "simulated_moments" in raw:
            out = {"contributions": raw["simulated_moments"]}
        else:
            out = {"contributions": raw}
        return out

    if isinstance(sim_mom_eval, dict) and "simulated_moments" in sim_mom_eval:
        func_eval = {"contributions": sim_mom_eval["simulated_moments"]}
    else:
        func_eval = {"contributions": sim_mom_eval}

    converter, internal_estimates = get_converter(
        params=estimates,
        constraints=constraints,
        lower_bounds=lower_bounds,
        upper_bounds=upper_bounds,
        func_eval=func_eval,
        primary_key="contributions",
        scaling=False,
        scaling_options=None,
        derivative_eval=jacobian_eval,
    )

    # ==================================================================================
    # Calculate internal jacobian
    # ==================================================================================

    if jac_case == "closed-form":
        x = converter.params_to_internal(estimates)
        int_jac = converter.derivative_to_internal(jacobian_eval, x)
    else:

        def func(x):
            p = converter.params_from_internal(x)
            sim_mom_eval = helper(p)
            out = converter.func_to_internal(sim_mom_eval)
            return out

        int_jac = first_derivative(
            func=func,
            params=internal_estimates.values,
            lower_bounds=internal_estimates.lower_bounds,
            upper_bounds=internal_estimates.upper_bounds,
            **numdiff_options,
        )["derivative"]

    # ==================================================================================
    # Calculate external jac (if no constraints and not closed form )
    # ==================================================================================

    if constraints in [None, []] and jacobian_eval is None and int_jac is not None:
        jacobian_eval = matrix_to_block_tree(
            int_jac,
            outer_tree=empirical_moments,
            inner_tree=estimates,
        )

    if jacobian_eval is None:
        _no_jac_reason = (
            "no closed form jacobian was provided and there are constraints")
    else:
        _no_jac_reason = None

    # ==================================================================================
    # Create MomentsResult
    # ==================================================================================

    free_estimates = calculate_free_estimates(estimates, internal_estimates)

    res = MomentsResult(
        _params=estimates,
        _weights=weights,
        _converter=converter,
        _internal_weights=internal_weights,
        _internal_moments_cov=internal_moments_cov,
        _internal_jacobian=int_jac,
        _jacobian=jacobian_eval,
        _no_jacobian_reason=_no_jac_reason,
        _empirical_moments=empirical_moments,
        _internal_estimates=internal_estimates,
        _free_estimates=free_estimates,
        _has_constraints=constraints not in [None, []],
    )
    return res
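
A minimal usage sketch (hypothetical model, not from the original code base): matching the mean and variance of a normal sample, with moments_cov built by hand instead of a bootstrap, and "scipy_lbfgsb" assumed to be an available algorithm.

import numpy as np

rng = np.random.default_rng(0)
data = rng.normal(loc=1.0, scale=2.0, size=500)

empirical_moments = np.array([data.mean(), data.var()])
# Asymptotic variances of the sample mean and sample variance of a normal sample.
moments_cov = np.diag([data.var() / len(data), 2 * data.var() ** 2 / len(data)])

def simulate_moments(params, n_draws=10_000, seed=1):
    rng = np.random.default_rng(seed)
    sim = params[0] + params[1] * rng.normal(size=n_draws)
    return np.array([sim.mean(), sim.var()])

res = estimate_msm(
    simulate_moments=simulate_moments,
    empirical_moments=empirical_moments,
    moments_cov=moments_cov,
    params=np.array([0.0, 1.0]),
    optimize_options="scipy_lbfgsb",
    lower_bounds=np.array([-np.inf, 0.1]),
)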
Example #22
    internal_p = params[f"internal_value{number}"][keep].to_numpy()
    fixed_val = pp["_internal_fixed_value"].to_numpy()
    pre_repl = pp["_pre_replacements"].to_numpy()
    post_repl = pp["_post_replacements"].to_numpy()

    func = partial(
        reparametrize_from_internal,
        **{
            "fixed_values": fixed_val,
            "pre_replacements": pre_repl,
            "processed_constraints": pc,
            "post_replacements": post_repl,
        },
    )
    numerical_jacobian = first_derivative(func, internal_p)

    # Calling convert_external_derivative_to_internal with the identity matrix as the
    # external derivative is just a trick to get out the jacobian of
    # reparametrize_from_internal: by the chain rule the result is
    # I @ d(external)/d(internal) = d(external)/d(internal).
    jacobian = convert_external_derivative_to_internal(
        external_derivative=np.eye(len(fixed_val)),
        internal_values=internal_p,
        fixed_values=fixed_val,
        pre_replacements=pre_repl,
        processed_constraints=pc,
        post_replacements=post_repl,
    )

    aaae(jacobian, numerical_jacobian["derivative"])

Example #23
    processed_constraints, processed_params = process_constraints(
        constraints, params)

    internal_params = reparametrize_to_internal(
        external=params["value"].to_numpy(),
        internal_free=processed_params["_internal_free"],
        processed_constraints=processed_constraints,
    )

    if cov_type == "jacobian":
        numdiff_options = numdiff_options.copy()
        numdiff_options["key"] = "contributions"

        internal_jac = first_derivative(
            internal_loglike,
            internal_params,
            **numdiff_options,
        )["derivative"]
        internal_cov = cov_jacobian(internal_jac)
    else:
        raise NotImplementedError(
            "Hessian calculation is not yet implemented.")

    # transform internal covariance matrix
    free_cov = transform_covariance(
        params=params,
        internal_cov=internal_cov,
        constraints=constraints,
        n_samples=n_samples,
        bounds_handling=bounds_handling,
    )
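
cov_jacobian is not defined in this excerpt. Its standard form for likelihood contributions is the inverse outer product of the per-observation scores; a sketch under that assumption:

import numpy as np

def cov_jacobian(jac):
    # jac has one row per observation and one column per internal parameter.
    info_matrix = jac.T @ jac
    return np.linalg.inv(info_matrix)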
Example #24
    _, pp = process_constraints(constraints, params)

    n_free = int(pp._internal_free.sum())
    scaling_factor = np.ones(n_free) * 2  # np.arange(n_free) + 1
    scaling_offset = np.arange(n_free) - 1

    params_to_internal, params_from_internal = get_reparametrize_functions(
        params=params,
        constraints=constraints,
        scaling_factor=scaling_factor,
        scaling_offset=scaling_offset,
    )

    internal_p = params_to_internal(params["value"].to_numpy())

    numerical_jacobian = first_derivative(params_from_internal, internal_p)

    derivative_to_internal = get_derivative_conversion_function(
        params=params,
        constraints=constraints,
        scaling_factor=scaling_factor,
        scaling_offset=scaling_offset,
    )

    # Calling derivative_to_internal with the identity matrix as the external
    # derivative is just a trick to get out the jacobian of reparametrize_from_internal.
    jacobian = derivative_to_internal(
        external_derivative=np.eye(len(params)),
        internal_values=internal_p,
    )
Example #25
def internal_criterion_and_derivative_template(
    x,
    *,
    task,
    direction,
    criterion,
    params,
    reparametrize_from_internal,
    convert_derivative,
    algorithm_info,
    derivative,
    criterion_and_derivative,
    numdiff_options,
    logging,
    db_kwargs,
    error_handling,
    error_penalty,
    first_criterion_evaluation,
    cache,
    cache_size,
    fixed_log_data,
):
    """Template for the internal criterion and derivative function.

    The internal criterion and derivative function only has the arguments x, task and
    algorithm_info. The other arguments will be partialed in by estimagic at some
    point. algorithm_info and possibly even task will be partialed in by the algorithm.

    That is the reason why this function is called a template.

    Args:
        x (np.ndarray): 1d numpy array with internal parameters.
        task (str): One of "criterion", "derivative" and "criterion_and_derivative".
        direction (str): One of "maximize" or "minimize"
        criterion (callable): (partialed) user provided criterion function that takes a
            parameter dataframe as only argument and returns a scalar, an array like
            object or a dictionary. See :ref:`criterion`.
        params (pd.DataFrame): see :ref:`params`
        reparametrize_from_internal (callable): Function that takes x and returns a
            numpy array with the values of the external parameters.
        convert_derivative (callable): Function that takes the derivative of criterion
            at the external version of x and x and returns the derivative
            of the internal criterion.
        algorithm_info (dict): Dict with the following entries:
            "primary_criterion_entry": One of "value", "contributions" and
                "root_contributions" or "dict".
            "parallelizes": Bool that indicates if the algorithm calls the internal
                criterion function in parallel. If so, caching is disabled.
            "needs_scaling": bool
            "name": string
        derivative (callable, optional): (partialed) user provided function that
            calculates the first derivative of criterion. For most algorithms, this is
            the gradient of the scalar output (or "value" entry of the dict). However
            some algorithms (e.g. bhhh) require the jacobian of the "contributions"
            entry of the dict. You will get an error if you provide the wrong type of
            derivative.
        criterion_and_derivative (callable): Function that returns criterion
            and derivative as a tuple. This can be used to exploit synergies in the
            evaluation of both functions. The first element of the tuple has to be
            exactly the same as the output of criterion. The second has to be exactly
            the same as the output of derivative.
        numdiff_options (dict): Keyword arguments for the calculation of numerical
            derivatives. See :ref:`first_derivative` for details. Note that the default
            method is changed to "forward" for speed reasons.
        logging (bool): Whether logging is used.
        db_kwargs (dict): Dictionary with entries "database", "path" and "fast_logging".
        error_handling (str): Either "raise" or "continue". Note that "continue" does
            not absolutely guarantee that no error is raised but we try to handle as
            many errors as possible in that case without aborting the optimization.
        error_penalty (dict): Dict with the entries "constant" (float) and "slope"
            (float). If the criterion or derivative raise an error and error_handling is
            "continue", return ``constant + slope * norm(params - start_params)`` where
            ``norm`` is the euclidean distance as criterion value and adjust the
            derivative accordingly. This is meant to guide the optimizer back into a
            valid region of parameter space (in direction of the start parameters).
            Note that the constant has to be high enough to ensure that the penalty is
            actually a bad function value. The default constant is 2 times the criterion
            value at the start parameters. The default slope is 0.1.
        first_criterion_evaluation (dict): Dictionary with entries "internal_params",
            "external_params", "output".
        cache (dict): Dictionary used as cache for criterion and derivative evaluations.
        cache_size (int): Number of evaluations that are kept in cache. Default 10.
        fixed_log_data (dict): Dictionary with fixed data to be saved in the database.
            Has the entries "stage" (str) and "substage" (int).

    Returns:
        float, np.ndarray or tuple: If task=="criterion" it returns the output of
            criterion which can be a float or 1d numpy array. If task=="derivative" it
            returns the first derivative of criterion, which is a numpy array.
            If task=="criterion_and_derivative" it returns both as a tuple.

    """
    if algorithm_info["primary_criterion_entry"] == "root_contributions":
        if direction == "maximize":
            msg = (
                "Optimizers that exploit a least squares structure like {} can only be "
                "used for minimization.")
            raise ValueError(msg.format(algorithm_info["name"]))

    x_hash = hash_array(x)
    cache_entry = cache.get(x_hash, {})

    to_dos = _determine_to_dos(task, cache_entry, derivative,
                               criterion_and_derivative)

    caught_exceptions = []
    new_criterion, new_derivative, new_external_derivative = None, None, None
    current_params = params.copy()
    external_x = reparametrize_from_internal(x)
    current_params["value"] = external_x

    if to_dos == []:
        pass
    elif "numerical_criterion_and_derivative" in to_dos:

        def func(x):
            external_x = reparametrize_from_internal(x)
            p = params.copy()
            p["value"] = external_x
            return criterion(p)

        options = numdiff_options.copy()
        options["key"] = algorithm_info["primary_criterion_entry"]
        options["f0"] = cache_entry.get("criterion", None)
        options["return_func_value"] = True

        try:
            derivative_dict = first_derivative(func, x, **options)
            new_derivative = {
                algorithm_info["primary_criterion_entry"]:
                derivative_dict["derivative"]
            }
            new_criterion = derivative_dict["func_value"]
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            caught_exceptions.append(get_traceback())
            if "criterion" in cache_entry:
                raise Exception(DERIVATIVE_ERROR_MESSAGE) from e

    elif "criterion_and_derivative" in to_dos:
        try:
            new_criterion, new_external_derivative = criterion_and_derivative(
                current_params)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            caught_exceptions.append(get_traceback())
            if "criterion" in cache_entry:
                raise Exception(DERIVATIVE_ERROR_MESSAGE) from e

    else:
        if "criterion" in to_dos:
            try:
                new_criterion = criterion(current_params)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                caught_exceptions.append(get_traceback())
                if "derivative" in cache_entry:
                    raise Exception(CRITERION_ERROR_MESSAGE) from e

        if "derivative" in to_dos:
            try:
                new_external_derivative = derivative(current_params)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                caught_exceptions.append(get_traceback())
                if "criterion" in cache_entry:
                    raise Exception(DERIVATIVE_ERROR_MESSAGE) from e

    if new_derivative is None and new_external_derivative is not None:
        if not isinstance(new_external_derivative, dict):
            new_external_derivative = {
                algorithm_info["primary_criterion_entry"]:
                new_external_derivative
            }

        new_derivative = {
            k: convert_derivative(v, internal_values=x)
            for k, v in new_external_derivative.items()
        }

    if caught_exceptions:
        if error_handling == "continue":
            new_criterion, new_derivative = _penalty_and_derivative(
                x, first_criterion_evaluation, error_penalty, algorithm_info)
            warnings.warn("\n\n".join(caught_exceptions))
        else:
            raise Exception("\n\n".join(caught_exceptions))

    if not algorithm_info["parallelizes"] and cache_size >= 1:
        _cache_new_evaluations(new_criterion, new_derivative, x_hash, cache,
                               cache_size)

    new_criterion = _check_and_harmonize_criterion_output(
        cache_entry.get("criterion", new_criterion), algorithm_info)

    new_derivative = _check_and_harmonize_derivative(
        cache_entry.get("derivative", new_derivative), algorithm_info)

    if (new_criterion is not None or new_derivative is not None) and logging:
        _log_new_evaluations(
            new_criterion=new_criterion,
            new_derivative=new_derivative,
            external_x=external_x,
            caught_exceptions=caught_exceptions,
            db_kwargs=db_kwargs,
            fixed_log_data=fixed_log_data,
        )

    res = _get_output_for_optimizer(new_criterion, new_derivative, task,
                                    algorithm_info, direction)
    return res
Example #26
def jacobian(p):
    return first_derivative(
        constraint_func,
        p,
        **options,
    )["derivative"]
Example #27
def internal_criterion_and_derivative_template(
    x,
    *,
    task,
    direction,
    criterion,
    converter,
    algo_info,
    derivative,
    criterion_and_derivative,
    numdiff_options,
    logging,
    db_kwargs,
    error_handling,
    error_penalty_func,
    fixed_log_data,
    history_container=None,
    return_history_entry=False,
):
    """Template for the internal criterion and derivative function.

    This function forms the basis of all functions that define the optimization problem
    and are passed to the internal optimizers in estimagic. I.e. the criterion,
    derivative and criterion_and_derivative functions.

    Most of the arguments of this function will be partialled in before the functions
    are passed to internal optimizers.

    That is the reason why this function is called a template.

    Args:
        x (np.ndarray): 1d numpy array with internal parameters.
        task (str): One of "criterion", "derivative" and "criterion_and_derivative".
        direction (str): One of "maximize" or "minimize"
        criterion (callable): (partialed) user provided criterion function that takes a
            parameter dataframe as only argument and returns a scalar, an array like
            object or a dictionary. See :ref:`criterion`.
        converter (Converter): NamedTuple with methods to convert between internal
            and external derivatives, parameters and criterion outputs.
        algo_info (AlgoInfo): NamedTuple with attributes
            - primary_criterion_entry
            - name
            - parallelizes
            - needs_scaling
            - is_available
        derivative (callable, optional): (partialed) user provided function that
            calculates the first derivative of criterion. For most algorithms, this is
            the gradient of the scalar output (or "value" entry of the dict). However
            some algorithms (e.g. bhhh) require the jacobian of the "contributions"
            entry of the dict. You will get an error if you provide the wrong type of
            derivative.
        criterion_and_derivative (callable): Function that returns criterion
            and derivative as a tuple. This can be used to exploit synergies in the
            evaluation of both functions. The first element of the tuple has to be
            exactly the same as the output of criterion. The second has to be exactly
            the same as the output of derivative.
        numdiff_options (dict): Keyword arguments for the calculation of numerical
            derivatives. See :ref:`first_derivative` for details. Note that the default
            method is changed to "forward" for speed reasons.
        logging (bool): Whether logging is used.
        db_kwargs (dict): Dictionary with entries "database", "path" and "fast_logging".
        error_handling (str): Either "raise" or "continue". Note that "continue" does
            not absolutely guarantee that no error is raised but we try to handle as
            many errors as possible in that case without aborting the optimization.
        error_penalty_func (callable): Function that takes ``x`` and ``task`` and
            returns a penalized criterion function, its derivative or both (depending
            on task).
        fixed_log_data (dict): Dictionary with fixed data to be saved in the database.
            Has the entries "stage" (str) and "substage" (int).
        history_container (list or None): List to which parameter, criterion and
            derivative histories are appended. Should be set to None if an algorithm
            parallelizes over criterion or derivative evaluations.
        return_history_entry (bool): Whether the history container should be returned.

    Returns:
        float, np.ndarray or tuple: If task=="criterion" it returns the output of
            criterion which can be a float or 1d numpy array. If task=="derivative" it
            returns the first derivative of criterion, which is a numpy array.
            If task=="criterion_and_derivative" it returns both as a tuple.

    """
    now = time.perf_counter()
    to_dos = _determine_to_dos(task, derivative, criterion_and_derivative)

    caught_exceptions = []
    new_criterion, new_external_criterion = None, None
    new_derivative, new_external_derivative = None, None
    current_params, external_x = converter.params_from_internal(
        x,
        return_type="tree_and_flat",
    )
    if to_dos == []:
        pass
    elif "numerical_criterion_and_derivative" in to_dos:

        def func(x):
            p = converter.params_from_internal(x, "tree")
            crit_full = criterion(p)
            crit_relevant = converter.func_to_internal(crit_full)
            out = {"full": crit_full, "relevant": crit_relevant}
            return out

        options = numdiff_options.copy()
        options["key"] = "relevant"
        options["return_func_value"] = True

        try:
            derivative_dict = first_derivative(func, x, **options)
            new_derivative = derivative_dict["derivative"]
            new_criterion = derivative_dict["func_value"]["relevant"]
            new_external_criterion = derivative_dict["func_value"]["full"]
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            tb = get_traceback()
            caught_exceptions.append(tb)
            if error_handling == "raise":
                msg = (
                    "An error occurred when evaluating criterion to calculate a "
                    "numerical derivative during optimization.")
                raise UserFunctionRuntimeError(msg) from e
            else:

                msg = (
                    "The following exception was caught when evaluating criterion to "
                    f"calculate a numerical derivative during optimization:\n\n{tb}"
                )
                warnings.warn(msg)

    elif "criterion_and_derivative" in to_dos:
        try:
            new_external_criterion, new_external_derivative = criterion_and_derivative(
                current_params)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            tb = get_traceback()
            caught_exceptions.append(tb)
            if error_handling == "raise":
                msg = (
                    "An error ocurred when evaluating criterion_and_derivative "
                    "during optimization.")
                raise UserFunctionRuntimeError(msg) from e
            else:

                msg = (
                    "The following exception was caught when evaluating "
                    f"criterion_and_derivative during optimization:\n\n{tb}")
                warnings.warn(msg)

    else:
        if "criterion" in to_dos:
            try:
                new_external_criterion = criterion(current_params)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                tb = get_traceback()
                caught_exceptions.append(tb)
                if error_handling == "raise":
                    msg = ("An error ocurred when evaluating criterion during "
                           "optimization.")
                    raise UserFunctionRuntimeError(msg) from e
                else:

                    msg = (
                        "The following exception was caught when evaluating "
                        f"criterion during optimization:\n\n{tb}")
                    warnings.warn(msg)

        if "derivative" in to_dos:
            try:
                new_external_derivative = derivative(current_params)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                tb = get_traceback()
                caught_exceptions.append(tb)
                if error_handling == "raise":
                    msg = (
                        "An error ocurred when evaluating derivative during "
                        "optimization")
                    raise UserFunctionRuntimeError(msg) from e
                else:

                    msg = (
                        "The following exception was caught when evaluating "
                        f"derivative during optimization:\n\n{tb}")
                    warnings.warn(msg)

    if new_external_criterion is not None and new_criterion is None:
        new_criterion = converter.func_to_internal(new_external_criterion)

    if new_external_derivative is not None and new_derivative is None:
        new_derivative = converter.derivative_to_internal(
            new_external_derivative, x)

    if caught_exceptions:
        new_criterion, new_derivative = error_penalty_func(
            x, task="criterion_and_derivative")

    if new_criterion is not None:
        scalar_critval = aggregate_func_output_to_value(
            f_eval=new_criterion,
            primary_key=algo_info.primary_criterion_entry,
        )
    else:
        scalar_critval = None

    if (new_criterion is not None or new_derivative is not None) and logging:

        _log_new_evaluations(
            new_criterion=new_external_criterion,
            new_derivative=new_derivative,
            external_x=external_x,
            caught_exceptions=caught_exceptions,
            db_kwargs=db_kwargs,
            fixed_log_data=fixed_log_data,
            scalar_value=scalar_critval,
            now=now,
        )

    res = _get_output_for_optimizer(
        new_criterion=new_criterion,
        new_derivative=new_derivative,
        task=task,
        direction=direction,
    )

    if new_criterion is not None:
        hist_entry = {
            "params": current_params,
            "criterion": scalar_critval,
            "runtime": now,
        }
    else:
        hist_entry = None

    if history_container is not None and new_criterion is not None:
        history_container.append(hist_entry)

    if return_history_entry:
        res = (res, hist_entry)

    return res