# Ejemplo n.º 1
def test_with_ackley():
    """Run a multistart minimization of the Ackley function from a poor start."""

    def ackley(x):
        # Standard Ackley benchmark; its global minimum of 0 is at the origin.
        oscillation = np.exp(np.mean(np.cos(2 * np.pi * x)))
        radial = -20 * np.exp(-0.2 * np.sqrt(np.mean(x**2)))
        return radial - oscillation + 20 + np.exp(1)

    dim = 5

    minimize(
        criterion=ackley,
        params=np.full(dim, -10),
        lower_bounds=np.full(dim, -32),
        upper_bounds=np.full(dim, 32),
        algo_options={"stopping.max_criterion_evaluations": 1000},
        algorithm="scipy_lbfgsb",
        multistart=True,
        multistart_options={
            "n_samples": 200,
            "share_optimizations": 0.1,
            "convergence_max_discoveries": 10,
        },
    )
# Ejemplo n.º 2
def test_invalid_derivative_versions(direction, algorithm, derivative,
                                     criterion_and_derivative):
    """Invalid derivative combinations raise ValueError in both directions."""
    start_params = pd.DataFrame({"value": [1, 2, 3]})

    if direction == "minimize":
        with pytest.raises(ValueError):
            minimize(
                criterion=sos_dict_criterion,
                params=start_params,
                algorithm=algorithm,
                derivative=derivative,
                criterion_and_derivative=criterion_and_derivative,
            )
    else:
        # Flip signs so the maximization mirrors the minimization problem.
        deriv = None if derivative is None else switch_sign(derivative)
        if criterion_and_derivative is None:
            crit_and_deriv = None
        else:
            crit_and_deriv = switch_sign(criterion_and_derivative)
        with pytest.raises(ValueError):
            maximize(
                criterion=switch_sign(sos_dict_criterion),
                params=start_params,
                algorithm=algorithm,
                derivative=deriv,
                criterion_and_derivative=crit_and_deriv,
            )
def test_lists_different_size():
    """Test if error is raised if arguments entered as list are of different length."""
    criteria = [rosen, rosen]
    starts = [params, params, params]  # one entry too many
    algorithms = ["scipy_lbfgsb", "scipy_lbfgsb"]
    with pytest.raises(ValueError):
        minimize(criteria, starts, algorithms)
# Ejemplo n.º 4
def test_missing_criterion_kwargs():
    """Supplying only part of the required criterion kwargs raises an error."""

    def f(params, bla, blubb):
        return (params["value"].to_numpy()**2).sum()

    start = pd.DataFrame(np.ones((3, 1)), columns=["value"])

    # "blubb" is never provided, so the kwargs are incomplete.
    with pytest.raises(InvalidKwargsError):
        minimize(f, start, "scipy_lbfgsb", criterion_kwargs={"bla": 3})
# Ejemplo n.º 5
def test_criterion_fails_at_start_values():
    """A criterion that raises at the start params yields InvalidFunctionError."""

    def just_fail(params):
        raise RuntimeError()

    start = pd.DataFrame(np.ones((3, 1)), columns=["value"])
    expected = "Error while evaluating criterion at start params."
    with pytest.raises(InvalidFunctionError, match=expected):
        minimize(just_fail, start, "scipy_lbfgsb")
# Ejemplo n.º 6
def test_with_invalid_numdiff_options():
    """Unknown keys in numdiff_options raise InvalidKwargsError."""

    def sphere(x):
        return x @ x

    with pytest.raises(InvalidKwargsError):
        minimize(
            criterion=sphere,
            params=np.arange(5),
            algorithm="scipy_lbfgsb",
            numdiff_options={"bla": 15},
        )
def test_lists_different_size():
    """Test if error is raised if arguments entered as list are of different length."""
    algorithms = ["nlopt_neldermead", "scipy_L-BFGS-B"]
    # Three start params for only two criteria/algorithms.
    with pytest.raises(ValueError):
        minimize([rosen, rosen], [params, params, params], algorithms,
                 general_options={"n_cores": 4})
# Ejemplo n.º 8
def test_with_invalid_bounds():
    """Multistart requires bounds; omitting them raises ValueError."""

    def sphere(x):
        return x @ x

    with pytest.raises(ValueError):
        minimize(
            criterion=sphere,
            params=np.arange(5),
            algorithm="scipy_neldermead",
            multistart=True,
        )
# Ejemplo n.º 9
def test_invalid_criterion_versions(criterion, algorithm):
    """Invalid criterion variants raise ValueError."""
    start_params = pd.DataFrame({"value": [1, 2, 3]})

    with pytest.raises(ValueError):
        minimize(criterion=criterion, params=start_params, algorithm=algorithm)
def test_incompatible_constraints_raise_errors(constraints):
    """Incompatible constraint combinations raise InvalidConstraintError."""
    start = np.arange(10)

    with pytest.raises(InvalidConstraintError):
        minimize(
            criterion=lambda x: x @ x,
            params=start,
            algorithm="scipy_lbfgsb",
            constraints=constraints,
        )
def test_wrong_type_criterion():
    """Make sure an error is raised if an argument has a wrong type."""
    # A string is not callable, whether in a batch or passed alone.
    with pytest.raises(TypeError):
        minimize([rosen, "error"], [params, params],
                 ["scipy_lbfgsb", "scipy_lbfgsb"])

    with pytest.raises(TypeError):
        minimize("error", params, "scipy_lbfgsb")
def test_n_cores_not_specified():
    """Raise an error when several optimizations are requested without n_cores."""
    criteria = [rosen, rosen]
    starts = [params, params, params]
    with pytest.raises(ValueError):
        minimize(criteria, starts, ["nlopt_neldermead", "scipy_L-BFGS-B"])
# Ejemplo n.º 13
def test_criterion_with_runtime_error_during_numerical_derivative():
    """A RuntimeError during numerical differentiation is wrapped and reported."""

    def f(params):
        values = params["value"].to_numpy()
        # Only the exact start point evaluates; every perturbed point raises.
        if (values != 1).any():
            raise RuntimeError("Great error message")
        return values @ values

    start = pd.DataFrame(np.ones((3, 1)), columns=["value"])
    expected = "evaluating criterion to calculate a numerical derivative"
    with pytest.raises(UserFunctionRuntimeError, match=expected):
        minimize(f, start, "scipy_lbfgsb")
# Ejemplo n.º 14
def test_criterion_with_runtime_error_derivative_free():
    """A RuntimeError raised mid-optimization is wrapped and reported."""

    def f(params):
        values = params["value"].to_numpy()
        # The start point (all tens) is fine; later iterates trigger the error.
        if values.sum() < 1:
            raise RuntimeError("Great error message")
        return values @ values

    start = pd.DataFrame(np.full((3, 1), 10), columns=["value"])
    expected = "when evaluating criterion during optimization"
    with pytest.raises(UserFunctionRuntimeError, match=expected):
        minimize(f, start, "scipy_neldermead")
def test_wrong_type_constraints():
    """Raise an error when constraints are not passed as a list."""
    with pytest.raises(ValueError):
        minimize(rosen, params, "nlopt_neldermead", constraints={},
                 general_options={"n_cores": 4})
# Ejemplo n.º 16
def test_error_is_raised_with_transforming_constraints(params):
    """Multistart with parameter-transforming constraints is not implemented."""
    probability_constraint = {"loc": [0, 1], "type": "probability"}
    with pytest.raises(NotImplementedError):
        minimize(
            criterion=sos_dict_criterion,
            params=params,
            constraints=[probability_constraint],
            algorithm="scipy_lbfgsb",
            multistart=True,
        )
def test_wrong_type_algorithm():
    """Make sure an error is raised if an argument has a wrong type."""
    # A function object is not a valid algorithm specification.
    with pytest.raises(TypeError):
        minimize([rosen, rosen], [params, params],
                 algorithm=["nlopt_neldermead", rosen],
                 general_options={"n_cores": 4})

    with pytest.raises(TypeError):
        minimize(rosen, params, algorithm=rosen,
                 general_options={"n_cores": 4})
def test_wrong_type_criterion():
    """Make sure an error is raised if an argument has a wrong type."""
    # A string is not callable, whether in a batch or passed alone.
    with pytest.raises(TypeError):
        minimize([rosen, "error"], [params, params],
                 ["nlopt_neldermead", "scipy_L-BFGS-B"],
                 general_options={"n_cores": 1})

    with pytest.raises(TypeError):
        minimize("error", params, "nlopt_neldermead",
                 general_options={"n_cores": 4})
def test_scipy_lbfgsb_actually_calls_criterion_and_derivative():
    """The user-supplied criterion_and_derivative must actually be invoked."""
    start = pd.DataFrame(data=np.ones((10, 1)), columns=["value"])

    def raising_crit_and_deriv(params):
        # If this runs, the optimization blows up — which proves the call.
        raise Exception()

    with pytest.raises(Exception):
        minimize(
            criterion=sos_scalar_criterion,
            params=start,
            algorithm="scipy_lbfgsb",
            criterion_and_derivative=raising_crit_and_deriv,
        )
def test_fix_that_differs_from_start_value_raises_an_error():
    """Fixing a parameter to a value other than its start value is invalid."""
    # Start value at index 1 is 1, but the constraint fixes it to 10.
    fix_constraint = {"loc": [1], "type": "fixed", "value": 10}
    with pytest.raises(InvalidParamsError):
        minimize(
            criterion=lambda x: x @ x,
            params=np.arange(3),
            algorithm="scipy_lbfgsb",
            constraints=[fix_constraint],
        )
# Ejemplo n.º 21
def test_typo_in_criterion_kwarg():
    """A misspelled criterion kwarg triggers a did-you-mean suggestion."""

    def f(params, bla, foo):
        return (params["value"].to_numpy()**2).sum()

    start = pd.DataFrame(np.ones((3, 1)), columns=["value"])

    # "foa" is a typo for "foo"; the error message should point that out.
    with pytest.raises(InvalidKwargsError, match="Did you mean"):
        minimize(f, start, "scipy_lbfgsb",
                 criterion_kwargs={"bla": 3, "foa": 4})
def test_three_independent_constraints():
    """Covariance, fixed and linear constraints on disjoint parameter blocks."""
    start = np.arange(10)
    start[0] = 2  # deviate from arange for the covariance block's start value

    constraints = [
        {"loc": [0, 1, 2], "type": "covariance"},
        {"loc": [4, 5], "type": "fixed"},
        {"loc": [7, 8], "type": "linear", "value": 15, "weights": 1},
    ]

    res = minimize(
        criterion=lambda x: x @ x,
        params=start,
        algorithm="scipy_lbfgsb",
        constraints=constraints,
        algo_options={"convergence.relative_criterion_tolerance": 1e-12},
    )

    # Free params go to 0, fixed ones keep their start values, and the
    # linear block splits its required sum of 15 evenly.
    expected = np.array([0] * 4 + [4, 5] + [0] + [7.5] * 2 + [0])

    aaae(res.params, expected, decimal=4)
# Ejemplo n.º 23
def test_scipy_conference_example():
    """Pytree params (scalar, vector, matrix) with a jax-computed gradient."""

    def criterion(x):
        dist_a = (x["a"] - jnp.pi)**2
        dist_b = jnp.linalg.norm(x["b"] - jnp.arange(3))
        dist_c = jnp.linalg.norm(x["c"] - jnp.eye(2))
        return dist_a + dist_b + dist_c

    start_params = {
        "a": 1.0,
        "b": jnp.ones(3).astype(float),
        "c": jnp.ones((2, 2)).astype(float),
    }

    res = minimize(
        criterion=criterion,
        derivative=jax.grad(criterion),
        params=start_params,
        algorithm="scipy_lbfgsb",
    )

    # The solution should reproduce the targets embedded in the criterion.
    assert isinstance(res.params["b"], jnp.ndarray)
    aaae(res.params["b"], jnp.arange(3))
    aaae(res.params["c"], jnp.eye(2))
    assert np.allclose(res.params["a"], np.pi, atol=1e-4)
# Ejemplo n.º 24
def test_valid_derivative_versions(direction, algorithm, derivative,
                                   criterion_and_derivative):
    """All valid derivative combinations converge to the known optimum."""
    start_params = pd.DataFrame({"value": [1, 2, 3]})

    if direction == "minimize":
        res = minimize(
            criterion=sos_dict_criterion,
            params=start_params,
            algorithm=algorithm,
            derivative=derivative,
            criterion_and_derivative=criterion_and_derivative,
            error_handling="raise",
        )
    else:
        # Flip signs so the maximization mirrors the minimization problem.
        deriv = None if derivative is None else switch_sign(derivative)
        if criterion_and_derivative is None:
            crit_and_deriv = None
        else:
            crit_and_deriv = switch_sign(criterion_and_derivative)
        res = maximize(
            criterion=switch_sign(sos_dict_criterion),
            params=start_params,
            algorithm=algorithm,
            derivative=deriv,
            criterion_and_derivative=crit_and_deriv,
            error_handling="raise",
        )

    aaae(res["solution_params"]["value"].to_numpy(), np.zeros(3), decimal=4)
def test_grtol():
    """Pounders stops on the relative gradient tolerance (or a small step)."""
    np.random.seed(5474)
    true_params = get_random_params(2, 0.3, 0.4, 0, 0.3)
    start_params = true_params.copy()
    start_params["value"] = get_random_params(2, 0.1, 0.2)["value"]

    exog, endog = _simulate_ols_sample(NUM_AGENTS, true_params)
    criterion_func = functools.partial(_ols_criterion, endog=endog, exog=exog)

    # Disable the other gradient criteria so grtol is the binding one.
    result = minimize(
        criterion_func,
        start_params,
        "tao_pounders",
        algo_options={
            "convergence.absolute_gradient_tolerance": False,
            "convergence.scaled_gradient_tolerance": False,
        },
    )

    acceptable_messages = (
        "relative_gradient_tolerance below critical value",
        "step size small",
    )
    assert result["message"] in acceptable_messages

    if result["convergence_code"] == 4:
        criterion_info = result["solution_criterion"]
        assert criterion_info[2] / criterion_info[1] < 10
# Ejemplo n.º 26
def test_multistart_minimize_with_sum_of_squares_at_defaults(
        criterion, direction, params):
    """Default multistart settings explore 40 points and reach the optimum."""
    if direction == "minimize":
        res = minimize(
            criterion=criterion,
            params=params,
            algorithm="scipy_lbfgsb",
            multistart=True,
        )
    else:
        res = maximize(
            criterion=switch_sign(sos_dict_criterion),
            params=params,
            algorithm="scipy_lbfgsb",
            multistart=True,
        )

    assert hasattr(res, "multistart_info")
    info = res.multistart_info
    # The default exploration sample has 40 points, each evaluated once.
    assert len(info["exploration_sample"]) == 40
    assert len(info["exploration_results"]) == 40
    for entry in info["exploration_results"]:
        assert isinstance(entry, float)
    for optimum in info["local_optima"]:
        assert isinstance(optimum, OptimizeResult)
    for start in info["start_parameters"]:
        assert isinstance(start, pd.DataFrame)
    assert np.allclose(res.criterion, 0)
    aaae(res.params["value"], np.zeros(4))
def test_constrained_minimization(criterion_name, algorithm, derivative,
                                  constraint_name, params_type):
    """Constrained problems reach their known solutions for both params types."""
    constraints = CONSTR_INFO[constraint_name]
    func_info = FUNC_INFO[criterion_name]
    criterion = func_info["criterion"]

    start_values = START_INFO[constraint_name]
    if params_type == "pandas":
        params = pd.Series(start_values, name="value").to_frame()
    else:
        params = np.array(start_values)

    res = minimize(
        criterion=criterion,
        params=params,
        algorithm=algorithm,
        derivative=derivative,
        constraints=constraints,
        algo_options={"convergence.relative_criterion_tolerance": 1e-12},
    )

    if params_type == "pandas":
        calculated = res.params["value"].to_numpy()
    else:
        calculated = res.params

    # Fall back to the unconstrained solution if no constraint-specific
    # expected result is recorded for this criterion.
    expected = func_info.get(f"{constraint_name}_result",
                             func_info["default_result"])

    aaae(calculated, expected, decimal=4)
def test_invalid_start_params():
    """Start params violating a probability constraint raise InvalidParamsError."""

    def sphere(x):
        return np.dot(x, x)

    # arange(3)[1:] does not sum to 1, so the probability constraint fails.
    probability_constraint = {"loc": [1, 2], "type": "probability"}
    with pytest.raises(InvalidParamsError):
        minimize(
            sphere,
            params=np.arange(3),
            algorithm="scipy_lbfgsb",
            constraints=[probability_constraint],
        )
def test_history_collection_with_parallelization(algorithm, tmp_path):
    """In-memory history matches the database log under parallel evaluation."""
    bounded = algorithm in BOUNDED
    lb = np.zeros(5) if bounded else None
    ub = np.full(5, 10) if bounded else None

    logging = tmp_path / "log.db"

    res = minimize(
        criterion=lambda x: {"root_contributions": x, "value": x @ x},
        params=np.arange(5),
        algorithm=algorithm,
        lower_bounds=lb,
        upper_bounds=ub,
        algo_options={"n_cores": 2, "stopping.max_iterations": 3},
        logging=logging,
        log_options={"if_database_exists": "replace", "fast_logging": True},
    )
    collected_hist = res.history

    log_hist = OptimizeLogReader(logging).read_history()

    # Parallel evaluation order is nondeterministic, so compare sorted values.
    aaae(sorted(collected_hist["criterion"]), sorted(log_hist["criterion"]))
def test_single_optimization_list_len1():
    """
    Test an easy single optimization.
    """
    res = minimize([rosen], [params], ["nlopt_neldermead"])
    solution = res[1]["value"].to_numpy()

    # Rosenbrock's minimum is at the all-ones vector.
    assert_array_almost_equal(solution, [1, 1, 1, 1, 1], decimal=4)