Example no. 1
def test_bad_parameters(data_derivative_1d):
    # The fixture data is unpacked but unused: this test only exercises
    # constructor argument validation.
    x, x_dot = data_derivative_1d

    with pytest.raises(ValueError):
        STLSQ(threshold=-1)

    with pytest.raises(ValueError):
        STLSQ(alpha=-1)

    with pytest.raises(ValueError):
        STLSQ(max_iter=0)

    with pytest.raises(ValueError):
        SR3(threshold=-1)

    with pytest.raises(ValueError):
        SR3(nu=0)

    with pytest.raises(ValueError):
        SR3(tol=0)

    with pytest.raises(NotImplementedError):
        SR3(thresholder="l2")

    with pytest.raises(ValueError):
        SR3(max_iter=0)
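
These snippets are excerpted from the PySINDy test suite and omit their imports. A plausible preamble is sketched below; the exact module paths are assumptions and may differ between PySINDy versions. DummyLinearModel is a stub defined inside the test file itself (not a library class), and pytest.lazy_fixture additionally requires the pytest-lazy-fixture plugin.

import numpy as np
import pytest
from hypothesis import assume
from numpy.linalg import norm
from numpy.random import randint, seed
from sklearn.datasets import make_regression
from sklearn.exceptions import ConvergenceWarning
from sklearn.linear_model import ElasticNet, Lasso, LinearRegression, Ridge
from sklearn.utils.validation import check_is_fitted

from pysindy import SINDy
from pysindy.optimizers import SR3, STLSQ, ConstrainedSR3, SINDyOptimizer, TrappingSR3
from pysindy.utils import supports_multiple_targets  # location assumed; may vary by version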
Example no. 2
def test_initial_guess_sr3():
    x = np.random.standard_normal((10, 3))
    x_dot = np.random.standard_normal((10, 2))

    control_model = SR3(max_iter=1).fit(x, x_dot)

    initial_guess = np.random.standard_normal((x_dot.shape[1], x.shape[1]))
    guess_model = SR3(max_iter=1, initial_guess=initial_guess).fit(x, x_dot)

    assert np.any(np.not_equal(control_model.coef_, guess_model.coef_))
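
Note the shape convention: initial_guess must be (n_targets, n_features), the same layout as the fitted coef_. A minimal sketch of ordinary use (data and names here are illustrative):

import numpy as np
from pysindy.optimizers import SR3

x = np.random.standard_normal((100, 3))          # 3 features
x_dot = np.random.standard_normal((100, 2))      # 2 targets
guess = np.zeros((x_dot.shape[1], x.shape[1]))   # shape (2, 3): (n_targets, n_features)
opt = SR3(initial_guess=guess).fit(x, x_dot)
assert opt.coef_.shape == guess.shape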
Example no. 3
def test_sr3_enable_trimming(data_linear_oscillator_corrupted):
    x, x_dot, _ = data_linear_oscillator_corrupted

    model_plain = SR3()
    model_plain.enable_trimming(trimming_fraction=0.5)
    model_plain.fit(x, x_dot)

    model_trimming = SR3(trimming_fraction=0.5)
    model_trimming.fit(x, x_dot)

    np.testing.assert_allclose(model_plain.coef_, model_trimming.coef_)
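
In other words, calling enable_trimming(trimming_fraction=0.5) on an already-constructed SR3 instance is expected to be equivalent to passing trimming_fraction=0.5 to the constructor: the two fits should yield identical coefficients.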
Example no. 4
def test_complexity(n_samples, n_features, n_informative, random_state):
    """Behaviour test for complexity.

    We assume that more regularized optimizers are less complex on the same dataset.
    """
    assume(n_informative < n_features)

    # Average complexity over multiple datasets
    n_datasets = 5
    complexities = [0] * 7

    seed(random_state)
    for rs in randint(low=0, high=2**32 - 1, size=n_datasets):

        x, y = make_regression(
            n_samples=n_samples,
            n_features=n_features,
            n_informative=n_informative,
            n_targets=1,
            bias=0,
            noise=0.1,
            random_state=rs,
        )
        y = y.reshape(-1, 1)

        opt_kwargs = dict(fit_intercept=True, normalize=False)
        optimizers = [
            SR3(thresholder="l0", threshold=0.1, **opt_kwargs),
            SR3(thresholder="l1", threshold=0.1, **opt_kwargs),
            Lasso(**opt_kwargs),
            STLSQ(**opt_kwargs),
            ElasticNet(**opt_kwargs),
            Ridge(**opt_kwargs),
            LinearRegression(**opt_kwargs),
        ]

        optimizers = [SINDyOptimizer(o, unbias=True) for o in optimizers]

        for k, opt in enumerate(optimizers):
            opt.fit(x, y)
            complexities[k] += opt.complexity

    for less_complex, more_complex in zip(complexities, complexities[1:]):
        # relax the condition to account for
        # noise and non-normalized threshold parameters
        assert less_complex <= more_complex + 5
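
The complexity attribute being accumulated here counts the nonzero terms in the fitted model. A sketch of the idea, using a hypothetical helper rather than the library's exact source:

import numpy as np

def model_complexity(coef, intercept=0.0):
    # Nonzero coefficients plus a nonzero intercept, mirroring how
    # SINDyOptimizer.complexity is commonly defined.
    return np.count_nonzero(coef) + np.count_nonzero(intercept)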
Example no. 5
def test_bad_parameters():
    with pytest.raises(ValueError):
        STLSQ(threshold=-1)

    with pytest.raises(ValueError):
        STLSQ(alpha=-1)

    with pytest.raises(ValueError):
        STLSQ(max_iter=0)

    with pytest.raises(ValueError):
        SR3(threshold=-1)

    with pytest.raises(ValueError):
        SR3(nu=0)

    with pytest.raises(ValueError):
        SR3(tol=0)

    with pytest.raises(NotImplementedError):
        SR3(thresholder="l2")

    with pytest.raises(ValueError):
        SR3(max_iter=0)

    with pytest.raises(ValueError):
        SR3(trimming_fraction=-1)

    with pytest.raises(ValueError):
        SR3(trimming_fraction=2)
Example no. 6
def test_sr3_trimming(data_linear_oscillator_corrupted):
    X, X_dot, trimming_array = data_linear_oscillator_corrupted

    optimizer_without_trimming = SINDyOptimizer(SR3(), unbias=False)
    optimizer_without_trimming.fit(X, X_dot)

    optimizer_trimming = SINDyOptimizer(SR3(trimming_fraction=0.15),
                                        unbias=False)
    optimizer_trimming.fit(X, X_dot)

    # Check that trimming found the right samples to remove
    np.testing.assert_array_equal(optimizer_trimming.optimizer.trimming_array,
                                  trimming_array)

    # Check that the coefficients found by the optimizer with trimming are closer to
    # the true coefficients than the coefficients found by the optimizer without
    # trimming
    true_coef = np.array([[-2.0, 0.0], [0.0, 1.0]])
    assert norm(true_coef - optimizer_trimming.coef_) < norm(
        true_coef - optimizer_without_trimming.coef_)
Example no. 7
def test_complexity(n_samples, n_features, n_informative, random_state):
    """Behaviour test for complexity.

    We assume that more regularized optimizers are less complex on the same dataset.
    """
    assume(n_informative < n_features)

    x, y = make_regression(
        n_samples=n_samples,
        n_features=n_features,
        n_informative=n_informative,
        n_targets=1,
        bias=0,
        noise=0.1,
        random_state=random_state,
    )
    y = y.reshape(-1, 1)

    opt_kwargs = dict(fit_intercept=True, normalize=False)
    optimizers = [
        SR3(thresholder="l0", threshold=0.1, **opt_kwargs),
        SR3(thresholder="l1", threshold=0.1, **opt_kwargs),
        Lasso(**opt_kwargs),
        STLSQ(**opt_kwargs),
        ElasticNet(**opt_kwargs),
        Ridge(**opt_kwargs),
        LinearRegression(**opt_kwargs),
    ]

    optimizers = [SINDyOptimizer(o, unbias=True) for o in optimizers]

    for opt in optimizers:
        opt.fit(x, y)

    for less_complex, more_complex in zip(optimizers, optimizers[1:]):
        # relax the condition to account for
        # noise and non-normalized threshold parameters
        assert less_complex.complexity <= more_complex.complexity + 1
Example no. 8
    # t was put out of order just before this excerpt begins; fitting with
    # unsorted times must raise, then the swap is undone
    with pytest.raises(ValueError):
        model.fit(x, t)
    t[2], t[4] = t[4], t[2]

    # Two matching times in t
    t[3] = t[5]
    with pytest.raises(ValueError):
        model.fit(x, t)


@pytest.mark.parametrize(
    "data, optimizer",
    [
        (pytest.lazy_fixture("data_1d"), STLSQ()),
        (pytest.lazy_fixture("data_lorenz"), STLSQ()),
        (pytest.lazy_fixture("data_1d"), SR3()),
        (pytest.lazy_fixture("data_lorenz"), SR3()),
        (pytest.lazy_fixture("data_1d"), Lasso(fit_intercept=False)),
        (pytest.lazy_fixture("data_lorenz"), Lasso(fit_intercept=False)),
        (pytest.lazy_fixture("data_1d"), ElasticNet(fit_intercept=False)),
        (pytest.lazy_fixture("data_lorenz"), ElasticNet(fit_intercept=False)),
    ],
)
def test_predict(data, optimizer):
    x, t = data
    model = SINDy(optimizer=optimizer)
    model.fit(x, t)
    x_dot = model.predict(x)

    assert x.shape == x_dot.shape
Example no. 9
def test_cad_prox_function(data_derivative_1d):
    x, x_dot = data_derivative_1d
    x = x.reshape(-1, 1)
    model = SR3(thresholder="cAd")
    model.fit(x, x_dot)
    check_is_fitted(model)
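
The mixed-case string "cAd" is deliberate: the test demonstrates that SR3's thresholder matching is case-insensitive, so this resolves to the CAD (clipped absolute deviation) proximal operator and the fit succeeds.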
Example no. 10
    ],
)
def test_supports_multiple_targets(cls, support):
    assert supports_multiple_targets(cls()) == support


@pytest.fixture(params=["data_derivative_1d", "data_derivative_2d"])
def data(request):
    return request.getfixturevalue(request.param)


@pytest.mark.parametrize(
    "optimizer",
    [
        STLSQ(),
        SR3(),
        ConstrainedSR3(),
        TrappingSR3(),
        Lasso(fit_intercept=False),
        ElasticNet(fit_intercept=False),
        DummyLinearModel(),
    ],
)
def test_fit(data, optimizer):
    x, x_dot = data
    if len(x.shape) == 1:
        x = x.reshape(-1, 1)
    opt = SINDyOptimizer(optimizer, unbias=False)
    opt.fit(x, x_dot)

    check_is_fitted(opt)
Example no. 11
                               model_t_default.coefficients())
    np.testing.assert_almost_equal(model.score(x, t=dt),
                                   model_t_default.score(x))
    np.testing.assert_almost_equal(model.differentiate(x, t=dt),
                                   model_t_default.differentiate(x))


@pytest.mark.parametrize(
    "data",
    [pytest.lazy_fixture("data_1d"),
     pytest.lazy_fixture("data_lorenz")])
@pytest.mark.parametrize(
    "optimizer",
    [
        STLSQ(),
        SR3(),
        ConstrainedSR3(),
        Lasso(fit_intercept=False),
        ElasticNet(fit_intercept=False),
    ],
)
def test_predict(data, optimizer):
    x, t = data
    model = SINDy(optimizer=optimizer)
    model.fit(x, t)
    x_dot = model.predict(x)

    assert x.shape == x_dot.shape


@pytest.mark.parametrize(
Example no. 12
def test_sr3_prox_functions(data_derivative_1d, thresholder):
    x, x_dot = data_derivative_1d
    x = x.reshape(-1, 1)
    model = SR3(thresholder=thresholder)
    model.fit(x, x_dot)
    check_is_fitted(model)
Example no. 13
    [(Lasso, True), (STLSQ, True), (SR3, True), (DummyLinearModel, False)],
)
def test_supports_multiple_targets(cls, support):
    assert supports_multiple_targets(cls()) == support


@pytest.fixture(params=["data_derivative_1d", "data_derivative_2d"])
def data(request):
    return request.getfixturevalue(request.param)


@pytest.mark.parametrize(
    "optimizer",
    [
        STLSQ(),
        SR3(),
        Lasso(fit_intercept=False),
        ElasticNet(fit_intercept=False),
        DummyLinearModel(),
    ],
)
def test_fit(data, optimizer):
    x, x_dot = data
    if len(x.shape) == 1:
        x = x.reshape(-1, 1)
    opt = SINDyOptimizer(optimizer, unbias=False)
    opt.fit(x, x_dot)

    check_is_fitted(opt)
    assert opt.complexity >= 0
    if len(x_dot.shape) > 1:
Example no. 14
def test_sr3_warn(data_linear_oscillator_corrupted):
    x, x_dot, _ = data_linear_oscillator_corrupted
    model = SR3(max_iter=1, tol=1e-10)

    with pytest.warns(ConvergenceWarning):
        model.fit(x, x_dot)
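
With max_iter=1 and a tolerance of 1e-10 the optimizer cannot possibly converge, so scikit-learn's ConvergenceWarning is expected. In normal use the remedy is to allow more iterations (or loosen tol); a minimal sketch on synthetic data:

import numpy as np
from pysindy.optimizers import SR3

rng = np.random.default_rng(0)
x = rng.standard_normal((500, 2))
x_dot = x @ np.array([[-2.0, 0.0], [0.0, 1.0]]).T  # clean linear system

SR3(max_iter=1000).fit(x, x_dot)  # enough iterations: no warning expected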
Example no. 15
    ],
)
def test_supports_multiple_targets(cls, support):
    assert supports_multiple_targets(cls()) == support


@pytest.fixture(params=["data_derivative_1d", "data_derivative_2d"])
def data(request):
    return request.getfixturevalue(request.param)


@pytest.mark.parametrize(
    "optimizer",
    [
        STLSQ(),
        SR3(),
        ConstrainedSR3(),
        Lasso(fit_intercept=False),
        ElasticNet(fit_intercept=False),
        DummyLinearModel(),
    ],
)
def test_fit(data, optimizer):
    x, x_dot = data
    if len(x.shape) == 1:
        x = x.reshape(-1, 1)
    opt = SINDyOptimizer(optimizer, unbias=False)
    opt.fit(x, x_dot)

    check_is_fitted(opt)
    assert opt.complexity >= 0