Example No. 1
def test_n_info_values():
    assert_raises(
        ValueError, FROLS, n_info_values=1.2, basis_function=Polynomial(degree=2)
    )
    assert_raises(
        ValueError, FROLS, n_info_values=-1, basis_function=Polynomial(degree=2)
    )
Example No. 2
def test_fit_polynomial():
    basis_function = Polynomial(degree=2)
    data = np.array([[1, 1, 1], [2, 3, 4], [3, 3, 3]])
    max_lag = 1
    output = np.array([[4, 6, 8, 9, 12, 16], [9, 9, 9, 9, 9, 9]])

    r = basis_function.fit(data=data, max_lag=max_lag)

    assert_array_equal(output, r)
Example No. 3
def test_model_order_selection():
    assert_raises(
        TypeError, FROLS, order_selection=1, basis_function=Polynomial(degree=2)
    )
    assert_raises(
        TypeError, FROLS, order_selection="True", basis_function=Polynomial(degree=2)
    )
    assert_raises(
        TypeError, FROLS, order_selection=None, basis_function=Polynomial(degree=2)
    )
Example No. 4
def test_fit_polynomial_predefined():
    basis_function = Polynomial(degree=2)
    data = np.array([[1, 1, 1], [2, 3, 4], [3, 3, 3]])
    max_lag = 1
    predefined_regressors = np.array([0, 2, 4])
    output = np.array([[4, 8, 12], [9, 9, 9]])

    r = basis_function.fit(
        data=data, max_lag=max_lag, predefined_regressors=predefined_regressors
    )

    assert_array_equal(output, r)
Example No. 5
def test_estimate_parameter_error():
    assert_raises(
        TypeError,
        SimulateNARMAX,
        estimate_parameter=1,
        basis_function=Polynomial(degree=2),
    )
Example No. 6
def test_model_type_error():
    assert_raises(
        ValueError,
        SimulateNARMAX,
        model_type="NFAR",
        basis_function=Polynomial(degree=2),
    )
Example No. 7
def test_model_prediction():
    x, y, theta = create_test_data()
    basis_function = Polynomial(degree=2)
    train_percentage = 90
    split_data = int(len(x) * (train_percentage / 100))

    X_train = x[0:split_data, 0]
    X_test = x[split_data::, 0]

    y1 = y[0:split_data, 0]
    y_test = y[split_data::, 0]
    y_train = y1.copy()

    y_train = np.reshape(y_train, (len(y_train), 1))
    X_train = np.reshape(X_train, (len(X_train), 1))

    y_test = np.reshape(y_test, (len(y_test), 1))
    X_test = np.reshape(X_test, (len(X_test), 1))
    model = FROLS(
        n_terms=5,
        extended_least_squares=False,
        ylag=[1, 2],
        xlag=2,
        estimator="least_squares",
        basis_function=basis_function,
    )
    model.fit(X=X_train, y=y_train)
    assert_raises(Exception, model.predict, X=X_test, y=y_test[:1])
Example No. 8
def test_extended_least_squares():
    assert_raises(
        TypeError, FROLS, extended_least_squares=1, basis_function=Polynomial(degree=2)
    )
    assert_raises(
        TypeError,
        FROLS,
        extended_least_squares="True",
        basis_function=Polynomial(degree=2),
    )
    assert_raises(
        TypeError,
        FROLS,
        extended_least_squares=None,
        basis_function=Polynomial(degree=2),
    )
Example No. 9
def test_default_values():
    default = {
        "estimator": "recursive_least_squares",
        "extended_least_squares": False,
        "lam": 0.98,
        "delta": 0.01,
        "offset_covariance": 0.2,
        "mu": 0.01,
        "eps": np.finfo(np.float64).eps,
        "gama": 0.2,
        "weight": 0.02,
        "model_type": "NARMAX",
        "estimate_parameter": True,
        "calculate_err": False,
    }
    model = SimulateNARMAX(basis_function=Polynomial())
    model_values = [
        model.estimator,
        model._extended_least_squares,
        model._lam,
        model._delta,
        model._offset_covariance,
        model._mu,
        model._eps,
        model._gama,
        model._weight,
        model.model_type,
        model.estimate_parameter,
        model.calculate_err,
    ]
    assert list(default.values()) == model_values
Example No. 10
def test_model_prediction():
    x, y, theta = create_test_data()
    basis_function = Polynomial(degree=2)
    train_percentage = 90
    split_data = int(len(x) * (train_percentage / 100))

    X_train = x[0:split_data, 0]
    X_test = x[split_data::, 0]

    y1 = y[0:split_data, 0]
    y_test = y[split_data::, 0]
    y_train = y1.copy()

    y_train = np.reshape(y_train, (len(y_train), 1))
    X_train = np.reshape(X_train, (len(X_train), 1))

    y_test = np.reshape(y_test, (len(y_test), 1))
    X_test = np.reshape(X_test, (len(X_test), 1))
    model = MetaMSS(
        ylag=[1, 2],
        xlag=2,
        maxiter=30,
        n_agents=20,
        basis_function=basis_function,
        random_state=42,
    )
    model.fit(X_train=X_train, y_train=y_train, X_test=X_test, y_test=y_test)
    assert_raises(Exception, model.predict, X_test=X_test, y_test=y_test[:1])
Example No. 11
def test_default_values():
    default = {
        "ylag": 2,
        "xlag": 2,
        "model_type": "NARMAX",
        "maxiter": 30,
        "alpha": 23,
        "g_zero": 100,
        "k_agents_percent": 2,
        "norm": -2,
        "power": 2,
        "n_agents": 10,
        "p_zeros": 0.5,
        "p_ones": 0.5,
        "p_value": 0.05,
        "estimator": "least_squares",
        "extended_least_squares": False,
        "lam": 0.98,
        "delta": 0.01,
        "offset_covariance": 0.2,
        "mu": 0.01,
        "eps": np.finfo(np.float64).eps,
        "gama": 0.2,
        "weight": 0.02,
        "steps_ahead": None,
        "estimate_parameter": True,
        "loss_func": "metamss_loss",
        "random_state": None,
    }
    model = MetaMSS(basis_function=Polynomial(degree=2))
    model_values = [
        model.ylag,
        model.xlag,
        model.model_type,
        model.maxiter,
        model.alpha,
        model.g_zero,
        model.k_agents_percent,
        model._norm,
        model._power,
        model.n_agents,
        model.p_zeros,
        model.p_ones,
        model.p_value,
        model.estimator,
        model._extended_least_squares,
        model._lam,
        model._delta,
        model._offset_covariance,
        model._mu,
        model._eps,
        model._gama,
        model._weight,
        model.steps_ahead,
        model.estimate_parameter,
        model.loss_func,
        model.random_state,
    ]
    assert list(default.values()) == model_values
Example No. 12
def test_fit_without_information_criteria():
    x, y, theta = create_test_data()
    basis_function = Polynomial(degree=2)
    model = FROLS(
        n_terms=15, extended_least_squares=False, basis_function=basis_function
    )
    model.fit(X=x, y=y)
    assert "info_values" not in dir(model)
Example No. 13
def test_estimate_parameter():
    x_train, x_valid, y_train, y_valid = get_siso_data(n=1000,
                                                       colored_noise=False,
                                                       sigma=0.001,
                                                       train_percentage=90)
    assert_raises(
        TypeError,
        SimulateNARMAX,
        estimate_parameter="False",
        x_train=x_train,
        y_train=y_train,
        basis_function=Polynomial(),
    )
Example No. 14
def test_predict():
    X_train, X_test, y_train, y_test = get_siso_data(
        n=1000, colored_noise=False, sigma=0.0001, train_percentage=90
    )
    basis_function = Polynomial(degree=2)
    model = MetaMSS(
        ylag=[1, 2],
        xlag=2,
        maxiter=30,
        n_agents=10,
        basis_function=basis_function,
        random_state=42,
    )
    model.fit(X_train=X_train, y_train=y_train, X_test=X_test, y_test=y_test)
    yhat = model.predict(X_test=X_test, y_test=y_test)
    assert_almost_equal(yhat, y_test, decimal=2)
Example No. 15
def test_information_criteria_bic():
    x, y, theta = create_test_data()
    basis_function = Polynomial(degree=2)
    model = FROLS(
        n_terms=5,
        extended_least_squares=False,
        order_selection=True,
        info_criteria="bic",
        n_info_values=5,
        ylag=[1, 2],
        xlag=2,
        estimator="least_squares",
        basis_function=basis_function,
    )
    model.fit(X=x, y=y)
    info_values = np.array([-1764.885, -2320.101, -2976.391, -4461.908, -72845.768])
    assert_almost_equal(model.info_values[:4], info_values[:4], decimal=3)
Example No. 16
def test_information_criteria_lilc():
    x, y, theta = create_test_data()
    basis_function = Polynomial(degree=2)
    model = FROLS(
        n_terms=5,
        extended_least_squares=False,
        order_selection=True,
        info_criteria="lilc",
        n_info_values=5,
        ylag=[1, 2],
        xlag=2,
        estimator="least_squares",
        basis_function=basis_function,
    )
    model.fit(X=x, y=y)
    info_values = np.array([-1767.926, -2326.183, -2985.514, -4474.072, -72860.973])
    assert_almost_equal(model.info_values[:4], info_values[:4], decimal=3)
Example No. 17
def test_information_criteria_fpe():
    x, y, theta = create_test_data()
    basis_function = Polynomial(degree=2)
    model = FROLS(
        n_terms=5,
        extended_least_squares=False,
        order_selection=True,
        info_criteria="fpe",
        n_info_values=5,
        ylag=[1, 2],
        xlag=2,
        estimator="least_squares",
        basis_function=basis_function,
    )
    model.fit(X=x, y=y)
    info_values = np.array(
        [-1769.7907932, -2329.9129013, -2991.1078281, -4481.5306067, -72870.296884]
    )
    assert_almost_equal(model.info_values[:4], info_values[:4], decimal=3)
Example No. 18
def test_metamss():
    piv = np.array([4, 2, 7, 11, 5])
    model_code = np.array(
        [
            [1001, 0],  # y(k-1)
            [2002, 0],  # x1(k-2)
            [2001, 1001],  # x1(k-1)y(k-1)
        ]
    )
    basis_function = Polynomial(degree=2)
    X_train, X_test, y_train, y_test = get_siso_data(
        n=1000, colored_noise=False, sigma=0.0001, train_percentage=90
    )

    model = MetaMSS(
        ylag=[1, 2],
        xlag=2,
        maxiter=30,
        n_agents=20,
        basis_function=basis_function,
        random_state=42,
    )
    model.fit(X_train=X_train, y_train=y_train, X_test=X_test, y_test=y_test)
    assert_array_equal(model.final_model, model_code)
Example No. 19
def test_error_reduction_ratio():
    piv = np.array([4, 2, 7, 11, 5])
    model_code = np.array(
        [
            [2002, 0],  # x1(k-2)
            [1002, 0],  # y(k-2)
            [2001, 1001],  # x1(k-1)y(k-1)
            [2002, 1002],  # x1(k-2)y(k-2)
            [1001, 1001],  # y(k-1)**2
        ]
    )
    basis_function = Polynomial(degree=2)
    x, y, theta = create_test_data()
    model = FROLS(
        n_terms=5,
        order_selection=True,
        n_info_values=5,
        info_criteria="aic",
        extended_least_squares=False,
        ylag=[1, 2],
        xlag=2,
        estimator="least_squares",
        basis_function=basis_function,
    )
    model.fit(X=x, y=y)
    assert_array_equal(model.final_model, model_code)
Example No. 20
def test_simulate():
    x_train, x_valid, y_train, y_valid = get_siso_data(n=1000,
                                                       colored_noise=False,
                                                       sigma=0.001,
                                                       train_percentage=90)

    s = SimulateNARMAX(basis_function=Polynomial(), estimate_parameter=False)

    # the model must be a numpy array
    model = np.array([
        [1001, 0],  # y(k-1)
        [2001, 1001],  # x1(k-1)y(k-1)
        [2002, 0],  # x1(k-2)
    ])
    # theta must be a numpy array of shape (n, 1) where n is the number of regressors
    theta = np.array([[0.2, 0.9, 0.1]]).T

    yhat = s.simulate(X_test=x_valid,
                      y_test=y_valid,
                      model_code=model,
                      theta=theta)
    assert yhat.shape == (100, 1)
Example No. 21
def test_default_values():
    default = {
        "ylag": 2,
        "xlag": 2,
        "order_selection": False,
        "info_criteria": "aic",
        "n_terms": None,
        "n_info_values": 10,
        "estimator": "recursive_least_squares",
        "extended_least_squares": False,
        "lam": 0.98,
        "delta": 0.01,
        "offset_covariance": 0.2,
        "mu": 0.01,
        "eps": np.finfo(np.float64).eps,
        "gama": 0.2,
        "weight": 0.02,
        "model_type": "NARMAX",
    }
    model = FROLS(basis_function=Polynomial(degree=2))
    model_values = [
        model.ylag,
        model.xlag,
        model._order_selection,
        model.info_criteria,
        model.n_terms,
        model.n_info_values,
        model.estimator,
        model._extended_least_squares,
        model._lam,
        model._delta,
        model._offset_covariance,
        model._mu,
        model._eps,
        model._gama,
        model._weight,
        model.model_type,
    ]
    assert list(default.values()) == model_values
Example No. 22
def test_simulate_theta():
    x_train, x_valid, y_train, y_valid = get_siso_data(n=1000,
                                                       colored_noise=False,
                                                       sigma=0.001,
                                                       train_percentage=90)

    s = SimulateNARMAX(basis_function=Polynomial(), estimate_parameter=True)

    # the model must be a numpy array
    model = np.array([
        [1001, 0],  # y(k-1)
        [2001, 1001],  # x1(k-1)y(k-1)
        [2002, 0],  # x1(k-2)
    ])

    yhat = s.simulate(
        X_train=x_train,
        y_train=y_train,
        X_test=x_valid,
        y_test=y_valid,
        model_code=model,
    )
    theta = np.array([[0.2, 0.9, 0.1]]).T
    assert_almost_equal(s.theta, theta, decimal=1)
Example No. 23
def test_validate_ylag():
    assert_raises(ValueError, MetaMSS, ylag=-1, basis_function=Polynomial(degree=2))
    assert_raises(ValueError, MetaMSS, ylag=1.3, basis_function=Polynomial(degree=2))
Example No. 24
def test_validate_xlag():
    assert_raises(ValueError, FROLS, xlag=-1, basis_function=Polynomial(degree=2))
    assert_raises(ValueError, FROLS, xlag=1.3, basis_function=Polynomial(degree=2))
Example No. 25
def test_info_criteria():
    assert_raises(
        ValueError, FROLS, info_criteria="AIC", basis_function=Polynomial(degree=2)
    )
Example No. 26
def test_calculate_error():
    assert_raises(TypeError,
                  SimulateNARMAX,
                  calculate_err=1,
                  basis_function=Polynomial(degree=2))
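
Note: the snippets above omit their import statements. The preamble below is a minimal sketch of what they appear to rely on, assuming sysidentpy's usual module layout (exact import paths can differ between releases); create_test_data is a helper defined locally in the test modules rather than part of the library.

# Assumed preamble for the examples above (import paths may vary by sysidentpy release):
import numpy as np
from numpy.testing import assert_almost_equal, assert_array_equal, assert_raises

from sysidentpy.basis_function import Polynomial
from sysidentpy.model_structure_selection import FROLS, MetaMSS
from sysidentpy.simulation import SimulateNARMAX
from sysidentpy.utils.generate_data import get_siso_data

# create_test_data() is a local test helper: it returns an input signal x, an
# output signal y, and (presumably) the parameter vector theta used to build y.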