# Assumed context (not part of the original excerpt): these tests look like
# mlxtend's MultiLayerPerceptron test suite. The import of `warnings` is
# required by the CI-tolerant test at the bottom; the MLP import and the
# X/y fixtures are reconstructed guesses and are therefore left as comments.
import warnings

# from mlxtend.classifier import MultiLayerPerceptron as MLP  # assumed source of MLP
# X, y:         assumed multiclass training data (features / integer labels)
# X_bin, y_bin: assumed two-class subset used for the binary test below


def test_binary_gd():
    mlp = MLP(epochs=20,
              eta=0.05,
              hidden_layers=[25],
              minibatches=5,
              random_seed=1)

    mlp.fit(X_bin, y_bin)
    assert (y_bin == mlp.predict(X_bin)).all()
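
# Hedged helper sketch (an addition, not part of the original tests): the
# `.all()` checks in these tests can also be phrased as an accuracy fraction,
# which gives a more informative failure message. `_accuracy` is a
# hypothetical helper name.
def _accuracy(y_true, y_pred):
    import numpy as np
    return float(np.mean(np.asarray(y_true) == np.asarray(y_pred)))  # 1.0 means a perfect fit
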
def test_multiclass_minibatch_acc():
    mlp = MLP(epochs=20,
              eta=0.05,
              hidden_layers=[25],
              minibatches=5,
              random_seed=1)
    mlp.fit(X, y)
    assert round(mlp.cost_[-1], 3) == 0.024, mlp.cost_[-1]
    assert (y == mlp.predict(X)).all()
def test_multiclass_gd_acc():
    mlp = MLP(epochs=20,
              eta=0.05,
              hidden_layers=[10],
              minibatches=1,
              random_seed=1)
    mlp.fit(X, y)
    assert round(mlp.cost_[0], 2) == 0.55, mlp.cost_[0]
    assert round(mlp.cost_[-1], 2) == 0.01, mlp.cost_[-1]
    assert (y == mlp.predict(X)).all()
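
# Note (hedged, based on the usual semantics of this kind of `minibatches`
# parameter): minibatches=1 gives full-batch gradient descent, minibatches=5
# splits each epoch into five weight updates, and minibatches=len(y) (used in
# the tests below) updates once per training sample, i.e. stochastic gradient
# descent.
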
def test_momentum_1():
    mlp = MLP(epochs=20,
              eta=0.05,
              momentum=0.1,
              hidden_layers=[25],
              minibatches=len(y),
              random_seed=1)

    mlp.fit(X, y)
    assert round(mlp.cost_[-1], 4) == 0.0057, mlp.cost_[-1]
    assert (y == mlp.predict(X)).all()
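
# Hedged sketch (an illustration, not mlxtend's internal code): the classic
# momentum update that a `momentum` parameter of this kind is commonly
# understood to implement. `w`, `grad`, and `velocity` are hypothetical names.
def _momentum_step(w, grad, velocity, eta=0.05, momentum=0.1):
    velocity = momentum * velocity - eta * grad  # decaying accumulation of past gradients
    return w + velocity, velocity  # take the step, return the updated velocity
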
def test_retrain():
    mlp = MLP(epochs=10,
              eta=0.05,
              hidden_layers=[25],
              minibatches=len(y),
              random_seed=1)

    mlp.fit(X, y)
    cost_1 = mlp.cost_[-1]
    mlp.fit(X, y, init_params=False)

    assert round(cost_1, 3) == 0.058, cost_1
    assert round(mlp.cost_[-1], 3) == 0.023, mlp.cost_[-1]
    assert (y == mlp.predict(X)).all()
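
# Note: `init_params=False` in the second `fit` call above is expected to keep
# the weights learned by the first call, so training continues for another 10
# epochs instead of restarting; that is why the asserted cost drops from
# roughly 0.058 to roughly 0.023.
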
def test_multiclass_gd_acc_ci_tolerant():
    # Variant of test_multiclass_gd_acc above, renamed so it does not shadow
    # the earlier definition (Python keeps only the last function of a given
    # name in a module). It tolerates an occasionally higher final cost seen
    # on CI.
    mlp = MLP(epochs=20,
              eta=0.05,
              hidden_layers=[10],
              minibatches=1,
              random_seed=1)
    mlp.fit(X, y)
    assert round(mlp.cost_[0], 2) == 0.55, mlp.cost_[0]

    if round(mlp.cost_[-1], 2) == 0.25:
        warnings.warn('About 10% of the time, mlp.cost_[-1] is'
                      ' 0.247213137424 when tested via Travis CI.'
                      ' This is likely an architecture-related issue and'
                      ' should be investigated in the future.')
    else:
        assert round(mlp.cost_[-1], 2) == 0.01, mlp.cost_[-1]
        assert (y == mlp.predict(X)).all()
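
# Hedged usage note: with pytest, a single test from this module can be run
# selectively, e.g.
#   pytest test_multilayerperceptron.py::test_momentum_1
# (the module file name is an assumption; adjust it to the actual file).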