Example 1
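These snippets appear to be test functions from a neural-network library. The snippets carry no import lines of their own; the block below is a hypothetical reconstruction from the identifiers they use (the package name and module paths are assumptions, not the library's documented layout).

# Hypothetical imports, reconstructed from usage in the examples below;
# the package name and module paths are assumptions and may differ in the
# real library.
from learning import datasets, validation
from learning.architecture import mlp
from learning.architecture.regression import LinearRegressionModel
from learning.transfer import SoftmaxTransfer
from learning.error import CrossEntropyError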
def test_mlp_classifier_convergence():
    # Run until convergence
    # Assert that the network can converge
    model = mlp.MLP(
        (2, 3, 2), transfers=SoftmaxTransfer(), error_func=CrossEntropyError())
    dataset = datasets.get_and()

    model.train(*dataset, retries=5, error_break=0.002)
    assert validation.get_error(model, *dataset) <= 0.02
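Note that model.train(*dataset) unpacks an (input, target) pair. A plausible shape for datasets.get_and(), inferred only from the 2-input / 2-output layer sizes used throughout these examples (the exact encoding is an assumption):

# Hypothetical sketch of what datasets.get_and() might return; inferred
# from the 2-input / 2-output shapes above, not from the library itself.
and_inputs = [[0, 0], [0, 1], [1, 0], [1, 1]]
and_targets = [[1, 0], [1, 0], [1, 0], [0, 1]]  # one-hot: [not AND, AND]
and_dataset = (and_inputs, and_targets)  # unpacked as model.train(*and_dataset)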
Example 2
def test_dropout_mlp():
    # Run for a couple of iterations
    # Assert that the new error is less than the original
    model = mlp.DropoutMLP((2, 8, 2))
    dataset = datasets.get_and()

    error = validation.get_error(model, *dataset)
    model.train(*dataset, iterations=20)
    assert validation.get_error(model, *dataset) < error
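Examples 2, 3, and 5 all follow the same smoke-test pattern: measure the error, train briefly, and assert that the error went down. The helper below generalizes that pattern; the function is hypothetical, written here for illustration, and not part of the library.

def assert_training_reduces_error(model, dataset, iterations=20):
    # Hypothetical helper capturing the pattern of Examples 2, 3, and 5:
    # a short training run should strictly reduce the measured error.
    error_before = validation.get_error(model, *dataset)
    model.train(*dataset, iterations=iterations)
    assert validation.get_error(model, *dataset) < error_before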
Example 3
def test_dropout_mlp_classifier():
    # Run for a couple of iterations
    # Assert that the new error is less than the original
    model = mlp.DropoutMLP(
        (2, 8, 2), transfers=SoftmaxTransfer(), error_func=CrossEntropyError())
    dataset = datasets.get_and()

    error = validation.get_error(model, *dataset)
    model.train(*dataset, iterations=20)
    assert validation.get_error(model, *dataset) < error
Example 4
def test_LinearRegressionModel_convergence():
    # Run until convergence
    # Assert that the model can converge
    model = LinearRegressionModel(2, 2)
    # NOTE: We use AND instead of XOR, because XOR is not linearly separable
    dataset = datasets.get_and()

    model.train(*dataset)
    # NOTE: This linear model cannot achieve 0 MSE
    assert validation.get_error(model, *dataset) <= 0.1
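The NOTE about AND versus XOR comes down to linear separability, and it also explains the 0.1 cutoff: the best linear fit to AND has a small but nonzero MSE, while for XOR no linear map beats a constant prediction. A quick least-squares check with numpy (numpy is assumed available; it is not used by the tests themselves):

import numpy as np

# Best linear fit (with bias) on AND vs. XOR targets. AND reaches
# MSE = 0.0625, under the test's 0.1 cutoff; XOR is stuck at MSE = 0.25.
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
X1 = np.hstack([X, np.ones((4, 1))])  # append a bias column
for name, y in [("AND", np.array([0., 0., 0., 1.])),
                ("XOR", np.array([0., 1., 1., 0.]))]:
    w, *_ = np.linalg.lstsq(X1, y, rcond=None)
    mse = float(np.mean((X1 @ w - y) ** 2))
    print(name, "best linear MSE:", round(mse, 4))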
Example 5
def test_LinearRegressionModel():
    # Run for a couple of iterations
    # Assert that the new error is less than the original
    model = LinearRegressionModel(2, 2)
    # NOTE: We use AND instead of XOR, because XOR is not linearly separable
    dataset = datasets.get_and()

    error = validation.get_error(model, *dataset)
    model.train(*dataset, iterations=10)
    assert validation.get_error(model, *dataset) < error
Example 6
def test_dropout_mlp_convergence():
    # Run until convergence
    # Assert that the network can converge
    # Since AND does not really need dropout, we use high active probabilities
    model = mlp.DropoutMLP(
        (2, 8, 2), input_active_probability=1.0, hidden_active_probability=0.9)
    dataset = datasets.get_and()  # Easier AND dataset for linear output

    # Use an error_break lower than the assert cutoff, since dropout may leave
    # a different error after training
    model.train(*dataset, retries=5, error_break=0.002, error_improve_iters=50)

    # Dropout sacrifices training accuracy for better generalization
    # so we don't worry as much about convergence
    assert validation.get_error(model, *dataset) <= 0.1
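The keyword arguments to train() are undocumented in these snippets; the readings below are inferred from their names alone and should be treated as assumptions.

# Hedged reading of the train() keyword arguments used above; these
# interpretations are inferred from the names, not from library docs:
#   retries=5              -> restart training (fresh weights) up to 5 times
#   error_break=0.002      -> stop once training error falls below 0.002
#   error_improve_iters=50 -> abandon a run if error stalls for 50 iterations
#   iterations=20          -> train for a fixed number of epochs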