Example No. 1
# Imports assumed for the test snippets on this page; the exact module
# paths are a guess based on the learning library's layout.
from learning import datasets, validation, SoftmaxTransfer, CrossEntropyError, MeanSquaredError
from learning.architecture import mlp


def test_mlp_classifier_convergence():
    # Run until convergence;
    # assert that the network can converge
    model = mlp.MLP(
        (2, 3, 2), transfers=SoftmaxTransfer(), error_func=CrossEntropyError())
    dataset = datasets.get_and()

    model.train(*dataset, retries=5, error_break=0.002)
    assert validation.get_error(model, *dataset) <= 0.02
Example No. 2
def test_mlp_classifier():
    # Run for a couple of iterations;
    # assert that the new error is less than the original
    model = mlp.MLP(
        (2, 2, 2), transfers=SoftmaxTransfer(), error_func=CrossEntropyError())
    dataset = datasets.get_xor()

    error = validation.get_error(model, *dataset)
    model.train(*dataset, iterations=20)
    assert validation.get_error(model, *dataset) < error
Example No. 3
def test_dropout_mlp_classifier_convergence():
    # Run until convergence;
    # assert that the network can converge.
    # Since the AND dataset does not really need dropout, we use high
    # active probabilities.
    model = mlp.DropoutMLP(
        (2, 8, 2),
        transfers=SoftmaxTransfer(),
        error_func=CrossEntropyError(),
        input_active_probability=1.0,
        hidden_active_probability=0.9)
    dataset = datasets.get_and()

    # Train with an error_break lower than the assertion cutoff below,
    # since the dropout network's error may differ after training
    model.train(*dataset, retries=5, error_break=0.002, error_improve_iters=50)

    # Dropout sacrifices training accuracy for better generalization
    # so we don't worry as much about convergence
    assert validation.get_error(model, *dataset) <= 0.1
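
For context, here is a minimal sketch of the dropout idea these comments refer to. This is generic dropout, not the learning library's actual DropoutMLP implementation; the function and parameter names are illustrative only.

import numpy


def dropout_layer(activations, active_probability, training):
    if training:
        # Randomly disable each neuron with probability 1 - active_probability
        mask = numpy.random.random(activations.shape) < active_probability
        return activations * mask
    # At evaluation time all neurons are active, so activations are scaled
    # to match the expected magnitude seen during training. This difference
    # between training and evaluation is why the test above trains to a
    # lower error_break than its final assertion cutoff.
    return activations * active_probability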
Example No. 4
# Top-level imports for this walkthrough; the first line is assumed,
# matching the names used below
from learning import datasets, validation, MLP, CrossEntropyError, SoftmaxTransfer
from learning import optimize  # To customize the training of our MLP

# Grab the popular iris dataset from our library of datasets
dataset = datasets.get_iris()

# Make a multilayer perceptron to classify the iris dataset
model = MLP(
    # The MLP will take 4 attributes, have 1 hidden layer with 2 neurons,
    # and output one of 3 classes
    (4, 2, 3),

    # We will use a softmax output layer for this classification problem.
    # Because we are only changing the output transfer, we pass a single
    # Transfer object. We could customize all transfer layers by passing
    # a list of Transfer objects.
    transfers=SoftmaxTransfer(),

    # Cross entropy error will pair nicely with our softmax output.
    error_func=CrossEntropyError(),

    # Let's use the quasi-Newton BFGS optimizer for this problem.
    # BFGS requires an O(n^2) operation, where n is the number of weights,
    # but this isn't a problem for our relatively small MLP.
    # If we don't want to deal with optimizers, the default
    # option will select an appropriate optimizer for us.
    optimizer=optimize.BFGS(
        # We can even customize the line search method
        step_size_getter=optimize.WolfeLineSearch(
            # And the initial step size for our line search
            initial_step_getter=optimize.FOChangeInitialStep())))
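
With the model constructed, a minimal usage sketch follows, reusing the train and get_error calls shown in the test examples above (the error_break value is borrowed from those snippets):

# Train the model on the iris dataset until the error drops below error_break
model.train(*dataset, error_break=0.002)

# Report the model's cross entropy error on the training dataset
print('Cross entropy error: %s' % validation.get_error(model, *dataset))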
Example No. 5
def test_mlp_jacobian_softmax_out_ce():
    _check_jacobian(lambda s1, s2, s3: mlp.MLP(
        (s1, s2, s3), transfers=SoftmaxTransfer(), error_func=CrossEntropyError()))
Example No. 6
def test_mlp_jacobian_softmax_out_mse():
    _check_jacobian(lambda s1, s2, s3: mlp.MLP(
        (s1, s2, s3), transfers=SoftmaxTransfer(), error_func=MeanSquaredError()))
Example No. 7
def test_mlp_obj_and_obj_jac_match_softmax_out_ce():
    _check_obj_and_obj_jac_match(
        lambda s1, s2, s3: mlp.MLP(
            (s1, s2, s3), transfers=SoftmaxTransfer(), error_func=CrossEntropyError()),
        classification=True
    )
Example No. 8
def test_mlp_obj_and_obj_jac_match_softmax_out_mse():
    _check_obj_and_obj_jac_match(lambda s1, s2, s3: mlp.MLP(
        (s1, s2, s3), transfers=SoftmaxTransfer(), error_func=MeanSquaredError()))
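
The _check_jacobian and _check_obj_and_obj_jac_match helpers used in Examples No. 5 through 8 are defined elsewhere in the test suite and are not shown on this page. Below is a minimal sketch of what such checks typically verify, written against a hypothetical get_obj/get_obj_jac interface; these names are assumptions for illustration, not the learning library's actual API.

import numpy


def check_jacobian(get_obj, get_obj_jac, weights, epsilon=1e-6, atol=1e-4):
    # weights: 1-D numpy array of floats
    # Analytic objective and Jacobian, as computed by the model
    obj, jacobian = get_obj_jac(weights)

    # The objective returned alongside the Jacobian should match the
    # objective computed on its own (what the *_match tests verify)
    assert numpy.isclose(obj, get_obj(weights))

    # Approximate each partial derivative with a central finite difference
    approx = numpy.zeros_like(weights)
    for i in range(len(weights)):
        step = numpy.zeros_like(weights)
        step[i] = epsilon
        approx[i] = (get_obj(weights + step) - get_obj(weights - step)) / (2.0 * epsilon)

    # The analytic Jacobian should agree with the numerical approximation
    assert numpy.allclose(jacobian, approx, atol=atol)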