Example #1
def test_mlp_convergence():
    # Run until convergence
    # and assert that the network can converge
    model = mlp.MLP((2, 4, 2))
    dataset = datasets.get_xor()

    model.train(*dataset, retries=5, error_break=0.002)
    assert validation.get_error(model, *dataset) <= 0.02
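These snippets are lifted from test modules and omit their imports. A hedged sketch of the imports they appear to rely on is below; the module paths are an assumption based on the names used, and pairs such as CrossEntropy/CrossEntropyError and MSE/MeanSquaredError look like the same error functions imported under different names in different test files.

import random

import numpy

import mlp          # provides MLP, LinearTransfer, ReluTransfer, SoftmaxTransfer
import datasets     # provides get_xor(), get_and()
import validation   # provides get_error(model, *dataset)
import helpers      # provides approx_equal(actual, expected)

from mlp import LinearTransfer, SoftmaxTransfer
from error import (MSE, MeanSquaredError,            # "error" module name is a guess
                   CrossEntropy, CrossEntropyError)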
Example #2
def test_mlp():
    # Run for a couple of iterations
    # and assert that the new error is less than the original
    model = mlp.MLP((2, 2, 2))
    dataset = datasets.get_xor()

    error = validation.get_error(model, *dataset)
    model.train(*dataset, iterations=10)
    assert validation.get_error(model, *dataset) < error
Example #3
def test_mlp_classifier_convergence():
    # Run until convergence
    # and assert that the network can converge
    model = mlp.MLP(
        (2, 3, 2), transfers=SoftmaxTransfer(), error_func=CrossEntropyError())
    dataset = datasets.get_and()

    model.train(*dataset, retries=5, error_break=0.002)
    assert validation.get_error(model, *dataset) <= 0.02
Example #4
def test_mlp_classifier():
    # Run for a couple of iterations
    # and assert that the new error is less than the original
    model = mlp.MLP(
        (2, 2, 2), transfers=SoftmaxTransfer(), error_func=CrossEntropyError())
    dataset = datasets.get_xor()

    error = validation.get_error(model, *dataset)
    model.train(*dataset, iterations=20)
    assert validation.get_error(model, *dataset) < error
Example #5
def test_mlp_perceptron():
    # Given known inputs and weights, test expected outputs
    model = mlp.MLP((2, 1), transfers=mlp.LinearTransfer())
    model._weight_matrices[0][0][0] = 0.0
    model._weight_matrices[0][1][0] = 0.5
    model._weight_matrices[0][2][0] = -0.5
    assert (model.activate([1, 1]) == [0.0]).all()

    model._weight_matrices[0][1][0] = 1.0
    model._weight_matrices[0][2][0] = 2.0
    assert (model.activate([1, 1]) == [3.0]).all()
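The indexing above implies that activate() prepends a bias input of 1, so row 0 of the weight matrix holds the bias weight. A minimal numpy check of the same arithmetic (the bias-augmentation detail is inferred from the test, not confirmed elsewhere):

import numpy

weights = numpy.array([[0.0],     # bias weight
                       [0.5],     # weight for input 1
                       [-0.5]])   # weight for input 2
augmented = numpy.array([1.0, 1.0, 1.0])  # [bias, x1, x2] for input [1, 1]
assert (augmented.dot(weights) == [0.0]).all()  # 0.0 + 0.5 - 0.5

weights[1][0], weights[2][0] = 1.0, 2.0
assert (augmented.dot(weights) == [3.0]).all()  # 0.0 + 1.0 + 2.0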
Example #6
def test_MLP_activate_matrix():
    model = mlp.MLP((2, 2, 2), transfers=[LinearTransfer(), LinearTransfer()])
    
    # Set weights for deterministic results
    model._bias_vec = numpy.ones(model._bias_vec.shape)
    model._weight_matrices = [numpy.ones(weight_matrix.shape)
                              for weight_matrix in model._weight_matrices]

    # Activate
    assert helpers.approx_equal(model.activate([[0, 0], [0.5, 0.5]]), [[2, 2], [4, 4]])
    assert helpers.approx_equal(model.activate([[1, 0], [0.5, 1]]), [[4, 4], [5, 5]])
    assert helpers.approx_equal(model.activate([[1, 1], [0, 0.5]]), [[6, 6], [3, 3]])
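Note that activate() is called with a matrix here: each row of the input appears to be one sample, and each row of the result is the corresponding output (e.g. [0, 0] maps to [2, 2] and [0.5, 0.5] to [4, 4]).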
Example #7
def test_mlp_bias():
    # Should have bias for each layer
    model = mlp.MLP((2, 4, 3))

    # +1 input for bias
    assert model._weight_matrices[0].shape == (3, 4)
    assert model._weight_matrices[1].shape == (5, 3)

    # First input should always be 1
    model.activate([0, 0])
    assert model._weight_inputs[0][0] == 1.0
    assert model._weight_inputs[1][0] == 1.0
    assert model._weight_inputs[2][0] == 1.0
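The expected shapes follow directly from the layer sizes: each weight matrix gets one extra input row for the bias. A small check of that arithmetic:

shape = (2, 4, 3)
# Each matrix maps (layer inputs + 1 bias) rows to (layer outputs) columns.
expected_shapes = [(n_in + 1, n_out) for n_in, n_out in zip(shape, shape[1:])]
assert expected_shapes == [(3, 4), (5, 3)]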
Example #8
def test_MLP_reset():
    shape = (random.randint(1, 10), random.randint(1, 10), random.randint(1, 10))

    model = mlp.MLP(shape)
    model_2 = mlp.MLP(shape)

    # Resetting different models with the same seed should give the same model
    prev_seed = random.randint(0, 2**32-1)

    try:
        random.seed(0)
        numpy.random.seed(0)
        model.reset()

        random.seed(0)
        numpy.random.seed(0)
        model_2.reset()

        assert model.serialize() == model_2.serialize()
    finally:
        random.seed(prev_seed)
        numpy.random.seed(prev_seed)
Example #9
def test_mlp_jacobian_softmax_out_ce():
    _check_jacobian(lambda s1, s2, s3: mlp.MLP((s1, s2, s3),
                                               transfers=mlp.SoftmaxTransfer(),
                                               error_func=CrossEntropy()))
Example #10
def test_mlp_jacobian_lin_out_mse():
    _check_jacobian(lambda s1, s2, s3: mlp.MLP(
        (s1, s2, s3), transfers=mlp.LinearTransfer(), error_func=MSE()))
Example #11
def test_mlp_obj_and_obj_jac_match_softmax_out_ce():
    _check_obj_and_obj_jac_match(lambda s1, s2, s3: mlp.MLP(
        (s1, s2, s3),
        transfers=mlp.SoftmaxTransfer(),
        error_func=CrossEntropy()),
                                 classification=True)
Example #12
def test_mlp_obj_and_obj_jac_match_softmax_out_mse():
    _check_obj_and_obj_jac_match(lambda s1, s2, s3: mlp.MLP(
        (s1, s2, s3), transfers=mlp.SoftmaxTransfer(), error_func=MSE()))
Example #13
def test_mlp_obj_and_obj_jac_match_lin_out_mse():
    _check_obj_and_obj_jac_match(lambda s1, s2, s3: mlp.MLP(
        (s1, s2, s3), transfers=mlp.LinearTransfer(), error_func=MSE()))
Example #14
def test_mlp_jacobian_softmax_out_mse():
    _check_jacobian(lambda s1, s2, s3: mlp.MLP(
        (s1, s2, s3), transfers=SoftmaxTransfer(), error_func=MeanSquaredError()))
Example #15
def test_mlp_jacobian_relu_out_ce():
    _check_jacobian(lambda s1, s2, s3: mlp.MLP(
        (s1, s2, s3), transfers=mlp.ReluTransfer(), error_func=CrossEntropyError()))
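The _check_jacobian and _check_obj_and_obj_jac_match helpers referenced above are not shown on this page. Below is a self-contained sketch of the central-difference gradient check that _check_jacobian presumably performs; the function and the quadratic example are illustrations, not the project's actual implementation.

import numpy

def check_jacobian(f, grad_f, x, epsilon=1e-6, tolerance=1e-4):
    # Compare an analytic gradient against a central-difference estimate.
    analytic = grad_f(x)
    numeric = numpy.zeros_like(x)
    for i in range(x.size):
        step = numpy.zeros_like(x)
        step[i] = epsilon
        numeric[i] = (f(x + step) - f(x - step)) / (2.0 * epsilon)
    assert numpy.allclose(analytic, numeric, atol=tolerance)

# f(x) = x . x has gradient 2x, so the check passes.
check_jacobian(lambda x: x.dot(x), lambda x: 2.0 * x,
               numpy.array([0.3, -1.2, 0.7]))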