Example #1
def test_dropout_transfer_probability_zero():
    length = random.randint(1, 20)

    # Can't actually be zero, but can be close enough
    dropout_transfer = mlp.DropoutTransfer(mlp.LinearTransfer(), 1e-16, length)

    # Should not allow zero active, defaults to 1
    assert list(dropout_transfer._active_neurons).count(1.0) == 1
    assert list(dropout_transfer._active_neurons).count(0.0) == length - 1
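The test above relies on the dropout layer never deactivating every neuron. A minimal sketch of how such a mask could be built (an illustration only; make_active_mask is hypothetical and the library's actual implementation may differ):

import numpy

def make_active_mask(keep_probability, length):
    # Keep at least one neuron active, no matter how small the probability.
    num_active = max(1, int(round(keep_probability * length)))
    mask = numpy.array([1.0] * num_active + [0.0] * (length - num_active))
    numpy.random.shuffle(mask)
    return mask

mask = make_active_mask(1e-16, 10)
assert list(mask).count(1.0) == 1
assert list(mask).count(0.0) == 9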
Example #2
def test_dropout_transfer_probability_one():
    length = random.randint(1, 20)

    dropout_transfer = mlp.DropoutTransfer(mlp.LinearTransfer(), 1.0, length)
    assert (dropout_transfer._active_neurons == numpy.array(
        [1.0] * length)).all(), 'All should be active'

    # Random input
    input_vec = numpy.random.random(length)
    assert (dropout_transfer(input_vec) == input_vec).all()
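A pure-numpy sketch of the masking idea these dropout tests exercise (an illustration, not the library's internals): the layer multiplies its input elementwise by a 0/1 mask of active neurons, so an all-ones mask leaves the input unchanged.

import numpy

x = numpy.random.random(4)
all_active = numpy.ones(4)
assert ((x * all_active) == x).all()  # probability 1.0 -> identity

half_active = numpy.array([1.0, 0.0, 1.0, 0.0])
assert ((x * half_active)[[1, 3]] == 0.0).all()  # dropped neurons output 0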
Example #3
def test_mlp_perceptron():
    # Given known inputs and weights, test expected outputs
    model = mlp.MLP((2, 1), transfers=mlp.LinearTransfer())
    model._weight_matrices[0][0][0] = 0.0
    model._weight_matrices[0][1][0] = 0.5
    model._weight_matrices[0][2][0] = -0.5
    assert (model.activate([1, 1]) == [0.0]).all()

    model._weight_matrices[0][1][0] = 1.0
    model._weight_matrices[0][2][0] = 2.0
    assert (model.activate([1, 1]) == [3.0]).all()
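Worked arithmetic behind the assertions above, assuming the 3x1 weight matrix holds a bias row (a weight on a constant input of 1) followed by one weight per input; the bias-first layout is a common convention and is stated here as an assumption.

import numpy

augmented_input = numpy.array([1.0, 1.0, 1.0])  # constant 1 prepended to [1, 1]

weights = numpy.array([[0.0], [0.5], [-0.5]])   # bias, w1, w2
assert (augmented_input.dot(weights) == [0.0]).all()  # 0 + 0.5 - 0.5 = 0

weights = numpy.array([[0.0], [1.0], [2.0]])
assert (augmented_input.dot(weights) == [3.0]).all()  # 0 + 1 + 2 = 3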
Example #4
def test_mlp_jacobian_lin_out_mse():
    _check_jacobian(lambda s1, s2, s3: mlp.MLP(
        (s1, s2, s3), transfers=mlp.LinearTransfer(), error_func=MSE()))
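_check_jacobian is a helper defined elsewhere in the test module; a typical way to validate an analytic jacobian is to compare it against central finite differences. A self-contained sketch of that idea (names and tolerances here are assumptions, not the library's actual helper):

import numpy

def finite_difference_gradient(f, x, eps=1e-6):
    # Central-difference approximation of the gradient of a scalar function f.
    grad = numpy.zeros_like(x)
    for i in range(x.size):
        step = numpy.zeros_like(x)
        step[i] = eps
        grad[i] = (f(x + step) - f(x - step)) / (2 * eps)
    return grad

# Example: f(x) = 0.5 * ||x||^2 has gradient x.
x = numpy.random.random(5)
numeric = finite_difference_gradient(lambda v: 0.5 * numpy.sum(v ** 2), x)
assert numpy.allclose(numeric, x, atol=1e-4)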
Example #5
def test_mlp_obj_and_obj_jac_match_lin_out_mse():
    _check_obj_and_obj_jac_match(lambda s1, s2, s3: mlp.MLP(
        (s1, s2, s3), transfers=mlp.LinearTransfer(), error_func=MSE()))
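_check_obj_and_obj_jac_match is likewise defined elsewhere; judging by its name, it verifies that the standalone objective and the combined objective-plus-jacobian routine report the same objective value. A hypothetical, self-contained sketch of that kind of check (obj and obj_jac below are stand-ins, not the library's API):

import numpy

def obj(x):
    return float(numpy.sum(x ** 2))

def obj_jac(x):
    # Return the objective value together with its gradient.
    return float(numpy.sum(x ** 2)), 2 * x

x = numpy.random.random(4)
assert numpy.isclose(obj(x), obj_jac(x)[0])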