# Example #1
# 0
def test_cross_entropy_matrix():
    """Cross entropy on matrices should average the per-row errors."""
    error_func = error.CrossEntropyError()
    target_matrix = numpy.array([[0., 1.], [1., 0.]])

    # Perfect prediction on both rows -> zero error
    assert error_func(numpy.array([[0.1, 1.], [1., 0.1]]), target_matrix) == 0
    # -log(1/e) == 1 on both rows -> mean error of 1
    assert error_func(
        numpy.array([[0.1, 1. / numpy.e], [1. / numpy.e, 0.1]]),
        target_matrix) == 1
    # One perfect row and one row with error 1 -> mean of 0.5
    assert error_func(
        numpy.array([[0.1, 1.], [1. / numpy.e, 0.1]]), target_matrix) == 0.5
# Example #2
# 0
def test_cross_entropy_derivative_equals():
    """Should not raise error or return nan, when both inputs match.

    Because the derivative includes a division, this could occur.
    """
    output_vec = numpy.array([0., 1.])
    target_vec = numpy.array([0., 1.])
    jacobian = error.CrossEntropyError().derivative(output_vec, target_vec)[1]
    assert list(jacobian) == [0., -0.5]
# Example #3
# 0
def test_cross_entropy_zero_in_vec_a():
    """Should not raise error when zeros are in vec_a.

    Because CE takes log of first vector, it can have issues with vectors containing 0s.
    """
    error_func = error.CrossEntropyError()
    target_vec = numpy.array([0., 0., 1.])

    assert error_func(numpy.array([0., 0., 1.]), target_vec) == 0
    assert error_func(numpy.array([1., 0., 1.]), target_vec) == 0
# Example #4
# 0
def test_Model_stochastic_train():
    """Train with stochastic gradient descent."""
    from learning import transfer, error, validation, MLP

    input_matrix, target_matrix = datasets.get_iris()

    # Softmax + cross entropy pairing for classification
    model = MLP(
        (len(input_matrix[0]), 2, len(target_matrix[0])),
        transfers=transfer.SoftmaxTransfer(),
        error_func=error.CrossEntropyError())

    # Model should be able to converge with mini-batches
    model.stochastic_train(
        input_matrix,
        target_matrix,
        error_break=0.02,
        pattern_selection_func=lambda X, Y: base.select_sample(X, Y, size=30),
        train_kwargs={'iterations': 100,
                      'error_break': 0.02})

    assert validation.get_error(model, input_matrix, target_matrix) <= 0.03
# Example #5
# 0
def test_cross_entropy_derivative():
    """Analytic gradient of cross entropy should match the numerical check."""
    error_func = error.CrossEntropyError()
    check_error_gradient(error_func)
# Example #6
# 0
def test_cross_entropy_error_on_negative():
    """CrossEntropy does not take negative values in vec_a."""
    # log of a negative value should trigger a FloatingPointError
    with pytest.raises(FloatingPointError):
        error.CrossEntropyError()(
            numpy.array([-1., 1.]), numpy.array([0., 1.]))
# Example #7
# 0
def test_cross_entropy_vector():
    """Cross entropy of a single prediction/target vector pair."""
    error_func = error.CrossEntropyError()
    target_vec = numpy.array([0., 1.])

    # Perfect prediction -> zero error
    assert error_func(numpy.array([0.1, 1.]), target_vec) == 0
    # -log(1/e) == 1
    assert error_func(numpy.array([0.1, 1. / numpy.e]), target_vec) == 1
# Example #8
# 0
def test_cross_entropy_derivative_error_equals_call_error_matrix():
    """Error returned by derivative should match __call__ for matrices."""
    error_func = error.CrossEntropyError()
    check_derivative_error_equals_call_error(error_func, tensor_d=2)
# Example #9
# 0
def test_cross_entropy_derivative_matrix():
    """Analytic gradient should match the numerical check for matrices."""
    error_func = error.CrossEntropyError()
    check_error_gradient(error_func, tensor_d=2)