def test_mean_squared_error_forward_zero_loss():
    data = Graph(init([0, 0, 0, 1]))
    label = Graph(init([0, 0, 0, 1]))

    mse = mean_squared_error(data, label)

    assert float(mse.data) == 0
def test_softmax_cross_entropy_forward():
    data, labels = get_data()

    softmax_loss = softmax_cross_entropy(Graph(data), Graph(labels))

    y = np.exp(data)
    expected_loss = 0
    for i in range(len(y)):
        expected_loss -= math.log(y[i, labels[i]] / y[i].sum())
    expected_loss /= len(y)

    assert math.isclose(float(softmax_loss.data), expected_loss, rel_tol=1e-4, abs_tol=1e-5)
def test_softmax_cross_entropy_backward():
    data, labels = get_data()
    gradient = init([2])

    loss_function = SoftmaxCrossEntropy()
    loss_function(Graph(data), Graph(labels))
    computed_gradient_data, computed_gradient_label = loss_function.backward(gradient)
    assert computed_gradient_label is None

    f = lambda: loss_function.internal_forward((data, labels))
    numerical_gradient_data, _ = gradient_checker.compute_numerical_gradient(f, (data, labels), (gradient,), eps=1e-2)

    gradient_checker.assert_allclose(computed_gradient_data, numerical_gradient_data, atol=1e-4)
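# For reference, a numerical gradient check of the kind used above can be built
# from central differences. This is only a sketch of what
# gradient_checker.compute_numerical_gradient is assumed to do for a single
# input; it is not the project's actual implementation.
def _numerical_gradient_sketch(f, x, upstream_gradient, eps=1e-3):
    # f is a zero-argument callable (as in the tests above) that reads x and
    # returns a tuple with a single output array; x is perturbed in place
    gradient = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        index = it.multi_index
        original = x[index]
        x[index] = original + eps
        output_plus, = f()
        x[index] = original - eps
        output_minus, = f()
        x[index] = original
        # central difference, projected onto the upstream gradient (chain rule)
        gradient[index] = ((output_plus - output_minus) * upstream_gradient).sum() / (2 * eps)
        it.iternext()
    return gradient
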
def test_mean_squared_error_backward_with_label():
    data, data_2 = fixed_case(with_label=True)
    gradients = init([2])

    data_1_graph = Graph(data)
    data_2_graph = Graph(data_2)

    mse_function = MeanSquaredError()
    mse_function(data_1_graph, data_2_graph)
    computed_gradient_1, computed_gradient_2 = mse_function.backward(gradients)
    assert computed_gradient_2 is None

    f = lambda: mse_function.internal_forward((data, data_2))
    numerical_gradient_1, _ = gradient_checker.compute_numerical_gradient(
        f, (data, data_2), (gradients, ))

    gradient_checker.assert_allclose(computed_gradient_1, numerical_gradient_1)
def test_accuracy_backward():
    data, labels = [Graph(x) for x in get_base_data()]

    accuracy = F.accuracy(data, labels)
    accuracy.backward(None)

    assert data.grad is None
    assert labels.grad is None
def test_sigmoid_forward():
    data = init([[-0.22342056, 0.6927312], [0.4227562, -0.59764487],
                 [0.7870561, 0.372502]])

    sigmoid_output = sigmoid(Graph(data))
    desired = init([[0.44437608, 0.66657424], [0.6041426, 0.3548827],
                    [0.6871989, 0.5920634]])

    np.testing.assert_allclose(sigmoid_output.data, desired)
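    # The desired values above follow the standard logistic function
    # 1 / (1 + exp(-x)); an equivalent check would be
    #     np.testing.assert_allclose(sigmoid_output.data, 1.0 / (1.0 + np.exp(-data)), rtol=1e-5)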
def test_fully_connected_forward():
    data, weights, bias, expected = fixed_case()

    layer = FullyConnected(4, 2)
    layer.weights = weights
    layer.bias = bias

    layer_output = layer(Graph(data))
    gradient_checker.assert_allclose(layer_output.data, expected)
def test_softmax_forward():
    data = np.random.uniform(-1, 1, (3, 10)).astype(constants.DTYPE)

    output = softmax(Graph(data)).data

    expected_output = np.exp(data)
    for i in range(len(output)):
        expected_output[i] /= expected_output[i].sum()

    gradient_checker.assert_allclose(output, expected_output)
def test_graph_backward_only_functions_in_graph():
    data = np.array([2], dtype=constants.DTYPE)

    data_graph_1 = Graph(data)
    data_graph_2 = Graph(data)
    h = F.add(data_graph_1, Graph(data))
    h = F.add(h, data_graph_2)

    assert int(h.data) == 6

    h.backward(None)

    assert data_graph_1.grad == 1
    assert data_graph_2.grad == 1
    assert h.grad == 1

    # seeding the output gradient before calling backward again scales the propagated gradients accordingly
    h.grad = 2

    h.backward(None)
    assert data_graph_1.grad == 2
    assert data_graph_2.grad == 2
def test_graph_backward_with_layers():
    # use a fully connected layer and check whether the backward pass distributes the gradients correctly
    data = np.random.uniform(-1, 1, (2, 2)).astype(constants.DTYPE)
    labels = np.array([1, 1], dtype=np.int32)

    fc_layer = FullyConnected(2, 2)
    fc_layer.weights[...] = np.zeros_like(fc_layer.weights)
    fc_layer.bias[...] = np.array([-10, 10])
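    # With these parameters the layer always outputs logits close to [-10, 10],
    # so the softmax is almost exactly the one-hot vector for class 1. Since the
    # labels below are all 1, the softmax cross-entropy gradient at the logits is
    # numerically zero, and with zero weights the gradient w.r.t. the input is
    # exactly zero as well; the SGD step should therefore leave the parameters unchanged.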

    def run_forward(inputs, labels):
        fc_result = fc_layer(inputs)
        loss = F.softmax_cross_entropy(fc_result, labels)
        return loss

    data_graph = Graph(data)
    label_graph = Graph(labels)

    loss = run_forward(data_graph, label_graph)

    optimizer = SGD(0.001)
    loss.backward(optimizer)

    assert label_graph.grad is None
    gradient_checker.assert_allclose(data_graph.grad, np.zeros_like(data_graph.grad))
    gradient_checker.assert_allclose(fc_layer.weights, np.zeros_like(fc_layer.weights))
    gradient_checker.assert_allclose(fc_layer.bias, np.array([-10, 10]))

    # change the labels and make sure that the gradients are different now:
    # the absolute gradient values of the first sample should be larger than those of the second sample
    labels = np.array([0, 1], dtype=np.int32)
    label_graph = Graph(labels)

    loss = run_forward(data_graph, label_graph)

    loss.backward(optimizer)

    assert label_graph.grad is None
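    # sample 0 is now labelled with class 0, which the logits strongly contradict,
    # so its softmax cross-entropy gradient at the logits is roughly [-1, 1],
    # whereas sample 1 still matches its label and keeps a near-zero gradient;
    # that difference is what the comparison below checks at the input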
    assert (np.abs(data_graph.grad[0]) > np.abs(data_graph.grad[1])).all()
def test_relu_backward():
    data = np.random.uniform(-1, 1, (5, 4)).astype(constants.DTYPE)
    gradient = np.random.random(data.shape).astype(dtype=constants.DTYPE)

    data_graph = Graph(data)
    relu_function = Relu()
    relu_function(data_graph)
    computed_gradients, = relu_function.backward(gradient)

    f = lambda: relu_function.internal_forward((data, ))
    numerical_gradients, = gradient_checker.compute_numerical_gradient(
        f, (data, ), (gradient, ))

    gradient_checker.assert_allclose(computed_gradients, numerical_gradients)
def test_relu_forward():
    data = init([[-0.620304, -0.1285682, 0.4867715, 0.09824127],
                 [-0.37919873, -0.9272095, -0.0704312, 0.35593647],
                 [0.19380952, 0.06425636, 0.21729442, -0.3168534],
                 [-0.62586236, -0.4846, 0.84347826, 0.22025743],
                 [0.02966821, -0.2127131, -0.33760294, -0.9477733]])

    desired = init([[0., 0., 0.4867715, 0.09824127], [0., 0., 0., 0.35593647],
                    [0.19380952, 0.06425636, 0.21729442, 0.],
                    [0., 0., 0.84347826, 0.22025743], [0.02966821, 0., 0.,
                                                       0.]])

    relu_output = relu(Graph(data))
    np.testing.assert_allclose(relu_output.data, desired)
def test_softmax_backward():
    data = np.random.uniform(-1, 1, (3, 10)).astype(constants.DTYPE)
    gradient = np.random.uniform(-1, 1, (3, 10)).astype(constants.DTYPE)

    data_graph = Graph(data)
    softmax_function = Softmax()
    softmax_function(data_graph)
    computed_gradients, = softmax_function.backward(gradient)
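    # for a row-wise softmax output y, the analytic backward is
    #     grad_x = y * (gradient - (gradient * y).sum(axis=1, keepdims=True))
    # so the numerical check below should agree with computed_gradients up to eps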

    f = lambda: softmax_function.internal_forward((data, ))
    numerical_gradients, = gradient_checker.compute_numerical_gradient(
        f, (data, ), (gradient, ), eps=1e-2)

    gradient_checker.assert_allclose(computed_gradients, numerical_gradients)
def test_dropout_backward():
    data = get_data()
    gradient = np.random.random(data.shape).astype(constants.DTYPE)

    data_graph = Graph(data)
    dropout_function = Dropout(0.5)
    dropout_result = dropout_function(data_graph)
    computed_gradients, = dropout_function.backward(gradient)

    # pass the creator (the Dropout function instance) so that _dropout can reuse
    # the same mask that was drawn in the forward pass above
    f = lambda: _dropout(data, dropout_result.creator)
    numerical_gradients, = gradient_checker.compute_numerical_gradient(
        f, (data, ), (gradient, ), eps=0.1)

    gradient_checker.assert_allclose(computed_gradients, numerical_gradients)
def test_dropout_forward_ratio_0_5():
    data = get_data()

    # set the seed to get reproducible results
    np.random.seed(2)

    result = dropout(Graph(data), dropout_ratio=0.5)
    # check that ca. 50% of all data points are zero now
    non_zero_elements = result.data.nonzero()
    assert math.isclose(data.size // 2,
                        non_zero_elements[0].size,
                        abs_tol=data.size * 0.1)

    # reset the seed so that subsequent tests do not run with a fixed seed
    np.random.seed()
    def forward(self, graphs):
        requirement = "The input to forward must be a list/tuple which only includes Graph objects."
        assert isinstance(graphs, (list, tuple)), requirement
        assert all(isinstance(graph, Graph) for graph in graphs), requirement

        self.inputs = tuple(graph.data for graph in graphs)
        self.outputs = self.internal_forward(self.inputs)

        output_graphs = [
            Graph(output, predecessors=graphs, creator=self)
            for output in self.outputs
        ]
        if len(output_graphs) == 1:
            return output_graphs[0]
        return output_graphs
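
# A minimal, hypothetical subclass sketch illustrating the contract forward()
# relies on: internal_forward receives the tuple of raw input arrays
# (self.inputs) and must return a tuple of output arrays. The backward
# signature is only inferred from the tests in this file and is an assumption,
# not the project's documented API.
class Double(Function):
    def internal_forward(self, inputs):
        x, = inputs
        return (2 * x,)

    def backward(self, gradient):
        # one gradient per input, in the order of the inputs
        return (2 * gradient,)

# usage sketch (assuming __call__ packs its arguments and delegates to forward):
#     doubled = Double()(Graph(np.array([1.0, 2.0], dtype=constants.DTYPE)))
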
def test_sum_backward():
    data = np.random.uniform(-1, 1, (3, 2)).astype(constants.DTYPE)
    gradient = init([2])

    data_graph = Graph(data)
    sum_function = Sum()
    sum_function(data_graph)
    computed_gradients, = sum_function.backward(gradient)

    f = lambda: sum_function.internal_forward((data, ))
    numerical_gradients, = gradient_checker.compute_numerical_gradient(
        f, (data, ), (gradient, ))

    gradient_checker.assert_allclose(computed_gradients,
                                     numerical_gradients,
                                     atol=1e-4,
                                     rtol=1e-3)
def test_dropout_forward_ratio_1():
    data = get_data()

    with pytest.raises(ValueError):
        dropout(Graph(data), dropout_ratio=1.)
def test_dropout_forward_ratio_0():
    data = get_data()

    result = dropout(Graph(data), dropout_ratio=0.)
    gradient_checker.assert_allclose(result.data, data)
def test_mean_squared_error_forward_loss():
    data, data_2 = fixed_case()

    mse = mean_squared_error(Graph(data), Graph(data_2))
    assert math.isclose(float(mse.data), 0.583, abs_tol=1e-3)
def test_add_forward():
    data = init([2])

    result = add(Graph(data), Graph(data))
    assert result.data == 4
def test_sum_forward():
    data = np.random.uniform(-1, 1, (3, 2)).astype(constants.DTYPE)

    sum_output = sum(Graph(data))
    np.testing.assert_allclose(sum_output.data, data.sum())
def test_correct_input():
    # this should only raise NotImplementedError, as internal_forward is not implemented in the base class
    with pytest.raises(NotImplementedError):
        f = Function()
        f.forward([Graph(15)])
def calc_accuracy(data, labels):
    accuracy = F.accuracy(Graph(data), Graph(labels))
    return accuracy
def test_graph_backward_no_layers():
    data = np.array([2], dtype=constants.DTYPE)
    data_graph = Graph(data)
    data_graph.backward(None)
    assert data_graph.grad is None
    def __init__(self, data, labels):
        self.data = Graph(data, name="input[data]")
        self.labels = Graph(labels, name="input[labels]")
def test_mean_squared_error_forward_int_input():
    data, labels = fixed_case(with_label=True)

    mse = mean_squared_error(Graph(data), Graph(labels))
    assert math.isclose(float(mse.data), 0.583, abs_tol=1e-3)