# Shared imports for the test snippets below.
import numpy
import pytest
from typing import cast

from thinc.api import (Adam, Ops, TensorFlowWrapper, get_current_ops,
                       keras_subclass, to_categorical)
from thinc.types import Array2d, IntsXd


def test_to_categorical():
    # Test without n_classes
    one_hot = to_categorical(numpy.asarray([1, 2], dtype="i"))
    assert one_hot.shape == (2, 3)
    # From keras
    # https://github.com/keras-team/keras/blob/master/tests/keras/utils/np_utils_test.py
    nc = 5
    shapes = [(1, ), (3, ), (4, 3), (5, 4, 3), (3, 1), (3, 2, 1)]
    expected_shapes = [
        (1, nc),
        (3, nc),
        (4, 3, nc),
        (5, 4, 3, nc),
        (3, 1, nc),
        (3, 2, 1, nc),
    ]
    labels = [numpy.random.randint(0, nc, shape) for shape in shapes]
    one_hots = [to_categorical(label, nc) for label in labels]
    for label, one_hot, expected_shape in zip(labels, one_hots,
                                              expected_shapes):
        assert one_hot.shape == expected_shape
        assert numpy.array_equal(one_hot, one_hot.astype(bool))
        assert numpy.all(one_hot.sum(axis=-1) == 1)
        assert numpy.all(
            numpy.argmax(one_hot, -1).reshape(label.shape) == label)
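

# For illustration only (not part of the original example): a minimal companion
# check of the exact values produced when n_classes is inferred from the largest
# label; the function name and expected array are assumptions consistent with
# the shape assertion above.
def test_to_categorical_inferred_width_sketch():
    one_hot = to_categorical(numpy.asarray([1, 2], dtype="i"))
    # Width is inferred as max(label) + 1 == 3; each row is a one-hot vector.
    assert numpy.array_equal(one_hot, numpy.asarray([[0, 1, 0], [0, 0, 1]]))
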
Example #2
@pytest.fixture
def Y(answer: int, n_classes: int) -> Array2d:
    # One-hot target for the expected answer, consumed by the test below.
    ops: Ops = get_current_ops()
    return cast(
        Array2d,
        to_categorical(cast(IntsXd, ops.asarray([answer])), n_classes=n_classes),
    )
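

# The test below also relies on pytest fixtures for X, input_size, n_classes and
# answer, which are not shown here. A minimal sketch with hypothetical values
# (the names follow the test signature; the concrete numbers are assumptions):
@pytest.fixture
def n_classes() -> int:
    return 10


@pytest.fixture
def answer() -> int:
    return 1


@pytest.fixture
def input_size() -> int:
    return 16


@pytest.fixture
def X(input_size: int) -> Array2d:
    ops: Ops = get_current_ops()
    return cast(Array2d, ops.alloc2f(1, input_size))
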
def test_tensorflow_wrapper_serialize_model_subclass(
    X, Y, input_size, n_classes, answer
):
    import tensorflow as tf

    input_shape = (1, input_size)
    ops = get_current_ops()

    @keras_subclass(
        "foo.v1",
        X=ops.alloc2f(*input_shape),
        Y=to_categorical(ops.asarray1i([1]), n_classes=n_classes),
        input_shape=input_shape,
    )
    class CustomKerasModel(tf.keras.Model):
        def __init__(self, **kwargs):
            super(CustomKerasModel, self).__init__(**kwargs)
            self.in_dense = tf.keras.layers.Dense(
                12, name="in_dense", input_shape=input_shape
            )
            self.out_dense = tf.keras.layers.Dense(
                n_classes, name="out_dense", activation="softmax"
            )

        def call(self, inputs) -> tf.Tensor:
            x = self.in_dense(inputs)
            return self.out_dense(x)

    model = TensorFlowWrapper(CustomKerasModel())
    # Train the model to predict the right single answer
    optimizer = Adam()
    for i in range(50):
        guesses, backprop = model(X, is_train=True)
        d_guesses = (guesses - Y) / guesses.shape[0]
        backprop(d_guesses)
        model.finish_update(optimizer)
    predicted = model.predict(X).argmax()
    assert predicted == answer

    # Save, then load the model back from bytes
    model.from_bytes(model.to_bytes())

    # The from_bytes model gets the same answer
    assert model.predict(X).argmax() == answer
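

# For comparison, a hedged sketch (not from the original suite) of the same
# bytes round-trip with a plain tf.keras.Sequential model, which should not need
# the keras_subclass decorator because Keras can rebuild it from its own config.
def test_tensorflow_wrapper_serialize_sequential_sketch(input_size, n_classes):
    import tensorflow as tf

    keras_model = tf.keras.Sequential(
        [
            tf.keras.Input(shape=(input_size,)),
            tf.keras.layers.Dense(12, name="in_dense"),
            tf.keras.layers.Dense(n_classes, name="out_dense", activation="softmax"),
        ]
    )
    model = TensorFlowWrapper(keras_model)
    # Serialize and restore in place; the wrapped weights survive the round trip.
    model.from_bytes(model.to_bytes())
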
@pytest.fixture
def Y(answer, n_classes):
    # Untyped variant of the Y fixture above.
    ops = get_current_ops()
    return to_categorical(ops.asarray1i([answer]), n_classes=n_classes)
Example #5
# label_smoothing is expected to be supplied by pytest, e.g. via
# @pytest.mark.parametrize("label_smoothing", [0.1, 0.2]); the values here are
# illustrative assumptions.
def test_to_categorical(label_smoothing):
    # Test without n_classes
    one_hot = to_categorical(numpy.asarray([1, 2], dtype="i"))
    assert one_hot.shape == (2, 3)
    # From keras
    # https://github.com/keras-team/keras/blob/master/tests/keras/utils/np_utils_test.py
    nc = 5
    shapes = [(1, ), (3, ), (4, 3), (5, 4, 3), (3, 1), (3, 2, 1)]
    expected_shapes = [
        (1, nc),
        (3, nc),
        (4, 3, nc),
        (5, 4, 3, nc),
        (3, 1, nc),
        (3, 2, 1, nc),
    ]
    labels = [numpy.random.randint(0, nc, shape) for shape in shapes]
    one_hots = [to_categorical(label, nc) for label in labels]
    smooths = [
        to_categorical(label, nc, label_smoothing=label_smoothing)
        for label in labels
    ]
    for label, one_hot, smooth, expected_shape in zip(
        labels, one_hots, smooths, expected_shapes
    ):
        assert one_hot.shape == expected_shape
        assert smooth.shape == expected_shape
        assert numpy.array_equal(one_hot, one_hot.astype(bool))
        assert numpy.all(one_hot.sum(axis=-1) == 1)
        assert numpy.all(
            numpy.argmax(one_hot, -1).reshape(label.shape) == label)
        assert numpy.all(smooth.argmax(axis=-1) == one_hot.argmax(axis=-1))
        assert numpy.all(numpy.isclose(numpy.sum(smooth, axis=-1), 1.0))
        assert numpy.isclose(numpy.max(smooth), 1 - label_smoothing)
        assert numpy.isclose(numpy.min(smooth),
                             label_smoothing / (smooth.shape[-1] - 1))

    # At least one class is required without label smoothing.
    numpy.testing.assert_allclose(to_categorical(numpy.asarray([0, 0, 0]), 1),
                                  [[1.0], [1.0], [1.0]])
    numpy.testing.assert_allclose(to_categorical(numpy.asarray([0, 0, 0])),
                                  [[1.0], [1.0], [1.0]])
    with pytest.raises(ValueError, match=r"n_classes should be at least 1"):
        to_categorical(numpy.asarray([0, 0, 0]), 0)

    # At least two classes are required with label smoothing.
    numpy.testing.assert_allclose(
        to_categorical(numpy.asarray([0, 1, 0]), 2, label_smoothing=0.01),
        [[0.99, 0.01], [0.01, 0.99], [0.99, 0.01]],
    )
    numpy.testing.assert_allclose(
        to_categorical(numpy.asarray([0, 1, 0]), label_smoothing=0.01),
        [[0.99, 0.01], [0.01, 0.99], [0.99, 0.01]],
    )
    with pytest.raises(
            ValueError,
            match=r"n_classes should be greater than 1.*label smoothing.*but 1"
    ):
        to_categorical(numpy.asarray([0, 1, 0]), 1, label_smoothing=0.01)
    with pytest.raises(
            ValueError,
            match=r"n_classes should be greater than 1.*label smoothing.*but 1"
    ):
        to_categorical(numpy.asarray([0, 0, 0]), label_smoothing=0.01)
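

# A small worked example of the smoothing arithmetic checked above (the concrete
# numbers are assumptions, not from the original suite): with n_classes=4 and
# label_smoothing=0.1, the "on" value is 1 - 0.1 = 0.9 and each "off" value is
# 0.1 / (4 - 1) ≈ 0.0333, so every row still sums to 1.
def test_to_categorical_label_smoothing_values_sketch():
    smooth = to_categorical(numpy.asarray([2]), 4, label_smoothing=0.1)
    numpy.testing.assert_allclose(
        smooth, [[0.1 / 3, 0.1 / 3, 0.9, 0.1 / 3]], rtol=1e-6
    )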