# Assumed imports for these snippets (module paths follow the NumPyNet
# test-suite layout; treat them as a best guess reconstructed from the
# identifiers used below):
import numpy as np
import pytest
from random import choice

import tensorflow.keras.backend as K
from tensorflow.keras.layers import Input, SimpleRNN
from tensorflow.keras.models import Model

from NumPyNet.layers.simple_rnn_layer import SimpleRNN_layer
from NumPyNet.utils import data_to_timesteps


# The methods below are fragments of a test class (hypothetical name:
# TestSimpleRNNLayer); initialize_step is a plain helper used by the tests.

    def initialize_step(steps, outputs, features, batch, return_seq,
                        activation, inpt_keras):
        # weights init
        kernel = np.random.uniform(low=-1, high=1, size=(features, outputs))
        recurrent_kernel = np.random.uniform(low=-1,
                                             high=1,
                                             size=(outputs, outputs))
        bias = np.random.uniform(low=-1, high=1, size=(outputs, ))

        # create keras model
        inp = Input(shape=inpt_keras.shape[1:])
        rnn = SimpleRNN(units=outputs,
                        activation=activation,
                        return_sequences=return_seq)(inp)
        model = Model(inputs=inp, outputs=rnn)

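        # Keras stores SimpleRNN parameters in the order
        # [kernel, recurrent_kernel, bias]; set_weights below expects
        # exactly this ordering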
        # set weights for the keras model
        model.set_weights([kernel, recurrent_kernel, bias])

        # create NumPyNet layer
        layer = SimpleRNN_layer(outputs=outputs,
                                steps=steps,
                                input_shape=(batch, 1, 1, features),
                                activation=activation,
                                return_sequence=return_seq)

        # set NumPyNet weights
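        # load_weights consumes a single flat buffer, packed here as
        # bias first, then kernel, then recurrent kernel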
        layer.load_weights(
            np.concatenate(
                [bias.ravel(),
                 kernel.ravel(),
                 recurrent_kernel.ravel()]))

        return model, layer
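
    # A minimal usage sketch for the helper above (hypothetical values,
    # assuming the imports at the top of this file):
    #
    #   inpt = np.random.uniform(size=(8, 4))
    #   inpt_keras, _ = data_to_timesteps(inpt, steps=3)
    #   model, layer = initialize_step(steps=3, outputs=5, features=4,
    #                                  batch=8, return_seq=False,
    #                                  activation='tanh',
    #                                  inpt_keras=inpt_keras)
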
    def test_printer(self, outputs, steps, b, w, h, c):

        layer = SimpleRNN_layer(outputs=outputs,
                                steps=steps,
                                activation='linear')

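        # without an input_shape the layer has undefined dimensions, so
        # printing its summary is expected to raise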
        with pytest.raises(AttributeError):
            print(layer)

        layer = SimpleRNN_layer(outputs=outputs,
                                steps=steps,
                                activation='linear',
                                input_shape=(b, w, h, c))

        print(layer)

    def test_constructor(self, outputs, steps, b, w, h, c):

        numpynet_activ = ['relu', 'logistic', 'tanh', 'linear']

        if outputs > 0:
            weights_choice = [
                np.random.uniform(low=-1, high=1., size=(w * h * c, outputs)),
                None
            ]
            bias_choice = [
                np.random.uniform(low=-1, high=1., size=(outputs, )), None
            ]

        else:
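            # a non-positive number of outputs is invalid and must raise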
            with pytest.raises(ValueError):
                SimpleRNN_layer(outputs=outputs, steps=steps)

            outputs += 10
            weights_choice = [[
                np.random.uniform(low=-1, high=1., size=(w * h * c, outputs))
            ] * 3, None]
            bias_choice = [
                [np.random.uniform(low=-1, high=1., size=(outputs, ))] * 3,
                None
            ]

        weights = choice(weights_choice)
        bias = choice(bias_choice)

        for numpynet_act in numpynet_activ:
            layer = SimpleRNN_layer(outputs=outputs,
                                    steps=steps,
                                    activation=numpynet_act,
                                    input_shape=(b, w, h, c),
                                    weights=weights,
                                    bias=bias)

            assert layer.output is None

# Example #4

    def _forward(self, steps, outputs, features, batch, return_seq):

        activation = 'tanh'

        inpt = np.random.uniform(size=(batch, features))
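        # expand the flat (batch, features) input into sequences of
        # `steps` timesteps each, as the assert below documents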
        inpt_keras, _ = data_to_timesteps(inpt, steps=steps)

        assert inpt_keras.shape == (batch, steps, features)

        # weights init
        kernel = np.random.uniform(low=-1, high=1, size=(features, outputs))
        recurrent_kernel = np.random.uniform(low=-1,
                                             high=1,
                                             size=(outputs, outputs))
        bias = np.random.uniform(low=-1, high=1, size=(outputs, ))

        # create keras model
        inp = Input(shape=inpt_keras.shape[1:])
        rnn = SimpleRNN(units=outputs,
                        activation=activation,
                        return_sequences=return_seq)(inp)
        model = Model(inputs=inp, outputs=rnn)

        # set weights for the keras model
        model.set_weights([kernel, recurrent_kernel, bias])

        # create NumPyNet layer
        layer = SimpleRNN_layer(outputs=outputs,
                                steps=steps,
                                input_shape=(batch, 1, 1, features),
                                activation=activation,
                                return_sequence=return_seq)

        # set NumPyNet weights
        layer.load_weights(
            np.concatenate(
                [bias.ravel(),
                 kernel.ravel(),
                 recurrent_kernel.ravel()]))

        # FORWARD

        # forward for keras
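        # (output shape is (batch, steps, outputs) when return_seq is
        # True, (batch, outputs) otherwise)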
        forward_out_keras = model.predict(inpt_keras)

        # forward NumPyNet
        layer.forward(inpt)
        forward_out_numpynet = layer.output.reshape(forward_out_keras.shape)

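        # loose tolerances: Keras computes in float32 while the NumPy
        # implementation runs in float64, so bitwise equality is not expected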
        np.testing.assert_allclose(forward_out_numpynet,
                                   forward_out_keras,
                                   atol=1e-4,
                                   rtol=1e-3)

# Example #5

    def test_constructor(self, outputs, steps, b, w, h, c):

        numpynet_activ = ['relu', 'logistic', 'tanh', 'linear']

        if outputs > 0:
            weights_choice = [
                np.random.uniform(low=-1, high=1., size=(w * h * c, outputs)),
                None
            ]
            bias_choice = [
                np.random.uniform(low=-1, high=1., size=(outputs, )), None
            ]

        else:
            with pytest.raises(ValueError):
                SimpleRNN_layer(outputs=outputs, steps=steps)

            outputs += 10
            weights_choice = [[
                np.random.uniform(low=-1, high=1., size=(w * h * c, outputs))
            ] * 3, None]
            bias_choice = [
                [np.random.uniform(low=-1, high=1., size=(outputs, ))] * 3,
                None
            ]

        weights = choice(weights_choice)
        bias = choice(bias_choice)

        for numpynet_act in numpynet_activ:

            layer = SimpleRNN_layer(outputs=outputs,
                                    steps=steps,
                                    activation=numpynet_act,
                                    input_shape=(b, w, h, c),
                                    weights=weights,
                                    bias=bias)

            if weights is not None:
                np.testing.assert_allclose(layer.input_layer.weights,
                                           weights[0],
                                           rtol=1e-5,
                                           atol=1e-8)
                np.testing.assert_allclose(layer.self_layer.weights,
                                           weights[1],
                                           rtol=1e-5,
                                           atol=1e-8)
                np.testing.assert_allclose(layer.output_layer.weights,
                                           weights[2],
                                           rtol=1e-5,
                                           atol=1e-8)

            if bias is not None:
                np.testing.assert_allclose(layer.input_layer.bias,
                                           bias[0],
                                           rtol=1e-5,
                                           atol=1e-8)
                np.testing.assert_allclose(layer.self_layer.bias,
                                           bias[1],
                                           rtol=1e-5,
                                           atol=1e-8)
                np.testing.assert_allclose(layer.output_layer.bias,
                                           bias[2],
                                           rtol=1e-5,
                                           atol=1e-8)

            assert layer.output is None

# Example #6

    def _backward(self, steps, outputs, features, batch, return_seq):

        return_seq = False  # fixed to the many-to-one case for now
        activation = 'tanh'

        inpt = np.random.uniform(size=(batch, features))
        inpt_keras, _ = data_to_timesteps(inpt, steps=steps)

        assert inpt_keras.shape == (batch, steps, features)

        # weights init
        kernel = np.random.uniform(low=-1, high=1, size=(features, outputs))
        recurrent_kernel = np.random.uniform(low=-1,
                                             high=1,
                                             size=(outputs, outputs))
        bias = np.random.uniform(low=-1, high=1, size=(outputs, ))

        # create keras model
        inp = Input(shape=inpt_keras.shape[1:])
        rnn = SimpleRNN(units=outputs,
                        activation=activation,
                        return_sequences=return_seq)(inp)
        model = Model(inputs=inp, outputs=rnn)

        # set weights for the keras model
        model.set_weights([kernel, recurrent_kernel, bias])

        # create NumPyNet layer
        layer = SimpleRNN_layer(outputs=outputs,
                                steps=steps,
                                input_shape=(batch, 1, 1, features),
                                activation=activation,
                                return_sequence=return_seq)

        # set NumPyNet weights
        layer.load_weights(
            np.concatenate(
                [bias.ravel(),
                 kernel.ravel(),
                 recurrent_kernel.ravel()]))

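        # sanity check: both implementations now hold identical parameters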
        np.testing.assert_allclose(layer.weights,
                                   model.get_weights()[0],
                                   rtol=1e-5,
                                   atol=1e-8)
        np.testing.assert_allclose(layer.recurrent_weights,
                                   model.get_weights()[1],
                                   rtol=1e-5,
                                   atol=1e-8)
        np.testing.assert_allclose(layer.bias,
                                   model.get_weights()[2],
                                   rtol=1e-5,
                                   atol=1e-8)

        # FORWARD

        # forward for keras
        forward_out_keras = model.predict(inpt_keras)

        # forward NumPyNet
        layer.forward(inpt)
        forward_out_numpynet = layer.output.reshape(forward_out_keras.shape)

        np.testing.assert_allclose(forward_out_numpynet,
                                   forward_out_keras,
                                   atol=1e-4,
                                   rtol=1e-3)

        # BACKWARD

        # Compute the gradients of the output w.r.t. the input and
        # w.r.t. the trainable weights
        gradient1 = K.gradients(model.output, [model.input])
        gradient2 = K.gradients(model.output, model.trainable_weights)

        # Define a function to evaluate the gradient
        func1 = K.function(model.inputs + [model.output], gradient1)
        func2 = K.function(
            model.inputs + model.trainable_weights + model.outputs, gradient2)
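        # NOTE: K.gradients and K.function require graph mode; under TF2
        # this needs tf.compat.v1.disable_eager_execution()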

        # Compute delta for Keras
        delta_keras = func1([inpt_keras])[0]
        updates = func2([inpt_keras])

        weights_update_keras = updates[0]
        recurrent_weights_update_keras = updates[1]
        bias_update_keras = updates[2]

        # backward pass for NumPyNet
        delta = np.zeros(shape=inpt_keras.shape, dtype=float)
        layer.delta = np.ones(shape=layer.output.shape, dtype=float)
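        # a delta of ones matches K.gradients, which differentiates the
        # implicit sum of the model outputs; `delta` receives the gradient
        # w.r.t. the input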
        layer.backward(inpt, delta, copy=True)

        np.testing.assert_allclose(layer.bias_update,
                                   bias_update_keras,
                                   atol=1e-8,
                                   rtol=1e-5)
        np.testing.assert_allclose(layer.weights_update,
                                   weights_update_keras,
                                   atol=1e-8,
                                   rtol=1e-5)
        np.testing.assert_allclose(delta, delta_keras, atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(layer.recurrent_weights_update,
                                   recurrent_weights_update_keras,
                                   atol=1e-8,
                                   rtol=1e-5)
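
# A minimal driver sketch for the two checks above (hypothetical class name
# and parameter values; the original suite typically invokes these methods
# through property-based test parameters):
#
#   suite = TestSimpleRNNLayer()
#   suite._forward(steps=3, outputs=5, features=4, batch=8, return_seq=False)
#   suite._backward(steps=3, outputs=5, features=4, batch=8, return_seq=False)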