Example 1
    def test_forward(self, batch, w, h, c, idx_act):

        nn_act = nn_activations[idx_act]

        # negative value for Relu testing
        inpt = np.random.uniform(low=-1., high=1.,
                                 size=(batch, w, h, c)).astype(float)

        # numpynet model init
        numpynet = Activation_layer(input_shape=inpt.shape, activation=nn_act)

        # tensorflow model
        if isinstance(nn_act, Leaky):
            model = tf.keras.layers.LeakyReLU()
        else:
            model = tf.keras.layers.Activation(
                activation=tf_activations[idx_act])

        # FORWARD

        # Keras Forward
        forward_out_keras = model(inpt).numpy()

        # numpynet forward
        numpynet.forward(inpt=inpt)
        forward_out_numpynet = numpynet.output

        # Forward check (Shape and Values)
        assert forward_out_keras.shape == forward_out_numpynet.shape
        np.testing.assert_allclose(forward_out_keras,
                                   forward_out_numpynet,
                                   atol=1e-4,
                                   rtol=1e-5)
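
For reference, `nn_activations` and `tf_activations` are index-aligned collections of NumPyNet activation classes and the corresponding Keras activation identifiers, and `idx_act` selects one pair per parametrized run. Below is a minimal sketch of what such fixtures could look like; the module path and the exact contents are assumptions, not the project's real test setup:

from NumPyNet.activations import Relu, Logistic, Tanh, Linear, Leaky  # assumed module path

nn_activations = [Relu, Logistic, Tanh, Linear, Leaky]
tf_activations = ['relu', 'sigmoid', 'tanh', 'linear', None]  # Leaky is routed to tf.keras.layers.LeakyReLU above
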
Example 2
    def test_backward(self, b, w, h, c):

        # TODO: test backward correctly

        input = np.random.uniform(low=-10, high=10., size=(b, w, h, c))
        tf_input = tf.Variable(input)

        # init keras model
        inp = Input(batch_shape=(b, w, h, c))
        x = Activation(activation='relu')(inp)
        y = Activation(activation='tanh')(x)
        Concat = Concatenate(axis=-1)([x, y])  # concatenation of x and y
        model = Model(inputs=[inp], outputs=Concat)
        model.compile(optimizer='sgd', loss='mse')

        # init NumPyNet model
        net = Network(batch=b, input_shape=(w, h, c))
        net.add(Activation_layer(activation='relu'))  # layer 1
        net.add(Activation_layer(activation='tanh'))  # layer 2
        net.add(Route_layer(input_layers=(1, 2), by_channels=True))
        net.add(
            Cost_layer(cost_type='mse',
                       scale=1.,
                       ratio=0.,
                       noobject_scale=1.,
                       threshold=0.,
                       smoothing=0.))
        net.compile(optimizer=SGD())

        net._fitted = True

        # FORWARDS

        fwd_out_numpynet = net.predict(X=input)

        with tf.GradientTape() as tape:
            preds = model(tf_input)
            grads = tape.gradient(preds, tf_input)

            fwd_out_keras = preds.numpy()
            delta_keras = grads.numpy()

        np.testing.assert_allclose(fwd_out_keras,
                                   fwd_out_numpynet,
                                   rtol=1e-5,
                                   atol=1e-8)

        net._fitted = False

        # BACKWARD

        net._net[3].delta = np.ones(shape=fwd_out_numpynet.shape, dtype=float)
        net._backward(X=input)

        delta_numpynet = net._net[0].delta

        assert delta_numpynet.shape == delta_keras.shape
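
Seeding the last layer's delta with np.ones mirrors what tf.GradientTape does above: calling tape.gradient on a non-scalar output back-propagates an implicit upstream gradient of ones, i.e. the gradient of the summed output. A small self-contained check of that equivalence (the shapes and the tanh op are chosen only for illustration):

import numpy as np
import tensorflow as tf

x = tf.Variable(np.random.uniform(low=-1., high=1., size=(2, 3)))

with tf.GradientTape() as tape:
    y = tf.tanh(x)
implicit = tape.gradient(y, x)  # non-scalar target -> implicit upstream ones

with tf.GradientTape() as tape:
    y = tf.tanh(x)
explicit = tape.gradient(y, x, output_gradients=tf.ones_like(y))

np.testing.assert_allclose(implicit.numpy(), explicit.numpy())
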
Example 3
    def test_printer(self, b, w, h, c):

        net = Network(batch=b, input_shape=(w, h, c))
        net.add(Activation_layer(activation='relu'))  # layer 1
        net.add(Activation_layer(activation='tanh'))  # layer 2
        net.add(Route_layer(input_layers=(1, 2), by_channels=True))
        net.add(
            Cost_layer(cost_type='mse',
                       scale=1.,
                       ratio=0.,
                       noobject_scale=1.,
                       threshold=0.,
                       smoothing=0.))
        net.compile(optimizer=SGD())

        net.summary()
Example 4
    def test_constructor(self, act_fun):

        layer = Activation_layer(activation=act_fun)

        assert layer.output is None
        assert layer.delta is None

        assert layer.activation is not Activations.activate
        assert layer.gradient is not Activations.gradient
Example 5
def test_route_layer():

  np.random.seed(123)

  batch, w, h, c = (1, 5, 5, 3)
  input = np.random.uniform(low=-10., high=10., size=(batch, w, h, c))  # from -10 to 10 to see the effect of both Relu and Tanh activations

  # init keras model
  inp    = Input(shape=(w, h, c), batch_shape=(batch, w, h, c))
  x      = Activation(activation='relu')(inp)
  y      = Activation(activation='tanh')(x)
  Concat = Concatenate(axis=-1)([x, y])  # concatenation of x and y
  model  = Model(inputs=[inp], outputs=Concat)

  # init NumPyNet model
  net = Network(batch=batch, input_shape=(w, h, c))

  net.add(Activation_layer(activation='relu')) # layer 1
  net.add(Activation_layer(activation='tanh')) # layer 2
  net.add(Route_layer(input_layers=(1,2), by_channels=True))

  net._fitted = True  # bypass the fitted check so that predict can run

  # FORWARDS

  fwd_out_numpynet = net.predict(X=input)
  fwd_out_keras    = model.predict(x=input, batch_size=batch)

  assert np.allclose(fwd_out_keras, fwd_out_numpynet) # ok

  net._fitted = False  # restore the correct (not fitted) state of the network

  # BACKWARD

  # try some derivatives
  gradient    = K.gradients(model.output, model.inputs)
  func        = K.function(model.inputs + model.outputs, gradient)
  delta_keras = func([input])[0]

  net._net[3].delta = np.ones(shape=fwd_out_numpynet.shape)
  net._backward(X=input)

  delta_numpynet = net._net[0].delta

  assert delta_numpynet.shape == delta_keras.shape
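
K.gradients and K.function belong to the Keras 1.x backend API and need a graph-mode backend; under TensorFlow 2 with eager execution the same delta can be obtained with tf.GradientTape, as the other tests above do. A sketch of the equivalent computation, reusing the `model` and `input` defined in this test (assuming TF 2 is available):

tf_input = tf.Variable(input)
with tf.GradientTape() as tape:
  preds = model(tf_input)
delta_keras = tape.gradient(preds, tf_input).numpy()
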
Example 6
    def test_forward(self, b, w, h, c):

        input = np.random.uniform(low=-10, high=10.,
                                  size=(b, w, h, c)).astype(float)

        # init keras model
        inp = Input(batch_shape=(b, w, h, c))
        x = Activation(activation='relu')(inp)
        y = Activation(activation='tanh')(x)
        Concat = Concatenate(axis=-1)([x, y])  # concatenation of x and y
        model = Model(inputs=[inp], outputs=Concat)
        model.compile(optimizer='sgd', loss='mse')

        # init NumPyNet model
        net = Network(batch=b, input_shape=(w, h, c))
        net.add(Activation_layer(activation='relu'))  # layer 1
        net.add(Activation_layer(activation='tanh'))  # layer 2
        net.add(Route_layer(input_layers=(1, 2), by_channels=True))
        net.add(
            Cost_layer(cost_type='mse',
                       scale=1.,
                       ratio=0.,
                       noobject_scale=1.,
                       threshold=0.,
                       smoothing=0.))
        net.compile(optimizer=SGD())

        net.summary()

        assert net._fitted == False
        net._fitted = True  # bypass the fitted check so that predict can run

        # FORWARDS

        fwd_out_numpynet = net.predict(X=input)
        fwd_out_keras = model.predict(x=input, batch_size=b)

        np.testing.assert_allclose(fwd_out_keras,
                                   fwd_out_numpynet,
                                   rtol=1e-5,
                                   atol=1e-8)
Example 7
    def test_printer(self, act_fun):

        layer = Activation_layer(activation=act_fun)

        with pytest.raises(TypeError):
            print(layer)

        layer.input_shape = 1
        with pytest.raises(TypeError):
            print(layer)

        layer.input_shape = (1, 2)
        with pytest.raises(ValueError):
            print(layer)

        layer.input_shape = (1, 2, 3)
        with pytest.raises(ValueError):
            print(layer)

        layer.input_shape = (1, 2, 3, 4)
        print(layer)

        assert layer.out_shape == (1, 2, 3, 4)
Example 8
def test_activation_layer(batch, w, h, c):
    '''
    Tests:
      if the forward and the backward of NumPyNet are consistent with Keras.
      if all the possible activation functions work with different batch sizes.
    '''
    np.random.seed(123)

    keras_activ = ['relu', 'sigmoid', 'tanh', 'linear']
    numpynet_activ = [Relu, Logistic, Tanh, Linear]

    # negative value for Relu testing
    inpt = np.random.uniform(low=-1., high=1., size=(batch, w, h, c))

    for act_fun in range(len(keras_activ)):
        # numpynet model init
        numpynet = Activation_layer(activation=numpynet_activ[act_fun])

        # Keras Model init
        inp = Input(shape=inpt.shape[1:], batch_shape=(batch, w, h, c))
        x = Activation(activation=keras_activ[act_fun])(inp)
        model = Model(inputs=[inp], outputs=x)

        # FORWARD

        # Keras Forward
        forward_out_keras = model.predict(inpt)

        # numpynet forward
        numpynet.forward(inpt)
        forward_out_numpynet = numpynet.output

        # Forward check (Shape and Values)
        assert forward_out_keras.shape == forward_out_numpynet.shape
        assert np.allclose(forward_out_keras, forward_out_numpynet)

        # BACKWARD

        # Gradient computation (Analytical)
        grad = K.gradients(model.output, [model.input])

        # Define a function to compute the gradient numerically
        func = K.function(model.inputs + [model.output], grad)

        # Keras delta
        keras_delta = func([inpt])[0]  # It returns a list with one array inside.

        # numpynet delta init. (Multiplication with gradients)
        numpynet.delta = np.ones(shape=inpt.shape, dtype=float)

        # Global delta init.
        delta = np.empty(shape=inpt.shape, dtype=float)

        # numpynet Backward
        numpynet.backward(delta)

        # Check dimension and delta
        assert keras_delta.shape == delta.shape
        assert np.allclose(keras_delta, delta)
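
The Keras deltas computed in this loop reduce to the analytical activation gradients times an upstream gradient of ones: relu gives 1 where x > 0, sigmoid gives s * (1 - s), tanh gives 1 - tanh(x)**2, linear gives 1. A quick numerical spot-check of the tanh case, purely illustrative and not part of the test suite:

import numpy as np

x = np.random.uniform(low=-1., high=1., size=(10,))
eps = 1e-6
numeric = (np.tanh(x + eps) - np.tanh(x - eps)) / (2. * eps)  # central difference
analytic = 1. - np.tanh(x)**2                                 # d/dx tanh(x)
np.testing.assert_allclose(numeric, analytic, rtol=1e-5, atol=1e-8)
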
Example 9
    def test_backward(self, batch, w, h, c, idx_act):

        nn_act = nn_activations[idx_act]

        # negative value for Relu testing
        inpt = np.random.uniform(low=-1., high=1.,
                                 size=(batch, w, h, c)).astype(float)
        tf_input = tf.Variable(inpt)

        # numpynet model init
        numpynet = Activation_layer(input_shape=inpt.shape, activation=nn_act)

        # tensorflow model
        if isinstance(nn_act, Leaky):
            model = tf.keras.layers.LeakyReLU()
        else:
            model = tf.keras.layers.Activation(
                activation=tf_activations[idx_act])

        # try to backward
        with pytest.raises(NotFittedError):
            # Global delta init.
            delta = np.empty(shape=inpt.shape, dtype=float)

            # numpynet Backward
            numpynet.backward(delta=delta)

        # FORWARD

        # Tensorflow Forward and backward
        with tf.GradientTape() as tape:
            preds = model(tf_input)
            grads = tape.gradient(preds, tf_input)

            forward_out_keras = preds.numpy()
            delta_keras = grads.numpy()

        # numpynet forward
        numpynet.forward(inpt=inpt)
        forward_out_numpynet = numpynet.output

        # Forward check (Shape and Values)
        assert forward_out_keras.shape == forward_out_numpynet.shape
        np.testing.assert_allclose(forward_out_keras,
                                   forward_out_numpynet,
                                   atol=1e-4,
                                   rtol=1e-5)

        # BACKWARD

        # numpynet delta init. (Multiplication with gradients)
        numpynet.delta = np.ones(shape=inpt.shape, dtype=float)

        # Global delta init.
        delta = np.empty(shape=inpt.shape, dtype=float)

        # numpynet Backward
        numpynet.backward(delta=delta)

        # Check dimension and delta
        assert delta_keras.shape == delta.shape
        np.testing.assert_allclose(delta_keras, delta, atol=1e-4, rtol=1e-4)