Example #1
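The imports are not shown in this snippet; given the identifiers it uses (TF2-style GradientTape plus the NumPyNet layers), they are presumably along these lines, though the exact module paths are an assumption:

import numpy as np
import tensorflow as tf
from tensorflow.keras.layers import Input, Activation, Concatenate
from tensorflow.keras.models import Model

from NumPyNet.network import Network
from NumPyNet.layers.activation_layer import Activation_layer
from NumPyNet.layers.route_layer import Route_layer
from NumPyNet.layers.cost_layer import Cost_layer
from NumPyNet.optimizer import SGD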
    def test_backward(self, b, w, h, c):

        # TODO: test backward correctly

        input = np.random.uniform(low=-10, high=10., size=(b, w, h, c))
        tf_input = tf.Variable(input)

        # init keras model
        inp = Input(batch_shape=(b, w, h, c))
        x = Activation(activation='relu')(inp)
        y = Activation(activation='tanh')(x)
        Concat = Concatenate(axis=-1)([x, y])  # concatenation of x and y
        model = Model(inputs=[inp], outputs=Concat)
        model.compile(optimizer='sgd', loss='mse')

        # init NumPyNet model
        net = Network(batch=b, input_shape=(w, h, c))
        net.add(Activation_layer(activation='relu'))  # layer 1
        net.add(Activation_layer(activation='tanh'))  # layer 2
        net.add(Route_layer(input_layers=(1, 2), by_channels=True))  # layer 3: concatenate layers 1 and 2 along channels, like Concatenate(axis=-1)
        net.add(
            Cost_layer(cost_type='mse',
                       scale=1.,
                       ratio=0.,
                       noobject_scale=1.,
                       threshold=0.,
                       smoothing=0.))
        net.compile(optimizer=SGD())

        net._fitted = True  # flag the network as fitted so that predict() can be called

        # FORWARDS

        fwd_out_numpynet = net.predict(X=input)

        # Keras forward pass plus gradients of the (summed) output w.r.t. the input;
        # this matches seeding the NumPyNet backward pass with a delta of ones below
        with tf.GradientTape() as tape:
            preds = model(tf_input)
        grads = tape.gradient(preds, tf_input)

        fwd_out_keras = preds.numpy()
        delta_keras = grads.numpy()

        np.testing.assert_allclose(fwd_out_keras,
                                   fwd_out_numpynet,
                                   rtol=1e-5,
                                   atol=1e-8)

        net._fitted = False

        # BACKWARD

        net._net[3].delta = np.ones(shape=fwd_out_numpynet.shape, dtype=float)  # seed the route layer (index 3; index 0 is the input layer) with ones
        net._backward(X=input)

        delta_numpynet = net._net[0].delta  # delta on the input layer, i.e. the gradient w.r.t. the network input

        assert delta_numpynet.shape == delta_keras.shape
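The method stops at a shape check, consistent with the TODO at the top; if a value-level comparison of the backward pass is also wanted, it could be added along the same lines as the forward check, e.g.:

        np.testing.assert_allclose(delta_keras,
                                   delta_numpynet,
                                   rtol=1e-5,
                                   atol=1e-8)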
Example #2
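As above, the imports are not part of the snippet; since this version relies on the old K.gradients / K.function API, they are presumably something like the following (module paths are an assumption, and a TF1-style Keras backend is implied):

import numpy as np
from keras.models import Model
from keras.layers import Input, Activation, Concatenate
import keras.backend as K

from NumPyNet.network import Network
from NumPyNet.layers.activation_layer import Activation_layer
from NumPyNet.layers.route_layer import Route_layer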
def test_route_layer():

  np.random.seed(123)

  batch, w, h, c = (1, 5, 5, 3)
  input = np.random.uniform(low=-10, high=10., size=(batch, w, h, c))  # values in [-10, 10] to exercise both the ReLU and the tanh activations

  # init keras model
  inp    = Input(batch_shape=(batch, w, h, c))
  x      = Activation(activation='relu')(inp)
  y      = Activation(activation='tanh')(x)
  Concat = Concatenate(axis=-1)([x, y])  # concatenation of x and y
  model  = Model(inputs=[inp], outputs=Concat)

  # init NumPyNet model
  net = Network(batch=batch, input_shape=(w, h, c))

  net.add(Activation_layer(activation='relu')) # layer 1
  net.add(Activation_layer(activation='tanh')) # layer 2
  net.add(Route_layer(input_layers=(1, 2), by_channels=True))  # layer 3: concatenate layers 1 and 2 along channels

  net._fitted = True  # flag the network as fitted so that predict() can be called

  # FORWARDS

  fwd_out_numpynet = net.predict(X=input)
  fwd_out_keras    = model.predict(x=input, batch_size=batch)

  assert np.allclose(fwd_out_keras, fwd_out_numpynet) # ok

  net._fitted = False  # restore the real (unfitted) state of the network

  # BACKWARD

  # gradients of the (summed) output w.r.t. the input, which matches seeding
  # the NumPyNet backward pass with a delta of ones below
  gradient    = K.gradients(model.output, model.inputs)
  func        = K.function(model.inputs, gradient)
  delta_keras = func([input])[0]

  net._net[3].delta = np.ones(shape=fwd_out_numpynet.shape)  # seed the route layer (index 3; index 0 is the input layer) with ones
  net._backward(X=input)

  delta_numpynet = net._net[0].delta
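The snippet ends before any comparison of the two backward results; a natural final check, mirroring the forward assertion above, might be:

  assert delta_numpynet.shape == delta_keras.shape
  assert np.allclose(delta_numpynet, delta_keras)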