Example #1
    def test_printer(self, b, w, h, c):

        layer = Input_layer(input_shape=(b, w, h, c))

        print(layer)

        layer.input_shape = (3.14, w, h, c)

        with pytest.raises(ValueError):
            print(layer)
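For reference, these test methods receive b, w, h and c from a test harness that is not shown. A minimal, hypothetical way to drive the same check with plain pytest (the repository may instead use hypothesis strategies or fixtures, and the module path is an assumption):

    import pytest
    from NumPyNet.layers.input_layer import Input_layer  # assumed module path

    @pytest.mark.parametrize('b, w, h, c', [(5, 10, 10, 3), (1, 32, 32, 1)])
    def test_printer_standalone(b, w, h, c):
        layer = Input_layer(input_shape=(b, w, h, c))
        print(layer)                           # a valid integer shape prints fine
        layer.input_shape = (3.14, w, h, c)    # a non-integer batch is invalid
        with pytest.raises(ValueError):
            print(layer)                       # formatting the shape now fails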
Example #2
    def __init__(self, batch, input_shape=None, train=None):
        '''
        Network constructor: store the batch size and, optionally,
        build the input layer from a (width, height, channels) shape.
        '''
        self.batch = batch
        self.train = train

        if input_shape is not None:

            try:

                self.w, self.h, self.c = input_shape

            except (TypeError, ValueError):
                raise ValueError(
                    'Network model: incorrect input_shape. Expected a 3D tuple (width, height, channels). Given {}'
                    .format(input_shape))

            self._net = [
                Input_layer(input_shape=(self.batch, self.w, self.h, self.c))
            ]

        else:
            self._net = []

        self.metrics = None
        self._fitted = False
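A short usage sketch of this constructor, assuming the enclosing class is called Network (the snippet does not show its name):

    # hypothetical usage; 'Network' is an assumed class name
    net = Network(batch=8, input_shape=(32, 32, 3))
    assert isinstance(net._net[0], Input_layer)

    # anything that does not unpack into (width, height, channels)
    # is rejected with the ValueError raised above
    try:
        Network(batch=8, input_shape=(32, 32))
    except ValueError as err:
        print(err)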
Example #3
    def test_constructor(self, b, w, h, c):

        input_shape = choice([(b, w, h, c), (b, w, h), b, None])

        if input_shape != (b, w, h, c):
            with pytest.raises(ValueError):
                layer = Input_layer(input_shape=input_shape)

        else:
            layer = Input_layer(input_shape=input_shape)

            assert layer.input_shape == (b, w, h, c)

            assert layer.output is None
            assert layer.delta is None
            assert layer.out_shape == (b, w, h, c)
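The assertions above imply that Input_layer accepts only a 4-tuple (batch, width, height, channels) and starts with empty output and delta. A minimal constructor sketch consistent with this test (an illustration, not the library's actual code):

    class Input_layer:
        def __init__(self, input_shape):
            try:
                b, w, h, c = input_shape
            except (TypeError, ValueError):
                raise ValueError(
                    'Input layer error: incorrect input_shape. '
                    'Expected a 4D tuple (batch, width, height, channels). '
                    'Given {}'.format(input_shape))
            self.input_shape = (b, w, h, c)
            self.output = None   # filled by forward()
            self.delta = None    # filled during backward()

        @property
        def out_shape(self):
            return self.input_shape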
Example #4
    def test_backward(self, b, w, h, c):

        inpt = np.random.uniform(low=-1, high=1.,
                                 size=(b, w, h, c)).astype(float)
        tf_input = tf.Variable(inpt)

        # numpynet model init
        layer = Input_layer(input_shape=inpt.shape)

        # Keras Model init
        model = tf.keras.layers.InputLayer(input_shape=(w, h, c))

        # FORWARD

        # Tensorflow Forward and backward
        with tf.GradientTape() as tape:
            preds = model(tf_input)

        grads = tape.gradient(preds, tf_input)

        forward_out_keras = preds.numpy()
        delta_keras = grads.numpy()

        # layer forward
        layer.forward(inpt=inpt)
        forward_out_numpynet = layer.output

        # Forward check (Shape and Values)
        assert forward_out_keras.shape == forward_out_numpynet.shape
        np.testing.assert_allclose(forward_out_keras, forward_out_numpynet)

        # BACKWARD

        # layer delta init.
        layer.delta = np.ones(shape=inpt.shape, dtype=float)

        # Global delta init.
        delta = np.empty(shape=inpt.shape, dtype=float)

        # layer Backward
        layer.backward(delta=delta)

        # Check dimension and delta
        assert delta_keras.shape == delta.shape
        np.testing.assert_allclose(delta_keras, delta)

        delta = np.zeros(shape=(1, 2, 3, 4), dtype=float)

        with pytest.raises(ValueError):
            layer.backward(delta)
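The input layer is an identity mapping, so its backward pass simply copies the incoming delta; with layer.delta seeded to ones, the Keras gradient must be all ones as well. A quick standalone check of that fact:

    import numpy as np
    import tensorflow as tf

    x = tf.Variable(np.random.uniform(-1., 1., size=(2, 4, 4, 3)))
    with tf.GradientTape() as tape:
        y = tf.identity(x)       # what an input layer effectively computes

    g = tape.gradient(y, x)      # gradient of sum(y) w.r.t. x: all ones
    np.testing.assert_allclose(g.numpy(), np.ones((2, 4, 4, 3)))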
Example #5
  def load(self, cfg_filename, weights=None):
    '''
    Load the network model from a config file in INI format.
    '''

    model = net_config(cfg_filename)

    self.batch = model.get('net1', 'batch', 1)
    self.w = model.get('net1', 'width', 416)
    self.h = model.get('net1', 'height', 416)
    self.c = model.get('net1', 'channels', 3)
    # TODO: add other network parameters

    input_shape = (self.batch, self.w, self.h, self.c)
    self._net = [ Input_layer(input_shape=input_shape) ]

    print('layer     filters    size              input                output')

    for i, layer in enumerate(model):
      layer_t = re.split(r'\d+', layer)[0]
      params = dict(model.get_params(layer))

      layer_params = {}
      for k, v in params.items():
        try:
          val = eval(v)
        except NameError:
          val = v
        except Exception:
          raise DataVariableError('Type variable not recognized! Possible variables are only [int, float, string, vector<float>].')

        layer_params[k] = val

      if layer_t == 'shortcut':
        _from = model.get(layer, 'from', 0)
        self._net.append( self.LAYERS[layer_t](input_shape=input_shape, **layer_params)([self._net[-1], self._net[_from]]) )

      elif layer_t == 'route':
        _layers = model.get(layer, 'layers', [])
        self._net.append( self.LAYERS[layer_t](input_shape=input_shape, **layer_params)(self._net[_layers]) )

      else:
        self._net.append( self.LAYERS[layer_t](input_shape=input_shape, **layer_params)(self._net[-1]) )

      print('{:>4d} {}'.format(i, self._net[-1]), flush=True, end='\n')

      #if model.get(layer, 'batch_normalize', 0): # wrong because it add a new layer and so the shortcut is broken
      #  self._net.append( BatchNorm_layer()(self._net[-1]) )
      #  print('{:>4d} {}'.format(i, self._net[-1]), flush=True, end='\n')

    if weights is not None:
      self.load_weights(weights)

    return self
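For context, net_config parses a darknet-style INI file; the 'net1'/'net0' lookups above suggest the parser numbers sections on load. A hypothetical minimal cfg consistent with the keys read here (section names and layer options are assumptions):

    ; example.cfg - hypothetical minimal configuration
    [net]
    batch=1
    width=416
    height=416
    channels=3

    [convolutional]
    filters=16
    size=3
    stride=1
    activation=relu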
Example #6
    def load(self, cfg_filename, weights=None):
        '''
        Load the network model from a config file in INI format.
        '''

        model = net_config(cfg_filename)

        self.batch = model.get('net0', 'batch', 1)
        self.w = model.get('net0', 'width', 416)
        self.h = model.get('net0', 'height', 416)
        self.c = model.get('net0', 'channels', 3)
        # TODO: add other network parameters

        input_shape = (self.batch, self.w, self.h, self.c)
        self._net = [Input_layer(input_shape=input_shape)]

        print('layer     filters    size              input                output')

        for i, layer in enumerate(model):
            layer_t = re.split(r'\d+', layer)[0]
            params = model.get_params(layer)

            if layer_t == 'shortcut':
                _from = model.get(layer, 'from', 0)
                self._net.append(
                    self.LAYERS[layer_t](input_shape=input_shape,
                                         **params)([self._net[-1], self._net[_from]]))

            elif layer_t == 'route':
                _layers = model.get(layer, 'layers', [])
                self._net.append(
                    self.LAYERS[layer_t](input_shape=input_shape,
                                         **params)(self._net[_layers]))

            else:
                self._net.append(
                    self.LAYERS[layer_t](input_shape=input_shape,
                                         **params)(self._net[-1]))

            input_shape = self._net[-1].out_shape

            print('{:>4d} {}'.format(i, self._net[-1]), end='\n')  # flush=True
            sys.stdout.flush()  # compatibility with Python 2.7

            # if model.get(layer, 'batch_normalize', 0): # wrong because it add a new layer and so the shortcut is broken
            #   self._net.append( BatchNorm_layer()(self._net[-1]) )
            #   print('{:>4d} {}'.format(i, self._net[-1]), flush=True, end='\n')

        if weights is not None:
            self.load_weights(weights)

        return self
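Note the one functional difference from Example #5: input_shape = self._net[-1].out_shape feeds each layer the output shape of the previous one instead of reusing the network input shape. A toy illustration of that chaining pattern, with a hypothetical stand-in layer:

    class DummyLayer:
        # hypothetical stand-in: halves width and height when stride=2
        def __init__(self, input_shape, stride=1):
            b, w, h, c = input_shape
            self.out_shape = (b, w // stride, h // stride, c)

    input_shape = (1, 416, 416, 3)
    net = []
    for stride in (1, 2, 2):
        net.append(DummyLayer(input_shape=input_shape, stride=stride))
        input_shape = net[-1].out_shape   # propagate the real shape forward

    print(net[-1].out_shape)   # (1, 104, 104, 3)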
Example #7
def test_input_layer(batch, w, h, c):
    '''
    Tests:
      if the forward and the backward of Numpy_net are consistent with Keras.
    '''

    inpt = np.random.uniform(low=-1, high=1., size=(batch, w, h, c))

    # numpynet model init
    numpynet = Input_layer(input_shape=inpt.shape)

    # Keras Model init
    inp = Input(batch_shape=(batch, w, h, c))  # shape and batch_shape are mutually exclusive
    x = Activation(activation='linear')(inp)
    model = Model(inputs=[inp], outputs=x)

    # FORWARD

    # Keras Forward
    forward_out_keras = model.predict(inpt)

    # numpynet forward
    numpynet.forward(inpt)
    forward_out_numpynet = numpynet.output

    # Forward check (Shape and Values)
    assert forward_out_keras.shape == forward_out_numpynet.shape
    assert np.allclose(forward_out_keras, forward_out_numpynet)

    # BACKWARD

    # Gradient computation (Analytical)
    grad = K.gradients(model.output, [model.input])

    # Define a function to compute the gradient numerically
    func = K.function(model.inputs + [model.output], grad)

    # Keras delta
    keras_delta = func([inpt])[0]  # It returns a list with one array inside.

    # numpynet delta init.
    numpynet.delta = np.ones(shape=inpt.shape, dtype=float)

    # Global delta init.
    delta = np.empty(shape=inpt.shape, dtype=float)

    # numpynet Backward
    numpynet.backward(delta)

    # Check dimension and delta
    assert keras_delta.shape == delta.shape
    assert np.allclose(keras_delta, delta)
Example #8
    def test_forward(self, b, w, h, c):

        inpt = np.random.uniform(low=-1, high=1.,
                                 size=(b, w, h, c)).astype(float)

        # numpynet model init
        layer = Input_layer(input_shape=inpt.shape)

        # Keras Model init
        model = tf.keras.layers.InputLayer(input_shape=(w, h, c))

        # FORWARD

        # Keras Forward
        forward_out_keras = model(inpt)

        # numpynet forward
        layer.forward(inpt=inpt)
        forward_out_numpynet = layer.output

        # Forward check (Shape and Values)
        assert forward_out_keras.shape == forward_out_numpynet.shape
        np.testing.assert_allclose(forward_out_keras, forward_out_numpynet)