Example #1
  def test_constructor(self):

    here = os.path.dirname(__file__)
    filename = os.path.join(here, '..', 'cfg', 'yolov3.cfg')

    # a valid darknet-style cfg is parsed into its list of sections
    cfg = net_config(filename)
    assert len(cfg) == 108

    print(cfg)

    # an empty / non-existent filename must raise an IOError
    with pytest.raises(IOError):
      filename = ''
      cfg = net_config(filename)
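
A rough companion sketch (not part of the test suite): it writes a minimal darknet-style INI config to a temporary file and parses it with the same net_config constructor. Everything except net_config itself is illustrative, and the indexed section name 'net0' follows the naming visible in the other examples.

import os
import tempfile

# minimal darknet-style cfg: a [net] header followed by a single layer section
minimal_cfg = '''
[net]
batch=1
width=416
height=416
channels=3

[convolutional]
filters=32
size=3
stride=1
pad=1
activation=leaky
'''

with tempfile.NamedTemporaryFile(mode='w', suffix='.cfg', delete=False) as tmp:
  tmp.write(minimal_cfg)
  tmp_filename = tmp.name

cfg = net_config(tmp_filename)         # same constructor used in the tests above
print(len(cfg))                        # number of parsed sections
print(cfg.get('net0', 'batch', 1))     # sections appear to be indexed by occurrence, e.g. 'net0'

os.remove(tmp_filename)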
Example #2
  def load(self, cfg_filename, weights=None):
    '''
    Load network model from config file in INI format
    '''

    model = net_config(cfg_filename)

    self.batch = model.get('net1', 'batch', 1)
    self.w = model.get('net1', 'width', 416)
    self.h = model.get('net1', 'height', 416)
    self.c = model.get('net1', 'channels', 3)
    # TODO: add other network parameters

    input_shape = (self.batch, self.w, self.h, self.c)
    self._net = [ Input_layer(input_shape=input_shape) ]

    print('layer     filters    size              input                output')

    for i, layer in enumerate(model):
      layer_t = re.split(r'\d+', layer)[0]
      params = dict(model.get_params(layer))

      layer_params = {}
      for k, v in params.items():
        try:
          val = eval(v)
        except NameError:
          val = v
        except:
          raise DataVariableError('Type variable not recognized! Possible variables are only [int, float, string, vector<float>].')

        layer_params[k] = val

      if layer_t == 'shortcut':
        _from = model.get(layer, 'from', 0)
        self._net.append( self.LAYERS[layer_t](input_shape=input_shape, **layer_params)([self._net[-1], self._net[_from]]) )

      elif layer_t == 'route':
        _layers = model.get(layer, 'layers', [])
        self._net.append( self.LAYERS[layer_t](input_shape=input_shape, **layer_params)(self._net[_layers]) )

      else:
        self._net.append( self.LAYERS[layer_t](input_shape=input_shape, **layer_params)(self._net[-1]) )

      print('{:>4d} {}'.format(i, self._net[-1]), flush=True, end='\n')

      #if model.get(layer, 'batch_normalize', 0): # wrong because it adds a new layer and so the shortcut is broken
      #  self._net.append( BatchNorm_layer()(self._net[-1]) )
      #  print('{:>4d} {}'.format(i, self._net[-1]), flush=True, end='\n')

    if weights is not None:
      self.load_weights(weights)

    return self
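
A hedged usage sketch of the load() above; 'Network' is only a stand-in for whatever class defines these methods and the file names are illustrative:

net = Network()
net = net.load('cfg/yolov3.cfg')                              # build the layer graph from the cfg
# net = net.load('cfg/yolov3.cfg', weights='yolov3.weights')  # optionally load pretrained weights too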
Example #3
    def load(self, cfg_filename, weights=None):
        '''
        Load network model from config file in INI format
        '''

        model = net_config(cfg_filename)

        self.batch = model.get('net0', 'batch', 1)
        self.w = model.get('net0', 'width', 416)
        self.h = model.get('net0', 'height', 416)
        self.c = model.get('net0', 'channels', 3)
        # TODO: add other network parameters

        input_shape = (self.batch, self.w, self.h, self.c)
        self._net = [Input_layer(input_shape=input_shape)]

        print(
            'layer     filters    size              input                output'
        )

        for i, layer in enumerate(model):
            layer_t = re.split(r'\d+', layer)[0]
            params = model.get_params(layer)

            if layer_t == 'shortcut':
                _from = model.get(layer, 'from', 0)
                self._net.append(self.LAYERS[layer_t](
                    input_shape=input_shape,
                    **params)([self._net[-1], self._net[_from]]))

            elif layer_t == 'route':
                _layers = model.get(layer, 'layers', [])
                self._net.append(self.LAYERS[layer_t](input_shape=input_shape,
                                                      **params)(
                                                          self._net[_layers]))

            else:
                self._net.append(self.LAYERS[layer_t](input_shape=input_shape,
                                                      **params)(self._net[-1]))

            input_shape = self._net[-1].out_shape

            print('{:>4d} {}'.format(i, self._net[-1]), end='\n')  # flush=True
            sys.stdout.flush()  # compatibility with Python 2.7

            # if model.get(layer, 'batch_normalize', 0): # wrong because it adds a new layer and so the shortcut is broken
            #   self._net.append( BatchNorm_layer()(self._net[-1]) )
            #   print('{:>4d} {}'.format(i, self._net[-1]), flush=True, end='\n')

        if weights is not None:
            self.load_weights(weights)

        return self
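
The self.LAYERS table used by both load() variants is not shown in these snippets. A plausible sketch, assuming a plain dict keyed by the de-indexed section names, with class names that follow the *_layer convention already visible (Input_layer, BatchNorm_layer) but are otherwise assumptions:

# hypothetical sketch: only the keys are grounded in the cfg sections used above,
# the class names are assumptions following the *_layer naming convention
LAYERS = {
    'convolutional': Convolutional_layer,
    'maxpool': Maxpool_layer,
    'shortcut': Shortcut_layer,
    'route': Route_layer,
    'upsample': Upsample_layer,
    'yolo': Yolo_layer,
}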
Example #4
  def test_getter(self):

    here = os.path.dirname(__file__)
    filename = os.path.join(here, '..', 'cfg', 'yolov3.cfg')

    cfg = net_config(filename)

    # asking for a section that does not exist ('net' instead of the indexed 'net0')
    # must raise a DataVariableError
    with pytest.raises(DataVariableError):
      res = cfg.get('net', 'batch', 42)
      assert res == 42

    # values present in the cfg file win over the provided default
    assert cfg.get('net0', 'batch', 42) == 1
    assert cfg.get('convolutional1', 'stride', 3) == 1
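
As the batch_normalize lookups in Examples #2 and #3 suggest, the third argument of get() also works as a plain fallback when a key is absent from an existing section; a hedged one-liner with a deliberately hypothetical key name:

value = cfg.get('convolutional1', 'some_missing_key', 0)  # hypothetical key; the default 0 is returned when the key is absent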