def __call__(self, previous_layer):

  self._out_shape = [0, 0, 0, 0]

  if self.axis:  # concatenate by channels

    for prev in previous_layer:
      if prev.out_shape is None:
        class_name = self.__class__.__name__
        prev_name = prev.__class__.__name__
        raise LayerError('Incorrect shapes found. Layer {} cannot be connected to the previous {} layer.'.format(class_name, prev_name))

      c = prev.out_shape[3]
      self._out_shape[3] += c
      self._out_shape[0:3] = prev.out_shape[0:3]

  else:  # concatenate by batch

    for prev in previous_layer:
      if prev.out_shape is None:
        class_name = self.__class__.__name__
        prev_name = prev.__class__.__name__
        raise LayerError('Incorrect shapes found. Layer {} cannot be connected to the previous {} layer.'.format(class_name, prev_name))

      b = prev.out_shape[0]
      self._out_shape[0] += b
      self._out_shape[1:4] = prev.out_shape[1:4]

  self._out_shape = tuple(self._out_shape)

  return self
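# A minimal sketch (not part of the library) of how the two concatenation modes
# above combine output shapes. The _Stub class is a hypothetical stand-in that
# only exposes the out_shape attribute used by __call__.


class _Stub:
  def __init__(self, out_shape):
    self.out_shape = out_shape


def _concat_shape(shapes, axis):
  out = [0, 0, 0, 0]
  for prev in (_Stub(s) for s in shapes):
    if axis:                              # stack along the channel axis
      out[3] += prev.out_shape[3]
      out[0:3] = prev.out_shape[0:3]
    else:                                 # stack along the batch axis
      out[0] += prev.out_shape[0]
      out[1:4] = prev.out_shape[1:4]
  return tuple(out)


print(_concat_shape([(8, 32, 32, 3), (8, 32, 32, 3)], axis=True))   # (8, 32, 32, 6)
print(_concat_shape([(8, 32, 32, 3), (8, 32, 32, 3)], axis=False))  # (16, 32, 32, 3)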
def __call__(self, previous_layer):

  if previous_layer.out_shape is None:
    class_name = self.__class__.__name__
    prev_name = previous_layer.__class__.__name__
    raise LayerError('Incorrect shapes found. Layer {} cannot be connected to the previous {} layer.'.format(class_name, prev_name))

  b, w, h, c = previous_layer.out_shape

  if b < self.steps:
    class_name = self.__class__.__name__
    prev_name = previous_layer.__class__.__name__
    raise LayerError('Incorrect steps found. Layer {} cannot be connected to the previous {} layer.'.format(class_name, prev_name))

  self.batch = b // self.steps
  self.input_shape = (self.batch, w, h, c)

  # split the global batch of b samples into "steps" contiguous chunks of "batch" indexes each
  indices = np.arange(0, b, dtype='int64')
  self.batches = np.lib.stride_tricks.as_strided(indices,
                                                 shape=(self.steps, self.batch),
                                                 strides=(self.batch * 8, 8)).copy()

  self.input_layer  = Connected_layer(self.outputs, self.activation,
                                      input_shape=(self.batches[-1][-1] + 1, w, h, c))
  self.self_layer   = Connected_layer(self.outputs, self.activation)(self.input_layer)
  self.output_layer = Connected_layer(self.outputs, self.activation)(self.self_layer)

  self.state = np.zeros(shape=(self.batch, w, h, self.outputs), dtype=float)

  self.output, self.delta = (None, None)

  return self
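# Minimal sketch of the index trick used above: with b = 12 samples and steps = 3,
# as_strided just views the flat index array as 3 contiguous chunks of 4 samples,
# i.e. the same result as a plain reshape.
import numpy as np

b, steps = 12, 3
batch = b // steps
indices = np.arange(0, b, dtype='int64')
batches = np.lib.stride_tricks.as_strided(indices,
                                          shape=(steps, batch),
                                          strides=(batch * indices.itemsize, indices.itemsize)).copy()
print(batches)
# [[ 0  1  2  3]
#  [ 4  5  6  7]
#  [ 8  9 10 11]]
print(np.array_equal(batches, indices.reshape(steps, batch)))  # True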
def __init__(self, size, stride=None, pad=False, input_shape=None, **kwargs):
  '''
  Avgpool layer

  Parameters
  ----------
    size : tuple with two integers (kx, ky) or integer, size of the kernel to be
      slided over the input image.
    stride : tuple of two integers, default None. Represents the horizontal and
      vertical stride of the kernel. If None or 0, stride is assigned the values of size.
    input_shape : tuple of 4 integers: input shape of the layer.
    pad : boolean, default False. If False the image is cut to fit the size and
      stride dimensions, if True the image is padded following keras SAME padding,
      as indicated here:
      https://stackoverflow.com/questions/53819528/how-does-tf-keras-layers-conv2d-with-padding-same-and-strides-1-behave
  '''

  self.size = size
  if not hasattr(self.size, '__iter__'):
    self.size = (int(self.size), int(self.size))

  if self.size[0] <= 0. or self.size[1] <= 0.:
    raise LayerError('Avgpool layer. Incompatible size dimensions. They must be both > 0')

  if not stride:
    self.stride = size
  else:
    self.stride = stride

  if not hasattr(self.stride, '__iter__'):
    self.stride = (int(self.stride), int(self.stride))

  if len(self.size) != 2 or len(self.stride) != 2:
    raise LayerError('Avgpool layer. Incompatible stride/size dimensions. They must be a 1D-2D tuple of values')

  # for padding
  self.pad = pad
  self.pad_left, self.pad_right, self.pad_bottom, self.pad_top = (0, 0, 0, 0)

  super(Avgpool_layer, self).__init__(input_shape=input_shape)
  self._build(input_shape)
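# A minimal usage sketch of the Avgpool layer. The import path and the
# forward(inpt) interface are assumptions, taken from the test snippet at the
# end of this section rather than from this constructor.
import numpy as np
# from NumPyNet.layers.avgpool_layer import Avgpool_layer  # hypothetical import path

inpt = np.random.uniform(size=(1, 32, 32, 3))

layer = Avgpool_layer(size=2, stride=2, pad=False, input_shape=inpt.shape)
layer.forward(inpt)
print(layer.out_shape)  # expected (1, 16, 16, 3) with size=2, stride=2 and no padding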
def __call__(self, previous_layer):

  if previous_layer.out_shape is None:
    class_name = self.__class__.__name__
    prev_name = previous_layer.__class__.__name__
    raise LayerError('Incorrect shapes found. Layer {} cannot be connected to the previous {} layer.'.format(class_name, prev_name))

  b, w, h, c = previous_layer.out_shape

  if b < self.steps:
    class_name = self.__class__.__name__
    prev_name = previous_layer.__class__.__name__
    raise LayerError('Incorrect steps found. Layer {} cannot be connected to the previous {} layer.'.format(class_name, prev_name))

  self.batch = b // self.steps
  self.input_shape = (self.batch, w, h, c)

  # overlapping windows of "steps" consecutive indexes, one window per column
  indices = np.arange(0, b, dtype='int64')
  self.batches = np.lib.stride_tricks.as_strided(indices,
                                                 shape=(b - self.steps + 1, self.steps),
                                                 strides=(8, 8)).T.copy()

  initial_shape = (self.batch, w, h, c)

  # connected layers applied to the input (forget, input, candidate and output gates)
  self.uf = Connected_layer(self.outputs, activation='Linear', input_shape=initial_shape)
  self.ui = Connected_layer(self.outputs, activation='Linear', input_shape=initial_shape)
  self.ug = Connected_layer(self.outputs, activation='Linear', input_shape=initial_shape)
  self.uo = Connected_layer(self.outputs, activation='Linear', input_shape=initial_shape)

  # connected layers applied to the recurrent state
  self.wf = Connected_layer(self.outputs, activation='Linear', input_shape=(self.batch, 1, 1, self.outputs))
  self.wi = Connected_layer(self.outputs, activation='Linear', input_shape=(self.batch, 1, 1, self.outputs))
  self.wg = Connected_layer(self.outputs, activation='Linear', input_shape=(self.batch, 1, 1, self.outputs))
  self.wo = Connected_layer(self.outputs, activation='Linear', input_shape=(self.batch, 1, 1, self.outputs))

  self.uf.input_shape = (b, w, h, c)
  self.ui.input_shape = (b, w, h, c)
  self.ug.input_shape = (b, w, h, c)
  self.uo.input_shape = (b, w, h, c)

  self.wf.input_shape = (b, w, h, self.outputs)
  self.wi.input_shape = (b, w, h, self.outputs)
  self.wg.input_shape = (b, w, h, self.outputs)
  self.wo.input_shape = (b, w, h, self.outputs)

  self.state  = np.zeros(shape=(self.batch, w, h, self.outputs), dtype=float)
  self.output = np.empty(shape=self.uf.out_shape, dtype=float)
  self.cell   = np.empty(shape=self.uf.out_shape, dtype=float)

  self.delta = None
  self.optimizer = None

  return self
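# Minimal sketch of the overlapping-window index trick above: with b = 6 samples
# and steps = 3, each column of "batches" is a window of 3 consecutive indexes
# into the global batch.
import numpy as np

b, steps = 6, 3
indices = np.arange(0, b, dtype='int64')
batches = np.lib.stride_tricks.as_strided(indices,
                                          shape=(b - steps + 1, steps),
                                          strides=(8, 8)).T.copy()
print(batches)
# [[0 1 2 3]
#  [1 2 3 4]
#  [2 3 4 5]]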
def add(self, layer):
  '''
  Add a new layer to the network model.
  Layers are progressively appended to the tail of the model.

  Parameters
  ----------
    layer : Layer object
      Layer object to append to the current architecture

  Returns
  -------
    self

  Notes
  -----
  .. note::
    If the architecture is empty a default InputLayer is used to start the model.

  .. warning::
    The input layer type must be one of the types stored into the LAYERS dict,
    otherwise a LayerError is raised.
  '''
  try:
    type_layer = layer.__class__.__name__.lower().split('_layer')[0]

  except AttributeError:
    raise LayerError('Incorrect Layer type found. Given {}'.format(layer.__class__.__name__))

  if type_layer not in self.LAYERS.keys():
    raise LayerError('Incorrect Layer type found.')

  if type_layer == 'input':
    self._net.append(layer)

  elif type_layer == 'route':
    prev_layers = []
    for idx in layer.input_layers:
      prev_layers.append(self._net[idx])  # the route layer needs the previous layers' info to build itself

    self._net.append(layer(prev_layers))

  else:
    self._net.append(layer(self._net[-1]))

  return self
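# A minimal usage sketch of add(): layers are appended one after the other and,
# since add() returns self, calls can be chained. The Network constructor
# arguments below are assumptions, not shown in this section.
#
# model = Network(batch=16, input_shape=(32, 32, 3))          # hypothetical constructor
# model.add(Convolutional_layer(filters=8, size=3, pad=True, activation='Relu'))
# model.add(Avgpool_layer(size=2, stride=2))
#
# equivalent chained form:
# model.add(layer_a).add(layer_b)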
def __init__(self, input_shape=None, stride=(2, 2), scale=1., **kwargs):
  '''
  Upsample / Downsample layer

  Parameters
  ----------
    input_shape : tuple of 4 integers: input shape of the layer.
    stride : scaling factor of the input; repeats the rows and columns of the data
      by stride[0] and stride[1] respectively.
    scale : floating point scale factor of the input
  '''

  self.scale = float(scale)
  self.stride = stride

  if not hasattr(self.stride, '__iter__'):
    self.stride = (int(stride), int(stride))

  if len(self.stride) != 2:
    raise LayerError('Upsample layer. Incompatible stride dimensions. It must be a 1D-2D tuple of values')

  if self.stride[0] < 0 and self.stride[1] < 0:  # negative stride means downsample
    self.stride = (-self.stride[0], -self.stride[1])
    self.reverse = True

  elif self.stride[0] > 0 and self.stride[1] > 0:  # positive stride means upsample
    self.reverse = False

  else:
    raise NotImplementedError('Mixed upsample/downsample strides are not yet implemented')

  super(Upsample_layer, self).__init__(input_shape=input_shape)
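# Minimal sketch of the stride-sign convention above: a positive stride upsamples,
# a negative stride downsamples, and the sign is stripped once "reverse" has been
# set. The import path is an assumption.
# from NumPyNet.layers.upsample_layer import Upsample_layer  # hypothetical import path

up = Upsample_layer(stride=(2, 2), input_shape=(1, 16, 16, 3))
print(up.stride, up.reverse)      # (2, 2) False -> 16x16 is scaled up

down = Upsample_layer(stride=-2, input_shape=(1, 16, 16, 3))
print(down.stride, down.reverse)  # (2, 2) True  -> 16x16 is scaled down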
def _stride_index(self, shape1, shape2):
  '''
  Evaluate the strided indexes if the input shapes are different
  '''
  _, w2, h2, c2 = shape1
  _, w1, h1, c1 = shape2

  stride = w1 // w2
  sample = w2 // w1

  stride = stride if stride > 0 else 1
  sample = sample if sample > 0 else 1

  if not (stride == h1 // h2 and sample == h2 // h1):
    class_name = self.__class__.__name__
    raise LayerError('Incorrect shapes found. Layer {} cannot be connected to layers with shapes {} and {}.'.format(class_name, shape1, shape2))

  idx = product(range(0, w2, sample), range(0, h2, sample), range(0, c2, sample))
  self.ix, self.jx, self.kx = zip(*idx)

  idx = product(range(0, w1, stride), range(0, h1, stride), range(0, c1, stride))
  self.iy, self.jy, self.ky = zip(*idx)
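# Minimal sketch of the index bookkeeping above: when the two inputs of the
# shortcut have different spatial sizes, the larger one is stepped over with the
# size ratio, so both index sets have the same length and can be combined
# element-wise. All names below are local to this sketch.
from itertools import product

shape1 = (1, 4, 4, 2)   # this layer's input
shape2 = (1, 8, 8, 2)   # the other connected layer

_, w2, h2, c2 = shape1
_, w1, h1, c1 = shape2
stride = max(w1 // w2, 1)   # 2 : step over the larger input
sample = max(w2 // w1, 1)   # 1 : step over the smaller input

ix, jx, kx = zip(*product(range(0, w2, sample), range(0, h2, sample), range(0, c2, sample)))
iy, jy, ky = zip(*product(range(0, w1, stride), range(0, h1, stride), range(0, c1, stride)))
print(len(ix), len(iy))     # 32 32 : same number of indexes on both sides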
def __call__(self, previous_layer):

  if previous_layer.out_shape is None:
    class_name = self.__class__.__name__
    prev_name = previous_layer.__class__.__name__
    raise LayerError('Incorrect shapes found. Layer {} cannot be connected to the previous {} layer.'.format(class_name, prev_name))

  self._out_shape = previous_layer.out_shape

  return self
def __call__(self, previous_layer):

  for prev in previous_layer:
    if prev.out_shape is None:
      class_name = self.__class__.__name__
      prev_name = prev.__class__.__name__
      raise LayerError('Incorrect shapes found. Layer {0} cannot be connected to the previous {1} layer.'.format(class_name, prev_name))

  self._build(previous_layer)

  return self
def __call__(self, previous_layer):

  prev1, prev2 = previous_layer

  if prev1.out_shape is None or prev2.out_shape is None:
    class_name = self.__class__.__name__
    prev_names = (prev1.__class__.__name__, prev2.__class__.__name__)
    raise LayerError('Incorrect shapes found. Layer {} cannot be connected to the previous {} layers.'.format(class_name, prev_names))

  self._out_shape = [prev1.out_shape, prev2.out_shape]

  self._stride_index(prev1.out_shape, prev2.out_shape)

  return self
def __call__(self, previous_layer):

  if previous_layer.out_shape is None:
    class_name = self.__class__.__name__
    prev_name = previous_layer.__class__.__name__
    raise LayerError('Incorrect shapes found. Layer {} cannot be connected to the previous {} layer.'.format(class_name, prev_name))

  self.batch, self.w, self.h, self.c = previous_layer.out_shape

  if self.pad:
    self._evaluate_padding()

  return self
def __call__(self, previous_layer):
  '''
  Overload operator ()
  '''
  if previous_layer.out_shape is None:
    class_name = self.__class__.__name__
    prev_name = previous_layer.__class__.__name__
    raise LayerError('Incorrect shapes found. Layer {0} cannot be connected to the previous {1} layer.'.format(class_name, prev_name))

  self.input_shape = previous_layer.out_shape
  self._build()

  return self
def __call__(self, previous_layer):

  if previous_layer.out_shape is None:
    class_name = self.__class__.__name__
    prev_name = previous_layer.__class__.__name__
    raise LayerError('Incorrect shapes found. Layer {} cannot be connected to the previous {} layer.'.format(class_name, prev_name))

  self.batch, self.w, self.h, self.c = previous_layer.out_shape

  if self.pad:
    self._evaluate_padding()

  # output spatial size of the pooling: out = 1 + (in + pad - kernel) // stride
  self.out_w = 1 + (self.w + self.pad_top + self.pad_bottom - self.size[0]) // self.stride[0]
  self.out_h = 1 + (self.h + self.pad_left + self.pad_right - self.size[1]) // self.stride[1]

  return self
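# Worked example of the output-size formula above: a 32x32 input, a 3x3 kernel,
# stride 2 and no padding gives 1 + (32 + 0 - 3) // 2 = 15 on each side.
w, size, stride, pad = 32, 3, 2, 0
out_w = 1 + (w + pad - size) // stride
print(out_w)  # 15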
def __init__(self, filters, size, stride=None, input_shape=None,
             weights=None, bias=None, pad=False,
             activation=Activations, **kwargs):
  '''
  Convolution Layer: the output is the convolution of the input images with a
  group of kernels of shape size = (kx, ky), slided with step stride.

  Parameters
  ----------
    filters : integer. Number of filters to be slided over the input, and also
      the number of channels of the output (channels_out)
    size : tuple of int, size of the kernel of shape (kx, ky).
    stride : tuple of int, default None. Step of the kernel, with shape (st1, st2).
      If None, stride is assigned size values.
    input_shape : tuple, default None. Shape of the input in the format
      (batch, w, h, c), None is used when the layer is part of a Network model.
    weights : numpy array, default None. Filters of the convolutional layer,
      with shape (kx, ky, channels_in, filters). If None, random weights are initialized.
    bias : numpy array, default None. Bias of the convolutional layer.
      If None, bias init is random with shape (filters, )
    pad : boolean, default False. If False the image is cut along the last rows
      and columns, if True the input is padded following keras SAME padding
    activation : activation function of the layer
  '''

  if isinstance(filters, int) and filters > 0:
    self.channels_out = filters
  else:
    raise ValueError('Parameter "filters" must be an integer and > 0')

  self.size = size
  if not hasattr(self.size, '__iter__'):
    self.size = (int(self.size), int(self.size))

  if self.size[0] <= 0. or self.size[1] <= 0.:
    raise LayerError('Convolutional layer. Incompatible size values. They must be both > 0')

  if not stride:
    self.stride = size
  else:
    self.stride = stride

  if not hasattr(self.stride, '__iter__'):
    self.stride = (int(self.stride), int(self.stride))

  if self.stride[0] <= 0. or self.stride[1] <= 0.:
    raise LayerError('Convolutional layer. Incompatible stride values. They must be both > 0')

  if len(self.size) != 2 or len(self.stride) != 2:
    raise LayerError('Convolutional layer. Incompatible stride/size dimensions. They must be a 1D-2D tuple of values')

  # Weights and bias
  self.weights = weights
  self.bias = bias

  # Activation function
  activation = _check_activation(self, activation)
  self.activation = activation.activate
  self.gradient = activation.gradient

  # Padding
  self.pad = pad
  self.pad_left, self.pad_right, self.pad_bottom, self.pad_top = (0, 0, 0, 0)

  # Output, Delta and Updates
  self.weights_update = None
  self.bias_update = None
  self.optimizer = None

  if input_shape is not None:
    super(Convolutional_layer, self).__init__(input_shape=input_shape)
    self._build()
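# A minimal usage sketch of the Convolutional layer. The import path and the
# forward(inpt) interface are assumptions, taken from the test snippet at the
# end of this section rather than from this constructor.
import numpy as np
# from NumPyNet.layers.convolutional_layer import Convolutional_layer  # hypothetical import path

inpt = np.random.uniform(size=(1, 32, 32, 3))

layer = Convolutional_layer(filters=16, size=(3, 3), stride=(1, 1),
                            pad=True, activation='Relu',
                            input_shape=inpt.shape)
layer.forward(inpt)
print(layer.out_shape)  # expected (1, 32, 32, 16) with SAME padding and unit stride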
def __init__(self, outputs, steps, activation=Activations,
             input_shape=None, weights=None, bias=None, **kwargs):
  '''
  RNN layer

  Parameters
  ----------
    outputs : integer, number of outputs of the layer
    steps : integer, number of mini-batch/steps to perform.
    activation : activation function of the layer
    input_shape : tuple, default None. Shape of the input in the format
      (batch, w, h, c), None is used when the layer is part of a Network model.
    weights : tuple of three arrays of shape (w * h * c, outputs), default None.
      Weights of the three connected layers of the RNN cell. If None, weights init is random.
    bias : tuple of three arrays of shape (outputs, ), default None.
      Biases of the three connected layers of the RNN cell. If None, bias init is random.
  '''

  if isinstance(outputs, int) and outputs > 0:
    self.outputs = outputs
  else:
    raise ValueError('Parameter "outputs" must be an integer and > 0')

  if isinstance(steps, int) and steps > 0:
    self.steps = steps
  else:
    raise ValueError('Parameter "steps" must be an integer and > 0')

  if weights is None:
    weights = (None, None, None)
  else:
    if np.shape(weights)[0] != 3:
      raise ValueError('Wrong number of init "weights". There are 3 connected layers into the RNN cell.')

  if bias is None:
    bias = (None, None, None)
  else:
    if np.shape(bias)[0] != 3:
      raise ValueError('Wrong number of init "biases". There are 3 connected layers into the RNN cell.')

  self.activation = _check_activation(self, activation)

  # if the input shape is passed, the inner layers are built here,
  # otherwise the build is deferred to __call__
  if input_shape is not None:

    b, w, h, c = input_shape

    if b < self.steps:
      class_name = self.__class__.__name__
      raise LayerError('Incorrect steps found. Layer {} cannot be built: the batch size ({}) must be >= the number of steps ({}).'.format(class_name, b, self.steps))

    self.batch = b // self.steps
    self.input_shape = (self.batch, w, h, c)

    indices = np.arange(0, b, dtype='int64')
    self.batches = np.lib.stride_tricks.as_strided(indices,
                                                   shape=(self.steps, self.batch),
                                                   strides=(self.batch * 8, 8)).copy()

    self.input_layer = Connected_layer(self.outputs, self.activation,
                                       input_shape=(self.batches[-1][-1] + 1, w, h, c),
                                       weights=weights[0], bias=bias[0])
    self.self_layer = Connected_layer(self.outputs, self.activation,
                                      weights=weights[1], bias=bias[1])(self.input_layer)
    self.output_layer = Connected_layer(self.outputs, self.activation,
                                        weights=weights[2], bias=bias[2])(self.self_layer)

    self.state = np.zeros(shape=(self.batch, w, h, self.outputs), dtype=float)

  else:
    self.batch = None
    self.input_shape = None
    self.batches = None

    self.input_layer = None
    self.self_layer = None
    self.output_layer = None

    self.state = None

  self.prev_state = None

  self.output, self.delta = (None, None)
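# A minimal usage sketch of the construction above: with a global batch of 32
# samples and steps=8, the layer works on sub-batches of 4 samples per step.
# The class name RNN_layer is an assumption for the class this __init__ belongs to.
layer = RNN_layer(outputs=16, steps=8, activation='Tanh', input_shape=(32, 1, 1, 10))
print(layer.batch)           # 4
print(layer.batches.shape)   # (8, 4) : one row of sample indexes per step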
# FORWARD

layer.forward(inpt)
forward_out = layer.output

print(layer)

# BACKWARD

layer.delta = layer.output.copy()
delta = np.ones(inpt.shape)
layer.backward(delta)

if not np.allclose(delta, inpt):
  raise LayerError('Shuffler layer. Wrong backward results')

# Visualizations

fig, (ax1, ax2, ax3) = plt.subplots(nrows=1, ncols=3, figsize=(10, 5))
fig.subplots_adjust(left=0.1, right=0.95, top=0.95, bottom=0.15)

fig.suptitle('Shuffler Layer\nscale : {}'.format(scale))

ax1.imshow(inpt[0, :, :, 0])
ax1.set_title('Original Image')
ax1.axis('off')

ax2.imshow(forward_out[0, :, :, 0])
ax2.set_title('Forward')
ax2.axis('off')

ax3.imshow(delta[0, :, :, 0])
ax3.set_title('Backward')
ax3.axis('off')

plt.show()