Example #1
from keras.layers import BatchNormalization


class FrozenBatchNormalization(BatchNormalization):
    """BatchNormalization that stays in inference mode while frozen.

    The class name is an assumption; the source only shows the call() override.
    """

    def call(self, inputs, training=None):
        if self.trainable:
            return BatchNormalization.call(self, inputs=inputs, training=training)
        # Frozen: force inference mode so the moving statistics are used.
        return BatchNormalization.call(self, inputs=inputs, training=False)
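A quick illustration of the effect, assuming the subclass name above (the surrounding model is just scaffolding):

```
from keras.layers import Input, Dense
from keras.models import Model

inputs = Input(shape=(8,))
bn = FrozenBatchNormalization()
model = Model(inputs, Dense(4)(bn(inputs)))

# After freezing, call() forces training=False, so the layer keeps using
# (and stops updating) its moving mean and variance during fine-tuning.
bn.trainable = False
```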
Example #2
from keras import backend as K
from keras.layers import Layer, Conv2D, BatchNormalization


class Convolutional(Layer):
    """
    Darknet "convolutional" layer.

    Differs from Keras' Conv2D layer in that it has optional batch
    normalization built in.
    """
    def __init__(self, filters=1, size=1, stride=1,
                 batch_normalize=0, pad=0, activation="linear", **params):
        super(Convolutional, self).__init__(**params)
        data_format = K.image_data_format()
        if data_format not in {'channels_last', 'channels_first'}:
            raise ValueError('data_format must be in '
                             '{"channels_last", "channels_first"}')
        self.data_format = data_format
        axis = -1 if self.data_format == 'channels_last' else 1
        
        self.batch_normalize = batch_normalize
        self.padding = "same" if pad else "valid"
        self.filters = filters
        self.kernel_size = size if isinstance(size, tuple) else (size, size)
        self.strides = stride
        self.pad = pad
        
        if self.batch_normalize:
            # Don't use biases in the conv layer; batchnorm provides them.
            self.convolutional_layer = Conv2D(
                filters=self.filters,
                kernel_size=self.kernel_size,
                strides=self.strides,
                padding=self.padding,
                use_bias=False)
            self.batchnorm_layer = BatchNormalization(
                axis=axis,
                center=True,
                scale=True,
                epsilon=1e-6)
        else:
            # No batchnorm, so the conv layer keeps its own biases.
            self.convolutional_layer = Conv2D(
                filters=self.filters,
                kernel_size=self.kernel_size,
                strides=self.strides,
                padding=self.padding)
        # get_activation is a project-local helper that maps Darknet
        # activation names to callables (not shown here).
        self.activation = get_activation(activation)

    def build(self, input_shape):
        super(Convolutional, self).build(input_shape)
        self.convolutional_layer.build(input_shape)
        output_shape = self.convolutional_layer.compute_output_shape(input_shape)

        if self.batch_normalize:
            self.batchnorm_layer.build(output_shape)

    def call(self, x, training=None):
        output = self.convolutional_layer.call(x)

        if self.batch_normalize:
            # Propagate the training flag so batch statistics
            # are only updated during training.
            output = self.batchnorm_layer.call(output, training=training)
        output = self.activation(output)
        return output
    
    def compute_output_shape(self, input_shape):
        # Batchnorm and activation don't change the shape,
        # so the conv layer's output shape is the block's output shape.
        return self.convolutional_layer.compute_output_shape(input_shape)

    def set_weights(self, weights_data):
        if self.batch_normalize:
            weights, scales, biases, rolling_mean, rolling_variance = weights_data
            self.convolutional_layer.set_weights([weights])
            # Keras BatchNormalization weight order:
            # gamma (scales), beta (biases), moving mean, moving variance.
            self.batchnorm_layer.set_weights(
                [scales, biases, rolling_mean, rolling_variance])
        else:
            self.convolutional_layer.set_weights(weights_data)
        
    def get_weights(self):
        if self.batch_normalize:
            return self.convolutional_layer.get_weights() + self.batchnorm_layer.get_weights()
        return self.convolutional_layer.get_weights()
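
A minimal usage sketch for this block, assuming the class above and a stub get_activation share one module (the shapes and the stub are illustrative only):

```
from keras.layers import Input
from keras.models import Model

def get_activation(name):          # stub for the project-local helper
    return lambda x: x             # "linear" activation

layer = Convolutional(filters=16, size=3, stride=1,
                      batch_normalize=1, pad=1)

inputs = Input(shape=(32, 32, 3))  # channels_last input
model = Model(inputs, layer(inputs))
print(model.output_shape)          # (None, 32, 32, 16): "same" padding keeps H and W
```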
Example #3
import numpy as np
from keras import backend as K
from keras.layers import Layer, Dense, BatchNormalization


class Connected(Layer):
    """
    Darknet "connected" layer. The main difference from the Keras Dense
    layer is that the input is flattened first.

    Equivalent to

    ```
    def get_connected(params):
        activation = get_activation(params.get('activation', "linear"))
        def _connected(x):
            y = Flatten()(x)
            return Dense(params.get('output', 1), activation=activation)(y)

        return Lambda(_connected)
    ```

    but, unlike the Lambda version, it exposes its weights.
    """
    def __init__(self, output=1, activation=None, batch_normalize=0, **kwargs):
        self.units = output
        self.batch_normalize = batch_normalize
        super(Connected, self).__init__(**kwargs)
        self.dense_layer = Dense(self.units, **kwargs)
        # TODO: axis check
        if self.batch_normalize:
            self.batchnorm_layer = BatchNormalization(scale=True, center=False)
        self.activation_layer = get_activation(activation)

    def build(self, input_shape):
        super(Connected, self).build(input_shape)
        # Dense sees the flattened input: (batch, product of remaining dims).
        densed_shape = (input_shape[0], np.prod(input_shape[1:]))
        self.dense_layer.build(densed_shape)
        if self.batch_normalize:
            densed_shape = self.dense_layer.compute_output_shape(densed_shape)
            self.batchnorm_layer.build(densed_shape)
        self.activation_layer.build(densed_shape)

    def call(self, x, training=None):
        flatten_inputs = K.batch_flatten(x)
        output = self.dense_layer.call(flatten_inputs)
        if self.batch_normalize:
            # Propagate the training flag so batch statistics
            # are only updated during training.
            output = self.batchnorm_layer.call(output, training=training)
        output = self.activation_layer.call(output)
        return output

    def compute_output_shape(self, input_shape):
        # Batchnorm and activation don't change the shape.
        dense_input_shape = (input_shape[0], np.prod(input_shape[1:]))
        return self.dense_layer.compute_output_shape(dense_input_shape)

    def set_weights(self, weights_data):
        if self.batch_normalize:
            weights, bias, scales, rolling_mean, rolling_variance = weights_data
            self.dense_layer.set_weights([weights, bias])
            # With center=False the batchnorm weights are:
            # gamma (scales), moving mean, moving variance.
            self.batchnorm_layer.set_weights(
                [scales, rolling_mean, rolling_variance])
        else:
            self.dense_layer.set_weights(weights_data)

    def get_weights(self):
        if self.batch_normalize:
            return self.dense_layer.get_weights(
            ) + self.batchnorm_layer.get_weights()
        return self.dense_layer.get_weights()
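
As with the previous example, a minimal usage sketch, assuming Connected and a stub get_activation share one module (the Activation stub and shapes are illustrative):

```
from keras.layers import Input, Activation
from keras.models import Model

def get_activation(name):              # stub for the project-local helper
    return Activation(name or "linear")

layer = Connected(output=10, activation="linear")

inputs = Input(shape=(4, 4, 8))        # any rank: Connected flattens to (batch, 128)
model = Model(inputs, layer(inputs))
print(model.output_shape)              # (None, 10)
```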