def make_spp_depthwise_separable_last_layers(x, num_filters, out_filters, block_id_str=None, predict_filters=None, predict_id='1'):
    '''6 Conv2D_BN_Leaky style layers with an SPP block in the middle, followed by a linear Conv2D prediction layer'''
    if not block_id_str:
        block_id_str = str(K.get_uid())
    x = compose(
            DarknetConv2D_BN_Leaky(num_filters, (1, 1)),
            Depthwise_Separable_Conv2D_BN_Leaky(filters=num_filters * 2, kernel_size=(3, 3), block_id_str=block_id_str + '_1'),
            DarknetConv2D_BN_Leaky(num_filters, (1, 1)))(x)
    x = Spp_Conv2D_BN_Leaky(x, num_filters)
    x = compose(
            Depthwise_Separable_Conv2D_BN_Leaky(filters=num_filters * 2, kernel_size=(3, 3), block_id_str=block_id_str + '_2'),
            DarknetConv2D_BN_Leaky(num_filters, (1, 1)))(x)

    if predict_filters is None:
        predict_filters = num_filters * 2
    y = compose(
            Depthwise_Separable_Conv2D_BN_Leaky(filters=predict_filters, kernel_size=(3, 3), block_id_str=block_id_str + '_3'),
            DarknetConv2D(out_filters, (1, 1), name='predict_conv_' + predict_id))(x)
    return x, y
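A minimal usage sketch for the SPP last-layers block above. The feature-map shape, anchor/class counts, and the assumption that the Darknet helpers it calls (DarknetConv2D_BN_Leaky, Spp_Conv2D_BN_Leaky, DarknetConv2D) are importable alongside it are illustrative choices, not taken from this file.

from tensorflow.keras.layers import Input

feature_map = Input(shape=(13, 13, 1024))       # hypothetical backbone output
num_anchors, num_classes = 3, 80                # hypothetical head configuration

# x: transition tensor (num_filters channels) for upsampling/merging with
#    shallower features; y: raw prediction tensor with
#    num_anchors * (num_classes + 5) channels.
x, y = make_spp_depthwise_separable_last_layers(
    feature_map,
    num_filters=512,
    out_filters=num_anchors * (num_classes + 5),
    block_id_str='spp_head',
    predict_id='1')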
def Depthwise_Conv2D_BN_Leaky(kernel_size=(3, 3), block_id_str=None):
    """Depthwise Convolution2D."""
    if not block_id_str:
        block_id_str = str(K.get_uid())
    return compose(
        DepthwiseConv2D(kernel_size, padding='same', name='conv_dw_' + block_id_str),
        BatchNormalization(name='conv_dw_%s_bn' % block_id_str),
        LeakyReLU(alpha=0.1, name='conv_dw_%s_leaky_relu' % block_id_str))
def __init__(self, **kwargs):
    super(SwitchLayer, self).__init__(**kwargs)
    # Start the switch at a fairly large value so the adjacent scales are
    # taken into account by default, but not so large that gradients stop
    # flowing through it.
    self.switch = self.add_weight(
        'switch_' + str(K.get_uid('switch')),
        shape=(),
        initializer=tf.constant_initializer(2))
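The layer's call() is not included in this snippet. Purely as an illustration of how such a scalar switch weight is typically consumed, here is a sketch that gates its input with a sigmoid of the switch; the class name and the gating formula are assumptions, not the confirmed behaviour of SwitchLayer.

import tensorflow as tf

class GateSketch(tf.keras.layers.Layer):
    # Assumption: only the weight creation mirrors the snippet above; the
    # sigmoid gating in call() is illustrative, not SwitchLayer's behaviour.
    def __init__(self, **kwargs):
        super(GateSketch, self).__init__(**kwargs)
        self.switch = self.add_weight(
            'switch_' + str(tf.keras.backend.get_uid('switch')),
            shape=(),
            initializer=tf.constant_initializer(2))

    def call(self, inputs):
        # sigmoid(2) ~ 0.88, so the gate starts mostly open yet stays trainable
        return tf.sigmoid(self.switch) * inputs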
def __init__(self, shape, my_initializer='RandomNormal', dtype=None, name=None, mult=1.0, **kwargs):
    # some input checking
    if not name:
        prefix = 'local_param'
        name = prefix + '_' + str(backend.get_uid(prefix))
    if not dtype:
        dtype = backend.floatx()

    self.shape = [1, *shape]
    self.my_initializer = my_initializer
    self.mult = mult

    Layer.__init__(self, name=name, **kwargs)

    # Create a trainable weight variable for this layer.
    with K.name_scope(self.name):
        self.kernel = self.add_weight(name='kernel',
                                      shape=shape,
                                      initializer=self.my_initializer,
                                      dtype=dtype,
                                      trainable=True)

    # Prepare the output tensor, which is essentially the (scaled) kernel.
    output_tensor = K.expand_dims(self.kernel, 0) * self.mult
    output_tensor._keras_shape = self.shape
    output_tensor._uses_learning_phase = False
    output_tensor._keras_history = base_layer.KerasHistory(self, 0, 0)
    output_tensor._batch_input_shape = self.shape

    self.trainable = True
    self.built = True
    self.is_placeholder = False

    # Create a new node so downstream layers can connect to this layer's output.
    Node(self,
         inbound_layers=[],
         node_indices=[],
         tensor_indices=[],
         input_tensors=[],
         output_tensors=[output_tensor],
         input_masks=[],
         output_masks=[None],
         input_shapes=[],
         output_shapes=self.shape)
def Depthwise_Separable_Conv2D_BN_Leaky(filters, kernel_size=(3, 3), block_id_str=None):
    """Depthwise Separable Convolution2D."""
    if not block_id_str:
        block_id_str = str(K.get_uid())
    return compose(
        YoloDepthwiseConv2D(kernel_size, padding='same', name='conv_dw_' + block_id_str),
        CustomBatchNormalization(name='conv_dw_%s_bn' % block_id_str),
        LeakyReLU(alpha=0.1, name='conv_dw_%s_leaky_relu' % block_id_str),
        YoloConv2D(filters, (1, 1), padding='same', use_bias=False, strides=(1, 1), name='conv_pw_%s' % block_id_str),
        CustomBatchNormalization(name='conv_pw_%s_bn' % block_id_str),
        LeakyReLU(alpha=0.1, name='conv_pw_%s_leaky_relu' % block_id_str))
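For intuition on what the separable variant buys: a quick parameter-count comparison (convolution weights only, ignoring BN and biases) between a plain KxK convolution and the depthwise KxK + pointwise 1x1 pair it replaces. The 512/1024 channel pairing is just an illustrative num_filters / num_filters*2 choice.

def conv_params(c_in, c_out, k=3):
    # plain KxK convolution: one KxK x c_in filter per output channel
    return k * k * c_in * c_out

def dw_separable_params(c_in, c_out, k=3):
    # depthwise KxK (one KxK filter per input channel) + pointwise 1x1
    return k * k * c_in + c_in * c_out

c_in, c_out = 512, 1024
print(conv_params(c_in, c_out))          # 4718592
print(dw_separable_params(c_in, c_out))  # 528896, roughly 8.9x fewer weights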
def make_yolo_depthwise_separable_head(x, num_filters, block_id_str=None):
    '''5 Conv2D_BN_Leaky style layers, with depthwise separable blocks for the 3x3 convolutions'''
    if not block_id_str:
        block_id_str = str(K.get_uid())
    x = compose(
            DarknetConv2D_BN_Leaky(num_filters, (1, 1)),
            Depthwise_Separable_Conv2D_BN_Leaky(filters=num_filters * 2, kernel_size=(3, 3), block_id_str=block_id_str + '_1'),
            DarknetConv2D_BN_Leaky(num_filters, (1, 1)),
            Depthwise_Separable_Conv2D_BN_Leaky(filters=num_filters * 2, kernel_size=(3, 3), block_id_str=block_id_str + '_2'),
            DarknetConv2D_BN_Leaky(num_filters, (1, 1)))(x)
    return x
def __init__(
    self,
    input_shape=None,
    batch_size=None,
    dtype=None,
    input_tensor=None,
    sparse=False,
    name=None,
    **kwargs,
):
    if "batch_input_shape" in kwargs:
        batch_input_shape = kwargs.pop("batch_input_shape")
        if input_shape and batch_input_shape:
            raise ValueError(
                "Only provide the input_shape OR "
                "batch_input_shape argument to "
                "InputLayer, not both at the same time."
            )
        batch_size = batch_input_shape[0]
        input_shape = batch_input_shape[1:]
    if kwargs:
        raise ValueError("Unrecognized keyword arguments:", kwargs.keys())

    if not name:
        prefix = "input"
        name = prefix + "_" + str(backend.get_uid(prefix))

    if batch_size is None:
        raise NotImplementedError()
    if input_tensor is not None:
        raise NotImplementedError()
    if dtype is not None:
        raise NotImplementedError()
    if sparse:
        raise NotImplementedError()

    super(InputLayer, self).__init__()
    self.built = True
    self.batch_size = batch_size

    if isinstance(input_shape, tensor_shape.TensorShape):
        input_shape = tuple(input_shape.as_list())
    elif isinstance(input_shape, int):
        input_shape = (input_shape,)

    if input_shape is not None:
        self._batch_input_shape = (batch_size,) + tuple(input_shape)
    else:
        raise ValueError("Input shape must be defined for the first layer.")

    # Create a graph placeholder to call the layer on.
    self.placeholder = tfe.define_private_placeholder(self._batch_input_shape)
def Darknet_Depthwise_Separable_Conv2D_BN_Leaky(filters, kernel_size=(3, 3), block_id_str=None, **kwargs):
    """Depthwise Separable Convolution2D."""
    if not block_id_str:
        block_id_str = str(K.get_uid())
    no_bias_kwargs = {'use_bias': False}
    no_bias_kwargs.update(kwargs)
    return compose(
        DarknetDepthwiseConv2D(kernel_size, name='conv_dw_' + block_id_str, **no_bias_kwargs),
        BatchNormalization(name='conv_dw_%s_bn' % block_id_str),
        LeakyReLU(alpha=0.1, name='conv_dw_%s_leaky_relu' % block_id_str),
        Conv2D(filters, (1, 1), padding='same', use_bias=False, strides=(1, 1), name='conv_pw_%s' % block_id_str),
        BatchNormalization(name='conv_pw_%s_bn' % block_id_str),
        LeakyReLU(alpha=0.1, name='conv_pw_%s_leaky_relu' % block_id_str))
def Darknet_Depthwise_Separable_Conv2D_BN_Swish(filters, kernel_size=(3, 3), block_id_str=None, **kwargs):
    """Depthwise Separable Convolution2D."""
    if not block_id_str:
        block_id_str = str(K.get_uid())
    no_bias_kwargs = {'use_bias': False}
    no_bias_kwargs.update(kwargs)
    return compose(
        DarknetDepthwiseConv2D(kernel_size, name='conv_dw_' + block_id_str, **no_bias_kwargs),
        CustomBatchNormalization(name='conv_dw_%s_bn' % block_id_str),
        Activation(swish, name='conv_dw_%s_swish' % block_id_str),
        YoloConv2D(filters, (1, 1), padding='same', use_bias=False, strides=(1, 1), name='conv_pw_%s' % block_id_str),
        CustomBatchNormalization(name='conv_pw_%s_bn' % block_id_str),
        Activation(swish, name='conv_pw_%s_swish' % block_id_str))
def __init__(self, input_shape=None, batch_input_shape=None, input_dtype=None, sparse=False, name=None):
    self.input_spec = None
    self.supports_masking = False
    self.uses_learning_phase = False
    self.trainable = False
    self.built = True

    self.inbound_nodes_list = []
    self.outbound_nodes_list = []
    self.trainable_weights_list = []
    self.non_trainable_weights_list = []
    self.constraints_dict = {}

    self.sparse = sparse

    if not name:
        prefix = 'input'
        name = prefix + '_' + str(K.get_uid(prefix))
    self.name_new = name

    if not batch_input_shape:
        assert input_shape, 'An Input layer should be passed either a `batch_input_shape` or an `input_shape`.'
        batch_input_shape = (None,) + tuple(input_shape)
    else:
        batch_input_shape = tuple(batch_input_shape)
    if not input_dtype:
        input_dtype = K.floatx()

    self.batch_input_shape = batch_input_shape
    self.input_dtype = input_dtype

    input_tensor = K.placeholder(shape=batch_input_shape,
                                 dtype=input_dtype,
                                 sparse=self.sparse,
                                 name=self.name_new)
    input_tensor._uses_learning_phase = False
    input_tensor._keras_history = (self, 0, 0)
    shape = input_tensor._keras_shape
def make_ultralite_last_layers(x, num_filters, out_filters, block_id_str=None, predict_filters=None):
    '''2 Depthwise_Conv2D_BN_Leaky layers followed by a Conv2D_linear layer'''
    if not block_id_str:
        block_id_str = str(K.get_uid())
    x = compose(
            #DarknetConv2D_BN_Leaky(num_filters, (1,1)),
            #Depthwise_Separable_Conv2D_BN_Leaky(filters=num_filters*2, kernel_size=(3, 3), block_id_str=block_id_str+'_1'),
            #DarknetConv2D_BN_Leaky(num_filters, (1,1)),
            #Depthwise_Separable_Conv2D_BN_Leaky(filters=num_filters*2, kernel_size=(3, 3), block_id_str=block_id_str+'_2'),
            #DarknetConv2D_BN_Leaky(num_filters, (1,1))
            Depthwise_Conv2D_BN_Leaky(kernel_size=(3, 3), block_id_str=block_id_str+'_1'),
        )(x)

    if predict_filters is None:
        predict_filters = num_filters*2
    y = compose(
            #Depthwise_Separable_Conv2D_BN_Leaky(filters=predict_filters, kernel_size=(3, 3), block_id_str=block_id_str+'_3'),
            Depthwise_Conv2D_BN_Leaky(kernel_size=(3, 3), block_id_str=block_id_str+'_3'),
            DarknetConv2D(out_filters, (1,1)))(x)
    return x, y
def __init__(self, name=None, **kwargs):
    if not name:
        prefix = 'optional_input_placeholder'
        name = prefix + '_' + str(K.get_uid(prefix))
    kwargs['batch_input_shape'] = (2,)
    super(_OptionalInputPlaceHolder, self).__init__(**kwargs)
    self.tensor = K.zeros(shape=(2,))
    self.tensor._keras_shape = (2,)
    self.tensor._uses_learning_phase = False
    self.tensor._keras_history = (self, 0, 0)
    Node(self,
         inbound_layers=[],
         node_indices=[],
         tensor_indices=[],
         input_tensors=[],
         output_tensors=[self.tensor],
         input_masks=[None],
         output_masks=[None],
         input_shapes=[],
         output_shapes=[(2,)])
    self.build((2,))
def make_csp_yolo_depthwise_separable_head(x, num_filters, block_id_str=None):
    '''CSP-style head: Conv2D_BN_Mish layers on a main branch, merged with a 1x1 cross-stage shortcut and fused by a final 1x1 conv'''
    if not block_id_str:
        block_id_str = str(K.get_uid())
    x = DarknetConv2D_BN_Mish(num_filters, (1, 1))(x)
    res_connection = DarknetConv2D_BN_Mish(num_filters, (1, 1))(x)
    x = compose(
            DarknetConv2D_BN_Mish(num_filters, (1, 1)),
            Depthwise_Separable_Conv2D_BN_Mish(filters=num_filters, kernel_size=(3, 3), block_id_str=block_id_str + '_1'),
            DarknetConv2D_BN_Mish(num_filters, (1, 1)),
            Depthwise_Separable_Conv2D_BN_Mish(filters=num_filters, kernel_size=(3, 3), block_id_str=block_id_str + '_2'))(x)
    x = Concatenate()([x, res_connection])
    x = DarknetConv2D_BN_Mish(num_filters, (1, 1))(x)
    return x
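A topology-only sketch of the CSP wiring used above, with plain Conv2D layers standing in for DarknetConv2D_BN_Mish and the depthwise-separable blocks (BN and Mish deliberately omitted). Only the split / transform / concatenate / fuse pattern mirrors the function; the layer choices and shapes are assumptions.

import tensorflow as tf
from tensorflow.keras.layers import Input, Conv2D, Concatenate

def csp_head_sketch(x, num_filters):
    x = Conv2D(num_filters, 1, padding='same')(x)
    shortcut = Conv2D(num_filters, 1, padding='same')(x)    # cross-stage partial branch
    y = Conv2D(num_filters, 1, padding='same')(x)           # main branch
    y = Conv2D(num_filters, 3, padding='same')(y)
    y = Conv2D(num_filters, 1, padding='same')(y)
    y = Conv2D(num_filters, 3, padding='same')(y)
    y = Concatenate()([y, shortcut])                         # merge both stages
    return Conv2D(num_filters, 1, padding='same')(y)         # transition/fuse conv

inputs = Input(shape=(13, 13, 256))
outputs = csp_head_sketch(inputs, 128)
model = tf.keras.Model(inputs, outputs)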