def conv_block8(feat_maps_out, prev):
    prev = BatchNormalization(axis=CHANNEL_AXIS, name='BN8_1', freeze=False)(prev)  # normalizing over the channel axis keeps branch shapes consistent for later merging
    prev = layers.LeakyReLU(name='Activation8_1')(prev)
    prev = layers.Conv2D(feat_maps_out, (3, 3), padding='same', kernel_initializer='he_normal', name='Conv8_1')(prev)
    prev = BatchNormalization(axis=CHANNEL_AXIS, name='BN8_2', freeze=False)(prev)
    prev = layers.LeakyReLU(name='Activation8_2')(prev)
    prev = layers.Conv2D(feat_maps_out, (3, 3), padding='same', kernel_initializer='he_normal', name='Conv8_2')(prev)
    return prev
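conv_block8 returns only the transformed branch, so the caller is responsible for merging it back into the trunk. A minimal usage sketch, assuming channels_last data (CHANNEL_AXIS = 3), the freeze-capable BatchNormalization from keras_resnet.layers, and an arbitrary 32x32x64 input shape:

from keras import layers
from keras_resnet.layers import BatchNormalization  # provides the freeze= argument

CHANNEL_AXIS = 3  # channels_last

inputs = layers.Input(shape=(32, 32, 64))
branch = conv_block8(64, inputs)         # BN -> LeakyReLU -> 3x3 conv, twice
merged = layers.Add()([inputs, branch])  # identity shortcut; feat_maps_out matches the input depth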
Example no. 2
def f(input):
    # Main path: conv -> BN -> LeakyReLU -> conv -> BN.
    y = Conv2D(filters, 3, strides=stride, padding="same", kernel_initializer="he_normal", kernel_regularizer=l2(1e-4))(input)
    y = BatchNormalization(axis=3, freeze=False)(y)
    y = LeakyReLU()(y)
    y = Conv2D(filters, 3, strides=1, padding="same", kernel_initializer="he_normal", kernel_regularizer=l2(1e-4))(y)
    y = BatchNormalization(axis=3, freeze=False)(y)
    # Shortcut: project with a 1x1 conv on the first block of a stage,
    # where the stride may downsample; otherwise use the identity.
    if block == 0:
        shortcut = Conv2D(filters, 1, strides=stride, padding="same", kernel_initializer="he_normal", kernel_regularizer=l2(1e-4))(input)
        shortcut = BatchNormalization(axis=3, freeze=False)(shortcut)
    else:
        shortcut = input
    y = Add()([y, shortcut])
    y = LeakyReLU()(y)
    return y
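The free variables filters, stride, and block are evidently bound by an enclosing factory that returns f. A hedged sketch of such a wrapper, modeled on the keras-resnet basic-block pattern; the name basic_block, its signature, and the stride rule are assumptions, not the source's code:

def basic_block(filters, stage=0, block=0):
    # Downsample via stride 2 at the first block of every stage after the first.
    stride = 2 if block == 0 and stage > 0 else 1

    def f(input):
        ...  # body exactly as in Example no. 2 above
    return f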
Example no. 3
def f(input):
    # Stem: a single 3x3 conv (no bias; the following BN supplies the shift) -> BN -> LeakyReLU.
    x = Conv2D(48, 3, padding='same', use_bias=False, kernel_initializer='he_normal',
               kernel_regularizer=l2(weight_decay))(input)
    x = BatchNormalization(axis=3, freeze=False)(x)
    x = LeakyReLU()(x)
    return x
Example no. 4
def initial_conv_block1(input, weight_decay=5e-4):
    x = layers.Conv2D(32, (3, 3),
                      padding='same',
                      use_bias=False,
                      kernel_initializer='he_normal',
                      kernel_regularizer=l2(weight_decay),
                      name='conv1_1')(input)
    x = BatchNormalization(axis=CHANNEL_AXIS, name='BN1_1', freeze=True)(x)  # frozen: runs in inference mode with fixed statistics
    x = layers.LeakyReLU(name='Activation1_1')(x)
    return x
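Every snippet here passes a freeze flag that the stock Keras BatchNormalization does not accept, so a subclass along these lines is presumably in scope. A sketch modeled on keras_resnet.layers.BatchNormalization; the details are an assumption, not the source's implementation:

import keras

class BatchNormalization(keras.layers.BatchNormalization):
    """BatchNormalization with an optional hard freeze (assumed behaviour)."""

    def __init__(self, freeze=False, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.freeze = freeze
        if freeze:
            self.trainable = False  # exclude gamma/beta from the gradient step

    def call(self, inputs, training=None):
        # A frozen layer always runs in inference mode, so the moving
        # mean/variance are used and never updated.
        if self.freeze:
            return super().call(inputs, training=False)
        return super().call(inputs, training=training)

With freeze=True, as in initial_conv_block1, both the moving statistics and the scale/shift parameters stay fixed, the usual choice when fine-tuning from pretrained weights.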
Example no. 5
def f(input):
    # Pointwise (1x1) conv -> BN -> LeakyReLU; keep the result as the
    # residual branch for the final Add.
    b = Conv2D(filters=filters, kernel_size=1,
               strides=1, padding="same",
               #dilation_rate=dilationrate1D,
               kernel_initializer="he_normal",
               kernel_regularizer=l2(1e-4))(input)
    b = BatchNormalization(axis=3, freeze=False)(b)
    b = LeakyReLU()(b)
    b_res = b

    # Second 1x1 conv -> BN, then a softmax over the channel axis to
    # form an attention map.
    b = Conv2D(filters=filters, kernel_size=1,
               strides=1, padding="same",
               kernel_initializer="he_normal",
               kernel_regularizer=l2(1e-4))(b)
    b = BatchNormalization(axis=3, freeze=False)(b)

    b_attention = Activation("softmax")(b)

    # 3x3 conv -> BN, gated elementwise by the attention map.
    b = Conv2D(filters=filters, kernel_size=3,
               strides=(1,1), padding="same",
               #dilation_rate=dilationrate2D,
               kernel_initializer="he_normal",
               kernel_regularizer=l2(1e-4))(b)
    b = BatchNormalization(axis=3, freeze=False)(b)
    b = Multiply()([b, b_attention])
    b = LeakyReLU()(b)

    # Final 1x1 conv -> BN, residual add, and activation.
    b = Conv2D(filters=filters, kernel_size=1,
               strides=1, padding="same",
               kernel_initializer="he_normal",
               kernel_regularizer=l2(1e-4))(b)
    b = BatchNormalization(axis=3, freeze=False)(b)
    b = Add()([b, b_res])
    b = LeakyReLU()(b)

    return b
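As in Example no. 2, f closes over a free variable, here only filters. A minimal usage sketch, assuming filters is bound at module scope and that the freeze-capable BatchNormalization plus the standard Keras layers are already imported; the shapes are arbitrary:

from keras.layers import Input
from keras.models import Model

filters = 64                        # free variable consumed by f above
inputs = Input(shape=(64, 64, 32))  # channels_last feature map
outputs = f(inputs)                 # the first 1x1 conv projects 32 -> 64 channels
model = Model(inputs, outputs)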
Example no. 6
def _bn_relu(input):
    """Helper to build a BN -> relu block
    """
    norm = BatchNormalization(axis=CHANNEL_AXIS, freeze=False)(input)
    return Activation("relu")(norm)
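In the pre-activation ResNet recipe, _bn_relu is usually paired with a builder that appends a convolution. A sketch of that companion, modeled on the common keras-resnet layout; the name _bn_relu_conv and its defaults are assumptions, not confirmed by the source:

from keras.layers import Conv2D
from keras.regularizers import l2

def _bn_relu_conv(**conv_params):
    """Helper to build a BN -> relu -> conv block (sketch)."""
    filters = conv_params["filters"]
    kernel_size = conv_params["kernel_size"]
    strides = conv_params.setdefault("strides", (1, 1))

    def f(input):
        activation = _bn_relu(input)  # reuse the helper above
        return Conv2D(filters=filters, kernel_size=kernel_size,
                      strides=strides, padding="same",
                      kernel_initializer="he_normal",
                      kernel_regularizer=l2(1e-4))(activation)
    return f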