Example #1
# Assumes TensorFlow Keras; 'Mish' must be registered as a custom activation for Activation('Mish') to resolve.
from tensorflow.keras.layers import Conv2D, BatchNormalization, LeakyReLU, Activation
from tensorflow.keras.activations import linear


def conv(x, filters, kernel_size, strides=(1, 1), padding='same', activation="Mish", use_bias=True):
    # Convolution followed by batch normalization and a selectable activation.
    x = Conv2D(filters, kernel_size, strides, padding, use_bias=use_bias)(x)
    x = BatchNormalization()(x)
    if activation == 'LeakyReLU':
        x = LeakyReLU()(x)
    if activation == 'Mish':
        x = Activation('Mish')(x)
    if activation == 'Linear':
        x = linear(x)
    return x
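For context, a minimal usage sketch of the conv helper above, assuming TensorFlow Keras; the input shape and filter count are illustrative, and the LeakyReLU branch is used so no custom 'Mish' registration is needed.

from tensorflow.keras import Input, Model

inputs = Input(shape=(416, 416, 3))  # illustrative input shape
features = conv(inputs, filters=32, kernel_size=(3, 3), activation='LeakyReLU')
model = Model(inputs, features)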
Example #2
# Method of a custom Keras layer: K is tensorflow.keras.backend, linear is
# keras.activations.linear (the identity), and self.kernel / self.bias are
# weights created in the layer's build().
def compute_similarity(self, repeated_context_vectors, repeated_query_vectors):
    # Concatenate the context vectors, the query vectors, and their element-wise
    # product, then project the result down to a single similarity score.
    element_wise_multiply = repeated_context_vectors * repeated_query_vectors
    concatenated_tensor = K.concatenate(
        [repeated_context_vectors, repeated_query_vectors, element_wise_multiply],
        axis=-1)
    dot_product = K.squeeze(K.dot(concatenated_tensor, self.kernel), axis=-1)
    return linear(dot_product + self.bias)
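A minimal sketch of a custom layer that could host compute_similarity, assuming TensorFlow Keras; the class name and weight shapes are assumptions chosen to match the self.kernel and self.bias references above.

import tensorflow as tf
from tensorflow.keras import backend as K
from tensorflow.keras.activations import linear


class SimilarityLayer(tf.keras.layers.Layer):
    # Hypothetical host layer: creates the kernel and bias used by compute_similarity.
    def build(self, input_shape):
        dim = input_shape[-1]
        # One weight per concatenated feature in [context, query, context * query].
        self.kernel = self.add_weight(name='kernel', shape=(3 * dim, 1),
                                      initializer='glorot_uniform', trainable=True)
        self.bias = self.add_weight(name='bias', shape=(),
                                    initializer='zeros', trainable=True)
        super().build(input_shape)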
Example #3
# Assumes TensorFlow Keras; Mish is a custom activation layer defined elsewhere in the project.
from tensorflow.keras.layers import Conv2D, BatchNormalization, LeakyReLU
from tensorflow.keras.regularizers import l2
from tensorflow.keras.activations import linear


def Conv2D_BN_Act(x,
                  filters,
                  size,
                  strides=1,
                  batch_norm=True,
                  activation="mish"):
    # Convolution with L2 weight decay, optional batch norm, and a selectable activation.
    # The conv bias is dropped when batch norm follows, since its shift would be redundant.
    padding = 'same' if strides == 1 else 'valid'
    x = Conv2D(filters=filters,
               kernel_size=size,
               strides=strides,
               padding=padding,
               use_bias=not batch_norm,
               kernel_regularizer=l2(5e-4))(x)
    if batch_norm:
        x = BatchNormalization()(x)

    if activation in ["Mish", "mish"]:
        x = Mish()(x)
    elif activation in ["LeakyReLU", "leakyrelu"]:
        x = LeakyReLU(alpha=0.1)(x)
    elif activation in ["Linear", "linear"]:
        x = linear(x)
    return x
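A minimal sketch of the Mish layer the example assumes, plus an illustrative call; both the Mish definition and the input shape are assumptions, not part of the original snippet.

import tensorflow as tf
from tensorflow.keras import Input, Model
from tensorflow.keras.layers import Layer


class Mish(Layer):
    # Assumed definition of the Mish activation: x * tanh(softplus(x)).
    def call(self, inputs):
        return inputs * tf.math.tanh(tf.math.softplus(inputs))


inputs = Input(shape=(256, 256, 3))                  # illustrative input shape
x = Conv2D_BN_Act(inputs, filters=32, size=3)        # stride 1 -> 'same' padding
x = Conv2D_BN_Act(x, filters=64, size=3, strides=2)  # stride 2 -> 'valid' padding
model = Model(inputs, x)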