Example #1
0
 def __init__(self,
              input_dim,
              kernel_size=32,
              dropout=0.0,
              name="conv_module",
              **kwargs):
     """Build the sub-layers of a convolution module.

     Args:
         input_dim: Channel count of the input; the final pointwise conv
             projects back to this size so a residual add is possible.
         kernel_size: Kernel width of the depthwise-separable conv.
         dropout: Dropout rate applied by ``self.do``.
         name: Layer name forwarded to the Keras base class.
         **kwargs: Extra keyword args forwarded to the Keras base class.
     """
     super(ConvModule, self).__init__(name=name, **kwargs)
     self.ln = tf.keras.layers.LayerNormalization()
     # Pointwise conv expands channels to 2 * input_dim; presumably the
     # GLU below halves them again (GLU is defined elsewhere — confirm).
     self.pw_conv_1 = tf.keras.layers.Conv1D(filters=2 * input_dim,
                                             kernel_size=1,
                                             strides=1,
                                             padding="same",
                                             name="pw_conv_1")
     self.glu = GLU()
     self.dw_conv = tf.keras.layers.SeparableConv1D(filters=2 * input_dim,
                                                    kernel_size=kernel_size,
                                                    strides=1,
                                                    padding="same",
                                                    depth_multiplier=1,
                                                    name="dw_conv")
     # NOTE(review): despite the ``bn`` name this is a SwitchNormalization
     # (project-local class), not BatchNormalization.
     self.bn = SwitchNormalization()
     self.swish = tf.keras.layers.Activation(tf.keras.activations.swish,
                                             name="swish_activation")
     # Projects back to input_dim so the residual add's shapes match.
     self.pw_conv_2 = tf.keras.layers.Conv1D(filters=input_dim,
                                             kernel_size=1,
                                             strides=1,
                                             padding="same",
                                             name="pw_conv_2")
     self.do = tf.keras.layers.Dropout(dropout)
     self.res_add = tf.keras.layers.Add()
Example #2
0
class ConvModule(tf.keras.layers.Layer):
    """Convolution module: LN -> pointwise conv (2x expand) -> GLU ->
    depthwise-separable conv -> SwitchNormalization -> swish ->
    pointwise conv (project back) -> dropout -> residual add.

    The final pointwise conv restores ``input_dim`` channels so the
    residual connection in ``call`` is shape-compatible.
    """

    def __init__(self,
                 input_dim,
                 kernel_size=32,
                 dropout=0.0,
                 name="conv_module",
                 **kwargs):
        """Build the sub-layers.

        Args:
            input_dim: Channel count of the input and of the output.
            kernel_size: Kernel width of the depthwise-separable conv.
            dropout: Dropout rate applied before the residual add.
            name: Layer name forwarded to the Keras base class.
            **kwargs: Extra keyword args forwarded to the Keras base class.
        """
        super(ConvModule, self).__init__(name=name, **kwargs)
        # Stored so get_config() can serialize the constructor arguments.
        self.input_dim = input_dim
        self.kernel_size = kernel_size
        self.dropout_rate = dropout
        self.ln = tf.keras.layers.LayerNormalization()
        # Expand to 2 * input_dim; presumably the GLU halves it again
        # (GLU is defined elsewhere — confirm).
        self.pw_conv_1 = tf.keras.layers.Conv1D(filters=2 * input_dim,
                                                kernel_size=1,
                                                strides=1,
                                                padding="same",
                                                name="pw_conv_1")
        self.glu = GLU()
        self.dw_conv = tf.keras.layers.SeparableConv1D(filters=2 * input_dim,
                                                       kernel_size=kernel_size,
                                                       strides=1,
                                                       padding="same",
                                                       depth_multiplier=1,
                                                       name="dw_conv")
        # NOTE(review): named ``bn`` but is SwitchNormalization, not
        # BatchNormalization.
        self.bn = SwitchNormalization()
        self.swish = tf.keras.layers.Activation(tf.keras.activations.swish,
                                                name="swish_activation")
        # Projects back to input_dim so the residual add's shapes match.
        self.pw_conv_2 = tf.keras.layers.Conv1D(filters=input_dim,
                                                kernel_size=1,
                                                strides=1,
                                                padding="same",
                                                name="pw_conv_2")
        self.do = tf.keras.layers.Dropout(dropout)
        self.res_add = tf.keras.layers.Add()

    # @tf.function(experimental_relax_shapes=True)
    def call(self, inputs, training=False, **kwargs):
        """Apply the module and add the input back as a residual."""
        outputs = self.ln(inputs, training=training)
        outputs = self.pw_conv_1(outputs, training=training)
        outputs = self.glu(outputs)
        outputs = self.dw_conv(outputs, training=training)
        outputs = self.bn(outputs, training=training)
        outputs = self.swish(outputs)
        outputs = self.pw_conv_2(outputs, training=training)
        outputs = self.do(outputs, training=training)
        outputs = self.res_add([inputs, outputs])
        return outputs

    def get_config(self):
        """Return a config dict from which this layer can be re-created.

        The previous implementation merged every sub-layer's config into
        one flat dict; sub-layer configs share keys (``name``,
        ``trainable``, ``dtype``, ``filters``, ...), so later updates
        silently clobbered earlier ones and the result could not rebuild
        the layer via ``from_config``. Serializing the constructor
        arguments is the standard Keras pattern and is sufficient:
        ``__init__`` deterministically rebuilds all sub-layers from them.
        """
        conf = super(ConvModule, self).get_config()
        conf.update({
            "input_dim": self.input_dim,
            "kernel_size": self.kernel_size,
            "dropout": self.dropout_rate,
        })
        return conf
Example #3
0
 def __init__(self, filter_size, kernel_size):
     """Build the ESP branches: a channel projector plus five parallel
     dilated convolutions at rates 1, 2, 4, 8 and 16, followed by a
     SwitchNormalization.
     """
     super(ESP_alhpa, self).__init__()
     # Each branch works on a quarter of the requested filter count.
     filter_size //= 4
     self.chanle_projecter = tf.keras.layers.Conv2D(
         filter_size, kernel_size, padding='same')
     # NOTE(review): every branch gets a fresh initializer with the same
     # seed=42, so all dilated branches start from identical weights —
     # confirm this is intended.
     for rate in (1, 2, 4, 8, 16):
         branch = tf.keras.layers.Conv2D(
             filter_size, kernel_size, padding='same',
             kernel_initializer=tf.random_uniform_initializer(seed=42),
             dilation_rate=(rate, rate))
         setattr(self, 'dilated_conv%d' % rate, branch)
     self.sw = SwitchNormalization()
Example #4
0
 def __init__(self):
     """Create the module's single sub-layer, a SwitchNormalization
     (project-local class defined elsewhere).
     """
     super(BN_PRelu, self).__init__()
     self.sw = SwitchNormalization()