def __init__(self, filters, sn=False, use_bias=False, **kwargs):
    """Build the sub-layers: two 3x3x3 reflect-padded convs, each followed
    by instance normalization, with a ReLU between them.

    NOTE(review): the name suggests a CycleGAN-style R_k residual block;
    the skip connection would live in the (unseen) call method — confirm.

    Args:
        filters: number of output channels for both convolutions.
        sn: whether spectral normalization is applied to the convs.
        use_bias: whether the convolutions carry a bias term.
        **kwargs: forwarded to both BaseLayers.Conv3D constructors.
    """
    super(rk_3D, self).__init__()
    # First conv -> norm -> activation stage.
    self.l1_conv3d = BaseLayers.Conv3D(
        filters=filters,
        kernel_size=[3, 3, 3],
        strides=[1, 1, 1],
        padding="REFLECT",
        use_bias=use_bias,
        sn=sn,
        **kwargs)
    self.l2_norm = BaseLayers.InstanceNorm3D()
    self.l3_activation = BaseLayers.ReLU()
    # Second conv -> norm stage (no trailing activation here).
    self.l4_conv3d = BaseLayers.Conv3D(
        filters=filters,
        kernel_size=[3, 3, 3],
        strides=[1, 1, 1],
        padding="REFLECT",
        use_bias=use_bias,
        sn=sn,
        **kwargs)
    self.l5_norm = BaseLayers.InstanceNorm3D()
def __init__(self, filters, sn=False, use_bias=False, activation="relu", **kwargs):
    """Build a 7x7x7 stride-1 reflect-padded conv -> instance norm ->
    activation stack (the classic c7s1-k block).

    Args:
        filters: number of output channels of the convolution.
        sn: whether spectral normalization is applied to the conv.
        use_bias: whether the convolution carries a bias term.
        activation: one of "relu", "sigmoid", "tanh" (case-insensitive).
        **kwargs: forwarded to the BaseLayers.Conv3D constructor.

    Raises:
        ValueError: if ``activation`` is not a supported name.
    """
    super(c7s1_k_3D, self).__init__()
    self.filters_zp = filters
    self.l1_conv3d = BaseLayers.Conv3D(
        filters=filters,
        kernel_size=[7, 7, 7],
        strides=[1, 1, 1],
        padding="REFLECT",
        use_bias=use_bias,
        sn=sn,
        **kwargs)
    self.l2_norm = BaseLayers.InstanceNorm3D()
    # Hoist the lowercasing instead of recomputing it per branch.
    act = activation.lower()
    if act == "relu":
        self.l3_activation = BaseLayers.ReLU()
    elif act in ("sigmoid", "tanh"):
        # Both map directly onto a Keras Activation layer.
        self.l3_activation = tf.keras.layers.Activation(act)
    else:
        # Fixed typo in the original message ("Un supported").
        raise ValueError("Unsupported activation " + activation)
def __init__(self, use_sigmoid=True, sn=False, **kwargs):
    """Build the final single-channel 4x4x4 stride-1 conv, followed by a
    sigmoid (or identity) activation — typically a discriminator head.

    Args:
        use_sigmoid: if True the output passes through a sigmoid,
            otherwise through a linear (identity) activation.
        sn: whether spectral normalization is applied to the conv.
        **kwargs: forwarded to the BaseLayers.Conv3D constructor.
    """
    super(last_conv_3D, self).__init__()
    self.l1_conv3d = BaseLayers.Conv3D(
        filters=1,
        kernel_size=[4, 4, 4],
        strides=[1, 1, 1],
        padding='SAME',
        use_bias=True,
        sn=sn,
        **kwargs)
    # Conditional expression replaces the original if/else block.
    act_name = "sigmoid" if use_sigmoid else "linear"
    self.l2_activation = tf.keras.layers.Activation(act_name)
def __init__(self, filters, sn=False, norm=True, use_bias=False, **kwargs):
    """Build a 4x4x4 stride-2 downsampling conv -> (optional) instance
    norm -> LeakyReLU(0.2) stack (the classic Ck block).

    Args:
        filters: number of output channels of the convolution.
        sn: whether spectral normalization is applied to the conv.
        norm: if True apply instance normalization, otherwise an
            identity (linear activation) placeholder.
        use_bias: whether the convolution carries a bias term.
        **kwargs: forwarded to the BaseLayers.Conv3D constructor.
    """
    super(ck_3D, self).__init__()
    self.filters_zp = filters
    self.l1_conv3d = BaseLayers.Conv3D(
        filters=filters,
        kernel_size=[4, 4, 4],
        strides=[2, 2, 2],
        padding='SAME',
        use_bias=use_bias,
        sn=sn,
        **kwargs)
    # Identity passthrough keeps the layer list shape stable when
    # normalization is disabled.
    self.l2_norm = (BaseLayers.InstanceNorm3D()
                    if norm
                    else tf.keras.layers.Activation("linear"))
    self.l3_activation = BaseLayers.LeakyReLU(alpha=0.2)