def __init__(self, filters, sn=False, **kwargs):
     super(uk_5s1_2D, self).__init__()
     self.filters_zp = filters
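     # 5x5 transposed convolution with stride 1: maps the input to `filters` channels
     # without changing spatial size; sn presumably toggles spectral normalization in BaseLayers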
     self.l1_conv2d = BaseLayers.Conv2DTranspose(filters=filters,
                                                 kernel_size=[5, 5],
                                                 strides=[1, 1],
                                                 padding="SAME",
                                                 use_bias=False,
                                                 sn=sn,
                                                 **kwargs)
     self.l2_norm = BaseLayers.BatchNormalization()
     self.l3_activation = BaseLayers.LeakyReLU()
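A minimal usage sketch of the equivalent stack built from stock tf.keras layers (BaseLayers and its sn flag are specific to this codebase and are omitted here as assumptions):

import tensorflow as tf

x = tf.random.normal([1, 16, 16, 64])                      # NHWC feature map
y = tf.keras.layers.Conv2DTranspose(64, kernel_size=5, strides=1,
                                    padding="same", use_bias=False)(x)
y = tf.keras.layers.BatchNormalization()(y)
y = tf.keras.layers.LeakyReLU()(y)
print(y.shape)  # (1, 16, 16, 64): stride 1 keeps the spatial size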
Example #2
 def __init__(self, filters, sn=False, activation=None, **kwargs):
     super(lats_up, self).__init__()
     self.filters_zp = filters
     self.l1_conv2d = BaseLayers.Conv2DTranspose(filters=filters,
                                                 kernel_size=[5, 5],
                                                 strides=[2, 2],
                                                 padding="SAME",
                                                 use_bias=False,
                                                 sn=sn,
                                                 **kwargs)
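     # select the output activation; None maps to a linear (identity) activation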
     if activation is None:
         self.l2_activation = tf.keras.layers.Activation("linear")
     elif activation.lower() == "relu":
         self.l2_activation = BaseLayers.ReLU()
     elif activation.lower() == "leaky_relu":
         self.l2_activation = BaseLayers.LeakyReLU()
     elif activation.lower() == "sigmoid":
         self.l2_activation = tf.keras.layers.Activation("sigmoid")
     elif activation.lower() == "tanh":
         self.l2_activation = tf.keras.layers.Activation("tanh")
     else:
         raise ValueError("Unsupported activation: " + activation)
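A sketch of the same stride-2 upsampling block assembled from stock tf.keras layers (spectral normalization again omitted; the helper name up_block is hypothetical), showing how the optional activation might be selected:

import tensorflow as tf

def up_block(filters, activation=None):
    # stride-2 transposed conv doubles height and width
    layers = [tf.keras.layers.Conv2DTranspose(filters, 5, strides=2,
                                              padding="same", use_bias=False)]
    if activation == "leaky_relu":
        layers.append(tf.keras.layers.LeakyReLU())
    elif activation is not None:
        layers.append(tf.keras.layers.Activation(activation))  # "relu", "sigmoid", "tanh", ...
    return tf.keras.Sequential(layers)

x = tf.random.normal([1, 16, 16, 32])
print(up_block(64, "tanh")(x).shape)  # (1, 32, 32, 64): stride 2 doubles H and W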