def __init__(self, filters, sn=False, use_bias=False, **kwargs):
     super(uk_3D, self).__init__()
     self.filters_zp = filters
     self.l1_up = BaseLayers.UpSample3D(filters=filters,
                                        kernel_size=[3, 3, 3],
                                        strides=[2, 2, 2],
                                        padding="SAME",
                                        use_bias=use_bias,
                                        sn=sn,
                                        **kwargs)
     self.l2_norm = BaseLayers.InstanceNorm3D()
     self.l3_activation = BaseLayers.ReLU()
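These examples only show each block's __init__; the forward pass is defined elsewhere. As an illustration, a minimal call sketch for a block like uk_3D might chain its sub-layers as follows (an assumption, not taken from the source; the actual signature in the project may differ):
 def call(self, inputs):
     x = self.l1_up(inputs)        # learned 3-D upsampling, stride 2 per spatial axis
     x = self.l2_norm(x)           # instance normalization
     return self.l3_activation(x)  # ReLU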
 def __init__(self, filters, sn=False, **kwargs):
     super(uk_5s1_2D, self).__init__()
     self.filters_zp = filters
     self.l1_conv2d = BaseLayers.Conv2DTranspose(filters=filters,
                                                 kernel_size=[5, 5],
                                                 strides=[1, 1],
                                                 padding="SAME",
                                                 use_bias=False,
                                                 sn=sn,
                                                 **kwargs)
     self.l2_norm = BaseLayers.BatchNormalization()
     self.l3_activation = BaseLayers.LeakyReLU()
Example #3
 def __init__(self,filters,sn=False,use_bias=False,activation="relu",**kwargs):
     super(c7s1_k_2D,self).__init__()
     self.filters_zp = filters
     self.l1_conv2d = BaseLayers.Conv2D(filters=filters,kernel_size=[7,7],strides=[1,1],padding="REFLECT",use_bias=use_bias,sn=sn,**kwargs)
     self.l2_norm = BaseLayers.InstanceNorm2D()
     if activation.lower()=="relu":
         self.l3_activation = BaseLayers.ReLU()
     elif activation.lower()=="sigmoid":
         self.l3_activation = tf.keras.layers.Activation("sigmoid")
     elif activation.lower()=="tanh":
         self.l3_activation = tf.keras.layers.Activation("tanh")
     else:
         raise ValueError("Un supported activation "+activation)
Example #4
 def __init__(self,units,sn=False,activation=None,**kwargs):
     super(Flatten_Dense,self).__init__()
     self.units_zp = units
     self.l1_flatten = tf.keras.layers.Flatten()
     self.l2_dense = BaseLayers.Dense(units=units,use_bias=True,sn=sn,**kwargs)
      if activation is None:
         self.l3_activation = tf.keras.layers.Activation("linear")
     elif activation.lower()=="relu":
         self.l3_activation = BaseLayers.ReLU()
     elif activation.lower()=="sigmoid":
         self.l3_activation = tf.keras.layers.Activation("sigmoid")
     elif activation.lower()=="tanh":
         self.l3_activation = tf.keras.layers.Activation("tanh")
     else:
         raise ValueError("Un supported activation "+activation)
 def __init__(self, filters, sn=False, norm=True, use_bias=False, **kwargs):
     super(ck_2D, self).__init__()
     self.filters_zp = filters
     self.l1_conv2d = BaseLayers.Conv2D(filters=filters,
                                        kernel_size=[4, 4],
                                        strides=[2, 2],
                                        padding='SAME',
                                        use_bias=use_bias,
                                        sn=sn,
                                        **kwargs)
     if norm:
         self.l2_norm = BaseLayers.InstanceNorm2D()
     else:
         self.l2_norm = tf.keras.layers.Activation("linear")
     self.l3_activation = BaseLayers.LeakyReLU(alpha=0.2)
Example #6
 def __init__(self,filters,sn=False,activation=None,**kwargs):
     super(lats_up,self).__init__()
     self.filters_zp = filters
     self.l1_conv2d = BaseLayers.Conv2DTranspose(filters=filters,kernel_size=[5,5],strides=[2,2],padding="SAME",use_bias=False,sn=sn,**kwargs)
      if activation is None:
         self.l2_activation = tf.keras.layers.Activation("linear")
     elif activation.lower()=="relu":
         self.l2_activation = BaseLayers.ReLU()
     elif activation.lower()=="leaky_relu":
         self.l2_activation = BaseLayers.LeakyReLU()
     elif activation.lower()=="sigmoid":
         self.l2_activation = tf.keras.layers.Activation("sigmoid")
     elif activation.lower()=="tanh":
         self.l2_activation = tf.keras.layers.Activation("tanh")
     else:
         raise ValueError("Un supported activation"+activation)
 def __init__(self, filters, sn=False, use_bias=False, **kwargs):
     super(rk_2D, self).__init__()
     self.l1_conv2d = BaseLayers.Conv2D(filters=filters,
                                        kernel_size=[3, 3],
                                        strides=[1, 1],
                                        padding="REFLECT",
                                        use_bias=use_bias,
                                        sn=sn,
                                        **kwargs)
     self.l2_norm = BaseLayers.InstanceNorm2D()
     self.l3_activation = BaseLayers.ReLU()
     self.l4_conv2d = BaseLayers.Conv2D(filters=filters,
                                        kernel_size=[3, 3],
                                        strides=[1, 1],
                                        padding="REFLECT",
                                        use_bias=use_bias,
                                        sn=sn,
                                        **kwargs)
     self.l5_norm = BaseLayers.InstanceNorm2D()
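rk_2D follows the residual-block (R_k) pattern from the CycleGAN architecture notation; its __init__ only declares the two conv/norm pairs, so the skip connection has to be added in the forward pass. A hedged sketch of what that call typically looks like (not shown in the source):
 def call(self, inputs):
     x = self.l1_conv2d(inputs)
     x = self.l2_norm(x)
     x = self.l3_activation(x)
     x = self.l4_conv2d(x)
     x = self.l5_norm(x)
     return inputs + x  # residual (skip) connection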
 def __init__(self, use_sigmoid=True, sn=False, **kwargs):
     super(last_conv_2D, self).__init__()
     self.l1_conv2d = BaseLayers.Conv2D(filters=1,
                                        kernel_size=[4, 4],
                                        strides=[1, 1],
                                        padding='SAME',
                                        use_bias=True,
                                        sn=sn,
                                        **kwargs)
     if use_sigmoid:
         self.l2_activation = tf.keras.layers.Activation("sigmoid")
     else:
         self.l2_activation = tf.keras.layers.Activation("linear")
Example #9
import tensorflow as tf
# BaseLayers (the project's custom layer module, used below) is assumed to be importable here.

physical_devices = tf.config.experimental.list_physical_devices('GPU')
tf.config.experimental.set_memory_growth(physical_devices[0], True)
from tensorflow.keras.mixed_precision import experimental as mixed_precision
policy = mixed_precision.Policy('mixed_float16')
mixed_precision.set_policy(policy)

# a = c7s1_k_2D(6,sn=True,use_bias=True)
# a.build(input_shape=[None,256,256,3])
# w = tf.random.normal([1,256,256,3])
# import time
# for __ in range(5):
#     start = time.time()
#     for _ in range(1000):
#         y = a(w)
#     print(time.time()-start)

# Benchmark the custom reflect-padded Conv2D from BaseLayers.
l1_conv2d = BaseLayers.Conv2D(filters=64,kernel_size=[7,7],strides=[1,1],padding="REFLECT",use_bias=False,sn=False)
# l1_conv2d = tf.keras.layers.Conv2D(filters=64,kernel_size=[7,7],strides=[1,1],padding="VALID",use_bias=False)
l1_conv2d.build(input_shape=[None,256,256,8])
w = tf.random.normal([8,256,256,8])  # dummy batch of 8 inputs
import time
# Time 1000 eager forward passes, repeated 5 times.
for __ in range(5):
    start = time.time()
    for _ in range(1000):
        y = l1_conv2d(w)
    print(time.time()-start)
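# The loop above times eager execution. As a hedged variant (reusing l1_conv2d and w
# from above), wrapping the call in tf.function benchmarks graph-mode execution instead;
# note that the first repetition also includes tracing time.
@tf.function
def run(x):
    return l1_conv2d(x)

for __ in range(5):
    start = time.time()
    for _ in range(1000):
        y = run(w)
    print(time.time()-start)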

# l1_conv2d = tf.keras.layers.Dense(4096)
# l1_conv2d.build(input_shape=[None,784])
# w = tf.random.normal([4096,784])
# import time
# for __ in range(5):
Example #10
 def __init__(self,units,sn=False,**kwargs):
     super(vec2img,self).__init__()
     self.units_zp = units
     self.l1_dense = BaseLayers.Dense(units=units,use_bias=False,sn=sn,**kwargs)
     self.l2_norm = BaseLayers.BatchNormalization()
     self.l3_activation = BaseLayers.LeakyReLU()
Example #11
 def __init__(self,filters,sn=False,**kwargs):
     super(ckd_5s2_2D,self).__init__()
     self.filters_zp = filters
     self.l1_conv2d= BaseLayers.Conv2D(filters=filters,kernel_size=[5,5],strides=[2,2],padding='SAME',sn=sn,**kwargs)
     self.l2_activation = BaseLayers.LeakyReLU()
     self.l3_dropout = BaseLayers.Dropout(rate=0.3)
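Because this block contains dropout, its forward pass needs the training flag so that dropout is only active during training. A hedged sketch, assuming BaseLayers.Dropout follows the Keras Dropout call signature:
 def call(self, inputs, training=True):
     x = self.l1_conv2d(inputs)
     x = self.l2_activation(x)
     return self.l3_dropout(x, training=training)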