def __init__(self, in_shape):
    """The two conv layers and the dense layer all use biases, so each
    contributes 2 parameters: 2 + 2 + 2 = 6 independent trainable tensors
    in total (counting the standalone self.w / self.b tensors).
    """
    super(Discriminator, self).__init__()
    self.Conv2d_1 = my_layers.Conv2D(input_shape=in_shape,
                                     out_depth=64,
                                     filter_size=[5, 5],
                                     strides=[2, 2],
                                     use_bias=True,
                                     pandding_way="SAME")
    self.LeakyReLU_1 = my_layers.LeakyReLU(
        in_shape=self.Conv2d_1.out_shape)
    self.DropOut_1 = my_layers.Dropout(in_shape=self.LeakyReLU_1.out_shape,
                                       dropout_rate=0.3)
    self.Conv2d_2 = my_layers.Conv2D(input_shape=self.DropOut_1.out_shape,
                                     out_depth=128,
                                     filter_size=[5, 5],
                                     strides=[2, 2],
                                     use_bias=True,
                                     pandding_way="SAME")
    self.LeakyReLU_2 = my_layers.LeakyReLU(
        in_shape=self.Conv2d_2.out_shape)
    self.DropOut_2 = my_layers.Dropout(in_shape=self.LeakyReLU_2.out_shape,
                                       dropout_rate=0.3)
    # Flatten the final feature map so the dense layer can emit one logit.
    next_shape = 1
    for i in self.DropOut_2.out_shape:
        next_shape *= i
    self.Dense = my_layers.Dense(next_shape, units=1)
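
# A minimal shape sketch for the discriminator above, not part of the model
# itself. It assumes MNIST-sized inputs of [28, 28, 1] (consistent with the
# generator's final assert below) and the usual "SAME"-padding convention,
# where a stride-2 conv produces ceil(in / 2) spatial outputs. The helper
# name _expected_flat_size is hypothetical, for illustration only.
def _expected_flat_size(in_shape=(28, 28, 1)):
    h, w, _ = in_shape
    h, w = (h + 1) // 2, (w + 1) // 2  # after Conv2d_1: [14, 14, 64]
    h, w = (h + 1) // 2, (w + 1) // 2  # after Conv2d_2: [7, 7, 128]
    return h * w * 128                 # flattened size fed to the final Dense


assert _expected_flat_size() == 7 * 7 * 128 == 6272
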
def __init__(self, in_dim):
    """Each BN layer holds 2 parameters; the transposed-conv and dense
    layers do not use biases, so each holds only 1:
    1 + 2 + 1 + 2 + 1 + 2 + 1 = 10 independent trainable tensors in total
    (counting the standalone self.w / self.b tensors).
    """
    super(Generator, self).__init__()
    self.Dense_1 = my_layers.Dense(in_dim, 7 * 7 * 256, use_bias=False)
    self.BacthNormalization_1 = my_layers.BatchNormalization(
        in_shape=self.Dense_1.out_dim)
    self.LeakyReLU_1 = my_layers.LeakyReLU(
        in_shape=self.BacthNormalization_1.out_shape)
    self.Conv2dTranspose_2 = my_layers.Conv2DTranspose(
        in_shape=[7, 7, 256],
        out_depth=128,
        kernel_size=[5, 5],
        strides=[1, 1],
        pandding_way="SAME",
        use_bias=False)
    assert self.Conv2dTranspose_2.out_shape == [7, 7, 128]
    self.BacthNormalization_2 = my_layers.BatchNormalization(
        in_shape=self.Conv2dTranspose_2.out_shape)
    self.LeakyReLU_2 = my_layers.LeakyReLU(
        in_shape=self.BacthNormalization_2.out_shape)
    self.Conv2dTranspose_3 = my_layers.Conv2DTranspose(
        in_shape=self.LeakyReLU_2.out_shape,
        out_depth=64,
        kernel_size=[5, 5],
        strides=[2, 2],
        pandding_way="SAME",
        use_bias=False)
    assert self.Conv2dTranspose_3.out_shape == [14, 14, 64]
    self.BacthNormalization_3 = my_layers.BatchNormalization(
        in_shape=self.Conv2dTranspose_3.out_shape)
    self.LeakyReLU_3 = my_layers.LeakyReLU(
        in_shape=self.BacthNormalization_3.out_shape)
    self.Conv2dTranspose_4 = my_layers.Conv2DTranspose(
        in_shape=self.LeakyReLU_3.out_shape,
        out_depth=1,
        kernel_size=[5, 5],
        strides=[2, 2],
        pandding_way="SAME",
        use_bias=False)
    assert self.Conv2dTranspose_4.out_shape == [28, 28, 1]
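
# A hedged sketch of the generator's shape flow, not part of the model. It
# assumes the Dense output is reshaped to [7, 7, 256] in the forward pass
# (as Conv2dTranspose_2's in_shape implies) and the usual "SAME"-padding
# convention for transposed convs, where the spatial dims are simply
# multiplied by the stride. _transpose_out_hw is a hypothetical helper for
# illustration only.
def _transpose_out_hw(h, w, stride):
    return h * stride, w * stride


h, w = 7, 7                        # after Dense_1 + reshape: [7, 7, 256]
h, w = _transpose_out_hw(h, w, 1)  # Conv2dTranspose_2 -> [7, 7, 128]
h, w = _transpose_out_hw(h, w, 2)  # Conv2dTranspose_3 -> [14, 14, 64]
h, w = _transpose_out_hw(h, w, 2)  # Conv2dTranspose_4 -> [28, 28, 1]
assert (h, w) == (28, 28)          # matches the asserts in __init__
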