def residual_block(self, x_c_code, name, training=True):
    """Residual block over an s4 x s4 x (gf_dim*4) feature map.

    Applies two 3x3 convolutions (BN + ReLU after the first, BN only after
    the second), adds the result back onto the input, and applies a final
    ReLU: relu(x + F(x)).

    Args:
        x_c_code: input feature tensor; assumes shape
            (batch, s4, s4, gf_dim*4) — TODO confirm against callers.
        name: variable-scope / op-name prefix for this block's layers.
        training: forwarded to batch norm as `is_training`.

    Returns:
        Output tensor with the same shape as `x_c_code`.
    """
    # Identity (shortcut) branch — passed through unchanged.
    shortcut = x_c_code

    # Transform branch: conv -> BN+ReLU -> conv -> BN (no activation
    # before the residual add).
    out = Conv2d(x_c_code, 3, 3, self.gf_dim * 4, 1, 1, name=name + '/conv2d')
    out = conv_batch_normalization(out, name + '/batch_norm',
                                   is_training=training,
                                   activation_fn=tf.nn.relu)
    out = Conv2d(out, 3, 3, self.gf_dim * 4, 1, 1, name=name + '/conv2d2')
    out = conv_batch_normalization(out, name + '/batch_norm2',
                                   is_training=training)

    # BUG FIX: the add op was named with the hard-coded 'resid_block/add'
    # rather than the per-block `name` prefix used by every other op here,
    # so multiple residual blocks collided on the same op name. Use the
    # prefix so each instantiation gets a unique, consistent name.
    output_tensor = add([shortcut, out], name=name + '/add')
    output_tensor = tf.nn.relu(output_tensor)
    return output_tensor
def generator(self, z_var, training=True):
    """Generator: map latent code `z_var` to an s x s x 3 image in [-1, 1].

    Pipeline: FC projection to an s16 x s16 x (gf_dim*8) map, two residual
    stages (each: transform branch added onto a trunk, then ReLU), then
    three upsample+conv stages ending in a tanh-activated 3-channel conv.

    Args:
        z_var: latent input tensor fed to the first fully-connected layer.
        training: forwarded to the conv batch-norm layers as `is_training`.

    Returns:
        Generated image tensor, s x s x 3, tanh-bounded.
    """
    # --- Stage 1: project z and reshape into a spatial feature map. ---
    # NOTE(review): fc_batch_normalization is not passed is_training here,
    # unlike every conv_batch_normalization call below — confirm intended.
    trunk1 = fc(z_var, self.s16 * self.s16 * self.gf_dim * 8, 'g_n1.0/fc')
    trunk1 = fc_batch_normalization(trunk1, 'g_n1.0/batch_norm')
    trunk1 = reshape(trunk1, [-1, self.s16, self.s16, self.gf_dim * 8],
                     name='g_n1.0/reshape')

    # Residual branch 1: 1x1 bottleneck, 3x3, then 3x3 back to gf_dim*8.
    # NOTE(review): this branch applies ReLU on its final BN before the
    # add, whereas stage 2's branch does not — confirm the asymmetry.
    branch1 = Conv2d(trunk1, 1, 1, self.gf_dim * 2, 1, 1, name='g_n1.1/conv2d')
    branch1 = conv_batch_normalization(branch1, 'g_n1.1/batch_norm_1',
                                       activation_fn=tf.nn.relu,
                                       is_training=training)
    branch1 = Conv2d(branch1, 3, 3, self.gf_dim * 2, 1, 1, name='g_n1.1/conv2d2')
    branch1 = conv_batch_normalization(branch1, 'g_n1.1/batch_norm_2',
                                       activation_fn=tf.nn.relu,
                                       is_training=training)
    branch1 = Conv2d(branch1, 3, 3, self.gf_dim * 8, 1, 1, name='g_n1.1/conv2d3')
    branch1 = conv_batch_normalization(branch1, 'g_n1.1/batch_norm_3',
                                       activation_fn=tf.nn.relu,
                                       is_training=training)
    stage1 = tf.nn.relu(add([trunk1, branch1], name='g_n1_res/add'))

    # --- Stage 2: upsample to s8, conv trunk, residual branch. ---
    trunk2 = UpSample(stage1, size=[self.s8, self.s8], method=1,
                      align_corners=False, name='g_n2.0/upsample')
    trunk2 = Conv2d(trunk2, 3, 3, self.gf_dim * 4, 1, 1, name='g_n2.0/conv2d')
    trunk2 = conv_batch_normalization(trunk2, 'g_n2.0/batch_norm',
                                      is_training=training)

    branch2 = Conv2d(trunk2, 1, 1, self.gf_dim * 1, 1, 1, name='g_n2.1/conv2d')
    branch2 = conv_batch_normalization(branch2, 'g_n2.1/batch_norm',
                                       activation_fn=tf.nn.relu,
                                       is_training=training)
    branch2 = Conv2d(branch2, 3, 3, self.gf_dim * 1, 1, 1, name='g_n2.1/conv2d2')
    branch2 = conv_batch_normalization(branch2, 'g_n2.1/batch_norm2',
                                       activation_fn=tf.nn.relu,
                                       is_training=training)
    branch2 = Conv2d(branch2, 3, 3, self.gf_dim * 4, 1, 1, name='g_n2.1/conv2d3')
    branch2 = conv_batch_normalization(branch2, 'g_n2.1/batch_norm3',
                                       is_training=training)
    stage2 = tf.nn.relu(add([trunk2, branch2], name='g_n2_res/add'))

    # --- Output head: upsample s4 -> s2 -> s, conv at each scale. ---
    image = UpSample(stage2, size=[self.s4, self.s4], method=1,
                     align_corners=False, name='g_OT/upsample')
    image = Conv2d(image, 3, 3, self.gf_dim * 2, 1, 1, name='g_OT/conv2d')
    image = conv_batch_normalization(image, 'g_OT/batch_norm',
                                     activation_fn=tf.nn.relu,
                                     is_training=training)
    image = UpSample(image, size=[self.s2, self.s2], method=1,
                     align_corners=False, name='g_OT/upsample2')
    image = Conv2d(image, 3, 3, self.gf_dim, 1, 1, name='g_OT/conv2d2')
    image = conv_batch_normalization(image, 'g_OT/batch_norm2',
                                     activation_fn=tf.nn.relu,
                                     is_training=training)
    image = UpSample(image, size=[self.s, self.s], method=1,
                     align_corners=False, name='g_OT/upsample3')
    # Final 3-channel conv with tanh to bound pixel values.
    image = Conv2d(image, 3, 3, 3, 1, 1, activation_fn=tf.nn.tanh,
                   name='g_OT/conv2d3')
    return image
def d_encode_image(self, training=True, inputs=None, if_reuse=None):
    """Discriminator image encoder.

    Downsamples `inputs` through four stride-2 4x4 convolutions (leaky
    ReLU activations, BN from the second conv on; the last BN has no
    activation), then adds a 1x1/3x3/3x3 residual branch and applies a
    final leaky ReLU.

    Args:
        training: forwarded to batch norm as `is_training`.
        inputs: input image tensor.
        if_reuse: variable-reuse flag forwarded to every layer.

    Returns:
        Encoded feature tensor (df_dim*8 channels).
    """
    # Trunk: progressive stride-2 downsampling, doubling channels.
    trunk = Conv2d(inputs, 4, 4, self.df_dim, 2, 2, name='d_n1.0/conv2d',
                   activation_fn=tf.nn.leaky_relu, reuse=if_reuse)
    trunk = Conv2d(trunk, 4, 4, self.df_dim * 2, 2, 2,
                   name='d_n1.0/conv2d2', reuse=if_reuse)
    trunk = conv_batch_normalization(trunk, 'd_n1.0/batch_norm',
                                     is_training=training,
                                     activation_fn=tf.nn.leaky_relu,
                                     reuse=if_reuse)
    trunk = Conv2d(trunk, 4, 4, self.df_dim * 4, 2, 2,
                   name='d_n1.0/conv2d3', reuse=if_reuse)
    trunk = conv_batch_normalization(trunk, 'd_n1.0/batch_norm2',
                                     is_training=training, reuse=if_reuse)
    trunk = Conv2d(trunk, 4, 4, self.df_dim * 8, 2, 2,
                   name='d_n1.0/conv2d4', reuse=if_reuse)
    trunk = conv_batch_normalization(trunk, 'd_n1.0/batch_norm3',
                                     is_training=training, reuse=if_reuse)

    # Residual branch: 1x1 bottleneck then two 3x3 convs back to df_dim*8;
    # no activation on the last BN before the add.
    branch = Conv2d(trunk, 1, 1, self.df_dim * 2, 1, 1,
                    name='d_n1.1/conv2d', reuse=if_reuse)
    branch = conv_batch_normalization(branch, 'd_n1.1/batch_norm',
                                      is_training=training,
                                      activation_fn=tf.nn.leaky_relu,
                                      reuse=if_reuse)
    branch = Conv2d(branch, 3, 3, self.df_dim * 2, 1, 1,
                    name='d_n1.1/conv2d2', reuse=if_reuse)
    branch = conv_batch_normalization(branch, 'd_n1.1/batch_norm2',
                                      is_training=training,
                                      activation_fn=tf.nn.leaky_relu,
                                      reuse=if_reuse)
    branch = Conv2d(branch, 3, 3, self.df_dim * 8, 1, 1,
                    name='d_n1.1/conv2d3', reuse=if_reuse)
    branch = conv_batch_normalization(branch, 'd_n1.1/batch_norm3',
                                      is_training=training, reuse=if_reuse)

    merged = add([trunk, branch], name='d_n1_res/add')
    return tf.nn.leaky_relu(merged)
def hr_d_encode_image(self, inputs=None, training=True, if_reuse=None):
    """High-resolution discriminator image encoder.

    Input: 4s x 4s x 3 image. The trunk downsamples through five stride-2
    4x4 convolutions (channels df_dim -> df_dim*32), then compresses with
    two 1x1 convolutions back down to df_dim*8; each conv (after the
    first) is followed by BN with leaky ReLU, except the final BN which
    has no activation. A 1x1/3x3/3x3 residual branch is added on top,
    followed by a final leaky ReLU.

    Args:
        inputs: input image tensor, 4s x 4s x 3.
        training: forwarded to batch norm as `is_training`.
        if_reuse: variable-reuse flag forwarded to every layer.

    Returns:
        Encoded feature tensor (s16 x s16 x df_dim*8).
    """
    prefix = 'hr_d_encode_n1.0/'

    # First conv has no BN; leaky ReLU applied directly.
    # 4s*4s*3 -> 2s*2s*df_dim
    trunk = Conv2d(inputs, 4, 4, self.df_dim, 2, 2,
                   activation_fn=tf.nn.leaky_relu,
                   name=prefix + 'conv2d1', reuse=if_reuse)

    # Remaining trunk stages, data-driven:
    # (kernel, channel multiplier, stride, conv name, bn name, activation)
    stages = [
        (4, 2, 2, 'conv2d2', 'batch_norm', tf.nn.leaky_relu),    # s*s*df*2
        (4, 4, 2, 'conv2d3', 'batch_norm2', tf.nn.leaky_relu),   # s2*s2*df*4
        (4, 8, 2, 'conv2d4', 'batch_norm3', tf.nn.leaky_relu),   # s4*s4*df*8
        (4, 16, 2, 'conv2d5', 'batch_norm4', tf.nn.leaky_relu),  # s8*s8*df*16
        (4, 32, 2, 'conv2d6', 'batch_norm5', tf.nn.leaky_relu),  # s16*s16*df*32
        (1, 16, 1, 'conv2d7', 'batch_norm6', tf.nn.leaky_relu),  # s16*s16*df*16
        (1, 8, 1, 'conv2d8', 'batch_norm7', None),               # s16*s16*df*8
    ]
    for kernel, mult, stride, conv_name, bn_name, act in stages:
        trunk = Conv2d(trunk, kernel, kernel, self.df_dim * mult,
                       stride, stride, name=prefix + conv_name,
                       reuse=if_reuse)
        if act is None:
            # Last BN before the residual add carries no activation;
            # avoid passing activation_fn so the helper's default applies.
            trunk = conv_batch_normalization(trunk, prefix + bn_name,
                                             is_training=training,
                                             reuse=if_reuse)
        else:
            trunk = conv_batch_normalization(trunk, prefix + bn_name,
                                             is_training=training,
                                             activation_fn=act,
                                             reuse=if_reuse)

    # Residual branch: 1x1 bottleneck, 3x3, then 3x3 back to df_dim*8;
    # no activation on its final BN before the add.
    branch = Conv2d(trunk, 1, 1, self.df_dim * 2, 1, 1,
                    name='hr_d_encode_n1.1/conv2d', reuse=if_reuse)
    branch = conv_batch_normalization(branch, 'hr_d_encode_n1.1/batch_norm',
                                      is_training=training,
                                      activation_fn=tf.nn.leaky_relu,
                                      reuse=if_reuse)
    branch = Conv2d(branch, 3, 3, self.df_dim * 2, 1, 1,
                    name='hr_d_encode_n1.1/conv2d2', reuse=if_reuse)
    branch = conv_batch_normalization(branch, 'hr_d_encode_n1.1/batch_norm2',
                                      is_training=training,
                                      activation_fn=tf.nn.leaky_relu,
                                      reuse=if_reuse)
    branch = Conv2d(branch, 3, 3, self.df_dim * 8, 1, 1,
                    name='hr_d_encode_n1.1/conv2d3', reuse=if_reuse)
    branch = conv_batch_normalization(branch, 'hr_d_encode_n1.1/batch_norm3',
                                      is_training=training, reuse=if_reuse)

    merged = add([trunk, branch], name='hr_d_encode_n1/add')
    return tf.nn.leaky_relu(merged)