def discriminator_dcgan(hparams, x, scope_name, train, reuse):
    """DCGAN discriminator: four strided conv layers, then a linear logit.

    The architecture is from https://github.com/carpedm20/DCGAN-tensorflow
    License: https://github.com/carpedm20/DCGAN-tensorflow/blob/master/LICENSE

    Args:
        hparams: object with a `batch_size` attribute.
        x: input image batch.
        scope_name: variable scope to build the graph under.
        train: whether batch norm runs in training mode.
        reuse: whether to reuse variables already created in `scope_name`.

    Returns:
        (d, d_logit): sigmoid probability and the pre-sigmoid logit.
    """
    df_dim = 64  # dimension of discriminator filters in first conv layer
    with tf.variable_scope(scope_name) as scope:
        if reuse:
            scope.reuse_variables()

        d_bn1 = ops.batch_norm(name='d_bn1')
        d_bn2 = ops.batch_norm(name='d_bn2')
        d_bn3 = ops.batch_norm(name='d_bn3')

        # Conv stack: each layer doubles the filter count; no batch norm
        # on the first layer, matching the DCGAN recipe.
        net = ops.lrelu(ops.conv2d(x, df_dim, name='d_h0_conv'))
        net = ops.conv2d(net, df_dim * 2, name='d_h1_conv')
        net = ops.lrelu(d_bn1(net, train=train))
        net = ops.conv2d(net, df_dim * 4, name='d_h2_conv')
        net = ops.lrelu(d_bn2(net, train=train))
        net = ops.conv2d(net, df_dim * 8, name='d_h3_conv')
        net = ops.lrelu(d_bn3(net, train=train))

        # Flatten and project to a single logit per example.
        d_logit = ops.linear(tf.reshape(net, [hparams.batch_size, -1]), 1, 'd_h3_lin')
        d = tf.nn.sigmoid(d_logit)

    return d, d_logit
def discriminator_fc(hparams, x, scope_name, train, reuse):  # pylint: disable = W0613
    """Fully connected discriminator: two 100-unit hidden layers, then a logit.

    Args:
        hparams: unused (kept for signature parity with the other
            discriminators; hence the pylint disable above).
        x: flattened input batch.
        scope_name: variable scope to build the graph under.
        train: whether batch norm runs in training mode.
        reuse: whether to reuse variables already created in `scope_name`.

    Returns:
        (d, d_logit): sigmoid probability and the pre-sigmoid logit.
    """
    with tf.variable_scope(scope_name) as scope:
        if reuse:
            scope.reuse_variables()

        # Two identical hidden stanzas; names match the originals exactly
        # ('d_bn0'/'d_h0_lin', 'd_bn1'/'d_h1_lin') so checkpoints still load.
        hidden = x
        for idx in range(2):
            bn = ops.batch_norm(name='d_bn{}'.format(idx))
            hidden = ops.linear(hidden, 100, 'd_h{}_lin'.format(idx))
            hidden = ops.lrelu(bn(hidden, train=train))

        d_logit = ops.linear(hidden, 1, 'd_h2_lin')
        d = tf.nn.sigmoid(d_logit)

    return d, d_logit
def generator_dcgan(hparams, y, z, scope_name, train, reuse):
    """Conditional DCGAN generator for 28x28 images.

    The label `y` is concatenated to the latent vector and, as a
    spatially-broadcast map `yb`, to every intermediate feature map
    (conditional-GAN style conditioning).

    The architecture is from https://github.com/carpedm20/DCGAN-tensorflow
    License: https://github.com/carpedm20/DCGAN-tensorflow/blob/master/LICENSE

    Args:
        hparams: object with `batch_size`, `y_dim`, and `c_dim` attributes.
        y: label tensor with batch_size * y_dim elements (reshaped below).
        z: latent noise batch.
        scope_name: variable scope to build the graph under.
        train: whether batch norm runs in training mode.
        reuse: whether to reuse variables already created in `scope_name`.

    Returns:
        x_gen: generated images in [0, 1] (sigmoid output),
            shape [batch_size, 28, 28, c_dim].
    """
    s = 28  # output image side length
    gf_dim = 64  # dimension of generator filters in first conv layer
    gfc_dim = 1024  # dimension of generator units for fully connected layer
    with tf.variable_scope(scope_name) as scope:
        if reuse:
            scope.reuse_variables()

        s2, s4 = int(s / 2), int(s / 4)

        # Label map broadcastable over spatial dims, for conv_cond_concat.
        yb = tf.reshape(y, [hparams.batch_size, 1, 1, hparams.y_dim])
        z = tf.concat([z, y], 1)

        g_bn0 = ops.batch_norm(name='g_bn0')
        h0 = ops.linear(z, gfc_dim, 'g_h0_lin')
        h0 = tf.nn.relu(g_bn0(h0, train=train))
        h0 = tf.concat([h0, y], 1)

        # Project to a stack of s4 x s4 feature maps.
        g_bn1 = ops.batch_norm(name='g_bn1')
        h1 = ops.linear(h0, gf_dim * 2 * s4 * s4, 'g_h1_lin')
        h1 = tf.nn.relu(g_bn1(h1, train=train))
        h1 = tf.reshape(h1, [hparams.batch_size, s4, s4, gf_dim * 2])
        h1 = ops.conv_cond_concat(h1, yb)

        # Upsample s4 -> s2 -> s.
        g_bn2 = ops.batch_norm(name='g_bn2')
        h2 = ops.deconv2d(h1, [hparams.batch_size, s2, s2, gf_dim * 2], name='g_h2')
        h2 = tf.nn.relu(g_bn2(h2, train=train))
        h2 = ops.conv_cond_concat(h2, yb)

        x_gen = tf.nn.sigmoid(
            ops.deconv2d(h2, [hparams.batch_size, s, s, hparams.c_dim], name='g_h3'))

    return x_gen
def discriminator_dcgan(hparams, x, y, scope_name, train, reuse):
    """Conditional DCGAN discriminator: labels are concatenated at every layer.

    The architecture is from https://github.com/carpedm20/DCGAN-tensorflow
    License: https://github.com/carpedm20/DCGAN-tensorflow/blob/master/LICENSE

    Args:
        hparams: object with `batch_size`, `y_dim`, and `c_dim` attributes.
        x: input image batch.
        y: label tensor with batch_size * y_dim elements (reshaped below).
        scope_name: variable scope to build the graph under.
        train: whether batch norm runs in training mode.
        reuse: whether to reuse variables already created in `scope_name`.

    Returns:
        (d, d_logit): sigmoid probability and the pre-sigmoid logit.
    """
    df_dim = 64  # dimension of discriminator filters in first conv layer
    dfc_dim = 1024  # dimension of discriminator units for fully connected layer
    with tf.variable_scope(scope_name) as scope:
        if reuse:
            scope.reuse_variables()

        # Label map broadcastable over spatial dims, for conv_cond_concat.
        yb = tf.reshape(y, [hparams.batch_size, 1, 1, hparams.y_dim])

        net = ops.conv_cond_concat(x, yb)
        net = ops.lrelu(
            ops.conv2d(net, hparams.c_dim + hparams.y_dim, name='d_h0_conv'))
        net = ops.conv_cond_concat(net, yb)

        d_bn1 = ops.batch_norm(name='d_bn1')
        net = ops.conv2d(net, df_dim + hparams.y_dim, name='d_h1_conv')
        net = ops.lrelu(d_bn1(net, train=train))

        # Flatten and switch to fully connected layers, re-appending the label.
        net = tf.reshape(net, [hparams.batch_size, -1])
        net = tf.concat([net, y], 1)

        d_bn2 = ops.batch_norm(name='d_bn2')
        net = ops.linear(net, dfc_dim, 'd_h2_lin')
        net = ops.lrelu(d_bn2(net, train=train))
        net = tf.concat([net, y], 1)

        d_logit = ops.linear(net, 1, 'd_h3_lin')
        d = tf.nn.sigmoid(d_logit)

    return d, d_logit
def generator_dcgan(hparams, z, scope_name, train, reuse):
    """DCGAN generator for 64x64 images: linear projection plus four deconvs.

    The architecture is from https://github.com/carpedm20/DCGAN-tensorflow
    License: https://github.com/carpedm20/DCGAN-tensorflow/blob/master/LICENSE

    Args:
        hparams: object with `batch_size` and `c_dim` attributes.
        z: latent noise batch.
        scope_name: variable scope to build the graph under.
        train: whether batch norm runs in training mode.
        reuse: whether to reuse variables already created in `scope_name`.

    Returns:
        x_gen: generated images in [-1, 1] (tanh output),
            shape [batch_size, 64, 64, c_dim].
    """
    gf_dim = 64  # dimension of generator filters in first conv layer
    with tf.variable_scope(scope_name) as scope:
        if reuse:
            scope.reuse_variables()

        s = 64  # output image side length
        s2, s4, s8, s16 = s // 2, s // 4, s // 8, s // 16

        g_bn0 = ops.batch_norm(name='g_bn0')
        g_bn1 = ops.batch_norm(name='g_bn1')
        g_bn2 = ops.batch_norm(name='g_bn2')
        g_bn3 = ops.batch_norm(name='g_bn3')

        # Project `z` and reshape into a stack of s16 x s16 feature maps.
        net = ops.linear(z, gf_dim * 8 * s16 * s16, 'g_h0_lin')
        net = tf.reshape(net, [-1, s16, s16, gf_dim * 8])
        net = tf.nn.relu(g_bn0(net, train=train))

        # Upsample s16 -> s8 -> s4 -> s2 -> s, halving filters each step.
        net = ops.deconv2d(net, [hparams.batch_size, s8, s8, gf_dim * 4], name='g_h1')
        net = tf.nn.relu(g_bn1(net, train=train))

        net = ops.deconv2d(net, [hparams.batch_size, s4, s4, gf_dim * 2], name='g_h2')
        net = tf.nn.relu(g_bn2(net, train=train))

        net = ops.deconv2d(net, [hparams.batch_size, s2, s2, gf_dim], name='g_h3')
        net = tf.nn.relu(g_bn3(net, train=train))

        net = ops.deconv2d(net, [hparams.batch_size, s, s, hparams.c_dim], name='g_h4')
        x_gen = tf.nn.tanh(net)

    return x_gen