def generator(self, z, c, reuse=None, is_train=True):
    """Conditional generator network.

    :param z: 139-dim z-noise tensor.
    :param c: condition tensor (10 categories * 10 dimensions).
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: generated image tensor in [-1, 1] (tanh output).
    """
    with tf.variable_scope("generator", reuse=reuse):
        net = tf.concat([z, c], axis=1)  # (-1, 128 + 1 + 10)

        # Project and reshape the latent code to a 2x2x512 feature map.
        net = t.dense(net, 2 * 2 * 512, name='gen-fc-1')
        net = t.batch_norm(net, is_train=is_train, name='gen-bn-1')
        net = tf.nn.relu(net)
        net = tf.reshape(net, (-1, 2, 2, 512))

        # First up-sampling stage (the only one with batch-norm).
        net = t.deconv2d(net, self.gf_dim * 8, 4, 2, name='gen-deconv2d-1')
        net = t.batch_norm(net, is_train=is_train, name='gen-bn-2')
        net = tf.nn.relu(net)

        # Remaining stages halve the filter count each time.
        for factor, idx in ((4, 2), (2, 3), (1, 4)):
            net = t.deconv2d(net, self.gf_dim * factor, 4, 2, name='gen-deconv2d-%d' % idx)
            net = tf.nn.relu(net)

        net = t.deconv2d(net, 3, 4, 2, name='gen-deconv2d-5')
        return tf.nn.tanh(net)
def generator(self, z, y, reuse=None, is_train=True):
    """G network (CIFAR-like architecture referred in the paper).

    :param z: noise tensor.
    :param y: image label tensor.
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: generated image tensor scaled to [-1, 1].
    """
    with tf.variable_scope("generator", reuse=reuse):
        net = tf.concat([z, y], axis=1)  # (-1, 110)

        net = t.dense(net, self.gf_dim, name='gen-fc-1')
        net = tf.nn.relu(net)
        # NOTE(review): the hard-coded 24 channels assume gf_dim == 4*4*24 — confirm.
        net = tf.reshape(net, (-1, 4, 4, 24))

        for i in (1, 2):
            net = t.deconv2d(net, self.gf_dim // (2 ** i), 5, 2, name='gen-deconv2d-%d' % (i + 1))
            net = t.batch_norm(net, is_train=is_train, reuse=reuse, name="gen-bn-%d" % i)
            net = tf.nn.relu(net)

        net = t.deconv2d(net, self.channel, 5, 2, name='gen-deconv2d-4')
        return tf.nn.tanh(net)  # scaling to [-1, 1]
def generator(self, z, y=None, share_params=False, reuse=False, training=True, name=""):
    """Shared-trunk generator; only the output head lives in a per-name scope.

    :param z: noise tensor.
    :param y: optional label tensor, concatenated to z when supplied.
    :param share_params: re-use the shared trunk variables when True.
    :param reuse: re-use the per-name head variables when True.
    :param training: enables batch-norm update ops while training.
    :param name: suffix identifying the per-generator head scope.
    :return: generated image tensor in [0, 1] (sigmoid output).
    """
    # BUG FIX: the original branched on `if y is None` and then concatenated z
    # with y — i.e. it concatenated exactly when y was MISSING, which passes
    # None into tf.concat and crashes. Concatenate only when a label is given.
    if y is not None:
        x = tf.concat([z, y], axis=1)
    else:
        x = z

    x = tf.layers.flatten(x)

    x = tf.layers.dense(x, self.fc_unit, reuse=share_params, name='gen-dense-0')
    x = t.prelu(x, reuse=share_params, name='gen-prelu-0')

    x = tf.layers.dense(x, self.gf_dim * 8 * 7 * 7, reuse=share_params, name='gen-dense-1')
    x = t.batch_norm(x, reuse=share_params, is_train=training, name='gen-bn-0')
    x = t.prelu(x, reuse=share_params, name='gen-prelu-1')

    x = tf.reshape(x, (self.batch_size, 7, 7, self.gf_dim * 8))

    # Two stride-2 up-sampling stages shared across generators.
    x = t.deconv2d(x, f=self.gf_dim * 4, k=3, s=2, reuse=share_params, name='gen-deconv2d-1')
    x = t.batch_norm(x, reuse=share_params, is_train=training, name="gen-bn-1")
    x = t.prelu(x, reuse=share_params, name='gen-prelu-2')

    x = t.deconv2d(x, f=self.gf_dim * 2, k=3, s=2, reuse=share_params, name='gen-deconv2d-2')
    x = t.batch_norm(x, reuse=share_params, is_train=training, name="gen-bn-2")
    x = t.prelu(x, reuse=share_params, name='gen-prelu-3')

    # Per-generator output head (not shared).
    with tf.variable_scope("generator-%s" % name, reuse=reuse):
        x = t.deconv2d(x, f=self.channel, k=6, s=1, reuse=False, name='gen-' + name + '-deconv2d-3')
        x = tf.nn.sigmoid(x, name='gen' + name + '-sigmoid-0')

    return x
def generator(self, z, y=None, reuse=None, is_train=True):
    """Generator: FC projection to 8x8, three up-sampling deconvs, sigmoid.

    :param z: embedding/noise tensor.
    :param y: optional label tensor (conditional path not implemented).
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: generated image tensor in [0, 1], shape (-1, 64, 64, 3).
    :raises NotImplementedError: if a label tensor `y` is supplied.
    """
    with tf.variable_scope("generator", reuse=reuse):
        # BUG FIX: the original did `if y: raise NotImplemented(...)`.
        # `NotImplemented` is a constant, not an exception class, so raising it
        # is itself a TypeError — and truth-testing a tf.Tensor is invalid.
        # Compare against None and raise the proper exception type.
        if y is not None:
            raise NotImplementedError("[-] Not Implemented Yet...")

        x = t.dense(z, f=self.fc_unit, name='gen-fc-0')
        x = tf.nn.leaky_relu(x)

        x = tf.reshape(x, [-1, 8, 8, self.fc_unit // (8 * 8)])

        # Three stride-2 (default) deconv stages doubling the filter count.
        for i in range(1, 4):
            x = t.deconv2d(x, f=self.gf_dim * (2 ** i), name="gen-conv2d-%d" % i)
            x = t.batch_norm(x, is_train=is_train)
            x = tf.nn.leaky_relu(x)

        x = t.deconv2d(x, f=3, s=1, name="gen-conv2d-4")  # (-1, 64, 64, 3)
        x = tf.sigmoid(x)  # [0, 1]

    return x
def generator(self, z, reuse=None, is_train=True):
    """DCGAN-style generator.

    :param z: latent embedding tensor.
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: generated image tensor in [-1, 1] (tanh output).
    """
    with tf.variable_scope("generator", reuse=reuse):
        # Treat z as a 1x1 spatial map and expand it to 4x4 with a VALID deconv.
        net = tf.reshape(z, (-1, 1, 1, self.z_dim))
        net = t.deconv2d(net, self.df_dim * 8, 4, 1, pad='VALID', name='gen-deconv2d-1')
        net = t.batch_norm(net, is_train=is_train, name='gen-bn-1')
        net = tf.nn.relu(net)

        # Three stride-2 stages, halving the filter count each time.
        for step in (1, 2, 3):
            net = t.deconv2d(net, self.df_dim * 8 // (2 ** step), 4, 2, name='gen-deconv2d-%d' % (step + 1))
            net = t.batch_norm(net, is_train=is_train, name='gen-bn-%d' % (step + 1))
            net = tf.nn.relu(net)

        net = t.deconv2d(net, self.channel, 4, 2, name='gen-deconv2d-5')
        return tf.nn.tanh(net)
def generator(self, y, z, reuse=None, is_train=True):
    """G network (CIFAR-like architecture referred in the paper).

    :param y: image label tensor.
    :param z: image noise tensor.
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: generated image tensor in [0, 1] (sigmoid output).
    """
    with tf.variable_scope("generator", reuse=reuse):
        net = tf.concat([z, y], axis=1)

        net = tf.layers.dense(net, self.gf_dim * 2, name='g-fc-0')
        net = tf.nn.relu(net)

        net = tf.layers.dense(net, self.gf_dim * 7 * 7, name='g-fc-1')
        net = tf.nn.relu(net)

        net = tf.reshape(net, [-1, 7, 7, self.gf_dim])

        net = t.deconv2d(net, f=self.gf_dim // 2, k=5, s=2, name='g-deconv-1')
        net = t.batch_norm(net, is_train=is_train)
        net = tf.nn.relu(net)

        net = t.deconv2d(net, f=1, k=5, s=2, name='g-deconv-2')  # single output channel
        return tf.nn.sigmoid(net)
def generator(self, z, reuse=None, is_train=True):
    """Generator following the architecture referred in the paper:
    (1024)4c - (512)4c2s - (256)4c2s - (128)4c2s - (3)4c2s.

    :param z: embedding tensor.
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: generated image tensor in [-1, 1] (tanh output).
    """
    with tf.variable_scope("generator", reuse=reuse):
        # Project and reshape z to a 4x4x(8*gf_dim) feature map.
        net = t.dense(z, self.gf_dim * 8 * 4 * 4, name='gen-fc-1')
        net = tf.reshape(net, (-1, 4, 4, self.gf_dim * 8))
        net = t.batch_norm(net, is_train=is_train, name='gen-bn-1')
        net = tf.nn.relu(net)

        # Three stride-2 deconvs, halving the filter count each stage.
        for i, factor in enumerate((4, 2, 1), start=1):
            net = t.deconv2d(net, self.gf_dim * factor, 4, 2, name='gen-deconv2d-%d' % i)
            net = t.batch_norm(net, is_train=is_train, name='gen-bn-%d' % (i + 1))
            net = tf.nn.relu(net)

        net = t.deconv2d(net, self.channel, 4, 2, name='gen-deconv2d-4')
        return tf.nn.tanh(net)
def generator(self, z, reuse=None):
    """Generator: project z to a 4x4 map, three stride-2 deconvs, tanh image.

    :param z: latent tensor.
    :param reuse: re-use variables in the scope when True.
    :return: generated image tensor in [-1, 1].
    """
    with tf.variable_scope('generator', reuse=reuse):
        net = t.dense(z, self.gf_dim * 4 * 4 * 4, name='gen-fc-1')
        net = t.batch_norm(net, reuse=reuse, name='gen-bn-1')
        net = tf.nn.relu(net)
        net = tf.reshape(net, (-1, 4, 4, self.gf_dim * 4))

        for i in (1, 2, 3):
            net = t.deconv2d(net, self.gf_dim * 4 // (2 ** (i - 1)), 5, 2, name='gen-deconv2d-%d' % i)
            net = t.batch_norm(net, reuse=reuse, name='gen-bn-%d' % (i + 1))
            net = tf.nn.relu(net)

        # NOTE(review): the layer index jumps from 3 to 5; renaming would break
        # existing checkpoints, so the original name is kept.
        net = t.deconv2d(net, self.channel, 5, 1, name='gen-deconv2d-5')
        return tf.nn.tanh(net)
def generator(self, z, y=None, share_params=False, reuse=False, name=""):
    """Shared-trunk generator; only the output head lives in a per-name scope.

    :param z: noise tensor.
    :param y: label tensor (currently unused by this trunk).
    :param share_params: re-use the shared trunk variables when True.
    :param reuse: re-use the per-name head variables when True.
    :param name: suffix identifying the per-generator head scope.
    :return: generated image tensor in [0, 1] (sigmoid output).
    """
    net = t.dense(z, self.fc_g_unit, reuse=share_params, name='gen-fc-1')
    net = t.batch_norm(net, reuse=share_params, name='gen-bn-1')
    net = t.prelu(net, reuse=share_params, name='gen-prelu-1')

    net = t.dense(net, self.gf_dim * 8 * 7 * 7, reuse=share_params, name='gen-fc-2')
    net = t.batch_norm(net, reuse=share_params, name='gen-bn-2')
    net = t.prelu(net, reuse=share_params, name='gen-prelu-2')

    net = tf.reshape(net, (-1, 7, 7, self.gf_dim * 8))

    # Two shared stride-2 up-sampling stages.
    for i in (1, 2):
        net = t.deconv2d(net, f=self.gf_dim * 4 // i, k=3, s=2, reuse=share_params, name='gen-deconv2d-%d' % i)
        net = t.batch_norm(net, reuse=share_params, name="gen-bn-%d" % (i + 2))
        net = t.prelu(net, reuse=share_params, name='gen-prelu-%d' % (i + 2))

    # Per-generator output head (not shared).
    with tf.variable_scope("generator-%s" % name, reuse=reuse):
        net = t.deconv2d(net, f=self.channel, k=6, s=1, reuse=False, name='gen-' + name + '-deconv2d-3')
        net = tf.nn.sigmoid(net, name='gen' + name + '-sigmoid-1')

    return net
def generator(self, x, reuse=None):
    """Residual image-to-image generator with a global input skip connection.

    :param x: input image tensor.
    :param reuse: re-use variables in the scope when True.
    :return: input + tanh(residual) image tensor.
    """
    with tf.variable_scope('generator', reuse=reuse):
        def residual_block(inp, f, name=""):
            # NOTE(review): the second conv has no instance-norm and ReLU is
            # applied before the skip add — unusual, reproduced as written.
            with tf.variable_scope(name, reuse=reuse):
                skip = tf.identity(inp, name='gen-skip_connection-1')
                h = t.conv2d(inp, f, 3, 1, name='gen-conv2d-1')
                h = t.instance_norm(h, reuse=reuse, name='gen-inst_norm-1')
                h = tf.nn.relu(h)
                h = t.conv2d(h, f, 3, 1, name='gen-conv2d-2')
                h = tf.nn.relu(h)
                return skip + h

        shortcut = tf.identity(x, name='shortcut-init')

        net = t.conv2d(x, self.gf_dim * 1, 7, 1, name='gen-conv2d-1')
        net = t.instance_norm(net, affine=False, reuse=reuse, name='gen-inst_norm-1')
        net = tf.nn.relu(net)

        # Two stride-2 down-sampling convs.
        for i in (1, 2):
            net = t.conv2d(net, self.gf_dim * (2 ** i), 3, 2, name='gen-conv2d-%d' % (i + 1))
            net = t.instance_norm(net, affine=False, reuse=reuse, name='gen-inst_norm-%d' % (i + 1))
            net = tf.nn.relu(net)

        # 9 Residual Blocks at the bottleneck.
        for i in range(9):
            net = residual_block(net, self.gf_dim * 4, name='gen-residual_block-%d' % (i + 1))

        # Two stride-2 up-sampling deconvs.
        # NOTE(review): filter counts grow (2x, 4x) here instead of mirroring
        # the encoder (2x, 1x) — possibly unintended; confirm before changing.
        for i in (1, 2):
            net = t.deconv2d(net, self.gf_dim * (2 ** i), 3, 2, name='gen-deconv2d-%d' % i)
            net = t.instance_norm(net, affine=False, reuse=reuse, name='gen-inst_norm-%d' % (i + 3))
            net = tf.nn.relu(net)

        net = t.conv2d(net, self.gf_dim * 1, 7, 1, name='gen-conv2d-4')
        net = tf.nn.tanh(net)
        return shortcut + net
def u(x, f, name=''):
    """Up-sampling block: stride-2 deconv, instance-norm, then ReLU."""
    h = t.deconv2d(x, f=f, k=3, s=2, name='gen-u-deconv2d-%s' % name)
    h = t.instance_norm(h, name='gen-u-ins_norm-%s' % name)
    return tf.nn.relu(h)
def conv_in_relu(x, f, k, s, de=False, name=""):
    """(De)conv -> instance-norm -> ReLU building block.

    :param de: when True use a transposed conv instead of a regular conv.
    """
    layer = t.deconv2d if de else t.conv2d
    h = layer(x, f=f, k=k, s=s)
    h = t.instance_norm(h, name=name)
    return tf.nn.relu(h)
def decoder(self, x, reuse=None):
    """Decoder: (128)4c2s - (64)4c2s - (3)4c2s.

    :param x: embedding tensor.
    :param reuse: re-use variables in the scope when True.
    :return: decoded image tensor in [-1, 1] (tanh output).
    """
    with tf.variable_scope('decoder', reuse=reuse):
        net = x
        # Two deconv/BN/ReLU stages, halving the filter count.
        for i, factor in enumerate((2, 1), start=1):
            net = t.deconv2d(net, self.df_dim * factor, 4, 2, name='dec-deconv2d-%d' % i)
            net = t.batch_norm(net, name='dec-bn-%d' % i)
            net = tf.nn.relu(net)

        net = t.deconv2d(net, self.channel, 4, 2, name='dec-deconv2d-3')
        return tf.nn.tanh(net)
def generator(self, z, reuse=None, is_train=True):
    """Generator: two FC layers, reshape to 7x7, two deconvs, sigmoid output.

    :param z: noise tensor.
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: generated image tensor in [0, 1].
    """
    with tf.variable_scope('generator', reuse=reuse):
        net = t.dense(z, self.fc_unit, name='gen-fc-0')
        net = t.batch_norm(net, is_train=is_train)
        net = tf.nn.leaky_relu(net)

        net = t.dense(net, self.gf_dim * 4 * 7 * 7, name='gen-fc-1')
        net = t.batch_norm(net, is_train=is_train)
        net = tf.nn.leaky_relu(net)

        net = tf.reshape(net, [-1, 7, 7, self.gf_dim * 4])

        net = t.deconv2d(net, self.gf_dim * 2, name='gen-deconv2d-0')
        net = t.batch_norm(net, is_train=is_train)
        net = tf.nn.leaky_relu(net)

        logits = t.deconv2d(net, self.channel, name='gen-deconv2d-1')
        return tf.nn.sigmoid(logits)
def decoder(self, z, reuse=None):
    """Decoder: three deconv/BN/lrelu up-sampling stages, then a tanh image.

    :param z: embedding tensor.
    :param reuse: re-use variables in the scope when True.
    :return: decoded image tensor in [-1, 1].
    """
    with tf.variable_scope('decoder', reuse=reuse):
        x = z
        for i in range(1, 4):
            x = t.deconv2d(x, self.df_dim * 8 // (2 ** i), 4, 2, name='dec-deconv2d-%d' % i)
            x = t.batch_norm(x, name='dec-bn-%d' % i)
            x = tf.nn.leaky_relu(x)

        # BUG FIX: the final layer was named 'enc-deconv2d-4' (copy-paste from
        # the encoder) inside the decoder scope; renamed to the 'dec-' prefix
        # used by every other layer here. NOTE: this changes the TF variable
        # name, so checkpoints saved under the old name will not load.
        x = t.deconv2d(x, self.channel, 4, 2, name='dec-deconv2d-4')
        x = tf.nn.tanh(x)

    return x
def generator(self, z, reuse=None, is_train=True):
    """DCGAN-style generator: 4x4 projection, three deconvs, tanh output.

    :param z: noise tensor.
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: generated image tensor in [-1, 1].
    """
    with tf.variable_scope('generator', reuse=reuse):
        net = t.dense(z, self.gf_dim * 8 * 4 * 4)
        net = tf.reshape(net, [-1, 4, 4, self.gf_dim * 8])
        net = t.batch_norm(net, is_train=is_train)
        net = tf.nn.leaky_relu(net)

        for idx, factor in ((1, 4), (2, 2)):
            net = t.deconv2d(net, self.gf_dim * factor, name='g-deconv-%d' % idx)
            net = t.batch_norm(net, is_train=is_train)
            net = tf.nn.leaky_relu(net)

        logits = t.deconv2d(net, self.channel, name='g-deconv-3')
        return tf.nn.tanh(logits)
def generator(self, z, reuse=None, is_train=True):
    """Generator — same as the DCGAN Gen Net.

    :param z: noise tensor.
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: generated image tensor in [-1, 1].
    """
    with tf.variable_scope('generator', reuse=reuse):
        net = t.dense(z, self.gf_dim * 4 * 4 * 4, name='gen-fc-1')
        net = tf.reshape(net, [-1, 4, 4, self.gf_dim * 4])
        net = t.batch_norm(net, is_train=is_train, name='gen-bn-1')
        net = tf.nn.relu(net)

        # Two stride-2 deconvs, halving the filter count each stage.
        for i, factor in enumerate((2, 1), start=1):
            net = t.deconv2d(net, self.gf_dim * factor, 5, 2, name='gen-deconv2d-%d' % i)
            net = t.batch_norm(net, is_train=is_train, name='gen-bn-%d' % (i + 1))
            net = tf.nn.relu(net)

        net = t.deconv2d(net, self.channel, 5, 2, name='gen-deconv2d-3')
        return tf.nn.tanh(net)
def generator(self, z, reuse=None, is_train=True):
    """Generator: FC projection to 7x7, two stride-2 deconvs, sigmoid image.

    :param z: noise tensor.
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: single-channel generated image tensor in [0, 1].
    """
    with tf.variable_scope("generator", reuse=reuse):
        x = t.dense(z, self.gf_dim * 7 * 7, name='gen-fc-1')
        x = t.batch_norm(x, name='gen-bn-1')
        x = tf.nn.leaky_relu(x, alpha=0.3)

        x = tf.reshape(x, [-1, 7, 7, self.gf_dim])

        for i in range(1, 3):  # creates 'gen-deconv2d-2' and 'gen-deconv2d-3'
            x = t.deconv2d(x, self.gf_dim, 5, 2, name='gen-deconv2d-%d' % (i + 1))
            x = t.batch_norm(x, is_train=is_train, name='gen-bn-%d' % (i + 1))
            x = tf.nn.leaky_relu(x, alpha=0.3)

        # BUG FIX: this layer was also named 'gen-deconv2d-3', colliding with
        # the last loop iteration and raising a variable already-exists error
        # at graph build; continue the numbering at 4 instead.
        x = t.deconv2d(x, 1, 5, 1, name='gen-deconv2d-4')
        x = tf.nn.sigmoid(x)

    return x
def generator(self, z, reuse=None):
    """Generator, per the paper architecture:
    FC1024_BR-FC7x7x128_BR-(64)4dc2s_BR-(1)4dc2s_S.

    :param z: embedding/noise tensor.
    :param reuse: re-use variables in the scope when True.
    :return: single-channel generated image tensor in [0, 1].
    """
    with tf.variable_scope("generator", reuse=reuse):
        x = t.dense(z, self.fc_unit * 4, name='g-fc-1')
        x = tf.nn.leaky_relu(x)

        x = t.dense(x, 7 * 7 * self.fc_unit // 2, name='g-fc-2')
        x = tf.nn.leaky_relu(x)

        # BUG FIX: the original applied tf.layers.flatten here, leaving a
        # rank-2 tensor that the NHWC deconv below cannot consume. The
        # FC7x7x(fc_unit//2) projection must be reshaped to a spatial map,
        # matching the "FC7x7x128" stage in the referred architecture.
        x = tf.reshape(x, (-1, 7, 7, self.fc_unit // 2))

        x = t.deconv2d(x, self.gf_dim, 4, 2, name='g-deconv2d-1')
        x = tf.nn.leaky_relu(x)

        x = t.deconv2d(x, 1, 4, 2, name='g-deconv2d-2')
        x = tf.nn.sigmoid(x)

    return x
def decoder(self, x, reuse=None):
    """Decoder: FC projection to 7x7xgf_dim, one stride-2 deconv, sigmoid.

    :param x: embedding tensor.
    :param reuse: re-use variables in the scope when True.
    :return: single-channel decoded image tensor in [0, 1].
    """
    with tf.variable_scope('decoder', reuse=reuse):
        x = t.dense(x, self.gf_dim * 7 * 7, name='dec-fc-1')
        x = tf.nn.leaky_relu(x)

        # BUG FIX: the original applied tf.layers.flatten here, but t.deconv2d
        # requires a rank-4 NHWC tensor; reshape the gf_dim*7*7 projection
        # into a 7x7 spatial feature map instead.
        x = tf.reshape(x, (-1, 7, 7, self.gf_dim))

        x = t.deconv2d(x, 1, 4, 2, name='dec-deconv2d-1')
        x = tf.nn.sigmoid(x)

    return x
def generator(self, z, scope_name, reuse=None, is_train=True):
    """Generator built under a caller-chosen variable scope.

    :param z: noise tensor.
    :param scope_name: name of the variable scope to build the network in.
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: 3-channel logits image tensor (no output activation).
    """
    with tf.variable_scope("%s" % scope_name, reuse=reuse):
        x = t.dense(z, 4 * 4 * 8 * self.gf_dim)
        x = tf.nn.leaky_relu(x)

        # NOTE(review): reshaping 4*4*8*gf_dim features to (-1, 4, 4, 8) folds
        # gf_dim into the batch dimension — possibly (-1, 4, 4, 8 * gf_dim)
        # was intended; kept as-is, confirm before changing.
        x = tf.layers.flatten(x)
        x = tf.reshape(x, (-1, 4, 4, 8))

        # BUG FIX: np.log2 returns a float and range() requires an int, so the
        # original raised "TypeError: 'numpy.float64' object cannot be
        # interpreted as an integer"; cast the exponent explicitly.
        for i in range(int(np.log2(self.height)) - 2):  # 0 ~ 3 for height 64
            x = t.deconv2d(x, self.gf_dim * (2 ** (i + 1)), k=4, s=2)
            x = t.batch_norm(x, is_train=is_train)
            x = tf.nn.leaky_relu(x)

        x = t.conv2d(x, 3)

    return x
def generator(self, z, reuse=None, is_train=True):
    """Generator: 4x4 projection, three stride-2 deconvs, 3x3 conv, sigmoid.

    :param z: noise tensor.
    :param reuse: re-use variables in the scope when True.
    :param is_train: enables batch-norm update ops while training.
    :return: generated image tensor in [0, 1].
    """
    with tf.variable_scope('generator', reuse=reuse):
        net = t.dense(z, self.gf_dim * 8 * 4 * 4, name='gen-fc-1')
        net = tf.reshape(net, [-1, 4, 4, self.gf_dim * 8])
        net = t.batch_norm(net, is_train=is_train, name='gen-bn-1')
        net = tf.nn.relu(net)

        for i in (1, 2, 3):
            net = t.deconv2d(net, self.gf_dim * 4, 3, 2, name='gen-deconv2d-%d' % i)
            net = t.batch_norm(net, is_train=is_train, name='gen-bn-%d' % (i + 1))
            net = tf.nn.relu(net)

        net = t.conv2d(net, self.channel, 3, name='gen-conv2d-1')
        return tf.nn.sigmoid(net)
def build_fcn(self):
    # Build an FCN-style segmentation head on top of a VGG-19 backbone.
    # NOTE(review): this definition appears truncated — it ends right after
    # the first skip fusion ('fuse_1') with no return and no further
    # up-sampling stages; confirm the remainder exists elsewhere in the file.
    vgg19_net = vgg19.VGG19(image=self.x)

    # Start from the deepest VGG-19 pooling layer.
    net = vgg19_net.vgg19_net['pool5']

    # Convolutionalized fc6 (7x7 conv) with ReLU and dropout.
    net = t.conv2d(net, 4096, k=7, s=1, name='conv6_1')
    net = tf.nn.relu(net, name='relu6_1')
    net = tf.nn.dropout(net, self.do_rate, name='dropout-6_1')

    # Convolutionalized fc7 (1x1 conv) with ReLU and dropout.
    net = t.conv2d(net, 4096, k=1, s=1, name='conv7_1')
    net = tf.nn.relu(net, name='relu7_1')
    net = tf.nn.dropout(net, self.do_rate, name='dropout-7_1')

    # Per-class score map (1x1 conv down to n_classes channels).
    feature = t.conv2d(net, self.n_classes, k=1, s=1, name='conv8_1')

    # Up-sample the score map to pool4's channel count and fuse the skip.
    net = t.deconv2d(feature, vgg19_net.vgg19_net['pool4'].get_shape()[3], name='deconv_1')
    net = tf.add(net, vgg19_net.vgg19_net['pool4'], name='fuse_1')
def generator(self, x, reuse=None):
    """Residual image-to-image generator.

    :param x: input image tensor.
    :param reuse: re-use variables in the scope when True.
    :return: translated image tensor in [-1, 1] (tanh output).
    """
    with tf.variable_scope("generator", reuse=reuse):
        def conv_in_relu(inp, f, k, s, de=False, name=""):
            # (De)conv -> instance-norm -> ReLU building block.
            op = t.deconv2d if de else t.conv2d
            h = op(inp, f=f, k=k, s=s)
            h = t.instance_norm(h, name=name)
            return tf.nn.relu(h)

        net = conv_in_relu(x, f=self.gf_dim * 1, k=7, s=1, name="1")

        # down-sampling
        net = conv_in_relu(net, f=self.gf_dim * 2, k=4, s=2, name="2")
        net = conv_in_relu(net, f=self.gf_dim * 4, k=4, s=2, name="3")

        # bottleneck: six residual blocks
        for i in range(6):
            net = residual_block(net, f=self.gf_dim * 4, name=str(i))

        # up-sampling
        net = conv_in_relu(net, self.gf_dim * 2, k=4, s=2, de=True, name="4")
        net = conv_in_relu(net, self.gf_dim * 1, k=4, s=2, de=True, name="5")

        net = t.deconv2d(net, f=3, k=7, s=1)
        return tf.nn.tanh(net)