    def gan(self, x_image, reuse=False, features=False):

        with tf.variable_scope('gan') as scope:
            if reuse:
                scope.reuse_variables()
            c_i = x_image

            c_i = lin_bn_lrelu(c_i, 1000, self.bn_settings, name="gan_dens_%d" % 0)

            if features:
                # Return intermediate features from the second dense layer
                return lin(c_i, 1000, name="gan_dens_%d" % 1)
            c_i = lin_bn_lrelu(c_i, 1000, self.bn_settings, name="gan_dens_%d" % 1)
            y = lin(c_i, 1, name="gan_out")

            return y
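
Every snippet here leans on small layer helpers (lin, lin_lrelu, lin_bn_lrelu, conv) whose definitions are not shown. A minimal sketch of the dense helpers, assuming lin(x, out_dim, name) is a plain fully connected layer with no activation and that bn_settings is a dict carrying the training-phase flag, might look like the following; it is an illustration, not the original implementation.

import tensorflow as tf

def lin(x, out_dim, name):
    # Plain fully connected layer, no activation (returns logits).
    with tf.variable_scope(name):
        in_dim = x.get_shape().as_list()[1]
        w = tf.get_variable("w", [in_dim, out_dim],
                            initializer=tf.truncated_normal_initializer(stddev=0.02))
        b = tf.get_variable("b", [out_dim], initializer=tf.zeros_initializer())
        return tf.matmul(x, w) + b

def lin_lrelu(x, out_dim, name):
    # Fully connected layer followed by a leaky ReLU.
    return tf.nn.leaky_relu(lin(x, out_dim, name), alpha=0.2)

def lin_bn_lrelu(x, out_dim, bn_settings, name):
    # Fully connected layer, batch normalization, then leaky ReLU.
    # bn_settings is assumed to be a dict with an "is_training" flag.
    h = lin(x, out_dim, name)
    h = tf.layers.batch_normalization(h, training=bn_settings.get("is_training", True),
                                      name=name + "_bn")
    return tf.nn.leaky_relu(h, alpha=0.2)
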
Example #2
    def decoder(self, z, reuse=False):
        with tf.variable_scope('decoder') as scope:
            if reuse:
                scope.reuse_variables()
            z_dim = self.z_dim
            if self.y_dim:
                # Conditional case: append the one-hot labels to the latent code
                z = tf.concat([z, self.y_labels], axis=1)
                z_dim = self.z_dim + self.y_dim

            c_i = z
            c_i = lin(c_i, 784, name='dec_lin_1')
            # Now c_i has shape batch_size x 784
            c_i = tf.maximum(0., c_i)  # ReLU
            c_i = tf.reshape(c_i, [self.batch_size, 28, 28, 1])
            # Now c_i has shape batch_size x 28 x 28 x 1

            c_i = conv(c_i, 5, 1, 16, name="dec_conv_1")
            c_i = tf.maximum(0., c_i)
            # Now c_i has shape batch_size x 28 x 28 x 16

            c_i = conv(c_i, 5, 1, 1, name="dec_conv_2")
            # Now c_i has shape batch_size x 28 x 28 x 1

            c_i = tf.reshape(c_i, shape=[self.batch_size, 784])
            # Now c_i has shape batch_size x 784

            y_image = tf.nn.sigmoid(c_i)
            return y_image
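
The conv helper is not shown either. Given that conv(c_i, 5, 1, 16, ...) leaves the 28 x 28 spatial size unchanged in the shape comments, it is presumably a SAME-padded 2-D convolution with signature conv(x, kernel, stride, out_channels, name). A sketch under that assumption (again, not the original code):

def conv(x, kernel, stride, out_channels, name):
    # SAME-padded 2-D convolution; with kernel=5, stride=1 the 28 x 28
    # spatial size is preserved, matching the shape comments above.
    in_channels = x.get_shape().as_list()[-1]
    with tf.variable_scope(name):
        w = tf.get_variable("w", [kernel, kernel, in_channels, out_channels],
                            initializer=tf.truncated_normal_initializer(stddev=0.02))
        b = tf.get_variable("b", [out_channels], initializer=tf.zeros_initializer())
        return tf.nn.conv2d(x, w, strides=[1, stride, stride, 1], padding="SAME") + b
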
Example #3
    def encoder(self):
        c_i = self.x_image
        c_i = lin_lrelu(c_i, 2048, name="enc_dens_%d" % 0)
        c_i = lin_bn_lrelu(c_i, 2048, self.bn_settings, name="enc_dens_%d" % 1)

        z = lin(c_i, self.z_dim, name="enc_out")
        return z
Example #4
    def encoder(self):
        c_i = self.x_image
        c_i = tf.reshape(c_i, shape=[self.batch_size, 28, 28, 1])
        # Now c_i has shape batch_size x 28 x 28 x 1

        c_i = conv(c_i, 5, 1, 16, name="enc_conv_1")
        c_i = tf.maximum(0., c_i)
        # Now c_i has shape batch_size x 28 x 28 x 16

        c_i = conv(c_i, 5, 1, 16, name="enc_conv_2")
        c_i = tf.maximum(0., c_i)
        # Now c_i has shape batch_size x 28 x 28 x 16

        c_i = tf.reshape(c_i, shape=[self.batch_size, 16 * 784])
        # Now c_i has shape batch_size x 12544 (16 * 784)

        c_i = lin(c_i, 400, name="enc_lin_1")
        c_i = tf.maximum(0., c_i)
        # Now c_i has shape batch_size x 400

        z = lin(c_i, self.z_dim, name="enc_lin_2")

        return z
Example #5
    def decoder(self, z, reuse=False):
        with tf.variable_scope('decoder') as scope:
            c_i = z
            if self.y_dim:
                c_i = tf.concat([z, self.y_labels], axis=1)

            if reuse:
                scope.reuse_variables()

            c_i = lin_lrelu(c_i, 2048, name="dec_dens_%d" % 0)
            c_i = lin_bn_lrelu(c_i, 2048, self.bn_settings, name="dec_dens_%d" % 1)

            c_i = lin(c_i, self.input_dim, name="dec_out")
            x_reconstructed = tf.nn.sigmoid(c_i)
            return x_reconstructed
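
For reference, one hypothetical way to wire the encoder and decoder snippets into a reconstruction objective; the squared-error loss and the variable names here are assumptions, not taken from the source.

# Inside a (hypothetical) graph-building method of the model class:
z = self.encoder()                      # latent code computed from self.x_image
x_reconstructed = self.decoder(z)       # sigmoid output in [0, 1]
recon_loss = tf.reduce_mean(tf.square(x_reconstructed - self.x_image))
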
Example #6
    def discriminator(self, z, reuse=False):
        with tf.variable_scope('discriminator') as scope:
            if reuse:
                scope.reuse_variables()

            c_i = z
            if self.y_dim:
                c_i = tf.concat([z, self.y_labels], axis=1)

            c_i = lin_lrelu(c_i, 500, name="disc_dens_%d" % 0)
            c_i = lin_lrelu(c_i, 500, name="disc_dens_%d" % 1)

            y = lin(c_i, 1, name="disc_out")

        return y
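
The reuse flag exists so the discriminator can be applied twice in the same graph: the first call creates the variables, the second call shares them. A hypothetical adversarial wiring, where z_prior (a sample from the prior) and z_encoded (the encoder output) are assumed names:

# Hypothetical discriminator wiring with variable sharing:
d_real = self.discriminator(z_prior)                # builds the 'discriminator' variables
d_fake = self.discriminator(z_encoded, reuse=True)  # reuses the same variables
d_loss = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.ones_like(d_real), logits=d_real)
    + tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.zeros_like(d_fake), logits=d_fake))
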