Example #1
import tensorflow as tf
from tflearn.activations import leaky_relu
from tflearn.layers.conv import conv_2d
from tflearn.layers.core import fully_connected


def discriminator_forward(config,
                          incoming,
                          scope="discriminator",
                          name=None,
                          reuse=False):
    # DCGAN-style discriminator: four stride-2 5x5 convolutions with
    # LeakyReLU activations, followed by a single linear logit
    with tf.variable_scope(scope, name, reuse=reuse):
        output = conv_2d(incoming, config.gen_dim, 5, strides=2, name="conv1")
        output = leaky_relu(output, 0.2)
        output = conv_2d(output,
                         2 * config.gen_dim,
                         5,
                         strides=2,
                         name="conv2")

        output = leaky_relu(output, 0.2)
        output = conv_2d(output,
                         4 * config.gen_dim,
                         5,
                         strides=2,
                         name="conv3")

        output = leaky_relu(output, 0.2)
        output = conv_2d(output,
                         8 * config.gen_dim,
                         5,
                         strides=2,
                         name="conv4")

        output = leaky_relu(output, 0.2)
        # flatten; the hard-coded 4 * 4 spatial size assumes 64x64 inputs,
        # halved by each of the four stride-2 convolutions
        output = tf.reshape(output, [-1, 4 * 4 * 8 * config.gen_dim])
        output = fully_connected(output, 1, bias=False)

    return output
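
The reuse flag lets the same discriminator weights score both real and generated batches, the standard GAN weight-sharing pattern. A minimal usage sketch, where images and fake_images are assumed to come from the surrounding training script:

real_scores = discriminator_forward(config, images)
fake_scores = discriminator_forward(config, fake_images, reuse=True)  # shared weights
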
Example #2
import tensorflow as tf
from tflearn.activations import relu
from tflearn.layers.conv import conv_2d
from tflearn.layers.core import dropout, flatten, fully_connected
from tflearn.layers.normalization import batch_normalization


def classifier_forward(config,
                       incoming,
                       name=None,
                       reuse=False,
                       scope="classifier"):
    with tf.variable_scope(scope, name, reuse=reuse):
        network = incoming
        # conv -> batch norm -> ReLU; the convolutions stay linear here,
        # since passing activation='relu' to conv_2d as well would apply
        # ReLU a second time, and before the normalization
        network = relu(
            batch_normalization(
                conv_2d(network, 32, 5, regularizer="L2", strides=2)))
        network = relu(
            batch_normalization(
                conv_2d(network, 64, 5, regularizer="L2", strides=2)))
        network = flatten(network)

        network = relu(batch_normalization(fully_connected(network, 1024)))
        network = dropout(network, 0.5)  # keep probability, not drop rate

        network = fully_connected(network, 10)  # raw logits for 10 classes

    return network
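
The final layer returns raw logits rather than probabilities, so the softmax belongs in the loss. A minimal sketch, assuming images and one-hot labels come from the surrounding training script:

logits = classifier_forward(config, images)
loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits))
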
Example #3
import tensorflow as tf
from tflearn.activations import leaky_relu
from tflearn.layers.conv import conv_2d
from tflearn.layers.core import fully_connected


def discriminator_forward(config,
                          incoming,
                          scope="discriminator",
                          name=None,
                          reuse=False):
    with tf.variable_scope(scope, name, reuse=reuse):
        # three stride-2 5x5 convolutions with LeakyReLU activations
        output = leaky_relu(conv_2d(incoming, config.dim, 5, 2), 0.2)
        output = leaky_relu(conv_2d(output, 2 * config.dim, 5, 2), 0.2)
        output = leaky_relu(conv_2d(output, 4 * config.dim, 5, 2), 0.2)

        # flatten (the 4 * 4 spatial size assumes 32x32 inputs) and map to
        # one unbounded score per example
        output = tf.reshape(output, [-1, 4 * 4 * 4 * config.dim])
        output = tf.reshape(fully_connected(output, 1, bias=False), [-1])

    return output
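
The unbounded, bias-free scalar score with no sigmoid has the shape of a Wasserstein-style critic. A minimal sketch of such a critic loss, an assumption rather than this project's confirmed objective, with images and fake_images taken from the surrounding script:

real_scores = discriminator_forward(config, images)
fake_scores = discriminator_forward(config, fake_images, reuse=True)

# the critic widens the gap between scores on real and generated samples
critic_loss = tf.reduce_mean(fake_scores) - tf.reduce_mean(real_scores)
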
Example #4
import tensorflow as tf
from tflearn.layers.conv import conv_2d
from tflearn.layers.core import fully_connected
from tflearn.layers.normalization import batch_normalization


def generator_forward(config,
                      noise=None,
                      scope="generator",
                      name=None,
                      num_samples=-1,
                      reuse=False):
    with tf.variable_scope(scope, name, reuse=reuse):
        # sample a fresh latent batch if the caller did not supply one;
        # num_samples == -1 falls back to the configured batch size
        if noise is None:
            noise = tf.random_normal(
                [config.batch_size if num_samples == -1 else num_samples, 128],
                name="noise")

        # project the noise to a 4x4 spatial seed
        output = fully_connected(noise, 4 * 4 * config.gen_dim, name="input")
        output = tf.reshape(output, [-1, 4, 4, config.gen_dim])

        # three upsampling residual blocks; each is assumed to double the
        # spatial resolution, growing the 4x4 seed to 32x32
        output = residual_block_upsample(output, config.gen_dim, 3, name="rb1")
        output = residual_block_upsample(output, config.gen_dim, 3, name="rb2")
        output = residual_block_upsample(output, config.gen_dim, 3, name="rb3")

        # final BN -> ReLU -> 3-channel conv, squashed into [-1, 1] by tanh
        output = batch_normalization(output)
        output = tf.nn.relu(output)
        output = conv_2d(output, 3, 3, name="output")
        output = tf.tanh(output)

    return output
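
residual_block_upsample is not a TFLearn layer and is not defined in these snippets. The sketch below is one common formulation (a pre-activation residual block with nearest-neighbor 2x upsampling), offered as an assumption about its shape rather than the project's actual helper:

def residual_block_upsample(incoming, nb_filter, filter_size, name=None):
    with tf.variable_scope(name, "residual_block_upsample"):
        height, width = incoming.get_shape().as_list()[1:3]
        new_size = [2 * height, 2 * width]

        # 1x1-projected shortcut, brought up to the new resolution
        shortcut = tf.image.resize_nearest_neighbor(incoming, new_size)
        shortcut = conv_2d(shortcut, nb_filter, 1, name="shortcut")

        # pre-activation residual branch (BN -> ReLU before each conv)
        output = tf.nn.relu(batch_normalization(incoming))
        output = tf.image.resize_nearest_neighbor(output, new_size)
        output = conv_2d(output, nb_filter, filter_size, name="conv1")
        output = tf.nn.relu(batch_normalization(output))
        output = conv_2d(output, nb_filter, filter_size, name="conv2")

    return output + shortcut
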
Example #5
import tensorflow as tf
from tflearn.layers.conv import conv_2d
from tflearn.layers.core import fully_connected


def discriminator_forward(config,
                          incoming,
                          scope="discriminator",
                          name=None,
                          reuse=False):
    with tf.variable_scope(scope, name, reuse=reuse):
        output = conv_2d(incoming, config.disc_dim, 3, name="input")

        # four downsampling residual blocks; each is assumed to halve the
        # spatial resolution while widening the feature maps
        output = residual_block_downsample(output,
                                           2 * config.disc_dim,
                                           3,
                                           name="rb1")
        output = residual_block_downsample(output,
                                           4 * config.disc_dim,
                                           3,
                                           name="rb2")
        output = residual_block_downsample(output,
                                           8 * config.disc_dim,
                                           3,
                                           name="rb3")
        output = residual_block_downsample(output,
                                           8 * config.disc_dim,
                                           3,
                                           name="rb4")

        # flatten (the 4 * 4 spatial size assumes 64x64 inputs, halved by
        # each of the four residual blocks) and score each example
        output = tf.reshape(output, [-1, 4 * 4 * 8 * config.disc_dim])
        output = fully_connected(output, 1, name="output")

    return tf.reshape(output, [-1])
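
Like its upsampling counterpart, residual_block_downsample is a project-specific helper that the snippet does not define. A minimal sketch under the same assumptions, using 2x average pooling to halve the resolution:

from tflearn.layers.conv import avg_pool_2d

def residual_block_downsample(incoming, nb_filter, filter_size, name=None):
    with tf.variable_scope(name, "residual_block_downsample"):
        # 1x1-projected shortcut, brought down to half resolution
        shortcut = conv_2d(incoming, nb_filter, 1, name="shortcut")
        shortcut = avg_pool_2d(shortcut, 2)

        # residual branch: two ReLU-conv steps, then 2x average pooling
        output = tf.nn.relu(incoming)
        output = conv_2d(output, nb_filter, filter_size, name="conv1")
        output = tf.nn.relu(output)
        output = conv_2d(output, nb_filter, filter_size, name="conv2")
        output = avg_pool_2d(output, 2)

    return output + shortcut
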
Example #6
import tensorflow as tf
from tflearn.activations import relu
from tflearn.layers.conv import conv_2d
from tflearn.layers.core import dropout, flatten, fully_connected


def classifier_forward(config,
                       incoming,
                       name=None,
                       reuse=False,
                       scope="classifier"):
    with tf.variable_scope(scope, name, reuse=reuse):
        network = incoming
        # two stride-2 5x5 convolutions, without batch normalization
        network = relu(conv_2d(network, 32, 5, strides=2))
        network = relu(conv_2d(network, 64, 5, strides=2))
        network = flatten(network)

        network = relu(fully_connected(network, 1024))
        network = dropout(network, 0.7)  # keep probability, not drop rate

        network = fully_connected(network, 10)  # raw logits for 10 classes

    return network
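
TFLearn's dropout only takes effect while the library's global training flag is set; otherwise it passes its input through unchanged. A minimal sketch of toggling that flag, assuming sess is the active training session:

import tflearn

tflearn.is_training(True, session=sess)   # enable dropout for training steps
# ... run training ops here ...
tflearn.is_training(False, session=sess)  # dropout becomes a no-op for evaluation
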