# Example 1 (score: 0)
def generator(x, phase, reuse=None):
    """Generator network: map latent codes `x` to images.

    Args:
        x: latent input tensor (assumed 2-D `[batch, latent_dim]` for the
           dense path — TODO confirm against caller).
        phase: training-phase flag forwarded to batch norm via `arg_scope`.
        reuse: passed to `tf.variable_scope` to share variables across calls.

    Returns:
        A `[batch, H, W, 1]` tensor in (-1, 1) via `tanh` (default path).
    """
    with tf.variable_scope('gen', reuse=reuse):
        with arg_scope([dense], bn=True, phase=phase, activation=tf.nn.relu), \
             arg_scope([conv2d_transpose], bn=True, phase=phase, activation=tf.nn.relu):

            if args.dense:
                # Densely-connected multi-scale path: unfinished draft, kept
                # behind the raise so it is never executed.
                raise NotImplementedError('yep')
                # Level 0: 1x1 seed
                l0 = tf.reshape(x, [-1, 1, 1, 128])

                # Level 1: three parallel branches from the seed
                a1 = conv2d_transpose(l0, 64, 1, 1)
                a2 = conv2d_transpose(l0, 64, 1, 1)
                a3 = conv2d_transpose(l0, 64, 1, 1)
                l1 = a1

                # Level 2: upsample level 1 (kernel 3, stride 2).
                # NOTE(review): original had an incomplete duplicate
                # `b2 = conv2d_transpose(l1, 64, )` and referenced an
                # undefined `b3`; defined b3 here with the same
                # kernel/stride as b2 — TODO confirm intended shapes.
                b2 = conv2d_transpose(l1, 64, 3, 2)
                b3 = conv2d_transpose(l1, 64, 3, 2)
                l2 = tf.concat([a2, b2], -1)

                # Level 3: upsample level 2 and fuse all branches.
                c3 = conv2d_transpose(l2, 64, 3, 2)
                l3 = tf.concat([a3, b3, c3], -1)  # currently unused by the return

            else:
                # Standard DCGAN-style decoder: dense -> 4x4x512 -> three
                # stride-2 transposed convs -> single-channel tanh image.
                x = dense(x, 4 * 4 * 512)
                x = tf.reshape(x, [-1, 4, 4, 512])
                x = conv2d_transpose(x, 256, 5, 2)
                x = conv2d_transpose(x, 128, 5, 2)
                x = conv2d_transpose(x, 1, 5, 2, bn=False, activation=tf.nn.tanh)

    return x
# Example 2 (score: 0)
def generator(z, y, phase, scope='gen', reuse=None, internal_update=False):
    """Conditional generator: decode noise `z` concatenated with labels `y`
    into a 3-channel tanh image.

    Args:
        z: latent noise tensor.
        y: conditioning tensor, concatenated with `z` on axis 1.
        phase: training-phase flag forwarded to the normalized layers.
        scope: variable scope name.
        reuse: variable-reuse flag for `tf.variable_scope`.
        internal_update: forwarded to `batch_norm` via `arg_scope`.

    Returns:
        A `[batch, H, W, 3]` tensor in (-1, 1).
    """
    with tf.variable_scope(scope, reuse=reuse):
        # Default every dense/deconv layer to BN + leaky ReLU, and let
        # batch_norm pick up the internal_update setting.
        with arg_scope([dense, conv2d_transpose],
                       bn=True, phase=phase, activation=leaky_relu):
            with arg_scope([batch_norm], internal_update=internal_update):
                h = tf.concat([z, y], 1)
                h = dense(h, 4 * 4 * 512)
                h = tf.reshape(h, [-1, 4, 4, 512])
                h = conv2d_transpose(h, 256, 5, 2)
                h = conv2d_transpose(h, 128, 5, 2)
                # Output layer: no BN, tanh to land in (-1, 1).
                h = conv2d_transpose(h, 3, 5, 2, bn=False, activation=tf.nn.tanh)

    return h
# Example 3 (score: 0)
def generator(x, y, phase, reuse=None):
    """Optionally-conditional generator: decode `x` (with labels `y`, if
    given) into a 3-channel tanh image.

    Args:
        x: latent input tensor.
        y: optional conditioning tensor; when not None it is concatenated
           with `x` on axis 1.
        phase: training-phase flag forwarded to the normalized layers.
        reuse: variable-reuse flag for `tf.variable_scope`.

    Returns:
        A `[batch, H, W, 3]` tensor in (-1, 1).
    """
    with tf.variable_scope('gen', reuse=reuse):
        # Default both layer types to BN + ReLU inside this scope.
        with arg_scope([dense], bn=True, phase=phase, activation=tf.nn.relu):
            with arg_scope([conv2d_transpose],
                           bn=True, phase=phase, activation=tf.nn.relu):

                # Fold the condition in only when one is supplied.
                h = x if y is None else tf.concat([x, y], 1)

                h = dense(h, 4 * 4 * 512)
                h = tf.reshape(h, [-1, 4, 4, 512])
                h = conv2d_transpose(h, 256, 5, 2)
                h = conv2d_transpose(h, 128, 5, 2)
                # Weight-normalized output layer: no BN, tanh range.
                h = wnconv2d_transpose(h, 3, 5, 2, bn=False,
                                       activation=tf.nn.tanh, scale=True)

    return h