Ejemplo n.º 1
0
# generator labels ( all ones )
y = tf.ones(batch_size, dtype=tf.sg_floatx)

# discriminator labels ( half 1s, half 0s )
# fixed: tf.concat takes (values, axis) since TF 1.0 — the legacy
# tf.concat(axis, values) argument order fails on modern TF.
y_disc = tf.concat([y, y * 0], 0)


#
# create generator
#

# random class number drawn from a uniform categorical over num_category classes
# fixed: cast to int via .sg_int() so sg_one_hot below receives an integer
# index tensor, consistent with the other examples in this file.
z_cat = tf.multinomial(tf.ones((batch_size, num_category), dtype=tf.sg_floatx) / num_category, 1).sg_squeeze().sg_int()

# random seed = random categorical variable + random uniform
z = z_cat.sg_one_hot(depth=num_category).sg_concat(target=tf.random_uniform((batch_size, num_dim-num_category)))

# random continuous variable (columns right after the one-hot part)
z_cont = z[:, num_category:num_category+num_cont]

# generator network: dense layers up to a 48x1x128 feature map,
# then three transposed convolutions to the output
with tf.sg_context(name='generator', size=(4, 1), stride=(2, 1), act='relu', bn=True):
    gen = (z.sg_dense(dim=1024)
           .sg_dense(dim=48*1*128)
           .sg_reshape(shape=(-1, 48, 1, 128))
           .sg_upconv(dim=64)
           .sg_upconv(dim=32)
           .sg_upconv(dim=2, act='sigmoid', bn=False))
#
# create discriminator & recognizer
Ejemplo n.º 2
0
# real input images from the training queue
x = data.train.image

# labels the generator wants the discriminator to emit (all ones)
y = tf.ones(batch_size, dtype=tf.sg_floatx)

# discriminator targets: ones for the real half, zeros for the fake half
y_disc = tf.concat([y, y * 0], 0)

#
# create generator
#

# uniform categorical distribution over the num_category classes
cat_dist = tf.ones((batch_size, num_category), dtype=tf.sg_floatx) / num_category
# sample one class index per batch element
z_cat = tf.multinomial(cat_dist, 1).sg_squeeze().sg_int()

# latent seed = one-hot class code followed by uniform noise
z_code = z_cat.sg_one_hot(depth=num_category)
z_noise = tf.random_uniform((batch_size, num_dim - num_category))
z = z_code.sg_concat(target=z_noise)

# slice out the continuous latent coordinates
z_cont = z[:, num_category:num_category + num_cont]

# class labels: real labels followed by the sampled fake classes
label = tf.concat([data.train.label, z_cat], 0)

# generator network (small fully-connected stack)
with tf.sg_context(name='generator', size=4, stride=2, act='relu', bn=True):
    hidden = z.sg_dense(dim=60)
    hidden = hidden.sg_dense(dim=190)
    gen = hidden.sg_dense(dim=190)
Ejemplo n.º 3
0
# MNIST input pipeline (backed by a QueueRunner)
data = tf.sg_data.Mnist(batch_size=batch_size)

# real images from the training split
x = data.train.image

# discriminator targets for real and generated samples
y_real = tf.ones(batch_size)
y_fake = tf.zeros(batch_size)

# sample a class index per example from a uniform categorical
uniform_dist = tf.ones((batch_size, cat_dim), dtype=tf.sg_floatx) / cat_dim
z_cat = tf.multinomial(uniform_dist, 1).sg_squeeze().sg_int()
# structured continuous latent code
z_con = tf.random_uniform((batch_size, con_dim))
# unstructured noise component
z_rand = tf.random_uniform((batch_size, rand_dim))
# full latent vector: [one-hot class | continuous code | noise]
cat_code = z_cat.sg_one_hot(depth=cat_dim)
z = tf.concat([cat_code, z_con, z_rand], 1)

#
# Computational graph
#

# run the generator on the latent vector
gen = generator(z)

# log real and generated images for TensorBoard
tf.sg_summary_image(x, name='real')
tf.sg_summary_image(gen, name='fake')
Ejemplo n.º 4
0
#
# inputs
#

# MNIST input tensor ( with QueueRunner )
data = tf.sg_data.Mnist(batch_size=batch_size)

# input images
x = data.train.image

#
# create generator
#

# random uniform seed (z_dim-dimensional noise vector per batch element)
z = tf.random_uniform((batch_size, z_dim))

# convolution hyper-parameters shared by the layers below
size = 4
stride = 2
strides = [1, stride, stride, 1]

# generator: dense layers up to a 7x7x128 feature map, then upconvs
with tf.sg_context(name='generator',
                   size=4,
                   stride=2,
                   act='relu',
                   bn=True,
                   bias=False):
    g_p1 = (z.sg_dense(dim=1024).sg_dense(dim=7 * 7 *
                                          128).sg_reshape(shape=(-1, 7, 7,
                                                                 128)))
    # NOTE(review): the next statement is truncated at the chunk boundary —
    # the remaining arguments to ops.upconv_and_scale are not visible here.
    g_p2 = ops.upconv_and_scale(g_p1,
Ejemplo n.º 5
0
# placeholder for the first target continuous code
target_cval_1 = tf.placeholder(dtype=tf.sg_floatx, shape=batch_size)
# placeholder for the second target continuous code
target_cval_2 = tf.placeholder(dtype=tf.sg_floatx, shape=batch_size)

# one-hot code for the requested class, repeated over the batch
z = (tf.ones(batch_size, dtype=tf.sg_intx) *
     target_num).sg_one_hot(depth=num_category)

# append both continuous codes as extra latent columns
cont_cols = [target_cval_1.sg_expand_dims(), target_cval_2.sg_expand_dims()]
z = z.sg_concat(target=cont_cols)

# pad the remaining latent dimensions with uniform noise
noise = tf.random_uniform((batch_size, num_dim - num_category - num_cont))
z = z.sg_concat(target=noise)

#
# create generator
#

# generator network: dense layers up to a 7x7x128 feature map,
# then two transposed convolutions down to a single-channel image
with tf.sg_context(name='generator', stride=2, act='relu', bn=True):
    feat = z.sg_dense(dim=1024).sg_dense(dim=7 * 7 * 128)
    feat = feat.sg_reshape(shape=(-1, 7, 7, 128))
    feat = feat.sg_upconv(size=4, dim=64)
    feat = feat.sg_upconv(size=4, dim=1, act='sigmoid', bn=False)
    gen = feat.sg_squeeze()
Ejemplo n.º 6
0
batch_size = 32  # batch size
num_dim = 50  # latent dimension
snr = 0.5  # signal to noise ratio

#
# inputs
#

# MNIST input pipeline (backed by a QueueRunner)
data = tf.sg_data.Mnist(batch_size=32)

# clean input images
x = data.train.image

# corrupt the input: blend the image with uniform noise at the given SNR
noise = tf.random_uniform(x.get_shape())
x_n = x * snr + noise * (1 - snr)

#
# Computational graph
#

# encoder: two strided convolutions, then dense layers down to the latent code
with tf.sg_context(name='encoder', size=4, stride=2, act='relu'):
    feat = x_n.sg_conv(dim=64).sg_conv(dim=128)
    feat = feat.sg_flatten().sg_dense(dim=1024)
    z = feat.sg_dense(dim=num_dim, act='linear')

# decoder: dense layers back up, then transposed convolutions to image size
with tf.sg_context(name='decoder', size=4, stride=2, act='relu'):
    up = z.sg_dense(dim=1024).sg_dense(dim=7 * 7 * 128)
    up = up.sg_reshape(shape=(-1, 7, 7, 128)).sg_upconv(dim=64)
    xx = up.sg_upconv(dim=1, act='sigmoid')
Ejemplo n.º 7
0
# generator labels ( all ones )
y = tf.ones(batch_size, dtype=tf.sg_floatx)

# discriminator labels ( half 1s, half 0s )
# fixed: tf.concat takes (values, axis) since TF 1.0 — the legacy
# tf.concat(axis, values) order used here raises on modern TF
# (the other examples in this file already use the modern order).
y_disc = tf.concat([y, y * 0], 0)

#
# create generator
#

# random class number from a uniform categorical over num_category classes
z_cat = tf.multinomial(tf.ones((batch_size, num_category), dtype=tf.sg_floatx) / num_category, 1).sg_squeeze().sg_int()

# random seed = random categorical variable + random uniform
z = z_cat.sg_one_hot(depth=num_category).sg_concat(target=tf.random_uniform((batch_size, num_dim - num_category)))

# random continuous variable (columns right after the one-hot part)
z_cont = z[:, num_category:num_category+num_cont]

# category label: real labels followed by the sampled fake classes
# fixed: same (values, axis) argument-order correction as above.
label = tf.concat([data.train.label, z_cat], 0)

# generator network: dense layers to a 7x7x128 map, then two upconvs
with tf.sg_context(name='generator', size=4, stride=2, act='relu', bn=True):
    gen = (z.sg_dense(dim=1024)
           .sg_dense(dim=7*7*128)
           .sg_reshape(shape=(-1, 7, 7, 128))
           .sg_upconv(dim=64)
           .sg_upconv(dim=1, act='sigmoid', bn=False))
Ejemplo n.º 8
0
# input images
x = data.train.image

# generator labels ( all ones )
y = tf.ones(data.batch_size, dtype=tf.sg_floatx)

# discriminator labels ( half 1s, half 0s )
# fixed: tf.concat takes (values, axis) since TF 1.0 — the legacy
# tf.concat(axis, values) order fails on modern TF.
y_disc = tf.concat([y, y * 0], 0)

#
# create generator
#

# 100-dimensional random uniform seed
z = tf.random_uniform((data.batch_size, 100))

with tf.sg_context(name='generator', size=4, stride=2, act='relu', bn=True):

    # generator network: dense layers to a 7x7x128 map, then two upconvs
    gen = (z.sg_dense(dim=1024).sg_dense(dim=7 * 7 * 128).sg_reshape(
        shape=(-1, 7, 7, 128)).sg_upconv(dim=64).sg_upconv(dim=1,
                                                           act='sigmoid',
                                                           bn=False))

# add image summary
tf.sg_summary_image(gen)

#
# create discriminator
#
Ejemplo n.º 9
0
# enable debug-level logging
tf.sg_verbosity(10)

#
# hyper parameters
#

batch_size = 100

#
# create generator
#

# 100-dimensional uniform noise seed
z = tf.random_uniform((batch_size, 100))

with tf.sg_context(name='generator', size=4, stride=2, act='relu', bn=True):
    # dense layers up to a 7x7x128 feature map, then two transposed convs
    feat = z.sg_dense(dim=1024).sg_dense(dim=7 * 7 * 128)
    feat = feat.sg_reshape(shape=(-1, 7, 7, 128))
    feat = feat.sg_upconv(dim=64)
    gen = feat.sg_upconv(dim=1, act='sigmoid', bn=False).sg_squeeze()

#
# draw samples
#

with tf.Session() as sess:
    tf.sg_init(sess)
Ejemplo n.º 10
0
# placeholder for the requested class index
target_num = tf.placeholder(dtype=tf.sg_intx, shape=batch_size)
# placeholder for the first target continuous code
target_cval_1 = tf.placeholder(dtype=tf.sg_floatx, shape=batch_size)
# placeholder for the second target continuous code
target_cval_2 = tf.placeholder(dtype=tf.sg_floatx, shape=batch_size)

# one-hot class code repeated over the whole batch
z = (tf.ones(batch_size, dtype=tf.sg_intx) *
     target_num).sg_one_hot(depth=num_category)

# append the two continuous codes as extra latent columns
cont_cols = [target_cval_1.sg_expand_dims(), target_cval_2.sg_expand_dims()]
z = z.sg_concat(target=cont_cols)

# fill the remaining latent dimensions with uniform noise
z = z.sg_concat(
    target=tf.random_uniform((batch_size, num_dim - num_cont - num_category)))


#
# create generator
#

# generator network: dense layers up to a 48x1x128 feature map,
# then three transposed convolutions to the two-channel output
with tf.sg_context(name='generator', size=(4, 1), stride=(2, 1), act='relu', bn=True):
    feat = z.sg_dense(dim=1024).sg_dense(dim=48 * 1 * 128)
    feat = feat.sg_reshape(shape=(-1, 48, 1, 128))
    feat = feat.sg_upconv(dim=64).sg_upconv(dim=32)
    gen = feat.sg_upconv(dim=2, act='sigmoid', bn=False).sg_squeeze()