Example 1
0
def encoder(x, y, phase, scope='enc', reuse=None, internal_update=False):
    """Autoregressive Gaussian encoder.

    Downsamples `x` with three strided convolutions, flattens to a dense
    feature vector, then draws 5 latent chunks of size 20 autoregressively:
    each step conditions on the features plus all previously sampled chunks.
    `y` is accepted for interface compatibility but ignored.

    Args:
        x: input tensor (image batch — assumed 4-D; TODO confirm with caller).
        y: label tensor (unused).
        phase: training-phase flag forwarded to batch norm via arg_scope.
        scope: variable-scope name for the encoder's weights.
        reuse: variable-scope reuse flag.
        internal_update: forwarded to batch_norm's arg_scope.

    Returns:
        (z, (m, v)): the sampled latent (concatenation of the 5 sampled
        chunks, 100 dims total) and its Gaussian mean / variance parameters.
    """
    with tf.variable_scope(scope, reuse=reuse):
        with arg_scope([conv2d, dense], bn=True, phase=phase, activation=leaky_relu), \
             arg_scope([batch_norm], internal_update=internal_update):

            # Ignore y
            x = conv2d(x, 64, 3, 2)
            x = conv2d(x, 128, 3, 2)
            x = conv2d(x, 256, 3, 2)
            x = dense(x, 1024)

            # Autoregression (5 steps — the original comment said 4, but the
            # loop draws 5 chunks of 20, yielding the 100-dim latent)
            ms = []
            vs = []
            zs = [x]

            # range (not Python-2-only xrange) keeps this Python-3 compatible.
            for i in range(5):
                h = tf.concat(zs, axis=-1)
                h = dense(h, 100)
                m = dense(h, 20, activation=None)
                # softplus keeps the variance positive; 1e-5 avoids collapse to 0
                v = dense(h, 20, activation=tf.nn.softplus) + 1e-5
                z = gaussian_sample(m, v)
                ms.append(m)
                vs.append(v)
                zs.append(z)

            m = tf.concat(ms, 1)
            v = tf.concat(vs, 1)
            # zs[0] is the deterministic feature vector; return only the samples.
            z = tf.concat(zs[1:], 1)

    return z, (m, v)
Example 2
0
def qz_graph(x, y, n_z):
    """Build the inference network q(z | x, y).

    Concatenates `x` and `y`, passes them through one ReLU hidden layer,
    and produces the Gaussian parameters (zm, zv) from which z is sampled.

    Returns:
        (z, zm, zv): the sampled latent plus its mean and variance.
    """
    # Reuse weights iff variables under the 'qz' scope already exist.
    reuse = len(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                                  scope='qz')) > 0
    fc = tf.contrib.layers.fully_connected
    # -- q(z)
    with tf.variable_scope('qz'):
        xy = tf.concat((x, y), 1, name='xy/concat')
        h1 = fc(xy, 4, scope='layer1', activation_fn=tf.nn.relu, reuse=reuse)
        zm = fc(h1, n_z, scope='zm', activation_fn=None, reuse=reuse)
        # softplus keeps the variance non-negative.
        zv = fc(h1, n_z, scope='zv', activation_fn=tf.nn.softplus, reuse=reuse)
        # The identity op gives the sample a stable name ('z_sample') so it
        # can be fed into z directly when sampling.
        z = tf.identity(gaussian_sample(zm, zv, 'z'), name='z_sample')
    return z, zm, zv
Example 3
0
def encoder(x, y, phase, scope='enc', reuse=None, internal_update=False):
    """Convolutional Gaussian encoder; the label `y` is ignored.

    Three strided convolutions followed by a dense layer produce features,
    from which a 100-dim Gaussian latent is parameterized and sampled.

    Returns:
        (z, (m, v)): sampled latent and its mean / variance parameters.
    """
    with tf.variable_scope(scope, reuse=reuse):
        with arg_scope([conv2d, dense], bn=True, phase=phase, activation=leaky_relu), \
             arg_scope([batch_norm], internal_update=internal_update):

            # y is intentionally unused.
            h = x
            for n_filters in (64, 128, 256):
                h = conv2d(h, n_filters, 3, 2)
            h = dense(h, 1024)

            m = dense(h, 100, activation=None)
            # softplus + epsilon keeps the variance strictly positive.
            v = dense(h, 100, activation=tf.nn.softplus) + 1e-5
            z = gaussian_sample(m, v)

            return z, (m, v)
Example 4
0
def z_graph(zm, zv):
    """Sample z ~ N(zm, zv) inside the 'z' variable scope.

    The sample is wrapped in an identity op named 'z_sample' so it can be
    fed into z directly when sampling from the model.
    """
    with tf.variable_scope('z'):
        sample = gaussian_sample(zm, zv, 'z')
        return tf.identity(sample, name='z_sample')