Example #1
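A DCGAN-style generator: two fully connected layers expand the input (presumably a latent vector), which is reshaped into 7x7 feature maps and upsampled by two transposed convolutions into a 28x28x1 image (MNIST-sized). The helpers fullyConnected, normalizeBatch, lrelu, and deconv, as well as the global batch_size, are assumed to be defined elsewhere in the project.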
def generator(flow):
    # Project the input (presumably a latent vector) up to a 1024-unit hidden layer.
    flow = fullyConnected('layer_0', flow, 1024, None)
    flow = normalizeBatch(flow, True)
    flow = lrelu(flow)
    # Expand to 7 * 7 * 64 units so the result can be reshaped into 7x7 feature maps.
    flow = fullyConnected('layer_1', flow, 7 * 7 * 64, None)
    flow = normalizeBatch(flow, True)
    flow = lrelu(flow)
    flow = tf.reshape(flow, [batch_size, 7, 7, 64])
    # Two transposed convolutions (5x5 kernel, stride 2) upsample 7x7 -> 14x14 -> 28x28.
    flow = deconv('layer_2', flow, [batch_size, 14, 14, 32], 5, 2)
    flow = normalizeBatch(flow, True)
    flow = lrelu(flow)
    # Sigmoid keeps the generated 28x28x1 image in the [0, 1] range.
    flow = tf.nn.sigmoid(
        deconv('layer_3', flow, [batch_size, 28, 28, 1], 5, 2))
    return flow
Example #2
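The matching discriminator: two strided convolutions mirror the generator's upsampling path, followed by fully connected layers. It returns a single unnormalized logit per example (no sigmoid), so a sigmoid-based loss would typically be applied outside this function.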
def discriminator(flow):
    # Two strided 5x5 convolutions downsample the input image (e.g. 28x28 -> 14x14 -> 7x7).
    flow = conv('layer_0', flow, 32, 5, 2, None)
    flow = normalizeBatch(flow, True)
    flow = lrelu(flow)
    flow = conv('layer_1', flow, 64, 5, 2, None)
    flow = normalizeBatch(flow, True)
    flow = lrelu(flow)

    # Flatten the feature maps and score them with fully connected layers.
    flow = flat(flow)
    flow = fullyConnected('layer_2', flow, 1024, None)
    flow = normalizeBatch(flow, True)
    flow = lrelu(flow)
    # flow = tf.nn.dropout(flow, .5)
    # A single raw logit per example; no sigmoid is applied here.
    flow = fullyConnected('output', flow, 1, None)

    return flow
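As a usage illustration only (an assumption, not part of the original examples), here is a minimal sketch of how a generator/discriminator pair like the ones above is commonly wired into a standard GAN loss in TensorFlow 1.x. The placeholders real_images and z, the variable-scope names, and the premise that the layer helpers create their variables with tf.get_variable (so that reuse=True shares discriminator weights) are all assumptions.

import tensorflow as tf  # TensorFlow 1.x assumed, matching the examples above

# Hypothetical inputs; shapes chosen to match the examples (batch_size and
# latent_dim are the same globals the examples rely on).
real_images = tf.placeholder(tf.float32, [batch_size, 28, 28, 1])
z = tf.placeholder(tf.float32, [batch_size, latent_dim])

with tf.variable_scope('generator'):
    fake_images = generator(z)
with tf.variable_scope('discriminator'):
    d_real = discriminator(real_images)
with tf.variable_scope('discriminator', reuse=True):
    d_fake = discriminator(fake_images)

# Standard GAN losses computed on the raw logits returned by discriminator,
# which is why the example leaves the sigmoid out of its output layer.
d_loss = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.ones_like(d_real), logits=d_real) +
    tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.zeros_like(d_fake), logits=d_fake))
g_loss = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.ones_like(d_fake), logits=d_fake))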
Example #3
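An encoder in the style of a variational autoencoder: a convolutional stack followed by two linear heads that output the mean and (log-)scale parameters of a latent_dim-dimensional latent distribution. The global latent_dim is assumed to be defined elsewhere.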
def encoder(flow):
    # Two strided 5x5 convolutions downsample the input image.
    flow = conv('layer_0', flow, 32, 5, 2, None)
    flow = normalizeBatch(flow, True)
    flow = lrelu(flow)

    flow = conv('layer_1', flow, 64, 5, 2, None)
    flow = normalizeBatch(flow, True)
    flow = lrelu(flow)

    flow = flat(flow)

    flow = fullyConnected('layer_2', flow, 1024, None)
    flow = normalizeBatch(flow, True)
    flow = lrelu(flow)

    # Two linear heads of size latent_dim parameterize the latent distribution.
    mean = fullyConnected('mu', flow, latent_dim, None)
    sigma = fullyConnected('sigma', flow, latent_dim, None)

    return mean, sigma
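Again purely as an illustration under stated assumptions: if the 'sigma' head is interpreted as the log-variance of the latent distribution (a common convention, not confirmed by the source), the usual reparameterization trick and KL term would look roughly like this in the same TensorFlow 1.x setting; images is a hypothetical input batch.

mean, sigma = encoder(images)            # images: hypothetical input batch
eps = tf.random_normal(tf.shape(mean))   # eps ~ N(0, I)
z = mean + tf.exp(0.5 * sigma) * eps     # reparameterized latent sample

# KL divergence between N(mean, exp(sigma)) and the standard normal prior.
kl_loss = tf.reduce_mean(
    -0.5 * tf.reduce_sum(1.0 + sigma - tf.square(mean) - tf.exp(sigma), axis=1))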