# Example 1
def generator(z, bs, is_training=True, reuse=None):
    """Generator network: map a latent code ``z`` to a (bs, 32, 32, 3) image.

    Mirrors the classifier by reusing its module-level weight tensors
    C_W1..C_W4 (transposed for the dense layers, as deconv filters for the
    conv layers); only the bias variables are created inside this scope.

    Args:
        z: 2-D latent input batch. It is multiplied by C_W4^T, so its last
            dimension must match C_W4's output dimension — presumably
            num_classes; confirm against the caller.
        bs: static batch size, required by conv2d_transpose output_shape.
        is_training: forwarded to the batch-norm layers.
        reuse: variable-scope reuse flag for weight sharing across calls.

    Returns:
        Tuple (GXsigmoid, GXlogits): the sigmoid-squashed image in [0, 1]
        and the raw pre-activation logits.
    """
    with tf.variable_scope("generator", reuse=reuse):
        # Biases owned by the generator (weights are shared with the classifier).
        G_B1 = utils.bias_variable([3], name="G_B1")
        G_B2 = utils.bias_variable([K], name="G_B2")
        G_B3 = utils.bias_variable([8 * 8 * L], name="G_B3")
        G_B4 = utils.bias_variable([M], name="G_B4")

        # Dense: z -> M units (transpose of the classifier's top layer), then BN + ReLU.
        fc_top = tf.matmul(z, tf.transpose(C_W4)) + G_B4
        h3 = tf.nn.relu(utils.bn(fc_top, is_training=is_training, scope="G_bn_gh3"))

        # Dense: M -> 8*8*L units, BN + ReLU, then reshape to an 8x8xL feature map.
        fc_mid = tf.matmul(h3, tf.transpose(C_W3)) + G_B3
        h2 = tf.nn.relu(utils.bn(fc_mid, is_training=is_training, scope="G_bn_gh2"))
        h2_map = tf.reshape(h2, shape=[-1, 8, 8, L])

        # Transposed conv with C_W2, stride 2: 8x8xL -> 16x16xK.
        deconv2 = tf.nn.conv2d_transpose(h2_map,
                                         C_W2,
                                         output_shape=[bs, 16, 16, K],
                                         strides=[1, 2, 2, 1])
        h1 = tf.nn.relu(deconv2 + G_B2)

        # Transposed conv with C_W1, stride 2: 16x16xK -> 32x32x3 image logits.
        GXlogits = tf.nn.conv2d_transpose(h1,
                                          C_W1,
                                          output_shape=[bs, 32, 32, 3],
                                          strides=[1, 2, 2, 1]) + G_B1
        GXsigmoid = tf.nn.sigmoid(GXlogits)

        return GXsigmoid, GXlogits
# Example 2
def classifier(x, is_training=True, reuse=None):
    """Classifier network: map an image batch ``x`` to class predictions.

    Uses the module-level shared weight tensors C_W1..C_W4 (the generator
    reuses them transposed); only the bias variables live in this scope.

    Args:
        x: NHWC image batch. Spatial size is assumed 32x32 — the flatten
            below expects an 8x8 map after two stride-2 convs; confirm
            against the caller.
        is_training: forwarded to the batch-norm layer.
        reuse: variable-scope reuse flag.

    Returns:
        Tuple (Ysoftmax, Ysigmoid, Ylogits): softmax probabilities,
        elementwise sigmoid activations, and the raw logits.
    """
    with tf.variable_scope("classifier", reuse=reuse):
        # Per-layer biases (weights are module-level shared variables).
        C_B1 = utils.bias_variable([K], name="C_B1")
        C_B2 = utils.bias_variable([L], name="C_B2")
        C_B3 = utils.bias_variable([M], name="C_B3")
        C_B4 = utils.bias_variable([num_classes], name="C_B4")

        # Conv 1, stride 2: 32x32 -> 16x16, K channels, leaky ReLU.
        conv1 = tf.nn.conv2d(x, C_W1, strides=[1, 2, 2, 1], padding='SAME')
        H1 = lrelu(conv1 + C_B1)

        # Conv 2, stride 2 with batch norm: 16x16 -> 8x8, L channels.
        conv2 = tf.nn.conv2d(H1, C_W2, strides=[1, 2, 2, 1], padding='SAME')
        H2 = lrelu(
            utils.bn(conv2 + C_B2, is_training=is_training, scope="C_bn_h2"))

        # Flatten the conv features for the fully connected layers.
        flat = tf.reshape(H2, shape=[-1, 8 * 8 * L])

        # Dense head: 8*8*L -> M -> num_classes.
        H3 = tf.nn.relu(tf.matmul(flat, C_W3) + C_B3)
        Ylogits = tf.matmul(H3, C_W4) + C_B4

        return tf.nn.softmax(Ylogits), tf.nn.sigmoid(Ylogits), Ylogits
# Example 3
def discriminator(x, is_training=True, reuse=None):
    """Discriminator network: map an image batch ``x`` to a real/fake score.

    Unlike the classifier, the discriminator owns its weights (D_W1..D_W4)
    as well as its biases; nothing here is shared with the generator.

    Args:
        x: NHWC image batch with 3 channels. Spatial size is assumed 32x32 —
            the flatten below expects an 8x8 map after two stride-2 convs;
            confirm against the caller.
        is_training: forwarded to the batch-norm layer.
        reuse: variable-scope reuse flag (needed when the discriminator is
            applied to both real and generated batches).

    Returns:
        Tuple (Ysoftmax, Ysigmoid, Ylogits): softmax over the single logit,
        sigmoid probability of "real", and the raw logit.
    """
    with tf.variable_scope("discriminator", reuse=reuse):
        # Weights and biases for the discriminator (not shared).
        D_W1 = utils.weight_variable([4, 4, 3, K], name="D_W1")
        D_B1 = utils.bias_variable([K], name="D_B1")
        D_W2 = utils.weight_variable([4, 4, K, L], name="D_W2")
        D_B2 = utils.bias_variable([L], name="D_B2")

        D_W3 = utils.weight_variable([8 * 8 * L, M], name="D_W3")
        D_B3 = utils.bias_variable([M], name="D_B3")
        D_W4 = utils.weight_variable([M, 1], name="D_W4")
        D_B4 = utils.bias_variable([1], name="D_B4")

        # Conv 1, stride 2: 32x32 -> 16x16, K channels, leaky ReLU.
        H1 = lrelu(
            tf.nn.conv2d(
                x, D_W1, strides=[1, 2, 2, 1], padding='SAME') + D_B1)
        # Conv 2, stride 2 with batch norm: 16x16 -> 8x8, L channels.
        # (Leftover debug prints of H1/H2 shapes removed.)
        H2 = lrelu(
            utils.bn((tf.nn.conv2d(
                H1, D_W2, strides=[1, 2, 2, 1], padding='SAME') + D_B2),
                     is_training=is_training,
                     scope="D_bn_h2"))

        # Flatten the conv features for the fully connected layers.
        HH2 = tf.reshape(H2, shape=[-1, 8 * 8 * L])

        # Dense head: 8*8*L -> M -> 1 logit.
        H3 = lrelu(tf.matmul(HH2, D_W3) + D_B3)
        Ylogits = tf.matmul(H3, D_W4) + D_B4

        Ysigmoid = tf.nn.sigmoid(Ylogits)
        # NOTE(review): softmax over a single logit is identically 1.0, so
        # Ysoftmax carries no information; kept only so the return signature
        # matches classifier(). Use Ysigmoid/Ylogits for the GAN loss.
        Ysoftmax = tf.nn.softmax(Ylogits)

        return Ysoftmax, Ysigmoid, Ylogits