Ejemplo n.º 1
0
def classifier(x, reuse=None):
    """Three-conv + two-FC classifier for 28x28 inputs.

    Weights C_W1..C_W5 and sizes K/L/M/N are module-level; only the bias
    variables are created here, inside the "classifier" variable scope.

    Returns:
        (Ysoftmax, Ysigmoid, Ylogits) over the 10 output classes.
    """
    with tf.variable_scope("classifier", reuse=reuse) as scope_c:
        # Per-layer bias variables (weights live at module scope).
        b1 = utils.bias_variable([K], name="C_B1")
        b2 = utils.bias_variable([L], name="C_B2")
        b3 = utils.bias_variable([M], name="C_B3")
        b4 = utils.bias_variable([N], name="C_B4")
        b5 = utils.bias_variable([10], name="C_B5")

        def conv_relu(inp, w, b, stride):
            # SAME-padded conv2d, bias add, then ReLU.
            return tf.nn.relu(
                tf.nn.conv2d(inp, w, strides=[1, stride, stride, 1],
                             padding='SAME') + b)

        h1 = conv_relu(x, C_W1, b1, 1)   # output is 28x28
        h2 = conv_relu(h1, C_W2, b2, 2)  # output is 14x14
        h3 = conv_relu(h2, C_W3, b3, 2)  # output is 7x7

        # Flatten the last feature map for the fully connected layers.
        flat = tf.reshape(h3, shape=[-1, 7 * 7 * M])
        h4 = tf.nn.relu(tf.matmul(flat, C_W4) + b4)
        logits = tf.matmul(h4, C_W5) + b5

        sig = tf.nn.sigmoid(logits)
        soft = tf.nn.softmax(logits)

        return soft, sig, logits
Ejemplo n.º 2
0
def generator(y, bs, reuse=None):
    """Generator tied to the classifier: maps the vector `y` back to a
    28x28x1 image by applying the classifier weights C_W1..C_W5 in reverse
    (transposed matmuls, then conv2d_transpose deconvolutions).

    Args:
        y: input tensor; presumably [bs, 10] labels/logits — TODO confirm.
        bs: static batch size, required for the conv2d_transpose output shapes.
        reuse: forwarded to tf.variable_scope for variable sharing.

    Returns:
        (GXsigmoid, GXlogits): image in [0, 1] and its pre-sigmoid logits.
    """
    with tf.variable_scope("generator", reuse=reuse) as scope_g:
        # Variables for the generator (biases only; the weights are the
        # classifier's C_W1..C_W5, reused/transposed).
        G_B1 = utils.bias_variable([1], name="G_B1")
        G_B2 = utils.bias_variable([K], name="G_B2")
        G_B3 = utils.bias_variable([L], name="G_B3")
        G_B4 = utils.bias_variable([M * 7 * 7], name="G_B4")
        G_B5 = utils.bias_variable([N], name="G_B5")

        # Fully connected layers run in reverse via transposed weights.
        GH4 = tf.nn.relu(tf.matmul(y, tf.transpose(C_W5)) + G_B5)
        GH3 = tf.nn.relu(tf.matmul(GH4, tf.transpose(C_W4)) + G_B4)
        # Un-flatten back to a 7x7 feature map with M channels.
        GHH3 = tf.reshape(GH3, shape=[-1, 7, 7, M])
        stride = 2  # output is 14x14
        GH2 = tf.nn.relu(
            tf.nn.conv2d_transpose(GHH3,
                                   C_W3,
                                   output_shape=[bs, 14, 14, L],
                                   strides=[1, stride, stride, 1]) +
            G_B3)  # deconv with the classifier's W3
        stride = 2  # output is 28x28
        GH1 = tf.nn.relu(
            tf.nn.conv2d_transpose(GH2,
                                   C_W2,
                                   output_shape=[bs, 28, 28, K],
                                   strides=[1, stride, stride, 1]) +
            G_B2)  # deconv with the classifier's W2
        stride = 1  # output is 28x28
        GXlogits = tf.nn.conv2d_transpose(GH1,
                                          C_W1,
                                          output_shape=[bs, 28, 28, 1],
                                          strides=[1, stride, stride, 1
                                                   ]) + G_B1  # deconv with the classifier's W1
        GXsigmoid = tf.nn.sigmoid(GXlogits)

        return GXsigmoid, GXlogits
Ejemplo n.º 3
0
def classifier(x, is_training=True, reuse=None):
    """Two-conv + two-FC classifier for 32x32 inputs with batch norm on
    the second conv layer.

    Args:
        x: input image batch; presumably [batch, 32, 32, C] — TODO confirm.
        is_training: forwarded to utils.bn to select train/inference mode.
        reuse: forwarded to tf.variable_scope for variable sharing.

    Returns:
        (Ysoftmax, Ysigmoid, Ylogits) over `num_classes` outputs.
    """
    with tf.variable_scope("classifier", reuse=reuse) as scope_c:
        # Variables for classifier (biases only; weights C_W1..C_W4 are
        # module-level).
        C_B1 = utils.bias_variable([K], name="C_B1")
        C_B2 = utils.bias_variable([L], name="C_B2")
        C_B3 = utils.bias_variable([M], name="C_B3")
        C_B4 = utils.bias_variable([num_classes], name="C_B4")

        stride = 2  # output is 16x16
        H1 = lrelu(
            tf.nn.conv2d(
                x, C_W1, strides=[1, stride, stride, 1], padding='SAME') +
            C_B1)
        stride = 2  # output is 8x8
        H2 = lrelu(
            utils.bn((tf.nn.conv2d(
                H1, C_W2, strides=[1, stride, stride, 1], padding='SAME') +
                      C_B2),
                     is_training=is_training,
                     scope="C_bn_h2"))

        # reshape the output from the second convolution for the fully connected layer
        HH2 = tf.reshape(H2, shape=[-1, 8 * 8 * L])

        H3 = tf.nn.relu(tf.matmul(HH2, C_W3) + C_B3)
        Ylogits = tf.matmul(H3, C_W4) + C_B4

        Ysigmoid = tf.nn.sigmoid(Ylogits)
        Ysoftmax = tf.nn.softmax(Ylogits)

        return Ysoftmax, Ysigmoid, Ylogits
Ejemplo n.º 4
0
def generator(z, bs, is_training=True, reuse=None):
    """Generator tied to the classifier: maps latent `z` to a 32x32x3 image
    by applying the classifier weights C_W1..C_W4 in reverse, with batch
    norm on the two fully connected stages.

    Args:
        z: latent input; presumably [bs, num_classes] — TODO confirm.
        bs: static batch size for the conv2d_transpose output shapes.
        is_training: forwarded to utils.bn to select train/inference mode.
        reuse: forwarded to tf.variable_scope for variable sharing.

    Returns:
        (GXsigmoid, GXlogits): image in [0, 1] and its pre-sigmoid logits.
    """
    with tf.variable_scope("generator", reuse=reuse) as scope_g:
        # Variables for the generator (biases only; weights are the
        # classifier's C_W1..C_W4, reused/transposed).
        G_B1 = utils.bias_variable([3], name="G_B1")
        G_B2 = utils.bias_variable([K], name="G_B2")
        G_B3 = utils.bias_variable([8 * 8 * L], name="G_B3")
        G_B4 = utils.bias_variable([M], name="G_B4")

        GH3 = tf.nn.relu(
            utils.bn((tf.matmul(z, tf.transpose(C_W4)) + G_B4),
                     is_training=is_training,
                     scope="G_bn_gh3"))
        GH2 = tf.nn.relu(
            utils.bn((tf.matmul(GH3, tf.transpose(C_W3)) + G_B3),
                     is_training=is_training,
                     scope="G_bn_gh2"))
        # Un-flatten back to an 8x8 feature map with L channels.
        GHH2 = tf.reshape(GH2, shape=[-1, 8, 8, L])
        stride = 2  # output is 16x16
        GH1 = tf.nn.relu(
            tf.nn.conv2d_transpose(GHH2,
                                   C_W2,
                                   output_shape=[bs, 16, 16, K],
                                   strides=[1, stride, stride, 1]) +
            G_B2)  # deconv with the classifier's W2
        stride = 2  # output is 32x32
        GXlogits = tf.nn.conv2d_transpose(GH1,
                                          C_W1,
                                          output_shape=[bs, 32, 32, 3],
                                          strides=[1, stride, stride, 1
                                                   ]) + G_B1  # deconv with the classifier's W1
        GXsigmoid = tf.nn.sigmoid(GXlogits)

        return GXsigmoid, GXlogits
Ejemplo n.º 5
0
def generator(y, reuse=None):
    """Two-layer tied-weight generator: maps `y` back to a 28x28x1 image
    using the transposed classifier weights C_W1/C_W2.

    Returns:
        (GXsigmoid, GXlogits): image in [0, 1] and its pre-sigmoid logits.
    """
    with tf.variable_scope("generator", reuse=reuse) as scope_g:
        # Generator bias variables; the weights are tied to the classifier.
        bias_out = utils.bias_variable([784], name="G_B1")
        bias_hidden = utils.bias_variable([L], name="G_B2")

        hidden = tf.nn.sigmoid(tf.matmul(y, tf.transpose(C_W2)) + bias_hidden)
        flat = tf.matmul(hidden, tf.transpose(C_W1)) + bias_out
        logits = tf.reshape(flat, [-1, 28, 28, 1])
        out = tf.nn.sigmoid(logits)

        return out, logits
Ejemplo n.º 6
0
def classifier(x, reuse=None):
    """One-hidden-layer sigmoid classifier over flattened 28x28 input.

    Returns:
        (Ysoftmax, Ysigmoid, Ylogits) over the 10 output classes.
    """
    with tf.variable_scope("classifier", reuse=reuse) as scope_c:
        # Bias variables; the weight matrices C_W1/C_W2 are module-level.
        bias_hidden = utils.bias_variable([L], name="C_B1")
        bias_out = utils.bias_variable([10], name="C_B2")

        flat = tf.reshape(x, [-1, 784])
        hidden = tf.nn.sigmoid(tf.matmul(flat, C_W1) + bias_hidden)
        logits = tf.matmul(hidden, C_W2) + bias_out

        sig = tf.nn.sigmoid(logits)
        soft = tf.nn.softmax(logits)

        return soft, sig, logits
Ejemplo n.º 7
0
def discriminator(x, reuse=None):
    """Two-layer MLP discriminator producing a single logit per example.

    Returns:
        (Ysoftmax, Ysigmoid, Ylogits) for the 1-unit output.
    """
    with tf.variable_scope("discriminator", reuse=reuse) as scope:
        # All discriminator parameters are created inside this scope.
        w_hidden = utils.weight_variable_xavier_initialized([784, K], name="D_W1")
        b_hidden = utils.bias_variable([K], name="D_B1")
        w_out = utils.weight_variable_xavier_initialized([K, 1], name="D_W2")
        b_out = utils.bias_variable([1], name="D_B2")

        flat = tf.reshape(x, [-1, 784])
        hidden = tf.nn.relu(tf.matmul(flat, w_hidden) + b_hidden)
        logits = tf.matmul(hidden, w_out) + b_out

        sig = tf.nn.sigmoid(logits)
        soft = tf.nn.softmax(logits)

        return soft, sig, logits
Ejemplo n.º 8
0
def generator(y, reuse=None):
    """Five-layer tied-weight generator: walks back down through the
    transposed classifier weights C_W5..C_W1 to produce a 32x32x3 image.

    Returns:
        (GXsigmoid, GXlogits): image in [0, 1] and its pre-sigmoid logits.
    """
    with tf.variable_scope("generator", reuse=reuse) as scope_g:
        # Generator bias variables; weights are tied to the classifier's.
        b_out = utils.bias_variable([32 * 32 * 3], name="G_B1")
        b2 = utils.bias_variable([L], name="G_B2")
        b3 = utils.bias_variable([M], name="G_B3")
        b4 = utils.bias_variable([N], name="G_B4")
        b5 = utils.bias_variable([O], name="G_B5")

        # Four sigmoid layers through the transposed weights, deepest first.
        h = y
        for w, b in ((C_W5, b5), (C_W4, b4), (C_W3, b3), (C_W2, b2)):
            h = tf.nn.sigmoid(tf.matmul(h, tf.transpose(w)) + b)
        flat = tf.matmul(h, tf.transpose(C_W1)) + b_out
        logits = tf.reshape(flat, [-1, 32, 32, 3])
        out = tf.nn.sigmoid(logits)

        return out, logits
Ejemplo n.º 9
0
def classifier(x, reuse=None):
    """Five-layer sigmoid MLP classifier over flattened 32x32x3 input.

    Returns:
        (Ysoftmax, Ysigmoid, Ylogits) over `num_classes` outputs.
    """
    with tf.variable_scope("classifier", reuse=reuse) as scope_c:
        # Bias variables; the weight matrices C_W1..C_W5 are module-level.
        b1 = utils.bias_variable([L], name="C_B1")
        b2 = utils.bias_variable([M], name="C_B2")
        b3 = utils.bias_variable([N], name="C_B3")
        b4 = utils.bias_variable([O], name="C_B4")
        b5 = utils.bias_variable([num_classes], name="C_B5")

        # Flatten, then four sigmoid hidden layers and a linear output layer.
        h = tf.reshape(x, [-1, 32 * 32 * 3])
        for w, b in ((C_W1, b1), (C_W2, b2), (C_W3, b3), (C_W4, b4)):
            h = tf.nn.sigmoid(tf.matmul(h, w) + b)
        logits = tf.matmul(h, C_W5) + b5

        sig = tf.nn.sigmoid(logits)
        soft = tf.nn.softmax(logits)

        return soft, sig, logits
Ejemplo n.º 10
0
def classifier(x, reuse=None):
    """Linear (softmax-regression) classifier over flattened 32x32x3 input.

    Returns:
        (Ysoftmax, Ysigmoid, Ylogits) over `num_classes` outputs.
    """
    with tf.variable_scope("classifier", reuse=reuse) as scope_c:
        # Single bias vector; the weight matrix C_W1 is module-level.
        bias = utils.bias_variable([num_classes], name="C_B1")

        flat = tf.reshape(x, [-1, 32 * 32 * 3])
        logits = tf.matmul(flat, C_W1) + bias

        sig = tf.nn.sigmoid(logits)
        soft = tf.nn.softmax(logits)

        return soft, sig, logits
Ejemplo n.º 11
0
def discriminator(x, is_training=True, reuse=None):
    """DCGAN-style discriminator: two strided convs (the second with batch
    norm) followed by two fully connected layers producing one logit.

    Args:
        x: input image batch; presumably [batch, 32, 32, 3] — TODO confirm.
        is_training: forwarded to utils.bn to select train/inference mode.
        reuse: forwarded to tf.variable_scope for variable sharing.

    Returns:
        (Ysoftmax, Ysigmoid, Ylogits) for the single-unit output. NOTE:
        softmax over a single logit is identically 1.0; kept to match the
        sibling functions' return contract.
    """
    with tf.variable_scope("discriminator", reuse=reuse) as scope:
        # Variables for the discriminator, all created inside this scope.
        D_W1 = utils.weight_variable([4, 4, 3, K], name="D_W1")
        D_B1 = utils.bias_variable([K], name="D_B1")
        D_W2 = utils.weight_variable([4, 4, K, L], name="D_W2")
        D_B2 = utils.bias_variable([L], name="D_B2")

        D_W3 = utils.weight_variable([8 * 8 * L, M], name="D_W3")
        D_B3 = utils.bias_variable([M], name="D_B3")
        D_W4 = utils.weight_variable([M, 1], name="D_W4")
        D_B4 = utils.bias_variable([1], name="D_B4")

        stride = 2  # output is 16x16
        H1 = lrelu(
            tf.nn.conv2d(
                x, D_W1, strides=[1, stride, stride, 1], padding='SAME') +
            D_B1)
        stride = 2  # output is 8x8
        H2 = lrelu(
            utils.bn((tf.nn.conv2d(
                H1, D_W2, strides=[1, stride, stride, 1], padding='SAME') +
                      D_B2),
                     is_training=is_training,
                     scope="D_bn_h2"))

        # reshape the output from the second convolution for the fully connected layer
        HH2 = tf.reshape(H2, shape=[-1, 8 * 8 * L])

        H3 = lrelu(tf.matmul(HH2, D_W3) + D_B3)
        Ylogits = tf.matmul(H3, D_W4) + D_B4

        Ysigmoid = tf.nn.sigmoid(Ylogits)
        Ysoftmax = tf.nn.softmax(Ylogits)

        return Ysoftmax, Ysigmoid, Ylogits