Code example #1
File: training.py  Project: tiffany70072/MLDS2017
    def discriminator(self, pict, text, reuse=False):
        print("discriminator")
        with tf.variable_scope("discriminator") as scope:
            if reuse: scope.reuse_variables()
            if not reuse: print(pict.shape)

            # Four 5x5 convolutions reduce the 64x64x3 image to a
            # 4x4x(pict_dim*8) feature map, with leaky ReLU activations
            # and batch norm after every layer except the first.
            x0 = my_lib.lrelu(
                my_lib.conv2d(pict, [5, 5, 3, self.pict_dim], 'd_0'))
            if not reuse: print(x0.shape)
            x1 = my_lib.lrelu(
                my_lib.batch_norm(
                    my_lib.conv2d(x0, [5, 5, self.pict_dim, self.pict_dim * 2],
                                  'd_1'), 'd_bn1'))
            if not reuse: print(x1.shape)
            x2 = my_lib.lrelu(
                my_lib.batch_norm(
                    my_lib.conv2d(x1,
                                  [5, 5, self.pict_dim * 2, self.pict_dim * 4],
                                  'd_2'), 'd_bn2'))
            if not reuse: print(x2.shape)
            x3 = my_lib.lrelu(
                my_lib.batch_norm(
                    my_lib.conv2d(x2,
                                  [5, 5, self.pict_dim * 4, self.pict_dim * 8],
                                  'd_3'), 'd_bn3'))
            if not reuse: print(x3.shape)

            # Flatten the 4x4 feature map and append the 128-d text encoding
            # produced by the generator, then project to a single logit.
            x3 = tf.reshape(x3, [-1, 8192])
            x3 = tf.concat([self.text_encoding, x3], axis=1)
            if not reuse: print(x3.shape)
            x4 = my_lib.matmul(x3, 8192 + 128, 1, 'd_4')
            if not reuse: print(x4.shape)

            return tf.nn.sigmoid(x4), x4
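The helper module `my_lib` is not part of this listing, so its layer definitions are an assumption. The call sites above, together with the flatten size (8192 = 4 * 4 * pict_dim * 8, which implies pict_dim = 64 and stride-2 convolutions over a 64x64 input), suggest helpers along these lines. This is a minimal TF 1.x sketch; the real my_lib may differ.

    import tensorflow as tf

    # Assumed signatures, inferred from how the discriminator calls them.
    def conv2d(x, filter_shape, name, stride=2):
        # Stride-2 5x5 convolution; four of these take 64x64 down to 4x4.
        with tf.variable_scope(name):
            w = tf.get_variable('w', filter_shape,
                                initializer=tf.truncated_normal_initializer(stddev=0.02))
            b = tf.get_variable('b', [filter_shape[-1]],
                                initializer=tf.zeros_initializer())
            return tf.nn.conv2d(x, w, strides=[1, stride, stride, 1],
                                padding='SAME') + b

    def lrelu(x, leak=0.2):
        # Leaky ReLU, the usual DCGAN discriminator activation.
        return tf.maximum(x, leak * x)

    def batch_norm(x, name):
        # Thin wrapper over the layers API; the original may handle
        # an is_training flag explicitly.
        return tf.contrib.layers.batch_norm(x, scope=name)

    def matmul(x, in_dim, out_dim, name):
        # Fully connected layer; x has shape [batch, in_dim].
        with tf.variable_scope(name):
            w = tf.get_variable('w', [in_dim, out_dim],
                                initializer=tf.truncated_normal_initializer(stddev=0.02))
            b = tf.get_variable('b', [out_dim],
                                initializer=tf.zeros_initializer())
            return tf.matmul(x, w) + b

Under these assumptions the final dense layer 'd_4' sees 8192 + 128 inputs, matching the concatenation of the flattened image features with the 128-d text encoding.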
Code example #2
File: training.py  Project: tiffany70072/MLDS2017
    def generator(self, z):
        print("generator")
        with tf.variable_scope("generator") as scope:  # may be useless
            # Project the text embedding to 128 dimensions and concatenate
            # it with the noise vector z as the generator input.
            self.text_encoding = my_lib.matmul(self.right_text_nn,
                                               self.text_dim, 128, 'g_txt')
            x0 = tf.concat([z, self.text_encoding], axis=1)
            print(x0.shape)

            # Fully connected projection, reshaped into a 4x4 feature map.
            x1 = tf.nn.relu(
                my_lib.matmul(x0, self.noise_dim + 128,
                              self.pict_dim * 16 * 4 * 4, 'g_0'))
            x1 = tf.reshape(x1, [-1, 4, 4, self.pict_dim * 16])
            x1 = tf.nn.relu(my_lib.batch_norm(x1, 'g_bn1'))
            print(x1.shape)

            # Four transposed convolutions upsample 4x4 -> 8 -> 16 -> 32 -> 64,
            # halving the channel count at each step.
            x2 = my_lib.conv2d_transpose(
                x1, [5, 5, self.pict_dim * 8, self.pict_dim * 16],
                [self.batch_size, 8, 8, self.pict_dim * 8], 'g_1')
            x2 = tf.nn.relu(my_lib.batch_norm(x2, 'g_bn2'))
            print(x2.shape)
            x3 = my_lib.conv2d_transpose(
                x2, [5, 5, self.pict_dim * 4, self.pict_dim * 8],
                [self.batch_size, 16, 16, self.pict_dim * 4], 'g_2')
            x3 = tf.nn.relu(my_lib.batch_norm(x3, 'g_bn3'))
            print(x3.shape)
            x4 = my_lib.conv2d_transpose(
                x3, [5, 5, self.pict_dim * 2, self.pict_dim * 4],
                [self.batch_size, 32, 32, self.pict_dim * 2], 'g_3')
            x4 = tf.nn.relu(my_lib.batch_norm(x4, 'g_bn4'))
            print(x4.shape)
            x5 = my_lib.conv2d_transpose(x4, [5, 5, 3, self.pict_dim * 2],
                                         [self.batch_size, 64, 64, 3], 'g_4')
            print(x5.shape)

            # tanh keeps the generated pixel values in [-1, 1].
            return tf.nn.tanh(x5)
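The conv2d_transpose helper is likewise not shown; judging from the call sites (filter shape [h, w, out_channels, in_channels] plus an explicit output shape), a plausible TF 1.x sketch is the following. This is an assumption, not the project's actual implementation.

    # Assumed counterpart to conv2d; stride 2 doubles the spatial size.
    def conv2d_transpose(x, filter_shape, output_shape, name, stride=2):
        with tf.variable_scope(name):
            w = tf.get_variable('w', filter_shape,
                                initializer=tf.truncated_normal_initializer(stddev=0.02))
            b = tf.get_variable('b', [filter_shape[2]],
                                initializer=tf.zeros_initializer())
            return tf.nn.conv2d_transpose(x, w, output_shape=output_shape,
                                          strides=[1, stride, stride, 1],
                                          padding='SAME') + b

The training graph is not part of this listing either. A common conditional-GAN wiring for these two methods, sketched here under the assumption of a model instance exposing the attributes used above, would call the generator first (it defines self.text_encoding, which the discriminator reads) and share discriminator weights via reuse=True:

    # Hypothetical wiring; variable and attribute names are illustrative.
    fake_pict = model.generator(z)
    real_prob, real_logit = model.discriminator(real_pict, text, reuse=False)
    fake_prob, fake_logit = model.discriminator(fake_pict, text, reuse=True)

    d_loss = tf.reduce_mean(
        tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.ones_like(real_logit),
                                                logits=real_logit) +
        tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.zeros_like(fake_logit),
                                                logits=fake_logit))
    g_loss = tf.reduce_mean(
        tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.ones_like(fake_logit),
                                                logits=fake_logit))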