Example #1
    def generator(self, inputs, is_training, reuse=False):
        with tf.variable_scope('generator', reuse=reuse):
            g_hidden1 = fully_connect(inputs, 1024, name='g_hidden1')
            g_hidden2 = lrelu(bn(fully_connect(g_hidden1, 7*7*128, name='g_hidden2'), is_training))
            g_reshape = tf.reshape(g_hidden2, [-1, 7, 7, 128])
            g_hidden3 = lrelu(bn(conv2d_t(g_reshape, [None, 14, 14, 64], [4, 4], name='g_hidden3'), is_training))
            g_hidden4 = conv2d_t(g_hidden3, [None, 28, 28, 1], [4, 4], name='g_hidden4')
            g_logits = sigmoid(g_hidden4)

            return g_hidden4, g_logits
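
The helpers used above (fully_connect, conv2d_t, bn, lrelu, sigmoid) are project-specific wrappers that this listing does not include. As a rough sketch only, assuming plain TF 1.x tf.layers semantics rather than the project's actual wrappers, they could be defined like this:

import tensorflow as tf

# Sketch only: stand-ins for the unshown wrappers, assuming TF 1.x tf.layers.
def fully_connect(x, units, name=None):
    return tf.layers.dense(x, units, name=name)

def conv2d_t(x, output_shape, kernel_size, name=None):
    # output_shape such as [None, 14, 14, 64] is read only for its channel count;
    # the spatial doubling (7 -> 14 -> 28) comes from stride 2 with 'same' padding.
    return tf.layers.conv2d_transpose(x, output_shape[-1], kernel_size,
                                      strides=2, padding='same', name=name)

def bn(x, is_training):
    return tf.layers.batch_normalization(x, training=is_training)

def lrelu(x, alpha=0.2):
    return tf.nn.leaky_relu(x, alpha)

sigmoid = tf.nn.sigmoid
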
Example #2
    def discriminator(self, inputs, is_training, reuse=False):
        with tf.variable_scope('discriminator', reuse=reuse):
            d_hidden1 = conv2d(inputs, 64, [4, 4], strides=[1, 2, 2, 1], name='d_hidden1')
            d_hidden2 = lrelu(bn(conv2d(d_hidden1, 128, [4, 4], strides=[1, 2, 2, 1], name='d_hidden2'), is_training))
            d_flatten = tf.reshape(d_hidden2, [-1, 7*7*128])
            d_hidden3 = lrelu(bn(fully_connect(d_flatten, 1024, name='d_hidden3'), is_training))
            d_hidden4 = fully_connect(d_hidden3, 1, name='d_hidden4')
            d_logits = sigmoid(d_hidden4)

            return d_hidden4, d_logits
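
In a GAN training graph this discriminator is usually called twice, once on real images and once on generator output, with reuse=True on the second call so both passes share the same variables. A minimal usage sketch; the model instance, the placeholder shapes, and the 100-dimensional noise vector are assumptions, not part of the original code:

import tensorflow as tf

# Sketch only: wiring the generator and discriminator with shared weights.
x_real = tf.placeholder(tf.float32, [None, 28, 28, 1], name='x_real')
z = tf.placeholder(tf.float32, [None, 100], name='z')
is_training = tf.placeholder(tf.bool, name='is_training')

_, x_fake = model.generator(z, is_training)                     # sigmoid output in [0, 1]; 'model' is hypothetical
d_real_raw, d_real = model.discriminator(x_real, is_training)   # first call creates the variables
d_fake_raw, d_fake = model.discriminator(x_fake, is_training, reuse=True)  # second call reuses them

# Losses are computed on the pre-sigmoid values (d_hidden4 in the code above).
d_loss_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(
    logits=d_real_raw, labels=tf.ones_like(d_real_raw)))
d_loss_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(
    logits=d_fake_raw, labels=tf.zeros_like(d_fake_raw)))
d_loss = d_loss_real + d_loss_fake
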
Example #3
    def make_model(self, inputs, is_training):

        with tf.variable_scope('STAGE_1'):
            layer = relu(
                bn(
                    conv2d(inputs,
                           64, [7, 7],
                           strides=[1, 2, 2, 1],
                           name='initial_block'), is_training))
            layer = max_pool(layer)

        with tf.variable_scope('STAGE_2'):
            layer = self.conv_block(layer, [64, 64, 256],
                                    is_training,
                                    'a',
                                    s=1)
            layer = self.identity_block(layer, [64, 64, 256], is_training, 'b')
            layer = self.identity_block(layer, [64, 64, 256], is_training, 'c')

        with tf.variable_scope('STAGE_3'):
            layer = self.conv_block(layer, [128, 128, 512], is_training, 'a')
            layer = self.identity_block(layer, [128, 128, 512], is_training,
                                        'b')
            layer = self.identity_block(layer, [128, 128, 512], is_training,
                                        'c')

        with tf.variable_scope('STAGE_4'):
            layer = self.conv_block(layer, [256, 256, 1024], is_training, 'a')
            layer = self.identity_block(layer, [256, 256, 1024], is_training,
                                        'b')
            layer = self.identity_block(layer, [256, 256, 1024], is_training,
                                        'c')
            layer = self.identity_block(layer, [256, 256, 1024], is_training,
                                        'd')
            layer = self.identity_block(layer, [256, 256, 1024], is_training,
                                        'e')
            layer = self.identity_block(layer, [256, 256, 1024], is_training,
                                        'f')

        with tf.variable_scope('STAGE_5'):
            layer = self.conv_block(layer, [512, 512, 2048], is_training, 'a')
            layer = self.identity_block(layer, [512, 512, 2048], is_training,
                                        'b')
            layer = self.identity_block(layer, [512, 512, 2048], is_training,
                                        'c')

        with tf.variable_scope('FINAL_STAGE'):
            layer = avg_pool(layer, [1, 7, 7, 1], [1, 1, 1, 1],
                             padding='VALID')

            _, h, w, d = layer.get_shape().as_list()

            layer = tf.reshape(layer, [-1, h * w * d])
            layer = fully_connect(layer, self.N_CLASS, 'fc')

            return layer
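
The conv_block and identity_block helpers are not part of this listing. As an assumption based on the standard bottleneck residual design they appear to follow (1x1 reduce, 3x3 conv, 1x1 expand, plus a skip connection), an identity block written directly with tf.layers could look like this; the project's own helpers may differ in naming and initialization:

import tensorflow as tf

# Sketch only: a 1x1-3x3-1x1 bottleneck identity block in plain TF 1.x layers.
def identity_block(inputs, filters, is_training, block_name):
    f1, f2, f3 = filters                      # e.g. [64, 64, 256] in STAGE_2
    with tf.variable_scope('identity_' + block_name):
        x = tf.layers.conv2d(inputs, f1, 1, padding='same', name='reduce')
        x = tf.nn.relu(tf.layers.batch_normalization(x, training=is_training))
        x = tf.layers.conv2d(x, f2, 3, padding='same', name='conv3x3')
        x = tf.nn.relu(tf.layers.batch_normalization(x, training=is_training))
        x = tf.layers.conv2d(x, f3, 1, padding='same', name='expand')
        x = tf.layers.batch_normalization(x, training=is_training)
        # The skip path is the identity, so f3 must match the input channel count.
        return tf.nn.relu(x + inputs)

In the standard design, conv_block differs only in that the skip path gets its own (optionally strided) 1x1 convolution, which is what lets the channel count and spatial size change at the start of each stage.
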
Example #4
    def make_model(self, inputs, keep_prob):
        conv1_1 = conv2d(inputs, 64, [3, 3], name='conv1_1')
        conv1_2 = conv2d(conv1_1, 64, [3, 3], name='conv1_2')
        pool1 = max_pool(conv1_2, name='pool1')

        conv2_1 = conv2d(pool1, 128, [3, 3], name='conv2_1')
        conv2_2 = conv2d(conv2_1, 128, [3, 3], name='conv2_2')
        pool2 = max_pool(conv2_2, name='pool2')

        conv3_1 = conv2d(pool2, 256, [3, 3], name='conv3_1')
        conv3_2 = conv2d(conv3_1, 256, [3, 3], name='conv3_2')
        conv3_3 = conv2d(conv3_2, 256, [3, 3], name='conv3_3')
        pool3 = max_pool(conv3_3, name='pool3')

        conv4_1 = conv2d(pool3, 512, [3, 3], name='conv4_1')
        conv4_2 = conv2d(conv4_1, 512, [3, 3], name='conv4_2')
        conv4_3 = conv2d(conv4_2, 512, [3, 3], name='conv4_3')
        pool4 = max_pool(conv4_3, name='pool4')

        conv5_1 = conv2d(pool4, 512, [3, 3], name='conv5_1')
        conv5_2 = conv2d(conv5_1, 512, [3, 3], name='conv5_2')
        conv5_3 = conv2d(conv5_2, 512, [3, 3], name='conv5_3')

        _, h, w, d = conv5_3.get_shape().as_list()

        flatten = tf.reshape(conv5_3, shape=[-1, h * w * d], name='flatten')
        fc1 = fully_connect(flatten, 4096, name='fc1')
        fc1_dropout = tf.nn.dropout(fc1,
                                    keep_prob=keep_prob,
                                    name='fc1_dropout')

        fc2 = fully_connect(fc1_dropout, 4096, name='fc2')
        fc2_dropout = tf.nn.dropout(fc2,
                                    keep_prob=keep_prob,
                                    name='fc2_dropout')

        logits = fully_connect(fc2_dropout, self.N_CLASS, name='fc3')

        return logits
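
A minimal sketch of how this VGG-style make_model is typically driven; the model instance, the 224x224 input shape, and the training setup are assumptions. Note also that newer TensorFlow releases deprecate tf.nn.dropout's keep_prob argument in favor of rate=1 - keep_prob:

import tensorflow as tf

# Sketch only: graph construction plus a softmax cross-entropy training op.
images = tf.placeholder(tf.float32, [None, 224, 224, 3], name='images')
labels = tf.placeholder(tf.int64, [None], name='labels')
keep_prob = tf.placeholder(tf.float32, name='keep_prob')   # e.g. 0.5 for training, 1.0 for eval

logits = model.make_model(images, keep_prob)               # 'model' is a hypothetical instance
loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
    labels=labels, logits=logits))
train_op = tf.train.AdamOptimizer(1e-4).minimize(loss)
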
Example #5
    def discriminator_B(self, inputs, is_training, reuse=False):
        with tf.variable_scope('discriminator_B', reuse=reuse):
            layer1 = lrelu(
                bn(
                    conv2d(inputs,
                           64, [4, 4],
                           strides=[1, 2, 2, 1],
                           initializer='random',
                           name='d_B_hidden1'), is_training))
            layer2 = lrelu(
                bn(
                    conv2d(layer1,
                           128, [4, 4],
                           strides=[1, 2, 2, 1],
                           initializer='random',
                           name='d_B_hidden2'), is_training))
            flatten = tf.reshape(layer2, [-1, 7 * 7 * 128])
            layer3 = lrelu(
                bn(fully_connect(flatten, 1024, name='d_B_hidden3'),
                   is_training))
            layer4 = fully_connect(layer3, 1, name='d_B_hidden4')
            logits = sigmoid(layer4)

            return logits, layer4
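
Unlike the earlier discriminator, the conv2d calls here pass an initializer='random' argument, so the underlying wrapper evidently selects a weight initializer by name. That wrapper is not shown; as an assumption, it might look roughly like this in TF 1.x:

import tensorflow as tf

# Sketch only: a conv2d wrapper that picks an initializer by keyword,
# mirroring the initializer='random' argument used above.
def conv2d(x, filters, kernel_size, strides=[1, 1, 1, 1],
           initializer='xavier', name=None):
    if initializer == 'random':
        init = tf.random_normal_initializer(stddev=0.02)   # common GAN choice
    else:
        init = tf.glorot_uniform_initializer()             # Xavier/Glorot default
    return tf.layers.conv2d(x, filters, kernel_size,
                            strides=strides[1:3],          # NHWC stride list -> (h, w)
                            padding='same',
                            kernel_initializer=init,
                            name=name)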