Example #1
    def __call__(self, x, n_channels, n_codes):
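        """Encode x into an n_codes-dimensional latent vector (fc1) and decode it
        back to an n_channels reconstruction (conv_out)."""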
        non_linearity = tf.nn.relu

        with tf.variable_scope(self.name, reuse=self.reuse):
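            # Encoder: three stride-2 convolutions (16 -> 32 -> 1 filters) shrink the
            # input to an 8x8x1 map; the hard-coded 8*8 reshape below implies 64x64 inputs.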
            conv1_a = conv2d(x, 'conv1_a', 16, 3, 2, 'SAME', True,
                             non_linearity, self.is_train)
            conv1_b = conv2d(conv1_a, 'conv1_b', 16, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            conv2_a = conv2d(conv1_b, 'conv2_a', 32, 3, 2, 'SAME', True,
                             non_linearity, self.is_train)
            conv2_b = conv2d(conv2_a, 'conv2_b', 32, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            conv3 = conv2d(conv2_b, 'conv3', 1, 3, 2, 'SAME', True,
                           non_linearity, self.is_train)

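            # Bottleneck: flatten the 8x8x1 map to 64 units, compress to n_codes (fc1),
            # then expand back to 64 units (fc2) for decoding.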
            fc1 = dense(tf.reshape(conv3, [-1, np.prod([8, 8, 1])]), 'fc1',
                        n_codes, False, None, self.is_train)
            fc2 = dense(fc1, 'fc2', np.prod([8, 8, 1]), False, non_linearity,
                        self.is_train)

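            # Decoder: three 2x upconvolutions restore the 64x64 resolution; the final
            # 3x3 convolution maps back to n_channels with no activation.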
            deconv4 = upconv2d(tf.reshape(fc2, [-1] + [8, 8, 1]), 2, 'deconv4',
                               32, 3, 1, 'SAME', True, non_linearity,
                               self.is_train)
            conv4 = conv2d(deconv4, 'conv4', 32, 3, 1, 'SAME', True,
                           non_linearity, self.is_train)
            deconv5 = upconv2d(conv4, 2, 'deconv5', 16, 3, 1, 'SAME', True,
                               non_linearity, self.is_train)
            conv5 = conv2d(deconv5, 'conv5', 16, 3, 1, 'SAME', True,
                           non_linearity, self.is_train)
            deconv6 = upconv2d(conv5, 2, 'deconv6', 16, 3, 1, 'SAME', True,
                               non_linearity, self.is_train)
            conv_out = conv2d(deconv6, 'conv_out', n_channels, 3, 1, 'SAME',
                              False, None, self.is_train)

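        # On the first call, collect this scope's variables, build a saver for them,
        # and flag the scope for reuse on subsequent calls.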
        if self.reuse is None:
            self.var_list = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                                              scope=self.name)
            self.saver = tf.train.Saver(var_list=self.var_list,
                                        max_to_keep=100)
            self.reuse = True

        return fc1, conv_out
Example #2
    def __call__(self, x):
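        """U-Net-style network: a contracting path with average pooling, an expanding
        path with 2x upconvolutions and additive skip connections, and a 2-channel output."""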
        non_linearity = tf.nn.elu

        with tf.variable_scope(self.name, reuse=self.reuse):
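            # Contracting path: pairs of 3x3 convolutions followed by 2x2 average pooling;
            # the pre-activation outputs (conv1_b, conv2_b, conv3_b) feed the skip connections.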
            conv1_a = conv2d(x, 'conv1_a', 16, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            conv1_b = conv2d(conv1_a, 'conv1_b', 16, 3, 1, 'SAME', True, None,
                             self.is_train)
            act1 = non_linearity(conv1_b)
            pool1 = tf.nn.avg_pool(act1, [1, 2, 2, 1], [1, 2, 2, 1], 'SAME')

            conv2_a = conv2d(pool1, 'conv2_a', 32, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            conv2_b = conv2d(conv2_a, 'conv2_b', 32, 3, 1, 'SAME', True, None,
                             self.is_train)
            act2 = non_linearity(conv2_b)
            pool2 = tf.nn.avg_pool(act2, [1, 2, 2, 1], [1, 2, 2, 1], 'SAME')

            conv3_a = conv2d(pool2, 'conv3_a', 64, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            conv3_b = conv2d(conv3_a, 'conv3_b', 64, 3, 1, 'SAME', True, None,
                             self.is_train)
            act3 = non_linearity(conv3_b)
            pool3 = tf.nn.avg_pool(act3, [1, 2, 2, 1], [1, 2, 2, 1], 'SAME')

            conv4_a = conv2d(pool3, 'conv4_a', 128, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            conv4_b = conv2d(conv4_a, 'conv4_b', 128, 3, 1, 'SAME', True, None,
                             self.is_train)
            act4 = non_linearity(conv4_b)
            drop4 = tf.layers.dropout(act4,
                                      rate=0.5,
                                      training=self.is_train,
                                      name='drop4')

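            # Expanding path: each 2x upconvolution is fused with the matching encoder
            # feature map by element-wise addition (despite the 'concat*' names) before
            # applying the non-linearity.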
            deconv4 = upconv2d(drop4, 2, 'deconv4', 64, 3, 1, 'SAME', True,
                               None, self.is_train)
            concat5 = non_linearity(tf.add(deconv4, conv3_b))
            conv5_a = conv2d(concat5, 'conv5_a', 64, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            conv5_b = conv2d(conv5_a, 'conv5_b', 64, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            drop5 = tf.layers.dropout(conv5_b,
                                      rate=0.5,
                                      training=self.is_train,
                                      name='drop5')

            deconv5 = upconv2d(drop5, 2, 'deconv5', 32, 3, 1, 'SAME', True,
                               None, self.is_train)
            concat6 = non_linearity(tf.add(deconv5, conv2_b))
            conv6_a = conv2d(concat6, 'conv6_a', 32, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            conv6_b = conv2d(conv6_a, 'conv6_b', 32, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            drop6 = tf.layers.dropout(conv6_b,
                                      rate=0.5,
                                      training=self.is_train,
                                      name='drop6')

            deconv6 = upconv2d(drop6, 2, 'deconv6', 16, 3, 1, 'SAME', True,
                               None, self.is_train)
            concat7 = non_linearity(tf.add(deconv6, conv1_b))
            conv7_a = conv2d(concat7, 'conv7_a', 16, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            conv7_b = conv2d(conv7_a, 'conv7_b', 16, 3, 1, 'SAME', True,
                             non_linearity, self.is_train)
            conv_out = conv2d(conv7_b, 'conv_out', 2, 3, 1, 'SAME', True, None,
                              self.is_train)

        if self.reuse is None:
            self.var_list = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                                              scope=self.name)
            self.saver = tf.train.Saver(var_list=self.var_list,
                                        max_to_keep=100)
            self.reuse = True

        return conv_out