    # Assumes TF 1.x with: import tensorflow as tf; layers = tf.layers
    def bottleneck_layer(self, x, scope):
        with tf.variable_scope(scope):
            # x = layers.batch_normalization(x, training=self.training, name=scope + '_batch1')
            x = tf.nn.selu(x)
            # 1x1 bottleneck convolution: expand to 4 * growth-rate feature maps
            x = layers.conv2d(x,
                              filters=4 * self.filters,
                              kernel_size=[1, 1],
                              strides=[1, 1],
                              kernel_regularizer=tf.contrib.layers.l2_regularizer(0.0005),
                              padding='same',
                              activation=None,
                              name=scope + '_conv1')
            x = layers.dropout(x, rate=self.dropout, training=self.training)

            # x = layers.batch_normalization(x, training=self.training, name=scope + '_batch2')
            x = tf.nn.selu(x)
            # 3x3 convolution back down to growth-rate feature maps
            x = layers.conv2d(x,
                              filters=self.filters,
                              kernel_size=[3, 3],
                              strides=[1, 1],
                              kernel_regularizer=tf.contrib.layers.l2_regularizer(0.0005),
                              padding='same',
                              activation=None,
                              name=scope + '_conv2')
            x = layers.dropout(x, rate=self.dropout, training=self.training)

            return x

    def Dense_net(self, input_x):
        # Stem: strided 5x5 convolution applied before the dense blocks
        x = layers.conv2d(input_x,
                          filters=2 * self.filters,
                          kernel_size=[5, 5],
                          strides=[2, 2],
                          kernel_regularizer=tf.contrib.layers.l2_regularizer(0.0005),
                          padding='valid',
                          activation=None,
                          name='conv0')

        x = self.dense_block(input_x=x, nb_layers=6, layer_name='dense_1')
        x = self.transition_layer(x, scope='trans_1')

        x = self.dense_block(input_x=x, nb_layers=12, layer_name='dense_2')
        x = self.transition_layer(x, scope='trans_2')

        # x = self.dense_block(input_x=x, nb_layers=48, layer_name='dense_3')
        # x = self.transition_layer(x, scope='trans_3')

        # x = self.dense_block(input_x=x, nb_layers=32, layer_name='dense_final')
        x = self.dense_block(input_x=x, nb_layers=24, layer_name='dense_final')

        # 100 Layer
        # x = layers.batch_normalization(x, training=self.training, name='linear_batch')
        x = tf.nn.selu(x)
        # x = layers.global_ave_pool2d(x)
        # x = flatten(x)
        # x = layers.fully_connected(x, self.class_num, use_bias=False, activation_fn=None, trainable=self.training,
        # name='full_connecting')

        # x = tf.reshape(x, [-1, 10])
        return x

    def transition_layer(self, x, scope):
        with tf.variable_scope(scope):
            # Transition: 1x1 conv to compress channels, then 2x2 average pooling
            x = layers.batch_normalization(x, training=self.training, name=scope + '_batch1')
            x = tf.nn.selu(x)
            x = layers.conv2d(x, filters=self.filters, kernel_size=[1, 1], strides=[1, 1],
                              kernel_regularizer=tf.contrib.layers.l2_regularizer(0.0005),
                              padding='same', activation=None, name=scope + '_conv1')
            x = layers.dropout(x, rate=self.dropout, training=self.training)
            x = layers.average_pooling2d(x, pool_size=[2, 2], strides=[2, 2])
            return x
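
    # Note: Dense_net above calls self.dense_block, which is not included in this
    # snippet. The method below is a minimal hypothetical sketch, assuming the
    # standard DenseNet pattern of concatenating bottleneck outputs along the
    # channel axis; the original implementation may differ.
    def dense_block(self, input_x, nb_layers, layer_name):
        with tf.variable_scope(layer_name):
            # Keep every intermediate feature map and feed their concatenation
            # into the next bottleneck layer (dense connectivity).
            layers_concat = [input_x]
            x = self.bottleneck_layer(input_x, scope=layer_name + '_bottleneck_0')
            layers_concat.append(x)
            for i in range(nb_layers - 1):
                x = tf.concat(layers_concat, axis=3)
                x = self.bottleneck_layer(x, scope=layer_name + '_bottleneck_' + str(i + 1))
                layers_concat.append(x)
            return tf.concat(layers_concat, axis=3)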