# Code example #1
    def __init__(self,
                 input_shape,
                 fc=True,
                 num_classes=1000,
                 first_output=16,
                 growth_rate=12,
                 num_blocks=3,
                 depth=40,
                 dropout=False,
                 name='DenseNet'):
        """Build a DenseNet-style model as a sequence of ``nn`` layers.

        :param input_shape: shape of the network input, forwarded to the base
            model class and to the first convolution.
        :param fc: when True, append a global-average-pooling + softmax
            classifier head on top of the feature extractor.
        :param num_classes: number of output classes for the softmax head.
        :param first_output: number of filters of the initial 3x3 convolution.
        :param growth_rate: per-layer feature growth inside each dense block.
        :param num_blocks: number of dense blocks (with transitions between).
        :param depth: total depth; each dense block gets
            ``(depth - 1) // num_blocks - 1`` conv layers.
        :param dropout: whether dense blocks use dropout.
        :param name: prefix for all layer names.
        """
        super(DenseNet, self).__init__(input_shape=input_shape,
                                       layer_name=name)
        # Stem: a linear (no activation) 3x3 convolution to first_output maps.
        self.append(
            nn.ConvolutionalLayer(self.input_shape,
                                  first_output,
                                  3,
                                  activation='linear',
                                  layer_name=name + 'pre_conv'))
        # Conv layers available per dense block.
        n = (depth - 1) // num_blocks
        for b in range(num_blocks):
            self.append(
                nn.DenseBlock(self.output_shape,
                              num_conv_layer=n - 1,
                              growth_rate=growth_rate,
                              dropout=dropout,
                              layer_name=name + 'dense_block_%d' % b))
            # Between blocks (not after the last one) insert a transition;
            # NOTE(review): the positional (True, None, None) arguments
            # presumably flag DenseBlock into transition mode — confirm
            # against the nn.DenseBlock signature.
            if b < num_blocks - 1:
                self.append(
                    nn.DenseBlock(self.output_shape,
                                  True,
                                  None,
                                  None,
                                  dropout,
                                  layer_name=name +
                                  'dense_block_transit_%d' % b))

        self.append(
            nn.BatchNormLayer(self.output_shape, layer_name=name + 'post_bn'))
        if fc:
            # Bug fix: the pooling head must consume the current feature
            # shape (self.output_shape), not the raw network input_shape.
            self.append(
                nn.GlobalAveragePoolingLayer(self.output_shape,
                                             name + '_glbavgpooling'))
            self.append(
                nn.SoftmaxLayer(self.output_shape, num_classes,
                                name + '_softmax'))
# Code example #2
    def __init__(self,
                 input_shape,
                 block,
                 layers,
                 num_filters,
                 activation='relu',
                 fc=True,
                 pooling=True,
                 num_classes=1000,
                 layer_name='ResNet',
                 **kwargs):
        """Assemble a ResNet: stem conv, four residual stages, optional head.

        :param input_shape: shape of the network input.
        :param block: residual block constructor handed to ``_make_layer``.
        :param layers: number of residual units per stage (four entries used).
        :param num_filters: filter count of the first stage; later stages
            double it (2x, 4x, 8x).
        :param activation: activation used by the stem conv (stored on self).
        :param fc: when True, append global-average-pooling + softmax head.
        :param pooling: when True, add a 3x3/stride-2 max pooling after stem.
        :param num_classes: output size of the classifier head.
        :param layer_name: name of the model.
        :param kwargs: extra options; ``custom_block`` is popped off, the
            rest is stored and forwarded to the stem conv.
        """
        super(ResNet, self).__init__(input_shape=input_shape,
                                     layer_name=layer_name)
        self.activation = activation
        self.custom_block = kwargs.pop('custom_block', None)
        self.kwargs = kwargs

        # Stem: strided 7x7 conv, optionally followed by 3x3 max pooling.
        self.append(
            nn.ConvNormAct(self.input_shape,
                           num_filters,
                           7,
                           stride=2,
                           activation=activation,
                           **kwargs))
        if pooling:
            self.append(
                nn.PoolingLayer(self.output_shape, (3, 3),
                                stride=(2, 2),
                                pad=1))

        # Four residual stages: filters double each stage; stages 2-4
        # downsample with stride 2 (stage 1 keeps _make_layer's default).
        for stage in range(4):
            extra = {} if stage == 0 else {'stride': 2}
            self.append(
                self._make_layer(block,
                                 self.output_shape,
                                 (2 ** stage) * num_filters,
                                 layers[stage],
                                 name='block%d' % (stage + 1),
                                 **extra))

        if fc:
            self.append(
                nn.GlobalAveragePoolingLayer(self.output_shape,
                                             layer_name='glb_avg_pooling'))
            self.append(
                nn.FullyConnectedLayer(self.output_shape,
                                       num_classes,
                                       activation='softmax',
                                       layer_name='output'))