Example #1: a VGG19 subclass that reuses a backbone base class and appends only the final pooling and softmax head
    def __init__(self,
                 input_shape,
                 border_mode='half',
                 num_classes=10,
                 name='VGG19'):
        super(VGG19, self).__init__(input_shape,
                                    fc=False,
                                    bn=True,
                                    border_mode=border_mode,
                                    num_classes=num_classes,
                                    name=name)
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool4'))
        self.append(
            nn.SoftmaxLayer(self.output_shape, num_classes, name + '/softmax'))
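Because the base class is constructed with fc=False, it builds only the convolutional trunk, and this subclass closes the network with the last pooling stage and the softmax head. A minimal usage sketch, assuming the nn module and the base class are in scope as above and that input_shape uses the (batch, channels, height, width) layout:

    # Hypothetical usage -- the NCHW layout and 32x32 input size are assumptions.
    model = VGG19(input_shape=(None, 3, 32, 32), num_classes=10)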
Example #2: DenseNet assembled from DenseBlock units, with transition blocks between them and an optional classifier head
    def __init__(self,
                 input_shape,
                 fc=True,
                 num_classes=1000,
                 first_output=16,
                 growth_rate=12,
                 num_blocks=3,
                 depth=40,
                 dropout=False,
                 name='DenseNet'):
        super(DenseNet, self).__init__(input_shape=input_shape,
                                       layer_name=name)
        self.append(
            nn.ConvolutionalLayer(self.input_shape,
                                  first_output,
                                  3,
                                  activation='linear',
                                  layer_name=name + 'pre_conv'))
        n = (depth - 1) // num_blocks
        for b in range(num_blocks):
            self.append(
                nn.DenseBlock(self.output_shape,
                              num_conv_layer=n - 1,
                              growth_rate=growth_rate,
                              dropout=dropout,
                              layer_name=name + 'dense_block_%d' % b))
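            # The positional arguments (True, None, None) below presumably
            # select DenseBlock's transition variant; this reading is inferred
            # from the layer name, not from documented API.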
            if b < num_blocks - 1:
                self.append(
                    nn.DenseBlock(self.output_shape,
                                  True,
                                  None,
                                  None,
                                  dropout,
                                  layer_name=name +
                                  'dense_block_transit_%d' % b))

        self.append(
            nn.BatchNormLayer(self.output_shape, layer_name=name + 'post_bn'))
        if fc:
            self.append(
                nn.GlobalAveragePoolingLayer(self.output_shape,
                                             name + '_glbavgpooling'))
            self.append(
                nn.SoftmaxLayer(self.output_shape, num_classes,
                                name + '_softmax'))
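With the defaults depth=40 and num_blocks=3, n = (40 - 1) // 3 = 13, so each dense block stacks n - 1 = 12 convolutions, each adding growth_rate feature maps in the usual DenseNet growth scheme. A hypothetical instantiation, with the NCHW input layout assumed:

    # Hypothetical usage -- the (batch, channels, H, W) layout is an assumption.
    model = DenseNet(input_shape=(None, 3, 32, 32),
                     num_classes=10,
                     depth=40,          # 12 convolutions per dense block, as derived above
                     growth_rate=12)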
Example #3: VGG19 written out layer by layer, with an optional fully connected classifier
    def __init__(self, input_shape, fc=True, num_classes=1000, name='vgg19'):
        super(VGG19, self).__init__(input_shape=input_shape, layer_name=name)
        self.fc = fc
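        # Block 1: two 3x3 convolutions with 64 channels, then 2x2 max pooling.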
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv1_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv1_2'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool0'))

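        # Block 2: two 128-channel convolutions, then 2x2 max pooling.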
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv2_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv2_2'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool1'))

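        # Block 3: four 256-channel convolutions, then 2x2 max pooling.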
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv3_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv3_2'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv3_3'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv3_4'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool2'))

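        # Block 4: four 512-channel convolutions, then 2x2 max pooling.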
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv4_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv4_2'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv4_3'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv4_4'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool3'))

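        # Block 5: four 512-channel convolutions; the final pooling is
        # appended only in the classifier branch below.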
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv5_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv5_2'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv5_3'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv5_4'))

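        # Optional classifier head: final pooling, two 4096-unit FC layers, softmax.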
        if fc:
            self.append(
                nn.MaxPoolingLayer(self.output_shape, (2, 2),
                                   layer_name=name + '_maxpool4'))
            self.append(
                nn.FCLayer(self.output_shape, 4096, layer_name=name + '_fc1'))
            self.append(
                nn.FCLayer(self.output_shape, 4096, layer_name=name + '_fc2'))
            self.append(
                nn.SoftmaxLayer(self.output_shape, num_classes,
                                name + '_softmax'))
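Passing fc=False skips both the classifier and the last pooling stage, so the instance can serve as a fully convolutional feature extractor. A hypothetical sketch, with the NCHW layout and 224x224 input size assumed:

    # Hypothetical usage -- input layout and input size are assumptions.
    backbone = VGG19(input_shape=(None, 3, 224, 224), fc=False)
    classifier = VGG19(input_shape=(None, 3, 224, 224), num_classes=1000)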
Example #4: VGG16 with switchable batch normalization or plain ReLU after each convolution, plus optional dropout in the classifier
    def __init__(self,
                 input_shape,
                 fc=True,
                 bn=False,
                 dropout=True,
                 border_mode='half',
                 num_classes=1000,
                 name='vgg16'):
        super(VGG16, self).__init__(input_shape=input_shape, layer_name=name)
        self.fc = fc
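        # Block 1: two 64-channel convolutions. Each conv is built with
        # activation=None and followed by either a BatchNormLayer (when bn is
        # True; it apparently applies its own default nonlinearity, judging by
        # Example #5 passing activation=None to suppress it) or a bare
        # ActivationLayer.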
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv1',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn1') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv2',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn2') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu2'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool0'))

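        # Block 2: two 128-channel convolutions, then 2x2 max pooling.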
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv3',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn3') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu3'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv4',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn4') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu4'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool1'))

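        # Block 3: three 256-channel convolutions, then 2x2 max pooling.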
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv5',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn5') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu5'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv6',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn6') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu6'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv7',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn7') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu7'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool2'))

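        # Block 4: three 512-channel convolutions, then 2x2 max pooling.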
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv8',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn8') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu8'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv9',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn9') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu9'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv10',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn10') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu10'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool3'))

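        # Block 5: three 512-channel convolutions; pooling is deferred to the
        # classifier branch.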
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv11',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn11') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu11'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv12',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn12') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu12'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv13',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn13') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu13'))

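        # Optional classifier head: pooling, two 4096-unit FC layers with
        # optional dropout, softmax.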
        if fc:
            self.append(
                nn.MaxPoolingLayer(self.output_shape, (2, 2),
                                   layer_name=name + '_maxpool4'))
            self.append(
                nn.FCLayer(self.output_shape, 4096, layer_name=name + '_fc1'))
            if dropout:
                self.append(
                    nn.DropoutLayer(self.output_shape,
                                    drop_prob=.5,
                                    layer_name=name + '/dropout1'))
            self.append(
                nn.FCLayer(self.output_shape, 4096, layer_name=name + '_fc2'))
            if dropout:
                self.append(
                    nn.DropoutLayer(self.output_shape,
                                    drop_prob=.5,
                                    layer_name=name + '/dropout2'))
            self.append(
                nn.SoftmaxLayer(self.output_shape, num_classes,
                                name + '_softmax'))
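The bn and dropout flags let the same class yield either the plain VGG16 or a batch-normalized variant. A hypothetical instantiation, layout assumed NCHW:

    # Hypothetical usage -- input layout is an assumption.
    vgg16_bn = VGG16(input_shape=(None, 3, 224, 224),
                     bn=True,        # BatchNorm after each conv instead of a bare ReLU
                     dropout=False,  # no dropout in the FC head
                     num_classes=1000)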
Example #5: a VGG19 variant built from a custom Conv2DMeanInterpPaddingLayer, with batch normalization throughout and each block's ReLU deferred until after pooling
    def __init__(self,
                 input_shape,
                 num_classes=10,
                 name='vgg19 mean interp padding'):
        super(VGG19MeanInterpPadding, self).__init__(input_shape=input_shape,
                                                     layer_name=name)
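        # Block 1: two 64-channel convolutions with custom mean-interpolation
        # padding. The second BN passes activation=None and the ReLU comes
        # after pooling: max pooling commutes with the monotone ReLU, so the
        # result is identical while the activation touches 4x fewer elements.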
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         64,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv1'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn1'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         64,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv2'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn2',
                              activation=None))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool0'))
        self.append(
            nn.ActivationLayer(self.output_shape, 'relu', name + '/relu2'))

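        # Block 2: two 128-channel convolutions, same deferred-ReLU pooling pattern.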
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         128,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv3'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn3'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         128,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv4'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn4',
                              activation=None))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool1'))
        self.append(
            nn.ActivationLayer(self.output_shape, 'relu', name + '/relu4'))

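        # Block 3: four 256-channel convolutions.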
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         256,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv5'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn5'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         256,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv6'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn6'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         256,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv7'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn7'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         256,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv7_1'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn7_1',
                              activation=None))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool2'))
        self.append(
            nn.ActivationLayer(self.output_shape, 'relu', name + '/relu8'))

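        # Block 4: four 512-channel convolutions.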
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv8'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn8'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv9'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn9'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv10'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn10'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv10_1'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn10_1',
                              activation=None))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool3'))
        self.append(
            nn.ActivationLayer(self.output_shape, 'relu', name + '/relu11'))

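        # Block 5: four 512-channel convolutions, then the final pooling,
        # deferred ReLU, and softmax.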
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv11'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn11'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv12'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn12'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv13'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn13'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv13_1'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn13_1',
                              activation=None))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool4'))
        self.append(
            nn.ActivationLayer(self.output_shape, 'relu', name + '/relu14'))

        self.append(
            nn.SoftmaxLayer(self.output_shape, num_classes, name + '/softmax'))
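A hypothetical instantiation; the input layout is assumed, and Conv2DMeanInterpPaddingLayer must be importable from wherever the surrounding project defines it:

    # Hypothetical usage -- input layout and class availability are assumptions.
    model = VGG19MeanInterpPadding(input_shape=(None, 3, 32, 32), num_classes=10)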