Example #1
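This helper builds a two-convolution block as a plain list of layers: a strided 3×3 convolution followed by batch normalization with the requested activation, then a second 3×3 convolution (stride 1) followed by a linear batch-norm layer.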
def _build_simple_conv_block(input_shape, num_filters, stride, border_mode,
                             activation, block_name, **kwargs):
    block = [
        nn.Conv2DLayer(input_shape,
                       num_filters,
                       3,
                       stride=stride,
                       border_mode=border_mode,
                       layer_name=block_name + '/conv1',
                       no_bias=False,
                       activation='linear')
    ]
    block.append(
        nn.BatchNormLayer(block[-1].output_shape,
                          activation=activation,
                          layer_name=block_name + '/conv1_bn'))

    block.append(
        nn.Conv2DLayer(block[-1].output_shape,
                       num_filters,
                       3,
                       border_mode=border_mode,
                       no_bias=True,
                       layer_name=block_name + '/conv2',
                       activation='linear'))
    block.append(
        nn.BatchNormLayer(block[-1].output_shape,
                          layer_name=block_name + '/conv2_bn',
                          activation='linear'))
    return block
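A minimal composition sketch (the input shape, stride, and block names below are illustrative, not taken from the project): because the helper returns a plain list of layers, blocks can be chained by feeding the last layer's output_shape into the next call, just as the helper does between its own two convolutions.

# Illustrative only: stack two blocks on a (batch, channels, height, width) input.
layers = _build_simple_conv_block((None, 3, 32, 32), 64, stride=2,
                                  border_mode='half', activation='relu',
                                  block_name='block1')
layers += _build_simple_conv_block(layers[-1].output_shape, 128, stride=2,
                                   border_mode='half', activation='relu',
                                   block_name='block2')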
Example #2
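The same two-convolution block as in Example #1, but built from Conv2DMeanRefPaddingLayer (a convolution layer that handles its own padding) instead of nn.Conv2DLayer, so no border_mode argument is passed.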
def _build_mean_ref_pad_conv_block(input_shape, num_filters, stride,
                                   activation, block_name, **kwargs):
    block = [
        Conv2DMeanRefPaddingLayer(input_shape,
                                  num_filters,
                                  3,
                                  stride=stride,
                                  layer_name=block_name + '/conv1',
                                  no_bias=False,
                                  activation='linear')
    ]
    block.append(
        nn.BatchNormLayer(block[-1].output_shape,
                          activation=activation,
                          layer_name=block_name + '/conv1_bn'))

    block.append(
        Conv2DMeanRefPaddingLayer(block[-1].output_shape,
                                  num_filters,
                                  3,
                                  layer_name=block_name + '/conv2',
                                  no_bias=True,
                                  activation='linear'))
    block.append(
        nn.BatchNormLayer(block[-1].output_shape,
                          layer_name=block_name + '/conv2_bn',
                          activation='linear'))
    return block
Example #3
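A latent-code discriminator: three fully connected layers of n_nodes units, each followed by batch normalization with a leaky-ReLU (alpha=0.2), then a single linear output unit with an optional sigmoid activation.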
    def __init__(self,
                 input_shape,
                 n_nodes,
                 use_sigmoid=False,
                 name='Latent Discriminator'):
        super(DiscriminatorLatent, self).__init__(input_shape=input_shape,
                                                  layer_name=name)
        self.append(
            nn.FCLayer(self.output_shape,
                       n_nodes,
                       activation=None,
                       layer_name=name + '/fc1'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn1',
                              activation='lrelu',
                              alpha=.2))

        self.append(
            nn.FCLayer(self.output_shape,
                       n_nodes,
                       activation=None,
                       layer_name=name + '/fc2'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn2',
                              activation='lrelu',
                              alpha=.2))

        self.append(
            nn.FCLayer(self.output_shape,
                       n_nodes,
                       activation=None,
                       layer_name=name + '/fc3'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn3',
                              activation='lrelu',
                              alpha=.2))

        self.append(
            nn.FCLayer(self.output_shape,
                       1,
                       activation=None,
                       layer_name=name + '/output'))

        if use_sigmoid:
            self.append(
                nn.ActivationLayer(self.output_shape, 'sigmoid',
                                   name + '/act'))
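A hedged instantiation sketch; the 128-dimensional latent input and 256 hidden units below are illustrative, and the (batch, features) shape tuple is an assumption about how this library describes fully connected inputs.

# Illustrative only: a discriminator over 128-dimensional latent codes.
latent_disc = DiscriminatorLatent((None, 128), n_nodes=256, use_sigmoid=True)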
Example #4
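A DenseNet classifier: an initial 3×3 convolution with first_output filters, num_blocks dense blocks with transition blocks in between, a final batch-norm layer, and, when fc is set, global average pooling followed by a softmax over num_classes.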
    def __init__(self,
                 input_shape,
                 fc=True,
                 num_classes=1000,
                 first_output=16,
                 growth_rate=12,
                 num_blocks=3,
                 depth=40,
                 dropout=False,
                 name='DenseNet'):
        super(DenseNet, self).__init__(input_shape=input_shape,
                                       layer_name=name)
        self.append(
            nn.ConvolutionalLayer(self.input_shape,
                                  first_output,
                                  3,
                                  activation='linear',
                                  layer_name=name + 'pre_conv'))
        n = (depth - 1) // num_blocks
        for b in range(num_blocks):
            self.append(
                nn.DenseBlock(self.output_shape,
                              num_conv_layer=n - 1,
                              growth_rate=growth_rate,
                              dropout=dropout,
                              layer_name=name + 'dense_block_%d' % b))
            if b < num_blocks - 1:
                self.append(
                    nn.DenseBlock(self.output_shape,
                                  True,
                                  None,
                                  None,
                                  dropout,
                                  layer_name=name +
                                  'dense_block_transit_%d' % b))

        self.append(
            nn.BatchNormLayer(self.output_shape, layer_name=name + 'post_bn'))
        if fc:
            self.append(
                nn.GlobalAveragePoolingLayer(self.output_shape,
                                             name + '_glbavgpooling'))
            self.append(
                nn.SoftmaxLayer(self.output_shape, num_classes,
                                name + '_softmax'))
Example #5
File: sngan.py  Project: justanhduc/sngan
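A DCGAN-style generator: a fully connected projection to ch * bottom_width**2 units is batch-normalized and reshaped into a feature map, then upsampled by three transposed convolutions that double the spatial size and halve the channel count, each followed by batch normalization, before a final stride-1 transposed convolution emits a 3-channel image with the output activation (tanh by default).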
    def __init__(self,
                 input_shape,
                 bottom_width=4,
                 ch=512,
                 wscale=0.02,
                 hidden_activation='relu',
                 output_activation='tanh',
                 layer_name='DCGANGen'):
        super(DCGANGenerator, self).__init__(input_shape=input_shape,
                                             layer_name=layer_name)
        self.append(
            nn.FullyConnectedLayer(self.output_shape,
                                   ch * bottom_width**2,
                                   layer_name=layer_name + '/fc1',
                                   activation='linear',
                                   init=nn.Normal(wscale)))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              layer_name + '/bn_fc1',
                              activation=hidden_activation,
                              epsilon=2e-5))
        self.append(
            nn.ReshapingLayer(self.output_shape,
                              (-1, ch, bottom_width, bottom_width),
                              layer_name + '/reshape'))

        shape = [o * 2 for o in self.output_shape[2:]]
        self.append(
            nn.TransposedConvolutionalLayer(self.output_shape,
                                            ch // 2,
                                            4,
                                            shape,
                                            layer_name=layer_name + '/deconv1',
                                            padding=1,
                                            activation='linear',
                                            init=nn.Normal(wscale)))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              layer_name + '/bn_deconv1',
                              activation=hidden_activation,
                              epsilon=2e-5))

        shape = [o * 2 for o in self.output_shape[2:]]
        self.append(
            nn.TransposedConvolutionalLayer(self.output_shape,
                                            ch // 4,
                                            4,
                                            shape,
                                            layer_name=layer_name + '/deconv2',
                                            padding=1,
                                            activation='linear',
                                            init=nn.Normal(wscale)))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              layer_name + '/bn_deconv2',
                              activation=hidden_activation,
                              epsilon=2e-5))

        shape = [o * 2 for o in self.output_shape[2:]]
        self.append(
            nn.TransposedConvolutionalLayer(self.output_shape,
                                            ch // 8,
                                            4,
                                            shape,
                                            layer_name=layer_name + '/deconv3',
                                            padding=1,
                                            activation='linear',
                                            init=nn.Normal(wscale)))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              layer_name + '/bn_deconv3',
                              activation=hidden_activation,
                              epsilon=2e-5))

        self.append(
            nn.TransposedConvolutionalLayer(self.output_shape,
                                            3,
                                            3,
                                            layer_name=layer_name + '/output',
                                            stride=(1, 1),
                                            activation=output_activation,
                                            init=nn.Normal(wscale)))
Example #6
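A VGG-16 network: thirteen 3×3 convolutions in the standard five stages separated by 2×2 max pooling, each convolution followed by either batch normalization or a plain ReLU depending on the bn flag, with an optional fully connected head of two 4096-unit layers (plus dropout) and a softmax classifier.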
    def __init__(self,
                 input_shape,
                 fc=True,
                 bn=False,
                 dropout=True,
                 border_mode='half',
                 num_classes=1000,
                 name='vgg16'):
        super(VGG16, self).__init__(input_shape=input_shape, layer_name=name)
        self.fc = fc
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv1',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn1') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv2',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn2') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu2'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool0'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv3',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn3') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu3'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv4',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn4') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu4'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool1'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv5',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn5') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu5'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv6',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn6') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu6'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv7',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn7') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu7'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool2'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv8',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn8') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu8'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv9',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn9') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu9'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv10',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn10') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu10'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool3'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv11',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn11') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu11'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv12',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn12') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu12'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv13',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn13') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu13'))

        if fc:
            self.append(
                nn.MaxPoolingLayer(self.output_shape, (2, 2),
                                   layer_name=name + '_maxpool4'))
            self.append(
                nn.FCLayer(self.output_shape, 4096, layer_name=name + '_fc1'))
            if dropout:
                self.append(
                    nn.DropoutLayer(self.output_shape,
                                    drop_prob=.5,
                                    layer_name=name + '/dropout1'))
            self.append(
                nn.FCLayer(self.output_shape, 4096, layer_name=name + '_fc2'))
            if dropout:
                self.append(
                    nn.DropoutLayer(self.output_shape,
                                    drop_prob=.5,
                                    layer_name=name + '/dropout2'))
            self.append(
                nn.SoftmaxLayer(self.output_shape, num_classes,
                                name + '_softmax'))
Example #7
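An LSGAN that builds both sub-networks: a generator consisting of a fully connected projection, a reshape, and seven transposed convolutions (the first six batch-normalized, the last producing a tanh image), and a discriminator consisting of four strided 5×5 convolutions with leaky-ReLU activations followed by a single linear output unit.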
    def __init__(self, input_shape, output_size):
        super(LSGAN, self).__init__(input_shape=input_shape,
                                    layer_name='LSGAN')
        s2, s4, s8, s16 = output_size // 2, output_size // 4, output_size // 8, output_size // 16

        subnet = 'generator'
        self.gen = nn.Sequential(input_shape=input_shape, layer_name=subnet)
        self.gen.append(
            nn.FullyConnectedLayer(self.gen.output_shape,
                                   256 * s16 * s16,
                                   activation='linear',
                                   layer_name=subnet + '/fc1',
                                   init=nn.Normal(.02)))
        self.gen.append(
            nn.ReshapingLayer(self.gen.output_shape, (-1, 256, s16, s16),
                              subnet + '/reshape'))
        self.gen.append(
            nn.BatchNormLayer(self.gen.output_shape,
                              subnet + '/bn1',
                              activation='relu',
                              epsilon=1.1e-5))

        self.gen.append(
            nn.TransposedConvolutionalLayer(self.gen.output_shape,
                                            256,
                                            3, (s8, s8),
                                            activation='linear',
                                            layer_name=subnet + '/deconv1',
                                            init=nn.Normal(.02)))
        self.gen.append(
            nn.BatchNormLayer(self.gen.output_shape,
                              subnet + '/bn2',
                              activation='relu',
                              epsilon=1.1e-5))
        self.gen.append(
            nn.TransposedConvolutionalLayer(self.gen.output_shape,
                                            256,
                                            3, (s8, s8),
                                            stride=(1, 1),
                                            activation='linear',
                                            layer_name=subnet + '/deconv2',
                                            init=nn.Normal(.02)))
        self.gen.append(
            nn.BatchNormLayer(self.gen.output_shape,
                              subnet + '/bn3',
                              activation='relu',
                              epsilon=1.1e-5))

        self.gen.append(
            nn.TransposedConvolutionalLayer(self.gen.output_shape,
                                            256,
                                            3, (s4, s4),
                                            activation='linear',
                                            layer_name=subnet + '/deconv3',
                                            init=nn.Normal(.02)))
        self.gen.append(
            nn.BatchNormLayer(self.gen.output_shape,
                              subnet + '/bn4',
                              activation='relu',
                              epsilon=1.1e-5))
        self.gen.append(
            nn.TransposedConvolutionalLayer(self.gen.output_shape,
                                            256,
                                            3, (s4, s4),
                                            stride=(1, 1),
                                            activation='linear',
                                            layer_name=subnet + '/deconv4',
                                            init=nn.Normal(.02)))
        self.gen.append(
            nn.BatchNormLayer(self.gen.output_shape,
                              subnet + '/bn5',
                              activation='relu',
                              epsilon=1.1e-5))

        self.gen.append(
            nn.TransposedConvolutionalLayer(self.gen.output_shape,
                                            128,
                                            3, (s2, s2),
                                            activation='linear',
                                            layer_name=subnet + '/deconv5',
                                            init=nn.Normal(.02)))
        self.gen.append(
            nn.BatchNormLayer(self.gen.output_shape,
                              subnet + '/bn6',
                              activation='relu',
                              epsilon=1.1e-5))

        self.gen.append(
            nn.TransposedConvolutionalLayer(self.gen.output_shape,
                                            64,
                                            3, (output_size, output_size),
                                            activation='linear',
                                            layer_name=subnet + '/deconv6',
                                            init=nn.Normal(.02)))
        self.gen.append(
            nn.BatchNormLayer(self.gen.output_shape,
                              subnet + '/bn7',
                              activation='relu',
                              epsilon=1.1e-5))

        self.gen.append(
            nn.TransposedConvolutionalLayer(self.gen.output_shape,
                                            3,
                                            3, (output_size, output_size),
                                            stride=(1, 1),
                                            activation='tanh',
                                            layer_name=subnet + '/output',
                                            init=nn.Normal(.02)))
        self.append(self.gen)

        subnet = 'discriminator'
        self.dis = nn.Sequential(input_shape=self.gen.output_shape,
                                 layer_name=subnet)
        self.dis.append(
            nn.ConvolutionalLayer(self.dis.output_shape,
                                  64,
                                  5,
                                  stride=2,
                                  activation='lrelu',
                                  no_bias=False,
                                  layer_name=subnet + '/first_conv',
                                  init=nn.TruncatedNormal(.02),
                                  alpha=.2))
        self.dis.append(
            nn.ConvNormAct(self.dis.output_shape,
                           64 * 2,
                           5,
                           stride=2,
                           activation='lrelu',
                           no_bias=False,
                           layer_name=subnet + '/conv1',
                           init=nn.TruncatedNormal(.02),
                           epsilon=1.1e-5,
                           alpha=.2))
        self.dis.append(
            nn.ConvNormAct(self.dis.output_shape,
                           64 * 4,
                           5,
                           stride=2,
                           activation='lrelu',
                           no_bias=False,
                           layer_name=subnet + '/conv2',
                           init=nn.TruncatedNormal(.02),
                           epsilon=1.1e-5,
                           alpha=.2))
        self.dis.append(
            nn.ConvNormAct(self.dis.output_shape,
                           64 * 8,
                           5,
                           stride=2,
                           activation='lrelu',
                           no_bias=False,
                           layer_name=subnet + '/conv3',
                           init=nn.TruncatedNormal(.02),
                           epsilon=1.1e-5,
                           alpha=.2))
        self.dis.append(
            nn.FullyConnectedLayer(self.dis.output_shape,
                                   1,
                                   layer_name=subnet + '/output',
                                   activation='linear'))
        self.append(self.dis)
Example #8
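A VGG-19 variant built from Conv2DMeanInterpPaddingLayer: sixteen 3×3 convolutions in five stages, each followed by batch normalization, with 2×2 max pooling and a ReLU closing every stage and a softmax classifier on top.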
    def __init__(self,
                 input_shape,
                 num_classes=10,
                 name='vgg19 mean interp padding'):
        super(VGG19MeanInterpPadding, self).__init__(input_shape=input_shape,
                                                     layer_name=name)
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         64,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv1'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn1'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         64,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv2'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn2',
                              activation=None))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool0'))
        self.append(
            nn.ActivationLayer(self.output_shape, 'relu', name + '/relu2'))

        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         128,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv3'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn3'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         128,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv4'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn4',
                              activation=None))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool1'))
        self.append(
            nn.ActivationLayer(self.output_shape, 'relu', name + '/relu4'))

        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         256,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv5'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn5'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         256,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv6'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn6'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         256,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv7'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn7'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         256,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv7_1'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn7_1',
                              activation=None))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool2'))
        self.append(
            nn.ActivationLayer(self.output_shape, 'relu', name + '/relu8'))

        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv8'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn8'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv9'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn9'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv10'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn10'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv10_1'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn10_1',
                              activation=None))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool3'))
        self.append(
            nn.ActivationLayer(self.output_shape, 'relu', name + '/relu11'))

        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv11'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn11'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv12'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn12'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv13'))
        self.append(nn.BatchNormLayer(self.output_shape, name + '/bn13'))
        self.append(
            Conv2DMeanInterpPaddingLayer(self.output_shape,
                                         512,
                                         3,
                                         activation=None,
                                         layer_name=name + '/conv13_1'))
        self.append(
            nn.BatchNormLayer(self.output_shape,
                              name + '/bn13_1',
                              activation=None))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool4'))
        self.append(
            nn.ActivationLayer(self.output_shape, 'relu', name + '/relu14'))

        self.append(
            nn.SoftmaxLayer(self.output_shape, num_classes, name + '/softmax'))