Example #1
def cin_resnet_block(input_shape, z_dim, padding, norm_layer, use_dropout,
                     use_bias, block_name, **kwargs):
    # conv -> conditional instance norm -> ReLU [-> dropout] -> conv -> instance norm.
    num_filters = input_shape[1]  # NCHW input: the block preserves the channel count

    block = nn.Sequential(input_shape=input_shape, layer_name=block_name)
    block.append(
        nn.Conv2DLayer(input_shape,
                       num_filters,
                       3,
                       border_mode=padding,
                       no_bias=not use_bias,
                       activation=None,
                       layer_name=block_name + '/conv1'))
    # Normalize conv1's output; use the running output shape rather than the
    # raw input shape so the block stays correct for any `padding` mode.
    block.append(norm_layer(block.output_shape, z_dim,
                            layer_name=block_name + '/CIN'))
    block.append(
        nn.ActivationLayer(block.output_shape,
                           layer_name=block_name + '/relu1'))
    if use_dropout:
        block.append(
            nn.DropoutLayer(block.output_shape,
                            .5,
                            layer_name=block_name + '/dropout'))

    block.append(
        nn.Conv2DLayer(block.output_shape,
                       num_filters,
                       3,
                       border_mode=padding,
                       no_bias=not use_bias,
                       activation=None,
                       layer_name=block_name + '/conv2'))
    block.append(nn.InstanceNormLayer(block.output_shape, block_name + '/IN'))
    return block
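
A minimal usage sketch, assuming `nn` is the same module imported by these snippets and that `nn.ConditionalInstanceNorm2DLayer` (used as the conditional norm in Example #10, with the same `(shape, z_dim, name)` call signature) is the intended `norm_layer`; the shape and `z_dim` values are illustrative only.

# Hypothetical call: a CIN residual block for 64-channel feature maps
# conditioned on a 10-dimensional latent code.
block = cin_resnet_block(input_shape=(None, 64, 32, 32),
                         z_dim=10,
                         padding='ref',
                         norm_layer=nn.ConditionalInstanceNorm2DLayer,
                         use_dropout=False,
                         use_bias=True,
                         block_name='resblock1')
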
Example #2
def _build_simple_conv_block(input_shape, num_filters, stride, border_mode,
                             activation, block_name, **kwargs):
    block = [
        nn.Conv2DLayer(input_shape,
                       num_filters,
                       3,
                       stride=stride,
                       border_mode=border_mode,
                       layer_name=block_name + '/conv1',
                       no_bias=False,
                       activation='linear')
    ]
    block.append(
        nn.BatchNormLayer(block[-1].output_shape,
                          activation=activation,
                          layer_name=block_name + '/conv1_bn'))

    block.append(
        nn.Conv2DLayer(block[-1].output_shape,
                       num_filters,
                       3,
                       border_mode=border_mode,
                       no_bias=True,
                       layer_name=block_name + '/conv2',
                       activation='linear'))
    block.append(
        nn.BatchNormLayer(block[-1].output_shape,
                          layer_name=block_name + '/conv2_bn',
                          activation='linear'))
    return block
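
Unlike most examples here, `_build_simple_conv_block` returns a plain list of layers rather than an `nn.Sequential`, so the caller presumably chains them by hand. A hedged sketch; the shapes and names are illustrative:

import theano.tensor as T

layers = _build_simple_conv_block(input_shape=(None, 3, 32, 32),
                                  num_filters=16,
                                  stride=1,
                                  border_mode='half',
                                  activation='relu',
                                  block_name='block1')
x = T.tensor4('x')  # symbolic 4D input matching input_shape
out = x
for layer in layers:
    out = layer(out)  # conv -> BN(relu) -> conv -> BN(linear)
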
Example #3
def prep(x):
    # Caffe-style VGG preprocessing expressed as a fixed 1x1 convolution:
    # an RGB image scaled to [0, 1] becomes a mean-subtracted BGR image
    # in [0, 255].
    conv = nn.Conv2DLayer((1, 3, 224, 224),
                          3,
                          1,
                          no_bias=False,
                          activation='linear',
                          filter_flip=False,
                          border_mode='valid')
    # Each output channel selects one input channel and scales it by 255,
    # reversing the channel order (RGB -> BGR).
    kern = np.array([[0, 0, 255], [0, 255, 0], [255, 0, 0]],
                    'float32')[:, :, None, None]
    conv.W.set_value(kern)
    # Subtract the ImageNet per-channel BGR means via the bias.
    conv.b.set_value(np.array([-103.939, -116.779, -123.68], 'float32'))
    return conv(x)
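
A minimal calling sketch, assuming Theano is in use as elsewhere in these examples:

import theano.tensor as T

x = T.tensor4('images')  # RGB batch scaled to [0, 1]
vgg_input = prep(x)      # BGR, mean-subtracted, in [0, 255]: Caffe-VGG format
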
Example #4
def test_partial_conv_based_padding():
    # Partial-convolution based padding (Liu et al., 2018): border outputs
    # are computed from valid pixels only and rescaled by the ratio of the
    # kernel area to the number of valid pixels under the window.
    shape = (1, 3, 5, 5)
    num_filters = 2
    filter_size = 3

    x = T.ones(shape)
    conv = nn.Conv2DLayer(shape,
                          num_filters,
                          filter_size,
                          border_mode='partial')
    y = conv(x)
    f = nn.function([], y)
    print(f())
    print(conv.update_mask.eval())  # updated validity mask for the next layer
    print(conv.mask_ratio)          # per-position rescaling factors
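
For intuition, assuming the standard formulation, the rescaling factor at each output location is the kernel area divided by the number of in-image pixels under the window: a corner of the 5x5 input above sees only 4 valid pixels through a 3x3 kernel, so its output is scaled by 9/4. A small NumPy check:

import numpy as np

# Count the valid pixels under each 3x3 window of a zero-padded 5x5 image.
padded = np.pad(np.ones((5, 5), 'float32'), 1)
valid = sum(padded[i:i + 5, j:j + 5] for i in range(3) for j in range(3))
ratio = 9.0 / valid  # 2.25 at the corners, 1.5 on the edges, 1.0 inside
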
Example #5
    def __init__(self, input_shape, fc=True, num_classes=1000, name='vgg19'):
        super(VGG19, self).__init__(input_shape=input_shape, layer_name=name)
        self.fc = fc
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv1_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv1_2'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool0'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv2_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv2_2'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool1'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv3_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv3_2'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv3_3'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv3_4'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool2'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv4_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv4_2'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv4_3'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv4_4'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool3'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv5_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv5_2'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv5_3'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           no_bias=False,
                           layer_name=name + '_conv5_4'))

        if fc:
            self.append(
                nn.MaxPoolingLayer(self.output_shape, (2, 2),
                                   layer_name=name + '_maxpool4'))
            self.append(
                nn.FCLayer(self.output_shape, 4096, layer_name=name + '_fc1'))
            self.append(
                nn.FCLayer(self.output_shape, 4096, layer_name=name + '_fc2'))
            self.append(
                nn.SoftmaxLayer(self.output_shape, num_classes,
                                name + '_softmax'))
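
A hedged instantiation sketch, assuming (as Example #3 suggests for single layers) that the assembled model is callable on a symbolic tensor; the input shape follows the usual ImageNet convention:

import theano.tensor as T

net = VGG19(input_shape=(None, 3, 224, 224), fc=True, num_classes=1000)
x = T.tensor4('input')
probs = net(x)  # class probabilities from the final softmax layer
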
Example #6
    def __init__(self, input_shape, param_file, name='vgg19'):
        super(VGG19, self).__init__(input_shape=input_shape, layer_name=name)
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv1_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv1_2'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool1'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv2_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv2_2'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool2'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv3_1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv3_2'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv3_3'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv3_4'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '/maxpool3'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv4_1'))
        self.load_params(param_file)
Example #7
    def __init__(self, input_shape, param_file, name='Decoder'):
        super(Decoder, self).__init__(input_shape=input_shape, layer_name=name)
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv1_1',
                           activation='relu'))
        self.append(
            nn.UpsamplingLayer(self.output_shape,
                               2,
                               method='nearest',
                               layer_name=name + '/up1'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv2_1',
                           activation='relu'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv2_2',
                           activation='relu'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv2_3',
                           activation='relu'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv2_4',
                           activation='relu'))
        self.append(
            nn.UpsamplingLayer(self.output_shape,
                               2,
                               method='nearest',
                               layer_name=name + '/up2'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv3_1',
                           activation='relu'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv3_2',
                           activation='relu'))
        self.append(
            nn.UpsamplingLayer(self.output_shape,
                               2,
                               method='nearest',
                               layer_name=name + '/up3'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv4_1',
                           activation='relu'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           3,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           activation='tanh',
                           layer_name=name + '/output'))
        self.load_params(param_file)
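
This decoder mirrors the truncated VGG19 encoder of Example #6 (features up to conv4_1): three nearest-neighbour 2x upsamplings undo the encoder's three poolings, and the final tanh maps back to image range. A hedged pairing sketch; the weight-file names are hypothetical placeholders:

import theano.tensor as T

enc = VGG19((None, 3, 256, 256), param_file='vgg19_conv4_1.npz')  # hypothetical file
dec = Decoder(enc.output_shape, param_file='decoder.npz')         # hypothetical file
x = T.tensor4('x')
recon = dec(enc(x))
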
Example #8
    def __init__(self,
                 input_shape,
                 n_latent,
                 num_filters,
                 norm_layer,
                 deterministic=False,
                 use_bias=False,
                 name='Latent Encoder'):
        super(LatentEncoder, self).__init__(input_shape=input_shape,
                                            layer_name=name)
        self.deterministic = deterministic
        self.enc = nn.Sequential(input_shape=input_shape,
                                 layer_name=name + '/enc')
        self.enc.append(
            nn.Conv2DLayer(self.enc.output_shape,
                           num_filters,
                           3,
                           stride=2,
                           no_bias=False,
                           activation='relu',
                           layer_name=name + '/conv1'))

        self.enc.append(
            nn.Conv2DLayer(self.enc.output_shape,
                           2 * num_filters,
                           3,
                           stride=2,
                           no_bias=not use_bias,
                           activation=None,
                           layer_name=name + '/conv2'))
        self.enc.append(norm_layer(self.enc.output_shape, name + '/norm2'))
        self.enc.append(
            nn.ActivationLayer(self.enc.output_shape, 'relu', name + '/act2'))

        self.enc.append(
            nn.Conv2DLayer(self.enc.output_shape,
                           4 * num_filters,
                           3,
                           stride=2,
                           no_bias=not use_bias,
                           activation=None,
                           layer_name=name + '/conv3'))
        self.enc.append(norm_layer(self.enc.output_shape, name + '/norm3'))
        self.enc.append(
            nn.ActivationLayer(self.enc.output_shape, 'relu', name + '/act3'))

        self.enc.append(
            nn.Conv2DLayer(self.enc.output_shape,
                           8 * num_filters,
                           3,
                           stride=2,
                           no_bias=not use_bias,
                           activation=None,
                           layer_name=name + '/conv4'))
        self.enc.append(norm_layer(self.enc.output_shape, name + '/norm4'))
        self.enc.append(
            nn.ActivationLayer(self.enc.output_shape, 'relu', name + '/act4'))

        self.enc.append(
            nn.Conv2DLayer(self.enc.output_shape,
                           8 * num_filters,
                           4,
                           stride=1,
                           no_bias=not use_bias,
                           activation=None,
                           border_mode='valid',
                           layer_name=name + '/conv5'))
        self.enc.append(norm_layer(self.enc.output_shape, name + '/norm5'))
        self.enc.append(
            nn.ActivationLayer(self.enc.output_shape, 'relu', name + '/act5'))

        self.enc_mu = nn.Conv2DLayer(self.enc.output_shape,
                                     n_latent,
                                     1,
                                     no_bias=False,
                                     activation=None,
                                     layer_name=name + '/mu')
        self.extend((self.enc, self.enc_mu))

        if not deterministic:
            self.enc_logvar = nn.Conv2DLayer(self.enc.output_shape,
                                             n_latent,
                                             1,
                                             no_bias=False,
                                             activation=None,
                                             layer_name=name + '/logvar')
            self.append(self.enc_logvar)
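
A hedged sketch: assuming the library's default padding preserves spatial size, the four stride-2 convolutions reduce a 64x64 input to 4x4 and the 4x4 'valid' convolution collapses it to 1x1, so `enc_mu` (and `enc_logvar` when not deterministic) emits one `n_latent`-channel vector per image. The norm-layer choice below mirrors the default in Example #9 and is an assumption:

from functools import partial

encoder = LatentEncoder(input_shape=(None, 3, 64, 64),
                        n_latent=8,
                        num_filters=64,
                        norm_layer=partial(nn.BatchNormLayer, activation=None))
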
Example #9
    def __init__(self,
                 input_shape,
                 num_filters=64,
                 norm_layer=partial(nn.BatchNormLayer, activation=None),
                 use_sigmoid=False,
                 use_bias=True,
                 name='Discriminator Edges'):
        super(DiscriminatorEdges, self).__init__(input_shape=input_shape,
                                                 layer_name=name)
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           num_filters,
                           3,
                           stride=2,
                           border_mode='half',
                           no_bias=not use_bias,
                           activation='lrelu',
                           alpha=.2,
                           layer_name=name + '/conv1'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           2 * num_filters,
                           3,
                           stride=2,
                           border_mode='half',
                           no_bias=not use_bias,
                           activation=None,
                           layer_name=name + '/conv2'))
        self.append(norm_layer(self.output_shape, name + '/bn2'))
        self.append(
            nn.ActivationLayer(self.output_shape,
                               activation='lrelu',
                               alpha=.2,
                               layer_name=name + '/act2'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           4 * num_filters,
                           3,
                           border_mode='half',
                           no_bias=not use_bias,
                           activation=None,
                           layer_name=name + '/conv3'))
        self.append(norm_layer(self.output_shape, layer_name=name + '/bn3'))
        self.append(
            nn.ActivationLayer(self.output_shape,
                               activation='lrelu',
                               alpha=.2,
                               layer_name=name + '/act3'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           4 * num_filters,
                           3,
                           border_mode='half',
                           no_bias=not use_bias,
                           activation=None,
                           layer_name=name + '/conv4'))
        self.append(norm_layer(self.output_shape, layer_name=name + '/bn4'))
        self.append(
            nn.ActivationLayer(self.output_shape,
                               activation='lrelu',
                               alpha=.2,
                               layer_name=name + '/act4'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           1,
                           4,
                           border_mode='valid',
                           layer_name=name + '/output',
                           activation=None))

        if use_sigmoid:
            self.append(
                nn.ActivationLayer(self.output_shape, 'sigmoid',
                                   name + '/sigmoid'))
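
A hedged sketch: two stride-2 'half' convolutions and two stride-1 'half' convolutions turn a 16x16 input into a 4x4 map, which the final 4x4 'valid' convolution reduces to a single real/fake score per image (a sigmoid is appended when `use_sigmoid` is set). The single-channel edge-map input is an assumption:

import theano.tensor as T

disc = DiscriminatorEdges(input_shape=(None, 1, 16, 16),
                          num_filters=64,
                          use_sigmoid=True)
score = disc(T.tensor4('edges'))  # one score per image
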
Example #10
    def __init__(self,
                 input_shape,
                 n_latent,
                 num_filters=64,
                 norm_layer=nn.ConditionalInstanceNorm2DLayer,
                 use_sigmoid=False,
                 use_bias=True,
                 name='CIN Discriminator'):
        super(CINDiscriminator, self).__init__(input_shape=input_shape,
                                               layer_name=name)
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           num_filters,
                           4,
                           stride=2,
                           border_mode=1,
                           no_bias=not use_bias,
                           activation='lrelu',
                           alpha=.2,
                           layer_name=name + '/conv1'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           2 * num_filters,
                           4,
                           stride=2,
                           border_mode=1,
                           no_bias=not use_bias,
                           activation=None,
                           layer_name=name + '/conv2'))
        self.append(norm_layer(self.output_shape, n_latent, name + '/bn2'))
        self.append(
            nn.ActivationLayer(self.output_shape,
                               activation='lrelu',
                               alpha=.2,
                               layer_name=name + '/act2'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           4 * num_filters,
                           4,
                           border_mode=1,
                           no_bias=not use_bias,
                           activation=None,
                           layer_name=name + '/conv3'))
        self.append(
            norm_layer(self.output_shape, n_latent, layer_name=name + '/bn3'))
        self.append(
            nn.ActivationLayer(self.output_shape,
                               activation='lrelu',
                               alpha=.2,
                               layer_name=name + '/act3'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           5 * num_filters,
                           4,
                           border_mode=1,
                           no_bias=not use_bias,
                           activation=None,
                           layer_name=name + '/conv4'))
        self.append(
            norm_layer(self.output_shape, n_latent, layer_name=name + '/bn4'))
        self.append(
            nn.ActivationLayer(self.output_shape,
                               activation='lrelu',
                               alpha=.2,
                               layer_name=name + '/act4'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           1,
                           4,
                           border_mode=1,
                           layer_name=name + '/output',
                           activation=None))

        if use_sigmoid:
            self.append(
                nn.ActivationLayer(self.output_shape, 'sigmoid',
                                   name + '/sigmoid'))
Example #11
    def __init__(self,
                 input_shape,
                 num_filters,
                 output_dim,
                 norm_layer=partial(nn.InstanceNormLayer, activation=None),
                 use_dropout=False,
                 padding='ref',
                 name='Resnet Generator'):
        super(ResnetGen, self).__init__(input_shape=input_shape,
                                        layer_name=name)
        self.append(
            nn.Conv2DLayer(input_shape,
                           num_filters,
                           7,
                           border_mode='ref',
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv1'))
        self.append(norm_layer(self.output_shape, layer_name=name + '/cin1'))
        self.append(
            nn.ActivationLayer(self.output_shape,
                               'relu',
                               layer_name=name + '/relu1'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           num_filters * 2,
                           3,
                           border_mode='half',
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv2'))
        self.append(norm_layer(self.output_shape, layer_name=name + '/cin2'))
        self.append(
            nn.ActivationLayer(self.output_shape,
                               'relu',
                               layer_name=name + '/relu2'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           num_filters * 4,
                           3,
                           stride=2,
                           border_mode='half',
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv3'))
        self.append(norm_layer(self.output_shape, layer_name=name + '/cin3'))
        self.append(
            nn.ActivationLayer(self.output_shape,
                               'relu',
                               layer_name=name + '/relu3'))

        for i in range(3):
            self.append(
                ResnetBlock(self.output_shape, padding, norm_layer,
                            use_dropout, True,
                            name + '/ResBlock %d' % (i + 1)))

        self.append(
            nn.TransposedConvolutionalLayer(self.output_shape,
                                            2 * num_filters,
                                            3,
                                            stride=(2, 2),
                                            padding='half',
                                            activation=None,
                                            layer_name=name + '/deconv'))
        self.append(norm_layer(self.output_shape, layer_name=name + '/cin4'))
        self.append(
            nn.ActivationLayer(self.output_shape, 'relu', name + '/relu4'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           num_filters,
                           3,
                           border_mode='half',
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv5'))
        self.append(norm_layer(self.output_shape, layer_name=name + '/cin5'))
        self.append(
            nn.ActivationLayer(self.output_shape,
                               'relu',
                               layer_name=name + '/relu5'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           output_dim,
                           7,
                           activation=None,
                           layer_name=name + '/output'))
        self.append(
            nn.ActivationLayer(self.output_shape,
                               'tanh',
                               layer_name=name + '/output_act'))
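
Note that `ResnetBlock` is defined elsewhere; Example #1 shows a conditional variant of such a block. A hedged instantiation sketch with the default instance-norm layer, shapes illustrative:

import theano.tensor as T

gen = ResnetGen(input_shape=(None, 3, 128, 128),
                num_filters=64,
                output_dim=3)
fake = gen(T.tensor4('x'))  # tanh output in [-1, 1]
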
Example #12
    def __init__(self,
                 input_shape,
                 fc=True,
                 bn=False,
                 dropout=True,
                 border_mode='half',
                 num_classes=1000,
                 name='vgg16'):
        super(VGG16, self).__init__(input_shape=input_shape, layer_name=name)
        self.fc = fc
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv1',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn1') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu1'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           64,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv2',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn2') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu2'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool0'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv3',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn3') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu3'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           128,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv4',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn4') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu4'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool1'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv5',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn5') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu5'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv6',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn6') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu6'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           256,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv7',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn7') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu7'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool2'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv8',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn8') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu8'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv9',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn9') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu9'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv10',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn10') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu10'))
        self.append(
            nn.MaxPoolingLayer(self.output_shape, (2, 2),
                               layer_name=name + '_maxpool3'))

        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv11',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn11') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu11'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv12',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn12') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu12'))
        self.append(
            nn.Conv2DLayer(self.output_shape,
                           512,
                           3,
                           nn.HeNormal('relu'),
                           activation=None,
                           no_bias=False,
                           layer_name=name + '/conv13',
                           border_mode=border_mode))
        self.append(
            nn.BatchNormLayer(self.output_shape, name + '/bn13') if bn else nn.
            ActivationLayer(self.output_shape, layer_name=name + '/relu13'))

        if fc:
            self.append(
                nn.MaxPoolingLayer(self.output_shape, (2, 2),
                                   layer_name=name + '_maxpool4'))
            self.append(
                nn.FCLayer(self.output_shape, 4096, layer_name=name + '_fc1'))
            if dropout:
                self.append(
                    nn.DropoutLayer(self.output_shape,
                                    drop_prob=.5,
                                    layer_name=name + '/dropout1'))
            self.append(
                nn.FCLayer(self.output_shape, 4096, layer_name=name + '_fc2'))
            if dropout:
                self.append(
                    nn.DropoutLayer(self.output_shape,
                                    drop_prob=.5,
                                    layer_name=name + '/dropout2'))
            self.append(
                nn.SoftmaxLayer(self.output_shape, num_classes,
                                name + '_softmax'))
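
A hedged instantiation sketch; with `bn=True` each convolution is followed by batch norm instead of a bare ReLU, and `dropout=True` inserts dropout between the fully connected layers:

net = VGG16(input_shape=(None, 3, 224, 224),
            fc=True,
            bn=True,
            dropout=True,
            num_classes=1000)
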
Example #13
    def __init__(self, encoder, layer, name='Decoder'):
        super(Decoder,
              self).__init__(input_shape=encoder[layer - 1].output_shape,
                             layer_name=name)
        self.enc = encoder
        self.layer = layer
        dec = nn.Sequential(input_shape=encoder.output_shape, layer_name=name)
        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           512,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv1_1'))
        dec.append(
            nn.UpsamplingLayer(dec.output_shape,
                               2,
                               method='nearest',
                               layer_name=name + '/up1'))

        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           512,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv2_1'))
        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           512,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv2_2'))
        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           512,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv2_3'))
        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           512,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv2_4'))
        dec.append(
            nn.UpsamplingLayer(dec.output_shape,
                               2,
                               method='nearest',
                               layer_name=name + '/up2'))

        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           256,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv3_1'))
        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           256,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv3_2'))
        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           256,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv3_3'))
        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           256,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv3_4'))
        dec.append(
            nn.UpsamplingLayer(dec.output_shape,
                               2,
                               method='nearest',
                               layer_name=name + '/up3'))

        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           128,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv4_1'))
        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           128,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv4_2'))
        dec.append(
            nn.UpsamplingLayer(dec.output_shape,
                               2,
                               method='nearest',
                               layer_name=name + '/up4'))

        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           64,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv5_1'))
        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           64,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           layer_name=name + '/conv5_2'))
        dec.append(
            nn.Conv2DLayer(dec.output_shape,
                           3,
                           3,
                           init=nn.GlorotUniform(),
                           border_mode='ref',
                           no_bias=False,
                           activation='tanh',
                           layer_name=name + '/output'))
        self.append(dec[len(encoder) - layer:])
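The constructor builds the full decoder and then keeps only its tail: `dec[len(encoder) - layer:]` skips the leading stages that would be needed only if the encoding had gone deeper than `layer`, assuming one decoder stage per encoder layer. A hedged pairing sketch; the encoder and its weight file are hypothetical:

enc = VGG19((None, 3, 256, 256), param_file='vgg19_conv4_1.npz')  # hypothetical file
dec = Decoder(enc, layer=5)  # decode from the output of the 5th encoder layer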