Example #1
    def test_local_response_norm_exceptions(self):
        # the window size ``n`` must be an odd number
        with self.assertRaises(ValueError):
            layers.LocalResponseNorm(n=2)

        # LRN cannot be connected to a flat (non-spatial) input
        with self.assertRaises(LayerConnectionError):
            layers.Input(10) > layers.LocalResponseNorm()

        conn = layers.LocalResponseNorm()
        with self.assertRaises(LayerConnectionError):
            conn.output(T.tensor4())

Example #2

    def test_local_response_normalization_layer(self):
        input_layer = layers.Input((1, 1, 1))
        conn = input_layer > layers.LocalResponseNorm()

        x_tensor = asfloat(np.ones((1, 1, 1, 1)))
        actual_output = self.eval(conn.output(x_tensor))
        expected_output = np.array([0.59458]).reshape((-1, 1, 1, 1))

        np.testing.assert_array_almost_equal(expected_output,
                                             actual_output,
                                             decimal=5)
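
The expected value 0.59458 follows directly from the LRN formula
y = x / (k + alpha * sum(x_j ** 2)) ** beta. A minimal check with plain
NumPy, assuming the defaults this test relies on (k=2, alpha=1e-4,
beta=0.75; with a single channel of ones the window sum is just 1):

import numpy as np

k, alpha, beta = 2, 1e-4, 0.75
x = 1.0
# the sum over the channel window collapses to x ** 2 for one channel
y = x / (k + alpha * x ** 2) ** beta
print(np.round(y, 5))  # 0.59458, the test's expected output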
Example #3
    def test_local_response_normalization_layer(self):
        network = layers.join(
            layers.Input((1, 1, 1)),
            layers.LocalResponseNorm(),
        )

        x_tensor = asfloat(np.ones((1, 1, 1, 1)))
        actual_output = self.eval(network.output(x_tensor, training=True))
        expected_output = np.array([0.59458]).reshape((-1, 1, 1, 1))

        np.testing.assert_array_almost_equal(expected_output,
                                             actual_output,
                                             decimal=5)

Example #4

         [
             layers.Convolution((nfilters[4], 1, 1)),
             layers.Relu(),
             layers.Convolution((nfilters[5], 5, 5), padding='half'),
             layers.Relu(),
         ]],
        layers.Concatenate(),
    )


googlenet = layers.join(
    layers.Input((3, None, None)),
    layers.Convolution((64, 7, 7), padding='half', stride=2),
    layers.Relu(),
    layers.MaxPooling((3, 3), stride=2),
    layers.LocalResponseNorm(alpha=0.00002, k=1),
    layers.Convolution((64, 1, 1)) > layers.Relu(),
    layers.Convolution((192, 3, 3), padding='half') > layers.Relu(),
    layers.LocalResponseNorm(alpha=0.00002, k=1),
    layers.MaxPooling((3, 3), stride=2),
    Inception((32, 64, 96, 128, 16, 32)),
    Inception((64, 128, 128, 192, 32, 96)),
    layers.MaxPooling((3, 3), stride=2),
    Inception((64, 192, 96, 208, 16, 48)),
    Inception((64, 160, 112, 224, 24, 64)),
    Inception((64, 128, 128, 256, 24, 64)),
    Inception((64, 112, 144, 288, 32, 64)),
    Inception((128, 256, 160, 320, 32, 128)),
    layers.MaxPooling((3, 3), stride=2),
    Inception((128, 256, 160, 320, 32, 128)),
    Inception((128, 384, 192, 384, 48, 128)),
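
The nested list inside layers.join is NeuPy's syntax for parallel
branches: every inner list is a branch fed with the same input, and
layers.Concatenate() merges the branch outputs along the channel axis.
A minimal sketch of the same pattern, assuming a small two-branch
network (shapes are illustrative only):

from neupy import layers

network = layers.join(
    layers.Input((3, 32, 32)),
    [[
        layers.Convolution((16, 1, 1)),                  # branch 1: 1x1
        layers.Relu(),
    ], [
        layers.Convolution((16, 3, 3), padding='half'),  # branch 2: 3x3
        layers.Relu(),
    ]],
    layers.Concatenate(),  # 16 + 16 = 32 output channels
)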

Example #5

    def test_local_response_norm_exceptions(self):
        with self.assertRaisesRegexp(ValueError, "Only works with odd"):
            layers.LocalResponseNorm(depth_radius=2)

        with self.assertRaises(LayerConnectionError):
            layers.Input(10) > layers.LocalResponseNorm()
Example #6
    def __repr__(self):
        return "{}({}, {})".format(
            self.__class__.__name__,
            self.from_channel,
            self.to_channel)


alexnet = layers.join(
    layers.Input((227, 227, 3)),

    layers.Convolution((11, 11, 96), stride=(4, 4), name='conv_1'),
    layers.Relu(),

    layers.MaxPooling((3, 3), stride=(2, 2)),
    layers.LocalResponseNorm(),

    [[
        SliceChannels(0, 48),
        layers.Convolution((5, 5, 128), padding='SAME', name='conv_2_1'),
        layers.Relu(),
    ], [
        SliceChannels(48, 96),
        layers.Convolution((5, 5, 128), padding='SAME', name='conv_2_2'),
        layers.Relu(),
    ]],
    layers.Concatenate(),

    layers.MaxPooling((3, 3), stride=(2, 2)),
    layers.LocalResponseNorm(),
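
SliceChannels reproduces AlexNet's two-GPU split: each branch only sees
half of the input channels, and the branch outputs are concatenated
back together afterwards. The class body is not part of this snippet; a
hypothetical NumPy illustration of the slicing it performs, assuming
the channels-last layout used in this example:

import numpy as np

def slice_channels(x, from_channel, to_channel):
    # hypothetical stand-in for SliceChannels: keep a channel range
    return x[..., from_channel:to_channel]

x = np.random.randn(1, 27, 27, 96)      # batch, height, width, channels
print(slice_channels(x, 0, 48).shape)   # (1, 27, 27, 48)
print(slice_channels(x, 48, 96).shape)  # (1, 27, 27, 48)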
Example #7
def alexnet():
    """
    AlexNet network architecture with randomly initialized parameters.
    Pretrained parameters can be loaded using the ``neupy.storage``
    module.

    AlexNet was originally built to solve an image classification
    problem and was used in the ImageNet competition. The goal of the
    competition is to build a model that classifies an image into one
    of 1,000 categories, which include animals, objects, vehicles and
    so on.

    AlexNet has roughly 61 million parameters.

    Examples
    --------
    >>> from neupy import architectures
    >>> alexnet = architectures.alexnet()
    >>> alexnet
    (3, 227, 227) -> [... 37 layers ...] -> 1000
    >>>
    >>> from neupy import algorithms
    >>> network = algorithms.Momentum(alexnet)

    See Also
    --------
    :architecture:`vgg16` : VGG16 network
    :architecture:`vgg19` : VGG19 network
    :architecture:`squeezenet` : SqueezeNet network
    :architecture:`resnet50` : ResNet50 network

    References
    ----------
    ImageNet Classification with Deep Convolutional Neural Networks
    https://goo.gl/479oZZ
    """
    return layers.join(
        layers.Input((3, 227, 227)),
        layers.Convolution((96, 11, 11), stride=(4, 4), name='conv_1'),
        layers.Relu(),
        layers.MaxPooling((3, 3), stride=(2, 2)),
        layers.LocalResponseNorm(),
        [[
            SliceChannels(0, 48),
            layers.Convolution((128, 5, 5), padding=2, name='conv_2_1'),
            layers.Relu(),
        ],
         [
             SliceChannels(48, 96),
             layers.Convolution((128, 5, 5), padding=2, name='conv_2_2'),
             layers.Relu(),
         ]],
        layers.Concatenate(),
        layers.MaxPooling((3, 3), stride=(2, 2)),
        layers.LocalResponseNorm(),
        layers.Convolution((384, 3, 3), padding=1, name='conv_3'),
        layers.Relu(),
        [[
            SliceChannels(0, 192),
            layers.Convolution((192, 3, 3), padding=1, name='conv_4_1'),
            layers.Relu(),
        ],
         [
             SliceChannels(192, 384),
             layers.Convolution((192, 3, 3), padding=1, name='conv_4_2'),
             layers.Relu(),
         ]],
        layers.Concatenate(),
        [[
            SliceChannels(0, 192),
            layers.Convolution((128, 3, 3), padding=1, name='conv_5_1'),
            layers.Relu(),
        ],
         [
             SliceChannels(192, 384),
             layers.Convolution((128, 3, 3), padding=1, name='conv_5_2'),
             layers.Relu(),
         ]],
        layers.Concatenate(),
        layers.MaxPooling((3, 3), stride=(2, 2)),
        layers.Reshape(),
        layers.Relu(4096, name='dense_1') > layers.Dropout(0.5),
        layers.Relu(4096, name='dense_2') > layers.Dropout(0.5),
        layers.Softmax(1000, name='dense_3'),
    )
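
A short usage sketch, assuming pretrained weights are available locally
(the file name alexnet.hdf5 is only a placeholder):

from neupy import architectures, storage

alexnet = architectures.alexnet()

# parameters are random after construction; load stored parameters
storage.load(alexnet, 'alexnet.hdf5')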