def Inception_3(pooling):
    """Build the third Inception block variant.

    Four parallel branches (1x1, 3x3 with a split tail, double-3x3
    with a split tail, and pooling + 1x1) whose outputs are merged
    with a ``Concatenate`` layer.

    Parameters
    ----------
    pooling : str
        Either ``'max'`` or ``'average'``; selects the pooling layer
        used in the last parallel branch.

    Raises
    ------
    ValueError
        When ``pooling`` is not one of the supported options.
    """
    if pooling == 'max':
        Pooling = layers.MaxPooling
    elif pooling == 'average':
        # Average pooling ignores padded zeros when computing the mean
        Pooling = partial(layers.AveragePooling, mode='exclude_padding')
    else:
        raise ValueError("Invalid pooling option: {}".format(pooling))

    branch_1x1 = [
        ConvReluBN((320, 1, 1)),
    ]
    branch_3x3 = [
        ConvReluBN((384, 1, 1)),
        [[ConvReluBN((384, 1, 3), padding=(0, 1))],
         [ConvReluBN((384, 3, 1), padding=(1, 0))]],
    ]
    branch_3x3_double = [
        ConvReluBN((448, 1, 1)),
        ConvReluBN((384, 3, 3), padding=1),
        [[ConvReluBN((384, 1, 3), padding=(0, 1))],
         [ConvReluBN((384, 3, 1), padding=(1, 0))]],
    ]
    branch_pool = [
        Pooling((3, 3), stride=(1, 1), padding=1),
        ConvReluBN((192, 1, 1)),
    ]

    return layers.join(
        [branch_1x1, branch_3x3, branch_3x3_double, branch_pool],
        layers.Concatenate(),
    )
# Example 2
    def test_mixture_of_experts_problem_with_specific_network(self):
        """Invalid expert networks have to trigger ``ValueError``.

        Covered cases: networks passed as positional arguments instead
        of a list, an expert network with more than one input layer,
        more than one output layer, and a non-2D input shape.
        """
        # Networks have to be provided as a list, not unpacked
        with self.assertRaisesRegexp(ValueError, "specified as a list"):
            architectures.mixture_of_experts(*self.networks)

        # Expert network cannot have more than one input layer
        with self.assertRaisesRegexp(ValueError, "has more than one input"):
            last_network = layers.join(
                layers.parallel(
                    layers.Input(1),
                    layers.Input(2),
                ),
                layers.Concatenate(),
            )
            architectures.mixture_of_experts(
                networks=self.networks + [last_network])

        # Expert network cannot have more than one output layer
        with self.assertRaisesRegexp(ValueError, "has more than one output"):
            last_network = layers.join(
                layers.Input(1),
                layers.parallel(
                    layers.Softmax(1),
                    layers.Softmax(1),
                ),
            )
            architectures.mixture_of_experts(
                networks=self.networks + [last_network])

        # Raw string so that the regex escapes (\(, \?, \)) are not
        # interpreted as invalid string escape sequences, which newer
        # Python versions warn about (and eventually reject).
        error_message = (
            "Each network from the mixture of experts has to "
            "process only 2-dimensional inputs. Network #2.+"
            r"Input layer's shape: \(\?, 1, 1, 1\)"
        )
        with self.assertRaisesRegexp(ValueError, error_message):
            last_network = layers.Input((1, 1, 1))
            architectures.mixture_of_experts(
                networks=self.networks + [last_network])
# Example 3
    def test_networks_with_complex_parallel_relations(self):
        """Parallel branches of different depth (including nested
        parallel blocks) can be defined before any input layer is
        attached; shapes stay undefined until one is connected."""
        input_layer = layers.Input((5, 5, 3))
        network = layers.join(
            layers.parallel([
                layers.Convolution((1, 1, 8)),
            ], [
                layers.Convolution((1, 1, 4)),
                # Nested parallel block inside the second branch
                layers.parallel(
                    layers.Convolution((1, 3, 2), padding='same'),
                    layers.Convolution((3, 1, 2), padding='same'),
                ),
            ], [
                layers.Convolution((1, 1, 8)),
                layers.Convolution((3, 3, 4), padding='same'),
                layers.parallel(
                    layers.Convolution((1, 3, 2), padding='same'),
                    layers.Convolution((3, 1, 2), padding='same'),
                )
            ], [
                layers.MaxPooling((3, 3), padding='same', stride=(1, 1)),
                layers.Convolution((1, 1, 8)),
            ]),
            layers.Concatenate(),
        )
        # Without an input layer every dimension is still unknown
        self.assertShapesEqual(network.input_shape, [None, None, None, None])
        self.assertShapesEqual(network.output_shape, (None, None, None, None))

        # Connect them at the end, because we need to make
        # sure that parallel networks can be defined without input shapes
        network = layers.join(input_layer, network)
        self.assertShapesEqual(network.output_shape, (None, 5, 5, 24))
# Example 4
    def test_parallel_layer(self):
        """Parallel convolutional branches joined with ``Concatenate``
        can be connected to input and output layers separately."""
        input_layer = layers.Input((3, 8, 8))
        parallel_layer = layers.join(
            [[
                layers.Convolution((11, 5, 5)),
            ], [
                layers.Convolution((10, 3, 3)),
                layers.Convolution((5, 3, 3)),
            ]],
            layers.Concatenate(),
        )
        output_layer = layers.MaxPooling((2, 2))

        conn = layers.join(input_layer, parallel_layer)
        output_connection = layers.join(conn, output_layer)

        x = T.tensor4()
        y = theano.function([x], conn.output(x))

        x_tensor4 = asfloat(np.random.random((10, 3, 8, 8)))
        output = y(x_tensor4)
        # 11 channels from the first branch + 5 from the second one
        self.assertEqual(output.shape, (10, 11 + 5, 4, 4))

        # (2, 2) max pooling halves the remaining spatial dimensions
        output_function = theano.function([x], output_connection.output(x))
        final_output = output_function(x_tensor4)
        self.assertEqual(final_output.shape, (10, 11 + 5, 2, 2))
# Example 5
    def test_training_with_multiple_inputs(self):
        """Gradient descent training works for a network that accepts
        two separate input arrays concatenated inside the network."""
        network = algorithms.GradientDescent(
            [
                [
                    layers.Input(2) > layers.Sigmoid(3),
                    layers.Input(3) > layers.Sigmoid(5),
                ],
                layers.Concatenate(),
                layers.Sigmoid(1),
            ],
            step=0.1,
            verbose=False,
            shuffle_data=True,
        )

        x_train, x_test, y_train, y_test = simple_classification(n_samples=100,
                                                                 n_features=5)

        # Split the 5 features into groups of 2 and 3, one per input layer
        x_train_2, x_train_3 = x_train[:, :2], x_train[:, 2:]
        x_test_2, x_test_3 = x_test[:, :2], x_test[:, 2:]

        network.train([x_train_2, x_train_3],
                      y_train, [x_test_2, x_test_3],
                      y_test,
                      epochs=100)

        # NOTE(review): exact value depends on fixed RNG seeding done
        # elsewhere in the suite — confirm the setUp seeds the generator
        error = network.validation_errors[-1]
        self.assertAlmostEqual(error, 0.14, places=2)
# Example 6
    def test_graph_relations_in_format_of_layer_names(self):
        """``layer_names_only`` reports graph relations by layer name."""
        input_layer = layers.Input(1, name='input')
        sigmoid_2 = layers.Sigmoid(2, name='sigmoid-2')
        sigmoid_3 = layers.Sigmoid(3, name='sigmoid-3')
        sigmoid_4 = layers.Sigmoid(4, name='sigmoid-4')
        concat = layers.Concatenate(name='concat')

        graph = LayerGraph()

        # Main sequential chain: input -> sigmoid-2 -> sigmoid-3 -> sigmoid-4
        graph.connect_layers(input_layer, sigmoid_2)
        graph.connect_layers(sigmoid_2, sigmoid_3)
        graph.connect_layers(sigmoid_3, sigmoid_4)

        # Both intermediate layers also feed the concatenation layer
        graph.connect_layers(sigmoid_2, concat)
        graph.connect_layers(sigmoid_3, concat)

        expected_relations = [
            ('input', ['sigmoid-2']),
            ('sigmoid-2', ['sigmoid-3', 'concat']),
            ('sigmoid-3', ['sigmoid-4', 'concat']),
            ('sigmoid-4', []),
            ('concat', []),
        ]
        self.assertListEqual(graph.layer_names_only(), expected_relations)
# Example 7
def Fire(s_1x1, e_1x1, e_3x3, name):
    """Fire module from SqueezeNet.

    A 1x1 squeeze convolution followed by parallel 1x1 and 3x3 expand
    convolutions whose outputs get concatenated.

    Parameters
    ----------
    s_1x1 : int
        Number of filters in the squeeze 1x1 convolution.
    e_1x1 : int
        Number of filters in the expand 1x1 convolution.
    e_3x3 : int
        Number of filters in the expand 3x3 convolution.
    name : str
        Prefix used for all layer names in this module.
    """
    squeeze_name = name + '/squeeze1x1'
    expand_1x1_name = name + '/expand1x1'
    expand_3x3_name = name + '/expand3x3'

    return layers.join(
        layers.Convolution((1, 1, s_1x1), padding='SAME', name=squeeze_name),
        layers.Relu(),
        layers.parallel([
            layers.Convolution(
                (1, 1, e_1x1), padding='SAME', name=expand_1x1_name),
            layers.Relu(),
        ], [
            layers.Convolution(
                (3, 3, e_3x3), padding='SAME', name=expand_3x3_name),
            layers.Relu(),
        ]),
        layers.Concatenate(),
    )
# Example 8
    def test_network_shape_multiple_inputs(self):
        """A network built from two parallel inputs reports both input
        shapes and a single concatenated output shape."""
        first_input = layers.Input(10)
        second_input = layers.Input(20)
        network = (first_input | second_input) >> layers.Concatenate()

        self.assertShapesEqual(network.input_shape, [(None, 10), (None, 20)])
        # 10 + 20 features after concatenation
        self.assertShapesEqual(network.output_shape, (None, 30))
    def test_concatenate_init_error(self):
        """Joining a second input whose shape conflicts with the one
        already connected to ``Concatenate`` raises an error."""
        input_layer_1 = layers.Input((3, 28, 28))
        input_layer_2 = layers.Input((1, 28, 28))
        concat_layer = layers.Concatenate(axis=2)

        layers.join(input_layer_1, concat_layer)
        # Shapes differ on a non-concatenation dimension (3 vs 1)
        with self.assertRaises(LayerConnectionError):
            layers.join(input_layer_2, concat_layer)
# Example 10
    def test_inplace_parallel(self):
        """In-place operators ``|=`` and ``>>=`` extend the network."""
        network = layers.Input(10)
        # Add the second input as a parallel branch
        network |= layers.Input(10)
        # Merge both branches through a concatenation layer
        network >>= layers.Concatenate()

        # Two input layers plus the concatenate layer
        self.assertEqual(len(network), 3)
        self.assertShapesEqual(network.input_shape, [(None, 10), (None, 10)])
        self.assertShapesEqual(network.output_shape, (None, 20))
# Example 11
    def test_concatenate_init_error(self):
        """Concatenation fails when the inputs don't match over a
        non-concatenated dimension."""
        input_layer_1 = layers.Input((28, 28, 3))
        input_layer_2 = layers.Input((28, 28, 1))
        concat_layer = layers.Concatenate(axis=2)

        layers.join(input_layer_1, concat_layer)
        # Last dimension differs (3 vs 1) while concatenation is over axis 2
        with self.assertRaisesRegexp(LayerConnectionError, "match over"):
            layers.join(input_layer_2, concat_layer)
# Example 12
    def test_concatenate_basic(self):
        """Concatenation over the last axis joins two 4D tensors."""
        concat = layers.Concatenate(axis=-1)

        first_input = asfloat(np.random.random((1, 3, 4, 2)))
        second_input = asfloat(np.random.random((1, 3, 4, 8)))

        # 2 + 8 values concatenated over the last dimension
        result = self.eval(concat.output(first_input, second_input))
        self.assertEqual((1, 3, 4, 10), result.shape)
# Example 13
 def test_parallel_with_joined_connections(self):
     """Pre-joined connections can be used both as parallel branches
     and as the merge target."""
     # Should work without errors
     layers.join(
         [
             layers.Convolution((11, 5, 5)) > layers.Relu(),
             layers.Convolution((10, 3, 3)) > layers.Relu(),
         ],
         layers.Concatenate() > layers.Relu(),
     )
# Example 14
    def test_concatenate_init_error(self):
        """Error message points to the dimension that doesn't match."""
        inputs = layers.parallel(
            layers.Input((28, 28, 3)),
            layers.Input((28, 28, 1)),
        )

        # Concatenation over axis 2 requires dimension #3 to match (3 vs 1)
        expected_message = "don't match over dimension #3"
        with self.assertRaisesRegexp(LayerConnectionError, expected_message):
            layers.join(inputs, layers.Concatenate(axis=2))
# Example 15
    def test_concatenate_different_dim_number(self):
        """Inputs with different numbers of dimensions cannot be
        concatenated."""
        inputs = layers.parallel(
            layers.Input((28, 28)),
            layers.Input((28, 28, 1)),
        )

        # One input is 2D, the other is 3D
        expected_msg = "different number of dimensions"
        with self.assertRaisesRegexp(LayerConnectionError, expected_msg):
            layers.join(inputs, layers.Concatenate(axis=1))
# Example 16
    def test_single_input_for_parallel_layers(self):
        """Parallel branches that grow from the same connection share
        one input layer."""
        left = layers.Input(10, name='input') > layers.Sigmoid(5)
        # ``right`` extends ``left``, so both branches share its input
        right = left > layers.Sigmoid(2)
        network = [left, right] > layers.Concatenate()

        self.assertEqual(len(network.input_layers), 1)

        input_layer = network.input_layers[0]
        self.assertEqual(input_layer.name, 'input')
# Example 17
    def test_concatenate_different_dim_number(self):
        """Joining an input whose number of dimensions differs from the
        one already connected raises an error."""
        input_layer_1 = layers.Input((28, 28))
        input_layer_2 = layers.Input((28, 28, 1))
        concat_layer = layers.Concatenate(axis=1)

        layers.join(input_layer_1, concat_layer)
        # A 2D input is already connected; the 3D one cannot be joined
        expected_msg = "different number of dimensions"
        with self.assertRaisesRegexp(LayerConnectionError, expected_msg):
            layers.join(input_layer_2, concat_layer)
# Example 18
    def test_network_representation_for_non_feedforward(self):
        """Non-feedforward networks get a compressed string
        representation instead of a layer-by-layer listing."""
        input_layer = layers.Input(10)
        hidden_layer_1 = layers.Sigmoid(20)
        hidden_layer_2 = layers.Sigmoid(20)
        output_layer = layers.Concatenate()

        # Two parallel paths through the hidden layers into one concatenation
        connection = layers.join(input_layer, hidden_layer_1, output_layer)
        connection = layers.join(input_layer, hidden_layer_2, output_layer)

        network = algorithms.GradientDescent(connection)
        self.assertIn("[... 4 layers ...]", str(network))
# Example 19
 def test_parallel_layer_with_residual_connections(self):
     """An empty parallel branch acts as a residual (identity)
     connection around the convolutional branch."""
     connection = layers.join(
         layers.Input((3, 8, 8)),
         [[
             layers.Convolution((7, 1, 1)),
             layers.Relu()
         ], [
             # Residual connection
         ]],
         layers.Concatenate(),
     )
     # 7 convolution channels + 3 residual input channels = 10
     self.assertEqual(connection.output_shape, (10, 8, 8))
    def test_concatenate_basic(self):
        """Concatenation over axis 1 joins two 4D Theano tensors."""
        concat_layer = layers.Concatenate(axis=1)

        x1 = T.tensor4()
        x2 = T.tensor4()
        y = theano.function([x1, x2], concat_layer.output(x1, x2))

        x1_tensor4 = asfloat(np.random.random((1, 2, 3, 4)))
        x2_tensor4 = asfloat(np.random.random((1, 8, 3, 4)))
        output = y(x1_tensor4, x2_tensor4)

        # 2 + 8 = 10 over the concatenation axis
        self.assertEqual((1, 10, 3, 4), output.shape)
# Example 21
    def test_network_architecture_output_exception(self):
        """``architecture()`` raises ``TypeError`` for networks with a
        non-feedforward (branching) structure."""
        input_layer = layers.Input(10)
        hidden_layer_1 = layers.Sigmoid(20)
        hidden_layer_2 = layers.Sigmoid(20)
        output_layer = layers.Concatenate()

        # Build a graph with two parallel branches into one concatenation
        connection = layers.join(input_layer, hidden_layer_1, output_layer)
        connection = layers.join(input_layer, hidden_layer_2, output_layer)

        network = algorithms.GradientDescent(connection)
        with self.assertRaises(TypeError):
            network.architecture()
# Example 22
    def test_is_sequential_partial_connection(self):
        """A connection that branches out is not sequential, while each
        branch on its own still is."""
        connection_2 = layers.Input(10) > layers.Sigmoid(5)
        # Two branches growing from the same partial connection
        connection_31 = connection_2 > layers.Sigmoid(1)
        connection_32 = connection_2 > layers.Sigmoid(2)

        concatenate = layers.Concatenate()

        # Both branches merge into the same concatenation layer
        connection_4 = connection_31 > concatenate
        connection_4 = connection_32 > concatenate

        self.assertFalse(is_sequential(connection_4))
        self.assertTrue(is_sequential(connection_31))
        self.assertTrue(is_sequential(connection_32))
# Example 23
 def test_saliency_map_invalid_n_inputs(self):
     """Saliency maps cannot be built for multi-input networks."""
     new_network = layers.join(
         layers.parallel(
             layers.Input((28, 28, 3)),
             layers.Input((28, 28, 3)),
         ),
         layers.Concatenate(),
         # Reuse the tail of the fixture network starting from 'conv'
         self.network.start('conv'),
     )
     message = ("Cannot build saliency map for the network that "
                "has more than one input layer.")
     with self.assertRaisesRegexp(InvalidConnection, message):
         plots.saliency_map(new_network, self.image)
# Example 24
    def test_residual_networks(self):
        """An identity branch concatenated with a convolutional branch
        keeps spatial dimensions and adds up the channels."""
        input_layer = layers.Input((5, 5, 3))
        identity_branch = layers.Identity()
        conv_branch = layers.join(
            layers.Convolution((3, 3, 8), padding='same'),
            layers.Relu(),
        )

        network = layers.join(
            input_layer,
            layers.parallel(identity_branch, conv_branch),
            layers.Concatenate(),
        )

        self.assertShapesEqual((None, 5, 5, 3), network.input_shape)
        # 3 identity channels + 8 convolution channels = 11
        self.assertShapesEqual((None, 5, 5, 11), network.output_shape)
# Example 25
    def test_concat_with_late_inputs(self):
        """Shapes of a concatenated parallel network stay unknown until
        an input layer gets attached."""
        network = layers.join(
            layers.parallel(
                layers.Relu(),
                layers.Relu(),
            ),
            layers.Concatenate(),
        )

        # No input layer yet: nothing is known about the shapes
        self.assertShapesEqual(network.input_shape, [None, None])
        self.assertShapesEqual(network.output_shape, None)

        network = layers.Input((10, 10, 3)) >> network

        # The same input feeds both branches: 3 + 3 = 6 channels
        self.assertShapesEqual(network.input_shape, (None, 10, 10, 3))
        self.assertShapesEqual(network.output_shape, (None, 10, 10, 6))
# Example 26
    def test_multi_inputs_propagation(self):
        """Inputs can be propagated positionally or as a dictionary
        keyed by input layer names, with identical results."""
        network = layers.join(
            layers.parallel(
                layers.Input(10, name='input-1'),
                layers.Input(4, name='input-2'),
            ),
            layers.Concatenate(),
        )
        x1 = asfloat(np.random.random((3, 10)))
        x2 = asfloat(np.random.random((3, 4)))

        # Dictionary order doesn't matter: values are matched by name
        out1 = self.eval(network.output(x1, x2))
        out2 = self.eval(network.output({'input-2': x2, 'input-1': x1}))

        # 10 + 4 = 14 features after concatenation
        self.assertEqual((3, 14), out1.shape)
        np.testing.assert_array_almost_equal(out1, out2)
# Example 27
    def test_concatenate_conv_layers(self):
        """Convolutional branches whose spatial output shapes match can
        be concatenated over the channel axis."""
        network = layers.join(
            layers.Input((28, 28, 3)),
            layers.parallel(
                # One 5x5 convolution vs two stacked 3x3 convolutions;
                # both reduce 28x28 down to 24x24
                layers.Convolution((5, 5, 7)),
                layers.join(
                    layers.Convolution((3, 3, 1)),
                    layers.Convolution((3, 3, 4)),
                ),
            ), layers.Concatenate(axis=-1))

        # 7 + 4 = 11 channels after concatenation
        self.assertShapesEqual((None, 24, 24, 11), network.output_shape)

        x_tensor4 = asfloat(np.random.random((5, 28, 28, 3)))
        actual_output = self.eval(network.output(x_tensor4))

        self.assertEqual((5, 24, 24, 11), actual_output.shape)
def Inception_1(conv_filters):
    """Build the first Inception block variant.

    Parameters
    ----------
    conv_filters : list of list of int
        Filter counts for the four parallel branches, in order:
        ``[[1x1], [reduce, 5x5], [reduce, 3x3, 3x3], [pool-proj]]``.
    """
    branch_1x1 = [
        ConvReluBN((conv_filters[0][0], 1, 1)),
    ]
    branch_5x5 = [
        ConvReluBN((conv_filters[1][0], 1, 1)),
        ConvReluBN((conv_filters[1][1], 5, 5), padding=2),
    ]
    branch_3x3 = [
        ConvReluBN((conv_filters[2][0], 1, 1)),
        ConvReluBN((conv_filters[2][1], 3, 3), padding=1),
        ConvReluBN((conv_filters[2][2], 3, 3), padding=1),
    ]
    branch_pool = [
        # Average pooling ignores padded zeros when computing the mean
        layers.AveragePooling((3, 3), stride=(1, 1), padding=1,
                              mode='exclude_padding'),
        ConvReluBN((conv_filters[3][0], 1, 1)),
    ]

    return layers.join(
        [branch_1x1, branch_5x5, branch_3x3, branch_pool],
        layers.Concatenate(),
    )
# Example 29
def Fire(s_1x1, e_1x1, e_3x3, name):
    """Fire module from SqueezeNet (channels-first variant).

    A 1x1 squeeze convolution followed by parallel 1x1 and 3x3 expand
    convolutions whose outputs get concatenated.

    Parameters
    ----------
    s_1x1 : int
        Number of filters in the squeeze 1x1 convolution.
    e_1x1 : int
        Number of filters in the expand 1x1 convolution.
    e_3x3 : int
        Number of filters in the expand 3x3 convolution.
    name : str
        Prefix used for all layer names in this module.
    """
    squeeze = layers.Convolution(
        (s_1x1, 1, 1),
        padding='half',
        name=name + '/squeeze1x1',
    )
    return layers.join(
        squeeze,
        layers.Relu(),
        [[
            layers.Convolution(
                (e_1x1, 1, 1),
                padding='half',
                name=name + '/expand1x1',
            ),
            layers.Relu(),
        ], [
            layers.Convolution(
                (e_3x3, 3, 3),
                padding='half',
                name=name + '/expand3x3',
            ),
            layers.Relu(),
        ]],
        layers.Concatenate(),
    )
# Example 30
    def test_concatenate_conv_layers(self):
        """Branches built by joining into a shared concatenate layer
        produce the expected concatenated channel count."""
        input_layer = layers.Input((28, 28, 3))
        hidden_layer_1 = layers.Convolution((5, 5, 7))
        hidden_layer_21 = layers.Convolution((3, 3, 1))
        hidden_layer_22 = layers.Convolution((3, 3, 4))
        concat_layer = layers.Concatenate(axis=-1)

        # Both branches start from the same input layer and end in the
        # same concatenation layer
        connection = layers.join(input_layer, hidden_layer_1, concat_layer)
        connection = layers.join(input_layer, hidden_layer_21, hidden_layer_22,
                                 concat_layer)
        connection.initialize()

        # 7 + 4 = 11 channels; both branches reduce 28x28 to 24x24
        self.assertEqual((24, 24, 11), concat_layer.output_shape)

        x_tensor4 = asfloat(np.random.random((5, 28, 28, 3)))
        actual_output = self.eval(connection.output(x_tensor4))

        self.assertEqual((5, 24, 24, 11), actual_output.shape)