Example #1
    def test_storage_load_dict_invalid_number_of_parameters(self):
        network = layers.join(
            layers.Input(3),
            layers.Relu(4, name='relu'),
            layers.Linear(5, name='linear') > layers.Relu(),
            layers.Softmax(6, name='softmax'),
        )
        data = {
            'metadata': {},  # omitted for simplicity
            'graph': {},  # omitted for simplicity
            # Input layer was omitted on purpose
            'layers': [{
                'name': 'name-1',
                'class_name': 'Relu',
                'configs': {},
                'parameters': {
                    'weight': {
                        'trainable': True,
                        'value': np.ones((3, 4))
                    },
                    'bias': {
                        'trainable': True,
                        'value': np.ones((4, ))
                    },
                }
            }]
        }

        with self.assertRaises(ParameterLoaderError):
            storage.load_dict(network, data, ignore_missing=False)
Example #2
    def test_invalid_arguments_exceptions(self):
        network = layers.join(
            layers.Input((3, 28, 28)),
            layers.Convolution((8, 3, 3), name='conv') > layers.Relu(),
            layers.Reshape(),
            layers.Softmax(10),
        )
        image = np.ones((3, 28, 28))

        with self.assertRaisesRegexp(ValueError, 'Invalid image shape'):
            plots.saliency_map(network, np.ones((28, 28)))

        with self.assertRaisesRegexp(ValueError, 'invalid value'):
            plots.saliency_map(network, image, mode='invalid-mode')

        with self.assertRaises(InvalidConnection):
            new_network = network > [
                layers.Sigmoid(1), layers.Sigmoid(2)
            ]
            plots.saliency_map(new_network, image)

        with self.assertRaises(InvalidConnection):
            new_network = [
                layers.Input((3, 28, 28)), layers.Input((3, 28, 28))
            ] > network.start('conv')
            plots.saliency_map(new_network, image)

        with self.assertRaisesRegexp(InvalidConnection, 'invalid input shape'):
            plots.saliency_map(layers.Input(10) > layers.Relu(), image)
Example #3
    def test_shared_parameters_between_layers(self):
        hidden_layer_1 = layers.Relu(10)
        network = layers.Input(10) > hidden_layer_1

        hidden_layer_2 = layers.Relu(
            size=10,
            weight=hidden_layer_1.weight,
            bias=hidden_layer_1.bias)

        network = network > hidden_layer_2

        self.assertIs(hidden_layer_1.weight, hidden_layer_2.weight)
        self.assertIs(hidden_layer_1.bias, hidden_layer_2.bias)

        # Check that the network can be trained without errors
        x_train = y_train = asfloat(np.random.random((15, 10)))
        gdnet = algorithms.GradientDescent(network, batch_size='all')
        gdnet.train(x_train, y_train, epochs=5)

        np.testing.assert_array_almost_equal(
            self.eval(hidden_layer_1.weight),
            self.eval(hidden_layer_2.weight),
        )
        np.testing.assert_array_almost_equal(
            self.eval(hidden_layer_1.bias),
            self.eval(hidden_layer_2.bias),
        )
Example #4
    def test_change_input_layer(self):
        network = layers.join(
            layers.Input(10, name='input-1'),
            layers.Relu(5, name='relu-1'),
            layers.Relu(1, name='relu-2'),
        )
        network.create_variables()

        self.assertShapesEqual(network.input_shape, (None, 10))
        self.assertShapesEqual(network.output_shape, (None, 1))
        self.assertEqual(len(network), 3)

        relu_1_network = network.start('relu-1')
        self.assertShapesEqual(relu_1_network.input_shape, (None, 10))
        self.assertShapesEqual(relu_1_network.output_shape, (None, 1))
        self.assertEqual(len(relu_1_network), 2)
        self.assertDictEqual(
            relu_1_network.forward_graph, {
                network.layer('relu-1'): [network.layer('relu-2')],
                network.layer('relu-2'): [],
            })

        x_test = asfloat(np.ones((7, 10)))
        y_predicted = self.eval(relu_1_network.output(x_test))
        self.assertEqual(y_predicted.shape, (7, 1))
Example #5
    def test_select_network_branch(self):
        network = layers.join(
            layers.Input(10, name='input-1'),
            layers.parallel(
                layers.Relu(1, name='relu-1'),
                layers.Relu(2, name='relu-2'),
            ))

        self.assertShapesEqual(network.input_shape, (None, 10))
        self.assertShapesEqual(network.output_shape, [(None, 1), (None, 2)])
        self.assertEqual(len(network), 3)

        relu_1_network = network.end('relu-1')
        self.assertShapesEqual(relu_1_network.input_shape, (None, 10))
        self.assertShapesEqual(relu_1_network.output_shape, (None, 1))
        self.assertEqual(len(relu_1_network), 2)

        x_test = asfloat(np.ones((7, 10)))
        y_predicted = self.eval(relu_1_network.output(x_test))
        self.assertEqual(y_predicted.shape, (7, 1))

        relu_2_network = network.end('relu-2')
        self.assertShapesEqual(relu_2_network.input_shape, (None, 10))
        self.assertShapesEqual(relu_2_network.output_shape, (None, 2))
        self.assertEqual(len(relu_2_network), 2)
Example #6
    def test_cut_input_and_output_layers(self):
        network = layers.join(
            layers.Input(10, name='input-1'),
            layers.Relu(8, name='relu-0'),
            layers.Relu(5, name='relu-1'),
            layers.Relu(2, name='relu-2'),
            layers.Relu(1, name='relu-3'),
        )
        network.create_variables()

        self.assertShapesEqual(network.input_shape, (None, 10))
        self.assertShapesEqual(network.output_shape, (None, 1))
        self.assertEqual(len(network), 5)

        cutted_network = network.start('relu-1').end('relu-2')

        self.assertShapesEqual(cutted_network.input_shape, (None, 8))
        self.assertShapesEqual(cutted_network.output_shape, (None, 2))
        self.assertEqual(len(cutted_network), 2)
        self.assertDictEqual(
            cutted_network.forward_graph, {
                network.layer('relu-1'): [network.layer('relu-2')],
                network.layer('relu-2'): [],
            })

        x_test = asfloat(np.ones((7, 8)))
        y_predicted = self.eval(cutted_network.output(x_test))
        self.assertEqual(y_predicted.shape, (7, 2))
Example #7
    def test_subnetwork_in_conv_network(self):
        network = layers.join(
            layers.Input((28, 28, 1)),
            layers.Convolution((3, 3, 8)) >> layers.Relu(),
            layers.Convolution((3, 3, 8)) >> layers.Relu(),
            layers.MaxPooling((2, 2)),
            layers.Reshape(),
            layers.Softmax(1),
        )

        self.assertEqual(8, len(network))
        self.assertTrue(network.is_sequential())
        self.assertShapesEqual(network.input_shape, (None, 28, 28, 1))
        self.assertShapesEqual(network.output_shape, (None, 1))

        expected_order = [
            layers.Input,
            layers.Convolution,
            layers.Relu,
            layers.Convolution,
            layers.Relu,
            layers.MaxPooling,
            layers.Reshape,
            layers.Softmax,
        ]
        for actual_layer, expected_layer in zip(network, expected_order):
            self.assertIsInstance(actual_layer, expected_layer)
Example #8
    def test_transfer_learning_using_position(self):
        network_pretrained = layers.join(
            layers.Input(10),
            layers.Relu(5),
            layers.Relu(2, name='relu-2'),
            layers.Sigmoid(1),
        )
        network_new = layers.join(
            layers.Input(10),
            layers.Relu(5),
            layers.Relu(2),
        )
        pretrained_layers_stored = storage.save_dict(network_pretrained)

        with self.assertRaises(ParameterLoaderError):
            storage.load_dict(network_new,
                              pretrained_layers_stored,
                              load_by='names_or_order',
                              ignore_missed=False)

        storage.load_dict(network_new,
                          pretrained_layers_stored,
                          load_by='names_or_order',
                          ignore_missed=True)

        pretrained_predictor = network_pretrained.end('relu-2').compile()
        new_network_predictor = network_new.compile()

        random_input = asfloat(np.random.random((12, 10)))

        pretrained_output = pretrained_predictor(random_input)
        new_network_output = new_network_predictor(random_input)

        np.testing.assert_array_almost_equal(pretrained_output,
                                             new_network_output)
Example #9
    def test_transfer_learning_using_names(self):
        network_pretrained = layers.join(
            layers.Input(10),
            layers.Relu(5, name='relu-1'),
            layers.Relu(2, name='relu-2'),
            layers.Sigmoid(1),
        )
        network_new = layers.join(
            layers.Input(10),
            layers.Relu(5, name='relu-1'),
            layers.Relu(2, name='relu-2'),
            layers.Relu(8, name='relu-3'),  # new layer
        )
        pretrained_layers_stored = storage.save_dict(network_pretrained)

        storage.load_dict(
            network_new,
            pretrained_layers_stored,
            load_by='names',
            skip_validation=False,
            ignore_missing=True)

        random_input = asfloat(np.random.random((12, 10)))

        pretrained_output = self.eval(
            network_pretrained.end('relu-2').output(random_input))
        new_network_output = self.eval(
            network_new.end('relu-2').output(random_input))

        np.testing.assert_array_almost_equal(
            pretrained_output, new_network_output)

        pred = self.eval(network_new.output(random_input))
        self.assertEqual(pred.shape, (12, 8))
Example #10
    def test_parallel_many_to_many_connection(self):
        relu_layer_1 = layers.Relu(1)
        sigmoid_layer_1 = layers.Sigmoid(1)

        relu_layer_2 = layers.Relu(2)
        sigmoid_layer_2 = layers.Sigmoid(2)

        connection = layers.join(
            [
                sigmoid_layer_1,
                relu_layer_1,
            ], [
                sigmoid_layer_2,
                relu_layer_2,
            ],
        )

        self.assertEqual(connection.input_shape, [None, None])
        self.assertEqual(connection.output_shape, [(2,), (2,)])

        graph = connection.graph

        for layer in [relu_layer_1, sigmoid_layer_1]:
            n_forward_connections = len(graph.forward_graph[layer])
            n_backward_connections = len(graph.backward_graph[layer])

            self.assertEqual(n_forward_connections, 2)
            self.assertEqual(n_backward_connections, 0)

        for layer in [relu_layer_2, sigmoid_layer_2]:
            n_forward_connections = len(graph.forward_graph[layer])
            n_backward_connections = len(graph.backward_graph[layer])

            self.assertEqual(n_forward_connections, 0)
            self.assertEqual(n_backward_connections, 2)
Example #11
    def test_json_storage(self):
        connection_1 = layers.join(
            layers.Input(10),
            [
                layers.Sigmoid(5),
                layers.Relu(5),
            ],
            layers.Elementwise(),
        )
        predict_1 = connection_1.compile()

        connection_2 = layers.join(
            layers.Input(10),
            [
                layers.Sigmoid(5),
                layers.Relu(5),
            ],
            layers.Elementwise(),
        )
        predict_2 = connection_2.compile()

        random_input = asfloat(np.random.random((13, 10)))
        random_output_1 = predict_1(random_input)
        random_output_2_1 = predict_2(random_input)

        # Outputs have to be different
        self.assertFalse(np.any(random_output_1 == random_output_2_1))

        with tempfile.NamedTemporaryFile() as temp:
            storage.save_json(connection_1, temp.name)
            storage.load_json(connection_2, temp.name)
            random_output_2_2 = predict_2(random_input)

            np.testing.assert_array_almost_equal(random_output_1,
                                                 random_output_2_2)
Example #12
    def test_simple_storage_hdf5(self):
        network_1 = layers.join(
            layers.Input(10),
            layers.parallel(
                layers.Sigmoid(5),
                layers.Relu(5),
            ),
            layers.Elementwise(),
        )
        network_2 = layers.join(
            layers.Input(10),
            layers.parallel(
                layers.Sigmoid(5),
                layers.Relu(5),
            ),
            layers.Elementwise(),
        )

        random_input = asfloat(np.random.random((13, 10)))
        random_output_1 = self.eval(network_1.output(random_input))
        random_output_2_1 = self.eval(network_2.output(random_input))

        # Outputs have to be different
        self.assertFalse(np.any(random_output_1 == random_output_2_1))

        with tempfile.NamedTemporaryFile() as temp:
            storage.save_hdf5(network_1, temp.name)
            storage.load_hdf5(network_2, temp.name)

            random_output_2_2 = self.eval(network_2.output(random_input))

            np.testing.assert_array_almost_equal(random_output_1,
                                                 random_output_2_2)
Example #13
    def test_select_network_branch(self):
        network = layers.join(layers.Input(10, name='input-1'), [[
            layers.Relu(1, name='relu-1'),
        ], [
            layers.Relu(2, name='relu-2'),
        ]])

        self.assertEqual(network.input_shape, (10, ))
        self.assertEqual(network.output_shape, [(1, ), (2, )])
        self.assertEqual(len(network), 3)

        relu_1_network = network.end('relu-1')
        self.assertEqual(relu_1_network.input_shape, (10, ))
        self.assertEqual(relu_1_network.output_shape, (1, ))
        self.assertEqual(len(relu_1_network), 2)

        predict = relu_1_network.compile()
        x_test = asfloat(np.ones((7, 10)))
        y_predicted = predict(x_test)
        self.assertEqual(y_predicted.shape, (7, 1))

        relu_2_network = network.end('relu-2')
        self.assertEqual(relu_2_network.input_shape, (10, ))
        self.assertEqual(relu_2_network.output_shape, (2, ))
        self.assertEqual(len(relu_2_network), 2)
Example #14
    def test_transfer_learning_using_names(self):
        network_pretrained = layers.join(
            layers.Input(10),
            layers.Relu(5, name='relu-1'),
            layers.Relu(2, name='relu-2'),
            layers.Sigmoid(1),
        )
        network_new = layers.join(
            layers.Input(10),
            layers.Relu(5, name='relu-1'),
            layers.Relu(2, name='relu-2'),
            layers.Relu(8, name='relu-3'),  # new layer
        )
        pretrained_layers_stored = storage.save_dict(network_pretrained)

        storage.load_dict(network_new,
                          pretrained_layers_stored,
                          load_by='names',
                          ignore_missed=True)

        pretrained_predictor = network_pretrained.end('relu-2').compile()
        new_network_predictor = network_new.end('relu-2').compile()

        random_input = asfloat(np.random.random((12, 10)))

        pretrained_output = pretrained_predictor(random_input)
        new_network_output = new_network_predictor(random_input)

        np.testing.assert_array_almost_equal(pretrained_output,
                                             new_network_output)

        new_full_network_predictor = network_new.compile()
        pred = new_full_network_predictor(random_input)
        self.assertEqual(pred.shape, (12, 8))
Example #15
def Fire(s_1x1, e_1x1, e_3x3, name):
    return layers.join(
        layers.Convolution(
            (1, 1, s_1x1),
            padding='SAME',
            name=name + '/squeeze1x1'
        ),
        layers.Relu(),
        layers.parallel([
            layers.Convolution(
                (1, 1, e_1x1),
                padding='SAME',
                name=name + '/expand1x1'
            ),
            layers.Relu(),
        ], [
            layers.Convolution(
                (3, 3, e_3x3),
                padding='SAME',
                name=name + '/expand3x3'
            ),
            layers.Relu(),
        ]),
        layers.Concatenate(),
    )
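A minimal sketch (not from the original source) of how the Fire block defined above might be composed into a small SqueezeNet-style classifier. It reuses only constructs that appear elsewhere in these examples; the input shape, filter counts, and layer names are illustrative assumptions.

def squeezenet_sketch():
    # Hypothetical composition of Fire blocks; all shapes and names are assumed.
    return layers.join(
        layers.Input((32, 32, 3)),
        layers.Convolution((3, 3, 16), padding='SAME', name='conv-1'),
        layers.Relu(),
        layers.MaxPooling((2, 2)),
        Fire(8, 16, 16, name='fire-1'),
        Fire(8, 16, 16, name='fire-2'),
        layers.MaxPooling((2, 2)),
        layers.Reshape(),
        layers.Softmax(10, name='softmax'),
    )

Since each Fire call concatenates its 1x1 and 3x3 expand branches, the blocks above would produce 16 + 16 = 32 channels before the final pooling and softmax.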
Example #16
def ANN(X_train, X_test, y_train, y_test, X_dummy):
    environment.reproducible()
    target_scaler = OneHotEncoder()
    net = algorithms.Momentum(
        [
            layers.Input(17),
            layers.Relu(100),
            layers.Relu(70),
            layers.Softmax(32),
        ],
        error='categorical_crossentropy',
        step=0.01,
        verbose=True,
        shuffle_data=True,
        momentum=0.99,
        nesterov=True,
    )
    # convert label vectors to one-hot encoding
    d1 = int(y_train.shape[0])
    d2 = int(y_test.shape[0])
    Y_train = np.zeros((d1, 32))
    Y_test = np.zeros((d2, 32))
    Y_train[np.arange(d1), y_train] = 1
    Y_test[np.arange(d2), y_test] = 1

    net.architecture()
    net.train(X_train, Y_train, X_test, Y_test, epochs=20)
    y_predicted = net.predict(X_test).argmax(axis=1)
    y_dummy = net.predict(X_dummy).argmax(axis=1)
    #print 'predicted values'
    #print y_predicted
    Y_test = np.asarray(Y_test.argmax(axis=1)).reshape(len(Y_test))
    #print(metrics.classification_report(Y_test, y_predicted))
    return y_dummy, y_predicted, metrics.accuracy_score(Y_test, y_predicted)
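A minimal usage sketch (not part of the original code) showing how the ANN helper above might be called. The random data is purely illustrative and only matches the assumptions already baked into the function: 17 input features and 32 target classes.

import numpy as np

# Illustrative random data shaped for the 17-feature, 32-class network above.
X_train = np.random.random((100, 17))
X_test = np.random.random((30, 17))
y_train = np.random.randint(0, 32, size=100)
y_test = np.random.randint(0, 32, size=30)
X_dummy = np.random.random((5, 17))

y_dummy, y_predicted, accuracy = ANN(X_train, X_test, y_train, y_test, X_dummy)
print('Test accuracy:', accuracy)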
Example #17
    def test_connect_cutted_layers_to_other_layers(self):
        network = layers.join(
            layers.Input(10, name='input-1'),
            layers.Relu(8, name='relu-0'),
            layers.Relu(5, name='relu-1'),
            layers.Relu(2, name='relu-2'),
            layers.Relu(1, name='relu-3'),
        )

        self.assertEqual(network.input_shape, (10, ))
        self.assertEqual(network.output_shape, (1, ))
        self.assertEqual(len(network), 5)

        cutted_network = network.start('relu-1').end('relu-2')
        self.assertEqual(cutted_network.input_shape, (8, ))
        self.assertEqual(cutted_network.output_shape, (2, ))
        self.assertEqual(len(cutted_network), 2)

        new_network = layers.join(
            layers.Input(8),
            cutted_network,
            layers.Sigmoid(11),
        )
        self.assertEqual(new_network.input_shape, (8, ))
        self.assertEqual(new_network.output_shape, (11, ))
        self.assertEqual(len(new_network), 4)

        x_test = asfloat(np.ones((7, 10)))
        y_predicted = self.eval(network.output(x_test))
        self.assertEqual(y_predicted.shape, (7, 1))

        x_test = asfloat(np.ones((7, 8)))
        y_predicted = self.eval(new_network.output(x_test))
        self.assertEqual(y_predicted.shape, (7, 11))
Example #18
    def test_elementwise_in_connections(self):
        input_layer = layers.Input(2)
        hidden_layer_1 = layers.Relu(1,
                                     weight=init.Constant(1),
                                     bias=init.Constant(0))
        hidden_layer_2 = layers.Relu(1,
                                     weight=init.Constant(2),
                                     bias=init.Constant(0))
        elem_layer = layers.Elementwise(merge_function=tf.add)

        connection = layers.join(input_layer, hidden_layer_1, elem_layer)
        connection = layers.join(input_layer, hidden_layer_2, elem_layer)
        connection.initialize()

        self.assertEqual(elem_layer.output_shape, (1, ))

        test_input = asfloat(np.array([
            [0, 1],
            [-1, -1],
        ]))
        actual_output = self.eval(connection.output(test_input))
        expected_output = np.array([
            [3],
            [0],
        ])
        np.testing.assert_array_almost_equal(expected_output, actual_output)
Example #19
    def test_connection_output(self):
        input_value = asfloat(np.random.random((10, 2)))

        connection = layers.Input(2) > layers.Relu(10) > layers.Relu(1)
        output_value = self.eval(connection.output(input_value))

        self.assertEqual(output_value.shape, (10, 1))
Example #20
    def test_connection_wrong_number_of_input_values(self):
        input_value_1 = asfloat(np.random.random((10, 2)))
        input_value_2 = asfloat(np.random.random((10, 2)))

        connection = layers.Input(2) > layers.Relu(10) > layers.Relu(1)

        with self.assertRaisesRegexp(ValueError, "but 2 inputs was provided"):
            connection.output(input_value_1, input_value_2)
Example #21
    def test_unknown_layer_name_exception(self):
        network = layers.join(
            layers.Input(10, name='input-1'),
            layers.Relu(5, name='relu-1'),
            layers.Relu(1, name='relu-2'),
        )
        with self.assertRaises(NameError):
            network.end('abc')
Example #22
    def test_inplace_seq_operator(self):
        network = layers.Input(1)
        network >>= layers.Relu(2)
        network >>= layers.Relu(3)

        self.assertEqual(len(network), 3)
        self.assertShapesEqual(network.input_shape, (None, 1))
        self.assertShapesEqual(network.output_shape, (None, 3))
Example #23
    def test_gated_average_layer_output_shape(self):
        network = layers.join(
            layers.parallel(
                layers.Input(10) >> layers.Softmax(2),
                layers.Input(20) >> layers.Relu(8),
                layers.Input(20) >> layers.Relu(8),
            ), layers.GatedAverage())
        self.assertShapesEqual(network.output_shape, (None, 8))
Example #24
    def test_left_shift_inplace_inline_operator(self):
        network = layers.Relu(3)
        network <<= layers.Relu(2)
        network <<= layers.Input(1)

        expected_shapes = [1, 2, 3]
        for layer, expected_shape in zip(network, expected_shapes):
            self.assertEqual(layer.output_shape[0], expected_shape)
Example #25
    def test_relu_activation(self):
        layer = layers.Relu()
        self.assertEqual(0, self.eval(layer.activation_function(-10)))
        self.assertEqual(0, self.eval(layer.activation_function(0)))
        self.assertEqual(10, self.eval(layer.activation_function(10)))

        layer = layers.Relu(alpha=0.1)
        self.assertAlmostEqual(-1, self.eval(layer.activation_function(-10)))
        self.assertAlmostEqual(-0.2, self.eval(layer.activation_function(-2)))
Example #26
    def test_mixture_of_experts_non_network_inputs(self):
        error_message = (
            "Invalid input, Mixture of experts expects networks/layers"
        )
        with self.assertRaisesRegexp(TypeError, error_message):
            architectures.mixture_of_experts([
                layers.Input(5) >> layers.Relu(10),
                [layers.Input(5), layers.Relu(10)]
            ])
Example #27
    def test_parallel_with_joined_connections(self):
        # Should work without errors
        layers.join(
            [
                layers.Convolution((11, 5, 5)) > layers.Relu(),
                layers.Convolution((10, 3, 3)) > layers.Relu(),
            ],
            layers.Concatenate() > layers.Relu(),
        )
Example #28
    def test_storage_load_dict_using_wrong_names(self):
        connection = layers.join(
            layers.Input(3),
            layers.Relu(4, name='relu'),
            layers.Linear(5, name='linear') > layers.Relu(),
            layers.Softmax(6, name='softmax'),
        )

        storage.load_dict(connection, {
            'metadata': {},  # omitted for simplicity
            'graph': {},  # omitted for simplicity
            # Input layer was omitted on purpose
            'layers': [{
                'name': 'name-1',
                'class_name': 'Relu',
                'input_shape': (3,),
                'output_shape': (4,),
                'configs': {},
                'parameters': {
                    'weight': {'trainable': True, 'value': np.ones((3, 4))},
                    'bias': {'trainable': True, 'value': np.ones((4,))},
                }
            }, {
                'name': 'name-2',
                'class_name': 'Relu',
                'input_shape': (4,),
                'output_shape': (5,),
                'configs': {},
                'parameters': {
                    'weight': {'trainable': True, 'value': np.ones((4, 5))},
                    'bias': {'trainable': True, 'value': np.ones((5,))},
                }
            }, {
                'name': 'name-3',
                'class_name': 'Softmax',
                'input_shape': (5,),
                'output_shape': (6,),
                'configs': {},
                'parameters': {
                    'weight': {'trainable': True, 'value': np.ones((5, 6))},
                    'bias': {'trainable': True, 'value': np.ones((6,))},
                }
            }]
        }, load_by='order', skip_validation=False)

        relu = connection.layer('relu')
        self.assertEqual(12, np.sum(self.eval(relu.weight)))
        self.assertEqual(4, np.sum(self.eval(relu.bias)))

        linear = connection.layer('linear')
        self.assertEqual(20, np.sum(self.eval(linear.weight)))
        self.assertEqual(5, np.sum(self.eval(linear.bias)))

        softmax = connection.layer('softmax')
        self.assertEqual(30, np.sum(self.eval(softmax.weight)))
        self.assertEqual(6, np.sum(self.eval(softmax.bias)))
Example #29
    def test_storage_invalid_input_type(self):
        network = [
            layers.Input(10),
            layers.Relu(5),
            layers.Relu(2),
        ]
        message = ("Invalid input type. Input should be "
                   "network or optimizer with network")
        with self.assertRaisesRegexp(TypeError, message):
            storage.save_dict(network)
Example #30
    def test_gated_average_layer_non_default_index(self):
        gated_avg_layer = layers.GatedAverage(gating_layer_index=1)
        layers.join([
            layers.Input(20) > layers.Relu(8),
            layers.Input(10) > layers.Softmax(2),
            layers.Input(20) > layers.Relu(8),
        ], gated_avg_layer)

        self.assertEqual(gated_avg_layer.output_shape, (8, ))
        self.assertEqual(gated_avg_layer.input_shape, [(8, ), (2, ), (8, )])