def test_networks_with_complex_parallel_relations(self):
    """Nested parallel branches can be defined without input shapes
    and get concrete shapes only after an input layer is attached."""
    input_layer = layers.Input((5, 5, 3))
    network = layers.join(
        layers.parallel([
            layers.Convolution((1, 1, 8)),
        ], [
            layers.Convolution((1, 1, 4)),
            layers.parallel(
                layers.Convolution((1, 3, 2), padding='same'),
                layers.Convolution((3, 1, 2), padding='same'),
            ),
        ], [
            layers.Convolution((1, 1, 8)),
            layers.Convolution((3, 3, 4), padding='same'),
            layers.parallel(
                layers.Convolution((1, 3, 2), padding='same'),
                layers.Convolution((3, 1, 2), padding='same'),
            )
        ], [
            layers.MaxPooling((3, 3), padding='same', stride=(1, 1)),
            layers.Convolution((1, 1, 8)),
        ]),
        layers.Concatenate(),
    )
    # Before the input is connected every dimension is unknown.
    self.assertShapesEqual(network.input_shape, [None, None, None, None])
    self.assertShapesEqual(network.output_shape, (None, None, None, None))

    # Connect them at the end, because we need to make
    # sure that parallel networks defined without input shapes
    network = layers.join(input_layer, network)
    self.assertShapesEqual(network.output_shape, (None, 5, 5, 24))
def test_simple_storage_hdf5(self):
    """Weights saved to HDF5 from one network can be loaded into a
    second network with the same architecture, reproducing outputs."""
    def build_network():
        # Two identical architectures with independent random weights.
        return layers.join(
            layers.Input(10),
            layers.parallel(
                layers.Sigmoid(5),
                layers.Relu(5),
            ),
            layers.Elementwise(),
        )

    network_1 = build_network()
    network_2 = build_network()

    x = asfloat(np.random.random((13, 10)))
    output_1 = self.eval(network_1.output(x))
    output_2_before = self.eval(network_2.output(x))

    # Random initialization has to make the networks disagree
    # on every output value before the weights are copied over.
    self.assertFalse(np.any(output_1 == output_2_before))

    with tempfile.NamedTemporaryFile() as temp:
        storage.save_hdf5(network_1, temp.name)
        storage.load_hdf5(network_2, temp.name)

        output_2_after = self.eval(network_2.output(x))
        np.testing.assert_array_almost_equal(output_1, output_2_after)
def test_gated_average_layer_exceptions(self):
    """GatedAverage validates gate rank, class count and branch shapes."""
    # The gating branch has to produce a 2-dimensional output.
    branches = layers.parallel(
        layers.Input((10, 3, 3)),
        layers.Input(20) >> layers.Relu(8),
        layers.Input(20) >> layers.Relu(8),
    )
    with self.assertRaisesRegexp(LayerConnectionError, "should be 2-dimensional"):
        layers.join(branches, layers.GatedAverage())

    # Number of gate classes has to match the number of gated branches.
    branches = layers.parallel(
        layers.Input(10) >> layers.Softmax(3),
        layers.Input(20) >> layers.Relu(8),
        layers.Input(20) >> layers.Relu(8),
    )
    with self.assertRaisesRegexp(LayerConnectionError, "only 3 networks, got 2 networks"):
        layers.join(branches, layers.GatedAverage())

    # Gated branches have to share the same output shape.
    branches = layers.parallel(
        layers.Input(10) >> layers.Softmax(2),
        layers.Input(20) >> layers.Relu(8),
        layers.Input(20) >> layers.Relu(10),
    )
    with self.assertRaisesRegexp(LayerConnectionError, "expect to have the same shapes"):
        layers.join(branches, layers.GatedAverage())
def test_mixture_of_experts_problem_with_specific_network(self):
    """mixture_of_experts validates its ``networks`` argument.

    Fix: the regex literal with ``\\(`` sequences is now a raw string —
    in a plain string those are invalid escape sequences that warn on
    Python 3 (and become errors in newer versions).
    """
    # Networks have to be passed as a list, not unpacked.
    with self.assertRaisesRegexp(ValueError, "specified as a list"):
        architectures.mixture_of_experts(*self.networks)

    # Each expert has to have exactly one input layer...
    with self.assertRaisesRegexp(ValueError, "has more than one input"):
        last_network = layers.join(
            layers.parallel(
                layers.Input(1),
                layers.Input(2),
            ),
            layers.Concatenate(),
        )
        architectures.mixture_of_experts(
            networks=self.networks + [last_network])

    # ...and exactly one output layer.
    with self.assertRaisesRegexp(ValueError, "has more than one output"):
        last_network = layers.join(
            layers.Input(1),
            layers.parallel(
                layers.Softmax(1),
                layers.Softmax(1),
            ),
        )
        architectures.mixture_of_experts(
            networks=self.networks + [last_network])

    # Raw string: the backslashes escape regex metacharacters for
    # assertRaisesRegexp, they are not string escape sequences.
    error_message = (
        "Each network from the mixture of experts has to "
        "process only 2-dimensional inputs. Network #2.+"
        r"Input layer's shape: \(\?, 1, 1, 1\)"
    )
    with self.assertRaisesRegexp(ValueError, error_message):
        last_network = layers.Input((1, 1, 1))
        architectures.mixture_of_experts(
            networks=self.networks + [last_network])
def test_gated_average_layer_negative_index(self):
    """Negative ``gate_index`` values count branches from the end."""
    # Gate is the last branch in the list.
    net = layers.join(
        layers.parallel(
            layers.Input(20) >> layers.Relu(8),
            layers.Input(20) >> layers.Relu(8),
            layers.Input(10) >> layers.Softmax(2),
        ),
        layers.GatedAverage(gate_index=-1, name='gate'))
    self.assertShapesEqual(net.output_shape, (None, 8))

    # Gate is the first branch, addressed from the end.
    net = layers.join(
        layers.parallel(
            layers.Input(10) >> layers.Softmax(2),
            layers.Input(20) >> layers.Relu(8),
            layers.Input(20) >> layers.Relu(8),
        ),
        layers.GatedAverage(gate_index=-3, name='gate'))
    self.assertShapesEqual(net.output_shape, (None, 8))
def test_select_network_branch(self):
    """``end(name)`` extracts the sub-network leading to one branch."""
    network = layers.join(
        layers.Input(10, name='input-1'),
        layers.parallel(
            layers.Relu(1, name='relu-1'),
            layers.Relu(2, name='relu-2'),
        ))

    self.assertShapesEqual(network.input_shape, (None, 10))
    self.assertShapesEqual(network.output_shape, [(None, 1), (None, 2)])
    self.assertEqual(len(network), 3)

    # First branch: input layer plus one relu.
    branch_1 = network.end('relu-1')
    self.assertShapesEqual(branch_1.input_shape, (None, 10))
    self.assertShapesEqual(branch_1.output_shape, (None, 1))
    self.assertEqual(len(branch_1), 2)

    x = asfloat(np.ones((7, 10)))
    prediction = self.eval(branch_1.output(x))
    self.assertEqual(prediction.shape, (7, 1))

    # Second branch behaves the same way.
    branch_2 = network.end('relu-2')
    self.assertShapesEqual(branch_2.input_shape, (None, 10))
    self.assertShapesEqual(branch_2.output_shape, (None, 2))
    self.assertEqual(len(branch_2), 2)
def Fire(s_1x1, e_1x1, e_3x3, name):
    """Build a SqueezeNet Fire module: a 1x1 squeeze convolution
    followed by parallel 1x1 and 3x3 expand branches, concatenated."""
    squeeze = layers.join(
        layers.Convolution(
            (1, 1, s_1x1),
            padding='SAME',
            name=name + '/squeeze1x1'
        ),
        layers.Relu(),
    )
    expand_1x1 = layers.join(
        layers.Convolution(
            (1, 1, e_1x1),
            padding='SAME',
            name=name + '/expand1x1'
        ),
        layers.Relu(),
    )
    expand_3x3 = layers.join(
        layers.Convolution(
            (3, 3, e_3x3),
            padding='SAME',
            name=name + '/expand3x3'
        ),
        layers.Relu(),
    )
    return layers.join(
        squeeze,
        layers.parallel(expand_1x1, expand_3x3),
        layers.Concatenate(),
    )
def test_gated_average_layer_output_shape(self):
    """Output shape comes from the gated branches, not the gate."""
    net = layers.join(
        layers.parallel(
            layers.Input(10) >> layers.Softmax(2),
            layers.Input(20) >> layers.Relu(8),
            layers.Input(20) >> layers.Relu(8),
        ),
        layers.GatedAverage())
    self.assertShapesEqual(net.output_shape, (None, 8))
def test_concatenate_different_dim_number(self):
    """Branches with different ranks cannot be concatenated."""
    branches = layers.parallel(
        layers.Input((28, 28)),
        layers.Input((28, 28, 1)),
    )
    with self.assertRaisesRegexp(LayerConnectionError, "different number of dimensions"):
        layers.join(branches, layers.Concatenate(axis=1))
def test_concatenate_init_error(self):
    """Dimensions other than the concatenation axis have to match."""
    branches = layers.parallel(
        layers.Input((28, 28, 3)),
        layers.Input((28, 28, 1)),
    )
    # Concatenating over axis=2 requires dimension #3 to agree (3 vs 1).
    with self.assertRaisesRegexp(LayerConnectionError, "don't match over dimension #3"):
        layers.join(branches, layers.Concatenate(axis=2))
def test_fail_many_to_many_connection(self):
    """Joining a two-output graph to a two-input graph is ambiguous
    and has to be rejected."""
    two_outputs = layers.join(
        layers.Input(10),
        layers.parallel(
            layers.Relu(5),
            layers.Relu(4),
        ),
    )
    two_inputs = layers.join(
        layers.parallel(
            layers.Relu(5),
            layers.Relu(4),
        ),
        layers.Concatenate(),
    )
    expected = "Cannot make many to many connection between graphs"
    with self.assertRaisesRegexp(LayerConnectionError, expected):
        layers.join(two_outputs, two_inputs)
def test_raise_exception_for_multioutputs(self):
    """GradientDescent rejects networks with more than one output."""
    multi_output = layers.join(
        layers.Input(5),
        layers.parallel(
            layers.Relu(1),
            layers.Relu(2),
        )
    )
    with self.assertRaisesRegexp(InvalidConnection, "should have one output layer"):
        algorithms.GradientDescent(multi_output)
def test_gated_average_layer_exceptions_index_position(self):
    """Out-of-range ``gate_index`` values raise a connection error."""
    branches = layers.parallel(
        layers.Input(10) >> layers.Softmax(2),
        layers.Input(20) >> layers.Relu(8),
        layers.Input(20) >> layers.Relu(8),
    )
    # One past the last branch.
    with self.assertRaisesRegexp(LayerConnectionError, "Invalid index"):
        layers.join(branches, layers.GatedAverage(gate_index=3))
    # One before the first branch, counting from the end.
    with self.assertRaisesRegexp(LayerConnectionError, "Invalid index"):
        layers.join(branches, layers.GatedAverage(gate_index=-4))
def test_saliency_map_invalid_n_outputs(self):
    """Saliency maps are undefined for multi-output networks."""
    multi_output = layers.join(
        self.network,
        layers.parallel(
            layers.Sigmoid(1),
            layers.Sigmoid(2),
        ))
    message = ("Cannot build saliency map for the network that "
               "has more than one output layer.")
    with self.assertRaisesRegexp(InvalidConnection, message):
        plots.saliency_map(multi_output, self.image)
def test_failed_propagation_for_multiple_inputs(self):
    """A single-input layer rejects a connection from two inputs.

    Fix: the Python 2 expected message is now a raw string — the
    ``\\(`` sequences are regex escapes for ``assertRaisesRegexp``,
    and in a plain string they are invalid escape sequences that
    warn on Python 3 (errors in newer versions).
    """
    inputs = layers.parallel(
        layers.Input(1),
        layers.Input(2),
    )
    # The exact TypeError wording differs between Python versions.
    if six.PY3:
        expected_message = "2 positional arguments but 3 were given."
    else:
        expected_message = (
            r"get_output_shape\(\) takes exactly 2 arguments \(3 given\)")

    with self.assertRaisesRegexp(TypeError, expected_message):
        layers.join(inputs, layers.Relu(3, name='relu'))
def test_saliency_map_invalid_n_inputs(self):
    """Saliency maps are undefined for multi-input networks."""
    multi_input = layers.join(
        layers.parallel(
            layers.Input((28, 28, 3)),
            layers.Input((28, 28, 3)),
        ),
        layers.Concatenate(),
        self.network.start('conv'),
    )
    message = ("Cannot build saliency map for the network that "
               "has more than one input layer.")
    with self.assertRaisesRegexp(InvalidConnection, message):
        plots.saliency_map(multi_input, self.image)
def test_multi_outputs_propagation(self):
    """A single input fans out into three linear outputs."""
    network = layers.join(
        layers.Input(4),
        layers.parallel(
            layers.Linear(2),
            layers.Linear(3),
            layers.Linear(4),
        ))
    x = asfloat(np.random.random((7, 4)))
    outputs = self.eval(network.output(x))

    # Each branch keeps the batch size and has its own output size.
    for n_units, out in zip([2, 3, 4], outputs):
        self.assertEqual((7, n_units), out.shape)
def test_check_if_network_sequential(self):
    """``is_sequential`` is true only for a plain chain of layers."""
    chain = layers.join(
        layers.Input(10),
        layers.Relu(5),
        layers.Relu(3),
    )
    self.assertTrue(chain.is_sequential())

    # A graph with a fork in the middle is not sequential...
    forked = layers.join(
        layers.Input(10),
        layers.parallel(
            layers.Relu(5),
            layers.Relu(3),
        ),
        layers.Concatenate(),
    )
    self.assertFalse(forked.is_sequential())

    # ...and neither are two disconnected branches.
    disconnected = layers.parallel(
        layers.Relu(5),
        layers.Relu(3),
    )
    self.assertFalse(disconnected.is_sequential())
def test_gated_average_layer_output(self):
    """GatedAverage propagation keeps the gated branches' shape."""
    network = layers.join(
        layers.Input(10),
        layers.parallel(
            layers.Softmax(2),
            layers.Relu(8),
            layers.Relu(8),
        ),
        layers.GatedAverage(),
    )
    x = asfloat(np.random.random((20, 10)))
    output = self.eval(network.output(x))
    self.assertShapesEqual(output.shape, (20, 8))
def test_residual_networks(self):
    """Identity branch concatenated with a conv branch keeps the
    spatial size and stacks channels (3 + 8 = 11)."""
    residual = layers.join(
        layers.Input((5, 5, 3)),
        layers.parallel(
            layers.Identity(),
            layers.join(
                layers.Convolution((3, 3, 8), padding='same'),
                layers.Relu(),
            ),
        ),
        layers.Concatenate(),
    )
    self.assertShapesEqual((None, 5, 5, 3), residual.input_shape)
    self.assertShapesEqual((None, 5, 5, 11), residual.output_shape)
def test_one_to_many_parallel_connection_output(self):
    """Branches joined to a shared input each produce their own output."""
    source = layers.Input(4)
    branches = layers.parallel(
        layers.Linear(11),
        layers.Linear(12),
        layers.Linear(13),
    )
    layers.join(source, branches)

    x = asfloat(np.random.random((10, 4)))
    outputs = self.eval(branches.output(x))

    for index, n_units in enumerate([11, 12, 13]):
        self.assertEqual(outputs[index].shape, (10, n_units))
def test_multi_inputs_propagation(self):
    """Inputs can be fed positionally or as a dict keyed by layer name."""
    network = layers.join(
        layers.parallel(
            layers.Input(10, name='input-1'),
            layers.Input(4, name='input-2'),
        ),
        layers.Concatenate(),
    )
    x1 = asfloat(np.random.random((3, 10)))
    x2 = asfloat(np.random.random((3, 4)))

    positional = self.eval(network.output(x1, x2))
    # Dict order is irrelevant; inputs are matched by layer name.
    by_name = self.eval(network.output({'input-2': x2, 'input-1': x1}))

    self.assertEqual((3, 14), positional.shape)
    np.testing.assert_array_almost_equal(positional, by_name)
def test_graph_length(self):
    """``len`` counts layers; extending a graph leaves the original intact."""
    base = layers.join(
        layers.Input(10),
        layers.Relu(3),
    )
    self.assertEqual(2, len(base))

    extended = layers.join(
        base,
        layers.parallel(
            layers.Relu(1),
            layers.Relu(2),
        ),
    )
    # The original graph keeps its size after being extended.
    self.assertEqual(2, len(base))
    self.assertEqual(4, len(extended))
def test_one_to_many_parallel_network_output(self):
    """A joined one-to-many network returns one output per branch."""
    one_to_many = layers.join(
        layers.Input(4),
        layers.parallel(
            layers.Linear(11),
            layers.Linear(12),
            layers.Linear(13),
        ),
    )
    x = asfloat(np.random.random((10, 4)))
    outputs = self.eval(one_to_many.output(x))

    for index, n_units in enumerate([11, 12, 13]):
        self.assertEqual(outputs[index].shape, (10, n_units))
def test_concat_with_late_inputs(self):
    """Concatenated branches with unknown shapes resolve their shapes
    once an input layer is attached afterwards."""
    network = layers.join(
        layers.parallel(
            layers.Relu(),
            layers.Relu(),
        ),
        layers.Concatenate(),
    )
    # Without an input layer every shape stays undefined.
    self.assertShapesEqual(network.input_shape, [None, None])
    self.assertShapesEqual(network.output_shape, None)

    network = layers.Input((10, 10, 3)) >> network
    self.assertShapesEqual(network.input_shape, (None, 10, 10, 3))
    self.assertShapesEqual(network.output_shape, (None, 10, 10, 6))
def test_elementwise_in_network(self):
    """``Elementwise('add')`` sums two parallel relu branches."""
    network = layers.join(
        layers.Input(2),
        layers.parallel(
            layers.Relu(1, weight=1, bias=0),
            layers.Relu(1, weight=2, bias=0),
        ),
        layers.Elementwise('add'),
    )
    self.assertShapesEqual(network.input_shape, (None, 2))
    self.assertShapesEqual(network.output_shape, (None, 1))

    x = asfloat(np.array([[0, 1], [-1, -1]]))
    output = self.eval(network.output(x))
    # [0, 1] -> 1 + 2 = 3; [-1, -1] -> relu clips both branches to 0.
    expected = np.array([[3, 0]]).T
    np.testing.assert_array_almost_equal(expected, output)
def test_many_to_many_parallel_connection_output(self):
    """Independent input branches each propagate their own input.

    Fix: the branches were composed with ``>``, while every other
    test in this file joins layers with ``>>``; use the file's
    standard connection operator for consistency.
    """
    connection = layers.parallel(
        layers.Input(1) >> layers.Linear(11),
        layers.Input(2) >> layers.Linear(12),
        layers.Input(3) >> layers.Linear(13),
    )
    input_value_1 = asfloat(np.random.random((10, 1)))
    input_value_2 = asfloat(np.random.random((20, 2)))
    input_value_3 = asfloat(np.random.random((30, 3)))

    actual_output = self.eval(
        connection.output(input_value_1, input_value_2, input_value_3))

    # Each branch keeps its own batch size and output dimension.
    self.assertEqual(actual_output[0].shape, (10, 11))
    self.assertEqual(actual_output[1].shape, (20, 12))
    self.assertEqual(actual_output[2].shape, (30, 13))
def test_concatenate_conv_layers(self):
    """Conv branches with equal spatial output concatenate over channels."""
    network = layers.join(
        layers.Input((28, 28, 3)),
        layers.parallel(
            # 5x5 conv and two stacked 3x3 convs both end at 24x24.
            layers.Convolution((5, 5, 7)),
            layers.join(
                layers.Convolution((3, 3, 1)),
                layers.Convolution((3, 3, 4)),
            ),
        ),
        layers.Concatenate(axis=-1))
    self.assertShapesEqual((None, 24, 24, 11), network.output_shape)

    x = asfloat(np.random.random((5, 28, 28, 3)))
    output = self.eval(network.output(x))
    self.assertEqual((5, 24, 24, 11), output.shape)
def test_gated_average_layer_multi_dimensional_inputs(self):
    """GatedAverage accepts 4-dimensional gated branches."""
    network = layers.join(
        layers.Input((5, 5, 1)),
        layers.parallel(
            # Gate branch is flattened into a 2-dimensional softmax.
            layers.Reshape() >> layers.Softmax(2),
            layers.Convolution((2, 2, 3)),
            layers.Convolution((2, 2, 3)),
        ),
        layers.GatedAverage(),
    )
    self.assertShapesEqual(network.input_shape, (None, 5, 5, 1))
    self.assertShapesEqual(network.output_shape, (None, 4, 4, 3))

    x = asfloat(np.random.random((8, 5, 5, 1)))
    output = self.eval(network.output(x))
    self.assertEqual(output.shape, (8, 4, 4, 3))
def test_elementwise_custom_function(self):
    """``Elementwise`` accepts a custom merge function."""
    def weighted_sum(a, b):
        # 20% of the first branch, 80% of the second one.
        return 0.2 * a + 0.8 * b

    network = layers.join(
        layers.Input(2),
        layers.parallel(
            layers.Relu(1, weight=1, bias=0),
            layers.Relu(1, weight=2, bias=0),
        ),
        layers.Elementwise(weighted_sum),
    )
    self.assertShapesEqual(network.input_shape, (None, 2))
    self.assertShapesEqual(network.output_shape, (None, 1))

    x = asfloat(np.array([[0, 1], [-1, -1]]))
    output = self.eval(network.output(x))
    # [0, 1] -> 0.2 * 1 + 0.8 * 2 = 1.8; [-1, -1] -> both branches 0.
    expected = np.array([[1.8, 0]]).T
    np.testing.assert_array_almost_equal(expected, output)