def test_elementwise_in_connections(self):
    """Elementwise(T.add) must sum the outputs of two parallel branches."""
    input_layer = layers.Input(2)
    hidden_layer_1 = layers.Relu(1, weight=init.Constant(1), bias=init.Constant(0))
    hidden_layer_2 = layers.Relu(1, weight=init.Constant(2), bias=init.Constant(0))
    elem_layer = layers.Elementwise(merge_function=T.add)

    # Two joins that share the same input and merge layer build the fork.
    connection = layers.join(input_layer, hidden_layer_1, elem_layer)
    connection = layers.join(input_layer, hidden_layer_2, elem_layer)
    connection.initialize()

    self.assertEqual(elem_layer.output_shape, (1,))

    x = T.matrix()
    predict = theano.function([x], connection.output(x))

    x_test = asfloat(np.array([
        [0, 1],
        [-1, -1],
    ]))
    actual_output = predict(x_test)

    # Row 1: relu(1*1) + relu(2*1) = 3; row 2: both branches clip to 0.
    expected_output = np.array([
        [3],
        [0],
    ])
    np.testing.assert_array_almost_equal(expected_output, actual_output)
def test_parallel_layer(self):
    """Concatenated parallel convolution branches feed a pooling layer."""
    input_layer = layers.Input((3, 8, 8))
    parallel_layer = layers.join(
        [[
            layers.Convolution((11, 5, 5)),
        ], [
            layers.Convolution((10, 3, 3)),
            layers.Convolution((5, 3, 3)),
        ]],
        layers.Concatenate(),
    )
    output_layer = layers.MaxPooling((2, 2))

    conn = layers.join(input_layer, parallel_layer)
    output_connection = layers.join(conn, output_layer)

    x = T.tensor4()
    predict_parallel = theano.function([x], conn.output(x))

    x_tensor4 = asfloat(np.random.random((10, 3, 8, 8)))
    parallel_output = predict_parallel(x_tensor4)
    # 11 filters from the first branch plus 5 from the second one.
    self.assertEqual(parallel_output.shape, (10, 11 + 5, 4, 4))

    predict_full = theano.function([x], output_connection.output(x))
    final_output = predict_full(x_tensor4)
    self.assertEqual(final_output.shape, (10, 11 + 5, 2, 2))
def ResidualUnit(n_in_filters, n_out_filters, stride, has_branch=False):
    """Bottleneck residual unit: 1x1 -> 3x3 -> 1x1 convolutions plus a shortcut.

    When ``has_branch`` is True the shortcut is a projection (1x1 convolution
    with batch norm); otherwise an empty list is used, which acts as an
    identity residual connection.
    """
    main_branch = layers.join(
        layers.Convolution((n_in_filters, 1, 1), stride=stride, bias=None),
        layers.BatchNorm(),
        layers.Relu(),

        layers.Convolution((n_in_filters, 3, 3), padding=1, bias=None),
        layers.BatchNorm(),
        layers.Relu(),

        layers.Convolution((n_out_filters, 1, 1), bias=None),
        layers.BatchNorm(),
    )

    if has_branch:
        shortcut = layers.join(
            layers.Convolution((n_out_filters, 1, 1), stride=stride, bias=None),
            layers.BatchNorm(),
        )
    else:
        shortcut = []

    return layers.join(
        [main_branch, shortcut],
        layers.Elementwise() > layers.Relu(),
    )
def test_global_pooling_output_shape(self):
    """Global pooling has no output shape until it gets connected."""
    input_layer = layers.Input((3, 8, 8))
    global_pooling_layer = layers.GlobalPooling()

    # Without an input connection the shape cannot be inferred.
    self.assertEqual(global_pooling_layer.output_shape, None)

    layers.join(input_layer, global_pooling_layer)
    # After joining, only the channel dimension is left.
    self.assertEqual(global_pooling_layer.output_shape, (3,))
def test_elementwise_init_error(self):
    """Elementwise rejects inputs whose shapes do not match."""
    input_layer_1 = layers.Input(10)
    input_layer_2 = layers.Input(20)
    elem_layer = layers.Elementwise()

    layers.join(input_layer_1, elem_layer)
    # Second input has a different shape; joining must fail.
    with self.assertRaises(LayerConnectionError):
        layers.join(input_layer_2, elem_layer)
def test_concatenate_init_error(self):
    """Concatenate rejects inputs that differ on a non-concatenation axis."""
    input_layer_1 = layers.Input((3, 28, 28))
    input_layer_2 = layers.Input((1, 28, 28))
    concat_layer = layers.Concatenate(axis=2)

    layers.join(input_layer_1, concat_layer)
    # Inputs differ along axis 0 while concatenating over axis 2.
    with self.assertRaises(LayerConnectionError):
        layers.join(input_layer_2, concat_layer)
def test_parallel_with_joined_connections(self):
    """Joining pre-built inline connections in parallel must not raise."""
    # Should work without errors
    layers.join(
        [
            layers.Convolution((11, 5, 5)) > layers.Relu(),
            layers.Convolution((10, 3, 3)) > layers.Relu(),
        ],
        layers.Concatenate() > layers.Relu(),
    )
def test_count_parameters_single_layer(self):
    """count_parameters on one layer counts only that layer's weight and bias."""
    hidden_layer = layers.Sigmoid(5)
    layers.join(
        layers.Input(10),
        hidden_layer,
        layers.Sigmoid(2),
    )

    n_parameters = layers.count_parameters(hidden_layer)
    # 10x5 weight matrix plus 5 bias terms.
    self.assertEqual(n_parameters, 10 * 5 + 5)
def test_pooling_invalid_connections_exceptions(self):
    """Pooling layers validate both their input shape and their parameters."""
    # Invalid input shape: pooling needs a spatial input, not a vector.
    input_layer = layers.Input(10)
    max_pool_layer = layers.MaxPooling((2, 2))
    with self.assertRaises(LayerConnectionError):
        layers.join(input_layer, max_pool_layer)

    # Invalid combination of parameters
    with self.assertRaises(ValueError):
        layers.MaxPooling((2, 2), ignore_border=False, padding=1)
def test_network_representation_for_non_feedforward(self):
    """Non-feedforward networks are summarized as '[... N layers ...]'."""
    input_layer = layers.Input(10)
    hidden_layer_1 = layers.Sigmoid(20)
    hidden_layer_2 = layers.Sigmoid(20)
    output_layer = layers.Concatenate()

    connection = layers.join(input_layer, hidden_layer_1, output_layer)
    connection = layers.join(input_layer, hidden_layer_2, output_layer)

    network = algorithms.GradientDescent(connection)
    # Input, two sigmoid branches and the concatenation: 4 layers total.
    self.assertIn("[... 4 layers ...]", str(network))
def test_network_architecture_output_exception(self):
    """architecture() is undefined for non-feedforward networks."""
    input_layer = layers.Input(10)
    hidden_layer_1 = layers.Sigmoid(20)
    hidden_layer_2 = layers.Sigmoid(20)
    output_layer = layers.Concatenate()

    connection = layers.join(input_layer, hidden_layer_1, output_layer)
    connection = layers.join(input_layer, hidden_layer_2, output_layer)

    network = algorithms.GradientDescent(connection)
    with self.assertRaises(TypeError):
        network.architecture()
def test_layer_name_for_connection(self):
    """Joined layers get auto-generated names and matching parameter names."""
    input_layer = layers.Input(1)
    hidden_layer = layers.Sigmoid(5)
    output_layer = layers.Sigmoid(10)
    layers.join(input_layer, hidden_layer, output_layer)

    # Names are numbered in the order the layers were created.
    self.assertEqual(hidden_layer.name, 'sigmoid-1')
    self.assertEqual(hidden_layer.weight.name, 'layer:sigmoid-1/weight')
    self.assertEqual(hidden_layer.bias.name, 'layer:sigmoid-1/bias')

    self.assertEqual(output_layer.name, 'sigmoid-2')
    self.assertEqual(output_layer.weight.name, 'layer:sigmoid-2/weight')
    self.assertEqual(output_layer.bias.name, 'layer:sigmoid-2/bias')
def Inception_3(pooling):
    """Inception block with four parallel branches.

    ``pooling`` selects the pooling branch implementation and must be
    either ``'max'`` or ``'average'``.
    """
    if pooling not in ('max', 'average'):
        raise ValueError("Invalid pooling option: {}".format(pooling))

    if pooling == 'max':
        Pooling = layers.MaxPooling
    else:
        Pooling = partial(layers.AveragePooling, mode='exclude_padding')

    return layers.join(
        [[
            ConvReluBN((320, 1, 1)),
        ], [
            ConvReluBN((384, 1, 1)),
            # The branch itself forks into 1x3 and 3x1 convolutions.
            [[
                ConvReluBN((384, 1, 3), padding=(0, 1)),
            ], [
                ConvReluBN((384, 3, 1), padding=(1, 0)),
            ]],
        ], [
            ConvReluBN((448, 1, 1)),
            ConvReluBN((384, 3, 3), padding=1),
            [[
                ConvReluBN((384, 1, 3), padding=(0, 1)),
            ], [
                ConvReluBN((384, 3, 1), padding=(1, 0)),
            ]],
        ], [
            Pooling((3, 3), stride=(1, 1), padding=1),
            ConvReluBN((192, 1, 1)),
        ]],
        layers.Concatenate(),
    )
def test_simple_storage(self):
    """storage.save writes a pickle keyed by layer name with weight/bias arrays."""
    connection = layers.join(
        layers.Input(10),
        layers.Sigmoid(5),
        layers.Sigmoid(2),
    )

    with tempfile.NamedTemporaryFile() as temp:
        storage.save(connection, temp.name)
        temp.file.seek(0)

        filesize_after = os.path.getsize(temp.name)
        self.assertGreater(filesize_after, 0)

        data = pickle.load(temp.file)

        for layer_name in ('sigmoid-1', 'sigmoid-2'):
            self.assertIn(layer_name, data)
            self.assertIn('weight', data[layer_name])
            self.assertIn('bias', data[layer_name])

        # Shapes follow the 10 -> 5 -> 2 architecture.
        self.assertEqual(data['sigmoid-1']['weight'].shape, (10, 5))
        self.assertEqual(data['sigmoid-1']['bias'].shape, (5,))
        self.assertEqual(data['sigmoid-2']['weight'].shape, (5, 2))
        self.assertEqual(data['sigmoid-2']['bias'].shape, (2,))
def clean_layers(connection):
    """
    Normalize a layer connection specification into one format and
    validate it.

    Parameters
    ----------
    connection : list, tuple or object
        Layer connection in one of the supported formats: a sequence of
        integers (layer sizes), a sequence of layer/connection objects,
        or an already-built connection object.

    Returns
    -------
    object
        Cleaned layers connection.
    """
    if isinstance(connection, tuple):
        connection = list(connection)

    # A sequence of plain integers is a shorthand for layer sizes.
    # NOTE(review): ``all()`` over an empty sequence is True, so an empty
    # list would also be sent through ``generate_layers`` — confirm that
    # is intended.  A non-iterable ``connection`` would raise here.
    if all(isinstance(element, int) for element in connection):
        connection = generate_layers(connection)

    islist = isinstance(connection, list)
    layer_types = (layers.BaseLayer, LayerConnection)

    # Only the first element is inspected; mixed lists are assumed to be
    # homogeneous.  Would raise IndexError on an empty list.
    if islist and isinstance(connection[0], layer_types):
        connection = layers.join(*connection)

    return connection
def test_connections_with_complex_parallel_relations(self):
    """Nested parallel branches can be built before the input is attached."""
    input_layer = layers.Input((3, 5, 5))
    connection = layers.join(
        [[
            layers.Convolution((8, 1, 1)),
        ], [
            layers.Convolution((4, 1, 1)),
            [[
                layers.Convolution((2, 1, 3), padding=(0, 1)),
            ], [
                layers.Convolution((2, 3, 1), padding=(1, 0)),
            ]],
        ], [
            layers.Convolution((8, 1, 1)),
            layers.Convolution((4, 3, 3), padding=1),
            [[
                layers.Convolution((2, 1, 3), padding=(0, 1)),
            ], [
                layers.Convolution((2, 3, 1), padding=(1, 0)),
            ]],
        ], [
            layers.MaxPooling((3, 3), stride=(1, 1), padding=1),
            layers.Convolution((8, 1, 1)),
        ]],
        layers.Concatenate(),
    )
    self.assertEqual(connection.input_shape, [None, None, None, None])

    # Connect the input at the very end to make sure that parallel
    # connections can be defined without known input shapes.
    connection = input_layer > connection
    # Concatenated channels: 8 + (2 + 2) + (2 + 2) + 8 = 24.
    self.assertEqual((24, 5, 5), connection.output_shape)
def test_parallel_many_to_many_connection(self):
    """Joining two parallel groups creates a full bipartite layer graph."""
    relu_layer_1 = layers.Relu(1)
    sigmoid_layer_1 = layers.Sigmoid(1)

    relu_layer_2 = layers.Relu(2)
    sigmoid_layer_2 = layers.Sigmoid(2)

    connection = layers.join(
        [
            sigmoid_layer_1,
            relu_layer_1,
        ],
        [
            sigmoid_layer_2,
            relu_layer_2,
        ],
    )
    self.assertEqual(connection.input_shape, [None, None])
    self.assertEqual(connection.output_shape, [(2,), (2,)])

    graph = connection.graph

    # Each first-group layer connects forward to both second-group layers.
    for layer in [relu_layer_1, sigmoid_layer_1]:
        self.assertEqual(len(graph.forward_graph[layer]), 2)
        self.assertEqual(len(graph.backward_graph[layer]), 0)

    # Each second-group layer receives input from both first-group layers.
    for layer in [relu_layer_2, sigmoid_layer_2]:
        self.assertEqual(len(graph.forward_graph[layer]), 0)
        self.assertEqual(len(graph.backward_graph[layer]), 2)
def test_multi_input_exception(self):
    """Networks with more than one input layer cannot be trained directly."""
    connection = layers.join([
        [layers.Input(10)],
        [layers.Input(10)],
    ]) > layers.Concatenate()

    with self.assertRaises(InvalidConnection):
        ConstructibleNetwork(connection)
def test_storage_load_invalid_source(self):
    """storage.load rejects source objects it does not recognize."""
    connection = layers.join(
        layers.Input(10),
        layers.Sigmoid(5),
        layers.Sigmoid(2),
    )

    with self.assertRaisesRegexp(TypeError, "Source type is unknown"):
        storage.load(connection, object)
def test_count_parameters(self):
    """count_parameters sums weights and biases over the whole connection."""
    connection = layers.join(
        layers.Input(10),
        layers.Sigmoid(5),
        layers.Sigmoid(2),
    )

    n_parameters = layers.count_parameters(connection)
    # (10x5 weights + 5 biases) + (5x2 weights + 2 biases).
    self.assertEqual(n_parameters, (10 * 5 + 5) + (5 * 2 + 2))
def test_parallel_connection_output_exceptions(self):
    """output() requires exactly one input variable per parallel branch."""
    connection = layers.join([
        [layers.Input(10) > layers.Sigmoid(1)],
        [layers.Input(20) > layers.Sigmoid(2)],
        [layers.Input(30) > layers.Sigmoid(3)],
    ])

    with self.assertRaises(ValueError):
        # Received only 2 inputs instead of 3
        connection.output(T.matrix(), T.matrix())
def test_is_sequential_connection(self):
    """Plain chains and single layers both count as sequential."""
    connection1 = layers.join(
        layers.Input(10),
        layers.Sigmoid(5),
        layers.Sigmoid(1),
    )
    self.assertTrue(is_sequential(connection1))

    # A standalone layer is trivially sequential.
    layer = layers.Input(10)
    self.assertTrue(is_sequential(layer))
def test_storage_load_unknown_parameter(self):
    """Missing parameters either raise or get ignored per ``ignore_missed``."""
    connection = layers.join(
        layers.Input(10),
        layers.Relu(1),
    )

    with self.assertRaisesRegexp(ValueError, "Cannot load parameters"):
        storage.load(connection, {}, ignore_missed=False)

    # Nothing happens in case if we ignore it
    storage.load(connection, {}, ignore_missed=True)
def test_connection_inside_connection_mlp(self):
    """Inline sub-connections flatten into one sequential MLP."""
    connection = layers.join(
        layers.Input(2),
        layers.Relu(10),
        layers.Relu(4) > layers.Relu(7),
        layers.Relu(3) > layers.Relu(1),
    )

    expected_sizes = [2, 10, 4, 7, 3, 1]
    for layer, expected_size in zip(connection, expected_sizes):
        self.assertEqual(expected_size, layer.size)
def test_concatenate_conv_layers(self):
    """Concatenate merges convolution branches along the channel axis."""
    input_layer = layers.Input((3, 28, 28))
    hidden_layer_1 = layers.Convolution((7, 5, 5))
    hidden_layer_21 = layers.Convolution((1, 3, 3))
    hidden_layer_22 = layers.Convolution((4, 3, 3))
    concat_layer = layers.Concatenate(axis=1)

    connection = layers.join(input_layer, hidden_layer_1, concat_layer)
    connection = layers.join(input_layer, hidden_layer_21,
                             hidden_layer_22, concat_layer)
    connection.initialize()

    # 7 channels from the 5x5 branch plus 4 from the stacked 3x3 branch.
    self.assertEqual((11, 24, 24), concat_layer.output_shape)

    x = T.tensor4()
    predict = theano.function([x], connection.output(x))

    x_tensor4 = asfloat(np.random.random((5, 3, 28, 28)))
    actual_output = predict(x_tensor4)
    self.assertEqual((5, 11, 24, 24), actual_output.shape)
def test_batch_norm_between_layers(self):
    """BatchNorm placed between dense layers keeps the output shape intact."""
    connection = layers.join(
        layers.Input(10),
        layers.Relu(40),
        layers.BatchNorm(),
        layers.Relu(1),
    )

    input_value = np.random.random((30, 10))
    output_value = connection.output(input_value).eval()
    self.assertEqual(output_value.shape, (30, 1))
def test_parallel_layer_with_residual_connections(self):
    """An empty parallel branch works as an identity (residual) connection."""
    connection = layers.join(
        layers.Input((3, 8, 8)),
        [[
            layers.Convolution((7, 1, 1)),
            layers.Relu()
        ], [
            # Residual connection
        ]],
        layers.Concatenate(),
    )
    # 7 convolution channels + 3 pass-through input channels.
    self.assertEqual(connection.output_shape, (10, 8, 8))
def test_batch_norm_gamma_beta_params(self):
    """BatchNorm output statistics follow the gamma (scale) and beta (shift)."""
    default_beta = -3.14
    default_gamma = 4.3
    connection = layers.join(
        layers.Input(10),
        layers.BatchNorm(gamma=default_gamma, beta=default_beta)
    )

    input_value = theano.shared(value=np.random.random((30, 10)))
    output_value = connection.output(input_value).eval()

    # After normalization: mean ~= beta, std ~= gamma.
    self.assertAlmostEqual(output_value.mean(), default_beta, places=3)
    self.assertAlmostEqual(output_value.std(), default_gamma, places=3)
def test_inline_connection_with_parallel_connection(self):
    """The ``>`` operator composes parallel branches with sequential parts."""
    left_branch = layers.join(
        layers.Convolution((32, 3, 3)),
        layers.Relu(),
        layers.MaxPooling((2, 2)),
    )
    right_branch = layers.join(
        layers.Convolution((16, 7, 7)),
        layers.Relu(),
    )

    input_layer = layers.Input((3, 10, 10))
    concat = layers.Concatenate()

    network_concat = input_layer > [left_branch, right_branch] > concat
    network = network_concat > layers.Reshape() > layers.Softmax()

    self.assertEqual(network_concat.input_shape, (3, 10, 10))
    # 32 + 16 channels over the 4x4 maps produced by both branches.
    self.assertEqual(network_concat.output_shape, (48, 4, 4))

    self.assertEqual(network.input_shape, (3, 10, 10))
    self.assertEqual(network.output_shape, (768,))
def test_standalone_parallel_connection(self):
    """A parallel-only connection exposes per-branch input/output shapes."""
    connection = layers.join([
        [layers.Input(10) > layers.Sigmoid(1)],
        [layers.Input(20) > layers.Sigmoid(2)],
    ])
    self.assertEqual(connection.input_shape, [(10,), (20,)])
    self.assertEqual(connection.output_shape, [(1,), (2,)])

    # One shared variable can be broadcast to both branches...
    outputs = connection.output(T.matrix())
    self.assertEqual(len(outputs), 2)

    # ...or one variable can be supplied per branch.
    outputs = connection.output(T.matrix(), T.matrix())
    self.assertEqual(len(outputs), 2)
def test_upscale_layer_shape(self):
    """Upscale multiplies the spatial dimensions by the per-axis scale."""
    Case = namedtuple("Case", "scale expected_shape")
    testcases = (
        Case(scale=(2, 2), expected_shape=(None, 28, 28, 1)),
        Case(scale=(2, 1), expected_shape=(None, 28, 14, 1)),
        Case(scale=(1, 2), expected_shape=(None, 14, 28, 1)),
        Case(scale=(1, 1), expected_shape=(None, 14, 14, 1)),
        Case(scale=(1, 10), expected_shape=(None, 14, 140, 1)),
    )

    for testcase in testcases:
        network = layers.join(
            layers.Input((14, 14, 1)),
            layers.Upscale(testcase.scale),
        )
        self.assertShapesEqual(
            network.output_shape,
            testcase.expected_shape,
            msg="scale: {}".format(testcase.scale))
def test_dilated_convolution(self):
    """Dilated convolution with unit weights sums over the dilated receptive field."""
    network = layers.join(
        layers.Input((6, 6, 1)),
        layers.Convolution((3, 3, 1), dilation=2, weight=1, bias=None),
    )

    input_value = asfloat(np.arange(36).reshape(1, 6, 6, 1))
    actual_output = self.eval(network.output(input_value))

    self.assertShapesEqual(actual_output.shape, (1, 2, 2, 1))
    self.assertShapesEqual(actual_output.shape[1:], network.output_shape[1:])

    actual_output = actual_output[0, :, :, 0]
    expected_output = np.array([
        [126, 135],  # every row value adds +1 per filter value (+9)
        [180, 189],  # every col value adds +6 per filter value (+54)
    ])
    np.testing.assert_array_almost_equal(actual_output, expected_output)
def test_change_output_layer(self):
    """end() cuts the network so a hidden layer becomes the output."""
    network = layers.join(
        layers.Input(10, name='input-1'),
        layers.Relu(5, name='relu-1'),
        layers.Relu(1, name='relu-2'),
    )
    self.assertShapesEqual(network.input_shape, (None, 10))
    self.assertShapesEqual(network.output_shape, (None, 1))
    self.assertEqual(len(network), 3)

    relu_1_network = network.end('relu-1')
    self.assertShapesEqual(relu_1_network.input_shape, (None, 10))
    self.assertShapesEqual(relu_1_network.output_shape, (None, 5))
    self.assertEqual(len(relu_1_network.layers), 2)

    x_test = asfloat(np.ones((7, 10)))
    y_predicted = self.eval(relu_1_network.output(x_test))
    self.assertEqual(y_predicted.shape, (7, 5))
def test_max_pooling(self):
    """2x2 max pooling picks the maximum of each non-overlapping window."""
    X = asfloat(np.array([
        [1, 2, 3, -1],
        [4, -6, 3, 1],
        [0, 0, 1, 0],
        [0, -1, 0, 0],
    ])).reshape(1, 4, 4, 1)

    expected_output = asfloat(np.array([
        [4, 3],
        [0, 1],
    ])).reshape(1, 2, 2, 1)

    network = layers.join(
        layers.Input((4, 4, 1)),
        layers.MaxPooling((2, 2)),
    )
    actual_output = self.eval(network.output(X))
    np.testing.assert_array_almost_equal(actual_output, expected_output)
def test_conv_with_custom_int_padding(self):
    """Integer padding pads every spatial border by the same amount."""
    network = layers.join(
        layers.Input((5, 5, 1)),
        layers.Convolution((3, 3, 1), bias=0, weight=1, padding=2),
    )

    x = asfloat(np.ones((1, 5, 5, 1)))
    # With unit weights the output counts how many input ones overlap
    # each filter position; padding=2 grows the 5x5 map to 7x7.
    expected_output = np.array([
        [1, 2, 3, 3, 3, 2, 1],
        [2, 4, 6, 6, 6, 4, 2],
        [3, 6, 9, 9, 9, 6, 3],
        [3, 6, 9, 9, 9, 6, 3],
        [3, 6, 9, 9, 9, 6, 3],
        [2, 4, 6, 6, 6, 4, 2],
        [1, 2, 3, 3, 3, 2, 1],
    ]).reshape((1, 7, 7, 1))

    actual_output = self.eval(network.output(x))
    np.testing.assert_array_almost_equal(expected_output, actual_output)
def test_cut_input_layers_in_sequence(self):
    """Chained start() calls cut the network down to its last layer."""
    network = layers.join(
        layers.Input(10, name='input-1'),
        layers.Relu(5, name='relu-1'),
        layers.Relu(1, name='relu-2'),
    )
    self.assertEqual(network.input_shape, (10, ))
    self.assertEqual(network.output_shape, (1, ))
    self.assertEqual(len(network), 3)

    cutted_network = network.start('relu-1').start('relu-2')
    self.assertEqual(cutted_network.input_shape, (5, ))
    self.assertEqual(cutted_network.output_shape, (1, ))
    self.assertEqual(len(cutted_network), 1)

    x_test = asfloat(np.ones((7, 5)))
    y_predicted = self.eval(cutted_network.output(x_test))
    self.assertEqual(y_predicted.shape, (7, 1))
def test_embedding_layer(self):
    """Embedding maps integer indices to the matching weight-matrix rows."""
    weight = np.arange(10).reshape((5, 2))

    input_layer = layers.Input(1)
    embedding_layer = layers.Embedding(5, 2, weight=weight)
    connection = layers.join(input_layer, embedding_layer)
    connection.initialize()

    input_vector = asfloat(np.array([[0, 1, 4]]).T)
    # Rows 0, 1 and 4 of the weight matrix, one per input sample.
    expected_output = np.array([
        [[0, 1]],
        [[2, 3]],
        [[8, 9]],
    ])

    actual_output = self.eval(connection.output(input_vector))
    self.assertEqual(embedding_layer.output_shape, (1, 2))
    np.testing.assert_array_equal(expected_output, actual_output)
def test_cut_output_layers_in_sequence(self):
    """Chained end() calls cut the network down to the input layer alone."""
    network = layers.join(
        layers.Input(10, name='input-1'),
        layers.Relu(5, name='relu-1'),
        layers.Relu(1, name='relu-2'),
    )
    self.assertEqual(network.input_shape, (10, ))
    self.assertEqual(network.output_shape, (1, ))
    self.assertEqual(len(network), 3)

    cutted_network = network.end('relu-1').end('input-1')
    self.assertEqual(cutted_network.input_shape, (10, ))
    self.assertEqual(cutted_network.output_shape, (10, ))
    self.assertEqual(len(cutted_network), 1)

    # Only the input layer remains, so prediction is the identity.
    predict = cutted_network.compile()
    x_test = asfloat(np.ones((7, 10)))
    y_predicted = predict(x_test)
    self.assertEqual(y_predicted.shape, (7, 10))
def test_custom_layer(self):
    """A custom layer can refine its input shape when variables are created."""
    class NewLayer(layers.BaseLayer):
        def __init__(self, *args, **kwargs):
            super(NewLayer, self).__init__(*args, **kwargs)
            # Start with a fully unknown 3-D shape.
            self._input_shape = tf.TensorShape((None, None, None))

        def create_variables(self, input_shape):
            self.input_shape = input_shape

        def output(self, input):
            return input

    new_layer = NewLayer()
    network = layers.join(layers.Input((10, 5)), new_layer)

    # Before variable creation the shape is still unknown.
    self.assertShapesEqual(network.output_shape, None)
    self.assertShapesEqual(new_layer.input_shape, (None, None, None))

    network.create_variables()
    self.assertShapesEqual(network.output_shape, None)
    # create_variables propagated the concrete input shape.
    self.assertShapesEqual(new_layer.input_shape, (None, 10, 5))
def test_change_input_layer(self):
    """start() makes a hidden layer the new entry point of the network."""
    network = layers.join(
        layers.Input(10, name='input-1'),
        layers.Relu(5, name='relu-1'),
        layers.Relu(1, name='relu-2'),
    )
    self.assertEqual(network.input_shape, (10, ))
    self.assertEqual(network.output_shape, (1, ))
    self.assertEqual(len(network), 3)

    relu_1_network = network.start('relu-1')
    self.assertEqual(relu_1_network.input_shape, (10, ))
    self.assertEqual(relu_1_network.output_shape, (1, ))
    self.assertEqual(len(relu_1_network), 2)

    predict = relu_1_network.compile()
    x_test = asfloat(np.ones((7, 10)))
    y_predicted = predict(x_test)
    self.assertEqual(y_predicted.shape, (7, 1))
def test_parallel_connection_initialize_method(self):
    """initialize() on a parallel connection reaches every nested layer."""
    class CustomLayer(layers.BaseLayer):
        initialized = False

        def initialize(self):
            self.initialized = True

    connections = layers.join([
        [CustomLayer(), CustomLayer(), CustomLayer()],
        [CustomLayer(), CustomLayer(), CustomLayer()],
        [CustomLayer(), CustomLayer(), CustomLayer()],
    ])
    connections.initialize()

    for connection in connections:
        for layer in connection:
            self.assertTrue(layer.initialized, msg=layer.name)
def test_deconvolution_tuple_padding(self):
    """Deconvolution with tuple padding inverts the matching convolution.

    Checks per-layer output shapes and the final reconstructed shape.
    """
    network = layers.join(
        layers.Input((10, 10, 3)),
        layers.Convolution((3, 3, 7), padding=(9, 3)),
        layers.Deconvolution((3, 3, 4), padding=(9, 3)),
    )

    shapes = network.output_shapes_per_layer
    shapes = {l: shape_to_tuple(s) for l, s in shapes.items()}

    # Bug fix: assertSequenceEqual iterates dicts as sequences of keys and
    # never compares the values, so the expected shapes were silently
    # unchecked.  assertDictEqual compares keys AND values, matching the
    # sibling test_deconv_unknown_input_width_and_height.
    self.assertDictEqual(
        shapes, {
            network.layers[0]: (None, 10, 10, 3),
            network.layers[1]: (None, 26, 14, 7),
            network.layers[2]: (None, 10, 10, 4),
        })

    input_value = asfloat(np.random.random((1, 10, 10, 3)))
    actual_output = self.eval(network.output(input_value))
    self.assertEqual(actual_output.shape, (1, 10, 10, 4))
def test_get_layer_by_name_from_connection(self):
    """layer() fetches layers by name and validates its argument."""
    network = layers.join(
        layers.Input(10, name='input-1'),
        layers.Relu(8, name='relu-0'),
        layers.Relu(5, name='relu-1'),
    )

    reul0 = network.layer('relu-0')
    self.assertShapesEqual(reul0.output_shape, (None, 8))

    reul1 = network.layer('relu-1')
    self.assertShapesEqual(reul1.output_shape, (None, 5))

    message = "Cannot find layer with name 'some-layer-name'"
    with self.assertRaisesRegexp(NameError, message):
        network.layer('some-layer-name')

    message = "Layer name expected to be a string"
    with self.assertRaisesRegexp(ValueError, message):
        network.layer(object)
def test_upscale_layer(self):
    """Upscale((3, 2)) repeats rows 3 times and columns 2 times."""
    input_value = np.array([
        [1, 2, 3, 4],
        [5, 6, 7, 8],
    ]).reshape((1, 2, 4, 1))

    expected_output = np.array([
        [1, 1, 2, 2, 3, 3, 4, 4],
        [1, 1, 2, 2, 3, 3, 4, 4],
        [1, 1, 2, 2, 3, 3, 4, 4],
        [5, 5, 6, 6, 7, 7, 8, 8],
        [5, 5, 6, 6, 7, 7, 8, 8],
        [5, 5, 6, 6, 7, 7, 8, 8],
    ]).reshape((1, 6, 8, 1))

    upscale_layer = layers.Upscale((3, 2))
    network = layers.join(layers.Input((2, 4, 1)), upscale_layer)
    self.assertShapesEqual(network.output_shape, (None, 6, 8, 1))

    actual_output = self.eval(network.output(asfloat(input_value)))
    np.testing.assert_array_almost_equal(
        asfloat(expected_output),
        actual_output)
def Inception_2(conv_filters):
    """Inception block with factorized 7x7 convolutions.

    ``conv_filters`` is a nested sequence of filter counts, one inner
    sequence per branch.
    """
    return layers.join(
        [[
            ConvReluBN((conv_filters[0][0], 1, 1)),
        ], [
            ConvReluBN((conv_filters[1][0], 1, 1)),
            ConvReluBN((conv_filters[1][1], 1, 7), padding=(0, 3)),
            ConvReluBN((conv_filters[1][2], 7, 1), padding=(3, 0)),
        ], [
            ConvReluBN((conv_filters[2][0], 1, 1)),
            ConvReluBN((conv_filters[2][1], 7, 1), padding=(3, 0)),
            ConvReluBN((conv_filters[2][2], 1, 7), padding=(0, 3)),
            ConvReluBN((conv_filters[2][3], 7, 1), padding=(3, 0)),
            ConvReluBN((conv_filters[2][4], 1, 7), padding=(0, 3)),
        ], [
            layers.AveragePooling((3, 3), stride=(1, 1), padding=1,
                                  mode='exclude_padding'),
            ConvReluBN((conv_filters[3][0], 1, 1)),
        ]],
        layers.Concatenate(),
    )
def test_fail_when_cycle_created(self):
    """Joining a network back onto itself must be rejected as a cycle."""
    network = layers.join(
        layers.Input(10),
        layers.Relu(10),
    )
    error_message = ("Cannot define connection between layers, "
                     "because it creates cycle in the graph")

    # Self-join of the whole network.
    with self.assertRaisesRegexp(LayerConnectionError, error_message):
        layers.join(network, network)

    # Joining the network onto a layer it already contains.
    extra_relu = layers.Relu(5)
    network = layers.join(network, extra_relu)
    with self.assertRaisesRegexp(LayerConnectionError, error_message):
        layers.join(network, extra_relu)
def test_storage_pickle_save_load_save(self):
    """A save/load/save round trip produces a file of the same size."""
    connection = layers.join(
        layers.Input(10),
        layers.Sigmoid(5),
        layers.Sigmoid(2),
    )

    with tempfile.NamedTemporaryFile() as temp:
        storage.save_pickle(connection, temp.name)
        temp.file.seek(0)

        filesize_first = os.path.getsize(temp.name)
        storage.load_pickle(connection, temp.name)

    with tempfile.NamedTemporaryFile() as temp:
        storage.save_pickle(connection, temp.name)
        temp.file.seek(0)
        filesize_second = os.path.getsize(temp.name)

    self.assertEqual(filesize_first, filesize_second)
def Inception(nfilters):
    """GoogLeNet-style Inception module (channels-last filter tuples).

    ``nfilters`` lists the filter counts used across the four branches.
    """
    return layers.join(
        [[
            layers.MaxPooling((3, 3), stride=1, padding='SAME'),
            layers.Convolution((1, 1, nfilters[0])),
            layers.Relu(),
        ], [
            layers.Convolution((1, 1, nfilters[1])),
            layers.Relu(),
        ], [
            layers.Convolution((1, 1, nfilters[2])),
            layers.Relu(),
            layers.Convolution((3, 3, nfilters[3]), padding='SAME'),
            layers.Relu(),
        ], [
            layers.Convolution((1, 1, nfilters[4])),
            layers.Relu(),
            layers.Convolution((5, 5, nfilters[5]), padding='SAME'),
            layers.Relu(),
        ]],
        layers.Concatenate(),
    )
def test_parallel_connection_disable_training_sate(self):
    """disable_training_state() toggles every layer and restores it on exit.

    NOTE(review): the method name has a typo ('sate' -> 'state'); kept
    as-is because renaming would change the test identifier.
    """
    connections = layers.join([
        [layers.Input(10) > layers.Sigmoid(1)],
        [layers.Input(20) > layers.Sigmoid(2)],
    ])

    all_layers = []
    for connection in connections:
        all_layers.extend(list(connection))

    # Enabled
    for layer in all_layers:
        self.assertTrue(layer.training_state, msg=layer)

    # Disabled
    with connections.disable_training_state():
        for layer in all_layers:
            self.assertFalse(layer.training_state, msg=layer)

    # Enabled
    for layer in all_layers:
        self.assertTrue(layer.training_state, msg=layer)
def clean_layers(connection):
    """
    Normalize a layer connection specification into one format and
    validate it.

    Parameters
    ----------
    connection : list, tuple or object
        Layer connection in one of the supported formats: a sequence of
        integers (layer sizes), a sequence of layer/connection objects,
        or an already-built connection object.

    Returns
    -------
    object
        Cleaned layers connection.
    """
    # A sequence of plain integers is a shorthand for layer sizes.
    # NOTE(review): ``all()`` over an empty sequence is True, and a
    # non-iterable ``connection`` would raise TypeError here — confirm
    # callers always pass an iterable.
    if all(isinstance(element, int) for element in connection):
        connection = generate_layers(connection)

    # Sequences of layers/connections are joined into a single one.
    if isinstance(connection, (list, tuple)):
        connection = layers.join(*connection)

    return connection
def test_different_input_types(self):
    """output() accepts arrays, layer-keyed dicts and name-keyed dicts."""
    input_layer = layers.Input(10, name='input')
    network = layers.join(
        input_layer,
        layers.Sigmoid(5),
        layers.Sigmoid(4),
    )
    x_matrix = asfloat(np.random.random((3, 10)))

    # Positional array input.
    out1 = self.eval(network.output(x_matrix))
    self.assertEqual((3, 4), out1.shape)

    # Dictionary keyed by the layer object.
    out2 = self.eval(network.output({input_layer: x_matrix}))
    np.testing.assert_array_almost_equal(out1, out2)

    # Dictionary keyed by the layer name.
    out3 = self.eval(network.output({'input': x_matrix}))
    np.testing.assert_array_almost_equal(out2, out3)

    unknown_layer = layers.Input(5, name='unk')
    message = "The `unk` layer doesn't appear in the network"
    with self.assertRaisesRegexp(ValueError, message):
        network.output({unknown_layer: x_matrix})
def test_layer_definitions(self):
    """define() creates a layer template with the given defaults baked in."""
    Conv = layers.Convolution.define(
        padding='SAME',
        weight=init.Constant(1),
        bias=None,
    )
    network = layers.join(
        layers.Input((28, 28, 1)),
        Conv((3, 3, 16)),
        Conv((3, 3, 32)),
    )
    network.create_variables()

    self.assertShapesEqual(network.output_shape, (None, 28, 28, 32))

    # Constant(1) weights: the sum equals the number of weight entries.
    weight_1 = self.eval(network.layers[1].weight)
    self.assertEqual(weight_1.sum(), 1 * 3 * 3 * 16)
    self.assertIsNone(network.layers[1].bias)

    weight_2 = self.eval(network.layers[2].weight)
    self.assertEqual(weight_2.sum(), 16 * 3 * 3 * 32)
    self.assertIsNone(network.layers[2].bias)
def test_cut_input_and_output_layers(self):
    """start() and end() compose to cut a middle slice out of the network."""
    network = layers.join(
        layers.Input(10, name='input-1'),
        layers.Relu(8, name='relu-0'),
        layers.Relu(5, name='relu-1'),
        layers.Relu(2, name='relu-2'),
        layers.Relu(1, name='relu-3'),
    )
    self.assertEqual(network.input_shape, (10, ))
    self.assertEqual(network.output_shape, (1, ))
    self.assertEqual(len(network), 5)

    cutted_network = network.start('relu-1').end('relu-2')
    self.assertEqual(cutted_network.input_shape, (8, ))
    self.assertEqual(cutted_network.output_shape, (2, ))
    self.assertEqual(len(cutted_network), 2)

    x_test = asfloat(np.ones((7, 8)))
    y_predicted = self.eval(cutted_network.output(x_test))
    self.assertEqual(y_predicted.shape, (7, 2))
def test_deconv_unknown_input_width_and_height(self):
    """Deconvolution works with unknown spatial input dimensions."""
    network = layers.join(
        layers.Input((None, None, 3)),
        layers.Convolution((3, 3, 7)),
        layers.Deconvolution((3, 3, 4)),
    )

    shapes = network.output_shapes_per_layer
    shapes = {l: shape_to_tuple(s) for l, s in shapes.items()}

    self.assertDictEqual(
        shapes, {
            network.layers[0]: (None, None, None, 3),
            network.layers[1]: (None, None, None, 7),
            network.layers[2]: (None, None, None, 4),
        })

    # Any concrete spatial size is reconstructed exactly.
    for size in (10, 7):
        input_value = asfloat(np.random.random((1, size, size, 3)))
        actual_output = self.eval(network.output(input_value))
        self.assertEqual(actual_output.shape, (1, size, size, 4))
def test_cut_input_layers_in_sequence(self):
    """Chained start() calls leave only the last layer in the graph."""
    network = layers.join(
        layers.Input(10, name='input-1'),
        layers.Relu(5, name='relu-1'),
        layers.Relu(1, name='relu-2'),
    )
    network.create_variables()

    self.assertShapesEqual(network.input_shape, (None, 10))
    self.assertShapesEqual(network.output_shape, (None, 1))
    self.assertEqual(len(network), 3)

    cutted_network = network.start('relu-1').start('relu-2')
    self.assertShapesEqual(cutted_network.input_shape, (None, 5))
    self.assertShapesEqual(cutted_network.output_shape, (None, 1))
    self.assertEqual(len(cutted_network), 1)

    # The cut graph contains exactly one node with no outgoing edges.
    self.assertDictEqual(cutted_network.forward_graph, {
        network.layer('relu-2'): [],
    })

    x_test = asfloat(np.ones((7, 5)))
    y_predicted = self.eval(cutted_network.output(x_test))
    self.assertEqual(y_predicted.shape, (7, 1))
def test_compare_bp_and_cg(self):
    """Compare gradient descent against conjugate gradient on one dataset."""
    x_train, x_test, y_train, y_test = simple_classification()

    compare_networks(
        # Test classes
        partial(
            partial(algorithms.GradientDescent, batch_size=None),
            step=1.0,
        ),
        partial(algorithms.ConjugateGradient,
                update_function='fletcher_reeves'),
        # Test data
        (asfloat(x_train), asfloat(y_train)),
        # Network configurations
        network=layers.join(
            layers.Input(10),
            layers.Sigmoid(5),
            layers.Sigmoid(1),
        ),
        loss='mse',
        shuffle_data=True,
        # Test configurations
        epochs=50,
        show_comparison_plot=False)
def Inception(nfilters):
    """GoogLeNet-style Inception module (channels-first filter tuples).

    ``nfilters`` lists the filter counts used across the four branches.
    """
    return layers.join(
        [[
            layers.MaxPooling((3, 3), stride=1, padding=(1, 1)),
            layers.Convolution((nfilters[0], 1, 1)),
            layers.Relu(),
        ], [
            layers.Convolution((nfilters[1], 1, 1)),
            layers.Relu(),
        ], [
            layers.Convolution((nfilters[2], 1, 1)),
            layers.Relu(),
            layers.Convolution((nfilters[3], 3, 3), padding='half'),
            layers.Relu(),
        ], [
            layers.Convolution((nfilters[4], 1, 1)),
            layers.Relu(),
            layers.Convolution((nfilters[5], 5, 5), padding='half'),
            layers.Relu(),
        ]],
        layers.Concatenate(),
    )
def test_parallel_many_to_many_connection(self):
    """Two parallel groups joined together form a full bipartite graph."""
    relu_layer_1 = layers.Relu(1)
    sigmoid_layer_1 = layers.Sigmoid(1)
    relu_layer_2 = layers.Relu(2)
    sigmoid_layer_2 = layers.Sigmoid(2)

    first_group = [sigmoid_layer_1, relu_layer_1]
    second_group = [sigmoid_layer_2, relu_layer_2]
    connection = layers.join(first_group, second_group)

    self.assertEqual(connection.input_shape, [None, None])
    self.assertEqual(connection.output_shape, [(2, ), (2, )])

    graph = connection.graph
    for layer in first_group:
        # Each source layer feeds both layers of the second group.
        self.assertEqual(len(graph.forward_graph[layer]), 2)
        self.assertEqual(len(graph.backward_graph[layer]), 0)

    for layer in second_group:
        # Each sink layer receives input from both source layers.
        self.assertEqual(len(graph.forward_graph[layer]), 0)
        self.assertEqual(len(graph.backward_graph[layer]), 2)
def test_select_network_branch(self):
    """end() selects a single branch out of a forked network."""
    network = layers.join(layers.Input(10, name='input-1'), [[
        layers.Relu(1, name='relu-1'),
    ], [
        layers.Relu(2, name='relu-2'),
    ]])
    self.assertEqual(network.input_shape, (10, ))
    self.assertEqual(network.output_shape, [(1, ), (2, )])
    self.assertEqual(len(network), 3)

    relu_1_network = network.end('relu-1')
    self.assertEqual(relu_1_network.input_shape, (10, ))
    self.assertEqual(relu_1_network.output_shape, (1, ))
    self.assertEqual(len(relu_1_network), 2)

    x_test = asfloat(np.ones((7, 10)))
    y_predicted = self.eval(relu_1_network.output(x_test))
    self.assertEqual(y_predicted.shape, (7, 1))

    relu_2_network = network.end('relu-2')
    self.assertEqual(relu_2_network.input_shape, (10, ))
    self.assertEqual(relu_2_network.output_shape, (2, ))
    self.assertEqual(len(relu_2_network), 2)