import numpy as np
import matplotlib.pyplot as plt

from neupy import layers, storage
# Exact import paths below assume NeuPy's storage module layout
from neupy.storage import (save_dict, load_dict,
                           ParameterLoaderError, InvalidFormat)
from neupy.utils import asfloat


def test_transfer_learning_using_names(self):
    network_pretrained = layers.join(
        layers.Input(10),
        layers.Elu(5, name='elu-a'),
        layers.Elu(2, name='elu-b'),
        layers.Sigmoid(1),
    )
    network_new = layers.join(
        layers.Input(10),
        layers.Elu(5, name='elu-a'),
        layers.Elu(2, name='elu-b'),
        layers.Elu(8, name='elu-c'),  # new layer
    )
    pretrained_layers_stored = storage.save_dict(network_pretrained)

    storage.load_dict(
        network_new,
        pretrained_layers_stored,
        load_by='names',
        skip_validation=False,
        ignore_missing=True,
    )

    random_input = asfloat(np.random.random((12, 10)))

    pretrained_output = self.eval(
        network_pretrained.end('elu-b').output(random_input))
    new_network_output = self.eval(
        network_new.end('elu-b').output(random_input))

    np.testing.assert_array_almost_equal(
        pretrained_output, new_network_output)

    pred = self.eval(network_new.output(random_input))
    self.assertEqual(pred.shape, (12, 8))

def test_storage_load_dict_invalid_number_of_parameters(self):
    network = layers.join(
        layers.Input(3),
        layers.Relu(4, name='relu'),
        layers.Linear(5, name='linear') >> layers.Relu(),
        layers.Softmax(6, name='softmax'),
    )
    data = {
        'metadata': {},  # omitted for simplicity
        'graph': {},  # omitted for simplicity
        # Input layer was omitted on purpose
        'layers': [{
            'name': 'name-1',
            'class_name': 'Relu',
            'configs': {},
            'parameters': {
                'weight': {
                    'trainable': True,
                    'value': np.ones((3, 4))
                },
                'bias': {
                    'trainable': True,
                    'value': np.ones((4,))
                },
            }
        }]
    }

    with self.assertRaises(ParameterLoaderError):
        storage.load_dict(network, data, ignore_missing=False)

def test_transfer_learning_using_position(self):
    network_pretrained = layers.join(
        layers.Input(10),
        layers.Elu(5),
        layers.Elu(2, name='elu'),
        layers.Sigmoid(1),
    )
    network_new = layers.join(
        layers.Input(10),
        layers.Elu(5),
        layers.Elu(2),
    )
    pretrained_layers_stored = storage.save_dict(network_pretrained)

    with self.assertRaises(ParameterLoaderError):
        storage.load_dict(
            network_new,
            pretrained_layers_stored,
            load_by='names_or_order',
            ignore_missing=False,
        )

    storage.load_dict(
        network_new,
        pretrained_layers_stored,
        load_by='names_or_order',
        ignore_missing=True,
    )

    random_input = asfloat(np.random.random((12, 10)))
    new_network_output = self.eval(network_new.output(random_input))
    pretrained_output = self.eval(
        network_pretrained.end('elu').output(random_input))

    np.testing.assert_array_almost_equal(
        pretrained_output, new_network_output)

def test_storage_load_dict_using_names(self):
    relu = layers.Relu(2, name='relu')
    network = layers.join(layers.Input(10), relu)

    weight = np.ones((10, 2))
    bias = np.ones((2,))

    storage.load_dict(network, {
        'metadata': {},  # omitted for simplicity
        'graph': {},  # omitted for simplicity
        # Input layer was omitted on purpose
        'layers': [{
            'name': 'relu',
            'class_name': 'Relu',
            'configs': {},
            'parameters': {
                'weight': {'trainable': True, 'value': weight},
                'bias': {'trainable': True, 'value': bias},
            }
        }]
    })

    np.testing.assert_array_almost_equal(weight, self.eval(relu.weight))
    np.testing.assert_array_almost_equal(bias, self.eval(relu.bias))

def test_transfer_learning_using_names_compiled(self):
    network_pretrained = layers.join(
        layers.Input(10),
        layers.Relu(5, name='relu-1'),
        layers.Relu(2, name='relu-2'),
        layers.Sigmoid(1),
    )
    network_new = layers.join(
        layers.Input(10),
        layers.Relu(5, name='relu-1'),
        layers.Relu(2, name='relu-2'),
        layers.Relu(8, name='relu-3'),  # new layer
    )
    pretrained_layers_stored = storage.save_dict(network_pretrained)

    storage.load_dict(
        network_new,
        pretrained_layers_stored,
        load_by='names',
        ignore_missing=True,
    )

    pretrained_predictor = network_pretrained.end('relu-2').compile()
    new_network_predictor = network_new.end('relu-2').compile()

    random_input = asfloat(np.random.random((12, 10)))
    pretrained_output = pretrained_predictor(random_input)
    new_network_output = new_network_predictor(random_input)

    np.testing.assert_array_almost_equal(
        pretrained_output, new_network_output)

    new_full_network_predictor = network_new.compile()
    pred = new_full_network_predictor(random_input)
    self.assertEqual(pred.shape, (12, 8))

def test_storage_load_dict_using_names_with_shapes(self):
    relu = layers.Relu(2, name='relu')
    connection = layers.Input(10) >> relu

    weight = np.ones((10, 2))
    bias = np.ones((2,))

    storage.load_dict(connection, {
        'metadata': {},  # omitted for simplicity
        'graph': {},  # omitted for simplicity
        # Input layer was omitted on purpose
        'layers': [{
            'name': 'relu',
            'class_name': 'Relu',
            'input_shape': (10,),
            'output_shape': (2,),
            'configs': {},
            'parameters': {
                'weight': {'trainable': True, 'value': weight},
                'bias': {'trainable': True, 'value': bias},
            }
        }]
    })

    np.testing.assert_array_almost_equal(weight, self.eval(relu.weight))
    np.testing.assert_array_almost_equal(bias, self.eval(relu.bias))

def test_storage_load_dict_using_wrong_names(self):
    connection = layers.join(
        layers.Input(3),
        layers.Relu(4, name='relu'),
        layers.Linear(5, name='linear') >> layers.Relu(),
        layers.Softmax(6, name='softmax'),
    )
    storage.load_dict(connection, {
        'metadata': {},  # omitted for simplicity
        'graph': {},  # omitted for simplicity
        # Input layer was omitted on purpose
        'layers': [{
            'name': 'name-1',
            'class_name': 'Relu',
            'input_shape': (3,),
            'output_shape': (4,),
            'configs': {},
            'parameters': {
                'weight': {'trainable': True, 'value': np.ones((3, 4))},
                'bias': {'trainable': True, 'value': np.ones((4,))},
            }
        }, {
            'name': 'name-2',
            'class_name': 'Relu',
            'input_shape': (4,),
            'output_shape': (5,),
            'configs': {},
            'parameters': {
                'weight': {'trainable': True, 'value': np.ones((4, 5))},
                'bias': {'trainable': True, 'value': np.ones((5,))},
            }
        }, {
            'name': 'name-3',
            'class_name': 'Softmax',
            'input_shape': (5,),
            'output_shape': (6,),
            'configs': {},
            'parameters': {
                'weight': {'trainable': True, 'value': np.ones((5, 6))},
                'bias': {'trainable': True, 'value': np.ones((6,))},
            }
        }]
    }, load_by='order', skip_validation=False)

    # Parameters were matched by position, so the stored names are ignored
    relu = connection.layer('relu')
    self.assertEqual(12, np.sum(self.eval(relu.weight)))
    self.assertEqual(4, np.sum(self.eval(relu.bias)))

    linear = connection.layer('linear')
    self.assertEqual(20, np.sum(self.eval(linear.weight)))
    self.assertEqual(5, np.sum(self.eval(linear.bias)))

    softmax = connection.layer('softmax')
    self.assertEqual(30, np.sum(self.eval(softmax.weight)))
    self.assertEqual(6, np.sum(self.eval(softmax.bias)))

def compare_networks(default_class, tested_class, data, **kwargs):
    """
    Compare two network architectures.

    Parameters
    ----------
    default_class : BaseNetwork subclass
        Network that is expected to converge more slowly.
    tested_class : BaseNetwork subclass
        Network that is expected to converge faster.
    data : tuple
        Training data, typically ``(x_train, y_train)``.
    **kwargs
        Additional arguments passed to both network classes.
        The ``epochs`` and ``show_comparison_plot`` keys are
        extracted before the networks are created.

    Raises
    ------
    AssertionError
        Raised when the default network finishes training with a
        smaller or equal error compared to the tested one.
    """
    epochs = kwargs.pop('epochs', 100)
    show_comparison_plot = kwargs.pop('show_comparison_plot', False)

    # Compute result for the default network (which is expected to be slower)
    network = default_class(**kwargs)

    if hasattr(network, 'connection'):
        initial_parameters = save_dict(network.network)

    network.train(*data, epochs=epochs)
    network_default_error = network.errors.train[-1]
    errors1 = network.errors.train

    # Compute result for the tested network (which is expected to be faster)
    if hasattr(network, 'connection'):
        # Restore the initial parameters so that both networks start
        # training from the same weights (relevant when they share the
        # same connection passed through kwargs)
        load_dict(network.network, initial_parameters)

    network = tested_class(**kwargs)
    network.train(*data, epochs=epochs)
    network_tested_error = network.errors.train[-1]
    errors2 = network.errors.train

    if show_comparison_plot:
        error_range = np.arange(max(len(errors1), len(errors2)))
        plt.plot(error_range[:len(errors1)], errors1)
        plt.plot(error_range[:len(errors2)], errors2)
        plt.show()

    if network_default_error <= network_tested_error:
        raise AssertionError(
            "The first network has smaller error ({}) than the "
            "second one ({}).".format(
                network_default_error, network_tested_error))

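# A minimal usage sketch for compare_networks. This call is illustrative,
# not taken from the test suite: GradientDescent and Momentum are real
# NeuPy algorithms, but the dataset, the layer sizes and the `network=`
# keyword are assumptions made for the example.
def example_compare_networks_usage():
    from neupy import algorithms

    x_train = asfloat(np.random.random((100, 10)))
    y_train = asfloat(np.random.random((100, 1)))

    # Momentum is expected to converge faster than plain gradient
    # descent, so the assertion inside compare_networks should pass
    compare_networks(
        algorithms.GradientDescent,
        algorithms.Momentum,
        (x_train, y_train),
        network=layers.Input(10) >> layers.Sigmoid(5) >> layers.Sigmoid(1),
        epochs=50,
    )
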
def test_basic_skip_validation(self):
    network = layers.Input(10) >> layers.Relu(1)

    with self.assertRaises(InvalidFormat):
        storage.load_dict(network, {}, skip_validation=False)

def test_failed_loading_mode_for_storage(self):
    network = layers.Input(2) >> layers.Sigmoid(1)

    with self.assertRaisesRegexp(ValueError, "Invalid value"):
        storage.load_dict(network, {}, load_by='unknown')

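# For reference, a minimal sketch of the dictionary layout that
# storage.load_dict expects, assembled from the structures used in the
# tests above; the 'metadata' and 'graph' sections are left empty here,
# exactly as the tests do for simplicity.
def example_load_dict_payload():
    network = layers.Input(10) >> layers.Relu(2, name='relu')

    data = {
        'metadata': {},
        'graph': {},
        'layers': [{
            'name': 'relu',
            'class_name': 'Relu',
            'configs': {},
            'parameters': {
                'weight': {'trainable': True, 'value': np.ones((10, 2))},
                'bias': {'trainable': True, 'value': np.ones((2,))},
            },
        }],
    }

    # load_by accepts 'names', 'order' or 'names_or_order'; any other
    # value triggers the ValueError checked in the test above
    storage.load_dict(network, data, load_by='names')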