def test_sew_together_when_cutted_piece_already_in_use(self):
    """Sewing must accept a cut piece even when that piece is
    already connected into another network.
    """
    autoencoder = algorithms.Momentum([
        layers.Input(25),
        layers.Sigmoid(15),
        layers.Sigmoid(25),
    ])

    # Take the input half of the autoencoder.
    encoder = surgery.cut(autoencoder, start=0, end=2)
    self.assertEqual(len(encoder), 2)

    # Attach the encoder to a classifier so it is "in use".
    classifier = algorithms.Momentum(encoder > layers.Softmax(10))

    network = algorithms.GradientDescent([
        layers.Input(5),
        surgery.CutLine(),  # <- first cut point
        layers.Sigmoid(10),
        layers.Sigmoid(20),
        layers.Sigmoid(30),
        surgery.CutLine(),  # <- second cut point
        layers.Sigmoid(1),
    ])
    _, hidden_layers, _ = surgery.cut_along_lines(network)
    self.assertEqual(len(hidden_layers), 3)

    # Sewing the already-used encoder with fresh pieces must work.
    connected_layers = surgery.sew_together([
        encoder,
        layers.Relu(5),
        hidden_layers,
    ])
    self.assertEqual(len(connected_layers), 6)
def test_sew_together_cutted_pieces(self):
    """Pieces cut from two different networks keep their own
    input/output shapes after cutting.
    """
    network1 = algorithms.GradientDescent([
        layers.Input(100),
        layers.Sigmoid(200),
        layers.Sigmoid(100),
    ])
    network2 = algorithms.GradientDescent([
        layers.Input(10),
        layers.Sigmoid(20),
        layers.Sigmoid(10),
    ])

    # Cut the first two layers out of each network and make sure
    # the shapes survive the cut unchanged.
    first_part = surgery.cut(network1, start=0, end=2)
    self.assertEqual(first_part.output_shape, (200,))
    self.assertEqual(first_part.input_shape, (100,))

    second_part = surgery.cut(network2, start=0, end=2)
    self.assertEqual(second_part.output_shape, (20,))
    self.assertEqual(second_part.input_shape, (10,))
def test_cutting_exceptions(self):
    """Invalid cut requests must raise ``ValueError``:
    a network without layer connections, an out-of-range end
    index, and an empty (zero-length) slice.
    """
    # PNN has no trainable layer connection to cut.
    with self.assertRaises(ValueError):
        surgery.cut(algorithms.PNN(), 0, 1)

    # End index is past the last layer.
    with self.assertRaises(ValueError):
        surgery.cut(self.network, 0, 10)

    # start == end selects nothing.
    with self.assertRaises(ValueError):
        surgery.cut(self.network, 0, 0)
def test_cut_layer_copy(self):
    """``surgery.cut`` must deep-copy layers: training the source
    network afterwards must not change the cut copy's weights.
    """
    # Use the raw connection instead of the network object as
    # another acceptable input type for ``cut``.
    connection = self.network.connection
    layer = surgery.cut(connection, start=1, end=2)

    # The cut result is a copy, not the same layer object.
    self.assertIsNot(self.network.layers[1], layer)

    x = np.random.random((10, 30))
    y = np.random.random((10, 1))
    self.network.train(x, y, epochs=20)

    # After training, the original layer's weights moved away
    # from the copied snapshot.
    trained_layer = self.network.layers[1]
    trained_weight = trained_layer.weight.get_value()
    copied_weight = layer.weight.get_value()
    self.assertTrue(np.any(trained_weight != copied_weight))
def test_cut_layers_basics(self):
    """Check that ``surgery.cut`` returns layers with the expected
    output shapes for several (start, end) index combinations,
    including a negative end index.
    """
    testcases = [
        dict(kwargs=dict(connection=self.network, start=0, end=2),
             expected_sizes=(30, 10)),
        dict(kwargs=dict(connection=self.network, start=1, end=3),
             expected_sizes=(10, 20)),
        # Negative end index counts from the tail of the network.
        dict(kwargs=dict(connection=self.network, start=1, end=-1),
             expected_sizes=(10, 20)),
    ]

    for testcase in testcases:
        # NOTE: named ``cut_layers`` (not ``layers``) to avoid
        # shadowing the imported ``layers`` module.
        cut_layers = surgery.cut(**testcase['kwargs'])
        output_shapes = [layer.output_shape for layer in cut_layers]
        self.assertEqual(
            as_tuple(*output_shapes),
            testcase['expected_sizes']
        )
def test_cut_one_layer(self):
    """Cutting a single-layer slice returns the layer itself
    (an ``Input`` instance) rather than a connection wrapper.
    """
    input_layer = surgery.cut(self.network, start=0, end=1)

    self.assertIsInstance(input_layer, layers.Input)
    self.assertEqual(input_layer.output_shape, (30,))
layers.Input(classifier_structure.output_shape), layers.PRelu(512), layers.Dropout(0.25), layers.Softmax(10), ], verbose=True, step=0.05, shuffle_data=True, batch_size=128, error='categorical_crossentropy', ) linear_classifier.architecture() linear_classifier.train(x_labeled_encoded, y_labeled, x_unlabeled_encoded, y_unlabeled, epochs=100) classification_layer = surgery.cut(linear_classifier, start=1, end=4) classifier_structure = surgery.sew_together([classifier_structure, classification_layer]) classifier = algorithms.MinibatchGradientDescent( classifier_structure, verbose=True, step=0.1, shuffle_data=True, batch_size=128, error='categorical_crossentropy', ) classifier.architecture() classifier.train(x_labeled_4d, y_labeled, epochs=1000) unlabeled_predicted = classifier.predict(x_unlabeled_4d).argmax(axis=1)
layers.Softmax(10), ], verbose=True, step=0.05, shuffle_data=True, batch_size=128, error='categorical_crossentropy', ) linear_classifier.architecture() linear_classifier.train(x_labeled_encoded, y_labeled, x_unlabeled_encoded, y_unlabeled, epochs=100) classification_layer = surgery.cut(linear_classifier, start=1, end=4) classifier_structure = surgery.sew_together( [classifier_structure, classification_layer]) classifier = algorithms.MinibatchGradientDescent( classifier_structure, verbose=True, step=0.1, shuffle_data=True, batch_size=128, error='categorical_crossentropy', ) classifier.architecture() classifier.train(x_labeled_4d, y_labeled, epochs=1000) unlabeled_predicted = classifier.predict(x_unlabeled_4d).argmax(axis=1)