    def test_breed1(self):
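        # With a single parent and delta 0, the offspring are expected to keep the
        # parent's Dense width and dropout rate unchanged.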
        delta = 0.0
        n_parents = 1
        traits = dict(layer_dropout={1})

        model_p = KerasPackageWrapper.make_flat_sequential_model()
        model_p.add(keras.layers.Dense(10, activation="relu", input_dim=10))
        model_p.add(keras.layers.Dropout(0.1))

        model_c1 = KerasPackageWrapper.make_flat_sequential_model()
        model_c1.add(keras.layers.Dense(10, activation="relu", input_dim=10))
        model_c1.add(keras.layers.Dropout(0.3))

        model_c2 = KerasPackageWrapper.make_flat_sequential_model()
        model_c2.add(keras.layers.Dense(10, activation="relu", input_dim=10))
        model_c2.add(keras.layers.Dropout(0.7))

        new_generation = GeneticOptimizer.breed([model_p, model_c1, model_c2], n_parents,
                                                traits, delta)

        # Check the networks layer-wise
        for layer_i in traits["layer_dropout"]:
            self.assertEqual(model_p.layers[layer_i - 1].units, new_generation[1].layers[layer_i - 1].units)
            self.assertEqual(model_p.layers[layer_i].rate, new_generation[1].layers[layer_i].rate)

            self.assertEqual(model_p.layers[layer_i - 1].units, new_generation[2].layers[layer_i - 1].units)
            self.assertEqual(model_p.layers[layer_i].rate, new_generation[2].layers[layer_i].rate)

    def test_breed3(self):
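        # With two parents and a non-zero delta, offspring should keep the parents'
        # Dense width while their dropout rate stays within delta of a parent's rate.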
        delta = 0.3
        n_parents = 2
        traits = dict(layer_dropout={1})

        model_p = KerasPackageWrapper.make_flat_sequential_model()
        model_p.add(keras.layers.Dense(10, activation="relu", input_dim=10))
        model_p.add(keras.layers.Dropout(0.1))

        model_c1 = KerasPackageWrapper.make_flat_sequential_model()
        model_c1.add(keras.layers.Dense(10, activation="relu", input_dim=10))
        model_c1.add(keras.layers.Dropout(0.3))

        model_c2 = KerasPackageWrapper.make_flat_sequential_model()
        model_c2.add(keras.layers.Dense(10, activation="relu", input_dim=10))
        model_c2.add(keras.layers.Dropout(0.7))

        model_c3 = KerasPackageWrapper.make_flat_sequential_model()
        model_c3.add(keras.layers.Dense(10, activation="relu", input_dim=10))
        model_c3.add(keras.layers.Dropout(0.9))

        new_generation = GeneticOptimizer.breed([model_p, model_c1, model_c2, model_c3],
                                                n_parents, traits, delta)

        # Check the networks layer-wise
        for layer_i in traits["layer_dropout"]:
            for network in new_generation[n_parents:]:
                self.assertEqual(model_p.layers[layer_i - 1].units, network.layers[layer_i - 1].units)
                self._assertAlmostEqualsMultiple(network.layers[layer_i].rate,
                                                 TestGeneticOptimizer._extract_rates(new_generation[:n_parents], layer_i),
                                                 delta)

    def test_inherit_to_child4(self):
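        # With two parents, the child should keep the shared Dense width and get a
        # dropout rate within delta of one of the parents' rates.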
        # TODO keras assumed!
        delta = 0.1

        model1 = KerasPackageWrapper.make_flat_sequential_model()
        model1.add(keras.layers.Dense(10, activation="relu", input_dim=10))
        model1.add(keras.layers.Dropout(0.2))

        model1.compile(optimizer='rmsprop',
                       loss='categorical_crossentropy',
                       metrics=['accuracy'])

        model2 = KerasPackageWrapper.make_flat_sequential_model()
        model2.add(keras.layers.Dense(10, activation="relu", input_dim=10))
        model2.add(keras.layers.Dropout(0.7))

        model2.compile(optimizer='rmsprop',
                       loss='categorical_crossentropy',
                       metrics=['accuracy'])

        parents = [model1, model2]
        child = GeneticOptimizer.inherit_to_child(parents, dict(layer_dropout={1}), delta)
        # Assert that the structure stays the same as that of parents but the rate changes
        self.assertEqual(child.layers[0].units, model1.layers[0].units)
        self.assertEqual(child.layers[0].units, model2.layers[0].units)

        self._assertAlmostEqualsMultiple(child.layers[1].rate, TestGeneticOptimizer._extract_rates(parents, 1), delta)


def get_clean_mnist():
    # TODO keras assumed!
    data = keras.datasets.mnist

    (x_train, y_train), (x_test, y_test) = data.load_data()
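    # Flatten the 28x28 images into 784-length vectors and scale pixel values to [0, 1]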
    x_train = x_train.reshape(60000, 784)
    x_test = x_test.reshape(10000, 784)
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    x_test /= 255
    x_train /= 255

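    # One-hot encode the integer labels for the 10 digit classes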
    one_hot_labels_train = KerasPackageWrapper.make_one_hot(y_train, 10)
    one_hot_labels_test = KerasPackageWrapper.make_one_hot(y_test, 10)

    return x_train, one_hot_labels_train, x_test, one_hot_labels_test

    def test_get_accuracy(self):
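        # Smoke test: get_accuracy should run on an untrained network and return a
        # printable score for the held-out test data.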
        model = KerasPackageWrapper.make_flat_sequential_model()

        # TODO keras assumed!
        model.add(keras.layers.Dense(10, activation="relu", input_dim=784))
        model.add(keras.layers.Dropout(0.2))

        model.add(keras.layers.Dense(10, activation="relu"))
        model.add(keras.layers.Dropout(0.2))

        # Compile the network
        model.compile(optimizer='rmsprop',
                      loss='categorical_crossentropy',
                      metrics=['accuracy'])

        result = KerasPackageWrapper.get_accuracy(model, self.test_data_x, self.test_data_y)

        print("Test for get_accuracy. Accuracy is {}".format(result))

    def test_do_training(self):
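        # Smoke test: build a deeper MNIST classifier and run do_training end to end.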
        model = KerasPackageWrapper.make_flat_sequential_model()

        # TODO keras assumed!
        model.add(keras.layers.Dense(500, activation="relu", input_dim=784))
        model.add(keras.layers.Dropout(0.2))

        model.add(keras.layers.Dense(30, activation="relu"))
        model.add(keras.layers.Dropout(0.2))

        model.add(keras.layers.Dense(30, activation="relu"))
        model.add(keras.layers.Dropout(0.2))

        model.add(keras.layers.Dense(10, activation="softmax"))
        model.add(keras.layers.Dropout(0.2))

        # Compile the network
        model.compile(optimizer='rmsprop',
                      loss='categorical_crossentropy',
                      metrics=['accuracy'])

        print(KerasPackageWrapper.do_training(model, *get_clean_mnist_with_cold_labels(), 10, 3))

    def test_inherit_to_child1(self):
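        # With a single parent and delta 0, the child should be a structural copy:
        # same Dense width and the same dropout rate.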
        # TODO keras assumed!
        delta = 0.0

        model = KerasPackageWrapper.make_flat_sequential_model()
        model.add(keras.layers.Dense(10, activation="relu", input_dim=10))
        model.add(keras.layers.Dropout(0.5))

        model.compile(optimizer='rmsprop',
                      loss='categorical_crossentropy',
                      metrics=['accuracy'])

        child = GeneticOptimizer.inherit_to_child([model], dict(layer_dropout={1}), delta)
        self.assertEqual(model.layers[1].rate, child.layers[1].rate)
        self.assertEqual(model.layers[0].units, child.layers[0].units)

    def test_train_models2(self):
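        # Smoke test: train three candidate networks with differing dropout rates
        # through GeneticOptimizer.train_models and print the result.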
        model1 = KerasPackageWrapper.make_flat_sequential_model()
        model1.add(keras.layers.Dense(300, activation="relu", input_dim=784))
        model1.add(keras.layers.Dropout(0.2))
        model1.add(keras.layers.Dense(10, activation="softmax"))
        model1.add(keras.layers.Dropout(0.2))

        model2 = KerasPackageWrapper.make_flat_sequential_model()
        model2.add(keras.layers.Dense(300, activation="relu", input_dim=784))
        model2.add(keras.layers.Dropout(0.5))
        model2.add(keras.layers.Dense(10, activation="softmax"))
        model2.add(keras.layers.Dropout(0.5))

        model3 = KerasPackageWrapper.make_flat_sequential_model()
        model3.add(keras.layers.Dense(300, activation="relu", input_dim=784))
        model3.add(keras.layers.Dropout(0.7))
        model3.add(keras.layers.Dense(10, activation="softmax"))
        model3.add(keras.layers.Dropout(0.7))

        model1.compile(optimizer='rmsprop',
                       loss='categorical_crossentropy',
                       metrics=['accuracy'])

        model2.compile(optimizer='rmsprop',
                       loss='categorical_crossentropy',
                       metrics=['accuracy'])

        model3.compile(optimizer='rmsprop',
                       loss='categorical_crossentropy',
                       metrics=['accuracy'])

        mnist_data = get_clean_mnist_with_cold_labels()
        instance = GeneticOptimizer(model1, mnist_data[:2], mnist_data[2:],
                                    n_categories=10, traits=dict(layer_dropout={1, 3}))

        print(GeneticOptimizer.train_models(instance, [model1, model2, model3]))

    def _check_one_hot(self, input_data, n_categories, exp_result):
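        # Helper: encode input_data and assert the one-hot matrix matches exp_result
        # element-wise.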
        encoding = KerasPackageWrapper.make_one_hot(input_data, n_categories)

        comparison = np.equal(encoding, exp_result)

        self.assertTrue(np.all(comparison))