Example #1
  def test_phases_end_to_end(self):
    train_dataset, test_dataset = testing_utils.get_test_data(
        train_samples=128,
        test_samples=64,
        input_shape=(10,),
        num_classes=10,
        random_seed=42)

    # TODO: Consider performing `tf.data.Dataset` transformations
    # within the `get_test_data` function.
    train_dataset = train_dataset.batch(32)
    test_dataset = test_dataset.batch(32)

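    # Two candidate submodels of different depths.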
    model1 = tf.keras.Sequential([
        tf.keras.layers.Dense(64, activation='relu'),
        tf.keras.layers.Dense(10),
    ])
    model1.compile(
        optimizer=tf.keras.optimizers.Adam(0.01), loss='mse', metrics=['mae'])

    model2 = tf.keras.Sequential([
        tf.keras.layers.Dense(64, activation='relu'),
        tf.keras.layers.Dense(64, activation='relu'),
        tf.keras.layers.Dense(10),
    ])
    model2.compile(
        optimizer=tf.keras.optimizers.Adam(0.01), loss='mse', metrics=['mae'])

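    # Combine the two submodels into a single ensemble model.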
    ensemble = MeanEnsemble(submodels=[model1, model2])
    ensemble.compile(
        optimizer=tf.keras.optimizers.Adam(0.01), loss='mse', metrics=['mae'])

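    # Phases run in order: train the submodels first, then the ensemble.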
    controller = SequentialController(phases=[
        TrainKerasModelsPhase([
            model1,
            model2,
        ], dataset=train_dataset),
        TrainKerasModelsPhase([ensemble], dataset=train_dataset),
    ])

    model_search = ModelSearch(controller)
    model_search.run()
    self.assertIsInstance(
        model_search.get_best_models(num_models=1)[0], MeanEnsemble)
Example #2
    def test_adanet_controller_end_to_end(self):
        train_dataset, test_dataset = testing_utils.get_test_data(
            train_samples=1280,
            test_samples=640,
            input_shape=(10,),
            num_classes=10,
            random_seed=42)

        train_dataset = train_dataset.batch(32)
        test_dataset = test_dataset.batch(32)

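        # The candidate phase generates and trains candidate subnetworks each
        # iteration; the ensemble phase combines them into ensembles.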
        candidate_phase = AdaNetCandidatePhase(
            train_dataset,
            candidates_per_iteration=2,
            optimizer='adam',
            loss='sparse_categorical_crossentropy',
            output_units=10)
        # TODO: Setting candidates_per_iteration for the ensemble phase higher
        # than the candidate phase's value leads to unexpected behavior.
        ensemble_phase = AdaNetEnsemblePhase(
            train_dataset,
            candidates_per_iteration=2,
            optimizer='adam',
            loss='sparse_categorical_crossentropy')

        adanet_controller = AdaNetController(candidate_phase,
                                             ensemble_phase,
                                             iterations=5)

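        # Run the controller for five iterations and keep the best model.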
        model_search = ModelSearch(adanet_controller)
        model_search.run()
        self.assertIsInstance(
            model_search.get_best_models(num_models=1)[0], MeanEnsemble)
Example #3
    def test_tuner_end_to_end(self):
        train_dataset, test_dataset = testing_utils.get_test_data(
            train_samples=128,
            test_samples=64,
            input_shape=(10,),
            num_classes=10,
            random_seed=42)

        # TODO: Consider performing `tf.data.Dataset` transformations
        # within the `get_test_data` function.
        train_dataset = train_dataset.batch(32)
        test_dataset = test_dataset.batch(32)

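        # Keras Tuner model-building function: tunes the hidden layer width and
        # the learning rate.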
        def build_model(hp):
            model = tf.keras.Sequential()
            model.add(
                tf.keras.layers.Dense(units=hp.Int('units',
                                                   min_value=32,
                                                   max_value=512,
                                                   step=32),
                                      activation='relu'))
            model.add(tf.keras.layers.Dense(10, activation='softmax'))
            model.compile(optimizer=tf.keras.optimizers.Adam(
                hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])),
                          loss='sparse_categorical_crossentropy',
                          metrics=['accuracy'])
            return model

        # Define phases.
        tuner = tuners.RandomSearch(build_model,
                                    objective='val_accuracy',
                                    max_trials=3,
                                    executions_per_trial=1,
                                    directory=self.test_subdirectory,
                                    project_name='helloworld')

        tuner_phase = KerasTunerPhase(tuner,
                                      train_dataset,
                                      validation_data=test_dataset)

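        # Ensemble the two best models found by the tuner.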
        def build_ensemble():
            ensemble = MeanEnsemble(submodels=tuner_phase.get_best_models(
                num_models=2))
            ensemble.compile(optimizer=tf.keras.optimizers.Adam(0.01),
                             loss='mse',
                             metrics=['mae'])
            return [ensemble]

        ensemble_phase = TrainKerasModelsPhase(build_ensemble,
                                               dataset=train_dataset)

        controller = SequentialController(phases=[tuner_phase, ensemble_phase])

        # Execute phases.
        model_search = ModelSearch(controller)
        model_search.run()
        self.assertIsInstance(
            model_search.get_best_models(num_models=1)[0], MeanEnsemble)
Example #4
    def test_lifecycle(self, ensemble, want_results, output_units=None):
        train_dataset, test_dataset = testing_utils.get_test_data(
            train_samples=128,
            test_samples=64,
            input_shape=(10,),
            num_classes=2,
            random_seed=42)

        # TODO: Consider performing `tf.data.Dataset` transformations
        # within the `get_test_data` function.
        train_dataset = train_dataset.batch(32).repeat(10)
        test_dataset = test_dataset.batch(32).repeat(10)

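        # Pre-train two submodels, then freeze them before ensembling.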
        model1 = tf.keras.Sequential([
            tf.keras.layers.Dense(64, activation='relu'),
            tf.keras.layers.Dense(64, activation='relu'),
            tf.keras.layers.Dense(2),
        ])
        model1.compile(optimizer=tf.keras.optimizers.Adam(0.01), loss='mse')
        model1.fit(train_dataset)
        model1.trainable = False  # Keep frozen while the ensemble trains.
        model1_pre_train_weights = model1.get_weights()

        model2 = tf.keras.Sequential([
            tf.keras.layers.Dense(64, activation='relu'),
            tf.keras.layers.Dense(64, activation='relu'),
            tf.keras.layers.Dense(2),
        ])
        model2.compile(optimizer=tf.keras.optimizers.Adam(0.01), loss='mse')
        model2.fit(train_dataset)
        model2.trainable = False  # Keep frozen while the ensemble trains.
        model2_pre_train_weights = model2.get_weights()

        if output_units:
            ensemble = ensemble(submodels=[model1, model2],
                                output_units=output_units)
        else:
            ensemble = ensemble(submodels=[model1, model2])
        ensemble.compile(optimizer=tf.keras.optimizers.Adam(0.01),
                         loss='mse',
                         metrics=['mae'])

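        # Train the ensemble; the frozen submodels should keep their weights.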
        ensemble.fit(train_dataset)

        # Make sure submodel weights were not altered during ensemble training.
        model1_post_train_weights = model1.get_weights()
        model2_post_train_weights = model2.get_weights()
        self.assertAllClose(model1_pre_train_weights,
                            model1_post_train_weights)
        self.assertAllClose(model2_pre_train_weights,
                            model2_post_train_weights)

        eval_results = ensemble.evaluate(test_dataset)
        self.assertAllClose(eval_results, want_results)
Example #5
  def test_phases_end_to_end(self):
    train_dataset, test_dataset = testing_utils.get_test_data(
        train_samples=128,
        test_samples=64,
        input_shape=(10,),
        num_classes=10,
        random_seed=42)

    # TODO: Consider performing `tf.data.Dataset` transformations
    # within the `get_test_data` function.
    train_dataset = train_dataset.batch(32)
    test_dataset = test_dataset.batch(32)

    model1 = tf.keras.Sequential([
        tf.keras.layers.Dense(64, activation='relu'),
        tf.keras.layers.Dense(10),
    ])
    model1.compile(
        optimizer=tf.keras.optimizers.Adam(0.01), loss='mse', metrics=['mae'])

    model2 = tf.keras.Sequential([
        tf.keras.layers.Dense(64, activation='relu'),
        tf.keras.layers.Dense(64, activation='relu'),
        tf.keras.layers.Dense(10),
    ])
    model2.compile(
        optimizer=tf.keras.optimizers.Adam(0.01), loss='mse', metrics=['mae'])

    # TODO: The best model in this test could end up being a non-ensemble
    # Keras model. Address that case and remove the freeze_submodels flag.
    ensemble = MeanEnsemble(submodels=[model1, model2], freeze_submodels=False)
    ensemble.compile(
        optimizer=tf.keras.optimizers.Adam(0.01), loss='mse', metrics=['mae'])

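    # The InputPhase supplies the datasets that the trainer phases consume.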
    controller = SequentialController(phases=[
        InputPhase(train_dataset, test_dataset),
        KerasTrainerPhase([model1, model2]),
        KerasTrainerPhase([ensemble]),
    ])

    model_search = ModelSearch(controller)
    model_search.run()
    self.assertIsInstance(
        model_search.get_best_models(num_models=1)[0], MeanEnsemble)