Example #1
def tuner_fn(fn_args: tfx.components.FnArgs) -> tfx.components.TunerFnResult:
    """Build the tuner using the KerasTuner API.

  Args:
    fn_args: Holds args as name/value pairs.
      - working_dir: working dir for tuning.
      - train_files: List of file paths containing training tf.Example data.
      - eval_files: List of file paths containing eval tf.Example data.
      - train_steps: number of train steps.
      - eval_steps: number of eval steps.
      - schema_path: optional schema of the input data.
      - transform_graph_path: optional transform graph produced by TFT.

  Returns:
    A namedtuple contains the following:
      - tuner: A BaseTuner that will be used for tuning.
      - fit_kwargs: Args to pass to tuner's run_trial function for fitting the
                    model , e.g., the training and validation dataset. Required
                    args depend on the above tuner's implementation.
  """
    # RandomSearch is a subclass of keras_tuner.Tuner which inherits from
    # BaseTuner.
    tuner = keras_tuner.RandomSearch(_make_keras_model,
                                     max_trials=6,
                                     hyperparameters=_get_hyperparameters(),
                                     allow_new_entries=False,
                                     objective=keras_tuner.Objective(
                                         'val_sparse_categorical_accuracy',
                                         'max'),
                                     directory=fn_args.working_dir,
                                     project_name='penguin_tuning')

    transform_graph = tft.TFTransformOutput(fn_args.transform_graph_path)

    train_dataset = base.input_fn(fn_args.train_files, fn_args.data_accessor,
                                  transform_graph, base.TRAIN_BATCH_SIZE)

    eval_dataset = base.input_fn(fn_args.eval_files, fn_args.data_accessor,
                                 transform_graph, base.EVAL_BATCH_SIZE)

    return tfx.components.TunerFnResult(tuner=tuner,
                                        fit_kwargs={
                                            'x': train_dataset,
                                            'validation_data': eval_dataset,
                                            'steps_per_epoch':
                                            fn_args.train_steps,
                                            'validation_steps':
                                            fn_args.eval_steps
                                        })
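The helpers `_make_keras_model` and `_get_hyperparameters` are defined elsewhere in the module and are not shown here. A minimal sketch of what they might look like, assuming a small feed-forward classifier whose validation metric matches the `val_sparse_categorical_accuracy` objective (layer sizes, shapes, and defaults are illustrative, not the project's actual code):

import keras_tuner
import tensorflow as tf


def _get_hyperparameters() -> keras_tuner.HyperParameters:
    # Pre-declare the search space; allow_new_entries=False means the model
    # function must not introduce hyperparameters beyond these.
    hp = keras_tuner.HyperParameters()
    hp.Choice('learning_rate', [1e-2, 1e-3], default=1e-2)
    hp.Int('num_layers', 1, 3, default=2)
    return hp


def _make_keras_model(hp: keras_tuner.HyperParameters) -> tf.keras.Model:
    # Build a small classifier from the declared hyperparameters.
    inputs = tf.keras.Input(shape=(4,))
    x = inputs
    for _ in range(int(hp.get('num_layers'))):
        x = tf.keras.layers.Dense(8, activation='relu')(x)
    outputs = tf.keras.layers.Dense(3, activation='softmax')(x)
    model = tf.keras.Model(inputs, outputs)
    model.compile(
        optimizer=tf.keras.optimizers.Adam(hp.get('learning_rate')),
        loss='sparse_categorical_crossentropy',
        # Produces 'val_sparse_categorical_accuracy' on validation data, which
        # matches the tuning objective above.
        metrics=['sparse_categorical_accuracy'])
    return model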
Example #2
def test_tunable_false_hypermodel(tmp_dir):
    def build_model(hp):
        input_shape = (256, 256, 3)
        inputs = tf.keras.Input(shape=input_shape)

        with hp.name_scope("xception"):
            # Tune the pooling of Xception by supplying the search space
            # beforehand.
            hp.Choice("pooling", ["avg", "max"])
            xception = keras_tuner.applications.HyperXception(
                include_top=False, input_shape=input_shape,
                tunable=False).build(hp)
        x = xception(inputs)

        x = tf.keras.layers.Dense(hp.Int("hidden_units", 50, 100, step=10),
                                  activation="relu")(x)
        outputs = tf.keras.layers.Dense(NUM_CLASSES, activation="softmax")(x)

        model = tf.keras.Model(inputs, outputs)

        optimizer = tf.keras.optimizers.get(
            hp.Choice("optimizer", ["adam", "sgd"]))
        optimizer.learning_rate = hp.Float("learning_rate",
                                           1e-4,
                                           1e-2,
                                           sampling="log")

        model.compile(optimizer, loss="sparse_categorical_crossentropy")
        return model

    tuner = keras_tuner.RandomSearch(objective="val_loss",
                                     hypermodel=build_model,
                                     max_trials=4,
                                     directory=tmp_dir)

    x = np.random.random(size=(2, 256, 256, 3))
    y = np.random.randint(0, NUM_CLASSES, size=(2, ))

    tuner.search(x, y, validation_data=(x, y), batch_size=2)

    hps = tuner.oracle.get_space()
    assert "xception/pooling" in hps
    assert "hidden_units" in hps
    assert "optimizer" in hps
    assert "learning_rate" in hps

    # Make sure no HPs from building xception were added.
    assert len(hps.space) == 4
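Because the nested `HyperXception` is built with `tunable=False`, only the entries registered in the outer `hp` beforehand (here `xception/pooling`) take part in the search; everything else inside the sub-model falls back to its defaults. As a variation, the same value can be pinned rather than searched. A small sketch, intended as a drop-in for the `with hp.name_scope("xception"):` block inside `build_model` above (it relies on the `hp` and `input_shape` defined there):

        with hp.name_scope("xception"):
            # Pin pooling to one value instead of searching over it; the
            # non-tunable HyperXception reads the value from `hp` at build().
            hp.Fixed("pooling", "avg")
            xception = keras_tuner.applications.HyperXception(
                include_top=False, input_shape=input_shape,
                tunable=False).build(hp)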
Example #3
def tuner_fn(fn_args: FnArgs) -> TunerFnResult:
    """Build the tuner using the KerasTuner API.

  Args:
    fn_args: Holds args as name/value pairs.
      - working_dir: working dir for tuning.
      - train_files: List of file paths containing training tf.Example data.
      - eval_files: List of file paths containing eval tf.Example data.
      - train_steps: number of train steps.
      - eval_steps: number of eval steps.
      - schema_path: optional schema of the input data.
      - transform_graph_path: optional transform graph produced by TFT.

  Returns:
    A namedtuple contains the following:
      - tuner: A BaseTuner that will be used for tuning.
      - fit_kwargs: Args to pass to tuner's run_trial function for fitting the
                    model , e.g., the training and validation dataset. Required
                    args depend on the above tuner's implementation.
  """
    hp = keras_tuner.HyperParameters()
    # Defines search space.
    hp.Choice('learning_rate', [1e-1, 1e-3])
    hp.Int('num_layers', 1, 5)

    # RandomSearch is a subclass of the KerasTuner Tuner class.
    tuner = keras_tuner.RandomSearch(
        _build_keras_model,
        max_trials=3,
        hyperparameters=hp,
        allow_new_entries=False,
        objective='val_sparse_categorical_accuracy',
        directory=fn_args.working_dir,
        project_name='test')

    schema = schema_pb2.Schema()
    io_utils.parse_pbtxt_file(fn_args.schema_path, schema)
    train_dataset = _input_fn(fn_args.train_files, fn_args.data_accessor,
                              schema)
    eval_dataset = _input_fn(fn_args.eval_files, fn_args.data_accessor, schema)

    return TunerFnResult(tuner=tuner,
                         fit_kwargs={
                             'x': train_dataset,
                             'validation_data': eval_dataset,
                             'steps_per_epoch': fn_args.train_steps,
                             'validation_steps': fn_args.eval_steps
                         })
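`_build_keras_model` is defined elsewhere in the module. Since the tuner is created with `allow_new_entries=False`, the build function may only consume the two entries declared above; re-declaring them with the same names simply reuses the existing entries, while declaring any new name would be rejected. A minimal illustrative sketch (not the actual TFX test code):

import keras_tuner
import tensorflow as tf


def _build_keras_model(hp: keras_tuner.HyperParameters) -> tf.keras.Model:
    # 'learning_rate' and 'num_layers' reuse the entries declared in tuner_fn;
    # requesting a new name here would fail because allow_new_entries=False.
    model = tf.keras.Sequential()
    for _ in range(hp.Int('num_layers', 1, 5)):
        model.add(tf.keras.layers.Dense(8, activation='relu'))
    model.add(tf.keras.layers.Dense(3, activation='softmax'))
    model.compile(
        optimizer=tf.keras.optimizers.Adam(hp.Choice('learning_rate', [1e-1, 1e-3])),
        loss='sparse_categorical_crossentropy',
        metrics=['sparse_categorical_accuracy'])
    return model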
Example #4
def get_model(self):
    if self.model == 'dense':
        return self.__dense()
    elif self.model == 'dense_tied':
        return self.__dense_tied()
    elif self.model == 'densebin':
        return self.__dense_with_constraints()
    elif self.model == 'conv_simple':
        return self.__conv_simple()
    elif self.model == 'conv_vae':
        return self.__conv_vae()
    elif self.model == 'conv_simple_test':
        return self.__conv_simple_test()
    elif self.model == 'conv_simple_tune':
        # Note: unlike the branches above, this returns a KerasTuner tuner
        # rather than a built model.
        tuner = kt.RandomSearch(self.__conv_simple_tune,
                                objective='val_loss',
                                max_trials=30)
        return tuner
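Since the `'conv_simple_tune'` branch returns a `RandomSearch` tuner while every other branch returns a built model, callers need to handle both cases. A hedged sketch of one way to do that; the `fit_or_tune` helper, its arguments, and the training settings are illustrative, not part of the original project:

import keras_tuner as kt


def fit_or_tune(builder, x_train, y_train):
    """Train directly or run a search, depending on what get_model() returns."""
    result = builder.get_model()
    if isinstance(result, kt.RandomSearch):
        # Tuning branch: run the search, then keep the best model found.
        result.search(x_train, y_train, validation_split=0.2, epochs=10)
        return result.get_best_models(num_models=1)[0]
    # Every other branch already returns a ready-to-train model.
    result.fit(x_train, y_train, validation_split=0.2, epochs=10)
    return result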
Example #5
def get_tuner(cfg_hypertune, model_builder, outdir, recreate, strategy):
    import keras_tuner as kt

    if cfg_hypertune["algorithm"] == "random":
        print("Keras Tuner: Using RandomSearch")
        cfg_rand = cfg_hypertune["random"]
        return kt.RandomSearch(
            model_builder,
            objective=cfg_rand["objective"],
            max_trials=cfg_rand["max_trials"],
            project_name=outdir,
            overwrite=recreate,
        )
    elif cfg_hypertune["algorithm"] == "bayesian":
        print("Keras Tuner: Using BayesianOptimization")
        cfg_bayes = cfg_hypertune["bayesian"]
        return kt.BayesianOptimization(
            model_builder,
            objective=cfg_bayes["objective"],
            max_trials=cfg_bayes["max_trials"],
            num_initial_points=cfg_bayes["num_initial_points"],
            project_name=outdir,
            overwrite=recreate,
        )
    elif cfg_hypertune["algorithm"] == "hyperband":
        print("Keras Tuner: Using Hyperband")
        cfg_hb = cfg_hypertune["hyperband"]
        return kt.Hyperband(
            model_builder,
            objective=cfg_hb["objective"],
            max_epochs=cfg_hb["max_epochs"],
            factor=cfg_hb["factor"],
            hyperband_iterations=cfg_hb["iterations"],
            directory=outdir + "/tb",
            project_name="mlpf",
            overwrite=recreate,
            executions_per_trial=cfg_hb["executions_per_trial"],
            distribution_strategy=strategy,
        )
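A sketch of how `get_tuner` might be invoked; the configuration keys mirror the lookups above, while the model builder, the concrete values, and the output path are illustrative:

import tensorflow as tf


def my_model_builder(hp):
    # Minimal build function for illustration only.
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(hp.Int("units", 8, 32, step=8), activation="relu"),
        tf.keras.layers.Dense(1),
    ])
    model.compile(optimizer="adam", loss="mse")
    return model


cfg_hypertune = {
    "algorithm": "hyperband",
    "hyperband": {
        "objective": "val_loss",
        "max_epochs": 20,
        "factor": 3,
        "iterations": 1,
        "executions_per_trial": 1,
    },
}

tuner = get_tuner(
    cfg_hypertune,
    my_model_builder,
    outdir="experiments/run1",          # illustrative output directory
    recreate=True,                      # overwrite any previous results
    strategy=tf.distribute.MirroredStrategy(),
)
tuner.search_space_summary()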
Example #6
def test_get_best_hyperparameters(tmp_dir):
    hp1 = keras_tuner.HyperParameters()
    hp1.Fixed("a", 1)
    trial1 = keras_tuner.engine.trial.Trial(hyperparameters=hp1)
    trial1.status = "COMPLETED"
    trial1.score = 10

    hp2 = keras_tuner.HyperParameters()
    hp2.Fixed("a", 2)
    trial2 = keras_tuner.engine.trial.Trial(hyperparameters=hp2)
    trial2.status = "COMPLETED"
    trial2.score = 9

    tuner = keras_tuner.RandomSearch(
        objective="val_accuracy",
        hypermodel=build_model,
        max_trials=2,
        directory=tmp_dir,
    )

    tuner.oracle.trials = {trial1.trial_id: trial1, trial2.trial_id: trial2}

    hps = tuner.get_best_hyperparameters()[0]
    assert hps["a"] == 1
Example #7
def tunertrain(d, n_input, epochs=50):
    def build_model(hp, n_input):
        model = keras.Sequential()
        model.add(
            keras.layers.Dense(hp.Choice('units', [8, 16, 24, 32, 64]),
                               activation=hp.Choice('activation',
                                                    ['linear', 'relu']),
                               input_shape=(1, n_input)))
        model.add(keras.layers.Dense(1, activation='linear'))
        model.compile(loss='mse')
        return model

    p_build = partial(build_model, n_input=n_input)
    tuner = kt.RandomSearch(p_build,
                            objective='val_loss',
                            overwrite=True,
                            max_trials=100)

    tuner.search(d['x_train'],
                 d['y_train'],
                 epochs=epochs,
                 validation_data=(d['x_val'], d['y_val']))
    # results_summary() prints its report directly and returns None.
    tuner.results_summary()
    best_model = tuner.get_best_models()[0]
    return best_model
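An alternative to binding `n_input` with `functools.partial` is to subclass `keras_tuner.HyperModel` and pass the static argument through the constructor; a sketch using the same illustrative layer sizes (the `n_input=4` value is a placeholder):

from tensorflow import keras
import keras_tuner as kt


class DenseHyperModel(kt.HyperModel):
    def __init__(self, n_input):
        super().__init__()
        self.n_input = n_input

    def build(self, hp):
        model = keras.Sequential()
        model.add(keras.layers.Dense(
            hp.Choice('units', [8, 16, 24, 32, 64]),
            activation=hp.Choice('activation', ['linear', 'relu']),
            input_shape=(1, self.n_input)))
        model.add(keras.layers.Dense(1, activation='linear'))
        model.compile(loss='mse')
        return model


tuner = kt.RandomSearch(DenseHyperModel(n_input=4),
                        objective='val_loss',
                        overwrite=True,
                        max_trials=100)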
Example #8
def main():
    # Quick Introduction
    # Write a function that creates and returns a Keras model. Use the
    # hp argument to define the hyperparameters during model creation.
    def build_model(hp):
        model = keras.Sequential()
        model.add(
            keras.layers.Dense(hp.Choice("units", [8, 16, 32]),
                               activation="relu"))
        model.add(keras.layers.Dense(1, activation="relu"))
        model.compile(loss="mse")
        return model

    # Initialize a tuner (here, RandomSearch). Use objective to specify
    # the objective to select the best models, and use max_trials to
    # specify the number of different models to try.
    tuner = kt.RandomSearch(build_model, objective="val_loss", max_trials=5)

    # Start the search and get the best model.
    tuner.search(x_train, y_train, epochs=5, validation_data=(x_val, y_val))
    best_model = tuner.get_best_models()[0]

    # Exit the program.
    exit(0)
Example #9
        return best_epoch_loss


"""
Now, we can initialize the tuner. Here, we use `Objective("my_metric", "min")`
as our metric to be minimized. The objective name should be consistent
with the one you use as the key in the
`logs` passed to the 'on_epoch_end()' method of the callbacks.
The callbacks need to use
this value in the `logs` to find the best epoch to checkpoint the model.

"""
tuner = kt.RandomSearch(
    objective=kt.Objective("my_metric", "min"),
    max_trials=2,
    hypermodel=MyHyperModel(),
    directory="results",
    project_name="custom_training",
    overwrite=True,
)
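"""
For reference, a hedged sketch of how a custom `fit()` might report `my_metric`
to the callbacks. The class below is illustrative only (it is not the
tutorial's `MyHyperModel`); the key point is that each epoch calls
`on_epoch_end()` with `my_metric` as the `logs` key and that the final value is
returned so the tuner can record it.
"""

from tensorflow import keras


class SketchHyperModel(kt.HyperModel):
    def build(self, hp):
        model = keras.Sequential([
            keras.layers.Dense(hp.Int("units", 8, 32, step=8), activation="relu"),
            keras.layers.Dense(1),
        ])
        model.compile(optimizer="adam", loss="mse")
        return model

    def fit(self, hp, model, x, y, validation_data, callbacks=None, **kwargs):
        callbacks = callbacks or []
        x_val, y_val = validation_data
        for callback in callbacks:
            # The tuner's checkpointing callbacks need access to the model.
            callback.set_model(model)
        best_epoch_loss = float("inf")
        for epoch in range(2):
            model.fit(x, y, epochs=1, verbose=0)  # stand-in for a custom loop
            epoch_loss = float(model.evaluate(x_val, y_val, verbose=0))
            for callback in callbacks:
                # The key must match the objective name passed to the tuner.
                callback.on_epoch_end(epoch, logs={"my_metric": epoch_loss})
            best_epoch_loss = min(best_epoch_loss, epoch_loss)
        # Return the objective value; the tuner records it for this trial.
        return best_epoch_loss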
"""
We start the search by passing the arguments we defined in the signature of
`MyHyperModel.fit()` to `tuner.search()`.
"""

tuner.search(x=x_train, y=y_train, validation_data=(x_val, y_val))
"""
Finally, we can retrieve the results.
"""

best_hps = tuner.get_best_hyperparameters()[0]
print(best_hps.values)
        decay_rate=hp.Choice("decay_rate", [0.5, 0.75, 0.95]),
        staircase=True,
    )

    model.compile(
        loss="sparse_categorical_crossentropy",
        optimizer=tf.keras.optimizers.RMSprop(learning_rate=lr_schedule),
        metrics=["sparse_categorical_accuracy"],
    )
    return model


tuner = keras_tuner.RandomSearch(
    build_model,
    objective="val_sparse_categorical_accuracy",
    max_trials=5,
    executions_per_trial=3,
    directory="test_dir",
)

tuner.search_space_summary()

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()
train_dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))
test_dataset = tf.data.Dataset.from_tensor_slices((x_test, y_test))
BUFFER_SIZE = 10000
BATCH_SIZE = 64


def scale(image, label):
    image = tf.cast(image, tf.float32)
Example #11
In the following example, we only tune the `learning_rate` hyperparameter, and
change its type and value range.
"""

hp = kt.HyperParameters()

# This will override the `learning_rate` parameter with your
# own selection of choices
hp.Float("learning_rate", min_value=1e-4, max_value=1e-2, sampling="log")

tuner = kt.RandomSearch(
    hypermodel=build_model,
    hyperparameters=hp,
    # Prevents unlisted parameters from being tuned
    tune_new_entries=False,
    objective="val_accuracy",
    max_trials=3,
    overwrite=True,
    directory="my_dir",
    project_name="search_a_few",
)

# Generate random data
x_train = np.random.rand(100, 28, 28, 1)
y_train = np.random.randint(0, 10, (100, 1))
x_val = np.random.rand(20, 28, 28, 1)
y_val = np.random.randint(0, 10, (20, 1))

# Run the search
tuner.search(x_train, y_train, epochs=1, validation_data=(x_val, y_val))
"""
Example #12
# Do the same for the MLP model.
hp.values["model_type"] = "mlp"
model = build_model(hp)
model(x_train[:100])
model.summary()
"""
Initialize the `RandomSearch` tuner with 10 trials and using validation
accuracy as the metric for selecting models.
"""

tuner = kt.RandomSearch(
    build_model,
    max_trials=10,
    # Do not resume the previous search in the same directory.
    overwrite=True,
    objective="val_accuracy",
    # Set a directory to store the intermediate results.
    directory="/tmp/tb",
)
"""
Start the search by calling `tuner.search(...)`. To use TensorBoard, we need
to pass a `keras.callbacks.TensorBoard` instance to the callbacks.
"""

tuner.search(
    x_train,
    y_train,
    validation_split=0.2,
    epochs=2,
    # Use the TensorBoard callback.