Example 1
    def testCloudTunerHyperparameters(self):
        """Test case to configure Tuner with HyperParameters object."""
        study_id = "{}_hyperparameters".format(_STUDY_ID_BASE)
        self._study_id = study_id

        tuner = CloudTuner(
            _build_model,
            project_id=_PROJECT_ID,
            region=_REGION,
            objective="acc",
            hyperparameters=_HPS,
            max_trials=5,
            study_id=study_id,
            directory=os.path.join(self.get_temp_dir(), study_id),
        )

        # "Search space summary" comes first, but the order of
        # "learning_rate (Float)" and "num_layers (Int)" is not deterministic,
        # hence they are wrapped as look-ahead assertions in the regex.
        self._assert_output(
            tuner.search_space_summary,
            r".*Search space summary(?=.*learning_rate \(Float\))"
            r"(?=.*num_layers \(Int\).*)",
        )

        tuner.search(
            x=self._x,
            y=self._y,
            epochs=2,
            steps_per_epoch=20,
            validation_steps=10,
            validation_data=(self._val_x, self._val_y),
        )

        self._assert_results_summary(tuner.results_summary)
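The test above relies on the module-level helpers _build_model and _HPS, which are not shown. Below is a minimal sketch of what they might look like, assuming an MNIST-style classifier and a search space containing the learning_rate (Float) and num_layers (Int) entries that the summary regex asserts; it is an illustration, not the original test code.

# Hypothetical sketch of the helpers referenced above (not part of the
# original test module): a HyperParameters search space matching the
# learning_rate (Float) and num_layers (Int) entries asserted in the
# summary regex, and a model-building function that consumes it.
import kerastuner
import tensorflow as tf

_HPS = kerastuner.HyperParameters()
_HPS.Float("learning_rate", min_value=1e-4, max_value=1e-2, sampling="log")
_HPS.Int("num_layers", 2, 10)


def _build_model(hparams):
    model = tf.keras.Sequential()
    model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))
    for _ in range(hparams.get("num_layers")):
        model.add(tf.keras.layers.Dense(64, activation="relu"))
    model.add(tf.keras.layers.Dense(10, activation="softmax"))
    model.compile(
        optimizer=tf.keras.optimizers.Adam(hparams.get("learning_rate")),
        loss="sparse_categorical_crossentropy",
        metrics=["acc"],
    )
    return model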
Example 2
def _dist_search_fn(temp_dir, study_id, tuner_id):
    """Multi-process safe tuner instantiation and tuner.search()."""

    # Stagger tuner instantiation to avoid contention on study creation
    # and dataset download.
    time.sleep(int(tuner_id[5:]))  # tuner_id is formatted as 'tuner%d'

    # Dataset must be loaded independently in sub-process.
    (x, y), (val_x, val_y) = _load_data(temp_dir)

    tuner = CloudTuner(
        _build_model,
        project_id=_PROJECT_ID,
        region=_REGION,
        objective="acc",
        hyperparameters=_HPS,
        max_trials=5,
        study_id=study_id,
        directory=os.path.join(temp_dir, study_id, tuner_id),
    )
    tuner.tuner_id = tuner_id

    tuner.search(
        x=x,
        y=y,
        epochs=2,
        steps_per_epoch=20,
        validation_steps=10,
        validation_data=(val_x, val_y),
    )
    return tuner
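_dist_search_fn is meant to be launched once per worker process, with all workers sharing the same study_id. Below is a minimal driver sketch, assuming Python's multiprocessing module and three workers; the helper name _run_distributed_search is a placeholder, not part of the original example.

# Hypothetical driver (not in the original example): start several tuner
# workers against the same study so CloudTuner coordinates their trials
# through the shared Vizier study.
import multiprocessing


def _run_distributed_search(temp_dir, study_id, num_workers=3):
    processes = []
    for i in range(num_workers):
        # tuner_id must follow the 'tuner%d' convention expected above.
        p = multiprocessing.Process(
            target=_dist_search_fn,
            args=(temp_dir, study_id, "tuner{}".format(i)),
        )
        p.start()
        processes.append(p)
    for p in processes:
        p.join()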
Example 3
    def testCloudTunerStudyConfig(self):
        """Test case to configure Tuner with StudyConfig object."""
        # Configure the search space. Specification:
        # https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#StudyConfig  # pylint: disable=line-too-long
        study_config = {
            "metrics": [{"goal": "MAXIMIZE", "metric": "acc"}],
            "parameters": [
                {
                    "discrete_value_spec": {"values": [0.0001, 0.001, 0.01]},
                    "parameter": "learning_rate",
                    "type": "DISCRETE",
                },
                {
                    "integer_value_spec": {"max_value": 10, "min_value": 2},
                    "parameter": "num_layers",
                    "type": "INTEGER",
                },
                {
                    "discrete_value_spec": {"values": [32, 64, 96, 128]},
                    "parameter": "units",
                    "type": "DISCRETE",
                },
            ],
            "algorithm": "ALGORITHM_UNSPECIFIED",
            "automatedStoppingConfig": {
                "decayCurveStoppingConfig": {"useElapsedTime": True}
            },
        }

        study_id = "{}_study_config".format(_STUDY_ID_BASE)
        self._study_id = study_id

        tuner = CloudTuner(
            _build_model,
            project_id=_PROJECT_ID,
            region=_REGION,
            study_config=study_config,
            study_id=study_id,
            max_trials=5,
            directory=os.path.join(self.get_temp_dir(), study_id),
        )

        self._assert_output(
            tuner.search_space_summary,
            r".*Search space summary(?=.*learning_rate \(Choice\))"
            r"(?=.*num_layers \(Int\))(?=.*units \(Int\))",
        )

        tuner.search(
            x=self._x,
            y=self._y,
            epochs=2,
            steps_per_epoch=20,
            validation_steps=10,
            validation_data=(self._val_x, self._val_y),
        )

        self._assert_results_summary(tuner.results_summary)
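For comparison with Example 1, the StudyConfig above corresponds roughly to the following kerastuner.HyperParameters space. This is only an illustration of the mapping implied by the test's regex assertions (DISCRETE learning_rate reported as Choice, INTEGER and the discrete units range reported as Int); it is not code from the test.

# Illustration only: an approximately equivalent search space expressed as
# a kerastuner.HyperParameters object instead of a Vizier StudyConfig dict.
import kerastuner

hps = kerastuner.HyperParameters()
hps.Choice("learning_rate", [0.0001, 0.001, 0.01])  # DISCRETE -> Choice
hps.Int("num_layers", 2, 10)                        # INTEGER  -> Int
hps.Int("units", 32, 128, step=32)                  # DISCRETE 32..128 -> Int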
Example 4
def tuner_fn(fn_args: FnArgs) -> TunerFnResult:
    """Build the tuner using the CloudTuner API.

  Args:
    fn_args: Holds args as name/value pairs. See
      https://www.tensorflow.org/tfx/api_docs/python/tfx/components/trainer/fn_args_utils/FnArgs.
      - transform_graph_path: optional transform graph produced by TFT.
      - custom_config: An optional dictionary passed to the component. In this
        example, it contains the dict ai_platform_tuning_args.
      - working_dir: working dir for tuning.
      - train_files: List of file paths containing training tf.Example data.
      - eval_files: List of file paths containing eval tf.Example data.
      - train_steps: number of train steps.
      - eval_steps: number of eval steps.

  Returns:
    A namedtuple contains the following:
      - tuner: A BaseTuner that will be used for tuning.
      - fit_kwargs: Args to pass to tuner's run_trial function for fitting the
                    model , e.g., the training and validation dataset. Required
                    args depend on the above tuner's implementation.
  """
    transform_graph = tft.TFTransformOutput(fn_args.transform_graph_path)

    # CloudTuner is a subclass of kerastuner.Tuner which inherits from
    # BaseTuner.
    tuner = CloudTuner(
        _build_keras_model,
        # The project/region configuration for the Cloud Vizier service and
        # its trial executions. Note: this example uses the same configuration
        # as the CAIP Training service for distributed tuning flock management,
        # so all of the pipeline's jobs and resources appear in the same
        # project. It can also be configured separately.
        project_id=fn_args.custom_config['ai_platform_tuning_args']['project'],
        region=fn_args.custom_config['ai_platform_tuning_args']['region'],
        objective=kerastuner.Objective('val_sparse_categorical_accuracy',
                                       'max'),
        hyperparameters=_get_hyperparameters(),
        max_trials=8,  # Optional.
        directory=fn_args.working_dir)

    train_dataset = _input_fn(fn_args.train_files,
                              fn_args.data_accessor,
                              transform_graph,
                              batch_size=_TRAIN_BATCH_SIZE)

    eval_dataset = _input_fn(fn_args.eval_files,
                             fn_args.data_accessor,
                             transform_graph,
                             batch_size=_EVAL_BATCH_SIZE)

    return TunerFnResult(tuner=tuner,
                         fit_kwargs={
                             'x': train_dataset,
                             'validation_data': eval_dataset,
                             'steps_per_epoch': fn_args.train_steps,
                             'validation_steps': fn_args.eval_steps
                         })
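A tuner_fn like this is normally referenced from a TFX Tuner component rather than called directly. Below is a minimal wiring sketch in which _tuner_module_file, transform, and _ai_platform_tuning_args are placeholders for objects defined elsewhere in the pipeline.

# Hypothetical pipeline wiring for the tuner_fn above; _tuner_module_file,
# transform, and _ai_platform_tuning_args are placeholders, not part of
# the original example.
from tfx.components import Tuner
from tfx.proto import trainer_pb2

tuner = Tuner(
    module_file=_tuner_module_file,  # Module containing tuner_fn above.
    examples=transform.outputs['transformed_examples'],
    transform_graph=transform.outputs['transform_graph'],
    train_args=trainer_pb2.TrainArgs(num_steps=20),
    eval_args=trainer_pb2.EvalArgs(num_steps=5),
    custom_config={'ai_platform_tuning_args': _ai_platform_tuning_args})

For distributed tuning on AI Platform, the Cloud AI Platform extension's Tuner component (tfx.extensions.google_cloud_ai_platform.tuner.component.Tuner) may be used in place of the standard one so that ai_platform_tuning_args actually drives the tuning flock.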
Example 5
    def testCloudTunerDatasets(self):
        """Test case to configure Tuner with tf.data.Dataset as input data."""
        train_dataset = (
            tf.data.Dataset.from_tensor_slices((self._x, self._y))
            .batch(128)
            .cache()
            .prefetch(1000)
        )
        eval_dataset = (
            tf.data.Dataset.from_tensor_slices((self._val_x, self._val_y))
            .batch(128)
            .cache()
            .prefetch(1000)
        )

        study_id = "{}_dataset".format(_STUDY_ID_BASE)
        self._study_id = study_id

        tuner = CloudTuner(
            _build_model,
            project_id=_PROJECT_ID,
            region=_REGION,
            objective="acc",
            hyperparameters=_HPS,
            study_id=study_id,
            max_trials=5,
            directory=os.path.join(self.get_temp_dir(), study_id),
        )

        self._assert_output(
            tuner.search_space_summary,
            r".*Search space summary(?=.*learning_rate \(Float\))"
            r"(?=.*num_layers \(Int\).*)",
        )

        tuner.search(
            x=train_dataset,
            epochs=2,
            steps_per_epoch=20,
            validation_steps=10,
            validation_data=eval_dataset,
        )

        self._assert_results_summary(tuner.results_summary)
Example 6
def tuner_fn(fn_args: TrainerFnArgs) -> TunerFnResult:
    """Build the tuner using CloudTuner (KerasTuner instance).
  Args:
    fn_args: Holds args as name/value pairs.
      - working_dir: working dir for tuning.
      - train_files: List of file paths containing training tf.Example data.
      - eval_files: List of file paths containing eval tf.Example data.
      - train_steps: number of train steps.
      - eval_steps: number of eval steps.
      - schema_path: optional schema of the input data.
      - transform_graph_path: optional transform graph produced by TFT.
  Returns:
    A namedtuple contains the following:
      - tuner: A BaseTuner that will be used for tuning.
      - fit_kwargs: Args to pass to tuner's run_trial function for fitting the
                    model , e.g., the training and validation dataset. Required
                    args depend on the above tuner's implementation.
  """
    transform_graph = tft.TFTransformOutput(fn_args.transform_graph_path)

    # Construct a build_keras_model_fn that takes only hyperparameters as
    # input, with the transform graph bound via functools.partial.
    build_keras_model_fn = functools.partial(
        _build_keras_model, tf_transform_output=transform_graph)

    # CloudTuner is a subclass of kerastuner.Tuner which inherits from BaseTuner.
    tuner = CloudTuner(build_keras_model_fn,
                       project_id=Config.PROJECT_ID,
                       region=Config.GCP_REGION,
                       max_trials=50,
                       hyperparameters=_get_hyperparameters(),
                       objective=kerastuner.Objective(
                           'val_sparse_categorical_accuracy', 'max'),
                       directory=fn_args.working_dir)

    train_dataset = _input_fn(fn_args.train_files,
                              fn_args.data_accessor,
                              transform_graph,
                              batch_size=TRAIN_BATCH_SIZE)

    eval_dataset = _input_fn(fn_args.eval_files,
                             fn_args.data_accessor,
                             transform_graph,
                             batch_size=EVAL_BATCH_SIZE)

    return TunerFnResult(tuner=tuner,
                         fit_kwargs={
                             'x': train_dataset,
                             'validation_data': eval_dataset,
                             'steps_per_epoch': fn_args.train_steps,
                             'validation_steps': fn_args.eval_steps
                         })
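Config.PROJECT_ID and Config.GCP_REGION come from a settings object that is not shown here; below is a placeholder sketch with example values only.

# Placeholder for the Config object referenced above; values are examples
# only and would come from the deployment's own settings module.
class Config:
    PROJECT_ID = 'my-gcp-project'
    GCP_REGION = 'us-central1'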
Example 7
def tuner_fn(fn_args: TrainerFnArgs) -> TunerFnResult:
  """Build the tuner using CloudTuner (KerasTuner instance).
  Args:
    fn_args: Holds args used to train and tune the model as name/value pairs. See 
      https://www.tensorflow.org/tfx/api_docs/python/tfx/components/trainer/fn_args_utils/FnArgs.
  Returns:
    A namedtuple contains the following:
      - tuner: A BaseTuner that will be used for tuning.
      - fit_kwargs: Args to pass to tuner's run_trial function for fitting the
                    model , e.g., the training and validation dataset. Required
                    args depend on the above tuner's implementation.
  """
  transform_graph = tft.TFTransformOutput(fn_args.transform_graph_path)
  
  # Construct a build_keras_model_fn that takes only hyperparameters as input,
  # with the transform graph bound via functools.partial.
  build_keras_model_fn = functools.partial(
      _build_keras_model, tf_transform_output=transform_graph)

  # CloudTuner is a subclass of kerastuner.Tuner which inherits from BaseTuner.
  tuner = CloudTuner(
      build_keras_model_fn,
      project_id=fn_args.custom_config['ai_platform_training_args']['project'],
      region=fn_args.custom_config['ai_platform_training_args']['region'],      
      max_trials=50,
      hyperparameters=_get_hyperparameters(),
      objective=kerastuner.Objective('val_sparse_categorical_accuracy', 'max'),
      directory=fn_args.working_dir)
  
  train_dataset = _input_fn(
      fn_args.train_files,
      fn_args.data_accessor,
      transform_graph,
      batch_size=TRAIN_BATCH_SIZE)

  eval_dataset = _input_fn(
      fn_args.eval_files,
      fn_args.data_accessor,
      transform_graph,
      batch_size=EVAL_BATCH_SIZE)

  return TunerFnResult(
      tuner=tuner,
      fit_kwargs={
          'x': train_dataset,
          'validation_data': eval_dataset,
          'steps_per_epoch': fn_args.train_steps,
          'validation_steps': fn_args.eval_steps
      })
Example 8
def tuner_fn(fn_args: FnArgs) -> TunerFnResult:
  """Build the tuner using the CloudTuner API.

  Args:
    fn_args: Holds args as name/value pairs.
      - working_dir: working dir for tuning.
      - train_files: List of file paths containing training tf.Example data.
      - eval_files: List of file paths containing eval tf.Example data.
      - train_steps: number of train steps.
      - eval_steps: number of eval steps.
      - schema_path: optional schema of the input data.
      - transform_graph_path: optional transform graph produced by TFT.

  Returns:
    A namedtuple containing the following:
      - tuner: A BaseTuner that will be used for tuning.
      - fit_kwargs: Args to pass to the tuner's run_trial function for fitting
        the model, e.g., the training and validation datasets. Required args
        depend on the tuner's implementation.
  """
  # CloudTuner is a subclass of kerastuner.Tuner which inherits from
  # BaseTuner.
  tuner = CloudTuner(
      _build_keras_model,
      project_id=_PROJECT_ID,
      region=_REGION,
      objective=kerastuner.Objective('val_sparse_categorical_accuracy', 'max'),
      hyperparameters=_get_hyperparameters(),
      max_trials=8,  # Optional.
      directory=fn_args.working_dir)

  transform_graph = tft.TFTransformOutput(fn_args.transform_graph_path)
  train_dataset = _input_fn(fn_args.train_files, transform_graph)
  eval_dataset = _input_fn(fn_args.eval_files, transform_graph)
  return TunerFnResult(
      tuner=tuner,
      fit_kwargs={
          'x': train_dataset,
          'validation_data': eval_dataset,
          'steps_per_epoch': fn_args.train_steps,
          'validation_steps': fn_args.eval_steps
      })
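In this variant _input_fn takes only the file pattern and the TFTransformOutput. Below is a rough sketch of such a helper, assuming gzipped TFRecord files of transformed tf.Example protos and a module-level _LABEL_KEY constant; both are assumptions, not shown in the original.

# Hypothetical _input_fn matching the two-argument call sites above.
import tensorflow as tf


def _input_fn(file_pattern, tf_transform_output, batch_size=32):
    # Build a batched dataset of transformed features, split off the label.
    transformed_feature_spec = (
        tf_transform_output.transformed_feature_spec().copy())
    return tf.data.experimental.make_batched_features_dataset(
        file_pattern=file_pattern,
        batch_size=batch_size,
        features=transformed_feature_spec,
        reader=lambda files: tf.data.TFRecordDataset(
            files, compression_type='GZIP'),
        label_key=_LABEL_KEY)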
Example 9
def tuner_fn(fn_args: FnArgs) -> TunerFnResult:
    """Build the tuner using CloudTuner (KerasTuner instance).
    Args:
      fn_args: Holds args used to train and tune the model as name/value pairs. See
        https://www.tensorflow.org/tfx/api_docs/python/tfx/components/trainer/fn_args_utils/FnArgs.
    Returns:
      A namedtuple contains the following:
        - tuner: A BaseTuner that will be used for tuning.
        - fit_kwargs: Args to pass to tuner's run_trial function for fitting the
                      model , e.g., the training and validation dataset. Required
                      args depend on the above tuner's implementation.
    """
    custom_config_dict = _get_custom_config_dict(fn_args)

    max_trials = custom_config_dict.get('max_trials', MAX_TRIALS)

    transform_graph = tft.TFTransformOutput(fn_args.transform_graph_path)

    # Construct a build_keras_model_fn that takes only hyperparameters as
    # input, with the transform graph bound via functools.partial.
    build_keras_model_fn = functools.partial(
        _build_keras_model, tf_transform_output=transform_graph)

    # CloudTuner is a subclass of kerastuner.Tuner which inherits from
    # BaseTuner.
    is_local_run = custom_config_dict.get("is_local_run", True)
    absl.logging.info('is_local_run: %s', is_local_run)
    if is_local_run:
        tuner = kerastuner.RandomSearch(build_keras_model_fn,
                                        max_trials=max_trials,
                                        hyperparameters=_get_hyperparameters(),
                                        allow_new_entries=False,
                                        objective=kerastuner.Objective(
                                            'val_binary_accuracy', 'max'),
                                        directory=fn_args.working_dir,
                                        project_name='titanic_tuning')
    else:
        ai_platform_args = fn_args.custom_config['ai_platform_training_args']
        tuner = CloudTuner(
            build_keras_model_fn,
            project_id=ai_platform_args['project'],
            region=ai_platform_args['region'],
            max_trials=max_trials,
            hyperparameters=_get_hyperparameters(),
            objective=kerastuner.Objective('val_binary_accuracy', 'max'),
            directory=fn_args.working_dir)

    train_dataset = _input_fn(fn_args.train_files,
                              fn_args.data_accessor,
                              transform_graph,
                              batch_size=TRAIN_BATCH_SIZE)

    eval_dataset = _input_fn(fn_args.eval_files,
                             fn_args.data_accessor,
                             transform_graph,
                             batch_size=EVAL_BATCH_SIZE)

    return TunerFnResult(tuner=tuner,
                         fit_kwargs={
                             'x': train_dataset,
                             'validation_data': eval_dataset,
                             'steps_per_epoch': fn_args.train_steps,
                             'validation_steps': fn_args.eval_steps
                         })
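_get_custom_config_dict is a small helper not shown above. Below is one plausible sketch that layers the component's custom_config over defaults for is_local_run and max_trials; it is a guess at its behavior, not the original implementation.

# Hypothetical implementation of the helper referenced above: start from
# defaults and overlay whatever custom_config was passed to the component.
def _get_custom_config_dict(fn_args):
    custom_config_dict = {'is_local_run': True, 'max_trials': MAX_TRIALS}
    custom_config_dict.update(fn_args.custom_config or {})
    return custom_config_dict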