def test_convert_optimizer_trial_to_hps(self):
  hps = hp_module.HyperParameters()
  hps.Choice('learning_rate', [1e-4, 1e-3, 1e-2])
  optimizer_trial = {
      'name': 'trial_name',
      'state': 'ACTIVE',
      'parameters': [{
          'parameter': 'learning_rate',
          'floatValue': 0.0001
      }, {
          'parameter': 'num_layers',
          'intValue': '2'
      }, {
          'parameter': 'units_0',
          'floatValue': 96
      }, {
          'parameter': 'units_1',
          'floatValue': 352
      }]
  }
  trial_hps = cloud_tuner_utils.convert_optimizer_trial_to_hps(
      hps, optimizer_trial)
  self.assertEqual(trial_hps.values, EXPECTED_TRIAL_HPS)
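
# A minimal sketch (an assumption; the constant is defined elsewhere in the
# test module) of what the assertion above compares against: the converted
# values mirror `optimizer_trial`'s parameters, with the 'intValue' string
# cast to int and the 'floatValue' entries returned as floats.
#
# EXPECTED_TRIAL_HPS = {
#     'learning_rate': 1e-4,
#     'num_layers': 2,
#     'units_0': 96.0,
#     'units_1': 352.0,
# }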
  def get_best_trials(self, num_trials=1):
    """Returns the trials with the best objective values found so far.

    Arguments:
      num_trials: positive int, number of trials to return.

    Returns:
      List of KerasTuner Trials.
    """
    if len(self.objective) > 1:
      raise ValueError(
          'Getting the best trials for multi-objective optimization '
          'is not supported.')

    maximizing = cloud_tuner_utils.format_goal(
        self.objective[0].direction) == 'MAXIMIZE'

    # List all trials associated with the same study
    trial_list = self.service.list_trials()

    optimizer_trials = [t for t in trial_list if t['state'] == 'COMPLETED']

    if not optimizer_trials:
      return []
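
    # Illustrative (assumed) shape of a completed Optimizer trial as consumed
    # by the sort key below; 'value' holds the final objective metric:
    #
    # {'name': '.../trials/1',
    #  'state': 'COMPLETED',
    #  'finalMeasurement': {'stepCount': '10',
    #                       'metrics': [{'value': 0.87}]}}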

    sorted_trials = sorted(
        optimizer_trials,
        key=lambda t: t['finalMeasurement']['metrics'][0]['value'],
        reverse=maximizing)
    best_optimizer_trials = sorted_trials[:num_trials]

    best_trials = []
    # Convert Optimizer trials to KerasTuner Trial instance
    for optimizer_trial in best_optimizer_trials:
      final_measurement = optimizer_trial['finalMeasurement']
      kerastuner_trial = trial_module.Trial(
          hyperparameters=cloud_tuner_utils.convert_optimizer_trial_to_hps(
              self.hyperparameters.copy(), optimizer_trial),
          trial_id=cloud_tuner_utils.get_trial_id(optimizer_trial),
          status=trial_module.TrialStatus.COMPLETED)
      # If the trial ended before reporting any intermediate metrics,
      # default best_step to 1.
      kerastuner_trial.best_step = final_measurement.get('stepCount', 1)
      kerastuner_trial.score = final_measurement['metrics'][0]['value']
      best_trials.append(kerastuner_trial)
    return best_trials
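
  # Illustrative usage (hypothetical `oracle` instance, not part of this
  # module):
  #
  #   best = oracle.get_best_trials(num_trials=3)
  #   for t in best:
  #     print(t.trial_id, t.score, t.hyperparameters.values)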
    def create_trial(self, tuner_id):
        """Create a new `Trial` to be run by the `Tuner`.

    Arguments:
      tuner_id: An ID that identifies the `Tuner` requesting a `Trial`. `Tuners`
        that should run the same trial (for instance, when running a
        multi-worker model) should have the same ID. If multiple
        suggestTrialsRequests have the same tuner_id, the service will return
        the identical suggested trial if the trial is PENDING, and provide a new
        trial if the last suggest trial was completed.

    Returns:
      A `Trial` object containing a set of hyperparameter values to run
      in a `Tuner`.
    Raises:
      SuggestionInactiveError: Indicates that a suggestion was requested from an
        inactive study.
    """
        # List all trials from the same study and check whether any trial is
        # stopping or the number of trials has reached max_trials.
        trial_list = self.service.list_trials()
        # Note that KerasTunerTrialStatus - 'STOPPED' is equivalent to
        # OptimizerTrialState - 'STOPPING'.
        stopping_trials = [t for t in trial_list if t['state'] == 'STOPPING']
        if (self.max_trials
                and len(trial_list) >= self.max_trials) or stopping_trials:
            trial_id = 'n'
            hyperparameters = self.hyperparameters.copy()
            hyperparameters.values = None
            # This will break the search loop later.
            return trial_module.Trial(hyperparameters=hyperparameters,
                                      trial_id=trial_id,
                                      status=trial_module.TrialStatus.STOPPED)

        # Get suggestions
        suggestions = self.service.get_suggestions(tuner_id)

        if 'trials' not in suggestions:
            return trial_module.Trial(hyperparameters={},
                                      status=trial_module.TrialStatus.STOPPED)
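
        # Illustrative (assumed) shape of a successful suggestTrials
        # response; only the first suggested trial is consumed below:
        #
        # {'trials': [{'name': '.../trials/1',
        #              'state': 'ACTIVE',
        #              'parameters': [{'parameter': 'learning_rate',
        #                              'floatValue': 0.001}]}]}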

        # Fetches the suggested trial.
        # Optimizer Trial instance
        optimizer_trial = suggestions['trials'][0]
        trial_id = cloud_tuner_utils.get_trial_id(optimizer_trial)

        # KerasTuner Trial instance
        kerastuner_trial = trial_module.Trial(
            hyperparameters=cloud_tuner_utils.convert_optimizer_trial_to_hps(
                self.hyperparameters.copy(), optimizer_trial),
            trial_id=trial_id,
            status=trial_module.TrialStatus.RUNNING)

        tf.get_logger().info(
            'Hyperparameters requested by tuner ({}): {}'.format(
                tuner_id, kerastuner_trial.hyperparameters.values))

        self._start_time = time.time()
        self.trials[trial_id] = kerastuner_trial
        self.ongoing_trials[tuner_id] = kerastuner_trial
        self._save_trial(kerastuner_trial)
        self.save()
        return kerastuner_trial
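
    # Illustrative usage (hypothetical names, not part of this module): each
    # worker requests a trial under its own tuner_id and exits the search
    # loop when a STOPPED sentinel trial is returned.
    #
    #   trial = oracle.create_trial(tuner_id='tuner-0')
    #   if trial.status == trial_module.TrialStatus.STOPPED:
    #     ...  # end of search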