def get_best_trials(self, num_trials: int = 1) -> List[trial_module.Trial]:
    """Returns the trials with the best objective values found so far.

    Args:
        num_trials: positive int, number of trials to return.

    Returns:
        List of KerasTuner Trials.
    """
    if len(self.objective) > 1:
        raise ValueError(
            "Getting the best trials for multi-objective optimization "
            "is not supported."
        )

    maximizing = (
        utils.format_goal(self.objective[0].direction) == "MAXIMIZE"
    )

    # List all trials associated with the same study.
    trial_list = self.service.list_trials()

    optimizer_trials = [t for t in trial_list if t["state"] == "COMPLETED"]

    if not optimizer_trials:
        return []

    sorted_trials = sorted(
        optimizer_trials,
        key=lambda t: t["finalMeasurement"]["metrics"][0]["value"],
        reverse=maximizing,
    )
    best_optimizer_trials = sorted_trials[:num_trials]

    best_trials = []
    # Convert Optimizer trials to KerasTuner Trial instances.
    for optimizer_trial in best_optimizer_trials:
        final_measurement = optimizer_trial["finalMeasurement"]
        kerastuner_trial = trial_module.Trial(
            hyperparameters=utils.convert_optimizer_trial_to_hps(
                self.hyperparameters.copy(), optimizer_trial
            ),
            trial_id=utils.get_trial_id(optimizer_trial),
            status=trial_module.TrialStatus.COMPLETED,
        )
        # If the trial ended before reporting any intermediate metrics,
        # default the best step to 1.
        kerastuner_trial.best_step = final_measurement.get("stepCount", 1)
        kerastuner_trial.score = final_measurement["metrics"][0]["value"]
        best_trials.append(kerastuner_trial)
    return best_trials
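
# Illustrative usage sketch (not part of the original source). It assumes the
# oracle above is reachable from a tuner object through an `oracle` attribute,
# a common KerasTuner convention; the `tuner` variable and the `oracle`
# attribute name are assumptions, not confirmed by this file.
#
#     best_trials = tuner.oracle.get_best_trials(num_trials=3)
#     for best_trial in best_trials:
#         print(
#             best_trial.trial_id,
#             best_trial.score,
#             best_trial.best_step,
#             best_trial.hyperparameters.values,
#         )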
def create_trial(self, tuner_id: Text) -> trial_module.Trial:
    """Create a new `Trial` to be run by the `Tuner`.

    Args:
        tuner_id: An ID that identifies the `Tuner` requesting a `Trial`.
            `Tuners` that should run the same trial (for instance, when
            running a multi-worker model) should have the same ID. If
            multiple suggestTrialsRequests have the same tuner_id, the
            service will return the same suggested trial while it is
            PENDING, and provide a new trial once the previously suggested
            trial has completed.

    Returns:
        A `Trial` object containing a set of hyperparameter values to run
        in a `Tuner`.

    Raises:
        SuggestionInactiveError: Indicates that a suggestion was requested
            from an inactive study.
    """
    # List all trials from the same study and check whether any trial is
    # stopping or whether the number of trials has reached max_trials.
    trial_list = self.service.list_trials()
    # Note that the KerasTuner TrialStatus 'STOPPED' is equivalent to the
    # Optimizer TrialState 'STOPPING'.
    stopping_trials = [t for t in trial_list if t["state"] == "STOPPING"]

    if (
        self.max_trials and len(trial_list) >= self.max_trials
    ) or stopping_trials:
        trial_id = "n"
        hyperparameters = self.hyperparameters.copy()
        hyperparameters.values = {}
        # This will break the search loop later.
        return trial_module.Trial(
            hyperparameters=hyperparameters,
            trial_id=trial_id,
            status=trial_module.TrialStatus.STOPPED,
        )

    # Get a suggested trial from the service.
    suggestions = self.service.get_suggestions(tuner_id)

    if "trials" not in suggestions:
        return trial_module.Trial(
            hyperparameters={}, status=trial_module.TrialStatus.STOPPED
        )

    # Fetch the suggested trial (an Optimizer Trial instance).
    optimizer_trial = suggestions["trials"][0]
    trial_id = utils.get_trial_id(optimizer_trial)

    # KerasTuner Trial instance.
    kerastuner_trial = trial_module.Trial(
        hyperparameters=utils.convert_optimizer_trial_to_hps(
            self.hyperparameters.copy(), optimizer_trial
        ),
        trial_id=trial_id,
        status=trial_module.TrialStatus.RUNNING,
    )

    tf.get_logger().info(
        "Hyperparameters requested by tuner ({}): {}".format(
            tuner_id, kerastuner_trial.hyperparameters.values
        )
    )

    self._start_time = time.time()
    self.trials[trial_id] = kerastuner_trial
    self.ongoing_trials[tuner_id] = kerastuner_trial
    self._save_trial(kerastuner_trial)
    self.save()
    return kerastuner_trial
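
# Illustrative sketch of the driver loop create_trial is written for (not part
# of the original source). The loop below is an assumption about how a search
# driver might call this oracle: it keeps requesting trials until the oracle
# returns the STOPPED sentinel produced above when max_trials is reached or a
# trial is being stopped. `run_trial` and `oracle.end_trial` are hypothetical
# hooks used only for illustration.
#
#     while True:
#         trial = oracle.create_trial(tuner_id="tuner-0")
#         if trial.status == trial_module.TrialStatus.STOPPED:
#             break  # The sentinel trial ends the search loop.
#         run_trial(trial)          # hypothetical: train and report metrics
#         oracle.end_trial(trial)   # hypothetical: mark the trial complete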