Example #1
    def get_best_trials(self, num_trials=1):
        """Returns the trials with the best objective values found so far.

    Arguments:
      num_trials: positive int, number of trials to return.

    Returns:
      List of KerasTuner Trials.
    """
        if len(self.objective) > 1:
            raise ValueError(
                'Getting the best trials for multi-objective optimization '
                'is not supported.')

        maximizing = cloud_tuner_utils.format_goal(
            self.objective[0].direction) == 'MAXIMIZE'

        # List all trials associated with the same study
        trial_list = self.service.list_trials()

        optimizer_trials = [t for t in trial_list if t['state'] == 'COMPLETED']

        if not optimizer_trials:
            return []

        sorted_trials = sorted(
            optimizer_trials,
            key=lambda t: t['finalMeasurement']['metrics'][0]['value'],
            reverse=maximizing)
        best_optimizer_trials = sorted_trials[:num_trials]

        best_trials = []
        # Convert Optimizer trials to KerasTuner Trial instance
        for optimizer_trial in best_optimizer_trials:
            final_measurement = optimizer_trial['finalMeasurement']
            keras_tuner_trial = trial_module.Trial(
                hyperparameters=cloud_tuner_utils.convert_optimizer_trial_to_hps(
                    self.hyperparameters.copy(), optimizer_trial),
                trial_id=cloud_tuner_utils.get_trial_id(optimizer_trial),
                status=trial_module.TrialStatus.COMPLETED)
            # If the trial ended before reporting any intermediate metric,
            # default the best step to 1.
            keras_tuner_trial.best_step = final_measurement.get('stepCount', 1)
            keras_tuner_trial.score = final_measurement['metrics'][0]['value']
            best_trials.append(keras_tuner_trial)
        return best_trials
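
A minimal usage sketch for the method above, assuming a CloudTuner-style tuner
object named `tuner` whose oracle implements `get_best_trials` as shown; the
caller-side names here are illustrative, not from the source:

    # Hypothetical caller: only the Trial fields populated above
    # (trial_id, score, best_step, hyperparameters) are relied upon.
    best_trials = tuner.oracle.get_best_trials(num_trials=3)
    for best in best_trials:
        print(best.trial_id, best.score, best.best_step,
              best.hyperparameters.values)
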
Example #2
    def create_trial(self, tuner_id):
        """Create a new `Trial` to be run by the `Tuner`.

        A `Trial` corresponds to a unique set of hyperparameters to be run
        by `Tuner.run_trial`.

        Args:
            tuner_id: A string, the ID that identifies the `Tuner` requesting a
                `Trial`. `Tuners` that should run the same trial (for instance,
                when running a multi-worker model) should have the same ID.

        Returns:
            A `Trial` object containing a set of hyperparameter values to run
            in a `Tuner`.
        """
        # Allow for multi-worker DistributionStrategy within a Trial.
        if tuner_id in self.ongoing_trials:
            return self.ongoing_trials[tuner_id]

        trial_id = trial_lib.generate_trial_id()

        if self.max_trials and len(self.trials) >= self.max_trials:
            status = trial_lib.TrialStatus.STOPPED
            values = None
        else:
            response = self.populate_space(trial_id)
            status = response["status"]
            values = response["values"] if "values" in response else None

        hyperparameters = self.hyperparameters.copy()
        hyperparameters.values = values or {}
        trial = trial_lib.Trial(hyperparameters=hyperparameters,
                                trial_id=trial_id,
                                status=status)

        if status == trial_lib.TrialStatus.RUNNING:
            self.ongoing_trials[tuner_id] = trial
            self.trials[trial_id] = trial
            self.start_order.append(trial_id)
            self._save_trial(trial)
            self.save()

        return trial
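
To show where `create_trial` fits, here is a hedged sketch of the search loop
that consumes it; this is a simplified paraphrase, not the exact KerasTuner
implementation, and `run_trial`/`end_trial` are assumed to behave as in the
other examples:

    # Simplified sketch of a driving search loop (illustrative only).
    while True:
        trial = oracle.create_trial(tuner_id="tuner_0")
        if trial.status == trial_lib.TrialStatus.STOPPED:
            break  # The oracle has no further trials to suggest.
        tuner.run_trial(trial)  # Train/evaluate with trial.hyperparameters.
        oracle.end_trial(trial.trial_id)  # Report the trial as finished.
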
Example #3
# Module aliases assumed to match the keras-tuner test setup; `tmp_dir` is a
# fixture supplied by the surrounding test suite.
import keras_tuner as kt
from keras_tuner.engine import hyperparameters as hp_module
from keras_tuner.engine import trial as trial_module
from keras_tuner.tuners import bayesian as bo_module


def test_step_respected(tmp_dir):
    hps = hp_module.HyperParameters()
    hps.Float("c", 0, 10, step=3)
    oracle = bo_module.BayesianOptimizationOracle(
        objective=kt.Objective("score", direction="max"),
        max_trials=20,
        hyperparameters=hps,
        num_initial_points=2,
    )
    oracle._set_project_dir(tmp_dir, "untitled")

    # Populate initial trials.
    for i in range(10):
        trial = trial_module.Trial(hyperparameters=hps.copy())
        trial.hyperparameters.values["c"] = 3.0
        trial.score = i
        trial.status = "COMPLETED"
        oracle.trials[trial.trial_id] = trial

    trial = oracle.create_trial("tuner0")
    # Check that oracle respects the `step` param.
    assert trial.hyperparameters.get("c") in {0, 3, 6, 9}
Example #4
    def test_end_trial_success(self):
        self._tuner_with_hparams()
        self.mock_client.complete_trial.return_value = {
            "name": "1",
            "state": "COMPLETED",
            "parameters": [{
                "parameter": "learning_rate",
                "floatValue": 0.01
            }],
            "finalMeasurement": {
                "stepCount": "3",
                "metrics": [{
                    "metric": "val_acc",
                    "value": 0.7
                }],
            },
            "trial_infeasible": False,
            "infeasible_reason": None,
        }
        mock_save_trial = mock.Mock()
        self.tuner.oracle._save_trial = mock_save_trial
        self.tuner.oracle.ongoing_trials = {"tuner_0": self._test_trial}
        expected_trial = trial_module.Trial(
            hyperparameters=self._test_hyperparameters,
            trial_id="1",
            status=trial_module.TrialStatus.COMPLETED,
        )
        expected_trial.best_step = 3
        expected_trial.score = 0.7

        self.tuner.oracle.end_trial(trial_id="1")

        self.mock_client.complete_trial.assert_called_once_with(
            "1", False, None)
        self.assertEqual(repr(mock_save_trial.call_args[0][0].get_state()),
                         repr(expected_trial.get_state()))
Example #5
    def create_trial(self, tuner_id: Text) -> trial_module.Trial:
        """Create a new `Trial` to be run by the `Tuner`.

        Args:
            tuner_id: An ID that identifies the `Tuner` requesting a `Trial`.
                `Tuners` that should run the same trial (for instance, when
                running a multi-worker model) should have the same ID. If
                multiple suggestTrialsRequests have the same tuner_id, the
                service will return the identical suggested trial if the trial
                is PENDING, and provide a new trial if the last suggested trial
                was completed.

        Returns:
            A `Trial` object containing a set of hyperparameter values to run
            in a `Tuner`.

        Raises:
            SuggestionInactiveError: Indicates that a suggestion was requested
                from an inactive study.
        """
        # List all trials from the same study and check whether any trial is
        # STOPPED or the number of trials has reached max_trials.
        trial_list = self.service.list_trials()
        # Note that KerasTunerTrialStatus - 'STOPPED' is equivalent to
        # VizierTrialState - 'STOPPING'.
        stopping_trials = [t for t in trial_list if t["state"] == "STOPPING"]
        if (self.max_trials
                and len(trial_list) >= self.max_trials) or stopping_trials:
            trial_id = "n"
            hyperparameters = self.hyperparameters.copy()
            hyperparameters.values = {}
            # This will break the search loop later.
            return trial_module.Trial(
                hyperparameters=hyperparameters,
                trial_id=trial_id,
                status=trial_module.TrialStatus.STOPPED,
            )

        # Get suggestions
        suggestions = self.service.get_suggestions(tuner_id)

        if not suggestions:
            return trial_module.Trial(hyperparameters={},
                                      status=trial_module.TrialStatus.STOPPED)

        # Fetches the suggested trial.
        # Vizier Trial instance
        vizier_trial = suggestions[0]
        trial_id = utils.get_trial_id(vizier_trial)

        # KerasTuner Trial instance
        keras_tuner_trial = trial_module.Trial(
            hyperparameters=utils.convert_vizier_trial_to_hps(
                self.hyperparameters.copy(), vizier_trial),
            trial_id=trial_id,
            status=trial_module.TrialStatus.RUNNING,
        )

        tf.get_logger().info(
            "Hyperparameters requested by tuner ({}): {} ".format(
                tuner_id, keras_tuner_trial.hyperparameters.values))

        self._start_time = time.time()
        self.trials[trial_id] = keras_tuner_trial
        self.ongoing_trials[tuner_id] = keras_tuner_trial
        self._save_trial(keras_tuner_trial)
        self.save()
        return keras_tuner_trial
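
The `vizier_trial` dictionaries handled above have the same shape as the mocked
service responses in the test examples below; a minimal illustrative payload
(field names taken from those mocks, values made up) might look like:

    # Illustrative only; real payloads come from self.service.get_suggestions().
    vizier_trial = {
        "name": "1",
        "parameters": [{"parameter": "learning_rate", "floatValue": 0.01}],
    }
    # utils.get_trial_id() presumably derives the trial_id from the trial name,
    # and utils.convert_vizier_trial_to_hps() maps "parameters" onto the copied
    # HyperParameters, as done in create_trial above.
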
Example #6
    def test_get_best_trials_multi_tuners(self):
        # Instantiate tuner_1
        tuner_1 = self._tuner(
            objective=oracle_module.Objective("val_acc", "max"),
            hyperparameters=self._test_hyperparameters,
            study_config=None,
        )
        tuner_1.tuner_id = "tuner_1"
        # tuner_1 has a completed trial
        trial_1 = trial_module.Trial(
            hyperparameters=self._test_hyperparameters,
            trial_id="1",
            status=trial_module.TrialStatus.COMPLETED,
        )
        tuner_1.oracle.trials = {"1": trial_1}

        # Instantiate tuner_2
        tuner_2 = self._tuner(
            objective=oracle_module.Objective("val_acc", "max"),
            hyperparameters=self._test_hyperparameters,
            study_config=None,
        )
        tuner_2.tuner_id = "tuner_2"
        # tuner_2 has a completed trial
        trial_2 = trial_module.Trial(
            hyperparameters=self._test_hyperparameters,
            trial_id="2",
            status=trial_module.TrialStatus.COMPLETED,
        )
        tuner_2.oracle.trials = {"2": trial_2}

        self.mock_client.list_trials.return_value = [
            {
                "name": "1",
                "state": "COMPLETED",
                "parameters": [{
                    "parameter": "learning_rate",
                    "floatValue": 0.01
                }],
                "finalMeasurement": {
                    "stepCount": "3",
                    "metrics": [{
                        "metric": "val_acc",
                        "value": 0.7
                    }],
                },
                "trial_infeasible": False,
                "infeasible_reason": None,
            },
            {
                "name": "2",
                "state": "COMPLETED",
                "parameters": [{
                    "parameter": "learning_rate",
                    "floatValue": 0.001
                }],
                "finalMeasurement": {
                    "stepCount": "3",
                    "metrics": [{
                        "metric": "val_acc",
                        "value": 0.9
                    }],
                },
                "trial_infeasible": False,
                "infeasible_reason": None,
            },
        ]

        # Any tuner worker that requests the best trials receives the same
        # top-N sorted trials.
        best_trials_1 = tuner_1.oracle.get_best_trials(num_trials=2)
        self.mock_client.list_trials.assert_called_once()

        best_trials_2 = tuner_2.oracle.get_best_trials(num_trials=2)

        self.assertEqual(len(best_trials_1), 2)
        self.assertEqual(best_trials_1[0].trial_id, best_trials_2[0].trial_id)
        self.assertEqual(best_trials_1[1].trial_id, best_trials_2[1].trial_id)
        self.assertEqual(best_trials_1[0].score, 0.9)
        self.assertEqual(best_trials_1[0].best_step, 3)