def test_hyperparams_to_flat():
    """Flattening a nested hyperparameter dict joins nested keys with '__'."""
    nested = {'hp': 1, 'stepa': {'hp': 2, 'stepb': {'hp': 3}}}
    flattened = HyperparameterSamples(**nested).to_flat()
    expected = {'hp': 1, 'stepa__hp': 2, 'stepa__stepb__hp': 3}
    assert flattened == HyperparameterSamples(**expected)
def _set_hyperparams(self, hyperparams: HyperparameterSamples) -> BaseStep: """ Set hyperparams for base step, and the wrapped sklearn_predictor. :param hyperparams: :return: self """ # flatten the step hyperparams, and set the wrapped sklearn predictor params hyperparams = HyperparameterSamples(hyperparams) BaseStep._set_hyperparams(self, hyperparams.to_flat()) self.wrapped_sklearn_predictor.set_params(**hyperparams.with_separator( RecursiveDict.DEFAULT_SEPARATOR).to_flat_as_dict_primitive()) return self.hyperparams.to_flat()
def get_hyperparams(self, flat=True) -> HyperparameterSamples:
    """
    Collect the hyperparams of every child step, keyed by step name.

    Steps whose collected hyperparams are empty are omitted from the result.

    :param flat: when True, return the flattened representation; otherwise nested.
    :return: a HyperparameterSamples aggregating each step's hyperparams.
    """
    collected = dict()
    for name, step in self.steps.items():
        step_hparams = step.get_hyperparams()
        # TODO: oop diamond problem?
        if hasattr(step, "hyperparams"):
            step_hparams.update(step.hyperparams)
        if len(step_hparams) > 0:
            collected[name] = step_hparams
    result = HyperparameterSamples(collected)
    return result.to_flat() if flat else result.to_nested_dict()