def test_unflatten_list_dict_multi_level_nested(self):
    """Multi-level "a/0/c/d"-style keys are reconstructed recursively."""
    # String root keys -> dict output; "a" expands into a list of mixed entries.
    nested = unflatten_list_dict({"a/0/c/d": 1, "a/1/c": 2, "a/2": 3, "e": 4})
    assert nested == {"a": [{"c": {"d": 1}}, {"c": 2}, 3], "e": 4}

    # Consecutive integer root keys -> the whole result becomes a list.
    as_list = unflatten_list_dict(
        {"0/a/0/b": 1, "0/a/1": 2, "1/0": 3, "1/1": 4, "1/2/c": 5, "2": 6}
    )
    assert as_list == [{"a": [{"b": 1}, 2]}, [3, 4, {"c": 5}], 6]
def test_unflatten_list_dict_output_type(self):
    """The input mapping's type is preserved for dict results, while
    all-integer root keys produce a plain list regardless of input type."""
    dict_keyed = OrderedDict({"a/0": 0, "a/1": 1, "c/d": 2, "e": 3})
    assert type(unflatten_list_dict(dict_keyed)) is OrderedDict

    index_keyed = OrderedDict({"0/a": 0, "1/b": 1, "2/c": 2, "3/d": 3})
    assert type(unflatten_list_dict(index_keyed)) is list
def suggest(self, trial_id: str) -> Optional[Dict]:
    """Return the next configuration to evaluate for ``trial_id``.

    Returns ``None`` when the concurrency cap is already reached.
    Raises ``RuntimeError`` if the search space or the metric/mode
    pair was never configured.
    """
    # Guard: both the space and the metric/mode must be set before suggesting.
    if not self._space:
        raise RuntimeError(
            UNDEFINED_SEARCH_SPACE.format(cls=self.__class__.__name__, space="space"))
    if not self._metric or not self._mode:
        raise RuntimeError(
            UNDEFINED_METRIC_MODE.format(
                cls=self.__class__.__name__,
                metric=self._metric,
                mode=self._mode))

    # A non-positive cap means "unlimited".
    limit = self._max_concurrent if self._max_concurrent > 0 else float("inf")
    if len(self.running) >= limit:
        return None

    if self._points_to_evaluate:
        # Serve user-supplied initial points before asking the optimizer.
        config = self._points_to_evaluate.pop(0)
    else:
        # The argument is unused by the hpbandster implementation.
        config, _ = self.bohber.get_config(None)

    # Remember the flat config per trial and mark the trial as in flight.
    self.trial_to_params[trial_id] = copy.deepcopy(config)
    self.running.add(trial_id)
    return unflatten_list_dict(config)
def test_unflatten_list_dict_one_level_nested(self):
    """Single-depth "parent/child" keys expand into one level of nesting."""
    # String root keys -> dict whose values may be lists or nested dicts.
    assert unflatten_list_dict({"a/0": 0, "a/1": 1, "c/d": 2, "e": 3}) == {
        "a": [0, 1],
        "c": {"d": 2},
        "e": 3,
    }
    # Consecutive integer root keys -> list output.
    assert unflatten_list_dict({"0/a": 0, "1/b": 1, "2/c": 2, "3": 3}) == [
        {"a": 0},
        {"b": 1},
        {"c": 2},
        3,
    ]