Example 1
    def testExtraParams(self):
        # Extra keyword arguments ("extra_param") beyond what the scheduler or
        # searcher accepts should be tolerated by the shim helpers.
        kwargs = {"metric": "metric_foo", "mode": "min", "extra_param": "test"}

        scheduler = "async_hyperband"
        tune.create_scheduler(scheduler, **kwargs)

        searcher_ax = "ax"
        tune.create_searcher(searcher_ax, **kwargs)
Example 2
    def testCreateScheduler(self):
        kwargs = {"metric": "metric_foo", "mode": "min"}

        scheduler = "async_hyperband"
        shim_scheduler = tune.create_scheduler(scheduler, **kwargs)
        real_scheduler = AsyncHyperBandScheduler(**kwargs)
        # The string-based shim should produce the same scheduler class as
        # constructing AsyncHyperBandScheduler directly.
        assert type(shim_scheduler) is type(real_scheduler)
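For context, a minimal sketch (not taken from any of the examples here) of how a scheduler created through the shim might be handed to a Tune run; the trainable function, search space, and metric name are invented for illustration, and it assumes the classic tune.run / tune.report API:

from ray import tune

def trainable(config):
    # Report the metric the scheduler was configured to optimize on.
    tune.report(metric_foo=config["x"])

scheduler = tune.create_scheduler("async_hyperband", metric="metric_foo", mode="min")
tune.run(
    trainable,
    config={"x": tune.uniform(0.0, 1.0)},
    num_samples=4,
    scheduler=scheduler,
)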
Example 3
    def _create_scheduler(self, scheduler_config, parameters):
        if not scheduler_config:
            return None

        # Population Based Training additionally requires the mutable
        # hyperparameter space.
        if scheduler_config.get("type") == "pbt":
            scheduler_config.update(
                {"hyperparam_mutations": self.search_space})

        return tune.create_scheduler(scheduler_config.get("type"),
                                     **scheduler_config)
Example 4
 def _set_scheduler(scheduler, scheduler_params, metric, mode):
     if scheduler:
         if not isinstance(scheduler, str):
             raise ValueError(f"Scheduler should be of type str. "
                              f"Got {scheduler.__class__.__name__}")
         if scheduler_params is None:
             scheduler_params = dict()
         scheduler_params.update(dict(
             time_attr="training_iteration",
             metric=metric,
             mode=mode,
         ))
         scheduler = tune.create_scheduler(scheduler, **scheduler_params)
     return scheduler
Example 5
 def _set_scheduler(scheduler, scheduler_params, metric, mode):
     if scheduler:
         if not isinstance(scheduler, str):
             raise ValueError(f"Scheduler should be of type str. "
                              f"Got {scheduler.__class__.__name__}")
         params = scheduler_params.copy() if scheduler_params else dict()
         if metric and "metric" not in params:
             params["metric"] = metric
         if mode and "mode" not in params:
             params["mode"] = mode
         if "time_attr" not in params:
             params["time_attr"] = "training_iteration"
         scheduler = tune.create_scheduler(scheduler, **params)
     return scheduler
Example 6
    def _create_scheduler(self, scheduler_config, parameters):
        if not scheduler_config:
            return None

        dynamic_resource_allocation = scheduler_config.pop("dynamic_resource_allocation", False)

        if scheduler_config.get("type") == "pbt":
            scheduler_config.update({"hyperparam_mutations": self.search_space})

        scheduler = tune.create_scheduler(scheduler_config.get("type"), **scheduler_config)

        if dynamic_resource_allocation:
            # Wrap the base scheduler so trial resources can be re-allocated
            # while the experiment is running.
            scheduler = ResourceChangingScheduler(scheduler, ray_resource_allocation_function)
        return scheduler
Example 7
 def __init__(
     self,
     parameters: dict,
     output_feature: str,
     metric: str,
     goal: str,
     split: str,
     search_alg: Optional[Dict] = None,
     cpu_resources_per_trial: int = None,
     gpu_resources_per_trial: int = None,
     kubernetes_namespace: str = None,
     time_budget_s: Union[int, float, datetime.timedelta] = None,
     max_concurrent_trials: Optional[int] = None,
     num_samples: int = 1,
     scheduler: Optional[Dict] = None,
     **kwargs,
 ) -> None:
     if ray is None:
         raise ImportError(
             "ray module is not installed. To install it, try running pip install ray"
         )
     self.output_feature = output_feature
     self.metric = metric
     self.split = split
     if not ray.is_initialized():
         try:
             ray.init("auto", ignore_reinit_error=True)
         except ConnectionError:
             logger.info("Initializing new Ray cluster...")
             ray.init(ignore_reinit_error=True)
     self.search_space, self.decode_ctx = self._get_search_space(parameters)
     self.num_samples = num_samples
     self.goal = goal
     self.search_algorithm = get_search_algorithm(search_alg)
     self.scheduler = None if scheduler is None else tune.create_scheduler(
         scheduler[TYPE], **scheduler)
     self.trial_id = 0
     self.cpu_resources_per_trial = cpu_resources_per_trial
     self.gpu_resources_per_trial = gpu_resources_per_trial
     self.kubernetes_namespace = kubernetes_namespace
     self.time_budget_s = time_budget_s
     self.max_concurrent_trials = max_concurrent_trials
     self.sync_config = None
     self.sync_client = None
     # Head node is the node to which all checkpoints are synced if running on a K8s cluster.
     self.head_node_ip = ray.util.get_node_ip_address()
Example 8
    def _create_scheduler(self, scheduler_config):
        if not scheduler_config:
            return None

        return tune.create_scheduler(scheduler_config.get("type"),
                                     **scheduler_config)