def __init__(self, time_attr="time_total_s", reward_attr=None, metric="episode_reward_mean", mode="max", perturbation_interval=60.0, hyperparam_mutations={}, quantile_fraction=0.25, resample_probability=0.25, custom_explore_fn=None, log_config=True): for value in hyperparam_mutations.values(): if not (isinstance(value, list) or callable(value)): raise TypeError("`hyperparam_mutation` values must be either " "a List or callable.") if not hyperparam_mutations and not custom_explore_fn: raise TuneError( "You must specify at least one of `hyperparam_mutations` or " "`custom_explore_fn` to use PBT.") if quantile_fraction > 0.5 or quantile_fraction < 0: raise TuneError( "You must set `quantile_fraction` to a value between 0 and" "0.5. Current value: '{}'".format(quantile_fraction)) assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!" if reward_attr is not None: mode = "max" metric = reward_attr logger.warning( "`reward_attr` is deprecated and will be removed in a future " "version of Tune. " "Setting `metric={}` and `mode=max`.".format(reward_attr)) FIFOScheduler.__init__(self) self._metric = metric if mode == "max": self._metric_op = 1. elif mode == "min": self._metric_op = -1. self._time_attr = time_attr self._perturbation_interval = perturbation_interval self._hyperparam_mutations = hyperparam_mutations self._quantile_fraction = quantile_fraction self._resample_probability = resample_probability self._trial_state = {} self._custom_explore_fn = custom_explore_fn self._log_config = log_config # Metrics self._num_checkpoints = 0 self._num_perturbations = 0
def __init__( self, time_attr="training_iteration", reward_attr=None, metric="episode_reward_mean", mode="max", max_t=100, grace_period=1, reduction_factor=4, brackets=1, ): assert max_t > 0, "Max (time_attr) not valid!" assert max_t >= grace_period, "grace_period must be <= max_t!" assert grace_period > 0, "grace_period must be positive!" assert reduction_factor > 1, "Reduction Factor not valid!" assert brackets > 0, "brackets must be positive!" assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!" if reward_attr is not None: mode = "max" metric = reward_attr logger.warning( "`reward_attr` is deprecated and will be removed in a future " "version of Tune. " "Setting `metric={}` and `mode=max`.".format(reward_attr) ) FIFOScheduler.__init__(self) self._reduction_factor = reduction_factor self._max_t = max_t # Tracks state for new trial add self._brackets = [ _Bracket(grace_period, max_t, reduction_factor, s) for s in range(brackets) ] self._counter = 0 # for self._num_stopped = 0 self._metric = metric if mode == "max": self._metric_op = 1.0 elif mode == "min": self._metric_op = -1.0 self._time_attr = time_attr self._num_paused = 0
def __init__(self, time_attr="time_total_s", reward_attr=None, metric="episode_reward_mean", mode="max", perturbation_interval=60.0, hyperparam_mutations={}, resample_probability=0.25, custom_explore_fn=None, log_config=True): if not hyperparam_mutations and not custom_explore_fn: raise TuneError( "You must specify at least one of `hyperparam_mutations` or " "`custom_explore_fn` to use PBT.") assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!" if reward_attr is not None: mode = "max" metric = reward_attr logger.warning( "`reward_attr` is deprecated and will be removed in a future " "version of Tune. " "Setting `metric={}` and `mode=max`.".format(reward_attr)) FIFOScheduler.__init__(self) self._metric = metric if mode == "max": self._metric_op = 1. elif mode == "min": self._metric_op = -1. self._time_attr = time_attr self._perturbation_interval = perturbation_interval self._hyperparam_mutations = hyperparam_mutations self._resample_probability = resample_probability self._trial_state = {} self._custom_explore_fn = custom_explore_fn self._log_config = log_config # Metrics self._num_checkpoints = 0 self._num_perturbations = 0
def __init__(self, time_attr="time_total_s", reward_attr="episode_reward_mean", perturbation_interval=60.0, hyperparam_mutations={}, resample_probability=0.25, custom_explore_fn=None): if not hyperparam_mutations and not custom_explore_fn: raise TuneError( "You must specify at least one of `hyperparam_mutations` or " "`custom_explore_fn` to use PBT.") FIFOScheduler.__init__(self) self._reward_attr = reward_attr self._time_attr = time_attr self._perturbation_interval = perturbation_interval self._hyperparam_mutations = hyperparam_mutations self._resample_probability = resample_probability self._trial_state = {} self._custom_explore_fn = custom_explore_fn # Metrics self._num_checkpoints = 0 self._num_perturbations = 0
def __init__(
    self,
    time_attr: str = "time_total_s",
    metric: Optional[str] = None,
    mode: Optional[str] = None,
    perturbation_interval: float = 60.0,
    burn_in_period: float = 0.0,
    hyperparam_mutations: Optional[Dict] = None,
    quantile_fraction: float = 0.25,
    resample_probability: float = 0.25,
    custom_explore_fn: Optional[Callable] = None,
    log_config: bool = True,
    require_attrs: bool = True,
    synch: bool = False,
):
    hyperparam_mutations = hyperparam_mutations or {}
    for value in hyperparam_mutations.values():
        if not (isinstance(value, (list, dict, Domain)) or callable(value)):
            raise TypeError("`hyperparam_mutations` values must be either "
                            "a List, Dict, a tune search space object, or "
                            "a callable.")
        if isinstance(value, Function):
            raise ValueError("Arbitrary tune.sample_from objects are not "
                             "supported for `hyperparam_mutations` values. "
                             "You must use other built-in primitives like "
                             "tune.uniform, tune.loguniform, etc.")

    if not hyperparam_mutations and not custom_explore_fn:
        raise TuneError(
            "You must specify at least one of `hyperparam_mutations` "
            "or `custom_explore_fn` to use PBT.")

    if quantile_fraction > 0.5 or quantile_fraction < 0:
        raise ValueError(
            "You must set `quantile_fraction` to a value between 0 and "
            "0.5. Current value: '{}'".format(quantile_fraction))

    if perturbation_interval <= 0:
        raise ValueError(
            "perturbation_interval must be a positive number. "
            "Current value: '{}'".format(perturbation_interval))

    if mode:
        assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."

    FIFOScheduler.__init__(self)
    self._metric = metric
    self._mode = mode
    self._metric_op = None
    if self._mode == "max":
        self._metric_op = 1.0
    elif self._mode == "min":
        self._metric_op = -1.0
    self._time_attr = time_attr
    self._perturbation_interval = perturbation_interval
    self._burn_in_period = burn_in_period
    self._hyperparam_mutations = hyperparam_mutations
    self._quantile_fraction = quantile_fraction
    self._resample_probability = resample_probability
    self._trial_state = {}
    self._custom_explore_fn = custom_explore_fn
    self._log_config = log_config
    self._require_attrs = require_attrs
    self._synch = synch
    self._next_perturbation_sync = max(
        self._perturbation_interval,
        self._burn_in_period,
    )

    # Metrics
    self._num_checkpoints = 0
    self._num_perturbations = 0
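# Usage sketch for the constructor above, exercising the newer arguments.
# Assumptions: the class is Ray Tune's PopulationBasedTraining, and
# tune.loguniform produces one of the Domain search space objects the
# type check accepts.
from ray import tune
from ray.tune.schedulers import PopulationBasedTraining

pbt_synch = PopulationBasedTraining(
    time_attr="training_iteration",
    metric="mean_accuracy",
    mode="max",
    perturbation_interval=4,
    burn_in_period=10,  # no perturbations before iteration 10
    hyperparam_mutations={
        "lr": tune.loguniform(1e-4, 1e-1),
        "batch_size": [32, 64, 128],
    },
    synch=True,  # perturb all trials together at each interval
)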
def __init__(self, time_attr="time_total_s", reward_attr=None, metric="episode_reward_mean", mode="max", perturbation_interval=60.0, hyperparam_mutations={}, quantile_fraction=0.25, resample_probability=0.25, custom_explore_fn=None, log_config=True, require_attrs=True, synch=False): for value in hyperparam_mutations.values(): if not (isinstance(value, (list, dict, sample_from)) or callable(value)): raise TypeError("`hyperparam_mutation` values must be either " "a List, Dict, a tune search space object, or " "callable.") if type(value) is sample_from: raise ValueError("arbitrary tune.sample_from objects are not " "supported for `hyperparam_mutation` values." "You must use other built in primitives like" "tune.uniform, tune.loguniform, etc.") if not hyperparam_mutations and not custom_explore_fn: raise TuneError( "You must specify at least one of `hyperparam_mutations` or " "`custom_explore_fn` to use PBT.") if quantile_fraction > 0.5 or quantile_fraction < 0: raise ValueError( "You must set `quantile_fraction` to a value between 0 and" "0.5. Current value: '{}'".format(quantile_fraction)) if perturbation_interval <= 0: raise ValueError( "perturbation_interval must be a positive number greater " "than 0. Current value: '{}'".format(perturbation_interval)) assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!" if reward_attr is not None: mode = "max" metric = reward_attr logger.warning( "`reward_attr` is deprecated and will be removed in a future " "version of Tune. " "Setting `metric={}` and `mode=max`.".format(reward_attr)) FIFOScheduler.__init__(self) self._metric = metric if mode == "max": self._metric_op = 1. elif mode == "min": self._metric_op = -1. self._time_attr = time_attr self._perturbation_interval = perturbation_interval self._hyperparam_mutations = hyperparam_mutations self._quantile_fraction = quantile_fraction self._resample_probability = resample_probability self._trial_state = {} self._custom_explore_fn = custom_explore_fn self._log_config = log_config self._require_attrs = require_attrs self._synch = synch self._next_perturbation_sync = self._perturbation_interval # Metrics self._num_checkpoints = 0 self._num_perturbations = 0
def __init__(
    self,
    total_atoms,
    resource_policy="UNIFORM",
    scaling_dict=SCALING_MAP["LINEAR"],
    deadline=np.inf,
    allocation_grid=None,
    use_pausing=True,
    grace_period=1,
    reduction_factor=4,
    max_t=100,
    time_attr="training_iteration",
    metric="episode_reward_mean",
    mode="max",
    _no_speculation=False,
    _ignore_overhead=False,
    _no_job_limit=False,
    _assume_linear=False,
    _fixed_exploration=False,
    _exploration_ratio=1.0,
):
    # Arguments for ablative study
    self._no_speculation = _no_speculation
    self._ignore_overhead = _ignore_overhead
    self._no_job_limit = _no_job_limit
    self._assume_linear = _assume_linear
    self._fixed_exploration = _fixed_exploration
    self._exploration_ratio = _exploration_ratio

    FIFOScheduler.__init__(self)
    self.use_pausing = use_pausing
    self._num_paused = 0
    self._num_stopped = 0
    self._reduction_factor = reduction_factor
    self._max_t = max_t
    self._metric = metric
    self._time_attr = time_attr
    if mode == "max":
        self._metric_op = 1.0
    elif mode == "min":
        self._metric_op = -1.0

    if self._no_speculation:
        self._brackets = [
            ASHAv2Bracket(
                min_t=grace_period,
                max_t=self._max_t,
                reduction_factor=self._reduction_factor,
                s=0,
            )
        ]
    else:
        self._brackets = [
            _DeadlineBracket(
                self._reduction_factor,
                max_t=self._max_t,
                min_t=grace_period,
                use_pausing=self.use_pausing,
            )
        ]
    if self._fixed_exploration:
        logger.warning(
            f"FIXED EXPLORATION TIME OF {self._exploration_ratio}"
        )

    self.grace_period = grace_period
    self.start_time = time.time()
    self._deadline = deadline
    self._deadline_time = deadline + time.time()
    self._longest_duration = -1
    check(self._deadline_time > self.start_time)

    self.total_atoms = total_atoms
    self.allocator = DynamicAllocator(
        self.total_atoms,
        policy=resource_policy,
        allocation_grid=allocation_grid,
        recharge_period=5,
        metric=self._metric,
        metric_op=self._metric_op,
    )
    if self._assume_linear:
        logger.warning("ABLATION: ASSUMING LINEAR SCALING.")
        scaling_dict = SCALING_MAP["LINEAR"]
    self.scaling_fn = scaling_function_from_dict(scaling_dict)
    self._startup_times = set()
    #: Time it takes for a single iteration
    self._single_atom_iteration_times = []
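# Usage sketch for the deadline-aware constructor above. Assumptions: the
# enclosing class name `DeadlineScheduler` is hypothetical (the snippet does
# not show it), "atoms" are the indivisible resource units handed to
# DynamicAllocator, and the deadline is given in seconds from construction.
sched = DeadlineScheduler(
    total_atoms=16,            # resource units to split across trials
    resource_policy="UNIFORM",
    deadline=3600,             # target: finish within one hour
    use_pausing=True,          # pause rather than stop demoted trials
    grace_period=1,
    reduction_factor=4,
    max_t=100,
    metric="episode_reward_mean",
    mode="max",
)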
def __init__(self, time_attr="time_total_s", reward_attr=None, metric="episode_reward_mean", mode="max", perturbation_interval=60.0, hyperparam_mutations={}, quantile_fraction=0.25, resample_probability=0.25, custom_explore_fn=None, log_config=True): if not hyperparam_mutations and not custom_explore_fn: raise TuneError( "You must specify at least one of `hyperparam_mutations` or " "`custom_explore_fn` to use PBT.") if quantile_fraction > 0.5 or quantile_fraction < 0: raise TuneError( "You must set `quantile_fraction` to a value between 0 and" "0.5. Current value: '{}'".format(quantile_fraction)) assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!" if reward_attr is not None: mode = "max" metric = reward_attr logger.warning( "`reward_attr` is deprecated and will be removed in a future " "version of Tune. " "Setting `metric={}` and `mode=max`.".format(reward_attr)) FIFOScheduler.__init__(self) self._metric = metric if mode == "max": self._metric_op = 1. elif mode == "min": self._metric_op = -1. self._time_attr = time_attr self._perturbation_interval = perturbation_interval self._hyperparam_mutations = hyperparam_mutations self._quantile_fraction = quantile_fraction self._resample_probability = resample_probability self._trial_state = {} self._custom_explore_fn = custom_explore_fn self._log_config = log_config self.meta = { 'timesteps': [], 'lengthscales': [], 'closest': [], 'meandist': [] } self.latest = 0 # when we last did bayesopt self.data = pd.DataFrame() self.bounds = {} for key, distribution in self._hyperparam_mutations.items(): self.bounds[key] = [ np.min([distribution() for _ in range(999999)]), np.max([distribution() for _ in range(999999)]) ] # Metrics self._num_checkpoints = 0 self._num_perturbations = 0