def _get_n_jobs(self, logger, **kwargs):
    """Compute the per-model thread count for Prophet fitting.

    Divides the machine's thread budget (``max_threads()``) across the
    concurrent fold workers so total threads stay bounded.

    :param logger: logger passed to ``loggerinfo`` for diagnostics.
    :param kwargs: expected to contain ``max_workers``; if absent, falls
        back to a single job.
    :return: int >= 1, number of jobs/threads for this model.
    """
    try:
        # Treat any non-positive fixed_num_folds as "not fixed", matching the
        # module-level _get_n_jobs helper. The previous `== 0` check let a
        # negative fixed_num_folds reach min() as a negative divisor.
        if config.fixed_num_folds <= 0:
            n_jobs = max(1, int(max_threads() / min(config.num_folds, kwargs['max_workers'])))
        else:
            n_jobs = max(1, int(max_threads() / min(config.fixed_num_folds, config.num_folds, kwargs['max_workers'])))
    except KeyError:
        # kwargs['max_workers'] missing: be conservative and run single-threaded.
        loggerinfo(logger, "Prophet No Max Worker in kwargs. Set n_jobs to 1")
        n_jobs = 1
    return n_jobs
def _get_n_jobs(logger, **kwargs):
    """Compute the thread count for Prophet, honoring recipe overrides.

    An explicit ``n_jobs_prophet`` entry in ``config.recipe_dict`` wins
    (capped by the machine's thread budget); otherwise the budget is split
    across the concurrent fold workers.

    :param logger: logger passed to ``loggerinfo`` for diagnostics.
    :param kwargs: expected to contain ``max_workers``; if absent, falls
        back to a single job.
    :return: int >= 1.
    """
    # Recipe-level override takes precedence over any computed value.
    if 'n_jobs_prophet' in config.recipe_dict:
        return min(config.recipe_dict['n_jobs_prophet'], max_threads())
    try:
        # Collect every cap that limits parallel fold workers; a
        # non-positive fixed_num_folds means "not fixed" and is ignored.
        caps = [config.num_folds, kwargs['max_workers']]
        if config.fixed_num_folds > 0:
            caps.append(config.fixed_num_folds)
        n_jobs = max(1, int(int(max_threads() / min(caps))))
    except KeyError:
        # kwargs['max_workers'] missing: be conservative and run single-threaded.
        loggerinfo(logger, "Prophet No Max Worker in kwargs. Set n_jobs to 1")
        n_jobs = 1
    return max(n_jobs, 1)
def update_n_jobs(self, X_shape, **kwargs):
    """Recompute the effective thread count at predict/score time.

    When scoring (``IS_SCORER`` in kwargs), re-derives an optimal thread
    count for the data shape instead of reusing the fit-time setting, and
    writes it back into both ``self.params`` and ``self.params_base``.

    :param X_shape: shape of the data being predicted on; forwarded to
        ``optimal_nthreads_model`` (semantics defined there — not visible here).
    :param kwargs: may contain ``IS_SCORER``, ``parent_max_workers``,
        ``max_workers``.
    :return: int, the capped number of threads to use.
    """
    if 'IS_SCORER' in kwargs:
        # i.e. don't listen to fit-time n_jobs for predict if scoring
        # `or 1` guards against an explicit None/0 value in kwargs.
        parent_max_workers = max(
            1, kwargs.get('parent_max_workers', kwargs.get('max_workers', 1)) or 1)
        new_optimal = optimal_nthreads_model(
            X_shape, max_workers=parent_max_workers)
        if config.hard_asserts:
            # then in testing situation, maybe multiple experiments, so drop down
            self.params['n_jobs'] = self.params_base['n_jobs'] = min(
                self.params_base['n_jobs'], new_optimal)
        else:
            self.params['n_jobs'] = self.params_base[
                'n_jobs'] = new_optimal
    else:
        # still doing experiment, shared resources assumed, re-use self.params_base['n_jobs'] from fit
        pass
    # Global override wins over anything computed above.
    if config.max_fit_cores_override > 0:
        self.params_base['n_jobs'] = config.max_fit_cores_override
    # self.params_base['n_jobs'] can change for predict
    n_jobs = min(max_threads(), get_num_threads(), self.params_base['n_jobs'])
    return n_jobs
def set_default_params(self, accuracy=None, time_tolerance=None, interpretability=None, **kwargs):
    """Reset ``self.params`` to the baseline hyperparameter configuration.

    ``accuracy``/``time_tolerance``/``interpretability`` are accepted for
    interface compatibility but do not influence the defaults here.

    :param kwargs: may supply ``random_state`` (defaults to 24).
    """
    seed = kwargs.get("random_state", 24)
    # Inherit the thread count chosen upstream, else use the full budget.
    inherited_n_jobs = self.params_base.get('n_jobs', max_threads())
    self.params = {
        'random_state': seed,
        'eta': 0.1,
        'max_depth': 12,
        'min_child_weight': 2.0,
        'reg_lambda': 1.0,
        'colsample_bytree': 0.8,
        'subsample': 1.0,
        'mu': 0.1,
        'reg_alpha': 0,
        'n_jobs': inherited_n_jobs,
    }
def mutate_params(self, accuracy=10, **kwargs):
    """Randomly mutate ``self.params`` within an accuracy-dependent search space.

    Higher ``accuracy`` unlocks a wider hyperparameter grid. Each parameter
    is drawn independently via ``np.random.choice``; draw order is fixed
    (dict insertion order) so results are reproducible for a given RNG state.

    :param accuracy: int dial (default 10) selecting the search-space width.
    """
    if accuracy > 8:
        # Widest grid: full eta range, dense depth grid, full regularization.
        space = dict(
            eta=[0.5, 0.1, 0.05, 0.01],
            max_depth=list(range(4, 21)),
            min_child_weight=[0.1, 0.5, 1.0, 2.0, 4.0, 8.0, 16.0, 32.0],
            reg_lambda=[0.0, 0.1, 1.0, 2.0, 5.0, 8.0, 10.0, 20.0],
            reg_alpha=[0.0, 0.1, 1.0, 5.0, 10.0],
            colsample_bytree=[0.1 * ii for ii in range(1, 11)],
            subsample=[0.5, 0.8, 0.9, 1.0],
            mu=[0.05 * ii for ii in range(1, 14)],
        )
    elif accuracy >= 5:
        # Medium grid: coarser depth/colsample steps, subsample pinned at 1.0.
        space = dict(
            eta=[0.5, 0.1, 0.05],
            max_depth=list(range(4, 21, 2)),
            min_child_weight=[0.1, 0.5, 1.0, 2.0, 4.0, 8.0, 16.0, 32.0],
            reg_lambda=[0.0, 0.1, 1.0, 2.0, 5.0, 8.0, 10.0, 20.0],
            reg_alpha=[0.0, 0.1, 1.0],
            colsample_bytree=[0.1 * ii for ii in range(2, 11, 2)],
            subsample=[1.0],
            mu=[0.05 * ii for ii in range(1, 14)],
        )
    else:
        # Narrowest grid: eta and reg_alpha effectively fixed.
        space = dict(
            eta=[0.1],
            max_depth=list(range(4, 21, 2)),
            min_child_weight=[0.1, 0.5, 1.0, 2.0, 4.0, 8.0, 16.0, 32.0],
            reg_lambda=[0.0, 0.1, 1.0, 5.0, 10.0],
            reg_alpha=[0.0],
            colsample_bytree=[0.1 * ii for ii in range(2, 11, 2)],
            subsample=[1.0],
            mu=[0.05 * ii for ii in range(1, 14)],
        )
    # Same key order as the original per-parameter assignments, so the
    # sequence of np.random.choice calls (and thus RNG state) is unchanged.
    for key, choices in space.items():
        self.params[key] = np.random.choice(choices)
    self.params["n_jobs"] = self.params_base.get('n_jobs', max_threads())