def get_reduced_configs(self):
    """Reduce the experiments to restart."""
    iteration_config = self.experiment_group.iteration_config
    if iteration_config is None:
        logger.error(
            'Experiment group `%s` attempt to update iteration, but has no iteration',
            self.experiment_group.id,
            extra={'stack': True})
        return

    search_manager = self.experiment_group.search_manager

    # Get the number of experiments to keep
    n_configs_to_keep = search_manager.get_n_config_to_keep_for_iteration(
        iteration=iteration_config.iteration,
        bracket_iteration=iteration_config.bracket_iteration)

    # Get the last group's experiments metrics
    experiments_metrics = self.experiment_group.iteration_config.experiments_metrics

    # Order the experiments
    reverse = Optimization.maximize(
        self.experiment_group.hptuning_config.hyperband.metric.optimization)
    experiments_metrics = sorted(experiments_metrics, key=lambda x: x[1], reverse=reverse)

    # Keep n experiments
    return [xp[0] for xp in experiments_metrics[:n_configs_to_keep]]
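
# A minimal, standalone sketch of the same reduction step, using plain
# (experiment_id, metric) tuples instead of the group's iteration config.
# `reduce_configs` and the sample data below are illustrative only and not
# part of the codebase above.
def reduce_configs(experiments_metrics, n_configs_to_keep, maximize=True):
    # Sort best-first: descending when maximizing, ascending when minimizing.
    ordered = sorted(experiments_metrics, key=lambda x: x[1], reverse=maximize)
    # Keep only the ids of the top n experiments.
    return [xp[0] for xp in ordered[:n_configs_to_keep]]

# Example: keeping the 2 best runs when maximizing accuracy.
# reduce_configs([(1, 0.81), (2, 0.93), (3, 0.75)], 2) -> [2, 1]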
def get_ordered_experiments_by_metric(self,
                                      experiment_ids: List[int],
                                      metric: str,
                                      optimization: str):
    """Return the experiments annotated with `metric`, ordered best-first."""
    query = self.get_annotated_experiments_with_metric(
        metric=metric,
        experiment_ids=experiment_ids)
    metric_order_by = '{}{}'.format(
        '-' if Optimization.maximize(optimization) else '',
        metric)
    return query.order_by(metric_order_by)
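
# Sketch of how the ordering string is built: Django's `order_by` treats a
# leading '-' as descending, so maximized metrics come best-first while
# minimized metrics keep ascending order. `build_metric_order_by` is a
# hypothetical helper shown only to illustrate the convention.
def build_metric_order_by(metric, maximize):
    return '{}{}'.format('-' if maximize else '', metric)

# build_metric_order_by('accuracy', maximize=True)  -> '-accuracy'
# build_metric_order_by('loss', maximize=False)     -> 'loss'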
def should_stop_early(self) -> bool:
    """Check if any early-stopping condition is already met by an experiment."""
    filters = []
    for early_stopping_metric in self.early_stopping:
        # Maximized metrics stop once they reach the value from above (gte),
        # minimized metrics once they reach it from below (lte).
        comparison = (
            'gte' if Optimization.maximize(early_stopping_metric.optimization) else 'lte')
        metric_filter = 'last_metric__{}__{}'.format(
            early_stopping_metric.metric, comparison)
        filters.append({metric_filter: early_stopping_metric.value})
    if filters:
        return self.experiments.filter(
            functools.reduce(OR, [Q(**f) for f in filters])).exists()
    return False
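
# The `OR` used above is presumably `operator.or_`; combined with Django's Q
# objects, `functools.reduce(operator.or_, [Q(a=1), Q(b=2)])` yields a single
# Q(a=1) | Q(b=2) filter. A dependency-free sketch of the same reduction,
# applied to plain booleans instead of Q objects:
import functools
import operator

conditions = [False, True, False]
any_condition_met = functools.reduce(operator.or_, conditions)  # -> True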
def parse_y(self, metrics):
    """Convert metric values to a numpy array, negating them when the objective is minimization."""
    if not metrics:
        return metrics
    maximize = Optimization.maximize(self.hptuning_config.bo.metric.optimization)
    y_values = []
    for value in metrics:
        if maximize:
            y_values.append(float(value))
        else:
            # Flip the sign so a minimization objective can be handled as maximization.
            y_values.append(-float(value))
    return np.array(y_values)
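
# A small usage sketch of the sign flip: when the objective is minimization
# (e.g. a loss), negating the values turns the problem into a maximization
# one for the downstream optimizer. The inputs below are illustrative only.
import numpy as np

losses = ['0.52', '0.31', '0.27']
# Equivalent to parse_y when Optimization.maximize(...) is False:
y = np.array([-float(v) for v in losses])  # array([-0.52, -0.31, -0.27])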