def __init__(self,
             experiment_name: str,
             search_config: AgingEvoConfig,
             training_config: TrainingConfig,
             bound_config: BoundConfig):
    self.log = logging.getLogger(name=f"AgingEvoSearch [{experiment_name}]")
    self.config = search_config
    self.trainer = ModelTrainer(training_config)
    self.root_dir = Path(search_config.checkpoint_dir)
    self.root_dir.mkdir(parents=True, exist_ok=True)
    self.experiment_name = experiment_name

    if training_config.pruning and not training_config.pruning.structured:
        self.log.warning(
            "For unstructured pruning, we can only meaningfully use the model "
            "size resource metric.")
        bound_config.peak_mem_bound = None
        bound_config.mac_bound = None
    self.pruning = training_config.pruning

    # We establish an order of objectives in the feature vector; all functions
    # must ensure the order stays the same.
    self.constraint_bounds = [
        bound_config.error_bound,
        bound_config.peak_mem_bound,
        bound_config.model_size_bound,
        bound_config.mac_bound
    ]

    self.history: List[EvaluatedPoint] = []
    self.population: List[EvaluatedPoint] = []

    self.population_size = search_config.population_size
    self.initial_population_size = \
        search_config.initial_population_size or self.population_size
    self.rounds = search_config.rounds
    self.sample_size = search_config.sample_size
    num_gpus = len(tf.config.experimental.list_physical_devices("GPU"))
    self.max_parallel_evaluations = \
        search_config.max_parallel_evaluations or num_gpus
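The constraint_bounds list fixes the objective ordering used throughout the search: [error, peak memory, model size, MACs], with None meaning "no bound on this resource". Below is a minimal sketch (not part of the class above, and using a hypothetical helper name) of how a resource feature vector in that order could be checked against the bounds.

# Sketch only: a hypothetical feasibility check against constraint_bounds,
# where a bound of None is treated as "unconstrained".
from typing import List, Optional

def within_bounds(features: List[float],
                  bounds: List[Optional[float]]) -> bool:
    """Return True if every measured value is within its (optional) bound."""
    return all(bound is None or value <= bound
               for value, bound in zip(features, bounds))

# Example: 3.5% error, 40 KB peak memory, 60 KB model size, 1.2M MACs,
# checked against illustrative bounds with the MAC bound disabled (None).
feasible = within_bounds([0.035, 40_000, 60_000, 1_200_000],
                         [0.05, 64_000, 128_000, None])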
def __init__(self,
             experiment_name: str,
             search_config: BayesOptConfig,
             training_config: TrainingConfig,
             bound_config: BoundConfig):
    assert search_config.starting_points >= 1

    self.log = logging.getLogger(name=f"BayesOpt [{experiment_name}]")
    self.config = search_config
    self.trainer = ModelTrainer(training_config)
    self.root_dir = Path(search_config.checkpoint_dir)
    self.root_dir.mkdir(parents=True, exist_ok=True)
    self.experiment_name = experiment_name

    if training_config.pruning and not training_config.pruning.structured:
        self.log.warning(
            "For unstructured pruning, we can only use the model size "
            "resource metric.")
        bound_config.peak_mem_bound = None
        bound_config.mac_bound = None

    # We establish an order of objectives in the feature vector; all functions
    # must ensure the order stays the same.
    self.constraint_bounds = [
        bound_config.error_bound,
        bound_config.peak_mem_bound,
        bound_config.model_size_bound,
        bound_config.mac_bound
    ]
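A minimal construction sketch, assuming the enclosing class is named BayesOpt (as suggested by the logger name) and that BoundConfig accepts the four bound fields referenced above as keyword arguments; the exact constructor signatures are not shown here, so treat these calls and values as illustrative.

# Hypothetical usage, under the assumptions stated above.
bound_config = BoundConfig(error_bound=0.05,
                           peak_mem_bound=64_000,
                           model_size_bound=128_000,
                           mac_bound=2_000_000)
search = BayesOpt(experiment_name="cifar10_demo",
                  search_config=search_config,      # a BayesOptConfig with starting_points >= 1
                  training_config=training_config,  # a TrainingConfig
                  bound_config=bound_config)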