def objective(
    cfg, root_experiment_path, trial, reproduce_iter=None, num_fixed_params=0
):
    """Run a single hyperparameter-search trial and return its best validation score.

    Sets up a per-trial experiment directory, trains/validates via the project's
    adapter framework, records per-domain accuracies to a shared CSV, and returns
    the score the study optimizes.

    Args:
        cfg: parsed experiment config; fields read here include batch_size,
            num_workers, pretrain_on_src, save_features, max_epochs, patience,
            validation_interval, validator, target_domains.
        root_experiment_path: directory under which each trial gets its own subfolder.
        trial: study trial object exposing a ``.number`` attribute
            (presumably an Optuna trial — confirm against caller).
        reproduce_iter: when not None, this run re-executes a finished trial and
            its output folder is named ``reproduction<i>`` instead of the trial number.
        num_fixed_params: forwarded to get_adapter_datasets_etc
            # NOTE(review): semantics defined by that helper; can't tell from here.

    Returns:
        The best validation score found, or ``float("nan")`` when the run
        produced no usable score.
    """
    # Reproduction runs get a distinct folder name so they never clobber
    # the original trial's output.
    if reproduce_iter is not None:
        trial_num = f"reproduction{reproduce_iter}"
    else:
        trial_num = str(trial.number)
    experiment_path = os.path.join(root_experiment_path, trial_num)
    config_path = os.path.join(experiment_path, "configs")
    # Start from a clean slate: remove leftovers from any earlier run of
    # this same trial/reproduction folder.
    if os.path.isdir(experiment_path):
        shutil.rmtree(experiment_path)
    (
        framework,
        adapter,
        datasets,
        validator,
        saver,
        logger,
        configerer,
        num_classes,
    ) = get_adapter_datasets_etc(
        cfg,
        experiment_path,
        cfg.validator,
        cfg.target_domains,
        trial,
        config_path,
        num_fixed_params,
    )
    dataloader_creator = main_utils.get_dataloader_creator(
        cfg.batch_size,
        cfg.num_workers,
    )
    stat_getter = main_utils.get_stat_getter(num_classes, cfg.pretrain_on_src)
    # Persist the resolved config, the argparse namespace, and this script
    # itself so the trial can be reproduced later.
    configerer.save(config_path)
    main_utils.save_argparse(cfg, config_path)
    main_utils.save_this_file(__file__, config_path)
    val_data_hook = None
    if cfg.save_features:
        # Optionally dump validation features under <experiment>/features.
        val_data_hook = get_val_data_hook(os.path.join(experiment_path, "features"))
    # Wrap the raw adapter in the framework, which wires in validation,
    # checkpoint saving, logging, and the optional feature hook.
    adapter = framework(
        adapter,
        validator=validator,
        stat_getter=stat_getter,
        saver=saver,
        logger=logger,
        val_data_hook=val_data_hook,
    )
    meta_validator = ForwardOnlyValidator()
    # best_epoch is returned but unused here; only the score drives the study.
    best_score, best_epoch = meta_validator.run(
        adapter,
        datasets=datasets,
        dataloader_creator=dataloader_creator,
        max_epochs=cfg.max_epochs,
        patience=cfg.patience,
        validation_interval=cfg.validation_interval,
        check_initial_score=True,
    )
    # NaN signals "no usable score" to the study without raising.
    if best_score is None:
        return float("nan")
    scores_csv_filename = main_utils.get_scores_csv_filename(
        root_experiment_path, reproduce_iter
    )
    print("***best score***", best_score)
    accuracies = main_utils.get_accuracies_of_best_model(
        adapter, datasets, saver, dataloader_creator, num_classes
    )
    # Append this trial's results to the study-wide scores CSV.
    main_utils.write_scores_to_csv(
        scores_csv_filename,
        best_score,
        accuracies,
        trial_num,
    )
    return best_score
def save(self, folder):
    """Save via the parent class, then also snapshot this source file.

    Copying ``__file__`` into *folder* preserves the exact script version
    alongside whatever the superclass persists, for reproducibility.
    """
    super().save(folder)
    main_utils.save_this_file(__file__, folder)
def save(self, folder):
    """Snapshot this source file into *folder* for reproducibility.

    Unlike the sibling ``save`` override, this class has no parent state to
    persist — presumably its base ``save`` is a no-op or absent; confirm
    against the class definition.
    """
    main_utils.save_this_file(__file__, folder)