def run_dataset_experiment(
    searcher_max_steps: int,
    batches_per_step: int,
    secrets: Dict[str, str],
    tf2: bool,
    slots_per_trial: int = 1,
    source_trial_id: Optional[str] = None,
) -> List[gql.trials]:
    config = conf.load_config(conf.fixtures_path("estimator_dataset/const.yaml"))
    config.setdefault("searcher", {})
    config["searcher"]["max_steps"] = searcher_max_steps
    config["batches_per_step"] = batches_per_step
    config = conf.set_tf2_image(config) if tf2 else conf.set_tf1_image(config)

    if source_trial_id is not None:
        config["searcher"]["source_trial_id"] = source_trial_id

    config.setdefault("resources", {})
    config["resources"]["slots_per_trial"] = slots_per_trial

    if cluster.num_agents() > 1:
        config["checkpoint_storage"] = exp.s3_checkpoint_config(secrets)

    experiment_id = exp.run_basic_test_with_temp_config(
        config, conf.fixtures_path("estimator_dataset"), 1
    )
    return exp.experiment_trials(experiment_id)
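
# A minimal sketch of how run_dataset_experiment might be exercised from a test.
# The test name, the warm-start flow, and the `.id` attribute access below are
# illustrative assumptions, not part of this file.
def test_dataset_warm_start_sketch(secrets: Dict[str, str]) -> None:
    # Run a short parent experiment first.
    parent_trials = run_dataset_experiment(
        searcher_max_steps=2, batches_per_step=100, secrets=secrets, tf2=False
    )

    # Warm-start a second experiment from the parent trial (assumes the
    # returned trial objects expose an `id` field).
    child_trials = run_dataset_experiment(
        searcher_max_steps=2,
        batches_per_step=100,
        secrets=secrets,
        tf2=False,
        source_trial_id=parent_trials[0].id,
    )
    assert len(child_trials) == 1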
def test_gc_checkpoints_s3(secrets: Dict[str, str]) -> None:
    config = exp.s3_checkpoint_config(secrets)
    run_gc_checkpoints_test(config)