def test_sweep_over_unspecified_mandatory_default(
    self, sweep_runner, launcher_name, overrides  # noqa: F811
):
    """Sweeping a group whose default is unspecified/mandatory yields one job per option."""
    sweep = sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        config_path="configs/unspecified_mandatory_default.yaml",
        overrides=["hydra/launcher=" + launcher_name, "group1=file1,file2"] + overrides,
        strict=True,
    )
    # (overrides, cfg) expected for each of the two sweep jobs.
    expected = [
        (["group1=file1"], OmegaConf.create({"foo": 10})),
        (["group1=file2"], OmegaConf.create({"foo": 20})),
    ]
    with sweep:
        batch = sweep.returns[0]
        assert len(batch) == 2
        for job_ret, (exp_overrides, exp_cfg) in zip(batch, expected):
            assert job_ret.overrides == exp_overrides
            assert job_ret.cfg == exp_cfg
            verify_dir_outputs(job_ret, job_ret.overrides)
def sweep_2_jobs(sweep_runner, overrides):
    """
    Runs a sweep with two jobs (a=0 and a=1) and verifies each job's
    overrides, composed config, job name and output directory.

    Fix: builds a new overrides list instead of appending to the argument
    in place — mutating a shared (e.g. pytest-parametrized) list leaks
    state into subsequent invocations.
    """
    overrides = overrides + ["a=0,1"]
    sweep = sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        config_path="configs/compose.yaml",
        overrides=overrides,
    )
    # Baseline config; each job's own overrides are merged on top of it.
    base = OmegaConf.create({"foo": 10, "bar": 100, "a": 0})
    with sweep:
        temp_dir = Path(sweep.temp_dir)
        assert len(sweep.returns[0]) == 2
        for i in range(2):
            job_ret = sweep.returns[0][i]
            expected_conf = OmegaConf.merge(
                base, OmegaConf.from_dotlist(job_ret.overrides)
            )
            assert job_ret.overrides == ["a={}".format(i)]
            assert job_ret.cfg == expected_conf
            assert job_ret.hydra_cfg.hydra.job.name == "a_module"
            verify_dir_outputs(job_ret, job_ret.overrides)
            # Each job writes into a numbered subdirectory of the sweep dir.
            path = temp_dir / str(i)
            assert path.exists(), "'{}' does not exist, dirs: {}".format(
                path, [x for x in temp_dir.iterdir() if x.is_dir()]
            )
def test_app_with_config_path_backward_compatibility(
    hydra_restore_singletons: Any,
    hydra_task_runner: TTaskRunner,
    calling_file: str,
    calling_module: str,
) -> None:
    """Legacy 'config name inside config_path' usage must raise a ValueError pointing at the docs."""
    expected_msg = dedent("""\
    Using config_path to specify the config name is not supported, specify the config name via config_name.
    See https://hydra.cc/docs/next/upgrades/0.11_to_1.0/config_path_changes
    """)
    with pytest.raises(ValueError, match=re.escape(expected_msg)):
        task = hydra_task_runner(
            calling_file=calling_file,
            calling_module=calling_module,
            # Legacy mode: both the path and the config name live in config_path.
            config_path="conf/config.yaml",
            config_name=None,
            configure_logging=True,
        )
        with task:
            assert task.job_ret is not None
            assert task.job_ret.cfg == {
                "optimizer": {"type": "nesterov", "lr": 0.001}
            }
            verify_dir_outputs(task.job_ret)
def sweep_two_config_groups(sweep_runner, overrides):
    """
    Make sure that optimizers=adam,nesterov is interpreted correctly.

    Fix: builds a new overrides list instead of extending the argument in
    place — mutating a shared (e.g. pytest-parametrized) list leaks state
    into subsequent invocations.
    """
    overrides = overrides + ["group1=file1,file2"]
    sweep = sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        config_path="configs/compose.yaml",
        overrides=overrides,
    )
    # Expected per-job overrides and composed configs, in sweep order.
    expected_overrides = [["group1=file1"], ["group1=file2"]]
    expected_conf = [
        OmegaConf.create({"foo": 10, "bar": 100}),
        OmegaConf.create({"foo": 20, "bar": 100}),
    ]
    with sweep:
        assert len(sweep.returns[0]) == 2
        for i in range(2):
            job_ret = sweep.returns[0][i]
            assert job_ret.overrides == expected_overrides[i]
            assert job_ret.cfg == expected_conf[i]
            verify_dir_outputs(job_ret, job_ret.overrides)
def sweep_1_job(
    hydra_sweep_runner: TSweepRunner,
    overrides: List[str],
    task_function: Optional[TaskFunction],
) -> None:
    """Run a sweep that expands to exactly one job and validate its outputs."""
    sweep = hydra_sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        task_function=task_function,
        config_path="configs",
        config_name="compose.yaml",
        overrides=overrides,
    )
    with sweep:
        assert sweep.returns is not None
        batch = sweep.returns[0]
        assert len(batch) == 1
        only_job = batch[0]
        assert only_job.overrides == []
        assert only_job.cfg == {"foo": 10, "bar": 100}
        job_name = only_job.hydra_cfg.hydra.job.name
        assert job_name == "a_module", "Unexpected job name: " + job_name
        verify_dir_outputs(only_job)
def sweep_two_config_groups(
    hydra_sweep_runner: TSweepRunner,
    overrides: List[str],
    task_function: Optional[TaskFunction],
    temp_dir: Path,
) -> None:
    """Make sure that optimizers=adam,nesterov is interpreted correctly."""
    # Copy first so the shared parametrized list is never mutated.
    overrides = copy.deepcopy(overrides)
    overrides.extend(["group1=file1,file2"])
    sweep = hydra_sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        task_function=task_function,
        config_path="configs",
        config_name="compose",
        overrides=overrides,
        temp_dir=temp_dir,
    )
    # (overrides, cfg) pairs expected for the two sweep jobs, in order.
    expected = [
        (["group1=file1"], OmegaConf.create({"foo": 10, "bar": 100})),
        (["group1=file2"], OmegaConf.create({"foo": 20, "bar": 100})),
    ]
    with sweep:
        assert sweep.returns is not None
        batch = sweep.returns[0]
        assert len(batch) == 2
        for job_ret, (exp_overrides, exp_cfg) in zip(batch, expected):
            assert job_ret.overrides == exp_overrides
            assert job_ret.cfg == exp_cfg
            verify_dir_outputs(job_ret, job_ret.overrides)
def not_sweeping_hydra_overrides(
    hydra_sweep_runner: TSweepRunner,
    overrides: List[str],
    task_function: Optional[TaskFunction],
) -> None:
    """
    Runs a sweep with two jobs; hydra.* overrides must not be swept over.

    Fix: builds a new overrides list instead of extending the argument in
    place — sibling helpers in this file copy first (see the deepcopy
    variant of sweep_two_config_groups); mutating a shared parametrized
    list leaks state across invocations.
    """
    overrides = overrides + ["+a=0,1", "hydra.verbose=true,false"]
    sweep = hydra_sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        task_function=task_function,
        config_path="configs",
        config_name="compose.yaml",
        overrides=overrides,
    )
    base = OmegaConf.create({"foo": 10, "bar": 100})
    with sweep:
        assert sweep.returns is not None
        # Only "a" is swept (2 jobs); hydra.verbose does not multiply jobs.
        assert len(sweep.returns[0]) == 2
        for i in range(2):
            job_ret = sweep.returns[0][i]
            expected_conf = OmegaConf.merge(base, {"a": i})
            assert job_ret.overrides == [f"+a={i}"]
            assert job_ret.cfg == expected_conf
            verify_dir_outputs(job_ret, job_ret.overrides)
def test_app_with_config_path_backward_compatibility(
    hydra_restore_singletons: Any,
    hydra_task_runner: TTaskRunner,
    calling_file: str,
    calling_module: str,
) -> None:
    """Legacy 'config name inside config_path' usage emits a deprecation UserWarning."""
    expected_msg = (
        "\nUsing config_path to specify the config name is deprecated, specify the config name via config_name"
        "\nSee https://hydra.cc/next/upgrades/0.11_to_1.0/config_path_changes"
    )
    with pytest.warns(expected_warning=UserWarning, match=re.escape(expected_msg)):
        task = hydra_task_runner(
            calling_file=calling_file,
            calling_module=calling_module,
            # Legacy mode: both the path and the config name live in config_path.
            config_path="conf/config.yaml",
            config_name=None,
        )
        with task:
            assert task.job_ret is not None
            assert task.job_ret.cfg == {
                "optimizer": {"type": "nesterov", "lr": 0.001}
            }
            verify_dir_outputs(task.job_ret)
def test_sweep_over_unspecified_mandatory_default(
    self,
    hydra_sweep_runner: TSweepRunner,
    launcher_name: str,
    overrides: List[str],
) -> None:
    """Sweeping a group whose default is unspecified/mandatory yields one job per option."""
    base_overrides = ["hydra/launcher=" + launcher_name, "group1=file1,file2"]
    sweep = hydra_sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        task_function=self.task_function,
        config_path="configs",
        config_name="unspecified_mandatory_default",
        overrides=base_overrides + overrides,
    )
    # (overrides, cfg) pairs expected for the two sweep jobs, in order.
    expected = [
        (["group1=file1"], OmegaConf.create({"foo": 10})),
        (["group1=file2"], OmegaConf.create({"foo": 20})),
    ]
    with sweep:
        assert sweep.returns is not None
        batch = sweep.returns[0]
        assert len(batch) == 2
        for job_ret, (exp_overrides, exp_cfg) in zip(batch, expected):
            assert job_ret.overrides == exp_overrides
            assert job_ret.cfg == exp_cfg
            verify_dir_outputs(job_ret, job_ret.overrides)
def test_composition_config_example(task_runner: TTaskRunner) -> None:
    """Tutorial composition example: schema=school composes the school schema into the config."""
    expected_cfg = {
        "db": {"driver": "mysql", "user": "******", "pass": "******"},
        "ui": {"windows": {"create_db": True, "view": True}},
        "schema": {
            "database": "school",
            "tables": [
                {
                    "name": "students",
                    "fields": [{"name": "string"}, {"class": "int"}],
                },
                {
                    "name": "exams",
                    "fields": [
                        {"profession": "string"},
                        {"time": "data"},
                        {"class": "int"},
                    ],
                },
            ],
        },
    }
    with task_runner(
        calling_file="examples/tutorial/5_composition/my_app.py",
        calling_module=None,
        config_path="conf",
        config_name="config.yaml",
        overrides=["schema=school"],
    ) as task:
        assert task.job_ret is not None
        assert task.job_ret.cfg == expected_cfg
        verify_dir_outputs(task.job_ret, overrides=task.overrides)
def not_sweeping_hydra_overrides(
    sweep_runner: TSweepRunner, overrides: List[str]
) -> None:
    """
    Runs a sweep with two jobs; hydra.* overrides must not be swept over.

    Fix: builds a new overrides list instead of extending the argument in
    place — mutating a shared (e.g. pytest-parametrized) list leaks state
    into subsequent invocations.
    """
    overrides = overrides + ["a=0,1", "hydra.verbose=true,false"]
    sweep = sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        config_path="configs/compose.yaml",
        overrides=overrides,
        strict=None,
    )
    base = OmegaConf.create({"foo": 10, "bar": 100})
    with sweep:
        assert sweep.returns is not None
        # Only "a" is swept (2 jobs); hydra.verbose does not multiply jobs.
        assert len(sweep.returns[0]) == 2
        for i in range(2):
            job_ret = sweep.returns[0][i]
            expected_conf = OmegaConf.merge(
                base, OmegaConf.from_dotlist(job_ret.overrides)
            )
            assert job_ret.overrides == ["a={}".format(i)]
            assert job_ret.cfg == expected_conf
            verify_dir_outputs(job_ret, job_ret.overrides)
def test_sweep_2_jobs_2_batches(
    self,
    sweep_runner: TSweepRunner,
    launcher_name: str,
    overrides: List[str],
) -> None:
    """Sweep a 2x3 cartesian product (6 jobs) and check batching into 3 batches of 2.

    Verifies the multirun.yaml written to the sweep dir records only the
    task overrides (not the hydra/launcher override), that job results
    arrive in the expected cartesian order, and that each job gets a
    unique working directory.
    """
    # 2 options x 3 options -> 6 jobs total.
    job_overrides = ["group1=file1,file2", "bar=100,200,300"]
    hydra_overrides = ["hydra/launcher=" + launcher_name]
    overrides.extend(job_overrides)
    overrides.extend(hydra_overrides)
    sweep = sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        task_function=None,
        config_path="configs",
        config_name="compose.yaml",
        overrides=overrides,
        strict=True,
    )
    # Expected per-job overrides/configs in cartesian order: group1 varies
    # slowest, bar fastest.
    expected_overrides = [
        ["group1=file1", "bar=100"],
        ["group1=file1", "bar=200"],
        ["group1=file1", "bar=300"],
        ["group1=file2", "bar=100"],
        ["group1=file2", "bar=200"],
        ["group1=file2", "bar=300"],
    ]
    expected_conf = [
        {"foo": 10, "bar": 100},
        {"foo": 10, "bar": 200},
        {"foo": 10, "bar": 300},
        {"foo": 20, "bar": 100},
        {"foo": 20, "bar": 200},
        {"foo": 20, "bar": 300},
    ]
    dirs: Set[str] = set()
    with sweep:
        temp_dir = sweep.temp_dir
        assert temp_dir is not None
        # The sweep writes a multirun.yaml capturing the task overrides only.
        multirun_cfg_path = Path(temp_dir) / "multirun.yaml"
        assert multirun_cfg_path.exists()
        multirun_cfg = OmegaConf.load(multirun_cfg_path)
        assert multirun_cfg.hydra.overrides.task == job_overrides
        assert sweep.returns is not None
        # expecting 3 batches of 2
        assert len(sweep.returns) == 3
        for batch in sweep.returns:
            assert len(batch) == 2
        flat = [rt for batch in sweep.returns for rt in batch]
        assert len(flat) == 6  # with a total of 6 jobs
        for idx, job_ret in enumerate(flat):
            assert job_ret.overrides == expected_overrides[idx]
            assert job_ret.cfg == expected_conf[idx]
            dirs.add(job_ret.working_dir)
            verify_dir_outputs(job_ret, job_ret.overrides)
        assert len(dirs) == 6  # and a total of 6 unique output directories
def test_objects_example(tmpdir, task_runner, args, output_conf):  # noqa: F811
    """Patterns/objects example: composed config matches the parametrized output_conf."""
    with task_runner(
        calling_file="examples/patterns/objects/my_app.py",
        calling_module=None,
        config_path="conf/config.yaml",
        overrides=[],
    ) as run:
        assert run.job_ret.cfg == output_conf
        verify_dir_outputs(run.job_ret, overrides=run.overrides)
def test_sweep_and_override(
    self,
    hydra_sweep_runner: TSweepRunner,
    launcher_name: str,
    overrides: List[str],
    tmpdir: Path,
) -> None:
    """
    Tests that we can override keys that only appear in configs merged in
    during sweep-config construction: db.user does not exist in
    db_conf.yaml and only appears once db=mysql or db=postgresql is merged
    in. This is a tricky situation when operating in strict mode; this
    test verifies it is handled correctly.
    """
    base_overrides = [
        "hydra/launcher=" + launcher_name,
        "db=mysql,postgresql",
        "db.user=someone",
    ]
    sweep = hydra_sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        task_function=None,
        config_path="configs",
        config_name="db_conf.yaml",
        overrides=base_overrides + overrides,
        temp_dir=tmpdir,
    )
    # One job per db option, each carrying the late-bound db.user override.
    expected_overrides = [
        ["db=mysql", "db.user=someone"],
        ["db=postgresql", "db.user=someone"],
    ]
    expected_conf = [
        {
            "db": {
                "driver": "mysql",
                "password": "******",
                "user": "******"
            }
        },
        {
            "db": {
                "user": "******",
                "driver": "postgresql",
                "password": "******",
                "timeout": 10,
            }
        },
    ]
    with sweep:
        assert sweep.returns is not None
        assert len(sweep.returns[0]) == 2
        for i in range(2):
            job_ret = sweep.returns[0][i]
            assert job_ret.overrides == expected_overrides[i]
            assert job_ret.cfg == expected_conf[i]
            verify_dir_outputs(job_ret, job_ret.overrides)
def test_app_without_config__with_overrides(
    task_runner, calling_file, calling_module  # noqa: F811
):
    """With no config file, command-line overrides alone populate the job config."""
    with task_runner(
        calling_file=calling_file,
        calling_module=calling_module,
        config_path="",
        overrides=["abc=123", "a.b=1", "a.a=2"],
    ) as run:
        assert run.job_ret.cfg == {"abc": 123, "a": {"b": 1, "a": 2}}
        verify_dir_outputs(run.job_ret, run.overrides)
def test_app_with_config_groups__override_all_configs(
    task_runner, calling_file, calling_module  # noqa: F811
):
    """Selecting a config group option and overriding one of its keys both take effect."""
    with task_runner(
        calling_file=calling_file,
        calling_module=calling_module,
        config_path="conf",
        overrides=["optimizer=adam", "optimizer.lr=10"],
    ) as run:
        assert run.job_ret.cfg == {
            "optimizer": {"type": "adam", "lr": 10, "beta": 0.01}
        }
        verify_dir_outputs(run.job_ret, overrides=run.overrides)
def test_app_with_split_config(task_runner, calling_file, calling_module):  # noqa: F811
    """A config split across defaults composes the dataset and optimizer sections."""
    with task_runner(
        calling_file=calling_file,
        calling_module=calling_module,
        config_path="config.yaml",
    ) as run:
        assert run.job_ret.cfg == {
            "dataset": {"name": "imagenet", "path": "/datasets/imagenet"},
            "optimizer": {"lr": 0.001, "type": "nesterov"},
        }
        verify_dir_outputs(run.job_ret)
def test_specializing_config_example(task_runner):  # noqa: F811
    """Specializing-config example: dataset=cifar10 selects the specialized model config."""
    with task_runner(
        calling_file="examples/patterns/specializing_config/example.py",
        calling_module=None,
        config_path="conf/config.yaml",
        overrides=["dataset=cifar10"],
    ) as run:
        assert run.job_ret.cfg == {
            "dataset": {"name": "cifar10", "path": "/datasets/cifar10"},
            "model": {"num_layers": 5, "type": "alexnet"},
        }
        verify_dir_outputs(run.job_ret, overrides=run.overrides)
def test_app_with_config_file__no_overrides(
    task_runner, calling_file, calling_module  # noqa: F811
):
    """Loading a config file with no overrides yields the file contents verbatim."""
    with task_runner(
        calling_file=calling_file,
        calling_module=calling_module,
        config_path="config.yaml",
    ) as run:
        assert run.job_ret.cfg == {
            "dataset": {"name": "imagenet", "path": "/datasets/imagenet"}
        }
        verify_dir_outputs(run.job_ret)
def test_objects_example(
    tmpdir: Path, task_runner: TTaskRunner, args: List[str], output_conf: DictConfig
) -> None:
    """Patterns/objects example: composed config matches the parametrized output_conf."""
    with task_runner(
        calling_file="examples/patterns/objects/my_app.py",
        calling_module=None,
        config_path="conf",
        config_name="config.yaml",
        overrides=[],
    ) as run:
        assert run.job_ret is not None
        assert run.job_ret.cfg == output_conf
        verify_dir_outputs(run.job_ret, overrides=run.overrides)
def test_app_with_config_groups__override_all_configs(
    task_runner: TTaskRunner, calling_file: str, calling_module: str
) -> None:
    """Selecting a config group option and overriding one of its keys both take effect."""
    with task_runner(
        calling_file=calling_file,
        calling_module=calling_module,
        config_path="conf",
        config_name=None,
        overrides=["optimizer=adam", "optimizer.lr=10"],
    ) as run:
        assert run.job_ret is not None
        assert run.job_ret.cfg == {
            "optimizer": {"type": "adam", "lr": 10, "beta": 0.01}
        }
        verify_dir_outputs(run.job_ret, overrides=run.overrides)
def test_app_with_split_config(
    task_runner: TTaskRunner, calling_file: str, calling_module: str
) -> None:
    """A config split across defaults composes the dataset and optimizer sections."""
    with task_runner(
        calling_file=calling_file,
        calling_module=calling_module,
        config_path=None,
        config_name="config.yaml",
    ) as run:
        assert run.job_ret is not None
        assert run.job_ret.cfg == {
            "dataset": {"name": "imagenet", "path": "/datasets/imagenet"},
            "optimizer": {"lr": 0.001, "type": "nesterov"},
        }
        verify_dir_outputs(run.job_ret)
def test_app_without_config__with_overrides(
    task_runner: TTaskRunner, calling_file: str, calling_module: str
) -> None:
    """With no config file, command-line overrides alone populate the job config."""
    with task_runner(
        calling_file=calling_file,
        calling_module=calling_module,
        config_path="",
        config_name=None,
        overrides=["abc=123", "a.b=1", "a.a=2"],
    ) as run:
        assert run.job_ret is not None
        assert run.job_ret.cfg == {"abc": 123, "a": {"b": 1, "a": 2}}
        verify_dir_outputs(run.job_ret, run.overrides)
def test_app_with_config_file__with_overide(
    task_runner: TTaskRunner, calling_file: str, calling_module: str
) -> None:
    """A command-line override replaces the value loaded from the config file."""
    with task_runner(
        calling_file=calling_file,
        calling_module=calling_module,
        config_path=None,
        config_name="config.yaml",
        overrides=["dataset.path=/datasets/imagenet2"],
    ) as run:
        assert run.job_ret is not None
        assert run.job_ret.cfg == {
            "dataset": {"name": "imagenet", "path": "/datasets/imagenet2"}
        }
        verify_dir_outputs(run.job_ret, run.overrides)
def test_sweep_2_jobs_2_batches(
    self, sweep_runner: TSweepRunner, launcher_name: str, overrides: List[str]
) -> None:  # noqa: F811
    """Sweep a 2x3 cartesian product (6 jobs) and verify batching into 3 batches of 2.

    Checks per-job overrides/configs arrive in cartesian order and that
    every job got its own unique working directory.
    """
    overrides.extend(
        # order sensitive?
        ["hydra/launcher=" + launcher_name, "group1=file1,file2", "bar=100,200,300"]
    )
    sweep = sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        task_function=None,
        config_path="configs",
        config_name="compose.yaml",
        overrides=overrides,
        strict=True,
    )
    # Cartesian order: group1 varies slowest, bar fastest.
    expected_overrides = [
        ["group1=file1", "bar=100"],
        ["group1=file1", "bar=200"],
        ["group1=file1", "bar=300"],
        ["group1=file2", "bar=100"],
        ["group1=file2", "bar=200"],
        ["group1=file2", "bar=300"],
    ]
    expected_conf = [
        {"foo": 10, "bar": 100},
        {"foo": 10, "bar": 200},
        {"foo": 10, "bar": 300},
        {"foo": 20, "bar": 100},
        {"foo": 20, "bar": 200},
        {"foo": 20, "bar": 300},
    ]
    dirs: Set[str] = set()
    with sweep:
        assert sweep.returns is not None
        # expecting 3 batches of 2
        assert len(sweep.returns) == 3
        for batch in sweep.returns:
            assert len(batch) == 2
        flat = [rt for batch in sweep.returns for rt in batch]
        assert len(flat) == 6  # with a total of 6 jobs
        for idx, job_ret in enumerate(flat):
            assert job_ret.overrides == expected_overrides[idx]
            assert job_ret.cfg == expected_conf[idx]
            dirs.add(job_ret.working_dir)
            verify_dir_outputs(job_ret, job_ret.overrides)
        assert len(dirs) == 6  # and a total of 6 unique output directories
def sweep_2_jobs(
    hydra_sweep_runner: TSweepRunner,
    overrides: List[str],
    task_function: Optional[TaskFunction],
    temp_dir: Path,
) -> None:
    """
    Runs a sweep with two jobs (+a=0 and +a=1) and verifies each job's
    overrides, composed config, job metadata, and output directory, plus
    the multirun.yaml written into the sweep dir.

    Fix: builds a new overrides list instead of extending the argument in
    place — mutating a shared (e.g. pytest-parametrized) list leaks state
    into subsequent invocations.
    """
    job_overrides = ["+a=0,1"]
    overrides = overrides + job_overrides
    sweep = hydra_sweep_runner(
        calling_file=None,
        calling_module="hydra.test_utils.a_module",
        task_function=task_function,
        config_path="configs",
        config_name="compose",
        overrides=overrides,
        temp_dir=temp_dir,
    )
    base_cfg = {"foo": 10, "bar": 100, "a": 0}
    with sweep:
        assert sweep.temp_dir is not None
        assert sweep.returns is not None
        temp_dir = Path(sweep.temp_dir)
        # The sweep writes a multirun.yaml capturing the task overrides.
        multirun_cfg_path = temp_dir / "multirun.yaml"
        assert multirun_cfg_path.exists()
        multirun_cfg = OmegaConf.load(multirun_cfg_path)
        assert multirun_cfg.hydra.overrides.task == job_overrides
        assert len(sweep.returns[0]) == 2
        for i in range(2):
            job_ret = sweep.returns[0][i]
            expected_conf = OmegaConf.create(base_cfg)
            expected_conf.a = i
            assert job_ret.overrides == [f"+a={i}"]
            assert job_ret.cfg == expected_conf
            assert job_ret.hydra_cfg.hydra.job.name == "a_module", (
                "Unexpected job name: " + job_ret.hydra_cfg.hydra.job.name
            )
            assert job_ret.hydra_cfg.hydra.job.id is not None
            assert job_ret.hydra_cfg.hydra.job.num is not None
            verify_dir_outputs(job_ret, job_ret.overrides)
            # Each job writes into a numbered subdirectory of the sweep dir.
            path = temp_dir / str(i)
            lst = [x for x in temp_dir.iterdir() if x.is_dir()]
            assert path.exists(), f"'{path}' does not exist, dirs: {lst}"
def test_specializing_config_example(
    hydra_restore_singletons: Any, hydra_task_runner: TTaskRunner
) -> None:
    """Specializing-config example: dataset=cifar10 selects the specialized model config."""
    with hydra_task_runner(
        calling_file="examples/patterns/specializing_config/example.py",
        calling_module=None,
        config_path="conf",
        config_name="config.yaml",
        overrides=["dataset=cifar10"],
    ) as run:
        assert run.job_ret is not None
        assert run.job_ret.cfg == {
            "dataset": {"name": "cifar10", "path": "/datasets/cifar10"},
            "model": {"num_layers": 5, "type": "alexnet"},
        }
        verify_dir_outputs(run.job_ret, overrides=run.overrides)
def test_app_with_config_file__no_overrides(
    task_runner: TTaskRunner, calling_file: str, calling_module: str  # noqa: F811
) -> None:
    """Loading a config file with no overrides yields the file contents verbatim."""
    with task_runner(
        calling_file=calling_file,
        calling_module=calling_module,
        config_path="config.yaml",
    ) as task:
        assert task.job_ret is not None
        assert task.job_ret.cfg == {
            "dataset": {"name": "imagenet", "path": "/datasets/imagenet"}
        }
        verify_dir_outputs(task.job_ret)
def test_app_without_config__with_append(
    hydra_restore_singletons: Any,
    hydra_task_runner: TTaskRunner,
    calling_file: str,
    calling_module: str,
) -> None:
    """With no config file, append overrides (+key=value) create the keys from scratch."""
    with hydra_task_runner(
        calling_file=calling_file,
        calling_module=calling_module,
        config_path="",
        config_name=None,
        overrides=["+abc=123", "+a.b=1", "+a.a=2"],
    ) as run:
        assert run.job_ret is not None
        assert run.job_ret.cfg == {"abc": 123, "a": {"b": 1, "a": 2}}
        verify_dir_outputs(run.job_ret, run.overrides)
def test_app_with_config_file__with_overide(
    hydra_restore_singletons: Any,
    hydra_task_runner: TTaskRunner,
    calling_file: str,
    calling_module: str,
) -> None:
    """A command-line override replaces the value loaded from the config file."""
    with hydra_task_runner(
        calling_file=calling_file,
        calling_module=calling_module,
        config_path=".",
        config_name="config.yaml",
        overrides=["dataset.path=/datasets/imagenet2"],
        configure_logging=True,
    ) as run:
        assert run.job_ret is not None
        assert run.job_ret.cfg == {
            "dataset": {"name": "imagenet", "path": "/datasets/imagenet2"}
        }
        verify_dir_outputs(run.job_ret, run.overrides)