def test_initialize_bad_version_base(hydra_restore_singletons: Any) -> None:
    """A non-string version_base (e.g. a float) fails during version parsing."""
    assert not GlobalHydra().is_initialized()
    with raises(TypeError, match="expected string or bytes-like object"):
        initialize(version_base=1.1)  # type: ignore
def main() -> None:
    """Exercise every initialization flavor and verify the composed config.

    Covers initialize() (relative path), initialize_config_dir() (absolute
    path) and — when the app module is installed — initialize_config_module(),
    each with and without an explicit job_name.
    """

    def check(cfg, expected_job_name):
        # Shared assertions for every initialization flavor.
        assert cfg.config == {"hello": "world"}
        assert cfg.hydra.job.name == expected_job_name

    with initialize(config_path="conf"):
        check(compose(config_name="config", return_hydra_config=True), "main")

    with initialize(config_path="conf", job_name="test_job"):
        check(compose(config_name="config", return_hydra_config=True), "test_job")

    abs_config_dir = os.path.abspath("initialization_test_app/conf")
    with initialize_config_dir(config_dir=abs_config_dir):
        check(compose(config_name="config", return_hydra_config=True), "app")

    with initialize_config_dir(config_dir=abs_config_dir, job_name="test_job"):
        check(compose(config_name="config", return_hydra_config=True), "test_job")

    # Those tests can only work if the module is installed
    if len(sys.argv) > 1 and sys.argv[1] == "module_installed":
        with initialize_config_module(config_module="initialization_test_app.conf"):
            check(compose(config_name="config", return_hydra_config=True), "app")

        with initialize_config_module(
            config_module="initialization_test_app.conf", job_name="test_job"
        ):
            check(compose(config_name="config", return_hydra_config=True), "test_job")
def test_initialize_old_version_base(hydra_restore_singletons: Any) -> None:
    """A version_base below the compatibility floor is rejected outright."""
    assert not GlobalHydra().is_initialized()
    expected = f'version_base must be >= "{version.__compat_version__}"'
    with raises(HydraException, match=expected):
        initialize(version_base="1.0")
def test_initialize_compat_version_base(hydra_restore_singletons: Any) -> None:
    """Omitting version_base falls back to the compat version (with a warning)."""
    assert not GlobalHydra().is_initialized()
    expected = f"Will assume defaults for version {version.__compat_version__}"
    with raises(UserWarning, match=expected):
        initialize()
    assert version.base_at_least(str(version.__compat_version__))
def test_initialize_with_config_path(hydra_restore_singletons: Any) -> None:
    """initialize(config_path=...) registers a 'main' provider on the search path."""
    assert not GlobalHydra().is_initialized()
    initialize(config_path="../hydra/test_utils/configs")
    assert GlobalHydra().is_initialized()

    gh = GlobalHydra.instance()
    assert gh.hydra is not None
    search_path = gh.hydra.config_loader.get_search_path()
    assert isinstance(search_path, ConfigSearchPathImpl)
    # A 'main' provider entry must exist somewhere on the search path.
    query = SearchPathQuery(provider="main", path=None)
    assert search_path.find_first_match(query) != -1
def test_utils():
    """Smoke-test the util helpers: just run them end to end.

    Only checks that `extras` and `print_config` execute without raising;
    their actual output is not inspected.
    """
    os.environ["NEPTUNE_API_TOKEN"] = "not_a_token"
    # Reset any Hydra state left over from a previous test.
    hydra.core.global_hydra.GlobalHydra.instance().clear()
    initialize(config_path="../configs", job_name="test_app")
    cfg = compose(config_name="config")
    extras(cfg)
    print_config(cfg)
def test_jobname_override_initialize_ctx(
    hydra_restore_singletons: Any, job_name: Optional[str], expected: str
) -> None:
    """The job_name passed to initialize() determines hydra.job.name."""
    conf_path = "../examples/jupyter_notebooks/cloud_app/conf"
    with initialize(config_path=conf_path, job_name=job_name):
        composed = compose(return_hydra_config=True)
        assert composed.hydra.job.name == expected
def test_check_onnx_model_single(capsys):
    """Compare the exported single-batch ONNX model against the original
    Darknet model on one random input; outputs must agree within tolerance.

    Expects `testModels/yolov4_singleBatch.onnx` to exist (produced by a
    prior export test) — NOTE(review): confirm test ordering guarantees that.
    """
    with capsys.disabled():
        with initialize(config_path="hydraConf"):
            cfg = compose(config_name="yolov4")
            ort_session = onnxruntime.InferenceSession(
                "testModels/yolov4_singleBatch.onnx")

            # Model Instantiate
            model = Darknet(cfg.onnx.cfg_darknet_path)
            model.load_weights(cfg.onnx.model_darknet_path)
            model.eval()
            cfg.onnx.model_batch_size = 1

            # Random input with the exact shape the exported graph expects.
            dummy_input_real = torch.randn(
                (cfg.onnx.model_batch_size, cfg.onnx.model_channels,
                 model.height, model.width), requires_grad=True)

            # Feed the same tensor to ONNX Runtime (as numpy) and to torch.
            ort_inputs = {ort_session.get_inputs(
            )[0].name: to_numpy(dummy_input_real)}
            boxes, conf = model(dummy_input_real)
            ort_outs = ort_session.run(None, ort_inputs)

            # assert_allclose raises on mismatch and returns None on success,
            # so the prints only show "None" when the comparison passes.
            print(np.testing.assert_allclose(
                to_numpy(boxes), ort_outs[0], rtol=1e-03, atol=1e-05))
            print(np.testing.assert_allclose(
                to_numpy(conf), ort_outs[1], rtol=1e-03, atol=1e-05))

            assert True
def get_surveys(names="Rubin", overrides: Iterable = ()):
    """Return specified surveys as `btk.survey.Survey` objects.

    NOTE: The surveys currently implemented correspond to config files inside
    `conf/surveys`. See the documentation for how to add your own surveys via
    custom config files.

    Args:
        names (str or list): A single str specifying a survey from conf/surveys
            or a list with multiple survey names.
        overrides (Iterable): List or tuple containing overrides for the survey
            config files. An example element of overrides could be
            'surveys.Rubin.airmass=1.1', i.e. what you would pass into the CLI
            in order to customize the surveys used (here specified by `names`).

    Returns:
        btk.survey.Survey object or list of such objects.
    """
    if isinstance(names, str):
        names = [names]
    if not isinstance(names, list):
        raise TypeError(
            "Argument 'names' of `get_surveys` should be a str or list.")

    cli_overrides = [f"surveys={names}", *overrides]
    with initialize(config_path="../conf"):
        cfg = compose("config", overrides=cli_overrides)

    surveys = [get_survey_from_cfg(cfg.surveys[name]) for name in cfg.surveys]
    # A single requested survey is returned bare, not wrapped in a list.
    return surveys[0] if len(surveys) == 1 else surveys
def initialize_hydra_no_path() -> Any:
    """Generator context: run the enclosed code with Hydra initialized (no
    config path) and tear Hydra down afterwards.

    Fix: the original `try/finally` called `init.__exit__` in the `finally`
    clause even when `initialize(...)` itself raised, so `init` was unbound
    and the real error was masked by a NameError. A `with` statement enters
    and exits the context correctly in all cases, including exceptions
    propagating through the `yield`.
    """
    with initialize(config_path=None):
        yield
def initialize_hydra(config_path: Optional[str]) -> Any:
    """Generator context: initialize Hydra with *config_path* for the duration
    of the enclosed code, then tear it down.

    Fix: the original `try/finally` referenced `init` in the `finally` clause
    even when `initialize(...)` raised, so `init` was unbound and a NameError
    masked the real failure. A `with` statement handles entry/exit (and
    exception propagation through `yield`) correctly.
    """
    with initialize(config_path=config_path):
        yield
def initialize_hydra_no_path() -> Any:
    """Generator context: run the enclosed code with Hydra initialized
    (version_base=None, no config path) and tear Hydra down afterwards.

    Fix: the original `try/finally` called `init.__exit__` even when
    `initialize(...)` itself raised, leaving `init` unbound and masking the
    real error with a NameError. A `with` statement is both shorter and
    correct in every failure mode.
    """
    with initialize(version_base=None):
        yield
def test_initialize_ctx_with_absolute_dir(hydra_restore_singletons: Any, tmpdir: Any) -> None:
    """An absolute config_path is rejected: initialize() requires relative paths."""
    expected = re.escape("config_path in initialize() must be relative")
    with raises(HydraException, match=expected):
        with initialize(config_path=str(tmpdir)):
            compose(overrides=["+test_group=test"])
def test_training(capsys):
    """
    Execute Training for 2 epoch to check for error

    Smoke test only: checkpoints are redirected to testModels/ and epochs are
    capped at 2, so this verifies the pipeline wires together, not quality.
    """
    Path("testModels").mkdir(parents=True, exist_ok=True)
    with capsys.disabled():
        with initialize(config_path="conf"):
            cfg = compose(config_name="semanticsegmentation")

            # Redirect the checkpoint callback. NOTE(review): hard-coded
            # absolute path — presumably the CI container layout; confirm
            # before running elsewhere.
            cfg.trainer.default.callbacks[
                0].dirpath = "/home/Develop/ai4prod_python/semanticSegmentation/testModels"
            cfg.trainer.default.callbacks[0].filename = "U2Squared"
            cfg.trainer.default.max_epochs = 2

            # Dataset Setup
            dm = instantiate(cfg.dataset)
            dm.setup()

            # Model Instantiate
            model = instantiate(cfg.model)
            trainer = instantiate(cfg.trainer.default)
            trainer.fit(model=model, datamodule=dm)

            # Reaching this point without an exception is the test's success
            # criterion.
            assert True
def get_test_configuration(config_override=None):
    """Compose the test Hydra configuration with output/source path resolvers.

    Fix: the original signature used a mutable default argument
    (``config_override: list = []``), a shared-state pitfall; it now defaults
    to ``None`` and allocates a fresh list per call. Callers passing a list
    (or nothing) are unaffected.

    Args:
        config_override: optional list of Hydra override strings
            (e.g. ``["key=value"]``).

    Returns:
        The composed ``test_config.yaml`` configuration.
    """
    if config_override is None:
        config_override = []

    test_path = os.path.dirname(os.path.abspath(__file__))
    source_path = os.path.join(test_path, 'data')
    output_path = os.path.join(test_path, 'output')
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    # Register the resolvers only once per process; re-registering raises.
    if not OmegaConf.has_resolver('output_path'):
        OmegaConf.register_new_resolver('output_path', lambda sub_path: output_path)
        OmegaConf.register_new_resolver('source_path', lambda sub_path: source_path)

    if not GlobalHydra().is_initialized():
        hydra.initialize(config_path='data/config', caller_stack_depth=2)
    return hydra.compose("test_config.yaml", overrides=config_override)
def test_hydra_main_passthrough(hydra_restore_singletons: Any) -> None:
    """A @hydra.main-decorated app can be called directly with a composed cfg."""
    with initialize(
        version_base=None, config_path="test_apps/app_with_cfg_groups/conf"
    ):
        from tests.test_apps.app_with_cfg_groups.my_app import my_app  # type: ignore

        overridden = compose(config_name="config", overrides=["optimizer.lr=1.0"])
        assert my_app(overridden) == {"optimizer": {"type": "nesterov", "lr": 1.0}}
def test_schedulers(sch_name: str) -> None:
    """Each scheduler config must instantiate against a minimal SGD optimizer."""
    # The parametrized name may carry a '.suffix'; only the stem selects the config.
    scheduler_name = sch_name.partition('.')[0]
    with initialize(config_path='../conf'):
        cfg = compose(
            config_name='config',
            overrides=[
                f'scheduler={scheduler_name}',
                'optimizer=sgd',
                'private=default',
            ],
        )
        params = torch.nn.Linear(1, 1).parameters()
        optimizer = load_obj(cfg.optimizer.class_name)(params, **cfg.optimizer.params)
        load_obj(cfg.scheduler.class_name)(optimizer, **cfg.scheduler.params)
def test_with_initialize() -> None:
    """compose() inside an initialize() context applies CLI-style overrides."""
    with initialize(version_base=None, config_path="../hydra_app/conf"):
        # config is relative to a module
        composed = compose(config_name="config", overrides=["app.user=test_user"])
        expected = {
            "app": {"user": "******", "num1": 10, "num2": 20},
            "db": {"host": "localhost", "port": 3306},
        }
        assert composed == expected
def test_initialize_without_config_path(tmpdir: Path) -> None:
    """Omitting config_path triggers the documented deprecation warning."""
    expected = dedent(
        """\
        config_path is not specified in hydra.initialize().
        See https://hydra.cc/docs/next/upgrades/1.0_to_1.1/changes_to_hydra_main_config_path for more information."""
    )
    with warns(expected_warning=UserWarning, match=re.escape(expected)):
        with initialize():
            pass
def test_training_from_scratch(capsys):
    """
    Execute Training for 2 epoch to check for error

    Smoke test: runs the image-classification pipeline with checkpoints
    redirected to testModels/ and epochs capped at 2.
    """
    Path("testModels").mkdir(parents=True, exist_ok=True)
    with capsys.disabled():
        with initialize(config_path="conf"):
            cfg = compose(config_name="classification")
            seed_everything(42, workers=cfg.trainer.workers)

            # Redirect the checkpoint callback. NOTE(review): hard-coded
            # absolute path — presumably the CI container layout; confirm.
            cfg.trainer.default.callbacks[
                0].dirpath = "/home/Develop/ai4prod_python/classification/testModels"
            cfg.trainer.default.callbacks[0].filename = MODEL_NAME
            cfg.trainer.default.max_epochs = 2

            # Input transform closes over `cfg` to build train/val transforms
            # from the composed config.
            @dataclass
            class ImageClassificationInputTransform(InputTransform):

                # transforms added to input training data
                def train_input_per_sample_transform(self):
                    return instantiate(cfg.dataset.train_transform, _convert_="all")

                # transform label to tensor
                def target_per_sample_transform(self) -> Callable:
                    return torch.as_tensor

                # transforms added to input validation data
                def val_input_per_sample_transform(self):
                    return instantiate(cfg.dataset.val_transform, _convert_="all")

            # Dataset Setup
            dm = ImageClassificationData.from_folders(
                train_folder=cfg.dataset.datasetPath + "train",
                train_transform=ImageClassificationInputTransform,
                val_folder=cfg.dataset.datasetPath + "val",
                val_transform=ImageClassificationInputTransform,
                batch_size=cfg.dataset.batch_size)

            # Model Instantiate
            model = instantiate(cfg.model.image_classifier)
            # NOTE(review): `pretrained` is flipped *after* the model was
            # instantiated above, so this flag likely has no effect on
            # `model` — confirm intent.
            if cfg.model.from_scratch:
                cfg.model.image_classifier.pretrained = False
            trainer = instantiate(cfg.trainer.default)
            trainer.fit(model=model, datamodule=dm)

            # Reaching this point without an exception is the success criterion.
            assert True
def test_initialize_ctx(
    self, config_file: str, overrides: List[str], expected: Any
) -> None:
    """Parametrized: composing config_file with overrides yields expected."""
    conf_path = "../examples/jupyter_notebooks/cloud_app/conf"
    with initialize(version_base=None, config_path=conf_path):
        assert compose(config_file, overrides) == expected
def test_config_in_dir() -> None:
    """Group options and nested groups are discoverable under a namespace dir."""
    with initialize(version_base=None, config_path="../some_namespace/namespace_test/dir"):
        loader = GlobalHydra.instance().config_loader()
        for dataset in ("cifar10", "imagenet"):
            assert dataset in loader.get_group_options("dataset")
        assert "level1" in loader.list_groups("")
        assert "level2" in loader.list_groups("level1")
        for nested in ("nested1", "nested2"):
            assert nested in loader.get_group_options("level1/level2")
def test_config_installed() -> None:
    """
    Tests that color options are available for both hydra/hydra_logging
    and hydra/job_logging
    """
    with initialize(config_path="../hydra_plugins/hydra_colorlog/conf"):
        loader = GlobalHydra.instance().config_loader()
        for group in ("hydra/job_logging", "hydra/hydra_logging"):
            assert "colorlog" in loader.get_group_options(group)
def ansatz_from_name(name, mol, force=False, **kwargs):
    """Instantiate the named ansatz from its config, attaching the molecule.

    Keyword arguments are applied as OmegaConf updates; with force=True the
    config's struct mode is disabled first so keys outside the schema are
    accepted.
    """
    with initialize('conf/ansatz'):
        ansatz_cfg = compose(config_name=name)
    if force:
        OmegaConf.set_struct(ansatz_cfg, False)
    for key, value in kwargs.items():
        OmegaConf.update(ansatz_cfg, key, value)
    # The molecule object cannot live inside OmegaConf; clear it before
    # conversion and re-attach it on the plain container.
    ansatz_cfg.mol = None
    ansatz_obj = OmegaConf.to_object(ansatz_cfg)
    ansatz_obj['mol'] = mol
    return instantiate(ansatz_obj)
def test_train():
    """Smoke-test train() with fake data, a CSV logger, and fast_dev_run."""
    os.environ["NEPTUNE_API_TOKEN"] = "not_a_token"
    # for Github actions need to create this
    tilemapbase.init(create=True)
    initialize(config_path="../configs", job_name="test_app")
    overrides = [
        "logger=csv",
        "experiment=example_simple",
        "datamodule.fake_data=true",
        "datamodule.data_path=tests/configs/dataset",
        "trainer.fast_dev_run=true",
    ]
    config = compose(config_name="config", overrides=overrides)
    train(config=config)
def test_onnx_conversion_single_dynamic(capsys):
    """
    Conversion in onnx for batch= None. Could be any value

    Exports the Darknet model with a symbolic batch axis (dynamic_axes) and
    validates the resulting graph with the ONNX checker.

    Cleanup: removed dead locals from the original (`dynamic = False` was
    never read, `dynamic_axes = None` was immediately overwritten, and
    `batch_size = -1` was unused).
    """
    Path("testModels").mkdir(parents=True, exist_ok=True)
    with capsys.disabled():
        with initialize(config_path="hydraConf"):
            cfg = compose(config_name="yolov4")

            model = Darknet(cfg.onnx.cfg_darknet_path)
            model.load_weights(cfg.onnx.model_darknet_path)
            model.eval()

            # ----------
            # SETUP DUMMY TENSOR
            # -----------
            input_names = ["input"]
            output_names = ['boxes', 'confs']
            # Mark axis 0 (batch) of every input/output as symbolic so the
            # exported graph accepts any batch size.
            dynamic_axes = {"input": {0: "batch_size"},
                            "boxes": {0: "batch_size"},
                            "confs": {0: "batch_size"}}

            x = torch.randn((1, cfg.onnx.model_channels, model.height,
                             model.width), requires_grad=True)

            # Export the model
            print('Export the onnx model ...')
            torch.onnx.export(model,
                              x,
                              "testModels/yolov4_dynamicBatch.onnx",
                              export_params=True,
                              opset_version=cfg.onnx.opset_version,
                              do_constant_folding=True,
                              input_names=input_names,
                              output_names=output_names,
                              dynamic_axes=dynamic_axes)

            # Structural validation of the exported graph.
            onnx_model = onnx.load("testModels/yolov4_dynamicBatch.onnx")
            onnx.checker.check_model(onnx_model)

            assert os.path.isfile("testModels/yolov4_dynamicBatch.onnx")
def get_sdss_galaxies_cfg(overrides, devices):
    """Compose the sdss_galaxies config with the given overrides.

    Fix: the original mutated the caller's `overrides` dict in place via
    ``overrides.update(...)``; a merged copy is built instead so the
    argument is left intact.

    Args:
        overrides: mapping of dotted config keys to values (None -> "null").
        devices: object exposing ``.gpus``; forwarded as the "gpus" override.

    Returns:
        The composed config.
    """
    merged = {
        **overrides,
        "gpus": devices.gpus,
        "paths.root": Path(__file__).parents[3].as_posix(),
    }
    override_args = [
        f"{k}={v}" if v is not None else f"{k}=null" for k, v in merged.items()
    ]
    with initialize(config_path="../../../case_studies/sdss_galaxies/config"):
        cfg = compose("config", overrides=override_args)
    return cfg
def test_deprecated_compose() -> None:
    """hydra.experimental.compose still works but warns about its new home."""
    from hydra import initialize
    from hydra.experimental import compose as expr_compose

    deprecation_msg = (
        "hydra.experimental.compose() is no longer experimental. Use hydra.compose()"
    )
    with initialize(config_path=None):
        with warns(expected_warning=UserWarning, match=re.escape(deprecation_msg)):
            assert expr_compose() == {}
def get_star_basic_cfg(overrides, devices):
    """Compose the star_basic config with the given overrides.

    Fix: the original mutated the caller's `overrides` dict in place via
    ``overrides.update(...)``; a merged copy is built instead so the
    argument is left intact.

    Args:
        overrides: mapping of dotted config keys to values (None -> "null").
        devices: object exposing ``.gpus``; forwarded as the "gpus" override.

    Returns:
        The composed config.
    """
    merged = {
        **overrides,
        "gpus": devices.gpus,
        "training.weight_save_path": None,
        "paths.root": Path(__file__).parents[2].as_posix(),
    }
    override_args = [
        f"{k}={v}" if v is not None else f"{k}=null" for k, v in merged.items()
    ]
    with initialize(config_path="."):
        cfg = compose("star_basic", overrides=override_args)
    return cfg
def cfg(tmp_path_factory: TempPathFactory) -> DictConfig:
    """Yield the default composed config with wandb/storage dirs redirected
    into a per-session temp folder; the folder is removed on teardown."""
    tmpdir = tmp_path_factory.mktemp("test_train_tmpdir")
    with initialize(config_path="../conf"):
        composed = compose(config_name="default", return_hydra_config=True)
        HydraConfig().set_config(composed)

        # Force the wandb dir to be in the temp folder
        os.environ["WANDB_DIR"] = str(tmpdir)
        # Force the storage dir to be in the temp folder
        composed.core.storage_dir = str(tmpdir)

        yield composed

    shutil.rmtree(tmpdir)