def get_config(
    overrides: List[str] = None,
    config_path: str = "conf",
    config_name: str = "config",
    directory: str = None,
    as_config_class: bool = False,
) -> Union[DictConfig, Config]:
    """Get the config programmatically (instead of via the command line), e.g. from a Jupyter notebook.

    :param overrides: list of config overrides
    :param config_path: config directory path, relative to ``directory``
    :param config_name: main config name
    :param directory: project directory containing the config path (defaults to the current working directory)
    :param as_config_class: if True, convert the composed DictConfig into a Config instance
    :return: DictConfig (or Config) configuration
    """
    initialize_config()
    directory = directory or os.getcwd()
    with local(directory).as_cwd():
        overrides = overrides or []
        config_path = os.path.join(directory, config_path)
        with initialize_config_dir(config_path):
            cfg = hydra_compose_config(config_name=config_name, overrides=overrides)
            if as_config_class:
                cfg = Config.from_dict_config(cfg)
    return cfg
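# Usage sketch (assumption, not from the source): calling the helper above from a
# notebook cell. "db=postgres" and "/path/to/project" are hypothetical placeholders
# for an override and a project directory defined by the local "conf" folder.
from omegaconf import OmegaConf

cfg = get_config(overrides=["db=postgres"], directory="/path/to/project")
print(OmegaConf.to_yaml(cfg))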
def load_old_config(path: Path, delete_fields: List[str] = []) -> DictConfig:
    """Load a previous config from the `.hydra` folder, optionally removing some attributes.

    Args:
        path (Path): path to the previous run.
        delete_fields (List[str], optional): list of fields from the previous run to remove. Defaults to [].

    Returns:
        DictConfig: OmegaConf configuration.
    """
    initialize_config_dir(config_dir=os.path.join(path, ".hydra"))
    config = compose(config_name="config", return_hydra_config=False)
    for field in delete_fields:
        config = delete_field(config, field)
    config.work_dir = os.path.abspath(path)
    return config
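# Usage sketch (assumption, not from the source): reloading the config a previous Hydra
# run saved under <run_dir>/.hydra and dropping a field that should not carry over.
# The run directory and the "logger" field are hypothetical placeholders.
from pathlib import Path

old_config = load_old_config(Path("outputs/2021-06-01/12-00-00"), delete_fields=["logger"])
print(old_config.work_dir)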
def test_efficientnets(name, dm, num_classes, pretrained):
    with initialize_config_dir(os.path.join(os.getcwd(), "conf")):
        cfg = compose(
            config_name="efficientnets",
            overrides=[
                f"name={name}",
                f"dm={dm}",
                f"pretrained={pretrained}",
                f"num_classes={num_classes}",
                "logger=false",
                "pl.max_epochs=1",
                "pl.gpus=0",
                "pl.limit_train_batches=5",
                "pl.limit_val_batches=5",
                "pl.limit_test_batches=5",
                "dm.train_dataloader_conf.batch_size=1",
                "dm.train_dataloader_conf.pin_memory=false",
                "dm.train_dataloader_conf.shuffle=false",
                "dm.train_dataloader_conf.num_workers=1",
                "dm.val_dataloader_conf.batch_size=1",
                "dm.val_dataloader_conf.pin_memory=false",
                "dm.val_dataloader_conf.num_workers=1",
            ],
        )
        efficientnets_main(cfg)
def test_forward(self):
    with initialize_config_dir(config_dir=get_config_directory()):
        data_folder, dataset_name = get_test_data_info()
        config = compose(
            "main",
            overrides=[f"data_folder={data_folder}", f"dataset.name={dataset_name}"],
        )
        dataset_folder = join(config.data_folder, config.dataset.name)
        vocabulary = Vocabulary.load_vocabulary(join(dataset_folder, config.vocabulary_name))
        data_file_path = join(dataset_folder, f"{config.dataset.name}.{config.train_holdout}.c2s")
        dataset = PathContextDataset(data_file_path, config, vocabulary, False)
        batch = PathContextBatch([dataset[i] for i in range(config.hyper_parameters.batch_size)])

        model = PathEncoder(
            config.encoder,
            config.decoder.decoder_size,
            len(vocabulary.token_to_id),
            vocabulary.token_to_id[PAD],
            len(vocabulary.node_to_id),
            vocabulary.node_to_id[PAD],
        )
        output = model(batch.contexts)

        true_shape = (sum(batch.contexts_per_label), config.decoder.decoder_size)
        self.assertTupleEqual(true_shape, output.shape)
def test_missing_bad_config_dir_error(hydra_restore_singletons: Any) -> None:
    expected = (
        "Primary config directory not found."
        "\nCheck that the config directory '/no_way_in_hell_1234567890' exists and readable"
    )
    with pytest.raises(Exception, match=re.escape(expected)):
        with initialize_config_dir(config_dir="/no_way_in_hell_1234567890"):
            hydra = GlobalHydra.instance().hydra
            assert hydra is not None
            compose(config_name="test.yaml", overrides=[])
def load_hydra_config(file_path):
    """Load a hydra composable config or a single yaml config.

    :param file_path: path to the hydra main default_config
    :return: a DictConfig containing the configuration
    """
    file_path = to_absolute_path(file_path)
    conf_dir, file_name = os.path.split(file_path)
    with initialize_config_dir(conf_dir):
        cfg = compose(config_name=file_name)
    return cfg
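# Usage sketch (assumption, not from the source): the path is resolved with
# to_absolute_path(), so either a single YAML file or a Hydra entry point inside a
# config directory can be passed. "configs/train.yaml" is a hypothetical relative path.
cfg = load_hydra_config("configs/train.yaml")
print(cfg)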
def test_initialize_config_dir_ctx_with_absolute_dir(
    hydra_restore_singletons: Any, tmpdir: Any
) -> None:
    tmpdir = Path(tmpdir)
    (tmpdir / "test_group").mkdir(parents=True)
    cfg = OmegaConf.create({"foo": "bar"})
    cfg_file = tmpdir / "test_group" / "test.yaml"
    with open(str(cfg_file), "w") as f:
        OmegaConf.save(cfg, f)

    with initialize_config_dir(config_dir=str(tmpdir)):
        ret = compose(overrides=["+test_group=test"])
        assert ret == {"test_group": cfg}
def from_config(file_path):
    """Load an environment using a yaml configuration file or a composable hydra config.

    :param file_path: path to the config file
    :return: a configured Expando environment
    """
    file_path = to_absolute_path(file_path)
    conf_dir, file_name = os.path.split(file_path)
    with initialize_config_dir(conf_dir):
        cfg = compose(config_name=file_name)
    env = Expando(**cfg)
    return env
def main() -> None:
    with initialize(config_path="conf"):
        cfg = compose(config_name="config", return_hydra_config=True)
        assert cfg.config == {"hello": "world"}
        assert cfg.hydra.job.name == "main"

    with initialize(config_path="conf", job_name="test_job"):
        cfg = compose(config_name="config", return_hydra_config=True)
        assert cfg.config == {"hello": "world"}
        assert cfg.hydra.job.name == "test_job"

    abs_config_dir = os.path.abspath("initialization_test_app/conf")
    with initialize_config_dir(config_dir=abs_config_dir):
        cfg = compose(config_name="config", return_hydra_config=True)
        assert cfg.config == {"hello": "world"}
        assert cfg.hydra.job.name == "app"

    with initialize_config_dir(config_dir=abs_config_dir, job_name="test_job"):
        cfg = compose(config_name="config", return_hydra_config=True)
        assert cfg.config == {"hello": "world"}
        assert cfg.hydra.job.name == "test_job"

    # These tests can only work if the module is installed
    if len(sys.argv) > 1 and sys.argv[1] == "module_installed":
        with initialize_config_module(config_module="initialization_test_app.conf"):
            cfg = compose(config_name="config", return_hydra_config=True)
            assert cfg.config == {"hello": "world"}
            assert cfg.hydra.job.name == "app"

        with initialize_config_module(
            config_module="initialization_test_app.conf", job_name="test_job"
        ):
            cfg = compose(config_name="config", return_hydra_config=True)
            assert cfg.config == {"hello": "world"}
            assert cfg.hydra.job.name == "test_job"
def test_initialize_config_dir_ctx_with_relative_dir(
    self, config_file: str, overrides: List[str], expected: Any
) -> None:
    with pytest.raises(
        HydraException,
        match=re.escape(
            "initialize_config_dir() requires an absolute config_dir as input"
        ),
    ):
        with initialize_config_dir(
            config_dir="../examples/jupyter_notebooks/cloud_app/conf",
            job_name="job_name",
        ):
            ret = compose(config_file, overrides)
            assert ret == expected
def get_cnf(conf_path: str):
    """
    Get the configuration dictionary.
    @param conf_path: str
    @return cnf: OmegaConf DictConfig
    """
    conf_dir = os.path.join(os.getcwd(), "conf")
    if not os.path.isdir(conf_dir):
        print(f"Cannot find directory: {conf_dir}.")
        sys.exit(-1)
    with initialize_config_dir(config_dir=conf_dir):
        cnf = compose(config_name="default.yaml")
    return cnf
def get_config(
    model: str,
    dataset: str = None,
    log_offline: bool = False,
    pb_refresh_rate: int = 1,
    additional_params: Dict[str, Any] = None,
) -> DictConfig:
    overrides = [
        f"model={model}",
        f"log_offline={log_offline}",
        f"progress_bar_refresh_rate={pb_refresh_rate}",
    ]
    if dataset is not None:
        overrides.append(f"dataset.name={dataset}")
    if additional_params is not None:
        for key, value in additional_params.items():
            overrides.append(f"{key}={value}")
    with initialize_config_dir(get_config_directory()):
        config = compose("main", overrides=overrides)
    return config
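# Usage sketch (assumption, not from the source): composing a config for a specific
# model/dataset pair; "code2seq" and "java-small" are hypothetical values, and every
# entry of additional_params is forwarded verbatim as a Hydra override.
config = get_config(
    "code2seq",
    dataset="java-small",
    additional_params={"hyper_parameters.batch_size": 8},
)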
def test_forward(self):
    with initialize_config_dir(config_dir=get_test_resources_dir()):
        config = compose("code2seq-test", overrides=[f"data_folder={get_test_resources_dir()}"])
        dataset_folder = join(config.data_folder, config.dataset.name)
        vocabulary = Vocabulary.load_vocabulary(join(dataset_folder, config.vocabulary_name))
        data_file_path = join(dataset_folder, f"{config.dataset.name}.{config.train_holdout}.c2s")
        dataset = PathContextDataset(data_file_path, config, vocabulary, False)
        batch = PathContextBatch([dataset[i] for i in range(config.hyper_parameters.batch_size)])
        number_of_paths = sum(batch.contexts_per_label)

        model = PathDecoder(config.decoder, len(vocabulary.label_to_id), 0, 0)
        fake_encoder_output = torch.rand(number_of_paths, config.decoder.decoder_size)
        output = model(fake_encoder_output, batch.contexts_per_label, config.dataset.target.max_parts)

        true_shape = (
            config.dataset.target.max_parts,
            config.hyper_parameters.batch_size,
            len(vocabulary.label_to_id),
        )
        self.assertTupleEqual(true_shape, output.shape)
def get_config(
    overrides: List[str] = None,
    config_path: str = "conf",
    config_name: str = "config",
    directory: str = None,
) -> DictConfig:
    init_config_store()
    directory = directory or os.getcwd()
    with local(directory).as_cwd():
        overrides = overrides or []
        config_path = os.path.join(directory, config_path)
        with initialize_config_dir(config_path):
            cfg = compose(config_name=config_name, overrides=overrides)
            # # correct port from docker-compose file
            # ports = cfg.docker.services.neo4j.ports
            # port_mapping = {}
            # for p in ports:
            #     _from, _to = p.split(":")
            #     port_mapping[_from] = _to
            # cfg.neo.port = port_mapping["7687"]
    return cfg
def setup_dict():
    """
    if model is "ground_truth":
        tensorType = 'torch.DoubleTensor'
        torch.set_default_tensor_type(tensorType)
    else:
        tensorType = 'torch.FloatTensor'
        torch.set_default_tensor_type(tensorType)
    """
    # Set all seeds to ensure reproducibility
    random.seed(0)
    np.random.seed(1)
    torch.manual_seed(0)

    # Load configuration
    abs_config_dir = os.path.abspath("conf")
    with initialize_config_dir(config_dir=abs_config_dir):
        # compose from config.yaml, this composes a bunch of defaults in:
        cfg = hydra_compose(config_name="torch_robot_model_gt.yaml")

    robot_model = DifferentiableRobotModel(**cfg.model)
    test_case = sample_test_case(robot_model)
    return {"robot_model": robot_model, "test_case": test_case}
def test_jobname_override_initialize_config_dir_ctx(
    hydra_restore_singletons: Any, tmpdir: Any
) -> None:
    with initialize_config_dir(config_dir=str(tmpdir), job_name="test_job"):
        ret = compose(return_hydra_config=True)
        assert ret.hydra.job.name == "test_job"
def convert_vocabulary(config: Dict, original_vocabulary_path: str) -> Vocabulary:
    counters: Dict[str, TypeCounter[str]] = {}
    with open(original_vocabulary_path, "rb") as dict_file:
        counters["token"] = Counter(pickle.load(dict_file))
        counters["path"] = Counter(pickle.load(dict_file))
        counters["target"] = Counter(pickle.load(dict_file))
    return _counters_to_vocab(config, counters)


def preprocess(config: DictConfig):
    dataset_directory = join(config.data_folder, config.dataset.name)
    possible_dict = join(dataset_directory, f"{config.dataset.name}.dict.c2s")
    train_holdout = join(dataset_directory, f"{config.dataset.name}.{config.train_holdout}.c2s")
    dict_data_config = OmegaConf.to_container(config.dataset, True)
    if not isinstance(dict_data_config, dict):
        raise ValueError
    if exists(possible_dict):
        vocabulary = convert_vocabulary(dict_data_config, possible_dict)
    else:
        vocabulary = collect_vocabulary(dict_data_config, train_holdout)
    vocabulary.dump_vocabulary(join(dataset_directory, "vocabulary.pkl"))


if __name__ == "__main__":
    with initialize_config_dir(get_config_directory()):
        _config = compose("main", overrides=argv[1:])
    preprocess(_config)
torch.manual_seed(0)


class NMSELoss(torch.nn.Module):
    def __init__(self, var):
        super(NMSELoss, self).__init__()
        self.var = var

    def forward(self, yp, yt):
        err = (yp - yt) ** 2
        werr = err / self.var
        return werr.mean()


abs_config_dir = os.path.abspath(
    os.path.join(differentiable_robot_model.__path__[0], "../conf")
)
with initialize_config_dir(config_dir=abs_config_dir):
    learnable_robot_model_cfg = hydra_compose(
        config_name="torch_robot_model_learnable_l4dc_constraints.yaml"
    )

# ground truth robot model (with known kinematics and dynamics parameters) - used to generate data
gt_robot_model = DifferentiableKUKAiiwa()
gt_robot_model.print_link_names()

train_data = generate_sine_motion_inverse_dynamics_data(gt_robot_model, n_data=1000, dt=1.0 / 250.0, freq=0.05)
train_loader = DataLoader(dataset=train_data, batch_size=100, shuffle=False)

# learnable robot model
urdf_path = os.path.join(
    diff_robot_data.__path__[0], learnable_robot_model_cfg.model.rel_urdf_path
)
learnable_robot_model = DifferentiableRobotModel(
    urdf_path,
    learnable_robot_model_cfg.model.learnable_rigid_body_config,
    learnable_robot_model_cfg.model.name,
)
def hydra_initialize_config_dir() -> None:
    abs_conf_dir = Path.cwd() / "../../hydra/test_utils/configs"
    initialize_config_dir(config_dir=str(abs_conf_dir))
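# Companion sketch (assumption, not from the source): Hydra permits only one active
# initialization at a time, so a helper like the one above is typically paired with a
# GlobalHydra reset before it is called again.
from hydra.core.global_hydra import GlobalHydra

GlobalHydra.instance().clear()  # drop any previous initialization
hydra_initialize_config_dir()   # safe to initialize again afterwards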
import os

from hydra import (  # assumption: Hydra >= 1.1 exposes these at the package top level
    compose,
    initialize,
    initialize_config_dir,
    initialize_config_module,
)

if __name__ == "__main__":
    with initialize():
        cfg = compose(config_name="config", return_hydra_config=True)
        assert cfg.config == {"hello": "world"}
        assert cfg.hydra.job.name == "main"

    with initialize(job_name="test_job"):
        cfg = compose(config_name="config", return_hydra_config=True)
        assert cfg.config == {"hello": "world"}
        assert cfg.hydra.job.name == "test_job"

    abs_config_dir = os.path.abspath(".")
    with initialize_config_dir(config_dir=abs_config_dir):
        cfg = compose(config_name="config", return_hydra_config=True)
        assert cfg.config == {"hello": "world"}
        assert cfg.hydra.job.name == "app"

    with initialize_config_dir(config_dir=abs_config_dir, job_name="test_job"):
        cfg = compose(config_name="config", return_hydra_config=True)
        assert cfg.config == {"hello": "world"}
        assert cfg.hydra.job.name == "test_job"

    # This works because pkg_resources is weird.
    # It may stop working if I switch to importlib_resources.
    with initialize_config_module(config_module="main"):
        cfg = compose(config_name="config", return_hydra_config=True)
        assert cfg.config == {"hello": "world"}
        assert cfg.hydra.job.name == "app"
#!/usr/bin/env python
from dotenv import dotenv_values
from pathlib import Path

from omegaconf import OmegaConf
from hydra.experimental import initialize_config_dir, compose

STAGES = ["process_data", "compute_lagged_features", "train"]
CONFIG_DIR = dotenv_values().get("CONFIG_DIR")

# cfg = OmegaConf.create()
with initialize_config_dir(CONFIG_DIR):
    for stage in STAGES:
        cfg = compose(config_name=stage, return_hydra_config=True)
        OmegaConf.save(cfg, f"params/{stage}.yaml")
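# Note (assumption, not from the source): initialize_config_dir() expects an absolute
# directory, so the .env file read by dotenv_values() would contain something like
# CONFIG_DIR=/abs/path/to/conf, and the params/ directory must already exist before
# OmegaConf.save() writes the per-stage files.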