Example #1
 def test_searchpath_in_primary_config(
     self,
     init_configs: Any,
     config_name: str,
     overrides: List[str],
     expected: Any,
 ) -> None:
     cfg = compose(config_name=config_name, overrides=overrides)
     assert cfg == expected
Example #2
 def __init__(self, **data: Any):
     super().__init__(**data)
     with initialize(config_path=self.config_path):
         object.__setattr__(
             self,
             'configuration',
             compose(self.config_filename)
         )
         logger.debug("Configuration: \n" + OmegaConf.to_yaml(self.configuration))
Example #3
 def test_initialize_config_module_ctx(
     self, config_file: str, overrides: List[str], expected: Any,
 ) -> None:
     with initialize_config_module(
         config_module="examples.jupyter_notebooks.cloud_app.conf",
         job_name="job_name",
     ):
         ret = compose(config_file, overrides)
         assert ret == expected
Example #4
    def test_strict_failure_call_is_strict(
        self,
        config_dir: str,
        hydra_global_context: TGlobalHydraContext,
        config_file: str,
        overrides: List[str],
        expected: Any,
    ) -> None:
        # default strict false, but call is strict
        with hydra_global_context(config_dir=config_dir, strict=False):
            with pytest.raises(AttributeError):
                compose(config_name=config_file, overrides=overrides, strict=True)

        # default strict true, but call is false
        with hydra_global_context(config_dir=config_dir, strict=True):

            with does_not_raise():
                compose(config_name=config_file, overrides=overrides, strict=False)
Example #5
def test_initialize_with_module(hydra_restore_singletons: Any) -> None:
    initialize_with_module(module="tests.test_apps.app_with_cfg_groups.my_app",
                           config_path="conf")
    assert compose(config_name="config") == {
        "optimizer": {
            "type": "nesterov",
            "lr": 0.001
        }
    }
Example #6
def get_global_cfg():
    """
    WARNING: does not work, hydra compose API seems to work only in Jupyter now
    Use experimental Hydra compose API
    """
    raise NotImplementedError
    from hydra.experimental import initialize, compose
    initialize(config_dir='../../conf', strict=True)
    return compose('config.yaml')
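The stub above warns that the old experimental API no longer worked for its author. For reference, a minimal sketch of the same lookup in the context-manager style used by the working examples on this page; the relative config path and config name are carried over from the stub and remain assumptions about the project layout:

def get_global_cfg_sketch():
    # Sketch only: recent Hydra releases expose the compose API at the top
    # level, and initialize used as a context manager restores the global
    # Hydra state on exit, so repeated calls do not clash.
    from hydra import initialize, compose

    with initialize(config_path='../../conf'):
        # the config name is given without the .yaml extension
        return compose(config_name='config')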
Example #7
def test_jobname_override_initialize_ctx(hydra_restore_singletons: Any,
                                         job_name: Optional[str],
                                         expected: str) -> None:
    with initialize(
            config_path="../examples/jupyter_notebooks/cloud_app/conf",
            job_name=job_name,
    ):
        ret = compose(return_hydra_config=True)
        assert ret.hydra.job.name == expected
Example #8
def test_initialize_config_module_ctx(hydra_restore_singletons: Any) -> None:
    with initialize_config_module(
        config_module="examples.jupyter_notebooks.cloud_app.conf"
    ):
        ret = compose(return_hydra_config=True)
        assert ret.hydra.job.name == "app"

    with initialize_config_module(
        config_module="examples.jupyter_notebooks.cloud_app.conf", job_name="test_job"
    ):
        ret = compose(return_hydra_config=True)
        assert ret.hydra.job.name == "test_job"
Example #9
def test_dataloader_drops_last(tmpdir) -> None:
    config_path = "./configs/classification"
    with initialize(config_path=str(config_path)):
        os.environ["AUTOALBUMENT_TEST_DATASET_LENGTH"] = "17"
        os.environ["AUTOALBUMENT_CONFIG_DIR"] = str((Path(__file__).parent / config_path).resolve())
        os.chdir(tmpdir)
        cfg = compose(config_name="search", overrides=["data.dataloader.batch_size=12"])
        faa_searcher = get_faa_searcher(cfg)
        faa_searcher.search()
Example #10
 def test_compose_config(
     self,
     config_path: str,
     config_file: str,
     overrides: List[str],
     expected: Any,
 ) -> None:
     with initialize_ctx(config_path=config_path):
         cfg = compose(config_file, overrides)
         assert cfg == expected
Example #11
 def test_initialize_ctx(
     self,
     config_file: str,
     overrides: List[str],
     expected: Any,
 ) -> None:
     with initialize(
             config_path="../examples/jupyter_notebooks/cloud_app/conf"):
         ret = compose(config_file, overrides)
         assert ret == expected
Example #12
def test_initialize_with_module(hydra_restore_singletons: Any) -> None:
    with initialize_config_module(
            config_module="tests.test_apps.app_with_cfg_groups.conf",
            job_name="my_pp"):
        assert compose(config_name="config") == {
            "optimizer": {
                "type": "nesterov",
                "lr": 0.001
            }
        }
Example #13
    def test_add_config_group(self) -> None:
        ConfigStore.instance().store(group="group", name="a0", node={"key": 0})
        ConfigStore.instance().store(group="group", name="a1", node={"key": 1})
        # overriding non existing group throws
        with raises(ConfigCompositionException):
            compose(overrides=["group=a0"])

        # appending a new group
        cfg = compose(overrides=["+group=a0"])
        assert cfg == {"group": {"key": 0}}

        # force adding is not supported for config groups.
        with raises(
                ConfigCompositionException,
                match=re.escape(
                    "force-add of config groups is not supported: '++group=a1'"
                ),
        ):
            compose(overrides=["++group=a1"])
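For reference, the three prefixes exercised above follow Hydra's override grammar: a bare group=a0 overrides an existing config group selection, +group=a0 appends a new one, and ++key=value force-adds a value (adds it if missing, overrides it otherwise). As the final assertion shows, the force-add form is rejected for config groups.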
Example #14
def test_initialize_with_file(restore_singletons: Any) -> None:
    initialize_with_file(
        calling_file="tests/test_apps/app_with_cfg_groups/my_app.py",
        config_path="conf")
    assert compose(config_name="config") == {
        "optimizer": {
            "type": "nesterov",
            "lr": 0.001
        }
    }
Example #15
def test_adding_to_sc_dict(hydra_restore_singletons: Any, overrides: List[str],
                           expected: Any) -> None:
    @dataclass
    class Config:
        map: Dict[str, str] = field(default_factory=dict)

    ConfigStore.instance().store(name="config", node=Config)

    cfg = compose(config_name="config", overrides=overrides)
    assert cfg == expected
Example #16
    def test_training_only_cnn_weights(self):
        with initialize(config_path=os.path.join("..", "fixtures", "conf")):
            config = compose(
                config_name="config",
                overrides=[
                    "runner/model=resnet18fusionpolicy",
                    "runner/dataset=filetextbertforwardsatosapolicy",
                    "runner/val_dataset=filetextbertforwardsatosapolicy",
                    "runner.runner_name=SOCTextForwardPolicyRunner"
                ])
            config.runner.dataset.dataset_path = _RAW_TEXT_BERT_DATASET_PATH
            config.runner.val_dataset.dataset_path = _RAW_TEXT_BERT_DATASET_PATH
            config.runner.train_cnn = True
            config.runner.train_fusion = False
            config.runner.train_heads = True

            config.trainer.default_root_dir = self.folder
            config.trainer.fast_dev_run = False

            # We rely on seeds to copy the init weights
            seed_everything(config['runner']['seed'])
            r_copy = make_runner(config['runner'])
            r_copy.setup('fit')

            seed_everything(config['runner']['seed'])
            runner = make_runner(config['runner'])
            trainer = Trainer(**config['trainer'], deterministic=True)
            trainer.fit(runner)

            zipped_params = zip(r_copy.model.fusion.parameters(),
                                runner.model.fusion.parameters())
            for param_copy, param in zipped_params:
                assert torch.all(torch.eq(param_copy, param))

            zipped_params = zip(r_copy.model.spatial_state_head.parameters(),
                                runner.model.spatial_state_head.parameters())
            for param_copy, param in zipped_params:
                assert not torch.all(torch.eq(param_copy, param))

            zipped_params = zip(r_copy.model.linear_state_head.parameters(),
                                runner.model.linear_state_head.parameters())
            for param_copy, param in zipped_params:
                assert not torch.all(torch.eq(param_copy, param))

            # zipped_params = zip(
            #     r_copy.model.policy_head.parameters(), runner.model.policy_head.parameters()
            # )
            # for param_copy, param in zipped_params:
            #     assert not torch.all(torch.eq(param_copy, param))

            zipped_params = zip(r_copy.model.cnn.parameters(),
                                runner.model.cnn.parameters())
            for param_copy, param in zipped_params:
                assert not torch.all(torch.eq(param_copy, param))
                break  # Not all layers are learnable so we check only the first one
Example #17
 def setUp(self):
     MockedApiWorkflowSetup.setUp(self)
     self.create_fake_dataset()
     with initialize(config_path="../../lightly/cli/config",
                     job_name="test_app"):
         self.cfg = compose(config_name="config",
                            overrides=[
                                "token='123'",
                                f"input_dir={self.folder_path}",
                                "trainer.max_epochs=0"
                            ])
Example #18
def get_features_cfg(cfg):
    # Get features configs/overrides
    features_overrides = parse_processed_data_overrides(cfg)
    features_overrides.extend(parse_override(cfg.lagged_features))
    features_cfg = compose(
        config_name="compute_lagged_features",
        return_hydra_config=True,
        overrides=features_overrides,
    )

    return features_cfg
Example #19
def get_data_cfg(cfg):

    # Get data configs/overrides
    data_overrides = parse_data_overrides(cfg)
    data_cfg = compose(
        config_name="process_data",
        return_hydra_config=True,
        overrides=data_overrides,
    )

    return data_cfg
Example #20
 def test_generated_config(self) -> None:
     try:
         # config is relative to a module
         initialize_with_module(calling_module="hydra_app.main", config_path="conf")
         cfg = compose(config_name="config", overrides=["app.user=test_user"])
         assert cfg == {
             "app": {"user": "******", "num1": 10, "num2": 20},
             "db": {"host": "localhost", "port": 3306},
         }
     finally:
         GlobalHydra.instance().clear()
Example #21
def test_schedulers(sch_name: str) -> None:
    scheduler_name = sch_name.split('.')[0]
    with initialize(config_path='../conf'):
        cfg = compose(config_name='config',
                      overrides=[
                          f'scheduler={scheduler_name}', 'optimizer=sgd',
                          'private=default'
                      ])
        optimizer = load_obj(cfg.optimizer.class_name)(torch.nn.Linear(
            1, 1).parameters(), **cfg.optimizer.params)
        load_obj(cfg.scheduler.class_name)(optimizer, **cfg.scheduler.params)
Example #22
 def setUp(self):
     self.folder_path, self.sample_names = self.create_dataset_no_subdir(10)
     with initialize(config_path='../../lightly/cli/config', job_name='test_app'):
         self.cfg = compose(
             config_name='config',
             overrides=[
                 'token="123"',
                 f'input_dir={self.folder_path}',
                 'trainer.max_epochs=0',
             ],
         )
Example #23
def test_main():
    # Ensure our training script runs top to bottom without errors.
    initialize(config_path='../conf')
    overrides = dict(num_steps=5,
                     train_bs=2,
                     val_bs=2,
                     test_mode=True,
                     eval_freq=2,
                     report_freq=2)
    cfg = compose("config.yaml", overrides=[f'{k}={v}' for k, v in overrides.items()])
    main(cfg)
Example #24
 def test_compose_config(
     self,
     config_dir: str,
     hydra_global_context: TGlobalHydraContext,
     config_file: str,
     overrides: List[str],
     expected: Any,
 ) -> None:
     with hydra_global_context(config_dir=config_dir):
         cfg = compose(config_file, overrides)
         assert cfg == expected
Example #25
 def test_compose_decorator(
     self,
     hydra_global_context: TGlobalHydraContext,
     config_dir: str,
     config_file: str,
     overrides: List[str],
     expected: Any,
 ) -> None:
     with hydra_global_context(config_dir=config_dir):
         ret = compose(config_file, overrides)
         assert ret == expected
Example #26
def load_hydra_config(file_path):
    """Load a hydra composable config or a single yaml config

    :param file_path: path to the hydra main default_config
    :return: a DictConfig containing the configuration
    """
    file_path = to_absolute_path(file_path)
    conf_dir, file_name = os.path.split(file_path)
    with initialize_config_dir(conf_dir):
        cfg = compose(config_name=file_name)
    return cfg
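A hypothetical call, assuming a YAML file exists at the placeholder path and a Hydra version that still accepts a config name carrying its .yaml extension (the helper passes the file name through to compose unchanged):

from omegaconf import OmegaConf

cfg = load_hydra_config('conf/config.yaml')  # placeholder path
print(OmegaConf.to_yaml(cfg))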
Example #27
def test_model_save_hyper_parameters_interpolation_with_hydra(tmpdir):
    """
    This test relies on configuration saved under tests/models/conf/config.yaml
    """
    class TestHydraModel(BoringModel):
        def __init__(self, args_0, args_1, args_2, kwarg_1=None):
            self.save_hyperparameters()
            self.test_hparams()
            config_file = f"{tmpdir}/hparams.yaml"
            save_hparams_to_yaml(config_file, self.hparams)
            self.hparams = load_hparams_from_yaml(config_file)
            self.test_hparams()
            super().__init__()

        def test_hparams(self):
            assert self.hparams.args_0.log == "Something"
            assert self.hparams.args_1['cfg'].log == "Something"
            assert self.hparams.args_2[0].log == "Something"
            assert self.hparams.kwarg_1['cfg'][0].log == "Something"

    with initialize(config_path="conf"):
        args_0 = compose(config_name="config")
        args_1 = {"cfg": compose(config_name="config")}
        args_2 = [compose(config_name="config")]
        kwarg_1 = {"cfg": [compose(config_name="config")]}
        model = TestHydraModel(args_0, args_1, args_2, kwarg_1=kwarg_1)
        epochs = 2
        checkpoint_callback = ModelCheckpoint(monitor=None,
                                              dirpath=tmpdir,
                                              save_top_k=-1)
        trainer = Trainer(
            default_root_dir=tmpdir,
            callbacks=[checkpoint_callback],
            limit_train_batches=10,
            limit_val_batches=10,
            max_epochs=epochs,
            logger=False,
        )
        trainer.fit(model)
        _ = TestHydraModel.load_from_checkpoint(
            checkpoint_callback.best_model_path)
Example #28
def test_reward_decoder(overrides: List[str]) -> None:
    with initialize(config_path="../../../config"):
        config = compose(
            config_name="config",
            overrides=overrides,
        )

        batch_size = 128
        reward_feature_dim = config.agent.reward_decoder.feature_dim
        reward_decoder = RewardDecoder(feature_dim=reward_feature_dim, )
        batch = torch.rand((batch_size, reward_feature_dim))
        assert reward_decoder(batch).shape == (batch_size, 1)
Example #29
def load_config(config_path: str = 'config', config_file: str = 'config',
                overrides: Optional[List[str]] = None):
    """
    Args:
        config_path (str): config directory, resolved two levels above this module
        config_file (str): name of the primary config file
        overrides (Optional[List[str]]): "key=value" strings passed to compose
    Returns:
        config (DictConfig): the composed configuration
    """
    try:
        initialize(f'../../{config_path}')
    except ValueError:
        from hydra.core.global_hydra import GlobalHydra
        GlobalHydra.instance().clear()
        initialize(f'../../{config_path}')

    if overrides is not None:
        cfg = compose(config_name=config_file, overrides=overrides)
    else:
        cfg = compose(config_name=config_file)
        
    return cfg
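A hypothetical usage with a placeholder override; compose expects each override as a 'key=value' string:

cfg = load_config(config_path='config', config_file='config',
                  overrides=['trainer.max_epochs=1'])  # placeholder override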
Example #30
def setup_pathmanager():
    """
    Setup PathManager. A bit hacky -- we use the #set_env_vars method to setup pathmanager
    and as such we need to create a dummy config, and dummy values for local_rank and node_id.
    """
    with initialize_config_module(config_module="vissl.config"):
        cfg = compose(
            "defaults",
            overrides=["config=test/integration_test/quick_swav"],
        )
    config = AttrDict(cfg).config
    set_env_vars(local_rank=0, node_id=0, cfg=config)