Пример #1
0
def test_stage_multi_requires() -> None:
    """A stage with two requirements gets each dependency's ResultMap by name.

    ``run`` receives one keyword per required stage and combines every pair of
    results; the assertion pins the expected cross-product output.
    """
    class RequireStage(Stage[NewData, MainConfig],
                       requires=[FirstStage, SecondStage]):
        def run(self, first_stage: ResultMap[MyModel],
                second_stage: ResultMap[NewData]) -> ResultMap[NewData]:
            # Cross-product of both dependencies' results; the comprehension
            # replaces the original manual nested append loop.
            transformed = [
                NewData(m.string + ' ' + n.id)
                for m in first_stage.values()
                for n in second_stage.values()
            ]
            return ResultMap(transformed)

        def report_results(self, results: ResultMap[NewData]) -> None:
            ...

        @property
        def cache_file_name(self) -> str:
            return 'test.json'

    config = MainConfig(CONFIG)
    config.cache = False
    config.report = False
    config.dataset = 'test_data'
    output = RequireStage.process(config)

    assert ([d.id for d in output.values()
             ] == ['Ran append', 'Ran test', 'Ran 2 append', 'Ran 2 test'])
Пример #2
0
def test_stage_load_cache(stage_cache: Path, cache_miss: bool,
                          my_stage: Type['MyStage']) -> None:
    """Loading cached results returns the stored data, or raises on a miss."""
    cfg = MainConfig(CONFIG)
    cfg.dataset = 'test_data'
    cfg.output = stage_cache
    stage_instance = my_stage(cfg)

    if not cache_miss:
        # Cache hit: the previously stored results come back verbatim.
        assert stage_instance.load_cache_results() == TEST_DATA_CACHE
    else:
        # Cache miss: loading must signal the caller via CacheMiss.
        with pytest.raises(CacheMiss):
            stage_instance.load_cache_results()
Пример #3
0
def test_stage_store_cache(tmp_path: Path, my_stage: Type['MyStage']) -> None:
    """Storing results writes a JSON cache file keyed by result id."""
    cfg = MainConfig(CONFIG)
    cfg.dataset = 'test_data'
    cfg.output = tmp_path
    cfg.output_directory.mkdir()

    my_stage(cfg).store_cache_results(TEST_DATA_CACHE)

    cache_path = cfg.output_directory / CACHE_NAME
    assert cache_path.is_file()
    # On-disk content is each model serialized through to_json_obj().
    expected = {key: model.to_json_obj()
                for key, model in TEST_DATA_CACHE.items()}
    assert json.loads(cache_path.read_text()) == expected
Пример #4
0
def test_stage_requires_wrong_config() -> None:
    """Processing a stage whose requirement rejects this config raises a
    click.UsageError rather than running."""
    class RequireStage(Stage[NewData, MainConfig], requires=ThirdStage):
        def run(self, third_stage: ResultMap[NewData]) -> ResultMap[NewData]:
            ...

        def report_results(self, results: ResultMap[NewData]) -> None:
            ...

        @property
        def cache_file_name(self) -> str:
            return 'test.json'

    cfg = MainConfig(CONFIG)
    cfg.cache = False
    cfg.report = False
    cfg.dataset = 'test_data'

    with pytest.raises(click.UsageError):
        RequireStage.process(cfg)
Пример #5
0
def test_args_to_config_sub_config() -> None:
    """CLI args populate a sub-config; unset fields inherit from the main
    config on the click context, and overridden options don't leak back."""
    main = MainConfig()
    ctx = click.Context(click.Command('test'), obj=main)
    options = cli._get_configuration_options(MyConfig)
    cfg = cli._args_to_config(
        ctx, MyConfig, options, opt1=False, opt2='hello', cache=False)

    # Values passed explicitly as arguments.
    assert not cfg.opt1
    assert cfg.opt2 == 'hello'
    # Defaults set by main config
    assert not cfg.report
    assert not cfg.progress
    # Option overridden
    assert not cfg.cache
    # The override stays local: the main config keeps its own value.
    assert main.cache
Пример #6
0
def test_stage_requires() -> None:
    """A single required stage's results arrive in run() under the
    requirement's parameter name."""
    class RequireStage(Stage[NewData, MainConfig], requires=FirstStage):
        def run(self, first_stage: ResultMap[MyModel]) -> ResultMap[NewData]:
            # Repeat each model's string by its number so the assertion below
            # can verify the dependency's data was actually received.
            produced = [
                NewData(item.string * item.number)
                for item in first_stage.values()
            ]
            return ResultMap(produced)

        def report_results(self, results: ResultMap[NewData]) -> None:
            ...

        @property
        def cache_file_name(self) -> str:
            return 'test.json'

    cfg = MainConfig(CONFIG)
    cfg.cache = False
    cfg.report = False
    cfg.dataset = 'test_data'
    output = RequireStage.process(cfg)

    assert ([d.id for d in output.values()
             ] == ['RanRanRan', 'Ran 2Ran 2Ran 2Ran 2'])
Пример #7
0
def test_stage_use_caching(stage_cache: Path, should_use_cache: bool,
                           should_have_used_cache: bool,
                           my_stage: Type['MyStage']) -> None:
    """The stage executes only on a cache miss and persists results when
    caching is enabled."""
    cfg = MainConfig(CONFIG)
    cfg.cache = should_use_cache
    cfg.dataset = 'test_data'
    cfg.output = stage_cache

    output = my_stage.process(cfg)

    # The stage ran if and only if the cache was not used.
    assert my_stage.has_run != should_have_used_cache
    expected = TEST_DATA_CACHE if should_have_used_cache else TEST_DATA_RUN
    assert output == expected

    if should_use_cache:
        cache_path = cfg.output_directory / CACHE_NAME
        assert cache_path.is_file()
        on_disk = json.loads(cache_path.read_text())
        assert on_disk == {k: v.to_json_obj() for k, v in output.items()}
Пример #8
0
def test_stage_reporting(stage_cache: Path, should_use_cache: bool,
                         should_report: bool,
                         my_stage: Type['MyStage']) -> None:
    """Reporting happens exactly when enabled, and reports the processed
    output (whether it came from cache or a fresh run)."""
    cfg = MainConfig(CONFIG)
    cfg.cache = should_use_cache
    cfg.report = should_report
    cfg.dataset = 'test_data'
    cfg.output = stage_cache

    output = my_stage.process(cfg)

    # Report when it should report
    assert my_stage.has_reported == should_report
    if should_report:
        # Report the correct results based on the cache state
        assert my_stage.reported_data == output

    if should_use_cache:
        cache_path = cfg.output_directory / CACHE_NAME
        assert cache_path.is_file()
        on_disk = json.loads(cache_path.read_text())
        assert on_disk == {key: model.to_json_obj()
                           for key, model in output.items()}
Пример #9
0
    string: str
    boolean: bool

    @property
    def id(self) -> str:
        """Identifier for this model: its ``string`` field."""
        return self.string


# File name of the on-disk stage cache used by these tests.
CACHE_NAME = 'cache.json'
# Results pre-seeded into the cache fixture (returned on a cache hit).
TEST_DATA_CACHE = ResultMap(
    [MyModel(1, 'test', False),
     MyModel(2, 'test2', True)])
# Results produced when the stage actually runs (cache miss / caching off).
TEST_DATA_RUN = ResultMap(
    [MyModel(3, 'Ran', False),
     MyModel(4, 'Ran 2', True)])
# Shared base configuration; individual tests copy it via MainConfig(CONFIG).
CONFIG = MainConfig()

# Forward declarations of classes created in fixtures.
if TYPE_CHECKING:

    # Typing-only stub for the stage class that test fixtures create at
    # runtime; declares the bookkeeping attributes tests assert against.
    class MyStage(Stage[MyModel, MainConfig]):
        has_run: bool  # set when run() executed (i.e. cache was not used)
        has_reported: bool  # set when report_results() was invoked
        reported_data: ResultMap[MyModel]  # what report_results() received


@pytest.fixture(scope='function', autouse=True)
def clean_stages() -> None:
    """Rebind the global stage registry to a fresh mapping before each test."""
    base.STAGES = dict()

Пример #10
0
def config(tmp_path: Path) -> CloneConfig:
    """Build a CloneConfig whose main config writes output to a temp dir."""
    main_config = MainConfig()
    main_config.output = tmp_path
    main_config.dataset = 'test'
    return CloneConfig(main_config)