Example #1
def test_keys_append(tmpdir):
    path = Path(tmpdir) / 'test.hdf'
    test_artifact = Artifact(path)
    test_keys = test_artifact._keys

    test_artifact.write('test.keys', 'data')
    assert 'test.keys' in test_artifact
    assert 'test.keys' in test_keys
    assert (test_keys._keys == ['metadata.keyspace', 'test.keys']
            == [str(k) for k in test_artifact.keys])
Example #2
def test_keys_initialization(tmpdir):
    path = Path(tmpdir) / 'test.hdf'
    test_artifact = Artifact(path)
    test_key = test_artifact._keys

    assert test_artifact._path == test_key._path
    assert test_key._keys == ['metadata.keyspace']

    test_artifact.write('new.keys', 'data')
    assert test_key._keys == ['metadata.keyspace', 'new.keys']
    assert test_key.to_list() == test_artifact.keys
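
Examples #1 and #2 exercise the artifact's internal key-tracking object (Artifact._keys). The sketch below captures the behavior those assertions imply; names other than _path, _keys, and to_list() are inferred rather than taken from the library source.

class Keys:
    """Minimal key-tracking helper inferred from the assertions above."""

    def __init__(self, artifact_path):
        self._path = artifact_path          # same path as the owning Artifact
        self._keys = ['metadata.keyspace']  # every new artifact starts with its keyspace

    def append(self, new_key):              # hypothetical name; called on Artifact.write
        self._keys.append(str(new_key))

    def remove(self, key):                  # hypothetical name; called on Artifact.remove
        self._keys.remove(str(key))

    def to_list(self):
        return list(self._keys)

    def __contains__(self, key):
        return str(key) in self._keys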
Example #3
def test_loading_key_leaves_filters_unchanged(hdf_mock):
    # Loading each key drops 'fake_filter' from the filter terms applied to that key;
    # make sure the artifact's own filter_terms attribute stays unchanged.
    path = str(Path(__file__).parent / 'artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10', 'fake_filter']

    a = Artifact(path, filter_terms=filter_terms)

    for key in _KEYS:
        a.load(key)
        assert a.filter_terms == filter_terms
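
Most of the remaining examples rely on hdf_mock, keys_mock, and _KEYS fixtures that live in the test suite's conftest and are not shown on this page. The sketch below shows what such fixtures might look like; the patch target (vivarium.framework.artifact.artifact.hdf) and the exact key list are assumptions, not taken from the source.

import pytest
from unittest.mock import patch

# Hypothetical key list; the real _KEYS fixture enumerates the keys stored in
# the test artifact.
_KEYS = ['metadata.keyspace', 'metadata.locations', 'metadata.versions',
         'population.structure', 'no_data.key']


@pytest.fixture
def keys_mock():
    return list(_KEYS)


@pytest.fixture
def hdf_mock(keys_mock):
    # The patch target is an assumption about where the Artifact class imports
    # its hdf backend from.
    with patch('vivarium.framework.artifact.artifact.hdf') as mock:
        def fake_load(_path, key, *_args, **_kwargs):
            key = str(key)
            if key == 'metadata.keyspace':
                return keys_mock
            if key == 'no_data.key':
                return None  # exercises the "no data" AssertionError path
            return 'data'

        mock.load.side_effect = fake_load
        yield mock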
Example #4
def test_artifact_load_key_has_no_data(hdf_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']
    key = 'no_data.key'

    a = Artifact(path, filter_terms)

    with pytest.raises(AssertionError) as err_info:
        a.load(key)

    assert f"Data for {key} is not available. Check your model specification." == str(
        err_info.value)
    assert hdf_mock.load.called_once_with(path, key, filter_terms)
    assert a._cache == {}
Example #5
def test_artifact_load_missing_key(hdf_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']
    key = 'not.a_real.key'

    a = Artifact(path, filter_terms)
    hdf_mock.load.called_once_with('metadata.keyspace')
    hdf_mock.load.reset_mock()
    with pytest.raises(ArtifactException) as err_info:
        a.load(key)

    assert f"{key} should be in {path}." == str(err_info.value)
    hdf_mock.load.assert_not_called()
    assert a._cache == {}
Example #6
def test_replace_nonexistent_key(hdf_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']
    key = 'new.key'

    a = Artifact(path, filter_terms=filter_terms)
    hdf_mock.called_once_with(key)
    assert key not in a.keys

    hdf_mock.reset_mock()
    with pytest.raises(ArtifactException):
        a.replace(key, "new_data")

    hdf_mock.write.assert_not_called()
    hdf_mock.remove.assert_not_called()
Example #7
def test_create_hdf(tmpdir):
    path = Path(tmpdir) / 'test.hdf'
    assert not path.is_file()

    test_artifact = Artifact(path)
    assert path.is_file()
    assert 'metadata.keyspace' in test_artifact

    test_artifact.write('new.key', 'data')
    assert 'new.key' in test_artifact

    # Check that the existing file was NOT wiped out when the artifact is reopened.
    new_artifact = Artifact(test_artifact.path)
    assert new_artifact.path == test_artifact.path
    assert 'new.key' in new_artifact
Example #8
    def _load_artifact(self, configuration: ConfigTree) -> Optional[Artifact]:
        """Looks up the path to the artifact hdf file, builds a default filter,
        and generates the data artifact. Stores any configuration specified filter
        terms separately to be applied on loading, because not all columns are
        available via artifact filter terms.

        Parameters
        ----------
        configuration :
            Configuration block of the model specification containing the input data parameters.

        Returns
        -------
            An interface to the data artifact.
        """
        if not configuration.input_data.artifact_path:
            return None

        artifact_path = parse_artifact_path_config(configuration)
        draw = configuration.input_data.input_draw_number
        location = configuration.input_data.location
        base_filter_terms = [f'draw == {draw}', get_location_term(location)]
        logger.debug(
            f'Running simulation from artifact located at {artifact_path}.')
        logger.debug(f'Artifact base filter terms are {base_filter_terms}.')
        logger.debug(
            f'Artifact additional filter terms are {self.config_filter_term}.')
        return Artifact(artifact_path, base_filter_terms)
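
For reference, the base filter terms built above have the same shape as the filter_terms lists used throughout these tests: one term restricting the draw and one restricting the location. A hypothetical result for draw 10 and location 'Kenya' is shown below; the exact string produced by get_location_term is an assumption.

# Illustration only; the location term format is assumed, not taken from
# get_location_term itself.
draw = 10
location = 'Kenya'
base_filter_terms = [f'draw == {draw}',
                     f'location == "{location}" or location == "Global"']
# -> ['draw == 10', 'location == "Kenya" or location == "Global"']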
Example #9
def test_artifact_creation(hdf_mock, keys_mock):
    path = Path('path/to/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']

    a = Artifact(path)

    assert a.filter_terms is None
    assert a._cache == {}
    assert a.keys == keys_mock
    hdf_mock.load.called_once_with('metadata.keyspace')

    a = Artifact(path, filter_terms)

    assert a.path == str(path)
    assert a.filter_terms == filter_terms
    assert a._cache == {}
    assert a.keys == keys_mock
    hdf_mock.load.called_once_with('metadata.keyspace')
Example #10
def test_remove(hdf_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']
    key = 'population.structure'

    a = Artifact(path, filter_terms)
    a._cache[key] = 'data'

    assert key in a.keys
    assert key in a._cache

    a.remove(key)

    assert key not in a.keys
    assert key not in a._cache

    expected_calls = [call(path, 'metadata.keyspace'), call(path, key)]
    assert hdf_mock.remove.call_args_list == expected_calls
Example #11
def test_artifact_write_no_data(hdf_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']
    key = 'new.key'

    a = Artifact(path, filter_terms)
    initial_keys = a.keys

    assert key not in a.keys

    with pytest.raises(ArtifactException):
        a.write(key, None)

    assert key not in a.keys
    assert key not in a._cache
    hdf_mock.write.called_once_with(path, 'metadata.keyspace',
                                    ['metadata.keyspace'])
    hdf_mock.remove.assert_not_called()
    assert a.keys == initial_keys
Example #12
def test_artifact_write_duplicate_key(hdf_mock, keys_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']
    key = 'population.structure'

    art = Artifact(path, filter_terms)
    initial_keys = art.keys
    assert initial_keys == keys_mock

    with pytest.raises(ArtifactException) as err_info:
        art.write(key, "data")

    assert f'{key} already in artifact.' == str(err_info.value)
    assert key in art
    assert key in art.keys
    assert key not in art._cache
    hdf_mock.write.called_once_with(path, 'metadata.keyspace',
                                    ['metadata.keyspace'])
    hdf_mock.remove.assert_not_called()
    assert art.keys == initial_keys
Example #13
def test_remove_bad_key(hdf_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']
    key = 'non_existent.key'
    a = Artifact(path, filter_terms)
    initial_keys = a.keys

    assert key not in a.keys
    assert key not in a._cache

    with pytest.raises(ArtifactException) as err_info:
        a.remove(key)

    assert f'Trying to remove non-existent key {key} from artifact.' == str(
        err_info.value)
    assert key not in a.keys
    assert key not in a._cache
    hdf_mock.remove.assert_not_called()
    hdf_mock.write.called_once_with(path, 'metadata.keyspace',
                                    ['metadata.keyspace'])
    assert a.keys == initial_keys
Example #14
def test_artifact_write(hdf_mock, keys_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']
    key = 'new.key'

    a = Artifact(path, filter_terms)
    initial_keys = a.keys

    assert key not in a.keys

    a.write(key, "data")

    assert key in a.keys
    assert key not in a._cache
    expected_call = [
        call(path, 'metadata.keyspace', ['metadata.keyspace']),
        call(path, 'metadata.keyspace', keys_mock + [key]),
        call(path, key, 'data')
    ]
    assert hdf_mock.write.call_args_list == expected_call
    assert set(a.keys) == set(initial_keys + [key])
Example #15
def test_artifact_load(hdf_mock, keys_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']

    a = Artifact(path, filter_terms)
    keys_without_metadata = set(keys_mock) - {
        'metadata.locations', 'metadata.keyspace', 'metadata.versions'
    }
    for key in keys_without_metadata:
        if key == 'no_data.key':
            continue

        assert key not in a._cache

        result = a.load(key)

        assert hdf_mock.load.called_once_with(path, key, filter_terms)
        assert key in a._cache
        assert a._cache[key] == 'data'
        assert result == 'data'

        hdf_mock.load.reset_mock()
Example #16
def test_remove_no_cache(hdf_mock, keys_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']
    key = 'population.structure'

    a = Artifact(path, filter_terms)

    initial_keys = a.keys[:]

    assert key in initial_keys
    assert key not in a._cache

    a.remove(key)

    assert key not in a.keys
    assert key not in a._cache
    assert set(initial_keys).difference(a.keys) == {key}
    expected_calls_remove = [call(path, 'metadata.keyspace'), call(path, key)]
    assert hdf_mock.remove.call_args_list == expected_calls_remove
    expected_calls_write = [
        call(path, 'metadata.keyspace', ['metadata.keyspace']),
        call(path, 'metadata.keyspace', [k for k in keys_mock if k != key])
    ]
    assert hdf_mock.write.call_args_list == expected_calls_write
Example #17
def test_clear_cache(hdf_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']
    key = 'population.structure'

    a = Artifact(path, filter_terms)
    a.clear_cache()

    assert a._cache == {}

    a._cache[key] = 'data'
    a.clear_cache()

    assert a._cache == {}
Example #18
def test_keys_remove(tmpdir):
    path = Path(tmpdir) / 'test.hdf'
    test_artifact = Artifact(path)
    test_keys = test_artifact._keys

    test_artifact.write('test.keys1', 'data')
    test_artifact.write('test.keys2', 'data')
    assert 'test.keys1' in test_artifact and 'test.keys2' in test_artifact
    assert 'test.keys1' in test_keys and 'test.keys2' in test_keys

    test_artifact.remove('test.keys2')
    assert 'test.keys1' in test_artifact and 'test.keys2' not in test_artifact
    assert 'test.keys1' in test_keys and 'test.keys2' not in test_keys
Example #19
def test_replace(hdf_mock, keys_mock):
    path = Path('/place/with/artifact.hdf')
    filter_terms = ['location == Global', 'draw == 10']
    key = 'new.key'

    a = Artifact(path, filter_terms=filter_terms)

    assert key not in a.keys

    a.write(key, "data")
    keyspace_key = 'metadata.keyspace'
    new_keyspace = keys_mock + [key]

    assert hdf_mock.write.call_args_list == [
        call(path, keyspace_key, [str(keyspace_key)]),
        call(path, keyspace_key, new_keyspace),
        call(path, key, 'data')
    ]

    hdf_mock.reset_mock()

    a.replace(key, "new_data")

    # a.replace() first calls self.remove(), which removes the keyspace node and
    # the key itself, then self.write(), which removes and rewrites the keyspace again.
    expected_calls_remove = [
        call(path, keyspace_key),
        call(path, key),
        call(path, keyspace_key)
    ]
    assert hdf_mock.remove.call_args_list == expected_calls_remove

    expected_calls_write = [
        call(path, keyspace_key, new_keyspace),
        call(path, keyspace_key, new_keyspace),
        call(path, key, 'new_data')
    ]
    assert hdf_mock.write.call_args_list == expected_calls_write
    assert key in a.keys
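
The call sequences asserted in this last example, together with example #6, suggest that Artifact.replace is built from remove followed by write. A plausible sketch of that logic, inferred from the assertions rather than copied from the library:

def replace(self, key, data):
    """Replace the data stored at an existing key (sketch; the error message
    below is hypothetical)."""
    if key not in self.keys:
        raise ArtifactException(
            f'Trying to replace non-existent key {key} in artifact.')
    self.remove(key)       # drops the old data and rewrites the keyspace
    self.write(key, data)  # rewrites the keyspace again and stores the new data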