def test_push_split(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # First build the project without the artifact cache configured
    result = cli.run(project=project_dir, args=["build", "target.bst"])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project_dir, "target.bst") == "cached"

    indexshare = os.path.join(str(tmpdir), "indexshare")
    storageshare = os.path.join(str(tmpdir), "storageshare")

    # Set up an artifact cache.
    with create_split_share(indexshare, storageshare) as (index, storage):
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {
                "servers": [
                    {"url": index.repo, "push": True, "type": "index"},
                    {"url": storage.repo, "push": True, "type": "storage"},
                ],
            },
            "cachedir": rootcache_dir,
        }
        config_path = str(tmpdir.join("buildstream.conf"))
        _yaml.roundtrip_dump(user_config, file=config_path)

        element_key = _push(cli, rootcache_dir, project_dir, config_path, "target.bst")
        proto = index.get_artifact_proto(
            cli.get_artifact_name(project_dir, "test", "target.bst", cache_key=element_key)
        )
        assert storage.get_cas_files(proto) is not None
def test_pull_missing_blob_split_share(cli, tmpdir, datafiles):
    project = str(datafiles)

    indexshare = os.path.join(str(tmpdir), "indexshare")
    storageshare = os.path.join(str(tmpdir), "storageshare")

    # Point the client at a split artifact cache: one index server, one storage server
    with create_split_share(indexshare, storageshare) as (index, storage):
        cli.configure(
            {
                "artifacts": {
                    "servers": [
                        {"url": index.repo, "push": True, "type": "index"},
                        {"url": storage.repo, "push": True, "type": "storage"},
                    ]
                }
            }
        )

        _test_pull_missing_blob(cli, project, index, storage)
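# Both tests above rely on the ``create_split_share`` helper from the test
# utilities, which brings up two artifact servers: an "index" holding only
# artifact protos and a "storage" holding only CAS blobs.  The sketch below
# only illustrates that split-share shape; ``_StubShare`` and
# ``_sketch_split_share`` are hypothetical stand-ins, not the real test
# utility, and are not used by the tests above.
from contextlib import contextmanager


class _StubShare:
    # Hypothetical stand-in for an artifact share rooted at ``directory``.
    def __init__(self, directory, *, index_only=False):
        self.directory = directory
        self.index_only = index_only
        self.repo = "http://localhost/" + directory  # hypothetical endpoint URL

    def close(self):
        # A real share would shut down its server here.
        pass


@contextmanager
def _sketch_split_share(index_dir, storage_dir):
    index = _StubShare(index_dir, index_only=True)  # artifact protos only
    storage = _StubShare(storage_dir)               # CAS blobs only
    try:
        yield index, storage
    finally:
        index.close()
        storage.close()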