def test_fetch_fallback(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    element_name, repo, ref = create_test_element(tmpdir, project_dir)
    cache_dir = os.path.join(str(tmpdir), "cache")

    # use artifact cache for sources for now, they should work the same
    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        with context_with_source_cache(cli, cache_dir, share, tmpdir) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements([element_name])[0]
            element._initialize_state()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            assert not share.get_source_proto(source._get_source_name())
            assert not os.path.exists(os.path.join(cache_dir, "sources"))

            # Now check if it falls back to the source fetch method.
            res = cli.run(project=project_dir, args=["source", "fetch", element_name])
            res.assert_success()
            brief_key = source._get_brief_display_key()
            assert (
                "Remote source service ({}) does not have source {} cached".format(share.repo, brief_key)
            ) in res.stderr
            assert ("SUCCESS Fetching from {}".format(repo.source_config(ref=ref)["url"])) in res.stderr

            # Check that the source is in both the source dir and the local CAS
            element = project.load_elements([element_name])[0]
            element._initialize_state()
            assert element._cached_sources()

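# Neither create_test_element nor context_with_source_cache is defined in
# this section. The sketches below show plausible implementations under
# stated assumptions: a git test repo backing an import element, and a user
# configuration registering the share as a push-enabled source cache. The
# element name, config keys and bodies are assumptions, not the canonical
# helpers.
from contextlib import contextmanager


def create_test_element(tmpdir, project_dir):
    # Create a git repo from the project's files and an import element
    # referencing it; return (element_name, repo, ref)
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(project_dir, "files"))
    element_path = os.path.join(project_dir, "elements")
    element_name = "fetch.bst"
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
    return element_name, repo, ref


@contextmanager
def context_with_source_cache(cli, cache, share, tmpdir):
    # Point both the CLI and a dummy context at a user config which uses
    # the share as a source cache
    user_config_file = str(tmpdir.join("buildstream.conf"))
    user_config = {
        "scheduler": {"pushers": 1},
        "source-caches": {"servers": [{"url": share.repo, "push": True}]},
        "cachedir": cache,
    }
    _yaml.roundtrip_dump(user_config, file=user_config_file)
    cli.configure(user_config)

    with dummy_context(config=user_config_file) as context:
        yield context
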
def test_source_fetch(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    element_name, _repo, _ref = create_test_element(tmpdir, project_dir)
    cache_dir = os.path.join(str(tmpdir), "cache")

    # use artifact cache for sources for now, they should work the same
    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        with context_with_source_cache(cli, cache_dir, share, tmpdir) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements([element_name])[0]
            element._initialize_state()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            assert not share.get_source_proto(source._get_source_name())

            # Just check that we sensibly fetch and build the element
            res = cli.run(project=project_dir, args=["build", element_name])
            res.assert_success()
            assert os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) != []

            # get root digest of source
            sourcecache = context.sourcecache
            digest = sourcecache.export(source)._get_digest()

            # Push the source to the remote
            res = cli.run(project=project_dir, args=["source", "push", "--remote", share.repo, element_name])
            res.assert_success()

            # check the share has the proto and the object
            assert share.get_source_proto(source._get_source_name())
            assert share.has_object(digest)

            # Delete the source locally
            shutil.rmtree(os.path.join(str(cache_dir), "sources"))
            shutil.rmtree(os.path.join(str(cache_dir), "cas"))
            state = cli.get_element_state(project_dir, element_name)
            assert state == "fetch needed"

            # Now fetch the source and check
            res = cli.run(project=project_dir, args=["source", "fetch", element_name])
            res.assert_success()
            assert "Pulled source" in res.stderr

        with context_with_source_cache(cli, cache_dir, share, tmpdir) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements([element_name])[0]
            element._initialize_state()

            # check that we have the source in the cas now and it's not fetched
            assert element._cached_sources()
            assert os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) == []

def test_pull_fail(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    element_name, repo, _ref = create_test_element(tmpdir, project_dir)
    cache_dir = os.path.join(str(tmpdir), "cache")

    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        with context_with_source_cache(cli, cache_dir, share, tmpdir) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements([element_name])[0]
            element._query_source_cache()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            # remove files and check that it doesn't build
            shutil.rmtree(repo.repo)

            # Should fail in stream, with a plugin task causing the error
            res = cli.run(project=project_dir, args=["build", element_name])
            res.assert_main_error(ErrorDomain.STREAM, None)
            res.assert_task_error(ErrorDomain.SOURCE, None)
            assert (
                "Remote source service ({}) does not have source {} cached".format(
                    share.repo, source._get_brief_display_key()
                )
                in res.stderr
            )

def _push(cli, cache_dir, project_dir, config_file, target):
    with dummy_context(config=config_file) as context:
        # Load the project manually
        project = Project(project_dir, context)
        project.ensure_fully_loaded()

        # Assert that the element's artifact is cached
        element = project.load_elements(["target.bst"])[0]
        element_key = cli.get_element_key(project_dir, "target.bst")
        assert cli.artifact.is_cached(cache_dir, element, element_key)

        # Create a local artifact cache handle
        artifactcache = context.artifactcache

        # Initialize remotes
        context.initialize_remotes(True, True, None, None)

        # Query local cache
        element._load_artifact(pull=False)

        assert artifactcache.has_push_remotes(plugin=element), "No remote configured for element target.bst"
        assert element._push(), "Push operation failed"

    return element_key

def test_source_fetch(tmpdir, cli, datafiles):
    project_dir = os.path.join(datafiles.dirname, datafiles.basename, "project")
    cachedir = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": cachedir})

    res = cli.run(project=project_dir, args=["source", "fetch", "import-dev.bst"])
    res.assert_success()

    with dummy_context() as context:
        context.cachedir = cachedir
        # load project and sourcecache
        project = Project(project_dir, context)
        project.ensure_fully_loaded()
        cas = context.get_cascache()
        sourcecache = context.sourcecache

        element = project.load_elements(["import-dev.bst"])[0]
        element._query_source_cache()
        source = list(element.sources())[0]

        assert element._cached_sources()

        # check that the directory structures are identical
        digest = sourcecache.export(source)._get_digest()
        extractdir = os.path.join(str(tmpdir), "extract")
        cas.checkout(extractdir, digest)

        dir1 = extractdir
        dir2 = os.path.join(project_dir, "files", "dev-files")
        assert list(relative_walk(dir1)) == list(relative_walk(dir2))

def test_source_staged(tmpdir, cli, datafiles):
    project_dir = os.path.join(datafiles.dirname, datafiles.basename, "project")
    cachedir = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": cachedir})

    res = cli.run(project=project_dir, args=["build", "import-bin.bst"])
    res.assert_success()

    with dummy_context() as context:
        context.cachedir = cachedir
        # load project and sourcecache
        project = Project(project_dir, context)
        project.ensure_fully_loaded()
        sourcecache = context.sourcecache
        cas = context.get_cascache()

        # now check that the source is in the refs file, this is pretty messy
        # but seems to be the only way to get the sources?
        element = project.load_elements(["import-bin.bst"])[0]
        element._query_source_cache()
        source = list(element.sources())[0]
        assert element._cached_sources()
        assert sourcecache.contains(source)

        # Extract the file and check it's the same as the one we imported
        digest = sourcecache.export(source)._get_digest()
        extractdir = os.path.join(str(tmpdir), "extract")
        cas.checkout(extractdir, digest)

        dir1 = extractdir
        dir2 = os.path.join(project_dir, "files", "bin-files")
        assert list(relative_walk(dir1)) == list(relative_walk(dir2))

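# relative_walk is not defined in this section; both staging tests above use
# it to compare two directory trees by their relative file paths. A minimal
# sketch, assuming plain os.walk semantics and that both trees are walked in
# the same order (the canonical helper may differ):
def relative_walk(rootdir):
    # Yield each file's path relative to rootdir, in walk order
    for root, _, files in os.walk(rootdir):
        for filename in files:
            yield os.path.relpath(os.path.join(root, filename), rootdir)
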
def test_staged_source_build(tmpdir, datafiles, cli):
    project_dir = os.path.join(datafiles.dirname, datafiles.basename, "project")
    cachedir = os.path.join(str(tmpdir), "cache")
    element_path = "elements"
    source_protos = os.path.join(str(tmpdir), "cache", "source_protos")
    elementsources = os.path.join(str(tmpdir), "cache", "elementsources")
    source_dir = os.path.join(str(tmpdir), "cache", "sources")

    cli.configure({"cachedir": cachedir})

    create_element_size("target.bst", project_dir, element_path, [], 10000)

    with dummy_context() as context:
        context.cachedir = cachedir
        project = Project(project_dir, context)
        project.ensure_fully_loaded()

        element = project.load_elements(["import-dev.bst"])[0]

        # check consistency of the source
        element._query_source_cache()
        assert not element._cached_sources()

    res = cli.run(project=project_dir, args=["build", "target.bst"])
    res.assert_success()

    # delete the artifact and check that the state is buildable
    cli.remove_artifact_from_cache(project_dir, "target.bst")
    states = cli.get_element_states(project_dir, ["target.bst"])
    assert states["target.bst"] == "buildable"

    # delete the source dir and check that the state is still buildable
    shutil.rmtree(source_dir)
    states = cli.get_element_states(project_dir, ["target.bst"])
    assert states["target.bst"] == "buildable"

    # build and check that no fetching was done.
    res = cli.run(project=project_dir, args=["build", "target.bst"])
    res.assert_success()
    assert "Fetching from" not in res.stderr

    # assert the source directory is still empty (though there may be
    # directories from staging etc.)
    files = []
    for _, _, filenames in os.walk(source_dir):
        files.extend(filenames)
    assert files == []

    # Now remove the source refs and check the state
    shutil.rmtree(source_protos)
    shutil.rmtree(elementsources)
    cli.remove_artifact_from_cache(project_dir, "target.bst")
    states = cli.get_element_states(project_dir, ["target.bst"])
    assert states["target.bst"] == "fetch needed"

    # Check that it now fetches when building the target
    res = cli.run(project=project_dir, args=["build", "target.bst"])
    res.assert_success()
    assert "Fetching from" in res.stderr

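# create_element_size is also not shown here. A hypothetical sketch: it
# writes an import element named `name` under elements_path whose source
# carries roughly `size` bytes of random data, so that cache behaviour is
# exercised with a non-trivial payload. The body is an assumption, not the
# canonical helper.
def create_element_size(name, project_dir, elements_path, dependencies, size):
    os.makedirs(os.path.join(project_dir, elements_path), exist_ok=True)

    # Back the element with a git repo containing `size` bytes of random data
    repodir = os.path.join(project_dir, "repos", name)
    os.makedirs(repodir, exist_ok=True)
    repo = create_repo("git", repodir)

    datadir = os.path.join(project_dir, "data", name)
    os.makedirs(datadir, exist_ok=True)
    with open(os.path.join(datadir, "data"), "wb") as f:
        f.write(os.urandom(size))
    ref = repo.create(datadir)

    element = {
        "kind": "import",
        "sources": [repo.source_config(ref=ref)],
        "depends": dependencies,
    }
    _yaml.roundtrip_dump(element, os.path.join(project_dir, elements_path, name))
    return repo
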
def _push(cli, cache_dir, project_dir, config_file, target):
    with dummy_context(config=config_file) as context:
        # Load the project manually
        project = Project(project_dir, context)
        project.ensure_fully_loaded()

        # Assert that the element's artifact is cached
        element = project.load_elements(["target.bst"])[0]
        element_key = cli.get_element_key(project_dir, "target.bst")
        assert cli.artifact.is_cached(cache_dir, element, element_key)

        # Create a local artifact cache handle
        artifactcache = context.artifactcache

        # Ensure the element's artifact member is initialised
        # This is duplicated from Pipeline.resolve_elements()
        # as this test does not use the cli frontend.
        for e in element._dependencies(_Scope.ALL):
            e._initialize_state()

        # Initialize remotes
        context.initialize_remotes(True, True, None, None)

        assert artifactcache.has_push_remotes(plugin=element), "No remote configured for element target.bst"
        assert element._push(), "Push operation failed"

    return element_key

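# A hypothetical caller of _push, following the configuration pattern used
# by the other tests in this section; the element name, share location and
# final assertion (including the cache_key keyword) are illustrative
# assumptions, not a test from the canonical suite.
def test_push(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    rootcache_dir = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": rootcache_dir})

    # Build first, so the artifact to be pushed is cached locally
    result = cli.run(project=project_dir, args=["build", "target.bst"])
    result.assert_success()

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {"url": share.repo, "push": True},
            "cachedir": rootcache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)

        element_key = _push(cli, rootcache_dir, project_dir, user_config_file, "target.bst")
        assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst", cache_key=element_key))
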
def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    element_name, repo, ref = create_test_element(tmpdir, project_dir)
    cache_dir = os.path.join(str(tmpdir), "cache")

    # use artifact cache for sources for now, they should work the same
    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        with context_with_source_cache(cli, cache_dir, share, tmpdir) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements([element_name])[0]
            element._query_source_cache()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            assert not share.get_artifact_proto(source._get_source_name())

            # Just check that we sensibly fetch and build the element
            res = cli.run(project=project_dir, args=["build", element_name])
            res.assert_success()
            assert os.listdir(os.path.join(str(tmpdir), "cache", "sources", "tar")) != []

            # get root digest of source
            sourcecache = context.sourcecache
            digest = sourcecache.export(source)._get_digest()

            # Push the source to the remote
            res = cli.run(project=project_dir, args=["source", "push", "--source-remote", share.repo, element_name])
            res.assert_success()

            # Remove the cas content, only keep the proto and such around
            shutil.rmtree(os.path.join(str(tmpdir), "sourceshare", "repo", "cas", "objects"))

            # check the share doesn't have the object
            assert not share.has_object(digest)

            # Delete the source locally
            shutil.rmtree(os.path.join(str(cache_dir), "sources"))
            shutil.rmtree(os.path.join(str(cache_dir), "cas"))
            state = cli.get_element_state(project_dir, element_name)
            assert state == "fetch needed"

            # Now fetch the source and check
            res = cli.run(project=project_dir, args=["source", "fetch", element_name])
            res.assert_success()

            assert ("SUCCESS Fetching {}".format(repo.source_config(ref=ref)["url"])) in res.stderr

def test_source_push(cli, tmpdir, datafiles):
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {
                "servers": [
                    {
                        "url": share.repo,
                        "push": True,
                    }
                ]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        repo = create_repo("git", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "push.bst"
        element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        # get the source object
        with dummy_context(config=user_config_file) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements(["push.bst"])[0]
            element._initialize_state()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            # check we don't have it in the current cache
            assert not share.get_source_proto(source._get_source_name())

            # build the element, this should fetch and then push the source
            # to the remote
            res = cli.run(project=project_dir, args=["build", "push.bst"])
            res.assert_success()
            assert "Pushed source" in res.stderr

            # check that we've got the remote locally now
            sourcecache = context.sourcecache
            assert sourcecache.contains(source)

            # check that the remote CAS now has it
            digest = sourcecache.export(source)._get_digest()
            assert share.has_object(digest)

def test_pull(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure artifact share
        cache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
            "cachedir": cache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst"))

        # Delete the artifact locally
        cli.remove_artifact_from_cache(project_dir, "target.bst")

        # Assert that we are not cached locally anymore
        assert cli.get_element_state(project_dir, "target.bst") != "cached"

        with dummy_context(config=user_config_file) as context:
            # Load the project
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Assert that the element's artifact is **not** cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert not cli.artifact.is_cached(cache_dir, element, element_key)

            context.cachedir = cache_dir
            context.casdir = os.path.join(cache_dir, "cas")
            context.tmpdir = os.path.join(cache_dir, "tmp")

            # Load the project manually
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Create a local artifact cache handle
            artifactcache = context.artifactcache

            # Manually setup the CAS remote
            artifactcache.setup_remotes(use_config=True)

            assert artifactcache.has_push_remotes(plugin=element), "No remote configured for element target.bst"
            assert artifactcache.pull(element, element_key), "Pull operation failed"

            assert cli.artifact.is_cached(cache_dir, element, element_key)

def test_pull_tree(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst"))

        with dummy_context(config=user_config_file) as context:
            # Load the project and CAS cache
            project = Project(project_dir, context)
            project.ensure_fully_loaded()
            cas = context.get_cascache()

            # Assert that the element's artifact is cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert cli.artifact.is_cached(rootcache_dir, element, element_key)

            # Retrieve the Directory object from the cached artifact
            artifact_digest = cli.artifact.get_digest(rootcache_dir, element, element_key)

            artifactcache = context.artifactcache
            # Manually setup the CAS remote
            artifactcache.setup_remotes(use_config=True)
            assert artifactcache.has_push_remotes()

            directory = remote_execution_pb2.Directory()
            with open(cas.objpath(artifact_digest), "rb") as f:
                directory.ParseFromString(f.read())

            # Build the Tree object while we are still cached
            tree = remote_execution_pb2.Tree()
            tree_maker(cas, tree, directory)

            # Push the Tree as a regular message
            tree_digest = artifactcache.push_message(project, tree)
            tree_hash, tree_size = tree_digest.hash, tree_digest.size_bytes
            assert tree_hash and tree_size

            # Now delete the artifact locally
            cli.remove_artifact_from_cache(project_dir, "target.bst")

            # Assert that we are not cached locally anymore
            artifactcache.close_grpc_channels()
            cas.close_grpc_channels()
            assert cli.get_element_state(project_dir, "target.bst") != "cached"

            tree_digest = remote_execution_pb2.Digest(hash=tree_hash, size_bytes=tree_size)

            # Pull the artifact using the Tree object
            directory_digest = artifactcache.pull_tree(project, tree_digest)
            directory_hash, directory_size = directory_digest.hash, directory_digest.size_bytes

            # Directory size now zero with AaaP and stack element commit #1cbc5e63dc
            assert directory_hash and not directory_size

            directory_digest = remote_execution_pb2.Digest(hash=directory_hash, size_bytes=directory_size)

            # Ensure the entire Tree structure has been pulled
            assert os.path.exists(cas.objpath(directory_digest))

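# tree_maker is used above to flatten a Directory hierarchy into a single
# Tree message. A minimal sketch assuming REAPI semantics (Tree.root holds
# the top-level Directory, Tree.children the transitive child Directories);
# not necessarily the canonical helper.
def tree_maker(cas, tree, directory):
    if tree.root.ByteSize() == 0:
        tree.root.CopyFrom(directory)

    for directory_node in directory.directories:
        # Load each child Directory from the local CAS and recurse into it
        child_directory = tree.children.add()
        with open(cas.objpath(directory_node.digest), "rb") as f:
            child_directory.ParseFromString(f.read())
        tree_maker(cas, tree, child_directory)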