def test_artifact_list_exact_contents(cli, datafiles, target, with_project):
    project = str(datafiles)

    # Get the cache key of our test element
    key = cli.get_element_key(project, "import-bin.bst")

    # Ensure we have an artifact to read
    result = cli.run(project=project, args=["build", "import-bin.bst"])
    result.assert_success()

    if target == "element-name":
        arg = "import-bin.bst"
    elif target == "artifact-name":
        arg = "test/import-bin/" + key

    # Delete the project.conf if we're going to try this without a project
    if not with_project:
        os.remove(os.path.join(project, "project.conf"))

    # List the contents via the element name or the artifact name
    result = cli.run(project=project, args=["artifact", "list-contents", arg])

    # Expect to fail if we try to list by element name and there is no project
    if target == "element-name" and not with_project:
        result.assert_main_error(ErrorDomain.STREAM, "project-not-loaded")
    else:
        result.assert_success()

        expected_output_template = "{target}:\n\tusr\n\tusr/bin\n\tusr/bin/hello\n\n"
        expected_output = expected_output_template.format(target=arg)
        assert expected_output in result.output

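# Note: test_artifact_list_exact_contents() (and the other tests below taking
# "target", "with_project" or "strict" arguments) presumably relies on pytest
# parametrization that is not shown in this excerpt. Judging by the branches
# in the test body, a sketch of the missing decorators would look like the
# following (the DATA_DIR constant is an assumption):
#
# @pytest.mark.datafiles(DATA_DIR)
# @pytest.mark.parametrize("target", ["element-name", "artifact-name"])
# @pytest.mark.parametrize("with_project", [True, False])
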
def test_artifact_list_exact_contents_glob(cli, datafiles):
    project = str(datafiles)

    # Ensure we have an artifact to read
    result = cli.run(project=project, args=["build", "target.bst"])
    assert result.exit_code == 0

    # List the contents via glob
    result = cli.run(project=project, args=["artifact", "list-contents", "test/**"])
    assert result.exit_code == 0

    # Get the cache keys for each element in the glob
    import_bin_key = cli.get_element_key(project, "import-bin.bst")
    import_dev_key = cli.get_element_key(project, "import-dev.bst")
    compose_all_key = cli.get_element_key(project, "compose-all.bst")
    target_key = cli.get_element_key(project, "target.bst")

    expected_artifacts = [
        "test/import-bin/" + import_bin_key,
        "test/import-dev/" + import_dev_key,
        "test/compose-all/" + compose_all_key,
        "test/target/" + target_key,
    ]

    for artifact in expected_artifacts:
        assert artifact in result.output

def test_unique_key(cli, tmpdir, datafiles):
    """This test confirms that the 'filename' parameter is honoured when it
    comes to generating a cache key for the source.
    """
    project = str(datafiles)
    generate_project(project, {"aliases": {"tmpdir": "file:///" + str(tmpdir)}})

    states = cli.get_element_states(project, ["target.bst", "target-custom.bst", "target-custom-executable.bst"])
    assert states["target.bst"] == "fetch needed"
    assert states["target-custom.bst"] == "fetch needed"
    assert states["target-custom-executable.bst"] == "fetch needed"

    # Try to fetch it
    cli.run(project=project, args=["source", "fetch", "target.bst"])

    # We should download the file only once
    states = cli.get_element_states(project, ["target.bst", "target-custom.bst", "target-custom-executable.bst"])
    assert states["target.bst"] == "buildable"
    assert states["target-custom.bst"] == "buildable"
    assert states["target-custom-executable.bst"] == "buildable"

    # But the cache keys are different because the 'filename' is different.
    assert (
        cli.get_element_key(project, "target.bst")
        != cli.get_element_key(project, "target-custom.bst")
        != cli.get_element_key(project, "target-custom-executable.bst")
    )

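# A minimal sketch of the generate_project() helper used above, assuming it
# merges the given overrides into a fresh project.conf; the real helper lives
# in the test utilities and may set further defaults. The "test" project name
# and the min-version value here are assumptions.
def generate_project(project_dir, config):
    project_conf = {"name": "test", "min-version": "2.0"}
    project_conf.update(config)
    _yaml.roundtrip_dump(project_conf, os.path.join(project_dir, "project.conf"))
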
def test_argument_artifact(cli, datafiles):
    project = str(datafiles)

    # Build an import element with no dependencies (as there will only be ONE cache key)
    result = cli.run(project=project, args=["build", "import-bin.bst"])  # Has no dependencies
    result.assert_success()

    # Get the key and the artifact ref ($project/$element_name/$key)
    key = cli.get_element_key(project, "import-bin.bst")
    artifact = os.path.join("test", "import-bin", key)

    # Test autocompletion of the artifact
    cmds = ["bst artifact log ", "bst artifact log t", "bst artifact log test/"]

    for i, cmd in enumerate(cmds):
        word_idx = 3
        result = cli.run(
            project=project,
            cwd=project,
            env={"_BST_COMPLETION": "complete", "COMP_WORDS": cmd, "COMP_CWORD": str(word_idx)},
        )

        if result.output:
            words = result.output.splitlines()  # This leaves an extra space on each e.g. ['foo.bst ']
            words = [word.strip() for word in words]

            if i == 0:
                expected = PROJECT_ELEMENTS + [artifact]  # We should now be able to see the artifact
            elif i == 1:
                expected = ["target.bst", artifact]
            elif i == 2:
                expected = [artifact]

            assert expected == words

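# PROJECT_ELEMENTS is presumably a module-level constant listing the element
# names of the completion test project. A sketch consistent with the elements
# built elsewhere in this section (the exact contents are an assumption):
PROJECT_ELEMENTS = ["compose-all.bst", "import-bin.bst", "import-dev.bst", "target.bst"]
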
def _push(cli, cache_dir, project_dir, config_file, target):
    with dummy_context(config=config_file) as context:
        # Load the project manually
        project = Project(project_dir, context)
        project.ensure_fully_loaded()

        # Assert that the element's artifact is cached
        element = project.load_elements(["target.bst"])[0]
        element_key = cli.get_element_key(project_dir, "target.bst")
        assert cli.artifact.is_cached(cache_dir, element, element_key)

        # Create a local artifact cache handle
        artifactcache = context.artifactcache

        # Initialize remotes
        context.initialize_remotes(True, True, None, None)

        # Query local cache
        element._load_artifact(pull=False)

        assert artifactcache.has_push_remotes(plugin=element), "No remote configured for element target.bst"
        assert element._push(), "Push operation failed"

    return element_key

def test_push_artifact_glob(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
        assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Configure artifact share
        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        # Run bst artifact push with a wildcard.
        # This matches two artifact refs (weak and strong cache keys).
        result = cli.run(project=project, args=["artifact", "push", "test/target/*"])
        result.assert_success()
        assert len(result.get_pushed_elements()) == 2

def _push(cli, cache_dir, project_dir, config_file, target):
    with dummy_context(config=config_file) as context:
        # Load the project manually
        project = Project(project_dir, context)
        project.ensure_fully_loaded()

        # Assert that the element's artifact is cached
        element = project.load_elements(["target.bst"])[0]
        element_key = cli.get_element_key(project_dir, "target.bst")
        assert cli.artifact.is_cached(cache_dir, element, element_key)

        # Create a local artifact cache handle
        artifactcache = context.artifactcache

        # Ensure the element's artifact member is initialised
        # This is duplicated from Pipeline.resolve_elements()
        # as this test does not use the cli frontend.
        for e in element._dependencies(_Scope.ALL):
            e._initialize_state()

        # Initialize remotes
        context.initialize_remotes(True, True, None, None)

        assert artifactcache.has_push_remotes(plugin=element), "No remote configured for element target.bst"
        assert element._push(), "Push operation failed"

    return element_key

def test_artifact_delete_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))

    # Delete the artifact
    result = cli.run(project=project, args=["artifact", "delete", artifact])
    result.assert_success()

    # Check that the ARTIFACT is no longer in the cache
    assert not os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))

def test_push_cross_junction(cli, tmpdir, datafiles):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")

    generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)

    result = cli.run(project=project, args=["build", "junction.bst:import-etc.bst"])
    result.assert_success()

    assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
        cli.run(project=project, args=["artifact", "push", "junction.bst:import-etc.bst"])

        cache_key = cli.get_element_key(project, "junction.bst:import-etc.bst")
        assert share.get_artifact(cli.get_artifact_name(project, "subtest", "import-etc.bst", cache_key=cache_key))

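# A minimal sketch of the generate_junction() helper used above, assuming it
# commits the subproject to a throwaway repo and writes out a junction element
# pointing at it; the real helper lives in the test utilities, and the
# create_repo() call and the repo kind here are assumptions.
def generate_junction(tmpdir, subproject_path, junction_path, store_ref=True):
    # Create a repo to hold the subproject and generate a junction element for it
    repo = create_repo("git", str(tmpdir))
    source_ref = ref = repo.create(subproject_path)
    if not store_ref:
        source_ref = None

    element = {"kind": "junction", "sources": [repo.source_config(ref=source_ref)]}
    _yaml.roundtrip_dump(element, junction_path)

    return ref
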
def test_artifact_list_exact_contents_long(cli, datafiles, target):
    project = str(datafiles)

    # Ensure we have an artifact to read
    result = cli.run(project=project, args=["build", "import-bin.bst"])
    assert result.exit_code == 0

    if target == "element-name":
        arg = "import-bin.bst"
    elif target == "artifact-name":
        key = cli.get_element_key(project, "import-bin.bst")
        arg = "test/import-bin/" + key

    # List the long-form contents via the element name or the artifact name
    result = cli.run(project=project, args=["artifact", "list-contents", "--long", arg])
    assert result.exit_code == 0

    expected_output_template = (
        "{target}:\n"
        "\tdrwxr-xr-x dir 1 usr\n"
        "\tdrwxr-xr-x dir 1 usr/bin\n"
        "\t-rw-r--r-- reg 107 usr/bin/hello\n\n"
    )
    expected_output = expected_output_template.format(target=arg)

    assert expected_output in result.output

def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"
    dep = "compose-all.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()
    assert cli.get_element_states(project, [element, dep], deps="none") == {
        element: "cached",
        dep: "cached",
    }

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))

    # Delete the artifact
    result = cli.run(project=project, args=["artifact", "delete", artifact, dep])
    result.assert_success()

    # Check that the ARTIFACT is no longer in the cache
    assert not os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))

    # Check that the dependency ELEMENT is no longer cached
    assert cli.get_element_state(project, dep) != "cached"

def open_cross_junction(cli, tmpdir):
    project = prepare_junction_project(cli, tmpdir)
    element = "sub.bst:data.bst"

    oldkey = cli.get_element_key(project, element)

    workspace = tmpdir.join("workspace")
    args = ["workspace", "open", "--directory", str(workspace), element]
    result = cli.run(project=project, args=args)
    result.assert_success()

    assert cli.get_element_state(project, element) == "buildable"
    assert os.path.exists(str(workspace.join("hello.txt")))
    assert cli.get_element_key(project, element) != oldkey

    return project, workspace

def test_artifact_delete_artifacts_build_deps(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))

    # Get the artifact refs of the build dependencies
    bdep_refs = []
    bdep_states = cli.get_element_states(project, [element], deps="build")
    for bdep in bdep_states.keys():
        bdep_refs.append(os.path.join("test", _get_normal_name(bdep), cli.get_element_key(project, bdep)))

    # Assert build dependencies are cached
    for ref in bdep_refs:
        assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", ref))

    # Delete the artifact
    result = cli.run(project=project, args=["artifact", "delete", "--deps", "build", artifact])
    result.assert_success()

    # Assert that the artifact's build dependencies have been deleted
    # and that the artifact itself remains in the cache
    for ref in bdep_refs:
        assert not os.path.exists(os.path.join(local_cache, "artifacts", "refs", ref))
    assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))

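# _get_normal_name() maps an element name to the path component used in
# artifact refs. A sketch under the assumption that it simply strips the
# ".bst" extension, matching the os.path.splitext() pattern the other tests
# in this section use to build artifact refs by hand:
def _get_normal_name(element_name):
    return os.path.splitext(element_name)[0]
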
def test_build_checkout_runtime_deps_using_ref_fails(datafiles, cli):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    result = cli.run(project=project, args=["build", "checkout-deps.bst"])
    result.assert_success()

    key = cli.get_element_key(project, "checkout-deps.bst")
    checkout_args = ["artifact", "checkout", "--directory", checkout, "--deps", "run", "test/checkout-deps/" + key]

    result = cli.run(project=project, args=checkout_args)
    result.assert_main_error(ErrorDomain.STREAM, "deps-not-supported")

def test_push_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
        assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Configure artifact share
        cli.configure(
            {
                #
                # FIXME: This test hangs "sometimes" if we allow
                #        concurrent push.
                #
                #        It's not too bad to ignore since we're
                #        using the local artifact cache functionality
                #        only, but it should probably be fixed.
                #
                "scheduler": {"pushers": 1},
                "artifacts": {
                    "servers": [
                        {
                            "url": share.repo,
                            "push": True,
                        }
                    ]
                },
            }
        )

        # Now try bst artifact push of the artifact ref
        result = cli.run(project=project, args=["artifact", "push", artifact_ref])
        result.assert_success()

        # And finally assert that all the artifacts are in the share
        #
        # Note that assert_shared tests that an element is shared by obtaining
        # the artifact ref and asserting that the path exists in the share
        assert_shared(cli, share, project, element)

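# A minimal sketch of the assert_shared() helper, assuming it checks the share
# for the element's artifact ref as described in the comment above; the real
# helper lives in the test utilities, and the hard-coded "test" project name
# is an assumption.
def assert_shared(cli, share, project, element_name):
    # Obtain the artifact ref and assert that the artifact is present in the share
    if not share.get_artifact(cli.get_artifact_name(project, "test", element_name)):
        raise AssertionError(
            "Artifact share at {} does not contain the expected element {}".format(share.repo, element_name)
        )
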
def test_artifact_show_artifact_ref(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    result = cli.run(project=project, args=["build", element])
    result.assert_success()

    cache_key = cli.get_element_key(project, element)
    artifact_ref = "test/target/" + cache_key

    result = cli.run(project=project, args=["artifact", "show", artifact_ref])
    result.assert_success()
    assert "cached {}".format(artifact_ref) in result.output

def test_build_checkout_using_ref(datafiles, cli):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    result = cli.run(project=project, args=["build", "checkout-deps.bst"])
    result.assert_success()

    key = cli.get_element_key(project, "checkout-deps.bst")
    checkout_args = ["artifact", "checkout", "--directory", checkout, "--deps", "none", "test/checkout-deps/" + key]

    result = cli.run(project=project, args=checkout_args)
    result.assert_success()

    filename = os.path.join(checkout, "etc", "buildstream", "config")
    assert os.path.exists(filename)

def test_modify_and_revert(datafiles, cli, strict):
    project = str(datafiles)

    # First build target and dependencies
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()

    # Remember cache key of first build
    target_cache_key = cli.get_element_key(project, "target.bst")

    # Modify dependency
    new_header_path = os.path.join(project, "files", "dev-files", "usr", "include", "new.h")
    with open(new_header_path, "w", encoding="utf-8") as f:
        f.write("#define NEW")

    # Trigger rebuild. This will also rebuild the unmodified target as this
    # follows a strict build plan.
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()
    assert "target.bst" in result.get_built_elements()
    assert cli.get_element_key(project, "target.bst") != target_cache_key

    # Revert previous modification in dependency
    os.unlink(new_header_path)

    # Rebuild again, everything should be cached.
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()
    assert len(result.get_built_elements()) == 0

    # Verify that the cache key now again matches the first build in both
    # strict and non-strict mode.
    cli.configure({"projects": {"test": {"strict": strict == "strict"}}})
    assert cli.get_element_key(project, "target.bst") == target_cache_key

def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "checkout-deps.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
        assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Configure artifact share
        cli.configure(
            {
                #
                # FIXME: This test hangs "sometimes" if we allow
                #        concurrent push.
                #
                #        It's not too bad to ignore since we're
                #        using the local artifact cache functionality
                #        only, but it should probably be fixed.
                #
                "scheduler": {"pushers": 1},
                "artifacts": {
                    "servers": [
                        {
                            "url": share.repo,
                            "push": True,
                        }
                    ]
                },
            }
        )

        # Now try bst artifact push all the deps
        result = cli.run(project=project, args=["artifact", "push", "--deps", "all", artifact_ref])
        result.assert_main_error(ErrorDomain.STREAM, "deps-not-supported")

def test_pull_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # First build the target element and push to the remote.
        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
        assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Assert that the target is shared (note that assert_shared will use the artifact name)
        assert_shared(cli, share, project, element)

        # Now we've pushed, remove the local cache
        shutil.rmtree(os.path.join(local_cache, "artifacts"))

        # Assert that nothing is cached locally anymore
        assert not os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Now try bst artifact pull
        result = cli.run(project=project, args=["artifact", "pull", artifact_ref])
        result.assert_success()

        # And assert that it's again in the local cache, without having built
        assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))

def test_artifact_delete_artifact_with_deps_all_fails(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Try to delete the artifact with all of its dependencies
    result = cli.run(project=project, args=["artifact", "delete", "--deps", "all", artifact])
    result.assert_main_error(ErrorDomain.STREAM, "deps-not-supported")

def test_artifact_show_artifact_name(cli, tmpdir, datafiles, with_project):
    project = str(datafiles)
    element = "target.bst"

    result = cli.run(project=project, args=["build", element])
    result.assert_success()

    cache_key = cli.get_element_key(project, element)
    artifact_ref = "test/target/" + cache_key

    # Delete the project.conf if we're going to try this without a project
    if not with_project:
        os.remove(os.path.join(project, "project.conf"))

    result = cli.run(project=project, args=["artifact", "show", artifact_ref])
    result.assert_success()
    assert "cached {}".format(artifact_ref) in result.output

def test_artifact_delete_unbuilt_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Delete it, just in case it's there
    _ = cli.run(project=project, args=["artifact", "delete", element])

    # Ensure the element is not cached
    assert cli.get_element_state(project, element) != "cached"

    # Now try and remove it again (now we know it's not there)
    result = cli.run(project=project, args=["artifact", "delete", element])

    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)
    expected_err = "WARNING Could not find ref '{}'".format(artifact)
    assert expected_err in result.stderr

def test_artifact_list_exact_contents_ref(cli, datafiles):
    project = str(datafiles)

    # Get the cache key of our test element
    key = cli.get_element_key(project, "import-bin.bst")

    # Ensure we have an artifact to read
    result = cli.run(project=project, args=["build", "import-bin.bst"])
    assert result.exit_code == 0

    # List the contents via the key
    result = cli.run(project=project, args=["artifact", "list-contents", "test/import-bin/" + key])
    assert result.exit_code == 0

    expected_output = "test/import-bin/" + key + ":\n\tusr\n\tusr/bin\n\tusr/bin/hello\n\n"
    assert expected_output in result.output

def test_build_checkout_tarball_using_ref(datafiles, cli):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout.tar")

    result = cli.run(project=project, args=["build", "checkout-deps.bst"])
    result.assert_success()

    builddir = os.path.join(cli.directory, "build")
    assert os.path.isdir(builddir)
    assert not os.listdir(builddir)

    key = cli.get_element_key(project, "checkout-deps.bst")
    checkout_args = ["artifact", "checkout", "--deps", "none", "--tar", checkout, "test/checkout-deps/" + key]

    result = cli.run(project=project, args=checkout_args)
    result.assert_success()

    with tarfile.TarFile(checkout) as tar:
        assert os.path.join(".", "etc", "buildstream", "config") in tar.getnames()

def test_build_checkout_build_deps_using_ref(datafiles, cli):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    result = cli.run(project=project, args=["build", "checkout-deps.bst"])
    result.assert_success()

    key = cli.get_element_key(project, "checkout-deps.bst")
    checkout_args = ["artifact", "checkout", "--directory", checkout, "--deps", "build", "test/checkout-deps/" + key]

    result = cli.run(project=project, args=checkout_args)
    result.assert_success()

    build_dep_files = os.path.join(checkout, "usr", "include", "pony.h")
    runtime_dep_files = os.path.join(checkout, "usr", "bin", "hello")
    target_files = os.path.join(checkout, "etc", "buildstream", "config")
    assert os.path.exists(build_dep_files)
    assert not os.path.exists(runtime_dep_files)
    assert not os.path.exists(target_files)

def test_pull(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure artifact share
        cache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
            "cachedir": cache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst"))

        # Delete the artifact locally
        cli.remove_artifact_from_cache(project_dir, "target.bst")

        # Assert that we are not cached locally anymore
        assert cli.get_element_state(project_dir, "target.bst") != "cached"

        with dummy_context(config=user_config_file) as context:
            context.cachedir = cache_dir
            context.casdir = os.path.join(cache_dir, "cas")
            context.tmpdir = os.path.join(cache_dir, "tmp")

            # Load the project manually
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Assert that the element's artifact is **not** cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert not cli.artifact.is_cached(cache_dir, element, element_key)

            # Create a local artifact cache handle
            artifactcache = context.artifactcache

            # Manually setup the CAS remote
            artifactcache.setup_remotes(use_config=True)

            assert artifactcache.has_push_remotes(plugin=element), "No remote configured for element target.bst"
            assert artifactcache.pull(element, element_key), "Pull operation failed"

            assert cli.artifact.is_cached(cache_dir, element, element_key)

def test_pull_tree(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst"))

        with dummy_context(config=user_config_file) as context:
            # Load the project and CAS cache
            project = Project(project_dir, context)
            project.ensure_fully_loaded()
            cas = context.get_cascache()

            # Assert that the element's artifact is cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert cli.artifact.is_cached(rootcache_dir, element, element_key)

            # Retrieve the Directory object from the cached artifact
            artifact_digest = cli.artifact.get_digest(rootcache_dir, element, element_key)

            artifactcache = context.artifactcache
            # Manually setup the CAS remote
            artifactcache.setup_remotes(use_config=True)
            assert artifactcache.has_push_remotes()

            directory = remote_execution_pb2.Directory()
            with open(cas.objpath(artifact_digest), "rb") as f:
                directory.ParseFromString(f.read())

            # Build the Tree object while we are still cached
            tree = remote_execution_pb2.Tree()
            tree_maker(cas, tree, directory)

            # Push the Tree as a regular message
            tree_digest = artifactcache.push_message(project, tree)
            tree_hash, tree_size = tree_digest.hash, tree_digest.size_bytes
            assert tree_hash and tree_size

            # Now delete the artifact locally
            cli.remove_artifact_from_cache(project_dir, "target.bst")

            # Assert that we are not cached locally anymore
            artifactcache.close_grpc_channels()
            cas.close_grpc_channels()
            assert cli.get_element_state(project_dir, "target.bst") != "cached"

            tree_digest = remote_execution_pb2.Digest(hash=tree_hash, size_bytes=tree_size)

            # Pull the artifact using the Tree object
            directory_digest = artifactcache.pull_tree(project, tree_digest)
            directory_hash, directory_size = directory_digest.hash, directory_digest.size_bytes

            # Directory size now zero with AaaP and stack element commit #1cbc5e63dc
            assert directory_hash and not directory_size

            directory_digest = remote_execution_pb2.Digest(hash=directory_hash, size_bytes=directory_size)

            # Ensure the entire Tree structure has been pulled
            assert os.path.exists(cas.objpath(directory_digest))

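# A minimal sketch of the tree_maker() helper used in test_pull_tree(),
# assuming it recursively embeds the root Directory and all of its transitive
# subdirectories into a remote_execution_pb2.Tree, reading each child
# Directory message from the local CAS; the real helper lives alongside the
# test.
def tree_maker(cas, tree, directory):
    if tree.root.ByteSize() == 0:
        tree.root.CopyFrom(directory)

    for directory_node in directory.directories:
        child_directory = tree.children.add()
        with open(cas.objpath(directory_node.digest), "rb") as f:
            child_directory.ParseFromString(f.read())

        # Recurse into the child to pick up its own subdirectories
        tree_maker(cas, tree, child_directory)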