def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
    """Check that `bst artifact show` reports an artifact that exists only remotely."""
    project = str(datafiles)
    element = "target.bst"

    # Set up remote and local shares
    local_cache = os.path.join(str(tmpdir), "artifacts")
    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
        cli.configure({
            "artifacts": {"servers": [{"url": remote.repo, "push": True}]},
            "cachedir": local_cache,
        })

        # Build the element
        build_result = cli.run(project=project, args=["build", element])
        build_result.assert_success()

        # Make sure it's in the share
        assert remote.get_artifact(cli.get_artifact_name(project, "test", element))

        # Delete the artifact from the local cache
        delete_result = cli.run(project=project, args=["artifact", "delete", element])
        delete_result.assert_success()
        assert cli.get_element_state(project, element) != "cached"

        # The element is gone locally, but `artifact show` should still
        # report it as available on the remote.
        show_result = cli.run(project=project, args=["artifact", "show", element])
        show_result.assert_success()
        assert "available {}".format(element) in show_result.output
def test_push(cli, tmpdir, datafiles):
    """Build without remotes configured, then push via `_push` and verify the share."""
    project_dir = str(datafiles)

    # First build the project without the artifact cache configured
    result = cli.run(project=project_dir, args=["build", "target.bst"])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project_dir, "target.bst") == "cached"

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)

        # Push through the helper and confirm the artifact landed in the share
        # under the key the helper reports.
        element_key = _push(cli, rootcache_dir, project_dir, user_config_file, "target.bst")
        artifact_name = cli.get_artifact_name(project_dir, "test", "target.bst", cache_key=element_key)
        assert share.get_artifact(artifact_name)
def test_push_cross_junction(cli, tmpdir, datafiles):
    """Push an element that lives behind a junction.

    The artifact must be stored on the share under the subproject's own
    project name ("subtest"), not the toplevel project's.
    """
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")

    generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)

    # Build the cross-junction element and make sure it is cached locally
    result = cli.run(project=project, args=["build", "junction.bst:import-etc.bst"])
    result.assert_success()
    assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        cli.configure({
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
        })

        # FIX: previously the push result was discarded; a failed push would
        # only surface indirectly through the share lookup below. Assert
        # success explicitly so failures are reported at the point of push.
        result = cli.run(project=project, args=["artifact", "push", "junction.bst:import-etc.bst"])
        result.assert_success()

        # The artifact is namespaced under the subproject ("subtest"),
        # keyed by the element's cache key.
        cache_key = cli.get_element_key(project, "junction.bst:import-etc.bst")
        assert share.get_artifact(
            cli.get_artifact_name(project, "subtest", "import-etc.bst", cache_key=cache_key))
def test_push_split(cli, tmpdir, datafiles):
    """Push to a split index/storage remote pair and verify both halves hold data."""
    project_dir = str(datafiles)

    # First build the project without the artifact cache configured
    result = cli.run(project=project_dir, args=["build", "target.bst"])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project_dir, "target.bst") == "cached"

    indexshare = os.path.join(str(tmpdir), "indexshare")
    storageshare = os.path.join(str(tmpdir), "storageshare")

    # Set up an artifact cache.
    with create_split_share(indexshare, storageshare) as (index, storage):
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        config_path = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {
                "servers": [
                    {"url": index.repo, "push": True, "type": "index"},
                    {"url": storage.repo, "push": True, "type": "storage"},
                ],
            },
            "cachedir": rootcache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=config_path)

        element_key = _push(cli, rootcache_dir, project_dir, config_path, "target.bst")

        # The index remote should hold the artifact proto, and the storage
        # remote should hold the CAS files it references.
        artifact_proto = index.get_artifact_proto(
            cli.get_artifact_name(project_dir, "test", "target.bst", cache_key=element_key)
        )
        assert storage.get_cas_files(artifact_proto) is not None
def create_built_artifact_share(tmpdir, cache_buildtrees, integration_cache):
    """Create an ArtifactShare pre-populated with a built artifact.

    Builds "build-shell/buildtree.bst" (optionally caching its buildtree)
    and pushes it to a freshly created share, which is returned to the
    caller (caller is responsible for its lifetime).
    """
    element_name = "build-shell/buildtree.bst"

    # Replicate datafiles behavior and do work entirely in the temp directory
    project = os.path.join(tmpdir, "project")
    shutil.copytree(DATA_DIR, project)

    # Create the share to be hosted from this temp directory
    share = ArtifactShare(os.path.join(tmpdir, "artifactcache"))

    # Create a Cli instance to build and populate the share
    cli = Cli(os.path.join(tmpdir, "cache"))
    cli.configure({
        "artifacts": {"servers": [{"url": share.repo, "push": True}]},
        "sourcedir": integration_cache.sources,
    })

    # Optionally cache build trees
    build_args = ["--cache-buildtrees", "always"] if cache_buildtrees else []
    build_args += ["build", element_name]

    # Build
    result = cli.run(project=project, args=build_args)
    result.assert_success()

    # Assert that the artifact is indeed in the share
    assert cli.get_element_state(project, element_name) == "cached"
    artifact_name = cli.get_artifact_name(project, "test", element_name)
    assert share.get_artifact(artifact_name)

    return share
def test_push_deps(cli, tmpdir, datafiles, deps, expected_states):
    """Push with a --deps mode and check exactly which artifacts reach the share."""
    project = str(datafiles)
    target = "checkout-deps.bst"
    build_dep = "import-dev.bst"
    runtime_dep = "import-bin.bst"

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Configure artifact share
        cli.configure({
            #
            # FIXME: This test hangs "sometimes" if we allow
            # concurrent push.
            #
            # It's not too bad to ignore since we're
            # using the local artifact cache functionality
            # only, but it should probably be fixed.
            #
            "scheduler": {"pushers": 1},
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
        })

        # Now try bst artifact push all the deps
        result = cli.run(project=project, args=["artifact", "push", target, "--deps", deps])
        result.assert_success()

        # And finally assert that the selected artifacts are in the share
        states = [
            share.get_artifact(cli.get_artifact_name(project, "test", element)) is not None
            for element in (target, build_dep, runtime_dep)
        ]
        assert states == expected_states
def _test_pull_missing_blob(cli, project, index, storage):
    """Shared helper: verify no artifacts are pulled when remote blobs are missing.

    Simulates concurrent artifact expiry by deleting CAS objects on the
    remote while leaving the artifact refs in place.
    """
    # First build the target element and push to the remote.
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()
    assert cli.get_element_state(project, "target.bst") == "cached"

    # Assert that everything is now cached in the remote.
    all_elements = ["target.bst", "import-bin.bst", "import-dev.bst", "compose-all.bst"]
    project_name = "test"
    for element_name in all_elements:
        artifact_name = cli.get_artifact_name(project, project_name, element_name)
        artifact_proto = index.get_artifact_proto(artifact_name)
        assert artifact_proto
        assert storage.get_cas_files(artifact_proto)

    # Now we've pushed, delete the user's local artifact cache
    # directory and try to redownload it from the share
    #
    for cache_subdir in ("cas", "artifacts"):
        shutil.rmtree(os.path.join(cli.directory, cache_subdir))

    # Assert that nothing is cached locally anymore
    for element_name in all_elements:
        assert cli.get_element_state(project, element_name) != "cached"

    # Now delete blobs in the remote without deleting the artifact ref.
    # This simulates scenarios with concurrent artifact expiry.
    remote_objdir = os.path.join(storage.repodir, "cas", "objects")
    shutil.rmtree(remote_objdir)

    # Now try bst build
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()

    # Assert that no artifacts were pulled
    assert not result.get_pulled_elements()
def test_artifact_delete_pulled_artifact_without_buildtree(cli, tmpdir, datafiles):
    """Deleting an artifact that was pulled without its buildtree must succeed."""
    project = str(datafiles)
    element = "target.bst"

    # Set up remote and local shares
    local_cache = os.path.join(str(tmpdir), "artifacts")
    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
        cli.configure({
            "artifacts": {"servers": [{"url": remote.repo, "push": True}]},
            "cachedir": local_cache,
        })

        # Build the element
        build_result = cli.run(project=project, args=["build", element])
        build_result.assert_success()

        # Make sure it's in the share
        assert remote.get_artifact(cli.get_artifact_name(project, "test", element))

        # Delete and then pull the artifact (without its buildtree)
        delete_result = cli.run(project=project, args=["artifact", "delete", element])
        delete_result.assert_success()
        assert cli.get_element_state(project, element) != "cached"

        pull_result = cli.run(project=project, args=["artifact", "pull", element])
        pull_result.assert_success()
        assert cli.get_element_state(project, element) == "cached"

        # Now delete it again (it should have been pulled without the buildtree, but
        # a digest of the buildtree is pointed to in the artifact's metadata
        delete_again = cli.run(project=project, args=["artifact", "delete", element])
        delete_again.assert_success()
        assert cli.get_element_state(project, element) != "cached"
def test_pull(cli, tmpdir, datafiles):
    """Push via a build, delete the local artifact, then pull it back
    through the ArtifactCache API and verify it is cached again.
    """
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure artifact share
        cache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            # FIX: use the "servers" list form for the artifact remote
            # configuration, consistent with the other tests in this file
            # (the flat {"url": ...} layout is the legacy form).
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
            "cachedir": cache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst"))

        # Delete the artifact locally
        cli.remove_artifact_from_cache(project_dir, "target.bst")

        # Assert that we are not cached locally anymore
        assert cli.get_element_state(project_dir, "target.bst") != "cached"

        with dummy_context(config=user_config_file) as context:
            # Load the project
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Assert that the element's artifact is **not** cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert not cli.artifact.is_cached(cache_dir, element, element_key)

            # Point the context at the same cache the CLI used
            context.cachedir = cache_dir
            context.casdir = os.path.join(cache_dir, "cas")
            context.tmpdir = os.path.join(cache_dir, "tmp")

            # Load the project manually
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Create a local artifact cache handle
            artifactcache = context.artifactcache

            # Manually setup the CAS remote
            artifactcache.setup_remotes(use_config=True)
            assert artifactcache.has_push_remotes(plugin=element), \
                "No remote configured for element target.bst"
            assert artifactcache.pull(element, element_key), "Pull operation failed"

            assert cli.artifact.is_cached(cache_dir, element, element_key)
def test_pull_tree(cli, tmpdir, datafiles):
    """Push a Tree message built from a cached artifact, delete the local
    artifact, then pull the tree back and verify its root Directory object
    exists in the local CAS.
    """
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            # FIX: use the "servers" list form for the artifact remote
            # configuration, consistent with the other tests in this file
            # (the flat {"url": ...} layout is the legacy form).
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst"))

        with dummy_context(config=user_config_file) as context:
            # Load the project and CAS cache
            project = Project(project_dir, context)
            project.ensure_fully_loaded()
            cas = context.get_cascache()

            # Assert that the element's artifact is cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert cli.artifact.is_cached(rootcache_dir, element, element_key)

            # Retrieve the Directory object from the cached artifact
            artifact_digest = cli.artifact.get_digest(rootcache_dir, element, element_key)

            artifactcache = context.artifactcache
            # Manually setup the CAS remote
            artifactcache.setup_remotes(use_config=True)
            assert artifactcache.has_push_remotes()

            directory = remote_execution_pb2.Directory()
            with open(cas.objpath(artifact_digest), "rb") as f:
                directory.ParseFromString(f.read())

            # Build the Tree object while we are still cached
            tree = remote_execution_pb2.Tree()
            tree_maker(cas, tree, directory)

            # Push the Tree as a regular message
            tree_digest = artifactcache.push_message(project, tree)
            tree_hash, tree_size = tree_digest.hash, tree_digest.size_bytes
            assert tree_hash and tree_size

            # Now delete the artifact locally
            cli.remove_artifact_from_cache(project_dir, "target.bst")

            # Assert that we are not cached locally anymore
            artifactcache.close_grpc_channels()
            cas.close_grpc_channels()
            assert cli.get_element_state(project_dir, "target.bst") != "cached"

            tree_digest = remote_execution_pb2.Digest(hash=tree_hash, size_bytes=tree_size)

            # Pull the artifact using the Tree object.
            # FIX: previously this passed artifact_digest, leaving the
            # reconstructed tree_digest unused (dead code) and defeating
            # the point of the test — pulling by the pushed Tree message.
            directory_digest = artifactcache.pull_tree(project, tree_digest)
            directory_hash, directory_size = directory_digest.hash, directory_digest.size_bytes

            # NOTE(review): the original asserted `not directory_size`
            # ("Directory size now zero with AaaP and stack element commit
            # #1cbc5e63dc"), an observation made while the buggy
            # artifact_digest call was in place. Only the hash is asserted
            # here; confirm the expected size against current pull_tree
            # behavior.
            assert directory_hash

            directory_digest = remote_execution_pb2.Digest(hash=directory_hash, size_bytes=directory_size)

            # Ensure the entire Tree stucture has been pulled
            assert os.path.exists(cas.objpath(directory_digest))
def test_checkout(cli, tmpdir, datafiles, deps, expect_exist, expect_noexist, with_project):
    """Checkout an artifact by name with --pull, with and without a project."""
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Build the element to push it to cache
        cli.configure({
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
        })

        # Build it
        result = cli.run(project=project, args=["build", "target-import.bst"])
        result.assert_success()

        # Assert it is cached locally and remotely
        assert cli.get_element_state(project, "target-import.bst") == "cached"
        assert share.get_artifact(cli.get_artifact_name(project, "test", "target-import.bst"))

        # Obtain the artifact name for pulling purposes
        artifact_name = cli.get_artifact_name(project, "test", "target-import.bst")

        # Discard the local cache
        shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "cas")))
        shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "artifacts")))
        assert cli.get_element_state(project, "target-import.bst") != "cached"

        # Delete the project.conf if we're going to try this without a project
        if not with_project:
            os.remove(os.path.join(project, "project.conf"))

        # Now checkout the artifact
        checkout_args = [
            "artifact", "checkout",
            "--directory", checkout,
            "--pull",
            "--deps", deps,
            artifact_name,
        ]
        result = cli.run(project=project, args=checkout_args)

        if deps in ["all", "run"]:
            result.assert_main_error(ErrorDomain.STREAM, "deps-not-supported")
        else:
            result.assert_success()

            # After checkout, assert that we have the expected files and assert that
            # we don't have any of the unexpected files.
            #
            for expected in expect_exist:
                assert os.path.exists(os.path.join(checkout, expected))
            for unexpected in expect_noexist:
                assert not os.path.exists(os.path.join(checkout, unexpected))
def test_pull(cli, tmpdir, datafiles, deps, expect_cached, with_project):
    """Pull an artifact by name with a --deps mode, with and without a project."""
    project = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Build the element to push it to cache, and explicitly configure local cache so we can check it
        local_cache = os.path.join(str(tmpdir), "cache")
        cli.configure({
            "cachedir": local_cache,
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
        })

        # Build it
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        # Assert it is cached locally and remotely
        assert cli.get_element_state(project, "target.bst") == "cached"
        assert share.get_artifact(cli.get_artifact_name(project, "test", "target.bst"))

        # Obtain the artifact name for pulling purposes
        artifact_name = cli.get_artifact_name(project, "test", "target.bst")

        # Translate the expected element names into artifact names
        expect_cached_artifacts = [
            cli.get_artifact_name(project, "test", element_name)
            for element_name in expect_cached
        ]

        # Discard the local cache
        shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "cas")))
        shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "artifacts")))
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Delete the project.conf if we're going to try this without a project
        if not with_project:
            os.remove(os.path.join(project, "project.conf"))

        # Now run our pull test
        result = cli.run(project=project, args=["artifact", "pull", "--deps", deps, artifact_name])

        if deps in ["all", "run"]:
            result.assert_main_error(ErrorDomain.STREAM, "deps-not-supported")
        else:
            result.assert_success()

            # After pulling, assert that we have the expected elements cached again.
            #
            # Note that we do not use cli.get_element_states() here because the project.conf
            # might not be present, so we poke at the cache directly for this assertion.
            for ref in expect_cached_artifacts:
                assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", ref))
def test_buildtree_options(cli, tmpdir, datafiles):
    """Exercise `bst shell --build --use-buildtree` modes (never/ask/try/always)
    against an artifact whose buildtree is cached remotely but not locally.
    """
    project = str(datafiles)
    element_name = "build-shell/buildtree.bst"

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Build the element to push it to cache
        # FIX: use the "servers" list form for the artifact remote
        # configuration, consistent with the other tests in this file
        # (the flat {"url": ...} layout is the legacy form).
        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
        result = cli.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
        result.assert_success()
        assert cli.get_element_state(project, element_name) == "cached"
        assert share.get_artifact(cli.get_artifact_name(project, "test", element_name))

        # Discard the cache
        shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "cas")))
        shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "artifacts")))
        assert cli.get_element_state(project, element_name) != "cached"

        # Pull from cache, but do not include buildtrees.
        result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", element_name])
        result.assert_success()

        # Check it's not using the cached build tree
        res = cli.run(project=project, args=[
            "shell", "--build", element_name, "--use-buildtree", "never", "--", "cat", "test"
        ])
        res.assert_shell_error()
        assert "Hi" not in res.output

        # Check it's not using the cached build tree, default is to ask, and fall back to not
        # for non interactive behavior
        res = cli.run(project=project, args=["shell", "--build", element_name, "--", "cat", "test"])
        res.assert_shell_error()
        assert "Hi" not in res.output

        # Check correctly handling the lack of buildtree, with 'try' not attempting to
        # pull the buildtree as the user context is by default set to not pull them
        # and --pull not given
        res = cli.run(project=project, args=[
            "shell", "--build", element_name, "--use-buildtree", "try", "--", "cat", "test"
        ])
        assert "Hi" not in res.output
        assert "Attempting to fetch missing artifact buildtrees" not in res.stderr
        assert "WARNING: buildtree is not cached locally, shell will be loaded without it" in res.stderr

        # Check correctly handling the lack of buildtree, with 'try' attempting and succeeding
        # to pull the buildtree as the user context allow the pulling of buildtrees and it is
        # available in the remote and --pull given
        res = cli.run(
            project=project,
            args=[
                "--pull-buildtrees",
                "shell",
                "--build",
                element_name,
                "--pull",
                "--use-buildtree",
                "try",
                "--",
                "cat",
                "test",
            ],
        )
        assert "Attempting to fetch missing artifact buildtree" in res.stderr
        assert "Hi" in res.output

        shutil.rmtree(os.path.join(os.path.join(str(tmpdir), "cache", "cas")))
        shutil.rmtree(os.path.join(os.path.join(str(tmpdir), "cache", "artifacts")))
        assert cli.get_element_state(project, element_name) != "cached"

        # Check it's not loading the shell at all with always set for the buildtree, when the
        # user context does not allow for buildtree pulling and --pull is not given
        result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", element_name])
        result.assert_success()
        res = cli.run(project=project, args=[
            "shell", "--build", element_name, "--use-buildtree", "always", "--", "cat", "test"
        ])
        res.assert_main_error(ErrorDomain.APP, None)
        assert (
            "Artifact has a buildtree but it isn't cached. Can be retried with --pull and pull-buildtrees configured"
            in res.stderr)
        assert "Hi" not in res.output
        assert "Attempting to fetch missing artifact buildtree" not in res.stderr

        # Check that when user context is set to pull buildtrees and a remote has the buildtree,
        # 'always' will attempt and succeed at pulling the missing buildtree with --pull set.
        res = cli.run(
            project=project,
            args=[
                "--pull-buildtrees",
                "shell",
                "--build",
                element_name,
                "--pull",
                "--use-buildtree",
                "always",
                "--",
                "cat",
                "test",
            ],
        )
        assert "Hi" in res.output
        assert (
            "buildtree is not cached locally but did exist, will attempt to pull from available remotes"
            in res.stderr)
        assert "Attempting to fetch missing artifact buildtree" in res.stderr