def test_custom_logging(cli, tmpdir, datafiles):
    """Fetch an element with a user-configured log message format and
    verify that the emitted log lines match that format.
    """
    project = str(datafiles)
    bin_files_path = os.path.join(project, "files", "bin-files")
    element_path = os.path.join(project, "elements")
    element_name = "fetch-test-git.bst"

    # Custom format exercising the elapsed/wallclock/key/element/action/message fields
    custom_log_format = "%{elapsed},%{elapsed-us},%{wallclock},%{wallclock-us},%{key},%{element},%{action},%{message}"
    user_config = {"logging": {"message-format": custom_log_format}}
    cli.configure(user_config)

    # Create our repo object of the given source type with
    # the bin files, and then collect the initial ref.
    #
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(bin_files_path)

    # Write out our test target
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # Now try to fetch it
    result = cli.run(project=project, args=["source", "fetch", element_name])
    result.assert_success()

    # Expect "HH:MM:SS,HH:MM:SS.us,HH:MM:SS,HH:MM:SS.us,<key>,...,SUCCESS,Query cache"
    # per the custom format configured above
    m = re.search(
        r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6}\s*,.*"
        r",SUCCESS,Query cache",
        result.stderr,
    )
    assert m is not None
def test_pull_missing_local_blob(cli, tmpdir, datafiles):
    """Build and push an element, delete one file blob from the local CAS,
    then verify a later build pulls the artifact from the remote to
    complete the partial local cache.

    Fix: removed a dead initial ``project = os.path.join(datafiles.dirname,
    datafiles.basename)`` assignment that was immediately overwritten by
    ``project = str(tmpdir)`` with no intervening use.
    """
    repo = create_repo("tar", str(tmpdir))
    repo.create(os.path.join(str(datafiles), "files"))
    element_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    project_config = {
        "name": "pull-missing-local-blob",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)

    # The input element imports the tar repo contents (ref resolved by tracking below)
    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_name = "input.bst"
    input_file = os.path.join(element_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    # A stack element depending on the input, used to trigger the pull
    depends_name = "depends.bst"
    depends_config = {"kind": "stack", "depends": [input_name]}
    depends_file = os.path.join(element_dir, depends_name)
    _yaml.roundtrip_dump(depends_config, depends_file)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # First build the import-bin element and push to the remote.
        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})

        result = cli.run(project=project, args=["source", "track", input_name])
        result.assert_success()

        result = cli.run(project=project, args=["build", input_name])
        result.assert_success()
        assert cli.get_element_state(project, input_name) == "cached"

        # Delete a file blob from the local cache.
        # This is a placeholder to test partial CAS handling until we support
        # partial artifact pulling (or blob-based CAS expiry).
        #
        digest = utils.sha256sum(os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
        objpath = os.path.join(cli.directory, "cas", "objects", digest[:2], digest[2:])
        os.unlink(objpath)

        # Now try bst build
        result = cli.run(project=project, args=["build", depends_name])
        result.assert_success()

        # Assert that the import-bin artifact was pulled (completing the partial artifact)
        assert result.get_pulled_elements() == [input_name]
def test_default_target_fetch(cli, tmpdir, datafiles):
    """Running ``bst source fetch`` with no target should operate on the
    project's configured default target.
    """
    project_dir = str(datafiles)
    conf_path = os.path.join(project_dir, "project.conf")
    target = "track-fetch-test.bst"

    # Generate an element whose tar source already carries a resolved ref
    source_repo = create_repo("tar", str(tmpdir))
    source_ref = source_repo.create(project_dir)
    _yaml.roundtrip_dump(
        {"kind": "import", "sources": [source_repo.source_config(ref=source_ref)]},
        os.path.join(project_dir, "elements", target),
    )

    # Declare that element as the project's default target
    _yaml.roundtrip_dump(
        {
            "name": "test-default-target",
            "min-version": "2.0",
            "element-path": "elements",
            "defaults": {"targets": [target]},
        },
        conf_path,
    )

    # Setup finished: the element still needs its source fetched
    assert cli.get_element_state(project_dir, target) == "fetch needed"

    # A bare fetch should resolve the default target and make it buildable
    result = cli.run(project=project_dir, args=["source", "fetch"])
    result.assert_success()
    assert cli.get_element_state(project_dir, target) == "buildable"
def prepare_junction_project(cli, tmpdir):
    """Create a main project junctioned to a subproject and return its path.

    Layout created under *tmpdir*:
      - main/        project with a 'sub.bst' junction element
      - sub/         subproject with a 'data.bst' import element
      - import/      payload data imported by the subproject
      - import_repo/, sub_repo/  tar repos backing the two sources

    The junction source is fetched before returning, so callers can use
    cross-junction element names immediately.
    """
    main_project = tmpdir.join("main")
    sub_project = tmpdir.join("sub")
    os.makedirs(str(main_project))
    os.makedirs(str(sub_project))

    # Minimal project.conf for both projects
    _yaml.roundtrip_dump({"name": "main", "min-version": "2.0"}, str(main_project.join("project.conf")))
    _yaml.roundtrip_dump({"name": "sub", "min-version": "2.0"}, str(sub_project.join("project.conf")))

    # Payload data that the subproject's import element will pull in
    import_dir = tmpdir.join("import")
    os.makedirs(str(import_dir))
    with open(str(import_dir.join("hello.txt")), "w", encoding="utf-8") as f:
        f.write("hello!")

    # Tar repo holding the payload, referenced by sub/data.bst
    import_repo_dir = tmpdir.join("import_repo")
    os.makedirs(str(import_repo_dir))
    import_repo = create_repo("tar", str(import_repo_dir))
    import_ref = import_repo.create(str(import_dir))
    _yaml.roundtrip_dump(
        {"kind": "import", "sources": [import_repo.source_config(ref=import_ref)]},
        str(sub_project.join("data.bst")))

    # Tar repo holding the whole subproject, referenced by main/sub.bst
    sub_repo_dir = tmpdir.join("sub_repo")
    os.makedirs(str(sub_repo_dir))
    sub_repo = create_repo("tar", str(sub_repo_dir))
    sub_ref = sub_repo.create(str(sub_project))
    _yaml.roundtrip_dump(
        {"kind": "junction", "sources": [sub_repo.source_config(ref=sub_ref)]},
        str(main_project.join("sub.bst")))

    # Fetch the junction so the subproject is available to callers
    args = ["source", "fetch", "sub.bst"]
    result = cli.run(project=str(main_project), args=args)
    result.assert_success()

    return str(main_project)
def test_filter_track_multi(datafiles, cli, tmpdir):
    """Tracking two filter elements should track their build dependencies,
    writing a ref into both underlying import elements.
    """
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(os.path.join(str(datafiles), "files"))
    elements_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    input_name = "input.bst"
    input2_name = "input2.bst"

    # Generate a temporary project in tmpdir
    project_config = {
        "name": "filter-track-test",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)

    # Two import elements with no ref (so tracking is required)
    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_file = os.path.join(elements_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    input2_config = dict(input_config)
    input2_file = os.path.join(elements_dir, input2_name)
    _yaml.roundtrip_dump(input2_config, input2_file)

    # Filter elements build-depending on each import element
    filter1_config = {"kind": "filter", "depends": [{"filename": input_name, "type": "build"}]}
    filter1_file = os.path.join(elements_dir, "filter1.bst")
    _yaml.roundtrip_dump(filter1_config, filter1_file)

    filter2_config = {"kind": "filter", "depends": [{"filename": input2_name, "type": "build"}]}
    filter2_file = os.path.join(elements_dir, "filter2.bst")
    _yaml.roundtrip_dump(filter2_config, filter2_file)

    # Assert that a fetch is needed
    states = cli.get_element_states(project, [input_name, input2_name])
    assert states == {
        input_name: "no reference",
        input2_name: "no reference",
    }

    # Now try to track it
    result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
    result.assert_success()

    # Now check that a ref field exists
    new_input = _yaml.load(input_file, shortname=None)
    source_node = new_input.get_sequence("sources").mapping_at(0)
    new_ref = source_node.get_str("ref")
    assert new_ref == ref

    new_input2 = _yaml.load(input2_file, shortname=None)
    source_node2 = new_input2.get_sequence("sources").mapping_at(0)
    new_ref2 = source_node2.get_str("ref")
    assert new_ref2 == ref
def create_test_element(tmpdir, project_dir):
    """Generate an import element named 'fetch.bst' backed by a tar repo.

    The repo is seeded from the project's 'files' directory and the
    element is written under 'elements/'.

    Returns a tuple of (element name, repo object, source ref).
    """
    element_name = "fetch.bst"
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(os.path.join(project_dir, "files"))
    destination = os.path.join(project_dir, "elements", element_name)
    _yaml.roundtrip_dump(
        {"kind": "import", "sources": [repo.source_config(ref=ref)]},
        destination,
    )
    return element_name, repo, ref
def test_source_push(cli, tmpdir, datafiles):
    """Building an element with a push-enabled source cache remote should
    push the staged source, making it available in the remote CAS.
    """
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        # Element with a trackable tar source and a resolved ref
        repo = create_repo("tar", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "push.bst"
        element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        # get the source object
        with dummy_context(config=user_config_file) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # NOTE: 'element' is rebound here from the dict above to the loaded Element
            element = project.load_elements(["push.bst"])[0]
            element._query_source_cache()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            # check we don't have it in the current cache
            assert not share.get_source_proto(source._get_source_name())

            # build the element, this should fetch and then push the source to the
            # remote
            res = cli.run(project=project_dir, args=["build", "push.bst"])
            res.assert_success()
            assert "Pushed source" in res.stderr

            # check that we've got the remote locally now
            sourcecache = context.sourcecache
            assert sourcecache.contains(source)

            # check that the remote CAS now has it
            digest = sourcecache.export(source)._get_digest()
            assert share.has_object(digest)
def create_element(project, name, dependencies):
    """Write an import element *name* with the given *dependencies*.

    A dedicated tar repo ("<name>-repo") is created inside the project
    and seeded from 'files/dev-files'; the element is written to the
    project's 'elements' directory.

    Returns the repo object so callers can add further commits.
    """
    repo = create_repo("tar", project, "{}-repo".format(name))
    ref = repo.create(os.path.join(project, "files", "dev-files"))
    config = {
        "kind": "import",
        "sources": [repo.source_config(ref=ref)],
        "depends": dependencies,
    }
    _yaml.roundtrip_dump(config, os.path.join(project, "elements", name))
    return repo
def test_tar_show(cli, tmpdir, datafiles):
    """`bst show` on a cross-junction target should implicitly fetch the
    tar-sourced junction and include the subproject element in the pipeline.
    """
    project = os.path.join(str(datafiles), "use-repo")

    # Build a tar repo out of the 'baserepo' subdirectory
    base_repo = create_repo("tar", str(tmpdir))
    base_ref = base_repo.create(os.path.join(project, "baserepo"))

    # Emit a junction element backed by that tar source
    junction = {"kind": "junction", "sources": [base_repo.source_config(ref=base_ref)]}
    _yaml.roundtrip_dump(junction, os.path.join(project, "base.bst"))

    # Showing the target must succeed and the pipeline must contain
    # the subproject element, addressed by its junctioned name
    pipeline = cli.get_pipeline(project, ["target.bst"])
    assert "base.bst:target.bst" in pipeline
def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True, options=None):
    """Create a tar repo for *subproject_path* and write a junction element.

    When *store_ref* is False the junction is written without a ref, so it
    must be tracked before use. Optional junction *options* are embedded
    under the element's config.

    Returns the created source ref regardless of whether it was stored.
    """
    repodir = os.path.join(tmpdir, "junction-repo")
    os.makedirs(repodir)
    repo = create_repo("tar", repodir)
    ref = repo.create(subproject_path)

    junction = {
        "kind": "junction",
        "sources": [repo.source_config(ref=ref if store_ref else None)],
    }
    if options:
        junction["config"] = {"options": options}
    _yaml.roundtrip_dump(junction, junction_path)

    return ref
def test_no_fetch_cached(cli, tmpdir, datafiles):
    """Fetching a target that mixes a fetchable source with an
    always-cached one should succeed.
    """
    project = str(datafiles)

    # Build a tar repo from the project's 'files' subdirectory
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(os.path.join(project, "files"))

    # Target combines the tar source (with ref) and an always_cached source
    generate_element(
        project,
        "target.bst",
        {
            "kind": "import",
            "sources": [repo.source_config(ref=ref), {"kind": "always_cached"}],
        },
    )

    # Fetching the mixed-source target must succeed
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_success()
def test_tar_missing_project_conf(cli, tmpdir, datafiles): project = datafiles / "use-repo" # Remove the project.conf from this repo os.remove(datafiles / "use-repo" / "baserepo" / "project.conf") # Create the repo from 'base' subdir repo = create_repo("tar", str(tmpdir)) ref = repo.create(os.path.join(project, "baserepo")) # Write out junction element with tar source element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]} _yaml.roundtrip_dump(element, str(project / "base.bst")) result = cli.run(project=project, args=["build", "target.bst"]) result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_JUNCTION) # Assert that we have the expected provenance encoded into the error assert "target.bst [line 3 column 2]" in result.stderr
def test_push_pull(cli, datafiles, tmpdir):
    """Build and push sources to a remote cache, then wipe the local cache
    and the repo files and verify a rebuild pulls from the share.
    """
    project_dir = str(datafiles)
    cache_dir = os.path.join(str(tmpdir), "cache")

    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        # create repo to pull from
        repo = create_repo("tar", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "push.bst"
        element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        res = cli.run(project=project_dir, args=["build", "push.bst"])
        res.assert_success()

        # remove local cache dir, and repo files and check it all works
        shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)
        shutil.rmtree(repo.repo)

        # check it's pulls from the share
        res = cli.run(project=project_dir, args=["build", "push.bst"])
        res.assert_success()
def test_push_fail(cli, tmpdir, datafiles):
    """Building with a configured-but-unreachable source cache remote
    should still succeed, logging the remote init failure and pushing
    nothing. The share context is exited (taking the remote down) before
    the build runs.
    """
    project_dir = str(datafiles)
    cache_dir = os.path.join(str(tmpdir), "cache")

    # set up config with remote that we'll take down
    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        remote = share.repo
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

    # create repo to pull from
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(os.path.join(project_dir, "files"))
    element_path = os.path.join(project_dir, "elements")
    element_name = "push.bst"
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # build and check that it fails to set up the remote
    res = cli.run(project=project_dir, args=["build", "push.bst"])
    res.assert_success()
    assert "Failed to initialize remote {}".format(remote) in res.stderr
    assert "Pushing" not in res.stderr
    assert "Pushed" not in res.stderr
def test_default_logging(cli, tmpdir, datafiles):
    """Fetch an element with the default log message format and verify
    the shape of the emitted log line.
    """
    project = str(datafiles)
    bin_files_path = os.path.join(project, "files", "bin-files")
    element_path = os.path.join(project, "elements")
    element_name = "fetch-test-git.bst"

    # Create our repo object of the given source type with
    # the bin files, and then collect the initial ref.
    #
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(bin_files_path)

    # Write out our test target
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # Now try to fetch it
    result = cli.run(project=project, args=["source", "fetch", element_name])
    result.assert_success()

    # Default format: "[HH:MM:SS][<key>][<element>] SUCCESS <message>"
    m = re.search(r"\[\d\d:\d\d:\d\d\]\[\s*\]\[.*\] SUCCESS Query cache", result.stderr)
    assert m is not None
def create_element_size(name, project_dir, elements_path, dependencies, size):
    """Generate an import element whose payload is *size* random bytes.

    A tar repo is created under the project's 'repos' dir, seeded with a
    single file of random data, and an element extracting only that data
    is written to *elements_path*.

    Returns the repo object so callers can add further commits.
    """
    full_elements_path = os.path.join(project_dir, elements_path)
    os.makedirs(full_elements_path, exist_ok=True)

    # Create a repo
    repodir = os.path.join(project_dir, "repos")
    repo = create_repo("tar", repodir, subdir=name)

    with utils._tempdir(dir=project_dir) as tmp:
        # We use a data/ subdir in the git repo we create,
        # and we set the import element to only extract that
        # part; this ensures we never include a .git/ directory
        # in the cached artifacts for these sized elements.
        #
        datadir = os.path.join(tmp, "data")
        os.makedirs(datadir)

        # Use /dev/urandom to create the sized file in the datadir
        with open(os.path.join(datadir, name), "wb+") as f:
            f.write(os.urandom(size))

        # Create the git repo from the temp directory
        ref = repo.create(tmp)

    element = {
        "kind": "import",
        "sources": [repo.source_config(ref=ref)],
        "config": {
            # Extract only the data directory
            "source": "data"
        },
        "depends": dependencies,
    }
    _yaml.roundtrip_dump(element, os.path.join(project_dir, elements_path, name))

    # Return the repo, so that it can later be used to add commits
    return repo
def test_junction_element_not_partial_project_file(cli, tmpdir, datafiles):
    """Junction elements never depend on fully include processed project."""
    project = os.path.join(str(datafiles), "file_with_subproject")
    subproject_path = os.path.join(project, "subproject")
    junction_path = os.path.join(project, "junction.bst")

    # Create a tar repo holding the subproject and write out a junction
    # element that references it with a resolved ref
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(subproject_path)
    element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, junction_path)

    # Showing the junction's variables must succeed
    result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{vars}", "junction.bst"])
    result.assert_success()

    # The 'included' variable must be present in the shown vars
    loaded = _yaml.load_data(result.output)
    assert loaded.get_str("included", default=None) is not None
def test_source_push_build_fail(cli, tmpdir, datafiles):
    """When the build itself fails, no sources are pushed to the
    configured source cache remote.
    """
    project_dir = str(datafiles)
    cache_dir = os.path.join(str(tmpdir), "cache")

    with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
            "cachedir": cache_dir,
        }
        cli.configure(user_config)

        # An element of the 'always_fail' kind, with a valid tar source
        repo = create_repo("tar", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "always-fail.bst"
        element = {"kind": "always_fail", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        # The build must fail at the element task
        res = cli.run(project=project_dir, args=["build", "always-fail.bst"])
        res.assert_main_error(ErrorDomain.STREAM, None)
        res.assert_task_error(ErrorDomain.ELEMENT, None)

        # Sources are not pushed as the build queue is before the source push
        # queue.
        assert "Pushed source " not in res.stderr
def test_build_tar_cross_junction_names(cli, tmpdir, datafiles):
    """Build and checkout a cross-junction target addressed by its
    junctioned name ('base.bst:target.bst'), with implicit subproject fetch.
    """
    project = os.path.join(str(datafiles), "use-repo")
    checkoutdir = os.path.join(str(tmpdir), "checkout")

    # Create the repo from 'base' subdir
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(os.path.join(project, "baserepo"))

    # Write out junction element with tar source
    element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(project, "base.bst"))

    # Build (with implicit fetch of subproject), checkout
    result = cli.run(project=project, args=["build", "base.bst:target.bst"])
    result.assert_success()
    result = cli.run(project=project, args=["artifact", "checkout", "base.bst:target.bst", "--directory", checkoutdir])
    result.assert_success()

    # Check that the checkout contains the expected files from both projects
    assert os.path.exists(os.path.join(checkoutdir, "base.txt"))
def test_track_skip(cli, tmpdir, datafiles):
    """Tracking elements whose sources do not implement track() should be
    reported as SKIPPED, and stack elements should produce no track job.
    """
    project = str(datafiles)
    dev_files_path = os.path.join(project, "files", "dev-files")
    element_path = os.path.join(project, "elements")
    element_dep_name = "track-test-dep.bst"
    element_workspace_name = "track-test-workspace.bst"
    element_target_name = "track-test-target.bst"
    workspace_dir = os.path.join(str(tmpdir), "workspace")

    # Generate an import element with some local source plugins, these
    # do not implement track() and thus can be skipped.
    #
    element = {
        "kind": "import",
        "sources": [
            {"kind": "local", "path": "files/dev-files", "directory": "/foo"},
            {"kind": "local", "path": "files/dev-files", "directory": "/bar"},
        ],
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_dep_name))

    # Generate a regular import element which will have a workspace open
    #
    repo = create_repo("tar", str(tmpdir))
    repo.create(dev_files_path)
    generate_element(repo, os.path.join(element_path, element_workspace_name))

    # Generate a stack element which depends on the import of local files
    #
    # Stack elements do not have any sources, as such they are also skipped.
    #
    element = {
        "kind": "stack",
        "depends": [element_dep_name, element_workspace_name],
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_target_name))

    # First track and fetch the workspace element
    result = cli.run(project=project, args=["source", "track", "--deps", "none", element_workspace_name])
    result.assert_success()
    result = cli.run(project=project, args=["source", "fetch", "--deps", "none", element_workspace_name])
    result.assert_success()

    # Open the workspace so it really is a workspace
    result = cli.run(project=project, args=["workspace", "open", "--directory", workspace_dir, element_workspace_name])
    result.assert_success()

    # Now run track on the stack and all the deps
    result = cli.run(project=project, args=["source", "track", "--deps", "all", element_target_name])
    result.assert_success()

    # Assert we got the expected skip messages
    pattern = r"\[.*track:track-test-dep\.bst.*\] SKIPPED"
    assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 1
    pattern = r"\[.*track:track-test-workspace\.bst.*\] SKIPPED"
    assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 1

    # For now, we expect to not see the job for stack elements
    #
    # This may be revisited, need to consider if we should emit
    # START/SKIPPED message pairs for jobs which were assessed to
    # be unneeded before ever processing.
    #
    pattern = r"\[.*track:track-test-target\.bst.*\]"
    assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 0
def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
    """With a partial local artifact (one blob deleted from the CAS),
    checkout must fail without a remote, and must pull the missing pieces
    when a remote is configured.

    Fix: removed a dead initial ``project = str(datafiles)`` assignment
    that was immediately overwritten by ``project = str(tmpdir)`` with no
    intervening use.
    """
    checkout_dir = os.path.join(str(tmpdir), "checkout")

    repo = create_repo("tar", str(tmpdir))
    repo.create(os.path.join(str(datafiles), "files"))
    element_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    project_config = {
        "name": "partial-artifact-checkout-fetch",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)

    # The input element imports the tar repo contents (ref resolved by tracking below)
    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_name = "input.bst"
    input_file = os.path.join(element_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})

        result = cli.run(project=project, args=["source", "track", input_name])
        result.assert_success()
        result = cli.run(project=project, args=["build", input_name])
        result.assert_success()

        # A push artifact cache means we have to pull to push to them, so
        # delete some blobs from that CAS such that we have to fetch
        digest = utils.sha256sum(os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
        objpath = os.path.join(cli.directory, "cas", "objects", digest[:2], digest[2:])
        os.unlink(objpath)

        # Verify that the build-only dependency is not (complete) in the local cache
        cli.configure({"artifacts": {}})
        result = cli.run(project=project, args=["artifact", "checkout", input_name, "--directory", checkout_dir])
        result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")

        # Verify that the pull method fetches relevant artifacts in order to stage
        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
        result = cli.run(project=project, args=["artifact", "checkout", input_name, "--directory", checkout_dir])
        result.assert_success()

        # should have pulled whatever was deleted previous
        assert input_name in result.get_pulled_elements()
def test_junction_build_remote(cli, tmpdir, datafiles):
    """Build a compose element depending across an untracked junction
    using remote execution services.
    """
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    subproject_element_path = os.path.join(subproject_path, "elements")
    amhello_files_path = os.path.join(subproject_path, "files")
    element_path = os.path.join(project, "elements")
    junction_path = os.path.join(element_path, "junction.bst")

    # We need a repo for real trackable elements
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(amhello_files_path)

    # ensure that the correct project directory is also listed in the junction
    subproject_conf = os.path.join(subproject_path, "project.conf")
    with open(subproject_conf, encoding="utf-8") as f:
        config = f.read()
    config = config.format(project_dir=subproject_path)
    with open(subproject_conf, "w", encoding="utf-8") as f:
        f.write(config)

    # Create a trackable element to depend on the cross junction element,
    # this one has it's ref resolved already
    create_element(repo, "sub-target.bst", subproject_element_path, ["autotools/amhello.bst"], ref=ref)

    # Create a trackable element to depend on the cross junction element
    create_element(repo, "target.bst", element_path, [{"junction": "junction.bst", "filename": "sub-target.bst"}])

    # Create a repo to hold the subproject and generate a junction element for it
    generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)

    # Now create a compose element at the top level
    element = {"kind": "compose", "depends": [{"filename": "target.bst", "type": "build"}]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, "composed.bst"))

    # We're doing remote execution so ensure services are available
    services = cli.ensure_services()
    assert set(services) == set(["action-cache", "execution", "storage"])

    # track the junction first to ensure we have refs
    result = cli.run(project=project, args=["source", "track", "junction.bst"])
    result.assert_success()

    # track target to ensure we have refs
    result = cli.run(project=project, args=["source", "track", "--deps", "all", "composed.bst"])
    result.assert_success()

    # build
    result = cli.run(project=project, silent=True, args=["build", "composed.bst"])
    result.assert_success()

    # Assert that the main target is cached as a result
    assert cli.get_element_state(project, "composed.bst") == "cached"