def test_submodule_fetch_checkout(cli, tmpdir, datafiles):
    project = str(datafiles)
    checkoutdir = os.path.join(str(tmpdir), "checkout")

    # Create the submodule first from the 'subrepofiles' subdir
    subrepo = create_repo("git", str(tmpdir), "subrepo")
    subrepo.create(os.path.join(project, "subrepofiles"))

    # Create the repo from 'repofiles' subdir
    repo = create_repo("git", str(tmpdir))
    repo.create(os.path.join(project, "repofiles"))

    # Add a submodule pointing to the one we created
    ref = repo.add_submodule("subdir", "file://" + subrepo.repo)

    # Write out our test target
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    generate_element(project, "target.bst", element)

    # Fetch, build, checkout
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_success()
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()
    result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir])
    result.assert_success()

    # Assert we checked out both files at their expected location
    assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
    assert os.path.exists(os.path.join(checkoutdir, "subdir", "ponyfile.txt"))
def test_unlisted_submodule(cli, tmpdir, datafiles, fail):
    project = str(datafiles)

    # Make the warning an error if we're testing errors
    if fail == "error":
        generate_project(project, config={"fatal-warnings": ["git:unlisted-submodule"]})

    # Create the submodule first from the 'subrepofiles' subdir
    subrepo = create_repo("git", str(tmpdir), "subrepo")
    subrepo.create(os.path.join(project, "subrepofiles"))

    # Create the repo from 'repofiles' subdir
    repo = create_repo("git", str(tmpdir))
    repo.create(os.path.join(project, "repofiles"))

    # Add a submodule pointing to the one we created
    ref = repo.add_submodule("subdir", "file://" + subrepo.repo)

    # Create the source, and delete the explicit configuration
    # of the submodules.
    #
    # We expect this to cause an unlisted submodule warning
    # after the source has been fetched.
    #
    gitsource = repo.source_config(ref=ref)
    del gitsource["submodules"]

    # Write out our test target
    element = {"kind": "import", "sources": [gitsource]}
    generate_element(project, "target.bst", element)

    # The warning or error is reported during fetch. There should be no
    # error with `bst show`.
    result = cli.run(project=project, args=["show", "target.bst"])
    result.assert_success()
    assert "git:unlisted-submodule" not in result.stderr

    # We will notice this directly in fetch, as it will try to fetch
    # the submodules it discovers as a result of fetching the primary repo.
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])

    # Assert a warning or an error depending on what we're checking
    if fail == "error":
        result.assert_main_error(ErrorDomain.STREAM, None)
        result.assert_task_error(ErrorDomain.PLUGIN, "git:unlisted-submodule")
    else:
        result.assert_success()
        assert "git:unlisted-submodule" in result.stderr

    # Verify that `bst show` will still not error out after fetching.
    result = cli.run(project=project, args=["show", "target.bst"])
    result.assert_success()
    assert "git:unlisted-submodule" not in result.stderr
def test_track_unlisted_submodule(cli, tmpdir, datafiles, fail):
    project = str(datafiles)

    # Make the warning an error if we're testing errors
    if fail == "error":
        generate_project(project, config={"fatal-warnings": ["git:unlisted-submodule"]})

    # Create the submodule first from the 'subrepofiles' subdir
    subrepo = create_repo("git", str(tmpdir), "subrepo")
    subrepo.create(os.path.join(project, "subrepofiles"))

    # Create the repo from 'repofiles' subdir
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(project, "repofiles"))

    # Add a submodule pointing to the one we created, but use
    # the original ref, let the submodules appear after tracking
    repo.add_submodule("subdir", "file://" + subrepo.repo)

    # Create the source, and delete the explicit configuration
    # of the submodules.
    gitsource = repo.source_config(ref=ref)
    del gitsource["submodules"]

    # Write out our test target
    element = {"kind": "import", "sources": [gitsource]}
    generate_element(project, "target.bst", element)

    # Fetch the repo, we will not see the warning because we
    # are still pointing to a ref which predates the submodules
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_success()
    assert "git:unlisted-submodule" not in result.stderr

    # We won't get a warning/error when tracking either, the source
    # has not been cached yet so the opportunity to check
    # for the warning has not yet arisen.
    result = cli.run(project=project, args=["source", "track", "target.bst"])
    result.assert_success()
    assert "git:unlisted-submodule" not in result.stderr

    # Fetching the repo at the new ref will finally reveal the warning
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    if fail == "error":
        result.assert_main_error(ErrorDomain.STREAM, None)
        result.assert_task_error(ErrorDomain.PLUGIN, "git:unlisted-submodule")
    else:
        result.assert_success()
        assert "git:unlisted-submodule" in result.stderr
def test_track_invalid_submodule(cli, tmpdir, datafiles, fail):
    project = str(datafiles)

    # Make the warning an error if we're testing errors
    if fail == "error":
        generate_project(project, config={"fatal-warnings": ["git:invalid-submodule"]})

    # Create the submodule first from the 'subrepofiles' subdir
    subrepo = create_repo("git", str(tmpdir), "subrepo")
    subrepo.create(os.path.join(project, "subrepofiles"))

    # Create the repo from 'repofiles' subdir
    repo = create_repo("git", str(tmpdir))
    repo.create(os.path.join(project, "repofiles"))

    # Add a submodule pointing to the one we created
    ref = repo.add_submodule("subdir", "file://" + subrepo.repo)

    # Add a commit beyond the ref which *removes* the submodule we've added
    repo.remove_path("subdir")

    # Create the source, this will keep the submodules so initially
    # the configuration is valid for the ref we're using
    gitsource = repo.source_config(ref=ref)

    # Write out our test target
    element = {"kind": "import", "sources": [gitsource]}
    generate_element(project, "target.bst", element)

    # Fetch the repo, we will not see the warning because the
    # submodule configuration is still valid for the ref we're using
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_success()
    assert "git:invalid-submodule" not in result.stderr

    # After tracking we're pointing to a ref which would trigger an invalid
    # submodule warning. However, cache validation is only performed as part
    # of fetch.
    result = cli.run(project=project, args=["source", "track", "target.bst"])
    result.assert_success()

    # Fetch to trigger cache validation
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    if fail == "error":
        result.assert_main_error(ErrorDomain.STREAM, None)
        result.assert_task_error(ErrorDomain.PLUGIN, "git:invalid-submodule")
    else:
        result.assert_success()
        assert "git:invalid-submodule" in result.stderr
def test_fetch_shallow(cli, tmpdir, datafiles):
    project = str(datafiles)
    workspacedir = os.path.join(str(tmpdir), "workspace")

    # Create the repo from 'repofiles' subdir
    repo = create_repo("git", str(tmpdir))
    repo.create(os.path.join(project, "repofiles"))
    first_commit = repo.latest_commit()
    repo.add_commit()
    repo.add_tag("tag")

    ref = "tag-0-g" + repo.latest_commit()
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    generate_element(project, "target.bst", element)

    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_success()
    result = cli.run(project=project, args=["workspace", "open", "--directory", workspacedir, "target.bst"])
    result.assert_success()

    assert subprocess.call(["git", "show", "tag"], cwd=workspacedir) == 0
    assert subprocess.call(["git", "show", first_commit], cwd=workspacedir) != 0
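# Note on the ref format used in test_fetch_shallow() above (a descriptive
# note, not part of the original suite): the git source accepts `git describe`
# style refs of the form "<tag>-<commits-since-tag>-g<sha>", so
# "tag-0-g" + repo.latest_commit() names the tagged commit itself. The final
# two assertions then verify the clone was shallow: the tagged commit is
# available in the workspace while the older pre-tag commit was not fetched.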
def test_ref_not_in_track(cli, tmpdir, datafiles, fail):
    project = str(datafiles)

    # Make the warning an error if we're testing errors
    if fail == "error":
        generate_project(project, config={"fatal-warnings": [CoreWarnings.REF_NOT_IN_TRACK]})

    # Create the repo from 'repofiles', create a branch without latest commit
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(project, "repofiles"))

    gitsource = repo.source_config(ref=ref)

    # Overwrite the track value to the added branch
    gitsource["track"] = "foo"

    # Write out our test target
    element = {"kind": "import", "sources": [gitsource]}
    generate_element(project, "target.bst", element)

    result = cli.run(project=project, args=["build", "target.bst"])

    # Assert a warning or an error depending on what we're checking
    if fail == "error":
        result.assert_main_error(ErrorDomain.STREAM, None)
        result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.REF_NOT_IN_TRACK)
    else:
        result.assert_success()
        assert "ref-not-in-track" in result.stderr
def test_submodule_track_ignore_inconsistent(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Create the repo from 'repofiles' subdir
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(project, "repofiles"))

    # Write out our test target
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    generate_element(project, "target.bst", element)

    # Now add a .gitmodules file with an inconsistent submodule,
    # we call this inconsistent because the file was created
    # but `git submodule add` was never called, so there is no
    # reference associated with the submodule.
    #
    repo.add_file(os.path.join(project, "inconsistent-submodule", ".gitmodules"))

    # Fetch should work, we're not yet at the offending ref
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_success()

    # Track to update to the offending commit
    result = cli.run(project=project, args=["source", "track", "target.bst"])
    result.assert_success()

    # Fetch after track will encounter an inconsistent submodule without any ref
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_success()

    # Assert that we are just fine without it, and emit a warning to the user.
    assert "Ignoring inconsistent submodule" in result.stderr
def test_default_do_not_track_tags(cli, tmpdir, datafiles):
    project = str(datafiles)

    project_config = load_yaml(os.path.join(project, "project.conf"))
    project_config["ref-storage"] = "inline"
    generate_project(project, config=project_config)

    repofiles = os.path.join(str(tmpdir), "repofiles")
    os.makedirs(repofiles, exist_ok=True)
    file0 = os.path.join(repofiles, "file0")
    with open(file0, "w", encoding="utf-8") as f:
        f.write("test\n")

    repo = create_repo("git", str(tmpdir))
    repo.create(repofiles)
    repo.add_tag("tag")

    config = repo.source_config()
    config["track"] = repo.latest_commit()

    # Write out our test target
    element = {
        "kind": "import",
        "sources": [config],
    }
    generate_element(project, "target.bst", element)
    element_path = os.path.join(project, "target.bst")

    result = cli.run(project=project, args=["source", "track", "target.bst"])
    result.assert_success()

    element = load_yaml(element_path)
    source = element.get_sequence("sources").mapping_at(0)
    assert "tags" not in source
def test_pull_missing_local_blob(cli, tmpdir, datafiles):
    repo = create_repo("git", str(tmpdir))
    repo.create(os.path.join(str(datafiles), "files"))
    element_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    project_config = {
        "name": "pull-missing-local-blob",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)
    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_name = "input.bst"
    input_file = os.path.join(element_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    depends_name = "depends.bst"
    depends_config = {"kind": "stack", "depends": [input_name]}
    depends_file = os.path.join(element_dir, depends_name)
    _yaml.roundtrip_dump(depends_config, depends_file)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # First build the input element and push it to the remote.
        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
        result = cli.run(project=project, args=["source", "track", input_name])
        result.assert_success()
        result = cli.run(project=project, args=["build", input_name])
        result.assert_success()
        assert cli.get_element_state(project, input_name) == "cached"

        # Delete a file blob from the local cache.
        # This is a placeholder to test partial CAS handling until we support
        # partial artifact pulling (or blob-based CAS expiry).
        #
        digest = utils.sha256sum(os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
        objpath = os.path.join(cli.directory, "cas", "objects", digest[:2], digest[2:])
        os.unlink(objpath)

        # Now try bst build
        result = cli.run(project=project, args=["build", depends_name])
        result.assert_success()

        # Assert that the input artifact was pulled (completing the partial artifact)
        assert result.get_pulled_elements() == [input_name]
def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
    checkout = os.path.join(cli.directory, "checkout")
    project = str(datafiles)
    dev_files_path = os.path.join(project, "files", "dev-files")
    element_path = os.path.join(project, "elements")
    element_name = "build-test-{}.bst".format(kind)

    # Create our repo object of the given source type with
    # the dev files, and then collect the initial ref.
    #
    repo = create_repo(kind, str(tmpdir))
    ref = repo.create(dev_files_path)

    # Write out our test target
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    assert cli.get_element_state(project, element_name) == "fetch needed"
    result = cli.run(project=project, args=strict_args(["build", element_name], strict))
    result.assert_success()
    assert cli.get_element_state(project, element_name) == "cached"

    # Now check it out
    result = cli.run(
        project=project,
        args=strict_args(["artifact", "checkout", element_name, "--directory", checkout], strict),
    )
    result.assert_success()

    # Check that the pony.h include from files/dev-files exists
    filename = os.path.join(checkout, "usr", "include", "pony.h")
    assert os.path.exists(filename)
def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True, options=None):
    # Create a repo to hold the subproject and generate
    # a junction element for it
    #
    repo = create_repo("git", str(tmpdir))
    source_ref = ref = repo.create(subproject_path)
    if not store_ref:
        source_ref = None

    element = {"kind": "junction", "sources": [repo.source_config(ref=source_ref)]}
    if options:
        element["config"] = {"options": options}
    _yaml.roundtrip_dump(element, junction_path)

    return ref
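# A minimal usage sketch for generate_junction() (hypothetical caller, not
# taken from the tests above): junction_path typically points into the
# project's element path, and a junction generated with store_ref=False must
# be tracked before it can be fetched, as test_junction_build_remote() below
# does.
#
#   junction_path = os.path.join(project, "elements", "junction.bst")
#   generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)
#   result = cli.run(project=project, args=["source", "track", "junction.bst"])
#   result.assert_success()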
def test_push_fail(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    cache_dir = os.path.join(str(tmpdir), "cache")

    # Set up config with a remote that we'll take down
    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        remote = share.repo
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"url": share.repo, "push": True},
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

    # Create a repo to pull from
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(project_dir, "files"))
    element_path = os.path.join(project_dir, "elements")
    element_name = "push.bst"
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # Build and check that it fails to set up the remote
    res = cli.run(project=project_dir, args=["build", "push.bst"])
    res.assert_success()

    assert "Failed to initialize remote {}".format(remote) in res.stderr
    assert "Pushing" not in res.stderr
    assert "Pushed" not in res.stderr
def test_push_pull(cli, datafiles, tmpdir):
    project_dir = str(datafiles)
    cache_dir = os.path.join(str(tmpdir), "cache")

    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"url": share.repo, "push": True},
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        # Create a repo to pull from
        repo = create_repo("git", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "push.bst"
        element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        res = cli.run(project=project_dir, args=["build", "push.bst"])
        res.assert_success()

        # Remove the local cache dir and the repo files, and check it all works
        shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)
        shutil.rmtree(repo.repo)

        # Check that it pulls from the share
        res = cli.run(project=project_dir, args=["build", "push.bst"])
        res.assert_success()
def test_default_target_track(cli, tmpdir, datafiles):
    project = str(datafiles)
    project_path = os.path.join(project, "project.conf")
    target = "track-fetch-test.bst"

    # First, create an element with trackable sources
    repo = create_repo("git", str(tmpdir))
    repo.create(project)
    element_conf = {"kind": "import", "sources": [repo.source_config()]}
    _yaml.roundtrip_dump(element_conf, os.path.join(project, "elements", target))

    # Then, make it the default target
    project_conf = {
        "name": "test-default-target",
        "min-version": "2.0",
        "element-path": "elements",
        "defaults": {"targets": [target]},
    }
    _yaml.roundtrip_dump(project_conf, project_path)

    # Setup finished. Track it now
    assert cli.get_element_state(project, target) == "no reference"
    result = cli.run(project=project, args=["source", "track"])
    result.assert_success()

    # Tracking will result in fetching it automatically, so we expect the state
    # to be buildable.
    assert cli.get_element_state(project, target) == "buildable"
def test_source_push_build_fail(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    cache_dir = os.path.join(str(tmpdir), "cache")

    with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"url": share.repo, "push": True},
            "cachedir": cache_dir,
        }
        cli.configure(user_config)

        repo = create_repo("git", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")

        element_name = "always-fail.bst"
        element = {"kind": "always_fail", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        res = cli.run(project=project_dir, args=["build", "always-fail.bst"])
        res.assert_main_error(ErrorDomain.STREAM, None)
        res.assert_task_error(ErrorDomain.ELEMENT, None)

        # Sources are not pushed as the build queue is before the source push
        # queue.
        assert "Pushed source " not in res.stderr
def test_fetch_checkout(cli, tmpdir, datafiles):
    project = str(datafiles)
    checkoutdir = os.path.join(str(tmpdir), "checkout")
    repo = create_repo("bzr", str(tmpdir))
    ref = repo.create(os.path.join(project, "basic"))

    # Write out our test target
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    generate_element(project, "target.bst", element)

    # Fetch, build, checkout
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    assert result.exit_code == 0
    result = cli.run(project=project, args=["build", "target.bst"])
    assert result.exit_code == 0
    result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir])
    assert result.exit_code == 0

    # Assert we checked out the file as it was committed
    with open(os.path.join(checkoutdir, "test"), encoding="utf-8") as f:
        text = f.read()
    assert text == "test\n"
def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
    checkout_dir = os.path.join(str(tmpdir), "checkout")

    repo = create_repo("git", str(tmpdir))
    repo.create(os.path.join(str(datafiles), "files"))
    element_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    project_config = {
        "name": "partial-artifact-checkout-fetch",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)
    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_name = "input.bst"
    input_file = os.path.join(element_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        result = cli.run(project=project, args=["source", "track", input_name])
        result.assert_success()
        result = cli.run(project=project, args=["build", input_name])
        result.assert_success()

        # A push artifact cache means we have to pull to push to them, so
        # delete some blobs from that CAS such that we have to fetch
        digest = utils.sha256sum(os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
        objpath = os.path.join(cli.directory, "cas", "objects", digest[:2], digest[2:])
        os.unlink(objpath)

        # Verify that the build-only dependency is not (completely) in the local cache
        result = cli.run(project=project, args=["artifact", "checkout", input_name, "--directory", checkout_dir])
        result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")

        # Verify that the pull method fetches relevant artifacts in order to stage
        result = cli.run(project=project, args=["artifact", "checkout", "--pull", input_name, "--directory", checkout_dir])
        result.assert_success()

        # Should have pulled whatever was deleted previously
        assert input_name in result.get_pulled_elements()
def test_fetch_gpg_verify(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    gpg_homedir = os.path.join(DATA_DIR, "gpghome")

    # Create the repo from 'repofiles' subdir
    repo = create_repo("ostree", str(tmpdir))
    ref = repo.create(
        os.path.join(project, "repofiles"),
        gpg_sign="FFFF54C070353B52D046DEB087FA0F41A6EFD9E9",
        gpg_homedir=gpg_homedir,
    )

    # Write out our test target
    ostreesource = repo.source_config(ref=ref, gpg_key="test.gpg")
    element = {"kind": "import", "sources": [ostreesource]}
    _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))

    # Assert that a fetch is needed
    assert cli.get_element_state(project, "target.bst") == "fetch needed"

    # Now try to fetch it
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_success()

    # Assert that we are now buildable because the source is
    # now cached.
    assert cli.get_element_state(project, "target.bst") == "buildable"
def test_track_error_cannot_write_file(cli, tmpdir, datafiles):
    if os.geteuid() == 0:
        pytest.skip("This is not testable with root permissions")

    project = str(datafiles)
    dev_files_path = os.path.join(project, "files", "dev-files")
    element_path = os.path.join(project, "elements")
    element_name = "track-test.bst"

    configure_project(project, {"ref-storage": "inline"})

    repo = create_repo("git", str(tmpdir))
    repo.create(dev_files_path)

    element_full_path = os.path.join(element_path, element_name)
    generate_element(repo, element_full_path)

    st = os.stat(element_path)
    try:
        # Strip the write bits from the element directory so that
        # the tracked ref cannot be written back to the element file
        write_mask = stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
        os.chmod(element_path, stat.S_IMODE(st.st_mode) & ~write_mask)

        result = cli.run(project=project, args=["source", "track", element_name])
        result.assert_main_error(ErrorDomain.STREAM, None)
        result.assert_task_error(ErrorDomain.SOURCE, "save-ref-error")
    finally:
        os.chmod(element_path, stat.S_IMODE(st.st_mode))
def test_custom_logging(cli, tmpdir, datafiles):
    project = str(datafiles)
    bin_files_path = os.path.join(project, "files", "bin-files")
    element_path = os.path.join(project, "elements")
    element_name = "fetch-test-git.bst"

    custom_log_format = "%{elapsed},%{elapsed-us},%{wallclock},%{wallclock-us},%{key},%{element},%{action},%{message}"
    user_config = {"logging": {"message-format": custom_log_format}}
    cli.configure(user_config)

    # Create our repo object of the given source type with
    # the bin files, and then collect the initial ref.
    #
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(bin_files_path)

    # Write out our test target
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # Now try to fetch it
    result = cli.run(project=project, args=["source", "fetch", element_name])
    result.assert_success()

    m = re.search(
        r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6}\s*,.*"
        r",SUCCESS,Checking sources",
        result.stderr,
    )
    assert m is not None
def prepare_junction_project(cli, tmpdir):
    main_project = tmpdir.join("main")
    sub_project = tmpdir.join("sub")
    os.makedirs(str(main_project))
    os.makedirs(str(sub_project))

    _yaml.roundtrip_dump({"name": "main", "min-version": "2.0"}, str(main_project.join("project.conf")))
    _yaml.roundtrip_dump({"name": "sub", "min-version": "2.0"}, str(sub_project.join("project.conf")))

    import_dir = tmpdir.join("import")
    os.makedirs(str(import_dir))
    with open(str(import_dir.join("hello.txt")), "w", encoding="utf-8") as f:
        f.write("hello!")

    import_repo_dir = tmpdir.join("import_repo")
    os.makedirs(str(import_repo_dir))
    import_repo = create_repo("git", str(import_repo_dir))
    import_ref = import_repo.create(str(import_dir))

    _yaml.roundtrip_dump(
        {"kind": "import", "sources": [import_repo.source_config(ref=import_ref)]},
        str(sub_project.join("data.bst")),
    )

    sub_repo_dir = tmpdir.join("sub_repo")
    os.makedirs(str(sub_repo_dir))
    sub_repo = create_repo("git", str(sub_repo_dir))
    sub_ref = sub_repo.create(str(sub_project))

    _yaml.roundtrip_dump(
        {"kind": "junction", "sources": [sub_repo.source_config(ref=sub_ref)]},
        str(main_project.join("sub.bst")),
    )

    args = ["source", "fetch", "sub.bst"]
    result = cli.run(project=str(main_project), args=args)
    result.assert_success()

    return str(main_project)
def create_test_element(tmpdir, project_dir):
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(project_dir, "files"))
    element_path = os.path.join(project_dir, "elements")
    element_name = "fetch.bst"
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    return element_name, repo, ref
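# A minimal usage sketch for create_test_element() (hypothetical test body,
# assuming the same cli/tmpdir fixtures used throughout this suite): the
# returned element name can be fetched directly, and the repo/ref pair allows
# mutating the upstream repository afterwards.
#
#   element_name, repo, ref = create_test_element(tmpdir, project_dir)
#   result = cli.run(project=project_dir, args=["source", "fetch", element_name])
#   result.assert_success()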
def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(str(datafiles), "files"))
    elements_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    input_name = "input.bst"

    project_config = {
        "name": "filter-track-test",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)

    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_file = os.path.join(elements_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    filter1_config = {"kind": "filter", "depends": [{"filename": input_name, "type": "build"}]}
    filter1_file = os.path.join(elements_dir, "filter1.bst")
    _yaml.roundtrip_dump(filter1_config, filter1_file)

    filter2_config = {"kind": "filter", "depends": [{"filename": input_name, "type": "build"}]}
    filter2_file = os.path.join(elements_dir, "filter2.bst")
    _yaml.roundtrip_dump(filter2_config, filter2_file)

    # Assert that the input element still needs to be tracked
    assert cli.get_element_state(project, input_name) == "no reference"

    # Now try to track it
    result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
    result.assert_success()

    # Now check that a ref field exists
    new_input = _yaml.load(input_file, shortname=None)
    source_node = new_input.get_sequence("sources").mapping_at(0)
    new_ref = source_node.get_str("ref")
    assert new_ref == ref
def create_element(project, name, dependencies):
    dev_files_path = os.path.join(project, "files", "dev-files")
    element_path = os.path.join(project, "elements")

    repo = create_repo("git", project, "{}-repo".format(name))
    ref = repo.create(dev_files_path)

    element = {"kind": "import", "sources": [repo.source_config(ref=ref)], "depends": dependencies}
    _yaml.roundtrip_dump(element, os.path.join(element_path, name))

    return repo
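# A minimal usage sketch for the create_element() helper above (hypothetical
# element names): each element gets its own repo, so tests can track or
# mutate the sources of individual elements independently.
#
#   repo_a = create_element(project, "a.bst", [])
#   repo_b = create_element(project, "b.bst", ["a.bst"])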
def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
    bin_files_path = os.path.join(str(datafiles), "files", "bin-files", "usr")
    dev_files_path = os.path.join(str(datafiles), "files", "dev-files", "usr")
    upstream_repodir = os.path.join(str(tmpdir), "upstream")
    mirror_repodir = os.path.join(str(tmpdir), "mirror")
    project_dir = os.path.join(str(tmpdir), "project")
    os.makedirs(project_dir)
    element_dir = os.path.join(project_dir, "elements")

    # Create repo objects of the upstream and mirror
    upstream_repo = create_repo("tar", upstream_repodir)
    upstream_repo.create(bin_files_path)
    mirror_repo = upstream_repo.copy(mirror_repodir)
    upstream_ref = upstream_repo.create(dev_files_path)

    element = {
        "kind": "import",
        "sources": [upstream_repo.source_config(ref=upstream_ref if ref_storage == "inline" else None)],
    }
    element_name = "test.bst"
    element_path = os.path.join(element_dir, element_name)
    full_repo = element["sources"][0]["url"]
    upstream_map, repo_name = os.path.split(full_repo)
    alias = "foo"
    aliased_repo = alias + ":" + repo_name
    element["sources"][0]["url"] = aliased_repo
    full_mirror = mirror_repo.source_config()["url"]
    mirror_map, _ = os.path.split(full_mirror)
    os.makedirs(element_dir)
    _yaml.roundtrip_dump(element, element_path)

    if ref_storage == "project.refs":
        # Manually set project.refs to avoid caching the repo prematurely
        project_refs = {"projects": {"test": {element_name: [{"ref": upstream_ref}]}}}
        project_refs_path = os.path.join(project_dir, "project.refs")
        _yaml.roundtrip_dump(project_refs, project_refs_path)

    project = {
        "name": "test",
        "min-version": "2.0",
        "element-path": "elements",
        "aliases": {alias: upstream_map + "/"},
        "ref-storage": ref_storage,
    }
    if mirror != "no-mirror":
        mirror_data = [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"]}}]
        if mirror == "unrelated-mirror":
            mirror_data.insert(0, {"name": "narnia", "aliases": {"frob": ["http://www.example.com/repo"]}})
        project["mirrors"] = mirror_data

    project_file = os.path.join(project_dir, "project.conf")
    _yaml.roundtrip_dump(project, project_file)

    result = cli.run(project=project_dir, args=["source", "fetch", element_name])
    result.assert_success()
def test_source_push(cli, tmpdir, datafiles):
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"servers": [{"url": share.repo, "push": True}]},
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        repo = create_repo("git", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "push.bst"
        element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        # get the source object
        with dummy_context(config=user_config_file) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements(["push.bst"])[0]
            element._initialize_state()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            # check we don't have it in the current cache
            assert not share.get_source_proto(source._get_source_name())

            # build the element, this should fetch and then push the source to the
            # remote
            res = cli.run(project=project_dir, args=["build", "push.bst"])
            res.assert_success()
            assert "Pushed source" in res.stderr

            # check that we now have the source in the local cache
            sourcecache = context.sourcecache
            assert sourcecache.contains(source)

            # check that the remote CAS now has it
            digest = sourcecache.export(source)._get_digest()
            assert share.has_object(digest)
def test_junction_build_remote(cli, tmpdir, datafiles):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    subproject_element_path = os.path.join(subproject_path, "elements")
    amhello_files_path = os.path.join(subproject_path, "files")
    element_path = os.path.join(project, "elements")
    junction_path = os.path.join(element_path, "junction.bst")

    # We need a repo for real trackable elements
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(amhello_files_path)

    # Ensure that the correct project directory is also listed in the junction
    subproject_conf = os.path.join(subproject_path, "project.conf")
    with open(subproject_conf, encoding="utf-8") as f:
        config = f.read()
    config = config.format(project_dir=subproject_path)
    with open(subproject_conf, "w", encoding="utf-8") as f:
        f.write(config)

    # Create a trackable element to depend on the cross junction element,
    # this one has its ref resolved already
    create_element(repo, "sub-target.bst", subproject_element_path, ["autotools/amhello.bst"], ref=ref)

    # Create a trackable element to depend on the cross junction element
    create_element(repo, "target.bst", element_path, [{"junction": "junction.bst", "filename": "sub-target.bst"}])

    # Create a repo to hold the subproject and generate a junction element for it
    generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)

    # Now create a compose element at the top level
    element = {"kind": "compose", "depends": [{"filename": "target.bst", "type": "build"}]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, "composed.bst"))

    # We're doing remote execution so ensure services are available
    services = cli.ensure_services()
    assert set(services) == set(["action-cache", "execution", "storage"])

    # track the junction first to ensure we have refs
    result = cli.run(project=project, args=["source", "track", "junction.bst"])
    result.assert_success()

    # track target to ensure we have refs
    result = cli.run(project=project, args=["source", "track", "--deps", "all", "composed.bst"])
    result.assert_success()

    # build
    result = cli.run(project=project, silent=True, args=["build", "composed.bst"])
    result.assert_success()

    # Assert that the main target is cached as a result
    assert cli.get_element_state(project, "composed.bst") == "cached"
def test_no_fetch_cached(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Create the repo from 'files' subdir
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(project, "files"))

    # Write out test target with a cached and a non-cached source
    element = {"kind": "import", "sources": [repo.source_config(ref=ref), {"kind": "always_cached"}]}
    generate_element(project, "target.bst", element)

    # Test fetch of target with a cached and a non-cached source
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_success()
def test_fetch_bad_ref(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Create the repo from 'repofiles' subdir
    repo = create_repo("git", str(tmpdir))
    repo.create(os.path.join(project, "repofiles"))

    # Write out our test target with a bad ref
    element = {"kind": "import", "sources": [repo.source_config(ref="5")]}
    generate_element(project, "target.bst", element)

    # Assert that fetch raises an error here
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_main_error(ErrorDomain.STREAM, None)
    result.assert_task_error(ErrorDomain.SOURCE, None)
def test_tar_show(cli, tmpdir, datafiles):
    project = os.path.join(str(datafiles), "use-repo")

    # Create the repo from 'baserepo' subdir
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(os.path.join(project, "baserepo"))

    # Write out junction element with tar source
    element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(project, "base.bst"))

    # Check that bst show succeeds with implicit subproject fetching and the
    # pipeline includes the subproject element
    element_list = cli.get_pipeline(project, ["target.bst"])
    assert "base.bst:target.bst" in element_list