Example #1
def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir):
    output_file = os.path.join(str(tmpdir), "output.txt")
    project_dir = str(tmpdir)
    element_dir = os.path.join(project_dir, "elements")
    os.makedirs(element_dir, exist_ok=True)
    element_name = "test.bst"
    element_path = os.path.join(element_dir, element_name)
    element = generate_element(output_file)
    _yaml.roundtrip_dump(element, element_path)

    project_file = os.path.join(project_dir, "project.conf")
    project = generate_project()
    _yaml.roundtrip_dump(project, project_file)

    userconfig = {"projects": {"test": {"default-mirror": "oz"}}}
    cli.configure(userconfig)

    result = cli.run(project=project_dir, args=["--default-mirror", "arrakis", "source", "fetch", element_name])
    result.assert_success()
    with open(output_file) as f:
        contents = f.read()
        print(contents)
        # Success if fetching from arrakis' mirror happened before middle-earth's
        arrakis_str = "OFO/repo1"
        arrakis_pos = contents.find(arrakis_str)
        assert arrakis_pos != -1, "'{}' wasn't found".format(arrakis_str)
        me_str = "OOF/repo1"
        me_pos = contents.find(me_str)
        assert me_pos != -1, "'{}' wasn't found".format(me_str)
        assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
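
For reference, `generate_project` and `generate_element` are local helpers defined elsewhere in the test module. A minimal sketch of their likely shape follows; the mirror names ("arrakis", "oz", the implicit "middle-earth") and the URL markers ("OFO/", "OOF/") are taken from the assertions above, while the alias name, the "fetch_source" kind and the exact layout are assumptions.

def generate_project():
    # Hypothetical sketch: one alias plus three named mirrors whose URL
    # prefixes ("OFO/", "OOF/", ...) let the test detect fetch order
    return {
        "name": "test",
        "min-version": "2.0",
        "aliases": {"foo": "FOO/"},
        "mirrors": [
            {"name": "middle-earth", "aliases": {"foo": ["OOF/"]}},
            {"name": "arrakis", "aliases": {"foo": ["OFO/"]}},
            {"name": "oz", "aliases": {"foo": ["ooF/"]}},
        ],
    }


def generate_element(output_file):
    # Hypothetical sketch: a test-only source kind that appends every URL
    # it attempts to fetch to output_file, so the test can assert ordering
    return {
        "kind": "import",
        "sources": [
            {
                "kind": "fetch_source",
                "output-text": output_file,
                "urls": ["foo:repo1"],
            }
        ],
    }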
Example #2
def test_shell_use_cached_buildtree(share_with_buildtrees, datafiles, cli,
                                    pull_deps, pull_buildtree, expect_error):
    project = str(datafiles)
    element_name = "build-shell/buildtree.bst"

    cli.configure(
        {"artifacts": {
            "servers": [{
                "url": share_with_buildtrees.repo
            }]
        }})

    # Optionally pull the buildtree along with `bst artifact pull`
    maybe_pull_deps(cli, project, element_name, pull_deps, pull_buildtree)

    # Disable access to the artifact server after pulling, so that `bst shell`
    # cannot automatically pull the missing bits; this should be equivalent to
    # the bits being missing from the remote server
    cli.configure({"artifacts": {}})

    # Run the shell without asking it to pull any buildtree, just asking to use a buildtree
    result = cli.run(project=project,
                     args=[
                         "shell", "--build", element_name, "--use-buildtree",
                         "--", "cat", "test"
                     ])

    if expect_error:
        result.assert_main_error(ErrorDomain.APP, expect_error)
    else:
        result.assert_success()
        assert "Hi" in result.output
Example #3
def test_shell_pull_cached_buildtree(share_with_buildtrees, datafiles, cli,
                                     pull_deps, pull_buildtree):
    project = str(datafiles)
    element_name = "build-shell/buildtree.bst"

    cli.configure({"artifacts": {"url": share_with_buildtrees.repo}})

    # Optionally pull the buildtree along with `bst artifact pull`
    maybe_pull_deps(cli, project, element_name, pull_deps, pull_buildtree)

    # Run the shell and request that required artifacts and buildtrees should be pulled
    result = cli.run(
        project=project,
        args=[
            "--pull-buildtrees",
            "shell",
            "--build",
            element_name,
            "--pull",
            "--use-buildtree",
            "--",
            "cat",
            "test",
        ],
    )

    # In this case, we should succeed every time, regardless of what was
    # originally available in the local cache.
    #
    result.assert_success()
    assert "Hi" in result.output
Example #4
def test_push_update_after_rebuild(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)

    generate_project(
        project,
        config={
            "element-path": "elements",
            "min-version": "2.0",
            "plugins": [{"origin": "local", "path": "plugins", "elements": ["randomelement"]}],
        },
    )

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        # Build the element and push the artifact
        result = cli.run(project=project, args=["build", "random.bst"])
        result.assert_success()
        assert result.get_pushed_elements() == ["random.bst"]
        assert cli.get_element_state(project, "random.bst") == "cached"

        # Now delete the artifact and ensure it is not in the cache
        result = cli.run(project=project, args=["artifact", "delete", "random.bst"])
        assert cli.get_element_state(project, "random.bst") != "cached"

        # Now rebuild the element. Reset config to disable pulling.
        cli.config = None
        result = cli.run(project=project, args=["build", "random.bst"])
        result.assert_success()
        assert cli.get_element_state(project, "random.bst") == "cached"

        # Push the new build
        cli.configure({"artifacts": {"url": share.repo, "push": True}})
        result = cli.run(project=project, args=["artifact", "push", "random.bst"])
        assert result.get_pushed_elements() == ["random.bst"]
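
`create_artifact_share` is a test utility used throughout these examples; the sketch below only captures its shape (the `ArtifactShare` class and its methods are assumptions, not the real implementation):

from contextlib import contextmanager

@contextmanager
def create_artifact_share(directory):
    # Spin up a local artifact server rooted at `directory` and yield a
    # handle which exposes at least:
    #   share.repo            - the URL that remotes are configured with
    #   share.get_artifact()  - look up an artifact ref on the server
    share = ArtifactShare(directory)  # hypothetical server class
    try:
        yield share
    finally:
        share.close()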
Example #5
def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"
    dep = "compose-all.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()
    assert cli.get_element_states(project, [element, dep], deps="none") == {
        element: "cached",
        dep: "cached",
    }

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Delete the artifact
    result = cli.run(project=project,
                     args=["artifact", "delete", artifact, dep])
    result.assert_success()

    # Check that the ARTIFACT is no longer in the cache
    assert not os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))

    # Check that the dependency ELEMENT is no longer cached
    assert cli.get_element_state(project, dep) != "cached"
Example #6
def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Set up the share
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:

        # First build the target (and its deps)
        result = cli.run(project=project, args=["build", "target.bst"])
        assert cli.get_element_state(project, "target.bst") == "cached"
        assert cli.get_element_state(project, "import-dev.bst") == "cached"

        # Now delete the artifact of a dependency and ensure it is not in the cache
        result = cli.run(project=project, args=["artifact", "delete", "import-dev.bst"])
        assert cli.get_element_state(project, "import-dev.bst") != "cached"

        # Configure bst to be able to push to the share
        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})

        # Now try to push the target with its deps using --on-error continue,
        # and assert that the push fails overall but that whatever could be
        # pushed was pushed
        result = cli.run(
            project=project, args=["--on-error=continue", "artifact", "push", "--deps", "all", "target.bst"]
        )

        # The overall process should return as failed
        result.assert_main_error(ErrorDomain.STREAM, None)

        # We should still have pushed what we could
        assert_shared(cli, share, project, "import-bin.bst")
        assert_shared(cli, share, project, "compose-all.bst")
        assert_shared(cli, share, project, "target.bst")

        assert_not_shared(cli, share, project, "import-dev.bst")

        assert "Push failed: import-dev.bst is not cached" in result.stderr
Example #7
def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    caplog.set_level(1)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        cli.configure({"artifacts": {"url": share.repo, "push": True}, "projects": {"test": {"strict": False}}})

        # First get us a build
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        # Now cause one of the dependencies to change its cache key
        #
        # Here we just add a file, causing the strong cache key of the
        # import-bin.bst element to change due to the local files it
        # imports changing.
        path = os.path.join(project, "files", "bin-files", "newfile")
        with open(path, "w") as f:
            f.write("PONY !")

        # Now build again after having changed the dependencies
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        # Now run `bst artifact push`.
        #
        # Optionally try it with --pull-buildtrees, since this causes
        # a pull queue to be added to the `push` command, the behavior
        # around this is different.
        args = []
        if buildtrees == "buildtrees":
            args += ["--pull-buildtrees"]
        args += ["artifact", "push", "--deps", "all", "target.bst"]
        result = cli.run(project=project, args=args)
        result.assert_success()
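
The `buildtrees` argument comes from pytest parametrization, which is not shown above; it presumably looks something like this (`DATA_DIR` assumed to point at the test project):

import pytest

@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("buildtrees", ["buildtrees", "normal"])
def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
    ...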
Example #8
def test_push_already_cached(caplog, cli, tmpdir, datafiles):
    project = str(datafiles)
    caplog.set_level(1)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project, args=["build", "target.bst"])

        result.assert_success()
        assert "SKIPPED Push" not in result.stderr

        result = cli.run(project=project,
                         args=["artifact", "push", "target.bst"])

        result.assert_success()
        assert not result.get_pushed_elements(), \
            "No elements should have been pushed since the cache was populated"
        assert "INFO    Remote ({}) already has ".format(share.repo) in result.stderr
        assert "SKIPPED Push" in result.stderr
Example #9
def test_custom_logging(cli, tmpdir, datafiles):
    project = str(datafiles)
    bin_files_path = os.path.join(project, "files", "bin-files")
    element_path = os.path.join(project, "elements")
    element_name = "fetch-test-git.bst"

    custom_log_format = "%{elapsed},%{elapsed-us},%{wallclock},%{wallclock-us},%{key},%{element},%{action},%{message}"
    user_config = {"logging": {"message-format": custom_log_format}}
    cli.configure(user_config)

    # Create our repo object of the given source type with
    # the bin files, and then collect the initial ref.
    #
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(bin_files_path)

    # Write out our test target
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # Now try to fetch it
    result = cli.run(project=project, args=["source", "fetch", element_name])
    result.assert_success()

    m = re.search(
        r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6}\s*,.*" r",SUCCESS,Checking sources",
        result.stderr,
    )
    assert m is not None
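
To make the regex concrete, here is an illustrative line (timestamps, cache key and element name invented) that the search above would match:

import re

pattern = (
    r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6}\s*,.*"
    r",SUCCESS,Checking sources"
)
sample = "00:00:01,00:00:01.000123,00:00:01,00:00:01.000123,e3b0c442,fetch-test-git.bst,fetch,SUCCESS,Checking sources"
assert re.search(pattern, sample) is not None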
Example #10
def test_push_after_pull(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Set up two artifact shares.
    with create_artifact_share(os.path.join(
            str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
                os.path.join(str(tmpdir), "artifactshare2")) as share2:

        # Set the scene: share1 has the artifact, share2 does not.
        #
        cli.configure(
            {"artifacts": {
                "servers": [
                    {
                        "url": share1.repo,
                        "push": True
                    },
                ]
            }})

        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        cli.remove_artifact_from_cache(project, "target.bst")

        assert_shared(cli, share1, project, "target.bst")
        assert_not_shared(cli, share2, project, "target.bst")
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Now run the build again. Correct `bst build` behaviour is to download the
        # artifact from share1 but not push it back again.
        #
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert "target.bst" in result.get_pulled_elements()
        assert "target.bst" not in result.get_pushed_elements()

        # Delete the artifact locally again.
        cli.remove_artifact_from_cache(project, "target.bst")

        # Now we add share2 into the mix as a second push remote. This time,
        # `bst build` should push to share2 after pulling from share1.
        cli.configure({
            "artifacts": {
                "servers": [
                    {
                        "url": share1.repo,
                        "push": True
                    },
                    {
                        "url": share2.repo,
                        "push": True
                    },
                ]
            }
        })
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert "target.bst" in result.get_pulled_elements()
        assert "target.bst" in result.get_pushed_elements()
Example #11
def test_push_cross_junction(cli, tmpdir, datafiles):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")

    generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)

    result = cli.run(project=project,
                     args=["build", "junction.bst:import-etc.bst"])
    result.assert_success()

    assert cli.get_element_state(project,
                                 "junction.bst:import-etc.bst") == "cached"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }],
            }})
        cli.run(project=project,
                args=["artifact", "push", "junction.bst:import-etc.bst"])

        cache_key = cli.get_element_key(project, "junction.bst:import-etc.bst")
        assert share.get_artifact(
            cli.get_artifact_name(project,
                                  "subtest",
                                  "import-etc.bst",
                                  cache_key=cache_key))
Example #12
def test_push_fails(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Set up the share
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure bst to be able to push to the share
        cli.configure(
            {"artifacts": {
                "servers": [
                    {
                        "url": share.repo,
                        "push": True
                    },
                ]
            }})

        # First ensure that the target is *NOT* cached
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Now try and push the target
        result = cli.run(project=project,
                         args=["artifact", "push", "target.bst"])
        result.assert_main_error(ErrorDomain.STREAM, None)

        assert "Push failed: target.bst is not cached" in result.stderr

        # Now ensure that deps are also not cached
        assert cli.get_element_state(project, "import-bin.bst") != "cached"
        assert cli.get_element_state(project, "import-dev.bst") != "cached"
        assert cli.get_element_state(project, "compose-all.bst") != "cached"
Example #13
def test_push_missing_source_after_build(cli, tmpdir, datafiles):
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)
    element_name = "import-bin.bst"

    res = cli.run(project=project_dir, args=["build", element_name])
    res.assert_success()

    # Delete source but keep artifact in cache
    shutil.rmtree(os.path.join(cache_dir, "elementsources"))
    shutil.rmtree(os.path.join(cache_dir, "source_protos"))

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        res = cli.run(project=project_dir,
                      args=["source", "push", element_name])
        res.assert_success()
        assert "fetch:{}".format(element_name) in res.stderr
        assert "Pushed source" in res.stderr
Example #14
def test_large_directory(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Number of files chosen to ensure the complete list of digests exceeds
    # our 1 MB gRPC message limit. I.e., test message splitting.
    MAX_MESSAGE_LENGTH = 1024 * 1024
    NUM_FILES = MAX_MESSAGE_LENGTH // 64 + 1

    large_directory_dir = os.path.join(project, "files", "large-directory")
    os.mkdir(large_directory_dir)
    for i in range(NUM_FILES):
        with open(os.path.join(large_directory_dir, str(i)), "w") as f:
            # The files need to have different content as we want different digests.
            f.write(str(i))

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure bst to push to the artifact share
        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True},]}})

        # Enforce 1 MB gRPC message limit
        with limit_grpc_message_length(MAX_MESSAGE_LENGTH):
            # Build and push
            result = cli.run(project=project, args=["build", "import-large-directory.bst"])
            result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project, "import-large-directory.bst") == "cached"

        # Assert that the push was successful
        assert_shared(cli, share, project, "import-large-directory.bst")
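
`limit_grpc_message_length` is a test utility; one way it could plausibly be implemented is by temporarily monkeypatching `grpc.insecure_channel` to cap the send size (a sketch under that assumption, not necessarily the real implementation):

import contextlib
import grpc

@contextlib.contextmanager
def limit_grpc_message_length(limit):
    # Force every insecure gRPC channel created inside the context to
    # refuse sending messages larger than `limit` bytes
    orig_insecure_channel = grpc.insecure_channel

    def new_insecure_channel(target):
        return orig_insecure_channel(target, options=(("grpc.max_send_message_length", limit),))

    grpc.insecure_channel = new_insecure_channel
    try:
        yield
    finally:
        grpc.insecure_channel = orig_insecure_channel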
Example #15
def test_strict_dependencies(cli, datafiles, target, expected_state):
    project = str(datafiles)

    # Configure non-strict mode; this affects both the build and the
    # `bst show` commands run via cli.get_element_states()
    cli.configure({"projects": {"test": {"strict": False}}})

    result = cli.run(project=project, silent=True, args=["build", target])
    result.assert_success()

    states = cli.get_element_states(project, ["base.bst", target])
    assert states["base.bst"] == "cached"
    assert states[target] == "cached"

    # Now modify the file, effectively causing the common base.bst
    # dependency to change its cache key
    hello_path = os.path.join(project, "files", "hello.txt")
    with open(hello_path, "w") as f:
        f.write("Goodbye")

    # Now assert that we have the states we expect as a result
    states = cli.get_element_states(project, ["base.bst", target])
    assert states["base.bst"] == "buildable"
    assert states[target] == expected_state
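
`target` and `expected_state` are parametrized; presumably one target depends strictly on base.bst and the other does not, along these lines (element names and states are assumptions consistent with the assertions above):

import pytest

@pytest.mark.datafiles(DATA_DIR)  # DATA_DIR assumed
@pytest.mark.parametrize(
    "target,expected_state",
    [
        # A strict dependent must be rebuilt when base.bst's key changes,
        # while a non-strict dependent remains cached
        ("strict-depends.bst", "waiting"),
        ("non-strict-depends.bst", "cached"),
    ],
)
def test_strict_dependencies(cli, datafiles, target, expected_state):
    ...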
Example #16
def test_push_pull(cli, datafiles, tmpdir):
    project_dir = str(datafiles)
    cache_dir = os.path.join(str(tmpdir), "cache")

    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"url": share.repo, "push": True,},
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        # create repo to pull from
        repo = create_repo("git", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "push.bst"
        element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        res = cli.run(project=project_dir, args=["build", "push.bst"])
        res.assert_success()

        # Remove the local cache dir and the repo files, then check it all still works
        shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)
        shutil.rmtree(repo.repo)

        # check that it pulls from the share
        res = cli.run(project=project_dir, args=["build", "push.bst"])
        res.assert_success()
Example #17
def test_push_artifact_glob(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:

        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
        assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Configure artifact share
        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        # Run bst artifact push with a wildcard.
        # This matches two artifact refs (weak and strong cache keys).
        result = cli.run(project=project, args=["artifact", "push", "test/target/*"])
        result.assert_success()
        assert len(result.get_pushed_elements()) == 2
Example #18
def test_push_fail(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    cache_dir = os.path.join(str(tmpdir), "cache")

    # set up config with remote that we'll take down
    with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
        remote = share.repo
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"url": share.repo, "push": True,},
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

    # create repo to pull from
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(project_dir, "files"))
    element_path = os.path.join(project_dir, "elements")
    element_name = "push.bst"
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # build and check that it fails to set up the remote
    res = cli.run(project=project_dir, args=["build", "push.bst"])
    res.assert_success()

    assert "Failed to initialize remote {}".format(remote) in res.stderr
    assert "Pushing" not in res.stderr
    assert "Pushed" not in res.stderr
Example #19
def test_build_remote_option(caplog, cli, tmpdir, datafiles):
    project = str(datafiles)
    caplog.set_level(1)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser, create_artifact_share(
        os.path.join(str(tmpdir), "artifactshare2")
    ) as shareproject, create_artifact_share(os.path.join(str(tmpdir), "artifactshare3")) as sharecli:

        # Add shareproject repo url to project.conf
        with open(os.path.join(project, "project.conf"), "a") as projconf:
            projconf.write("artifacts:\n  url: {}\n  push: True".format(shareproject.repo))

        # Configure shareuser remote in user conf
        cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})

        result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])

        # Artifacts should have only been pushed to sharecli, as that was provided via the cli
        result.assert_success()
        all_elements = ["target.bst", "import-bin.bst", "compose-all.bst"]
        for element_name in all_elements:
            assert element_name in result.get_pushed_elements()
            assert_shared(cli, sharecli, project, element_name)
            assert_not_shared(cli, shareuser, project, element_name)
            assert_not_shared(cli, shareproject, project, element_name)
Example #20
def test_source_push_build_fail(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    cache_dir = os.path.join(str(tmpdir), "cache")

    with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"url": share.repo, "push": True,},
            "cachedir": cache_dir,
        }
        cli.configure(user_config)

        repo = create_repo("git", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")

        element_name = "always-fail.bst"
        element = {"kind": "always_fail", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        res = cli.run(project=project_dir, args=["build", "always-fail.bst"])
        res.assert_main_error(ErrorDomain.STREAM, None)
        res.assert_task_error(ErrorDomain.ELEMENT, None)

        # Sources are not pushed as the build queue is before the source push
        # queue.
        assert "Pushed source " not in res.stderr
Example #21
def test_push_after_rebuild(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)

    generate_project(
        project,
        config={
            "element-path": "elements",
            "min-version": "2.0",
            "plugins": [{"origin": "local", "path": "plugins", "elements": ["randomelement"]}],
        },
    )

    # First build the element
    result = cli.run(project=project, args=["build", "random.bst"])
    result.assert_success()
    assert cli.get_element_state(project, "random.bst") == "cached"

    # Delete the artifact blobs but keep the artifact proto,
    # i.e., now we have an incomplete artifact
    casdir = os.path.join(cli.directory, "cas")
    shutil.rmtree(casdir)
    assert cli.get_element_state(project, "random.bst") != "cached"

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        # Now rebuild the element and push it
        result = cli.run(project=project, args=["build", "random.bst"])
        result.assert_success()
        assert result.get_pushed_elements() == ["random.bst"]
        assert cli.get_element_state(project, "random.bst") == "cached"
Example #22
def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Set up remote and local shares
    local_cache = os.path.join(str(tmpdir), "artifacts")
    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
        cli.configure({
            "artifacts": {
                "servers": [{
                    "url": remote.repo,
                    "push": True
                }]
            },
            "cachedir": local_cache,
        })

        # Build the element
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Make sure it's in the share
        assert remote.get_artifact(
            cli.get_artifact_name(project, "test", element))

        # Delete the artifact from the local cache
        result = cli.run(project=project, args=["artifact", "delete", element])
        result.assert_success()
        assert cli.get_element_state(project, element) != "cached"

        result = cli.run(project=project, args=["artifact", "show", element])
        result.assert_success()
        assert "available {}".format(element) in result.output
Example #23
def test_artifact_delete_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Delete the artifact
    result = cli.run(project=project, args=["artifact", "delete", artifact])
    result.assert_success()

    # Check that the ARTIFACT is no longer in the cache
    assert not os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))
Example #24
def test_default_target_push_pull(cli, tmpdir, datafiles):
    project = str(datafiles)
    project_path = os.path.join(project, "project.conf")
    target = "dummy_1.bst"

    # Set a default target
    project_conf = {
        "name": "test-default-target",
        "min-version": "2.0",
        "element-path": "elements",
        "defaults": {"targets": [target]},
    }
    _yaml.roundtrip_dump(project_conf, project_path)

    # Build the target
    result = cli.run(project=project, args=["build"])
    result.assert_success()
    assert cli.get_element_state(project, target) == "cached"

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Push the artifacts
        cli.configure({"artifacts": {"url": share.repo, "push": True}})
        result = cli.run(project=project, args=["artifact", "push"])
        result.assert_success()

        # Delete local artifacts
        # Note that `artifact delete` does not support default targets
        result = cli.run(project=project, args=["artifact", "delete", target])
        result.assert_success()

        # Target should be buildable now, and we should be able to pull it
        assert cli.get_element_state(project, target) == "buildable"
        result = cli.run(project=project, args=["artifact", "pull"])
        assert cli.get_element_state(project, target) == "cached"
Example #25
def test_non_strict_pull_build_strict_checkout(datafiles, cli, tmpdir):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    # Target with at least one (indirect) build-only dependency
    element_name = "target.bst"

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:

        cli.configure({"artifacts": {"servers": [{"url": share.repo}]}})

        # First build it in non-strict mode with an artifact server configured.
        # With this configuration BuildStream will attempt to pull the build-only
        # dependencies after attempting to pull the target element. This means
        # that the cache key calculation of the target element has to be deferred
        # until the pull attempt of the build-only dependencies, exercising a
        # different code path.
        # As this is a clean build from scratch, the result and also the cache keys
        # should be identical to a build in strict mode.
        result = cli.run(project=project, args=["--no-strict", "build", element_name])
        result.assert_success()

        # Now check it out in strict mode.
        # This verifies that the clean build in non-strict mode produced an artifact
        # matching the strict cache key.
        result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
        result.assert_success()

        # Check that the executable hello file is found in the checkout
        filename = os.path.join(checkout, "usr", "bin", "hello")
        assert os.path.exists(filename)
Example #26
def test_partial_checkout_fail(tmpdir, datafiles, cli):
    project = str(datafiles)
    build_elt = "import-bin.bst"
    checkout_dir = os.path.join(str(tmpdir), "checkout")

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        res = cli.run(project=project,
                      args=[
                          "artifact", "checkout", "--pull", build_elt,
                          "--directory", checkout_dir
                      ])
        res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
        assert re.findall(
            r"Remote \((\S+)\) does not have artifact (\S+) cached",
            res.stderr)
Example #27
def test_shell_use_uncached_buildtree(share_without_buildtrees, datafiles,
                                      cli):
    project = str(datafiles)
    element_name = "build-shell/buildtree.bst"

    cli.configure(
        {"artifacts": {
            "servers": [{
                "url": share_without_buildtrees.repo
            }]
        }})

    # Pull everything we would need
    maybe_pull_deps(cli, project, element_name, "all", True)

    # Run the shell without asking it to pull any buildtree, just asking to use a buildtree
    result = cli.run(project=project,
                     args=[
                         "shell", "--build", element_name, "--use-buildtree",
                         "--", "cat", "test"
                     ])

    # Sorry, a buildtree was never cached for this element
    result.assert_main_error(
        ErrorDomain.APP,
        "missing-buildtree-artifact-created-without-buildtree")
Example #28
def test_max_jobs(cli, datafiles, cli_value, config_value):
    project = str(datafiles)
    target = "target.bst"

    # Specify `--max-jobs` if this test sets it
    args = []
    if cli_value is not None:
        args += ["--max-jobs", cli_value]
    args += ["show", "--deps", "none", "--format", "%{vars}", target]

    # Specify `max-jobs` in user configuration if this test sets it
    if config_value is not None:
        cli.configure({"build": {"max-jobs": config_value}})

    result = cli.run(project=project, silent=True, args=args)
    result.assert_success()
    loaded = _yaml.load_data(result.output)
    loaded_value = loaded.get_int("max-jobs")

    # We expect the value provided on the command line to take
    # precedence over the configuration file value, if specified.
    #
    # If neither are specified then we expect the default
    expected_value = cli_value or config_value or "0"

    if expected_value == "0":
        # If we are expecting the automatic behavior of using the maximum
        # number of cores available, just check that it is a value > 0
        assert loaded_value > 0, "Automatic setting of max-jobs didn't work"
    else:
        # Check that we got the explicitly set value
        assert loaded_value == int(expected_value)
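
Here too, `cli_value` and `config_value` come from parametrization; a plausible matrix covering the precedence cases described in the comments:

import pytest

@pytest.mark.datafiles(DATA_DIR)  # DATA_DIR assumed
@pytest.mark.parametrize(
    "cli_value,config_value",
    [
        (None, None),  # neither set: expect the automatic default ("0")
        (None, "16"),  # only user configuration
        ("16", None),  # only the command line
        ("5", "16"),   # the command line should win over configuration
    ],
)
def test_max_jobs(cli, datafiles, cli_value, config_value):
    ...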
Example #29
def test_shell_pull_uncached_buildtree(share_without_buildtrees, datafiles,
                                       cli):
    project = str(datafiles)
    element_name = "build-shell/buildtree.bst"

    cli.configure({"artifacts": {"url": share_without_buildtrees.repo}})

    # Run the shell and request that required artifacts and buildtrees should be pulled
    result = cli.run(
        project=project,
        args=[
            "--pull-buildtrees",
            "shell",
            "--build",
            element_name,
            "--pull",
            "--use-buildtree",
            "--",
            "cat",
            "test",
        ],
    )

    # Sorry, a buildtree was never cached for this element
    result.assert_main_error(
        ErrorDomain.APP,
        "missing-buildtree-artifact-created-without-buildtree")
Example #30
def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
    project = str(datafiles)
    checkout_dir = os.path.join(str(tmpdir), "checkout")

    repo = create_repo("git", str(tmpdir))
    repo.create(os.path.join(str(datafiles), "files"))
    element_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    project_config = {
        "name": "partial-artifact-checkout-fetch",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)
    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_name = "input.bst"
    input_file = os.path.join(element_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        result = cli.run(project=project, args=["source", "track", input_name])
        result.assert_success()
        result = cli.run(project=project, args=["build", input_name])
        result.assert_success()

        # Delete a blob from the local CAS so that the artifact is incomplete
        # locally and staging it will require a fetch from the remote
        digest = utils.sha256sum(
            os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
        objpath = os.path.join(cli.directory, "cas", "objects", digest[:2],
                               digest[2:])
        os.unlink(objpath)

        # Verify that the build-only dependency is not (complete) in the local cache
        result = cli.run(project=project,
                         args=[
                             "artifact", "checkout", input_name, "--directory",
                             checkout_dir
                         ])
        result.assert_main_error(ErrorDomain.STREAM,
                                 "uncached-checkout-attempt")

        # Verify that the pull method fetches relevant artifacts in order to stage
        result = cli.run(project=project,
                         args=[
                             "artifact", "checkout", "--pull", input_name,
                             "--directory", checkout_dir
                         ])
        result.assert_success()

        # We should have pulled whatever was deleted previously
        assert input_name in result.get_pulled_elements()