Example #1
def test_push_fails(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Set up the share
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure bst to be able to push to the share
        cli.configure(
            {"artifacts": {
                "servers": [
                    {
                        "url": share.repo,
                        "push": True
                    },
                ]
            }})

        # First ensure that the target is *NOT* cached
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Now try and push the target
        result = cli.run(project=project,
                         args=["artifact", "push", "target.bst"])
        result.assert_main_error(ErrorDomain.STREAM, None)

        assert "Push failed: target.bst is not cached" in result.stderr

        # Now ensure that deps are also not cached
        assert cli.get_element_state(project, "import-bin.bst") != "cached"
        assert cli.get_element_state(project, "import-dev.bst") != "cached"
        assert cli.get_element_state(project, "compose-all.bst") != "cached"
Example #2
def test_buildtree_pulled(cli, tmpdir, datafiles):
    project = str(datafiles)
    element_name = "build-shell/buildtree.bst"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Build the element to push it to cache
        cli.configure({"artifacts": {"url": share.repo, "push": True}})
        result = cli.run(
            project=project,
            args=["--cache-buildtrees", "always", "build", element_name])
        result.assert_success()
        assert cli.get_element_state(project, element_name) == "cached"

        # Discard the cache
        shutil.rmtree(os.path.join(str(tmpdir), "cache", "cas"))
        shutil.rmtree(os.path.join(str(tmpdir), "cache", "artifacts"))
        assert cli.get_element_state(project, element_name) != "cached"

        # Pull from cache, ensuring the cli option is set to pull the buildtree
        result = cli.run(project=project,
                         args=[
                             "--pull-buildtrees", "artifact", "pull", "--deps",
                             "all", element_name
                         ])
        result.assert_success()

        # Check it's using the cached build tree
        res = cli.run(project=project,
                      args=[
                          "shell", "--build", element_name, "--use-buildtree",
                          "always", "--", "cat", "test"
                      ])
        res.assert_success()
Example #3
def test_default_target_track(cli, tmpdir, datafiles):
    project = str(datafiles)
    project_path = os.path.join(project, "project.conf")
    target = "track-fetch-test.bst"

    # First, create an element with trackable sources
    repo = create_repo("git", str(tmpdir))
    repo.create(project)
    element_conf = {"kind": "import", "sources": [repo.source_config()]}
    _yaml.roundtrip_dump(element_conf, os.path.join(project, "elements", target))

    # Then, make it the default target
    project_conf = {
        "name": "test-default-target",
        "min-version": "2.0",
        "element-path": "elements",
        "defaults": {"targets": [target]},
    }
    _yaml.roundtrip_dump(project_conf, project_path)

    # Setup finished. Track it now
    assert cli.get_element_state(project, target) == "no reference"
    result = cli.run(project=project, args=["source", "track"])
    result.assert_success()
    # Tracking will result in fetching it automatically, so we expect the state
    # to be buildable.
    assert cli.get_element_state(project, target) == "buildable"
Example #4
def test_no_default_with_junction(cli, datafiles):
    project = str(datafiles)
    junction_path = os.path.join(project, "elements", "junction.bst")
    target_path = os.path.join(project, "elements", "junction-target.bst")

    # First, create a junction element to refer to the subproject
    junction_config = {
        "kind": "junction",
        "sources": [{
            "kind": "local",
            "path": "files/sub-project",
        }]
    }
    _yaml.roundtrip_dump(junction_config, junction_path)

    # Then, create a stack element with dependency on cross junction element
    target_config = {
        "kind": "stack",
        "depends": ["junction.bst:dummy_subproject.bst"]
    }
    _yaml.roundtrip_dump(target_config, target_path)

    # Now try to perform a build
    # This should automatically fetch the junction at load time.
    result = cli.run(project=project, args=["build"])
    result.assert_success()

    assert cli.get_element_state(
        project, "junction.bst:dummy_subproject.bst") == "cached"
    assert cli.get_element_state(project, "junction-target.bst") == "cached"
Example #5
def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
    checkout = os.path.join(cli.directory, "checkout")
    project = str(datafiles)
    dev_files_path = os.path.join(project, "files", "dev-files")
    element_path = os.path.join(project, "elements")
    element_name = "build-test-{}.bst".format(kind)

    # Create our repo object of the given source type with
    # the dev files, and then collect the initial ref.
    #
    repo = create_repo(kind, str(tmpdir))
    ref = repo.create(dev_files_path)

    # Write out our test target
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    assert cli.get_element_state(project, element_name) == "fetch needed"
    result = cli.run(project=project, args=strict_args(["build", element_name], strict))
    result.assert_success()
    assert cli.get_element_state(project, element_name) == "cached"

    # Now check it out
    result = cli.run(
        project=project, args=strict_args(["artifact", "checkout", element_name, "--directory", checkout], strict)
    )
    result.assert_success()

    # Check that the pony.h include from files/dev-files exists
    filename = os.path.join(checkout, "usr", "include", "pony.h")
    assert os.path.exists(filename)
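
This test is parametrized over strict mode through a strict_args() helper that is not shown here. A plausible sketch, assuming the `strict` parameter takes the values "strict" and "non-strict" and that non-strict mode is selected with the global --no-strict option:

def strict_args(args, strict):
    # Prepend the global --no-strict option for the non-strict variant of
    # the parametrized test; strict mode is BuildStream's default.
    if strict != "strict":
        return ["--no-strict", *args]
    return args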
Example #6
def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Set up the share
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:

        # First build the target (and its deps)
        result = cli.run(project=project, args=["build", "target.bst"])
        assert cli.get_element_state(project, "target.bst") == "cached"
        assert cli.get_element_state(project, "import-dev.bst") == "cached"

        # Now delete the artifact of a dependency and ensure it is not in the cache
        result = cli.run(project=project, args=["artifact", "delete", "import-dev.bst"])
        assert cli.get_element_state(project, "import-dev.bst") != "cached"

        # Configure bst to be able to push to the share
        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})

        # Now try and push the target with its deps using --on-error continue
        # and assert that push failed, but what could be pushed was pushed
        result = cli.run(
            project=project, args=["--on-error=continue", "artifact", "push", "--deps", "all", "target.bst"]
        )

        # The overall process should return as failed
        result.assert_main_error(ErrorDomain.STREAM, None)

        # We should still have pushed what we could
        assert_shared(cli, share, project, "import-bin.bst")
        assert_shared(cli, share, project, "compose-all.bst")
        assert_shared(cli, share, project, "target.bst")

        assert_not_shared(cli, share, project, "import-dev.bst")

        assert "Push failed: import-dev.bst is not cached" in result.stderr
Example #7
def test_fetch_gpg_verify(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)

    gpg_homedir = os.path.join(DATA_DIR, "gpghome")

    # Create the repo from 'repofiles' subdir
    repo = create_repo("ostree", str(tmpdir))
    ref = repo.create(
        os.path.join(project, "repofiles"),
        gpg_sign="FFFF54C070353B52D046DEB087FA0F41A6EFD9E9",
        gpg_homedir=gpg_homedir,
    )

    # Write out our test target
    ostreesource = repo.source_config(ref=ref, gpg_key="test.gpg")
    element = {"kind": "import", "sources": [ostreesource]}

    _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))

    # Assert that a fetch is needed
    assert cli.get_element_state(project, "target.bst") == "fetch needed"

    # Now try to fetch it
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_success()

    # Assert that we are now buildable because the source is
    # now cached.
    assert cli.get_element_state(project, "target.bst") == "buildable"
Example #8
def test_push_after_rebuild(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)

    generate_project(
        project,
        config={
            "element-path": "elements",
            "min-version": "2.0",
            "plugins": [{"origin": "local", "path": "plugins", "elements": ["randomelement"]}],
        },
    )

    # First build the element
    result = cli.run(project=project, args=["build", "random.bst"])
    result.assert_success()
    assert cli.get_element_state(project, "random.bst") == "cached"

    # Delete the artifact blobs but keep the artifact proto,
    # i.e., now we have an incomplete artifact
    casdir = os.path.join(cli.directory, "cas")
    shutil.rmtree(casdir)
    assert cli.get_element_state(project, "random.bst") != "cached"

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        # Now rebuild the element and push it
        result = cli.run(project=project, args=["build", "random.bst"])
        result.assert_success()
        assert result.get_pushed_elements() == ["random.bst"]
        assert cli.get_element_state(project, "random.bst") == "cached"
Example #9
def test_push_update_after_rebuild(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)

    generate_project(
        project,
        config={
            "element-path": "elements",
            "min-version": "2.0",
            "plugins": [{"origin": "local", "path": "plugins", "elements": ["randomelement"]}],
        },
    )

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        # Build the element and push the artifact
        result = cli.run(project=project, args=["build", "random.bst"])
        result.assert_success()
        assert result.get_pushed_elements() == ["random.bst"]
        assert cli.get_element_state(project, "random.bst") == "cached"

        # Now delete the artifact and ensure it is not in the cache
        result = cli.run(project=project, args=["artifact", "delete", "random.bst"])
        assert cli.get_element_state(project, "random.bst") != "cached"

        # Now rebuild the element. Reset config to disable pulling.
        cli.config = None
        result = cli.run(project=project, args=["build", "random.bst"])
        result.assert_success()
        assert cli.get_element_state(project, "random.bst") == "cached"

        # Push the new build
        cli.configure({"artifacts": {"url": share.repo, "push": True}})
        result = cli.run(project=project, args=["artifact", "push", "random.bst"])
        result.assert_success()
        assert result.get_pushed_elements() == ["random.bst"]
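
Examples #8 and #9 rely on a generate_project() helper to write project.conf from a dict. A minimal sketch, assuming it merges the caller's config over the mandatory project keys:

import os

from buildstream import _yaml


def generate_project(project_dir, config=None):
    # Compose project.conf from required defaults plus caller overrides;
    # "name" and "min-version" are mandatory in a BuildStream 2 project.
    project_conf = {"name": "test", "min-version": "2.0"}
    if config:
        project_conf.update(config)
    _yaml.roundtrip_dump(project_conf, os.path.join(project_dir, "project.conf"))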
Example #10
def test_default_target_push_pull(cli, tmpdir, datafiles):
    project = str(datafiles)
    project_path = os.path.join(project, "project.conf")
    target = "dummy_1.bst"

    # Set a default target
    project_conf = {
        "name": "test-default-target",
        "min-version": "2.0",
        "element-path": "elements",
        "defaults": {"targets": [target]},
    }
    _yaml.roundtrip_dump(project_conf, project_path)

    # Build the target
    result = cli.run(project=project, args=["build"])
    result.assert_success()
    assert cli.get_element_state(project, target) == "cached"

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Push the artifacts
        cli.configure({"artifacts": {"url": share.repo, "push": True}})
        result = cli.run(project=project, args=["artifact", "push"])
        result.assert_success()

        # Delete local artifacts
        # Note that `artifact delete` does not support default targets
        result = cli.run(project=project, args=["artifact", "delete", target])
        result.assert_success()

        # Target should be buildable now, and we should be able to pull it
        assert cli.get_element_state(project, target) == "buildable"
        result = cli.run(project=project, args=["artifact", "pull"])
        assert cli.get_element_state(project, target) == "cached"
Example #11
def test_push_pull_non_strict(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # First build the target element and push to the remote.
        cli.configure({
            "artifacts": {
                "url": share.repo,
                "push": True
            },
            "projects": {
                "test": {
                    "strict": False
                }
            }
        })
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert cli.get_element_state(project, "target.bst") == "cached"

        # Assert that everything is now cached in the remote.
        all_elements = [
            "target.bst", "import-bin.bst", "import-dev.bst", "compose-all.bst"
        ]
        for element_name in all_elements:
            assert_shared(cli, share, project, element_name)

        # Now we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the share
        #
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that nothing is cached locally anymore
        for element_name in all_elements:
            assert cli.get_element_state(project, element_name) != "cached"

        # Add a file to force change in strict cache key of import-bin.bst
        with open(
                os.path.join(str(project), "files", "bin-files", "usr", "bin",
                             "world"), "w") as f:
            f.write("world")

        # Assert that the modified element requires a rebuild
        assert cli.get_element_state(project, "import-bin.bst") == "buildable"
        # Assert that the target is still waiting due to --no-strict
        assert cli.get_element_state(project, "target.bst") == "waiting"

        # Now try bst artifact pull
        result = cli.run(
            project=project,
            args=["artifact", "pull", "--deps", "all", "target.bst"])
        result.assert_success()

        # And assert that the target is again in the local cache, without having built
        assert cli.get_element_state(project, "target.bst") == "cached"
Example #12
def test_push_pull_specific_remote(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(
            str(tmpdir),
            "goodartifactshare")) as good_share, create_artifact_share(
                os.path.join(str(tmpdir), "badartifactshare")) as bad_share:

        # Build the target so we have it cached locally only.
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"

        # Configure the default push location to be bad_share; we will assert that
        # nothing actually gets pushed there.
        cli.configure({
            "artifacts": {
                "servers": [
                    {
                        "url": bad_share.repo,
                        "push": True
                    },
                ]
            }
        })

        # Now try `bst artifact push` to the good_share.
        result = cli.run(project=project,
                         args=[
                             "artifact", "push", "target.bst",
                             "--artifact-remote", good_share.repo
                         ])
        result.assert_success()

        # Assert that all the artifacts are in the share we pushed
        # to, and not the other.
        assert_shared(cli, good_share, project, "target.bst")
        assert_not_shared(cli, bad_share, project, "target.bst")

        # Now we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the good_share.
        #
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        result = cli.run(project=project,
                         args=[
                             "artifact", "pull", "target.bst",
                             "--artifact-remote", good_share.repo
                         ])
        result.assert_success()

        # And assert that it's again in the local cache, without having built
        assert cli.get_element_state(project, "target.bst") == "cached"
Example #13
def test_pull_buildtree_pulled(cli, tmpdir, datafiles):
    project = str(datafiles)
    element_name = "build-shell/buildtree.bst"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Build the element to push it to cache
        cli.configure({"artifacts": {"url": share.repo, "push": True}})
        result = cli.run(
            project=project,
            args=["--cache-buildtrees", "always", "build", element_name])
        result.assert_success()
        assert cli.get_element_state(project, element_name) == "cached"

        # Discard the cache
        shutil.rmtree(os.path.join(str(tmpdir), "cache", "cas"))
        shutil.rmtree(os.path.join(str(tmpdir), "cache", "artifacts"))
        assert cli.get_element_state(project, element_name) != "cached"

        # Check it's not using the cached build tree, because --pull
        # and pull-buildtrees were not both set
        res = cli.run(
            project=project,
            args=[
                "shell",
                "--build",
                element_name,
                "--pull",
                "--use-buildtree",
                "always",
                "--",
                "cat",
                "test",
            ],
        )
        res.assert_main_error(ErrorDomain.APP, None)
        assert "Artifact not cached locally. Can be retried with --pull and pull-buildtrees configured" in res.stderr

        # Check it's using the cached build tree, because --pull
        # and pull-buildtrees were both set
        res = cli.run(
            project=project,
            args=[
                "--pull-buildtrees",
                "shell",
                "--build",
                element_name,
                "--pull",
                "--use-buildtree",
                "always",
                "--",
                "cat",
                "test",
            ],
        )
        res.assert_success()
        assert "Hi" in res.output
Example #14
def test_artifact_delete_element(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Build the element and ensure it's cached
    result = cli.run(project=project, args=["build", element])
    result.assert_success()
    assert cli.get_element_state(project, element) == "cached"

    result = cli.run(project=project, args=["artifact", "delete", element])
    result.assert_success()
    assert cli.get_element_state(project, element) != "cached"
Example #15
def test_generate_key(cli, datafiles):
    project_dir = str(datafiles)

    # check that we don't fail if not tracking due to get_unique_key
    res = cli.run(project=project_dir, args=["build", "key-test.bst"])
    res.assert_main_error(ErrorDomain.PIPELINE, "inconsistent-pipeline")

    assert cli.get_element_state(project_dir, "key-test.bst") == "no reference"
    res = cli.run(project=project_dir, args=["source", "track", "key-test.bst"])
    res.assert_success()
    assert cli.get_element_state(project_dir, "key-test.bst") == "fetch needed"

    res = cli.run(project=project_dir, args=["build", "key-test.bst"])
    res.assert_success()
    assert cli.get_element_state(project_dir, "key-test.bst") == "cached"
Example #16
def test_build_junction_short_notation_filename(cli, tmpdir, datafiles):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")
    checkout = os.path.join(cli.directory, "checkout")

    # Create a repo to hold the subproject and generate a junction element for it
    generate_junction(tmpdir, subproject_path, junction_path)

    # Create a stack element to depend on a cross junction element, using
    # colon (:) as the separator
    element = {"kind": "stack", "depends": [{"filename": "junction.bst:import-etc.bst"}]}
    _yaml.roundtrip_dump(element, element_path)

    # Now try to build it, this should automatically result in fetching
    # the junction itself at load time.
    result = cli.run(project=project, args=["build", "junction-dep.bst"])
    result.assert_success()

    # Assert that it's cached now
    assert cli.get_element_state(project, "junction-dep.bst") == "cached"

    # Now check it out
    result = cli.run(project=project, args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout])
    result.assert_success()

    # Assert the content of /etc/animal.conf
    filename = os.path.join(checkout, "etc", "animal.conf")
    assert os.path.exists(filename)
    with open(filename, "r", encoding="utf-8") as f:
        contents = f.read()
    assert contents == "animal=Pony\n"
Example #17
def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")

    configure_project(project, {"ref-storage": ref_storage})

    # Create a repo to hold the subproject and generate a junction element for it
    ref = generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline"))

    # Create a stack element to depend on a cross junction element
    #
    element = {"kind": "stack", "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}]}
    _yaml.roundtrip_dump(element, element_path)

    # Dump a project.refs if we're using project.refs storage
    #
    if ref_storage == "project.refs":
        project_refs = {"projects": {"test": {"junction.bst": [{"ref": ref}]}}}
        _yaml.roundtrip_dump(project_refs, os.path.join(project, "junction.refs"))

    # Now try to build it, this should automatically result in fetching
    # the junction itself at load time.
    result = cli.run(project=project, args=["build", "junction-dep.bst"])
    result.assert_success()

    # Assert that it's cached now
    assert cli.get_element_state(project, "junction-dep.bst") == "cached"
Example #18
def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"
    dep = "compose-all.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()
    assert cli.get_element_states(project, [element, dep], deps="none") == {
        element: "cached",
        dep: "cached",
    }

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Delete the artifact
    result = cli.run(project=project,
                     args=["artifact", "delete", artifact, dep])
    result.assert_success()

    # Check that the ARTIFACT is no longer in the cache
    assert not os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Check that the dependency ELEMENT is no longer cached
    assert cli.get_element_state(project, dep) != "cached"
Example #19
def test_artifact_expires(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    cli.configure({"cache": {
        "quota": 10000000,
    }})

    # Create an element that uses almost the entire cache (an empty
    # ostree cache starts at about 10KiB, so we need a bit of a
    # buffer)
    create_element_size("target.bst", project, element_path, [], 6000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    assert cli.get_element_state(project, "target.bst") == "cached"

    # Our cache should now be almost full. Let's create another
    # artifact and see if we can cause buildstream to delete the old
    # one.
    create_element_size("target2.bst", project, element_path, [], 6000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project, ["target.bst", "target2.bst"])
    assert states["target.bst"] != "cached"
    assert states["target2.bst"] == "cached"
Example #20
def test_no_ref(cli, tmpdir, datafiles):
    project = str(datafiles)
    generate_project(project,
                     config={"aliases": {
                         "tmpdir": "file:///" + str(tmpdir)
                     }})
    assert cli.get_element_state(project, "target.bst") == "no reference"
Example #21
def test_pull_missing_local_blob(cli, tmpdir, datafiles):
    repo = create_repo("git", str(tmpdir))
    repo.create(os.path.join(str(datafiles), "files"))
    element_dir = os.path.join(str(tmpdir), "elements")
    os.makedirs(element_dir, exist_ok=True)
    project = str(tmpdir)
    project_config = {
        "name": "pull-missing-local-blob",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)
    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_name = "input.bst"
    input_file = os.path.join(element_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    depends_name = "depends.bst"
    depends_config = {"kind": "stack", "depends": [input_name]}
    depends_file = os.path.join(element_dir, depends_name)
    _yaml.roundtrip_dump(depends_config, depends_file)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the input.bst element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        result = cli.run(project=project, args=["source", "track", input_name])
        result.assert_success()
        result = cli.run(project=project, args=["build", input_name])
        result.assert_success()
        assert cli.get_element_state(project, input_name) == "cached"

        # Delete a file blob from the local cache.
        # This is a placeholder to test partial CAS handling until we support
        # partial artifact pulling (or blob-based CAS expiry).
        #
        digest = utils.sha256sum(
            os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
        objpath = os.path.join(cli.directory, "cas", "objects", digest[:2],
                               digest[2:])
        os.unlink(objpath)

        # Now try bst build
        result = cli.run(project=project, args=["build", depends_name])
        result.assert_success()

        # Assert that the input.bst artifact was pulled (completing the partial artifact)
        assert result.get_pulled_elements() == [input_name]
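
The blob deletion above depends on the CAS on-disk layout: objects are content-addressed by SHA-256, with the first two hex characters as a fan-out directory and the rest as the file name. The same mapping, factored into a helper:

import hashlib
import os


def cas_object_path(casdir, data):
    # Mirrors the objpath construction in the test above: a blob's location
    # is derived purely from the SHA-256 digest of its content.
    digest = hashlib.sha256(data).hexdigest()
    return os.path.join(casdir, "objects", digest[:2], digest[2:])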
Example #22
def test_push(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # First build the project without the artifact cache configured
    result = cli.run(project=project_dir, args=["build", "target.bst"])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project_dir, "target.bst") == "cached"

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        element_key = _push(cli, rootcache_dir, project_dir, user_config_file, "target.bst")
        assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst", cache_key=element_key))
Example #23
def test_large_directory(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Number of files chosen to ensure the complete list of digests exceeds
    # our 1 MB gRPC message limit. I.e., test message splitting.
    MAX_MESSAGE_LENGTH = 1024 * 1024
    NUM_FILES = MAX_MESSAGE_LENGTH // 64 + 1

    large_directory_dir = os.path.join(project, "files", "large-directory")
    os.mkdir(large_directory_dir)
    for i in range(NUM_FILES):
        with open(os.path.join(large_directory_dir, str(i)), "w") as f:
            # The files need to have different content as we want different digests.
            f.write(str(i))

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure bst to push to the artifact share
        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True},]}})

        # Enforce 1 MB gRPC message limit
        with limit_grpc_message_length(MAX_MESSAGE_LENGTH):
            # Build and push
            result = cli.run(project=project, args=["build", "import-large-directory.bst"])
            result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project, "import-large-directory.bst") == "cached"

        # Assert that the push was successful
        assert_shared(cli, share, project, "import-large-directory.bst")
Example #24
def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Set up remote and local shares
    local_cache = os.path.join(str(tmpdir), "artifacts")
    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
        cli.configure({
            "artifacts": {
                "servers": [{
                    "url": remote.repo,
                    "push": True
                }]
            },
            "cachedir": local_cache,
        })

        # Build the element
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Make sure it's in the share
        assert remote.get_artifact(
            cli.get_artifact_name(project, "test", element))

        # Delete the artifact from the local cache
        result = cli.run(project=project, args=["artifact", "delete", element])
        result.assert_success()
        assert cli.get_element_state(project, element) != "cached"

        result = cli.run(project=project, args=["artifact", "show", element])
        result.assert_success()
        assert "available {}".format(element) in result.output
Example #25
def test_push_cross_junction(cli, tmpdir, datafiles):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")

    generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)

    result = cli.run(project=project,
                     args=["build", "junction.bst:import-etc.bst"])
    result.assert_success()

    assert cli.get_element_state(project,
                                 "junction.bst:import-etc.bst") == "cached"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }],
            }})
        cli.run(project=project,
                args=["artifact", "push", "junction.bst:import-etc.bst"])

        cache_key = cli.get_element_key(project, "junction.bst:import-etc.bst")
        assert share.get_artifact(
            cli.get_artifact_name(project,
                                  "subtest",
                                  "import-etc.bst",
                                  cache_key=cache_key))
Example #26
def test_cleanup_first(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    cli.configure({"cache": {"quota": 10000000,}})

    # Create an element that uses almost the entire cache (an empty
    # ostree cache starts at about 10KiB, so we need a bit of a
    # buffer)
    create_element_size("target.bst", project, element_path, [], 8000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    assert cli.get_element_state(project, "target.bst") == "cached"

    # Now configure with a smaller quota, create a situation
    # where the cache must be cleaned up before building anything else.
    #
    # Fix the fetchers and builders just to ensure a predictable
    # sequence of events (although it does not affect this test)
    cli.configure({"cache": {"quota": 5000000,}, "scheduler": {"fetchers": 1, "builders": 1}})

    # Our cache is now more than full; BuildStream will have to clean up
    # older artifacts before it can build target2.bst
    create_element_size("target2.bst", project, element_path, [], 4000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project, ["target.bst", "target2.bst"])
    assert states["target.bst"] != "cached"
    assert states["target2.bst"] == "cached"
Example #27
def test_source_checkout_fetch(datafiles, cli):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "source-checkout")
    target = "remote-import-dev.bst"
    target_path = os.path.join(project, "elements", target)

    # Create an element with remote source
    element = generate_remote_import_element(
        os.path.join(project, "files", "dev-files", "usr", "include",
                     "pony.h"), "pony.h")
    _yaml.roundtrip_dump(element, target_path)

    # Testing implicit fetching requires that we do not have the sources
    # cached already
    assert cli.get_element_state(project, target) == "fetch needed"

    args = ["source", "checkout"]
    args += [target, checkout]
    result = cli.run(
        project=project,
        args=["source", "checkout", "--directory", checkout, target])

    result.assert_success()
    assert os.path.exists(os.path.join(checkout, "remote-import-dev",
                                       "pony.h"))
Example #28
def test_push_split(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # First build the project without the artifact cache configured
    result = cli.run(project=project_dir, args=["build", "target.bst"])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project_dir, "target.bst") == "cached"

    indexshare = os.path.join(str(tmpdir), "indexshare")
    storageshare = os.path.join(str(tmpdir), "storageshare")

    # Set up an artifact cache.
    with create_split_share(indexshare, storageshare) as (index, storage):
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {
                "servers": [
                    {"url": index.repo, "push": True, "type": "index"},
                    {"url": storage.repo, "push": True, "type": "storage"},
                ],
            },
            "cachedir": rootcache_dir,
        }
        config_path = str(tmpdir.join("buildstream.conf"))
        _yaml.roundtrip_dump(user_config, file=config_path)

        element_key = _push(cli, rootcache_dir, project_dir, config_path, "target.bst")
        proto = index.get_artifact_proto(
            cli.get_artifact_name(project_dir, "test", "target.bst", cache_key=element_key)
        )
        assert storage.get_cas_files(proto) is not None
Example #29
def test_dummy_sandbox_fallback(cli, datafiles, tmp_path):
    # Create symlink to buildbox-casd to work with custom PATH
    buildbox_casd = tmp_path.joinpath("bin/buildbox-casd")
    buildbox_casd.parent.mkdir()
    os.symlink(utils.get_host_tool("buildbox-casd"), str(buildbox_casd))

    project = str(datafiles)
    element_path = os.path.join(project, "elements", "element.bst")

    # Write out our test target
    element = {
        "kind": "script",
        "depends": [{"filename": "base.bst", "type": "build",},],
        "config": {"commands": ["true",],},
    }
    _yaml.roundtrip_dump(element, element_path)

    # Build without access to host tools, this will fail
    result = cli.run(project=project, args=["build", "element.bst"], env={"PATH": str(tmp_path.joinpath("bin"))},)
    # But if we don't specify a sandbox then we fall back to the dummy
    # sandbox. We still fail early, but only once we know we need a fancy
    # sandbox and that the dummy is not enough; therefore the element gets
    # fetched and so is buildable.

    result.assert_task_error(ErrorDomain.SANDBOX, "unavailable-local-sandbox")
    assert cli.get_element_state(project, "element.bst") == "buildable"
Example #30
def test_push_after_pull(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Set up two artifact shares.
    with create_artifact_share(os.path.join(
            str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
                os.path.join(str(tmpdir), "artifactshare2")) as share2:

        # Set the scene: share1 has the artifact, share2 does not.
        #
        cli.configure(
            {"artifacts": {
                "servers": [
                    {
                        "url": share1.repo,
                        "push": True
                    },
                ]
            }})

        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        cli.remove_artifact_from_cache(project, "target.bst")

        assert_shared(cli, share1, project, "target.bst")
        assert_not_shared(cli, share2, project, "target.bst")
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Now run the build again. Correct `bst build` behaviour is to download the
        # artifact from share1 but not push it back again.
        #
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert "target.bst" in result.get_pulled_elements()
        assert "target.bst" not in result.get_pushed_elements()

        # Delete the artifact locally again.
        cli.remove_artifact_from_cache(project, "target.bst")

        # Now we add share2 into the mix as a second push remote. This time,
        # `bst build` should push to share2 after pulling from share1.
        cli.configure({
            "artifacts": {
                "servers": [
                    {
                        "url": share1.repo,
                        "push": True
                    },
                    {
                        "url": share2.repo,
                        "push": True
                    },
                ]
            }
        })
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert "target.bst" in result.get_pulled_elements()
        assert "target.bst" in result.get_pushed_elements()