Example #1
def test_no_default_with_junction(cli, datafiles):
    project = str(datafiles)
    junction_path = os.path.join(project, "elements", "junction.bst")
    target_path = os.path.join(project, "elements", "junction-target.bst")

    # First, create a junction element to refer to the subproject
    junction_config = {
        "kind": "junction",
        "sources": [{
            "kind": "local",
            "path": "files/sub-project",
        }],
    }
    _yaml.roundtrip_dump(junction_config, junction_path)

    # Then, create a stack element with a dependency on a cross-junction element
    target_config = {
        "kind": "stack",
        "depends": ["junction.bst:dummy_subproject.bst"]
    }
    _yaml.roundtrip_dump(target_config, target_path)

    # Now try to perform a build
    # This should automatically fetch the junction at load time.
    result = cli.run(project=project, args=["build"])
    result.assert_success()

    assert cli.get_element_state(
        project, "junction.bst:dummy_subproject.bst") == "cached"
    assert cli.get_element_state(project, "junction-target.bst") == "cached"
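
Note: these examples rely on BuildStream's test-suite Cli helper, whose
get_element_state() is essentially a thin wrapper over `bst show`. A minimal
sketch of the idea (the exact format string and options are assumptions):

def get_element_state(cli, project, element):
    # Query a single element's state via `bst show`, yielding strings such
    # as "cached", "buildable", "fetch needed", "waiting" or "no reference".
    result = cli.run(project=project, args=[
        "show", "--deps", "none", "--format", "%{state}", element,
    ])
    result.assert_success()
    return result.output.strip()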
Example #2
def test_default_target_fetch(cli, tmpdir, datafiles):
    project = str(datafiles)
    project_path = os.path.join(project, "project.conf")
    target = "track-fetch-test.bst"

    # First, create an element with trackable sources
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(project)
    element_conf = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element_conf, os.path.join(project, "elements",
                                                    target))

    # Then, make it the default target
    project_conf = {
        "name": "test-default-target",
        "min-version": "2.0",
        "element-path": "elements",
        "defaults": {
            "targets": [target]
        },
    }
    _yaml.roundtrip_dump(project_conf, project_path)

    # Setup finished. Fetch it now
    assert cli.get_element_state(project, target) == "fetch needed"
    result = cli.run(project=project, args=["source", "fetch"])
    result.assert_success()
    assert cli.get_element_state(project, target) == "buildable"
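
The create_repo() fixture abstracts over source kinds. For the "tar" kind used
here, its contract can be pictured roughly as below; this is a hedged sketch
(class name, tarball path and the use of buildstream.utils are assumptions),
not the real fixture:

import os
import tarfile

from buildstream import utils


class TarRepoSketch:
    def __init__(self, directory):
        self.tarpath = os.path.join(directory, "repo.tar.gz")

    def create(self, content_dir):
        # Pack a directory into a tarball and return a trackable ref
        # (here, the tarball's sha256 digest).
        with tarfile.open(self.tarpath, "w:gz") as tar:
            tar.add(content_dir, arcname=".")
        return utils.sha256sum(self.tarpath)

    def source_config(self, ref=None):
        # Return the source mapping an element can embed verbatim; omitting
        # the ref leaves the element in the "no reference" state.
        config = {"kind": "tar", "url": "file://" + self.tarpath}
        if ref is not None:
            config["ref"] = ref
        return config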
Example #3
def test_push_pull_specific_remote(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(
            str(tmpdir),
            "goodartifactshare")) as good_share, create_artifact_share(
                os.path.join(str(tmpdir), "badartifactshare")) as bad_share:

        # Build the target so we have it cached locally only.
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"

        # Configure the default push location to be bad_share; we will assert that
        # nothing actually gets pushed there.
        cli.configure({
            "artifacts": {
                "servers": [
                    {
                        "url": bad_share.repo,
                        "push": True
                    },
                ]
            }
        })

        # Now try `bst artifact push` to the good_share.
        result = cli.run(project=project,
                         args=[
                             "artifact", "push", "target.bst",
                             "--artifact-remote", good_share.repo
                         ])
        result.assert_success()

        # Assert that all the artifacts are in the share we pushed
        # to, and not the other.
        assert_shared(cli, good_share, project, "target.bst")
        assert_not_shared(cli, bad_share, project, "target.bst")

        # Now we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the good_share.
        #
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        result = cli.run(project=project,
                         args=[
                             "artifact", "pull", "target.bst",
                             "--artifact-remote", good_share.repo
                         ])
        result.assert_success()

        # And assert that it's again in the local cache, without having built
        assert cli.get_element_state(project, "target.bst") == "cached"
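
assert_shared() and assert_not_shared() are small helpers over the share
fixture. They plausibly look something like this (the project_name default is
an assumption matching the "test" project name used throughout):

def assert_shared(cli, share, project, element_name, *, project_name="test"):
    # The share should hold an artifact ref for the element's cache key.
    if not share.get_artifact(
            cli.get_artifact_name(project, project_name, element_name)):
        raise AssertionError("Artifact share at {} does not contain "
                             "the expected element {}".format(share.repo, element_name))


def assert_not_shared(cli, share, project, element_name, *, project_name="test"):
    if share.get_artifact(
            cli.get_artifact_name(project, project_name, element_name)):
        raise AssertionError("Artifact share at {} unexpectedly contains "
                             "the element {}".format(share.repo, element_name))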
Example #4
def test_artifact_delete_element(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Build the element and ensure it's cached
    result = cli.run(project=project, args=["build", element])
    result.assert_success()
    assert cli.get_element_state(project, element) == "cached"

    result = cli.run(project=project, args=["artifact", "delete", element])
    result.assert_success()
    assert cli.get_element_state(project, element) != "cached"
Example #5
def test_generate_key(cli, datafiles):
    project_dir = str(datafiles)

    # Check that we raise a proper error (rather than crash in
    # get_unique_key) when building an element that has not been tracked
    res = cli.run(project=project_dir, args=["build", "key-test.bst"])
    res.assert_main_error(ErrorDomain.PIPELINE, "inconsistent-pipeline")

    assert cli.get_element_state(project_dir, "key-test.bst") == "no reference"
    res = cli.run(project=project_dir, args=["source", "track", "key-test.bst"])
    res.assert_success()
    assert cli.get_element_state(project_dir, "key-test.bst") == "fetch needed"

    res = cli.run(project=project_dir, args=["build", "key-test.bst"])
    res.assert_success()
    assert cli.get_element_state(project_dir, "key-test.bst") == "cached"
Example #6
def test_pull_missing_local_blob(cli, tmpdir, datafiles):
    repo = create_repo("tar", str(tmpdir))
    repo.create(os.path.join(str(datafiles), "files"))
    element_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    project_config = {
        "name": "pull-missing-local-blob",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)
    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_name = "input.bst"
    input_file = os.path.join(element_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    depends_name = "depends.bst"
    depends_config = {"kind": "stack", "depends": [input_name]}
    depends_file = os.path.join(element_dir, depends_name)
    _yaml.roundtrip_dump(depends_config, depends_file)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the input element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        result = cli.run(project=project, args=["source", "track", input_name])
        result.assert_success()
        result = cli.run(project=project, args=["build", input_name])
        result.assert_success()
        assert cli.get_element_state(project, input_name) == "cached"

        # Delete a file blob from the local cache.
        # This is a placeholder to test partial CAS handling until we support
        # partial artifact pulling (or blob-based CAS expiry).
        #
        digest = utils.sha256sum(
            os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
        objpath = os.path.join(cli.directory, "cas", "objects", digest[:2],
                               digest[2:])
        os.unlink(objpath)

        # Now try bst build
        result = cli.run(project=project, args=["build", depends_name])
        result.assert_success()

        # Assert that the input artifact was pulled (completing the partial artifact)
        assert result.get_pulled_elements() == [input_name]
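
The blob deletion above leans on the CAS layout: objects are stored
content-addressed under cas/objects/<first two hex digits>/<remaining digits>.
utils.sha256sum() can be sketched as a streaming digest, assuming the real
helper is functionally equivalent:

import hashlib

def sha256sum(filename):
    # Stream the file in fixed-size chunks so large blobs are never
    # loaded into memory all at once.
    h = hashlib.sha256()
    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            h.update(chunk)
    return h.hexdigest()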
Example #7
def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Set up a remote share and a local cache directory
    local_cache = os.path.join(str(tmpdir), "artifacts")
    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
        cli.configure({
            "artifacts": {
                "servers": [{
                    "url": remote.repo,
                    "push": True
                }]
            },
            "cachedir": local_cache,
        })

        # Build the element
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Make sure it's in the share
        assert remote.get_artifact(
            cli.get_artifact_name(project, "test", element))

        # Delete the artifact from the local cache
        result = cli.run(project=project, args=["artifact", "delete", element])
        result.assert_success()
        assert cli.get_element_state(project, element) != "cached"

        result = cli.run(project=project, args=["artifact", "show", element])
        result.assert_success()
        assert "available {}".format(element) in result.output
Example #8
def test_source_checkout_fetch(datafiles, cli):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "source-checkout")
    target = "remote-import-dev.bst"
    target_path = os.path.join(project, "elements", target)

    # Create an element with remote source
    element = generate_remote_import_element(
        os.path.join(project, "files", "dev-files", "usr", "include",
                     "pony.h"), "pony.h")
    _yaml.roundtrip_dump(element, target_path)

    # Testing implicit fetching requires that we do not have the sources
    # cached already
    assert cli.get_element_state(project, target) == "fetch needed"

    args = ["source", "checkout"]
    args += [target, checkout]
    result = cli.run(
        project=project,
        args=["source", "checkout", "--directory", checkout, target])

    result.assert_success()
    assert os.path.exists(os.path.join(checkout, "remote-import-dev",
                                       "pony.h"))
Example #9
def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"
    dep = "compose-all.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()
    assert cli.get_element_states(project, [element, dep], deps="none") == {
        element: "cached",
        dep: "cached",
    }

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Delete the artifact
    result = cli.run(project=project,
                     args=["artifact", "delete", artifact, dep])
    result.assert_success()

    # Check that the ARTIFACT is no longer in the cache
    assert not os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Check that the dependency ELEMENT is no longer cached
    assert cli.get_element_state(project, dep) != "cached"
Example #10
def test_no_ref(cli, tmpdir, datafiles):
    project = str(datafiles)
    generate_project(project,
                     {"aliases": {
                         "tmpdir": "file:///" + str(tmpdir)
                     }})
    assert cli.get_element_state(project, "target.bst") == "no reference"
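
generate_project() is a convenience wrapper that writes a minimal project.conf
with per-test overrides merged in; roughly (default field values assumed):

def generate_project(project_dir, config=None):
    project_conf = {"name": "foo", "min-version": "2.0"}
    if config:
        project_conf.update(config)
    _yaml.roundtrip_dump(project_conf,
                         os.path.join(project_dir, "project.conf"))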
Example #11
def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    element_name, repo, ref = create_test_element(tmpdir, project_dir)
    cache_dir = os.path.join(str(tmpdir), "cache")

    # Use the artifact cache for sources for now; they should work the same
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        with context_with_source_cache(cli, cache_dir, share,
                                       tmpdir) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements([element_name])[0]
            element._query_source_cache()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            assert not share.get_artifact_proto(source._get_source_name())

            # Just check that we sensibly fetch and build the element
            res = cli.run(project=project_dir, args=["build", element_name])
            res.assert_success()

            assert os.listdir(
                os.path.join(str(tmpdir), "cache", "sources", "tar")) != []

            # get root digest of source
            sourcecache = context.sourcecache
            digest = sourcecache.export(source)._get_digest()

            # Push the source to the remote
            res = cli.run(project=project_dir,
                          args=[
                              "source", "push", "--source-remote", share.repo,
                              element_name
                          ])
            res.assert_success()

            # Remove the CAS content, keeping only the proto and other metadata around
            shutil.rmtree(
                os.path.join(str(tmpdir), "sourceshare", "repo", "cas",
                             "objects"))
            # check the share doesn't have the object
            assert not share.has_object(digest)

            # Delete the source locally
            shutil.rmtree(os.path.join(str(cache_dir), "sources"))
            shutil.rmtree(os.path.join(str(cache_dir), "cas"))
            state = cli.get_element_state(project_dir, element_name)
            assert state == "fetch needed"

            # Now fetch the source and check
            res = cli.run(project=project_dir,
                          args=["source", "fetch", element_name])
            res.assert_success()

            assert ("SUCCESS Fetching {}".format(
                repo.source_config(ref=ref)["url"])) in res.stderr
Example #12
def test_pull_secondary_cache(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(
            str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
                os.path.join(str(tmpdir), "artifactshare2")) as share2:

        # Build the target and push it to share2 only.
        cli.configure({
            "artifacts": {
                "servers": [
                    {
                        "url": share1.repo,
                        "push": False
                    },
                    {
                        "url": share2.repo,
                        "push": True
                    },
                ]
            }
        })
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        assert_not_shared(cli, share1, project, "target.bst")
        assert_shared(cli, share2, project, "target.bst")

        # Delete the user's local artifact cache.
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that the element is not cached anymore.
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Now try bst artifact pull
        result = cli.run(project=project,
                         args=["artifact", "pull", "target.bst"])
        result.assert_success()

        # And assert that it's again in the local cache, without having built,
        # i.e. we found it in share2.
        assert cli.get_element_state(project, "target.bst") == "cached"
Example #13
def test_push_pull_cross_junction(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        subproject_path = os.path.join(project, "files", "sub-project")
        junction_path = os.path.join(project, "elements", "junction.bst")

        generate_junction(tmpdir,
                          subproject_path,
                          junction_path,
                          store_ref=True)

        # First build the target element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project,
                         args=["build", "junction.bst:import-etc.bst"])
        result.assert_success()
        assert cli.get_element_state(project,
                                     "junction.bst:import-etc.bst") == "cached"

        cache_dir = os.path.join(project, "cache", "cas")
        shutil.rmtree(cache_dir)
        artifact_dir = os.path.join(project, "cache", "artifacts")
        shutil.rmtree(artifact_dir)

        assert cli.get_element_state(
            project, "junction.bst:import-etc.bst") == "buildable"

        # Now try bst artifact pull
        result = cli.run(
            project=project,
            args=["artifact", "pull", "junction.bst:import-etc.bst"])
        result.assert_success()

        # And assert that it's again in the local cache, without having built
        assert cli.get_element_state(project,
                                     "junction.bst:import-etc.bst") == "cached"
Example #14
def test_default_target_push_pull(cli, tmpdir, datafiles):
    project = str(datafiles)
    project_path = os.path.join(project, "project.conf")
    target = "dummy_1.bst"

    # Set a default target
    project_conf = {
        "name": "test-default-target",
        "min-version": "2.0",
        "element-path": "elements",
        "defaults": {
            "targets": [target]
        },
    }
    _yaml.roundtrip_dump(project_conf, project_path)

    # Build the target
    result = cli.run(project=project, args=["build"])
    result.assert_success()
    assert cli.get_element_state(project, target) == "cached"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Push the artifacts
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project, args=["artifact", "push"])
        result.assert_success()

        # Delete local artifacts
        # Note that `artifact delete` does not support default targets
        result = cli.run(project=project, args=["artifact", "delete", target])
        result.assert_success()

        # Target should be buildable now, and we should be able to pull it
        assert cli.get_element_state(project, target) == "buildable"
        result = cli.run(project=project, args=["artifact", "pull"])
        assert cli.get_element_state(project, target) == "cached"
Example #15
def test_build_remote_option(caplog, cli, tmpdir, datafiles):
    project = str(datafiles)
    caplog.set_level(1)

    with create_artifact_share(os.path.join(
            str(tmpdir),
            "artifactshare1")) as shareuser, create_artifact_share(
                os.path.join(str(tmpdir), "artifactshare2")) as sharecli:

        # Configure shareuser remote in user conf
        cli.configure({
            "artifacts": {
                "servers": [{
                    "url": shareuser.repo,
                    "push": True
                }]
            }
        })

        # Push the artifacts to the shareuser remote.
        # Assert that shareuser has the artifacts cached, but sharecli doesn't,
        # then delete the locally cached elements
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        all_elements = ["target.bst", "import-bin.bst", "compose-all.bst"]
        for element_name in all_elements:
            assert element_name in result.get_pushed_elements()
            assert_not_shared(cli, sharecli, project, element_name)
            assert_shared(cli, shareuser, project, element_name)
            cli.remove_artifact_from_cache(project, element_name)

        # Now check that building with sharecli specified on the command line
        # pulls nothing, as sharecli has nothing cached and shareuser is ignored.
        # The artifacts will instead be built and pushed to sharecli.
        result = cli.run(
            project=project,
            args=["build", "--artifact-remote", sharecli.repo, "target.bst"])
        result.assert_success()
        for element_name in all_elements:
            assert element_name not in result.get_pulled_elements()
            assert_shared(cli, sharecli, project, element_name)
            cli.remove_artifact_from_cache(project, element_name)

        # Now check that a clean build with sharecli specified on the command line
        # pulls artifacts only from it, as it was provided via the CLI and is populated
        result = cli.run(
            project=project,
            args=["build", "--artifact-remote", sharecli.repo, "target.bst"])
        result.assert_success()
        for element_name in all_elements:
            assert cli.get_element_state(project, element_name) == "cached"
            assert element_name in result.get_pulled_elements()
        assert shareuser.repo not in result.stderr
        assert sharecli.repo in result.stderr
Example #16
def test_track_with_comments(cli, datafiles):
    project = str(datafiles)
    generate_project(project,
                     {"aliases": {
                         "project-root": "file:///" + project
                     }})

    target = "comments.bst"

    # Assert that it needs to be tracked
    assert cli.get_element_state(project, target) == "no reference"

    # Track and fetch the sources
    result = cli.run(project=project, args=["source", "track", target])
    result.assert_success()
    result = cli.run(project=project, args=["source", "fetch", target])
    result.assert_success()

    # Assert that the sources are cached
    assert cli.get_element_state(project, target) == "buildable"
Example #17
def _test_pull_missing_blob(cli, project, index, storage):
    # First build the target element and push to the remote.
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()
    assert cli.get_element_state(project, "target.bst") == "cached"

    # Assert that everything is now cached in the remote.
    all_elements = [
        "target.bst", "import-bin.bst", "import-dev.bst", "compose-all.bst"
    ]
    for element_name in all_elements:
        project_name = "test"
        artifact_name = cli.get_artifact_name(project, project_name,
                                              element_name)
        artifact_proto = index.get_artifact_proto(artifact_name)
        assert artifact_proto
        assert storage.get_cas_files(artifact_proto)

    # Now we've pushed, delete the user's local artifact cache
    # directory and try to redownload it from the share
    #
    casdir = os.path.join(cli.directory, "cas")
    shutil.rmtree(casdir)
    artifactdir = os.path.join(cli.directory, "artifacts")
    shutil.rmtree(artifactdir)

    # Assert that nothing is cached locally anymore
    for element_name in all_elements:
        assert cli.get_element_state(project, element_name) != "cached"

    # Now delete blobs in the remote without deleting the artifact ref.
    # This simulates scenarios with concurrent artifact expiry.
    remote_objdir = os.path.join(storage.repodir, "cas", "objects")
    shutil.rmtree(remote_objdir)

    # Now try bst build
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()

    # Assert that no artifacts were pulled
    assert not result.get_pulled_elements()
Example #18
def test_artifact_delete_pulled_artifact_without_buildtree(
        cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Set up a remote share and a local cache directory
    local_cache = os.path.join(str(tmpdir), "artifacts")
    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
        cli.configure({
            "artifacts": {
                "servers": [{
                    "url": remote.repo,
                    "push": True
                }]
            },
            "cachedir": local_cache
        })

        # Build the element
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Make sure it's in the share
        assert remote.get_artifact(
            cli.get_artifact_name(project, "test", element))

        # Delete and then pull the artifact (without its buildtree)
        result = cli.run(project=project, args=["artifact", "delete", element])
        result.assert_success()
        assert cli.get_element_state(project, element) != "cached"
        result = cli.run(project=project, args=["artifact", "pull", element])
        result.assert_success()
        assert cli.get_element_state(project, element) == "cached"

        # Now delete it again (it should have been pulled without the buildtree,
        # but a digest of the buildtree is still pointed to in the artifact's metadata)
        result = cli.run(project=project, args=["artifact", "delete", element])
        result.assert_success()
        assert cli.get_element_state(project, element) != "cached"
Example #19
def test_artifact_delete_elements_build_deps(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Build the element and ensure it's cached
    result = cli.run(project=project, args=["build", element])
    result.assert_success()

    # Assert element and build deps are cached
    assert cli.get_element_state(project, element) == "cached"
    bdep_states = cli.get_element_states(project, [element], deps="build")
    for state in bdep_states.values():
        assert state == "cached"

    result = cli.run(project=project,
                     args=["artifact", "delete", "--deps", "build", element])
    result.assert_success()

    # Assert that the build deps have been deleted and that the artifact remains cached
    assert cli.get_element_state(project, element) == "cached"
    bdep_states = cli.get_element_states(project, [element], deps="build")
    for state in bdep_states.values():
        assert state != "cached"
Example #20
def open_cross_junction(cli, tmpdir):
    project = prepare_junction_project(cli, tmpdir)
    element = "sub.bst:data.bst"

    oldkey = cli.get_element_key(project, element)

    workspace = tmpdir.join("workspace")
    args = ["workspace", "open", "--directory", str(workspace), element]
    result = cli.run(project=project, args=args)
    result.assert_success()

    assert cli.get_element_state(project, element) == "buildable"
    assert os.path.exists(str(workspace.join("hello.txt")))
    assert cli.get_element_key(project, element) != oldkey

    return project, workspace
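
prepare_junction_project() has to set up two projects: a subproject providing
data.bst (which imports a hello.txt), and a main project whose sub.bst
junction points at a repo holding that subproject. A compressed sketch, with
the element and file names taken from the test above and everything else
assumed:

def prepare_junction_project(cli, tmpdir):
    main_dir, sub_dir = str(tmpdir.join("main")), str(tmpdir.join("sub"))
    os.makedirs(os.path.join(sub_dir, "import"))
    os.makedirs(main_dir)
    _yaml.roundtrip_dump({"name": "main", "min-version": "2.0"},
                         os.path.join(main_dir, "project.conf"))
    _yaml.roundtrip_dump({"name": "sub", "min-version": "2.0"},
                         os.path.join(sub_dir, "project.conf"))
    with open(os.path.join(sub_dir, "import", "hello.txt"), "w",
              encoding="utf-8") as f:
        f.write("hello!")
    _yaml.roundtrip_dump(
        {"kind": "import", "sources": [{"kind": "local", "path": "import"}]},
        os.path.join(sub_dir, "data.bst"))
    repo_dir = str(tmpdir.join("sub_repo"))
    os.makedirs(repo_dir)
    repo = create_repo("tar", repo_dir)
    ref = repo.create(sub_dir)
    _yaml.roundtrip_dump(
        {"kind": "junction", "sources": [repo.source_config(ref=ref)]},
        os.path.join(main_dir, "sub.bst"))
    return main_dir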
Example #21
def test_push_split(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # First build the project without the artifact cache configured
    result = cli.run(project=project_dir, args=["build", "target.bst"])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project_dir, "target.bst") == "cached"

    indexshare = os.path.join(str(tmpdir), "indexshare")
    storageshare = os.path.join(str(tmpdir), "storageshare")

    # Set up an artifact cache.
    with create_split_share(indexshare, storageshare) as (index, storage):
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "servers": [
                    {
                        "url": index.repo,
                        "push": True,
                        "type": "index"
                    },
                    {
                        "url": storage.repo,
                        "push": True,
                        "type": "storage"
                    },
                ],
            },
            "cachedir": rootcache_dir,
        }
        config_path = str(tmpdir.join("buildstream.conf"))
        _yaml.roundtrip_dump(user_config, file=config_path)

        element_key = _push(cli, rootcache_dir, project_dir, config_path,
                            "target.bst")
        proto = index.get_artifact_proto(
            cli.get_artifact_name(project_dir,
                                  "test",
                                  "target.bst",
                                  cache_key=element_key))
        assert storage.get_cas_files(proto) is not None
Example #22
def test_artifact_delete_unbuilt_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # delete it, just in case it's there
    _ = cli.run(project=project, args=["artifact", "delete", element])

    # Ensure the element is not cached
    assert cli.get_element_state(project, element) != "cached"

    # Now try to remove it again (now we know it's not there)
    result = cli.run(project=project, args=["artifact", "delete", element])

    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)
    expected_err = "WARNING Could not find ref '{}'".format(artifact)
    assert expected_err in result.stderr
Example #23
def test_build_checkout_junction(cli, tmpdir, datafiles):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")
    checkout = os.path.join(cli.directory, "checkout")

    # Create a repo to hold the subproject and generate a junction element for it
    generate_junction(tmpdir, subproject_path, junction_path)

    # Create a stack element to depend on a cross junction element
    #
    element = {
        "kind": "stack",
        "depends": [{
            "junction": "junction.bst",
            "filename": "import-etc.bst"
        }]
    }
    _yaml.roundtrip_dump(element, element_path)

    # Now try to build it; this should automatically result in fetching
    # the junction itself at load time.
    result = cli.run(project=project, args=["build", "junction-dep.bst"])
    result.assert_success()

    # Assert that it's cached now
    assert cli.get_element_state(project, "junction-dep.bst") == "cached"

    # Now check it out
    result = cli.run(project=project,
                     args=[
                         "artifact", "checkout", "junction-dep.bst",
                         "--directory", checkout
                     ])
    result.assert_success()

    # Assert the content of /etc/animal.conf
    filename = os.path.join(checkout, "etc", "animal.conf")
    assert os.path.exists(filename)
    with open(filename, "r", encoding="utf-8") as f:
        contents = f.read()
    assert contents == "animal=Pony\n"
Example #24
def test_large_directory(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Number of files chosen to ensure the complete list of digests exceeds
    # our 1 MB gRPC message limit. I.e., test message splitting.
    MAX_MESSAGE_LENGTH = 1024 * 1024
    NUM_FILES = MAX_MESSAGE_LENGTH // 64 + 1

    large_directory_dir = os.path.join(project, "files", "large-directory")
    os.mkdir(large_directory_dir)
    for i in range(NUM_FILES):
        with open(os.path.join(large_directory_dir, str(i)),
                  "w",
                  encoding="utf-8") as f:
            # The files need to have different content as we want different digests.
            f.write(str(i))

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure bst to push to the artifact share
        cli.configure(
            {"artifacts": {
                "servers": [
                    {
                        "url": share.repo,
                        "push": True
                    },
                ]
            }})

        # Enforce 1 MB gRPC message limit
        with limit_grpc_message_length(MAX_MESSAGE_LENGTH):
            # Build and push
            result = cli.run(project=project,
                             args=["build", "import-large-directory.bst"])
            result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project,
                                     "import-large-directory.bst") == "cached"

        # Assert that the push was successful
        assert_shared(cli, share, project, "import-large-directory.bst")
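
limit_grpc_message_length() is a fixture-style context manager. One way to
realize it, sketched under the assumption that the client opens insecure
channels via grpc.insecure_channel, is to patch channel creation so every
channel carries an explicit send-size limit:

from contextlib import contextmanager
from unittest import mock

import grpc


@contextmanager
def limit_grpc_message_length(max_message_length):
    orig_insecure_channel = grpc.insecure_channel

    def limited_channel(target, options=None, **kwargs):
        # Capping the outgoing message size forces the CAS client to
        # split large batch uploads into multiple messages.
        options = list(options or []) + [
            ("grpc.max_send_message_length", max_message_length)]
        return orig_insecure_channel(target, options=options, **kwargs)

    with mock.patch("grpc.insecure_channel", new=limited_channel):
        yield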
Example #25
def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(os.path.join(str(datafiles), "files"))
    elements_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    input_name = "input.bst"

    project_config = {
        "name": "filter-track-test",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)

    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }

    input_file = os.path.join(elements_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    filter1_config = {"kind": "filter", "depends": [{"filename": input_name, "type": "build"}]}
    filter1_file = os.path.join(elements_dir, "filter1.bst")
    _yaml.roundtrip_dump(filter1_config, filter1_file)

    filter2_config = {"kind": "filter", "depends": [{"filename": input_name, "type": "build"}]}
    filter2_file = os.path.join(elements_dir, "filter2.bst")
    _yaml.roundtrip_dump(filter2_config, filter2_file)

    # Assert that the input element needs to be tracked
    assert cli.get_element_state(project, input_name) == "no reference"

    # Now try to track it
    result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
    result.assert_success()

    # Now check that a ref field exists
    new_input = _yaml.load(input_file, shortname=None)
    source_node = new_input.get_sequence("sources").mapping_at(0)
    new_ref = source_node.get_str("ref")
    assert new_ref == ref
Example #26
def test_artifact_expires(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    # The artifact expiry logic relies on mtime changes, in real life second precision
    # should be enough for this to work almost all the time, but test cases happen very
    # quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip(
            "Filesystem does not support subsecond mtime precision: {}".format(
                project))

    cli.configure({"cache": {
        "quota": 10000000,
    }})

    # Create an element that uses almost the entire cache (an empty
    # ostree cache starts at about 10KiB, so we need a bit of a
    # buffer)
    create_element_size("target.bst", project, element_path, [], 6000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    assert cli.get_element_state(project, "target.bst") == "cached"

    # Our cache should now be almost full. Let's create another
    # artifact and see if we can cause buildstream to delete the old
    # one.
    create_element_size("target2.bst", project, element_path, [], 6000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project, ["target.bst", "target2.bst"])
    assert states["target.bst"] != "cached"
    assert states["target2.bst"] == "cached"
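
create_element_size() fabricates an import element whose local source is a
file of the requested size, which is what lets these expiry tests fill the
quota deterministically. Sketched under that assumption (file layout and
random content are illustrative):

def create_element_size(name, project_dir, elements_path, dependencies, size):
    # Write `size` bytes of random (incompressible) data for the element
    # to import, then generate the import element itself.
    files_dir = os.path.join(project_dir, "files", name)
    os.makedirs(files_dir, exist_ok=True)
    with open(os.path.join(files_dir, "data"), "wb") as f:
        f.write(os.urandom(size))
    element = {
        "kind": "import",
        "sources": [{"kind": "local", "path": os.path.join("files", name)}],
        "depends": dependencies,
    }
    _yaml.roundtrip_dump(element,
                         os.path.join(project_dir, elements_path, name))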
Example #27
def test_cleanup_first(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    cli.configure({"cache": {
        "quota": 10000000,
    }})

    # Create an element that uses almost the entire cache (an empty
    # ostree cache starts at about 10KiB, so we need a bit of a
    # buffer)
    create_element_size("target.bst", project, element_path, [], 8000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    assert cli.get_element_state(project, "target.bst") == "cached"

    # Now configure with a smaller quota, create a situation
    # where the cache must be cleaned up before building anything else.
    #
    # Fix the fetchers and builders just to ensure a predictable
    # sequence of events (although it does not affect this test)
    cli.configure({
        "cache": {
            "quota": 5000000,
        },
        "scheduler": {
            "fetchers": 1,
            "builders": 1
        },
    })

    # Our cache is now more than full; BuildStream must clean up
    # before it can cache target2.bst
    create_element_size("target2.bst", project, element_path, [], 4000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project, ["target.bst", "target2.bst"])
    assert states["target.bst"] != "cached"
    assert states["target2.bst"] == "cached"
Example #28
def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")

    configure_project(project, {"ref-storage": ref_storage})

    # Create a repo to hold the subproject and generate a junction element for it
    ref = generate_junction(tmpdir,
                            subproject_path,
                            junction_path,
                            store_ref=(ref_storage == "inline"))

    # Create a stack element to depend on a cross junction element
    #
    element = {
        "kind": "stack",
        "depends": [{
            "junction": "junction.bst",
            "filename": "import-etc.bst"
        }]
    }
    _yaml.roundtrip_dump(element, element_path)

    # Dump a junction.refs file if we're using project.refs storage
    #
    if ref_storage == "project.refs":
        project_refs = {"projects": {"test": {"junction.bst": [{"ref": ref}]}}}
        _yaml.roundtrip_dump(project_refs,
                             os.path.join(project, "junction.refs"))

    # Now try to build it; this should automatically result in fetching
    # the junction itself at load time.
    result = cli.run(project=project, args=["build", "junction-dep.bst"])
    result.assert_success()

    # Assert that it's cached now
    assert cli.get_element_state(project, "junction-dep.bst") == "cached"
Example #29
def test_junction_element(cli, tmpdir, datafiles, ref_storage):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")

    configure_project(project, {"ref-storage": ref_storage})

    # Create a repo to hold the subproject and generate a junction element for it
    generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)

    # Create a stack element to depend on a cross junction element
    #
    element = {
        "kind": "stack",
        "depends": [{
            "junction": "junction.bst",
            "filename": "import-etc.bst"
        }]
    }
    _yaml.roundtrip_dump(element, element_path)

    # First demonstrate that showing the pipeline yields an error
    result = cli.run(project=project, args=["show", "junction-dep.bst"])
    result.assert_main_error(ErrorDomain.LOAD,
                             LoadErrorReason.SUBPROJECT_INCONSISTENT)

    # Assert that we have the expected provenance encoded into the error
    element_node = _yaml.load(element_path, shortname="junction-dep.bst")
    ref_node = element_node.get_sequence("depends").mapping_at(0)
    provenance = ref_node.get_provenance()
    assert str(provenance) in result.stderr

    # Now track the junction itself
    result = cli.run(project=project, args=["source", "track", "junction.bst"])
    result.assert_success()

    # Now assert element state (via bst show under the hood) of the dep again
    assert cli.get_element_state(project, "junction-dep.bst") == "waiting"
Example #30
def test_push(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # First build the project without the artifact cache configured
    result = cli.run(project=project_dir, args=["build", "target.bst"])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project_dir, "target.bst") == "cached"

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        element_key = _push(cli, rootcache_dir, project_dir, user_config_file,
                            "target.bst")
        assert share.get_artifact(
            cli.get_artifact_name(project_dir,
                                  "test",
                                  "target.bst",
                                  cache_key=element_key))
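
_push() runs the push with an explicit user configuration and hands back the
element's cache key so callers can locate the artifact in the share.
Functionally it behaves like this simplified sketch (the real helper may
drive internal APIs instead of the CLI):

def _push(cli, rootcache_dir, project_dir, config_file, target):
    # Push the already-built target using the given user configuration,
    # then return its cache key for artifact-name lookups.
    result = cli.run(project=project_dir,
                     args=["--config", config_file, "artifact", "push", target])
    result.assert_success()
    return cli.get_element_key(project_dir, target)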