Example #1
def test_push_missing_source_after_build(cli, tmpdir, datafiles):
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)
    element_name = "import-bin.bst"

    res = cli.run(project=project_dir, args=["build", element_name])
    res.assert_success()

    # Delete source but keep artifact in cache
    shutil.rmtree(os.path.join(cache_dir, "elementsources"))
    shutil.rmtree(os.path.join(cache_dir, "source_protos"))

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        res = cli.run(project=project_dir,
                      args=["source", "push", element_name])
        res.assert_success()
        assert "fetch:{}".format(element_name) in res.stderr
        assert "Pushed source" in res.stderr
Example #2
def test_artifact_delete_artifact(cli, tmpdir, datafiles, with_project):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Delete the project.conf if we're going to try this without a project
    if not with_project:
        os.remove(os.path.join(project, "project.conf"))

    # Delete the artifact
    result = cli.run(project=project, args=["artifact", "delete", artifact])
    result.assert_success()

    # Check that the ARTIFACT is no longer in the cache
    assert not os.path.exists(
        os.path.join(local_cache, "cas", "refs", "heads", artifact))
Example #3
def test_strict_dependencies(cli, datafiles, target, expected_state):
    project = str(datafiles)

    # Configure non-strict mode; this will have
    # an effect on the build and the `bst show`
    # commands run via cli.get_element_states()
    cli.configure({"projects": {"test": {"strict": False}}})

    result = cli.run(project=project, silent=True, args=["build", target])
    result.assert_success()

    states = cli.get_element_states(project, ["base.bst", target])
    assert states["base.bst"] == "cached"
    assert states[target] == "cached"

    # Now modify the file, effectively causing the common base.bst
    # dependency to change its cache key
    hello_path = os.path.join(project, "files", "hello.txt")
    with open(hello_path, "w", encoding="utf-8") as f:
        f.write("Goodbye")

    # Now assert that we have the states we expect as a result
    states = cli.get_element_states(project, ["base.bst", target])
    assert states["base.bst"] == "buildable"
    assert states[target] == expected_state
Example #4
def test_shell_pull_cached_buildtree(share_with_buildtrees, datafiles, cli,
                                     pull_deps, pull_buildtree):
    project = str(datafiles)
    element_name = "build-shell/buildtree.bst"

    cli.configure(
        {"artifacts": {
            "servers": [{
                "url": share_with_buildtrees.repo
            }]
        }})

    # Optionally pull the buildtree along with `bst artifact pull`
    maybe_pull_deps(cli, project, element_name, pull_deps, pull_buildtree)

    # Run the shell and request that required artifacts and buildtrees should be pulled
    result = cli.run(
        project=project,
        args=[
            "--pull-buildtrees",
            "shell",
            "--build",
            element_name,
            "--use-buildtree",
            "--",
            "cat",
            "test",
        ],
    )

    # In this case, we should succeed every time, regardless of what was
    # originally available in the local cache.
    #
    result.assert_success()
    assert "Hi" in result.output
Example #5
def test_unfetched_junction(cli, tmpdir, datafiles, strict, ref_storage):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")

    configure_project(project, {"ref-storage": ref_storage})
    cli.configure({"projects": {"test": {"strict": strict}}})

    # Create a repo to hold the subproject and generate a junction element for it
    ref = generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline"))

    # Create a stack element to depend on a cross junction element
    #
    element = {"kind": "stack", "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}]}
    _yaml.roundtrip_dump(element, element_path)

    # Dump a project.refs if we're using project.refs storage
    #
    if ref_storage == "project.refs":
        project_refs = {"projects": {"test": {"junction.bst": [{"ref": ref}]}}}
        _yaml.roundtrip_dump(project_refs, os.path.join(project, "junction.refs"))

    # Now try to fetch it; this should automatically result in fetching
    # the junction itself.
    result = cli.run(project=project, args=["source", "fetch", "junction-dep.bst"])
    result.assert_success()
Example #6
def test_build_push_source_twice(cli, tmpdir, datafiles):
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)
    element_name = "import-bin.bst"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        res = cli.run(project=project_dir, args=["build", element_name])
        res.assert_success()
        assert "fetch:{}".format(element_name) in res.stderr
        assert "Pushed source" in res.stderr

        # The second build pipeline is a no-op as everything is already cached.
        # However, this verifies that the pipeline behaves as expected.
        res = cli.run(project=project_dir, args=["build", element_name])
        res.assert_success()
        assert "fetch:{}".format(element_name) not in res.stderr
        assert "Pushed source" not in res.stderr
Example #7
def test_max_jobs(cli, datafiles, cli_value, config_value):
    project = str(datafiles)
    target = "target.bst"

    # Specify `--max-jobs` if this test sets it
    args = []
    if cli_value is not None:
        args += ["--max-jobs", cli_value]
    args += ["show", "--deps", "none", "--format", "%{vars}", target]

    # Specify `max-jobs` in user configuration if this test sets it
    if config_value is not None:
        cli.configure({"build": {"max-jobs": config_value}})

    result = cli.run(project=project, silent=True, args=args)
    result.assert_success()
    loaded = _yaml.load_data(result.output)
    loaded_value = loaded.get_int("max-jobs")

    # We expect the value provided on the command line to take
    # precedence over the configuration file value, if specified.
    #
    # If neither are specified then we expect the default
    expected_value = cli_value or config_value or "0"

    if expected_value == "0":
        # If we are expecting the automatic behavior of using the maximum
        # number of cores available, just check that it is a value > 0
        assert loaded_value > 0, "Automatic setting of max-jobs didn't work"
    else:
        # Check that we got the explicitly set value
        assert loaded_value == int(expected_value)
Example #8
def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Set up remote and local shares
    local_cache = os.path.join(str(tmpdir), "artifacts")
    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
        cli.configure({
            "artifacts": {
                "servers": [{
                    "url": remote.repo,
                    "push": True
                }]
            },
            "cachedir": local_cache,
        })

        # Build the element
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Make sure it's in the share
        assert remote.get_artifact(
            cli.get_artifact_name(project, "test", element))

        # Delete the artifact from the local cache
        result = cli.run(project=project, args=["artifact", "delete", element])
        result.assert_success()
        assert cli.get_element_state(project, element) != "cached"

        result = cli.run(project=project, args=["artifact", "show", element])
        result.assert_success()
        assert "available {}".format(element) in result.output
def test_shell_use_cached_buildtree(share_with_buildtrees, datafiles, cli,
                                    pull_deps, pull_buildtree, expect_error):
    project = str(datafiles)
    element_name = "build-shell/buildtree.bst"

    cli.configure(
        {"artifacts": {
            "servers": [{
                "url": share_with_buildtrees.repo
            }]
        }})

    # Optionally pull the buildtree along with `bst artifact pull`
    maybe_pull_deps(cli, project, element_name, pull_deps, pull_buildtree)

    # Disable access to the artifact server after pulling, so that `bst shell` cannot
    # automatically pull the missing bits; this should be equivalent to the bits
    # being absent from the remote server
    cli.configure({"artifacts": {}})

    # Run the shell without asking it to pull any buildtree, just asking to use a buildtree
    result = cli.run(project=project,
                     args=[
                         "shell", "--build", element_name, "--use-buildtree",
                         "--", "cat", "test"
                     ])

    if expect_error:
        result.assert_main_error(ErrorDomain.APP, expect_error)
    else:
        result.assert_success()
        assert "Hi" in result.output
Example #10
def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"
    dep = "compose-all.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()
    assert cli.get_element_states(project, [element, dep], deps="none") == {
        element: "cached",
        dep: "cached",
    }

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Delete the artifact
    result = cli.run(project=project,
                     args=["artifact", "delete", artifact, dep])
    result.assert_success()

    # Check that the ARTIFACT is no longer in the cache
    assert not os.path.exists(os.path.join(local_cache, "artifacts", artifact))

    # Check that the dependency ELEMENT is no longer cached
    assert cli.get_element_state(project, dep) != "cached"
Example #11
def test_custom_logging(cli, tmpdir, datafiles):
    project = str(datafiles)
    bin_files_path = os.path.join(project, "files", "bin-files")
    element_path = os.path.join(project, "elements")
    element_name = "fetch-test-git.bst"

    custom_log_format = "%{elapsed},%{elapsed-us},%{wallclock},%{wallclock-us},%{key},%{element},%{action},%{message}"
    user_config = {"logging": {"message-format": custom_log_format}}
    cli.configure(user_config)

    # Create our repo object of the given source type with
    # the bin files, and then collect the initial ref.
    #
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(bin_files_path)

    # Write out our test target
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # Now try to fetch it
    result = cli.run(project=project, args=["source", "fetch", element_name])
    result.assert_success()

    m = re.search(
        r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6}\s*,.*" r",SUCCESS,Query cache",
        result.stderr,
    )
    assert m is not None
Example #12
def test_pull_missing_local_blob(cli, tmpdir, datafiles):
    repo = create_repo("tar", str(tmpdir))
    repo.create(os.path.join(str(datafiles), "files"))
    element_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    project_config = {
        "name": "pull-missing-local-blob",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)
    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_name = "input.bst"
    input_file = os.path.join(element_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    depends_name = "depends.bst"
    depends_config = {"kind": "stack", "depends": [input_name]}
    depends_file = os.path.join(element_dir, depends_name)
    _yaml.roundtrip_dump(depends_config, depends_file)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the import-bin element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        result = cli.run(project=project, args=["source", "track", input_name])
        result.assert_success()
        result = cli.run(project=project, args=["build", input_name])
        result.assert_success()
        assert cli.get_element_state(project, input_name) == "cached"

        # Delete a file blob from the local cache.
        # This is a placeholder to test partial CAS handling until we support
        # partial artifact pulling (or blob-based CAS expiry).
        #
        digest = utils.sha256sum(
            os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
        objpath = os.path.join(cli.directory, "cas", "objects", digest[:2],
                               digest[2:])
        os.unlink(objpath)

        # Now try bst build
        result = cli.run(project=project, args=["build", depends_name])
        result.assert_success()

        # Assert that the import-bin artifact was pulled (completing the partial artifact)
        assert result.get_pulled_elements() == [input_name]
Example #13
def test_shell_use_uncached_buildtree(share_without_buildtrees, datafiles,
                                      cli):
    project = str(datafiles)
    element_name = "build-shell/buildtree.bst"

    cli.configure(
        {"artifacts": {
            "servers": [{
                "url": share_without_buildtrees.repo
            }]
        }})

    # Pull everything we would need
    maybe_pull_deps(cli, project, element_name, "all", True)

    # Run the shell without asking it to pull any buildtree, just asking to use a buildtree
    result = cli.run(project=project,
                     args=[
                         "shell", "--build", element_name, "--use-buildtree",
                         "--", "cat", "test"
                     ])

    # Sorry, a buildtree was never cached for this element
    result.assert_main_error(
        ErrorDomain.APP,
        "missing-buildtree-artifact-created-without-buildtree")
Example #14
def test_pull_missing_blob_split_share(cli, tmpdir, datafiles):
    project = str(datafiles)

    indexshare = os.path.join(str(tmpdir), "indexshare")
    storageshare = os.path.join(str(tmpdir), "storageshare")

    with create_split_share(indexshare, storageshare) as (index, storage):
        cli.configure({
            "artifacts": {
                "servers": [
                    {
                        "url": index.repo,
                        "push": True,
                        "type": "index"
                    },
                    {
                        "url": storage.repo,
                        "push": True,
                        "type": "storage"
                    },
                ]
            }
        })

        _test_pull_missing_blob(cli, project, index, storage)
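Examples #14 and #25 delegate to a _test_pull_missing_blob() helper that is not reproduced on this page. A rough sketch of the shape it could take, under the assumption that the share's objects can be deleted out from under its refs (storage.repodir is a hypothetical attribute for the share's on-disk repository; the real helper may work differently):

def _test_pull_missing_blob(cli, project, index, storage):
    # Build the target and push it (the caller configured push: True)
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()
    assert cli.get_element_state(project, "target.bst") == "cached"

    # Wipe the local caches so the next build must consult the remote
    shutil.rmtree(os.path.join(cli.directory, "cas"))
    shutil.rmtree(os.path.join(cli.directory, "artifacts"))
    assert cli.get_element_state(project, "target.bst") != "cached"

    # Delete blobs on the remote while keeping the artifact refs,
    # simulating concurrent expiry on the server side (hypothetical
    # layout; the real helper may locate the objects differently)
    shutil.rmtree(os.path.join(storage.repodir, "cas", "objects"))

    # The build should notice the incomplete remote artifact and build
    # locally instead of failing the pull
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()
    assert not result.get_pulled_elements()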
Example #15
def test_partial_checkout_fail(tmpdir, datafiles, cli):
    project = str(datafiles)
    build_elt = "import-bin.bst"
    checkout_dir = os.path.join(str(tmpdir), "checkout")

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        res = cli.run(project=project,
                      args=[
                          "artifact", "checkout", build_elt, "--directory",
                          checkout_dir
                      ])
        res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
        assert re.findall(
            r"Remote \((\S+)\) does not have artifact (\S+) cached",
            res.stderr)
Example #16
def test_push_pull_specific_remote(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(
            str(tmpdir),
            "goodartifactshare")) as good_share, create_artifact_share(
                os.path.join(str(tmpdir), "badartifactshare")) as bad_share:

        # Build the target so we have it cached locally only.
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"

        # Configure the default push location to be bad_share; we will assert that
        # nothing actually gets pushed there.
        cli.configure({
            "artifacts": {
                "servers": [
                    {
                        "url": bad_share.repo,
                        "push": True
                    },
                ]
            }
        })

        # Now try `bst artifact push` to the good_share.
        result = cli.run(project=project,
                         args=[
                             "artifact", "push", "target.bst",
                             "--artifact-remote", good_share.repo
                         ])
        result.assert_success()

        # Assert that all the artifacts are in the share we pushed
        # to, and not the other.
        assert_shared(cli, good_share, project, "target.bst")
        assert_not_shared(cli, bad_share, project, "target.bst")

        # Now we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the good_share.
        #
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        result = cli.run(project=project,
                         args=[
                             "artifact", "pull", "target.bst",
                             "--artifact-remote", good_share.repo
                         ])
        result.assert_success()

        # And assert that it's again in the local cache, without having built
        assert cli.get_element_state(project, "target.bst") == "cached"
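Several examples on this page call assert_shared() and assert_not_shared() to check whether a share holds an element's artifact. Minimal sketches built from the same calls used in Example #8 (cli.get_artifact_name() and share.get_artifact()); the "test" default matches the project name used throughout these examples:

def assert_shared(cli, share, project, element_name, *, project_name="test"):
    # Resolve the artifact name for the element and look it up in the share
    artifact_name = cli.get_artifact_name(project, project_name, element_name)
    if not share.get_artifact(artifact_name):
        raise AssertionError(
            "Artifact share at {} does not contain expected element {}".format(
                share.repo, element_name))


def assert_not_shared(cli, share, project, element_name, *, project_name="test"):
    artifact_name = cli.get_artifact_name(project, project_name, element_name)
    if share.get_artifact(artifact_name):
        raise AssertionError(
            "Artifact share at {} unexpectedly contains element {}".format(
                share.repo, element_name))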
Example #17
def test_source_push(cli, tmpdir, datafiles):
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        repo = create_repo("tar", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "push.bst"
        element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        # get the source object
        with dummy_context(config=user_config_file) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements(["push.bst"])[0]
            element._query_source_cache()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            # check that the share doesn't have the source yet
            assert not share.get_source_proto(source._get_source_name())

            # build the element; this should fetch and then push the source
            # to the remote
            res = cli.run(project=project_dir, args=["build", "push.bst"])
            res.assert_success()
            assert "Pushed source" in res.stderr

            # check that we've got the remote locally now
            sourcecache = context.sourcecache
            assert sourcecache.contains(source)

            # check that the remote CAS now has it
            digest = sourcecache.export(source)._get_digest()
            assert share.has_object(digest)
Example #18
def test_expiry_order(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"
    checkout = os.path.join(project, "workspace")

    cli.configure({"cache": {"quota": 9000000}})

    # Create an artifact
    create_element_size("dep.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "dep.bst"])
    res.assert_success()

    # Create another artifact
    create_element_size("unrelated.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "unrelated.bst"])
    res.assert_success()

    # And build something else
    create_element_size("target.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    create_element_size("target2.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    wait_for_cache_granularity()

    # Now extract dep.bst
    res = cli.run(
        project=project,
        args=["artifact", "checkout", "dep.bst", "--directory", checkout])
    res.assert_success()

    # Finally, build something that will cause the cache to overflow
    create_element_size("expire.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "expire.bst"])
    res.assert_success()

    # While dep.bst was the first element to be created, it should not
    # have been removed.
    # Note that buildstream will reduce the cache to 50% of the
    # original size - we therefore remove multiple elements.
    check_elements = [
        "unrelated.bst", "target.bst", "target2.bst", "dep.bst", "expire.bst"
    ]
    states = cli.get_element_states(project, check_elements)
    assert tuple(states[element] for element in check_elements) == (
        "buildable",
        "buildable",
        "buildable",
        "cached",
        "cached",
    )
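The cache-expiry examples (#18, #21, #22, #29) generate elements with artifacts of a known size via create_element_size(). A simplified sketch, assuming an import element over a local directory of random data is an acceptable stand-in for the real generator in the BuildStream test utilities:

def create_element_size(name, project_dir, elements_path, dependencies, size):
    # Write `size` bytes of incompressible data for the element to import
    data_path = os.path.join(project_dir, "data", name)
    os.makedirs(data_path, exist_ok=True)
    with open(os.path.join(data_path, "datafile"), "wb") as f:
        f.write(os.urandom(size))

    # Generate an import element carrying the requested dependencies
    element = {
        "kind": "import",
        "sources": [{"kind": "local", "path": os.path.join("data", name)}],
        "depends": dependencies,
    }
    os.makedirs(os.path.join(project_dir, elements_path), exist_ok=True)
    _yaml.roundtrip_dump(
        element, os.path.join(project_dir, elements_path, name))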
Example #19
def test_build_remote_option(caplog, cli, tmpdir, datafiles):
    project = str(datafiles)
    caplog.set_level(1)

    with create_artifact_share(os.path.join(
            str(tmpdir),
            "artifactshare1")) as shareuser, create_artifact_share(
                os.path.join(str(tmpdir), "artifactshare2")) as sharecli:

        # Configure shareuser remote in user conf
        cli.configure({
            "artifacts": {
                "servers": [{
                    "url": shareuser.repo,
                    "push": True
                }]
            }
        })

        # Push the artifacts to the shareuser remote.
        # Assert that shareuser has the artifacts cached, but sharecli doesn't,
        # then delete the locally cached elements
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        all_elements = ["target.bst", "import-bin.bst", "compose-all.bst"]
        for element_name in all_elements:
            assert element_name in result.get_pushed_elements()
            assert_not_shared(cli, sharecli, project, element_name)
            assert_shared(cli, shareuser, project, element_name)
            cli.remove_artifact_from_cache(project, element_name)

        # Now check that a build pointed at sharecli pulls nothing, since sharecli
        # has nothing cached and shareuser must be ignored. The artifacts will
        # instead be built and pushed to sharecli
        result = cli.run(
            project=project,
            args=["build", "--artifact-remote", sharecli.repo, "target.bst"])
        result.assert_success()
        for element_name in all_elements:
            assert element_name not in result.get_pulled_elements()
            assert_shared(cli, sharecli, project, element_name)
            cli.remove_artifact_from_cache(project, element_name)

        # Now check that a clean build pointed at sharecli pulls artifacts only
        # from it, since it was provided on the command line and is now populated
        result = cli.run(
            project=project,
            args=["build", "--artifact-remote", sharecli.repo, "target.bst"])
        result.assert_success()
        for element_name in all_elements:
            assert cli.get_element_state(project, element_name) == "cached"
            assert element_name in result.get_pulled_elements()
        assert shareuser.repo not in result.stderr
        assert sharecli.repo in result.stderr
Example #20
def test_invalid_cache_pullbuildtrees(cli, datafiles, value, success):
    project = str(datafiles)

    cli.configure({"cache": {
        "pull-buildtrees": value,
    }})

    res = cli.run(project=project, args=["workspace", "list"])
    if success:
        res.assert_success()
    else:
        res.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
Example #21
def test_keep_dependencies(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    # The artifact expiry logic relies on mtime changes, in real life second precision
    # should be enough for this to work almost all the time, but test cases happen very
    # quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip(
            "Filesystem does not support subsecond mtime precision: {}".format(
                project))

    cli.configure({"cache": {"quota": 10000000}})

    # Create a pretty big dependency
    create_element_size("dependency.bst", project, element_path, [], 5000000)
    res = cli.run(project=project, args=["build", "dependency.bst"])
    res.assert_success()

    # Now create some other unrelated artifact
    create_element_size("unrelated.bst", project, element_path, [], 4000000)
    res = cli.run(project=project, args=["build", "unrelated.bst"])
    res.assert_success()

    # Check that both elements are in the cache
    states = cli.get_element_states(project,
                                    ["dependency.bst", "unrelated.bst"])
    assert states["dependency.bst"] == "cached"
    assert states["unrelated.bst"] == "cached"

    # We try to build an element which depends on the LRU artifact,
    # and could therefore fail if we didn't make sure dependencies
    # aren't removed.
    #
    # Since some artifact caches may implement weak cache keys by
    # duplicating artifacts (bad!) we need to make this equal in size
    # or smaller than half the size of its dependencies.
    #
    create_element_size("target.bst", project, element_path,
                        ["dependency.bst"], 2000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    states = cli.get_element_states(project, ["target.bst", "unrelated.bst"])
    assert states["target.bst"] == "cached"
    assert states["dependency.bst"] == "cached"
    assert states["unrelated.bst"] != "cached"
Example #22
def test_never_delete_required(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    # The artifact expiry logic relies on mtime changes, in real life second precision
    # should be enough for this to work almost all the time, but test cases happen very
    # quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip(
            "Filesystem does not support subsecond mtime precision: {}".format(
                project))

    cli.configure({
        "cache": {
            "quota": 10000000
        },
        "scheduler": {
            "fetchers": 1,
            "builders": 1
        }
    })

    # Create a linear build tree
    create_element_size("dep1.bst", project, element_path, [], 8000000)
    create_element_size("dep2.bst", project, element_path, ["dep1.bst"],
                        8000000)
    create_element_size("dep3.bst", project, element_path, ["dep2.bst"],
                        8000000)
    create_element_size("target.bst", project, element_path, ["dep3.bst"],
                        8000000)

    # Build dep1.bst, which should fit into the cache.
    res = cli.run(project=project, args=["build", "dep1.bst"])
    res.assert_success()

    # We try to build this pipeline, but it's too big for the
    # cache. Since all elements are required, the build should fail.
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_main_error(ErrorDomain.STREAM, None)
    res.assert_task_error(ErrorDomain.CAS, "cache-too-full")

    states = cli.get_element_states(project, ["target.bst"])
    assert states["dep1.bst"] == "cached"
    assert states["dep2.bst"] != "cached"
    assert states["dep3.bst"] != "cached"
    assert states["target.bst"] != "cached"
Example #23
def test_dynamic_build_plan(cli, tmpdir, datafiles):
    project = str(datafiles)
    target = "checkout-deps.bst"
    build_dep = "import-dev.bst"
    runtime_dep = "import-bin.bst"
    all_elements = [target, build_dep, runtime_dep]

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the target element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that everything is now cached in the remote.
        for element_name in all_elements:
            assert_shared(cli, share, project, element_name)

        # Now we've pushed, delete the user's local artifact cache directory
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that nothing is cached locally anymore
        states = cli.get_element_states(project, all_elements)
        assert not any(states[e] == "cached" for e in all_elements)

        # Now try to rebuild target
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that target and runtime dependency were pulled
        # but build dependency was not pulled as it wasn't needed
        # (dynamic build plan).
        assert target in result.get_pulled_elements()
        assert runtime_dep in result.get_pulled_elements()
        assert build_dep not in result.get_pulled_elements()

        # And assert that the pulled elements are again in the local cache
        states = cli.get_element_states(project, all_elements)
        assert states[target] == "cached"
        assert states[runtime_dep] == "cached"
        assert states[build_dep] != "cached"
Example #24
def default_state(cli, tmpdir, share):
    shutil.rmtree(os.path.join(str(tmpdir), "cas"))
    cli.configure({
        "artifacts": {
            "servers": [{
                "url": share.repo,
                "push": False
            }]
        },
        "cachedir": str(tmpdir),
        "cache": {
            "pull-buildtrees": False
        },
    })
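default_state() is a reset helper rather than a test: suites that exercise several pull configurations in sequence call it between phases to return the CLI to a known baseline. An illustrative (hypothetical) usage, assuming project and element_name are defined as in the surrounding tests:

# Pull with buildtrees enabled, then reset to the baseline configuration
cli.configure({"cache": {"pull-buildtrees": True}})
result = cli.run(project=project, args=["artifact", "pull", element_name])
result.assert_success()
default_state(cli, tmpdir, share)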
Example #25
def test_pull_missing_blob(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        _test_pull_missing_blob(cli, project, share, share)
Example #26
def test_pull_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the target element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test",
                                    os.path.splitext(element)[0], cache_key)
        assert os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Assert that the target is shared (note that assert_shared will use the artifact name)
        assert_shared(cli, share, project, element)

        # Now we've pushed, remove the local cache
        shutil.rmtree(os.path.join(local_cache, "artifacts"))

        # Assert that nothing is cached locally anymore
        assert not os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Now try bst artifact pull
        result = cli.run(project=project,
                         args=["artifact", "pull", artifact_ref])
        result.assert_success()

        # And assert that it's again in the local cache, without having built
        assert os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", artifact_ref))
Example #27
def test_invalid_cache_quota(cli, datafiles, quota, err_domain, err_reason):
    project = str(datafiles)
    os.makedirs(os.path.join(project, "elements"))

    cli.configure({
        "cache": {
            "quota": quota,
        },
    })

    res = cli.run(project=project, args=["workspace", "list"])

    if err_domain == "success":
        res.assert_success()
    else:
        res.assert_main_error(err_domain, err_reason)
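The pytest parametrize decorators were stripped when these examples were extracted, so the actual quota, err_domain and err_reason values are not visible here. A test like this would typically be driven by something along these lines (illustrative values, not necessarily the originals):

@pytest.mark.parametrize(
    "quota, err_domain, err_reason",
    [
        # Well-formed quotas are accepted
        ("1G", "success", None),
        ("50%", "success", None),
        # Nonsense values are rejected when the configuration is loaded
        ("pony", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
        ("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
    ],
)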
Example #28
def test_push_pull_deps(cli, tmpdir, datafiles, deps, expected_states):
    project = str(datafiles)
    target = "checkout-deps.bst"
    build_dep = "import-dev.bst"
    runtime_dep = "import-bin.bst"
    all_elements = [target, build_dep, runtime_dep]

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the target element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that everything is now cached in the remote.
        for element_name in all_elements:
            assert_shared(cli, share, project, element_name)

        # Now we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the share
        #
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that nothing is cached locally anymore
        states = cli.get_element_states(project, all_elements)
        assert not any(states[e] == "cached" for e in all_elements)

        # Now try bst artifact pull
        result = cli.run(project=project,
                         args=["artifact", "pull", "--deps", deps, target])
        result.assert_success()

        # And assert that the pulled elements are again in the local cache
        states = cli.get_element_states(project, all_elements)
        states_flattened = (states[target], states[build_dep],
                            states[runtime_dep])
        assert states_flattened == expected_states
Example #29
def test_source_cache_empty_artifact_cache(cli, tmpdir, datafiles):
    cachedir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)
    element_path = os.path.join(project_dir, "elements")

    with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cachedir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        create_element_size("repo.bst", project_dir, element_path, [], 10000)

        res = cli.run(project=project_dir, args=["source", "push", "repo.bst"])
        res.assert_success()
        assert "Pushed source " in res.stderr

        # delete the local sources and check that the build pulls the sources,
        # builds, and then pushes the artifacts
        shutil.rmtree(os.path.join(cachedir, "cas"))
        shutil.rmtree(os.path.join(cachedir, "sources"))

        res = cli.run(project=project_dir, args=["build", "repo.bst"])
        res.assert_success()
        assert "Remote ({}) does not have artifact ".format(
            share.repo) in res.stderr
        assert "Pulled source" in res.stderr
        assert "Caching artifact" in res.stderr
        assert "Pushed artifact" in res.stderr
Example #30
def test_pull_missing_notifies_user(caplog, cli, tmpdir, datafiles):
    project = str(datafiles)
    caplog.set_level(1)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        cli.configure({"artifacts": {"servers": [{"url": share.repo}]}})
        result = cli.run(project=project, args=["build", "target.bst"])

        result.assert_success()
        assert not result.get_pulled_elements(), \
            "No elements should have been pulled since the cache was empty"

        assert "INFO    Remote ({}) does not have".format(
            share.repo) in result.stderr
        assert "SKIPPED Pull" in result.stderr