Code example #1
File: remote.py Project: wjt/buildstream
def test_unique_key(cli, tmpdir, datafiles):
    """This test confirms that the 'filename' parameter is honoured when it comes
    to generating a cache key for the source.
    """
    project = str(datafiles)
    generate_project(project,
                     {"aliases": {
                         "tmpdir": "file:///" + str(tmpdir)
                     }})

    states = cli.get_element_states(
        project,
        ["target.bst", "target-custom.bst", "target-custom-executable.bst"])
    assert states["target.bst"] == "fetch needed"
    assert states["target-custom.bst"] == "fetch needed"
    assert states["target-custom-executable.bst"] == "fetch needed"

    # Try to fetch it
    cli.run(project=project, args=["source", "fetch", "target.bst"])

    # We should download the file only once
    states = cli.get_element_states(
        project,
        ["target.bst", "target-custom.bst", "target-custom-executable.bst"])
    assert states["target.bst"] == "buildable"
    assert states["target-custom.bst"] == "buildable"
    assert states["target-custom-executable.bst"] == "buildable"

    # But the cache key is different because the 'filename' is different.
    assert (cli.get_element_key(project, "target.bst") != cli.get_element_key(
        project, "target-custom.bst") != cli.get_element_key(
            project, "target-custom-executable.bst"))
Code example #2
File: show.py Project: wjt/buildstream
def test_strict_dependencies(cli, datafiles, target, expected_state):
    project = str(datafiles)

    # Configure non-strict mode; this will affect the build and
    # the `bst show` commands run via cli.get_element_states()
    cli.configure({"projects": {"test": {"strict": False}}})

    result = cli.run(project=project, silent=True, args=["build", target])
    result.assert_success()

    states = cli.get_element_states(project, ["base.bst", target])
    assert states["base.bst"] == "cached"
    assert states[target] == "cached"

    # Now modify the file, effectively causing the common base.bst
    # dependency to change its cache key
    hello_path = os.path.join(project, "files", "hello.txt")
    with open(hello_path, "w") as f:
        f.write("Goodbye")

    # Now assert that we have the states we expect as a result
    states = cli.get_element_states(project, ["base.bst", target])
    assert states["base.bst"] == "buildable"
    assert states[target] == expected_state
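This excerpt takes target and expected_state as parameters, so the original test is presumably parametrized; the listing strips decorators. A plausible reconstruction, with illustrative element names and states rather than the real test data:

import pytest

# Hypothetical parametrization: the actual (target, expected_state)
# pairs in BuildStream's show.py tests may differ.
@pytest.mark.parametrize("target,expected_state", [
    ("target.bst", "cached"),          # e.g. stays cached in non-strict mode
    ("target-strict.bst", "waiting"),  # e.g. needs a rebuild under strict keys
])
def test_strict_dependencies(cli, datafiles, target, expected_state):
    ...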
Code example #3
File: fetch.py Project: jbampton/buildstream
def test_fetch_deps(cli, datafiles, deps, expected_states):
    project = str(datafiles)
    generate_project(project,
                     {"aliases": {
                         "project-root": "file:///" + project
                     }})

    target = "bananas.bst"
    build_dep = "apples.bst"
    runtime_dep = "oranges.bst"

    # Assert that none of the sources are cached
    states = cli.get_element_states(project, [target, build_dep, runtime_dep])
    assert all([state == "fetch needed" for state in states.values()])

    # Now fetch the specified sources
    result = cli.run(project=project,
                     args=["source", "fetch", "--deps", deps, target])
    result.assert_success()

    # Finally assert that we have fetched _only_ the desired sources
    states = cli.get_element_states(project, [target, build_dep, runtime_dep])
    states_flattened = (states[target], states[build_dep], states[runtime_dep])
    assert states_flattened == expected_states
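generate_project() appears in several examples without its definition. A minimal sketch of what it might do, assuming the same buildstream._yaml helper used elsewhere on this page; the real test utility may merge defaults differently:

import os
from buildstream import _yaml

def generate_project(project_dir, config=None):
    # Hypothetical sketch: write a minimal project.conf, extended
    # with any caller-supplied configuration (e.g. source aliases).
    project_conf = {"name": "test", "min-version": "2.0"}
    project_conf.update(config or {})
    _yaml.roundtrip_dump(project_conf,
                         os.path.join(project_dir, "project.conf"))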
Code example #4
File: pull.py Project: abderrahim/buildstream
def test_dynamic_build_plan(cli, tmpdir, datafiles):
    project = str(datafiles)
    target = "checkout-deps.bst"
    build_dep = "import-dev.bst"
    runtime_dep = "import-bin.bst"
    all_elements = [target, build_dep, runtime_dep]

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the target element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that everything is now cached in the remote.
        for element_name in all_elements:
            assert_shared(cli, share, project, element_name)

        # Now that we've pushed, delete the user's local artifact cache directory
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that nothing is cached locally anymore
        states = cli.get_element_states(project, all_elements)
        assert not any(states[e] == "cached" for e in all_elements)

        # Now try to rebuild target
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that target and runtime dependency were pulled
        # but build dependency was not pulled as it wasn't needed
        # (dynamic build plan).
        assert target in result.get_pulled_elements()
        assert runtime_dep in result.get_pulled_elements()
        assert build_dep not in result.get_pulled_elements()

        # And assert that the pulled elements are again in the local cache
        states = cli.get_element_states(project, all_elements)
        assert states[target] == "cached"
        assert states[runtime_dep] == "cached"
        assert states[build_dep] != "cached"
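assert_shared() and assert_not_shared() are also test-harness helpers not shown here. A sketch of the idea, assuming the share exposes its storage directory as share.directory and that refs follow the test/<element-name>/<cache-key> layout used in the artifact-delete examples below; the real helpers may query the share through its API instead:

import os

def assert_shared(cli, share, project, element_name):
    # Hypothetical sketch: the artifact ref for the element's current
    # cache key should exist in the share's ref storage.
    cache_key = cli.get_element_key(project, element_name)
    ref = os.path.join("test", os.path.splitext(element_name)[0], cache_key)
    assert os.path.exists(
        os.path.join(share.directory, "artifacts", "refs", ref))

def assert_not_shared(cli, share, project, element_name):
    # Hypothetical sketch: the inverse check.
    cache_key = cli.get_element_key(project, element_name)
    ref = os.path.join("test", os.path.splitext(element_name)[0], cache_key)
    assert not os.path.exists(
        os.path.join(share.directory, "artifacts", "refs", ref))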
Code example #5
File: expiry.py Project: wjt/buildstream
def test_keep_dependencies(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    # The artifact expiry logic relies on mtime changes, in real life second precision
    # should be enough for this to work almost all the time, but test cases happen very
    # quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip("Filesystem does not support subsecond mtime precision: {}".format(project))

    cli.configure({"cache": {"quota": 10000000}})

    # Create a pretty big dependency
    create_element_size("dependency.bst", project, element_path, [], 5000000)
    res = cli.run(project=project, args=["build", "dependency.bst"])
    res.assert_success()

    # Now create some other unrelated artifact
    create_element_size("unrelated.bst", project, element_path, [], 4000000)
    res = cli.run(project=project, args=["build", "unrelated.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project, ["dependency.bst", "unrelated.bst"])
    assert states["dependency.bst"] == "cached"
    assert states["unrelated.bst"] == "cached"

    # We try to build an element which depends on the LRU artifact,
    # and could therefore fail if we didn't make sure dependencies
    # aren't removed.
    #
    # Since some artifact caches may implement weak cache keys by
    # duplicating artifacts (bad!) we need to make this equal in size
    # or smaller than half the size of its dependencies.
    #
    create_element_size("target.bst", project, element_path, ["dependency.bst"], 2000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    states = cli.get_element_states(project, ["target.bst", "unrelated.bst"])
    assert states["target.bst"] == "cached"
    assert states["dependency.bst"] == "cached"
    assert states["unrelated.bst"] != "cached"
Code example #6
File: expiry.py Project: wjt/buildstream
def test_cleanup_first(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    cli.configure({"cache": {"quota": 10000000,}})

    # Create an element that uses almost the entire cache (an empty
    # ostree cache starts at roughly 10KiB, so we need a bit of a
    # buffer)
    create_element_size("target.bst", project, element_path, [], 8000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    assert cli.get_element_state(project, "target.bst") == "cached"

    # Now configure a smaller quota, creating a situation where
    # the cache must be cleaned up before building anything else.
    #
    # Fix the fetchers and builders just to ensure a predictable
    # sequence of events (although it does not affect this test)
    cli.configure({"cache": {"quota": 5000000},
                   "scheduler": {"fetchers": 1, "builders": 1}})

    # Our cache is now more than full; BuildStream must clean up
    # the old artifact before it can build the new one
    create_element_size("target2.bst", project, element_path, [], 4000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project, ["target.bst", "target2.bst"])
    assert states["target.bst"] != "cached"
    assert states["target2.bst"] == "cached"
Code example #7
File: default_target.py Project: wjt/buildstream
def test_default_target(cli, datafiles):
    project = str(datafiles)
    project_path = os.path.join(project, "project.conf")

    # First, modify project configuration to set a default target
    project_conf = {
        "name": "test-default-target",
        "min-version": "2.0",
        "element-path": "elements",
        "defaults": {
            "targets": ["dummy_stack.bst"]
        },
    }
    _yaml.roundtrip_dump(project_conf, project_path)

    # dummy_stack only depends on dummy_1 and dummy_2, but not dummy_3
    all_targets = ["dummy_1.bst", "dummy_2.bst", "dummy_stack.bst"]

    result = cli.run(project=project, args=["build"])
    result.assert_success()

    states = cli.get_element_states(project, all_targets)
    assert all(states[e] == "cached" for e in all_targets)

    # assert that dummy_3 isn't included in the output
    assert "dummy_3.bst" not in states
Code example #8
def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"
    dep = "compose-all.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()
    assert cli.get_element_states(project, [element, dep], deps="none") == {
        element: "cached",
        dep: "cached",
    }

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Delete the artifact
    result = cli.run(project=project,
                     args=["artifact", "delete", artifact, dep])
    result.assert_success()

    # Check that the ARTIFACT is no longer in the cache
    assert not os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Check that the dependency ELEMENT is no longer cached
    assert cli.get_element_state(project, dep) != "cached"
Code example #9
def test_artifact_expires(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    cli.configure({"cache": {
        "quota": 10000000,
    }})

    # Create an element that uses almost the entire cache (an empty
    # ostree cache starts at roughly 10KiB, so we need a bit of a
    # buffer)
    create_element_size("target.bst", project, element_path, [], 6000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    assert cli.get_element_state(project, "target.bst") == "cached"

    # Our cache should now be almost full. Let's create another
    # artifact and see if we can cause buildstream to delete the old
    # one.
    create_element_size("target2.bst", project, element_path, [], 6000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project, ["target.bst", "target2.bst"])
    assert states["target.bst"] != "cached"
    assert states["target2.bst"] == "cached"
Code example #10
File: pull.py Project: abderrahim/buildstream
def test_push_pull_deps(cli, tmpdir, datafiles, deps, expected_states):
    project = str(datafiles)
    target = "checkout-deps.bst"
    build_dep = "import-dev.bst"
    runtime_dep = "import-bin.bst"
    all_elements = [target, build_dep, runtime_dep]

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the target element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that everything is now cached in the remote.
        for element_name in all_elements:
            assert_shared(cli, share, project, element_name)

        # Now that we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the share
        #
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that nothing is cached locally anymore
        states = cli.get_element_states(project, all_elements)
        assert not any(states[e] == "cached" for e in all_elements)

        # Now try bst artifact pull
        result = cli.run(project=project,
                         args=["artifact", "pull", "--deps", deps, target])
        result.assert_success()

        # And assert that the pulled elements are again in the local cache
        states = cli.get_element_states(project, all_elements)
        states_flattened = (states[target], states[build_dep],
                            states[runtime_dep])
        assert states_flattened == expected_states
Code example #11
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure(
            {"artifacts": {"url": share.repo, "push": True},}
        )

        # Create and build 2 elements, one 5 MB and one 15 MB.
        create_element_size("element1.bst", project, element_path, [], int(5e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        create_element_size("element2.bst", project, element_path, [], int(15e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Ensure they are cached locally
        states = cli.get_element_states(project, ["element1.bst", "element2.bst"])
        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        # Ensure that they have been pushed to the cache
        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Pull element1 from the remote cache (this should update its mtime).
        # Use a separate local cache for this to ensure the complete element is pulled.
        cli2_path = os.path.join(str(tmpdir), "cli2")
        os.mkdir(cli2_path)
        cli2 = Cli(cli2_path)
        result = cli2.run(project=project, args=["artifact", "pull", "element1.bst", "--remote", share.repo])
        result.assert_success()

        # Ensure element1 is cached locally
        assert cli2.get_element_state(project, "element1.bst") == "cached"

        wait_for_cache_granularity()

        # Create and build element3 (5 MB)
        create_element_size("element3.bst", project, element_path, [], int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Make sure it's cached locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure that element2 was deleted from the share and element1 remains
        assert_not_shared(cli, share, project, "element2.bst")
        assert_shared(cli, share, project, "element1.bst")
Code example #12
def test_artifact_expires(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote artifact cache) in tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"),
                               quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure(
            {"artifacts": {
                "servers": [
                    {
                        "url": share.repo,
                        "push": True
                    },
                ]
            }})

        # Create and build an element of 15 MB
        create_element_size("element1.bst", project, element_path, [],
                            int(15e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        # Create and build an element of 5 MB
        create_element_size("element2.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Check that elements 1 and 2 are cached both locally and remotely
        states = cli.get_element_states(project,
                                        ["element1.bst", "element2.bst"])

        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Create and build another element of 5 MB (this will exceed the free space available in the share)
        create_element_size("element3.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Ensure it is cached both locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure element1 has been removed from the share
        assert_not_shared(cli, share, project, "element1.bst")
        # Ensure that element2 remains
        assert_shared(cli, share, project, "element2.bst")
Code example #13
def test_no_default(cli, datafiles, operation, expected_state):
    project = str(datafiles)
    all_targets = ["dummy_1.bst", "dummy_2.bst", "dummy_3.bst", "dummy_stack.bst"]

    result = cli.run(project=project, args=[operation])
    result.assert_success()

    states = cli.get_element_states(project, all_targets)
    assert all(states[e] == expected_state for e in all_targets)
Code example #14
def test_track_single(cli, tmpdir, datafiles):
    project = str(datafiles)
    dev_files_path = os.path.join(project, "files", "dev-files")
    element_path = os.path.join(project, "elements")
    element_dep_name = "track-test-dep.bst"
    element_target_name = "track-test-target.bst"

    # Create our repo object of the given source type with
    # the dev files, and then collect the initial ref.
    #
    repo = create_repo("git", str(tmpdir))
    repo.create(dev_files_path)

    # Write out our test targets
    generate_element(repo, os.path.join(element_path, element_dep_name))
    generate_element(repo,
                     os.path.join(element_path, element_target_name),
                     dep_name=element_dep_name)

    # Assert that tracking is needed for both elements
    states = cli.get_element_states(project, [element_target_name])
    assert states == {
        element_dep_name: "no reference",
        element_target_name: "no reference",
    }

    # Now first try to track only one element
    result = cli.run(
        project=project,
        args=["source", "track", "--deps", "none", element_target_name])
    result.assert_success()

    # And now fetch it
    result = cli.run(
        project=project,
        args=["source", "fetch", "--deps", "none", element_target_name])
    result.assert_success()

    # Assert that the target is now waiting on its dependency,
    # which has still never been tracked
    states = cli.get_element_states(project, [element_target_name])
    assert states == {
        element_dep_name: "no reference",
        element_target_name: "waiting",
    }
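generate_element() writes the .bst files these tracking tests operate on. A sketch, grounded on the repo.source_config() call visible in the filter example further down and the _yaml helper used elsewhere on this page; the real utility may differ:

from buildstream import _yaml

def generate_element(repo, element_file, dep_name=None):
    # Hypothetical sketch: an import element whose source comes from
    # the test repo, optionally depending on another element.
    element = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    if dep_name:
        element["depends"] = [dep_name]
    _yaml.roundtrip_dump(element, element_file)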
Code example #15
File: default_target.py Project: wjt/buildstream
def test_no_default(cli, datafiles):
    project = str(datafiles)
    all_targets = [
        "dummy_1.bst", "dummy_2.bst", "dummy_3.bst", "dummy_stack.bst"
    ]

    result = cli.run(project=project, args=["build"])
    result.assert_success()

    states = cli.get_element_states(project, all_targets)
    assert all(states[e] == "cached" for e in all_targets)
Code example #16
def test_expiry_order(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"
    checkout = os.path.join(project, "workspace")

    cli.configure({"cache": {"quota": 9000000}})

    # Create an artifact
    create_element_size("dep.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "dep.bst"])
    res.assert_success()

    # Create another artifact
    create_element_size("unrelated.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "unrelated.bst"])
    res.assert_success()

    # And build something else
    create_element_size("target.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    create_element_size("target2.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    wait_for_cache_granularity()

    # Now check out dep.bst, marking it as most recently used
    res = cli.run(
        project=project,
        args=["artifact", "checkout", "dep.bst", "--directory", checkout])
    res.assert_success()

    # Finally, build something that will cause the cache to overflow
    create_element_size("expire.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "expire.bst"])
    res.assert_success()

    # While dep.bst was the first element to be created, it should not
    # have been removed.
    # Note that buildstream will reduce the cache to 50% of the
    # original size - we therefore remove multiple elements.
    check_elements = [
        "unrelated.bst", "target.bst", "target2.bst", "dep.bst", "expire.bst"
    ]
    states = cli.get_element_states(project, check_elements)
    assert tuple(states[element] for element in check_elements) == (
        "buildable",
        "buildable",
        "buildable",
        "cached",
        "cached",
    )
Code example #17
def test_keep_dependencies(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    cli.configure({"cache": {"quota": 10000000}})

    # Create a pretty big dependency
    create_element_size("dependency.bst", project, element_path, [], 5000000)
    res = cli.run(project=project, args=["build", "dependency.bst"])
    res.assert_success()

    # Now create some other unrelated artifact
    create_element_size("unrelated.bst", project, element_path, [], 4000000)
    res = cli.run(project=project, args=["build", "unrelated.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project,
                                    ["dependency.bst", "unrelated.bst"])
    assert states["dependency.bst"] == "cached"
    assert states["unrelated.bst"] == "cached"

    # We try to build an element which depends on the LRU artifact,
    # and could therefore fail if we didn't make sure dependencies
    # aren't removed.
    #
    # Since some artifact caches may implement weak cache keys by
    # duplicating artifacts (bad!) we need to make this equal in size
    # or smaller than half the size of its dependencies.
    #
    create_element_size("target.bst", project, element_path,
                        ["dependency.bst"], 2000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    states = cli.get_element_states(project, ["target.bst", "unrelated.bst"])
    assert states["target.bst"] == "cached"
    assert states["dependency.bst"] == "cached"
    assert states["unrelated.bst"] != "cached"
Code example #18
def test_build_deps_build(datafiles, cli):
    project = str(datafiles)
    target = "checkout-deps.bst"
    build_dep = "import-dev.bst"
    runtime_dep = "import-bin.bst"

    result = cli.run(project=project, args=["build", "--deps", "build", target])
    result.assert_success()

    states = cli.get_element_states(project, [target, build_dep, runtime_dep])
    assert states[build_dep] == "cached"
    assert states[target] == "buildable"
    assert states[runtime_dep] == "buildable"
Code example #19
def test_artifact_delete_elements_build_deps(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Build the element and ensure it's cached
    result = cli.run(project=project, args=["build", element])
    result.assert_success()

    # Assert element and build deps are cached
    assert cli.get_element_state(project, element) == "cached"
    bdep_states = cli.get_element_states(project, [element], deps="build")
    for state in bdep_states.values():
        assert state == "cached"

    result = cli.run(project=project,
                     args=["artifact", "delete", "--deps", "build", element])
    result.assert_success()

    # Assert that the build deps have been deleted and that the artifact remains cached
    assert cli.get_element_state(project, element) == "cached"
    bdep_states = cli.get_element_states(project, [element], deps="build")
    for state in bdep_states.values():
        assert state != "cached"
Code example #20
def test_artifact_too_large(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Mock a file system with 5 MB total space
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"),
                               quota=int(5e6)) as share:

        # Configure bst to push to the remote cache
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }],
            }})

        # Create and push a 3MB element
        create_element_size("small_element.bst", project, element_path, [],
                            int(3e6))
        result = cli.run(project=project, args=["build", "small_element.bst"])
        result.assert_success()

        # Create and try to push a 6MB element.
        create_element_size("large_element.bst", project, element_path, [],
                            int(6e6))
        result = cli.run(project=project, args=["build", "large_element.bst"])
        # This should fail; the server will refuse to store the CAS
        # blobs for the artifact, and then fail to find the files for
        # the uploaded artifact proto.
        #
        # FIXME: This should be extremely uncommon in practice, since
        # the artifact needs to be at least half the cache size for
        # this to happen. Nonetheless, a clearer error message would
        # help (perhaps we should just disallow uploading artifacts
        # that large).
        result.assert_main_error(ErrorDomain.STREAM, None)

        # Ensure that the small artifact is still in the share
        states = cli.get_element_states(
            project, ["small_element.bst", "large_element.bst"])
        assert states["small_element.bst"] == "cached"
        assert_shared(cli, share, project, "small_element.bst")

        # Ensure that the artifact is cached locally but NOT remotely
        assert states["large_element.bst"] == "cached"
        assert_not_shared(cli, share, project, "large_element.bst")
Code example #21
def test_artifact_delete_artifacts_build_deps(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Get the artifact refs of the build dependencies
    bdep_refs = []
    bdep_states = cli.get_element_states(project, [element], deps="build")
    for bdep in bdep_states.keys():
        bdep_refs.append(
            os.path.join("test", _get_normal_name(bdep),
                         cli.get_element_key(project, bdep)))

    # Assert build dependencies are cached
    for ref in bdep_refs:
        assert os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", ref))

    # Delete the artifact
    result = cli.run(project=project,
                     args=["artifact", "delete", "--deps", "build", artifact])
    result.assert_success()

    # Assert that the build dependencies have been deleted
    # and that the artifact itself remains
    for ref in bdep_refs:
        assert not os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", ref))
    assert os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))
Code example #22
def test_no_needless_overwrite(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    dev_files_path = os.path.join(project, "files", "dev-files")
    element_path = os.path.join(project, "elements")
    target = "track-test-target.bst"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    if not have_subsecond_mtime(project):
        pytest.skip(
            "Filesystem does not support subsecond mtime precision: {}".format(
                project))

    # Create our repo object of the given source type with
    # the dev files, and then collect the initial ref.
    #
    repo = create_repo("git", str(tmpdir))
    repo.create(dev_files_path)

    # Write out our test target and assert it exists
    generate_element(repo, os.path.join(element_path, target))
    path_to_target = os.path.join(element_path, target)
    assert os.path.exists(path_to_target)
    creation_mtime = os.path.getmtime(path_to_target)

    # Assert tracking is needed
    states = cli.get_element_states(project, [target])
    assert states[target] == "no reference"

    # Perform the track
    result = cli.run(project=project, args=["source", "track", target])
    result.assert_success()

    track1_mtime = os.path.getmtime(path_to_target)

    assert creation_mtime != track1_mtime

    # Now (needlessly) track again
    result = cli.run(project=project, args=["source", "track", target])
    result.assert_success()

    track2_mtime = os.path.getmtime(path_to_target)

    assert track1_mtime == track2_mtime
Code example #23
File: expiry.py Project: wjt/buildstream
def test_never_delete_required(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    # The artifact expiry logic relies on mtime changes, in real life second precision
    # should be enough for this to work almost all the time, but test cases happen very
    # quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip("Filesystem does not support subsecond mtime precision: {}".format(project))

    cli.configure({"cache": {"quota": 10000000}, "scheduler": {"fetchers": 1, "builders": 1}})

    # Create a linear build tree
    create_element_size("dep1.bst", project, element_path, [], 8000000)
    create_element_size("dep2.bst", project, element_path, ["dep1.bst"], 8000000)
    create_element_size("dep3.bst", project, element_path, ["dep2.bst"], 8000000)
    create_element_size("target.bst", project, element_path, ["dep3.bst"], 8000000)

    # Build dep1.bst, which should fit into the cache.
    res = cli.run(project=project, args=["build", "dep1.bst"])
    res.assert_success()

    # We try to build this pipeline, but it's too big for the
    # cache. Since all elements are required, the build should fail.
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_main_error(ErrorDomain.STREAM, None)
    res.assert_task_error(ErrorDomain.CAS, "cache-too-full")

    states = cli.get_element_states(project, ["target.bst"])
    assert states["dep1.bst"] == "cached"
    assert states["dep2.bst"] != "cached"
    assert states["dep3.bst"] != "cached"
    assert states["target.bst"] != "cached"
Code example #24
def test_never_delete_required(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    cli.configure({
        "cache": {
            "quota": 10000000
        },
        "scheduler": {
            "fetchers": 1,
            "builders": 1
        }
    })

    # Create a linear build tree
    create_element_size("dep1.bst", project, element_path, [], 8000000)
    create_element_size("dep2.bst", project, element_path, ["dep1.bst"],
                        8000000)
    create_element_size("dep3.bst", project, element_path, ["dep2.bst"],
                        8000000)
    create_element_size("target.bst", project, element_path, ["dep3.bst"],
                        8000000)

    # Build dep1.bst, which should fit into the cache.
    res = cli.run(project=project, args=["build", "dep1.bst"])
    res.assert_success()

    # We try to build this pipeline, but it's too big for the
    # cache. Since all elements are required, the build should fail.
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_main_error(ErrorDomain.STREAM, None)
    res.assert_task_error(ErrorDomain.CAS, "cache-too-full")

    states = cli.get_element_states(project, ["target.bst"])
    assert states["dep1.bst"] == "cached"
    assert states["dep2.bst"] != "cached"
    assert states["dep3.bst"] != "cached"
    assert states["target.bst"] != "cached"
Code example #25
File: expiry.py Project: wjt/buildstream
def test_artifact_expires(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    # The artifact expiry logic relies on mtime changes, in real life second precision
    # should be enough for this to work almost all the time, but test cases happen very
    # quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip("Filesystem does not support subsecond mtime precision: {}".format(project))

    cli.configure({"cache": {"quota": 10000000,}})

    # Create an element that uses almost the entire cache (an empty
    # ostree cache starts at roughly 10KiB, so we need a bit of a
    # buffer)
    create_element_size("target.bst", project, element_path, [], 6000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    assert cli.get_element_state(project, "target.bst") == "cached"

    # Our cache should now be almost full. Let's create another
    # artifact and see if we can cause buildstream to delete the old
    # one.
    create_element_size("target2.bst", project, element_path, [], 6000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project, ["target.bst", "target2.bst"])
    assert states["target.bst"] != "cached"
    assert states["target2.bst"] == "cached"
Code example #26
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # The artifact expiry logic relies on mtime changes, in real life second precision
    # should be enough for this to work almost all the time, but test cases happen very
    # quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip(
            "Filesystem does not support subsecond mtime precision: {}".format(
                project))

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"),
                               quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }],
            }})

        # Create and build 2 elements, one 5 MB and one 15 MB.
        create_element_size("element1.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        create_element_size("element2.bst", project, element_path, [],
                            int(15e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Ensure they are cached locally
        states = cli.get_element_states(project,
                                        ["element1.bst", "element2.bst"])
        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        # Ensure that they have been pushed to the cache
        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Pull element1 from the remote cache (this should update its mtime).
        # Use a separate local cache for this to ensure the complete element is pulled.
        cli2_path = os.path.join(str(tmpdir), "cli2")
        cli2 = Cli(cli2_path)
        result = cli2.run(project=project,
                          args=[
                              "artifact", "pull", "element1.bst",
                              "--artifact-remote", share.repo
                          ])
        result.assert_success()

        # Ensure element1 is cached locally
        assert cli2.get_element_state(project, "element1.bst") == "cached"

        wait_for_cache_granularity()

        # Create and build element3 (5 MB)
        create_element_size("element3.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Make sure it's cached locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure that element2 was deleted from the share and element1 remains
        assert_not_shared(cli, share, project, "element2.bst")
        assert_shared(cli, share, project, "element1.bst")
Code example #27
File: filter.py Project: wjt/buildstream
def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(str(datafiles), "files"))
    elements_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    input_name = "input.bst"
    input2_name = "input2.bst"

    project_config = {
        "name": "filter-track-test",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)

    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }

    input_file = os.path.join(elements_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    input2_config = dict(input_config)
    input2_file = os.path.join(elements_dir, input2_name)
    _yaml.roundtrip_dump(input2_config, input2_file)

    filter1_config = {
        "kind": "filter",
        "depends": [{
            "filename": input_name,
            "type": "build"
        }]
    }
    filter1_file = os.path.join(elements_dir, "filter1.bst")
    _yaml.roundtrip_dump(filter1_config, filter1_file)

    filter2_config = {
        "kind": "filter",
        "depends": [{
            "filename": input2_name,
            "type": "build"
        }]
    }
    filter2_file = os.path.join(elements_dir, "filter2.bst")
    _yaml.roundtrip_dump(filter2_config, filter2_file)

    # Assert that tracking is needed
    states = cli.get_element_states(project, [input_name, input2_name])
    assert states == {
        input_name: "no reference",
        input2_name: "no reference",
    }

    # Now track both filter elements, excluding the first input
    result = cli.run(project=project,
                     args=[
                         "source", "track", "filter1.bst", "filter2.bst",
                         "--except", input_name
                     ])
    result.assert_success()

    # Check that a ref was written only for the element that was not excluded
    new_input = _yaml.load(input_file, shortname=None)
    source_node = new_input.get_sequence("sources").mapping_at(0)
    assert "ref" not in source_node

    new_input2 = _yaml.load(input2_file, shortname=None)
    source_node2 = new_input2.get_sequence("sources").mapping_at(0)
    new_ref2 = source_node2.get_str("ref")
    assert new_ref2 == ref