Example #1
def test_artifact_too_large(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Mock a file system with 5 MB total space
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"),
                               quota=int(5e6)) as share:

        # Configure bst to push to the remote cache
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }],
            }})

        # Create and push a 3MB element
        create_element_size("small_element.bst", project, element_path, [],
                            int(3e6))
        result = cli.run(project=project, args=["build", "small_element.bst"])
        result.assert_success()

        # Create and try to push a 6MB element.
        create_element_size("large_element.bst", project, element_path, [],
                            int(6e6))
        result = cli.run(project=project, args=["build", "large_element.bst"])
        # This should fail; the server will refuse to store the CAS
        # blobs for the artifact, and then fail to find the files for
        # the uploaded artifact proto.
        #
        # FIXME: This should be extremely uncommon in practice, since
        # the artifact needs to be at least half the cache size for
        # this to happen. Nonetheless, a clearer error message would
        # be nice (perhaps we should just disallow uploading artifacts
        # that large).
        result.assert_main_error(ErrorDomain.STREAM, None)

        # Ensure that the small artifact is still in the share
        states = cli.get_element_states(
            project, ["small_element.bst", "large_element.bst"])
        assert states["small_element.bst"] == "cached"
        assert_shared(cli, share, project, "small_element.bst")

        # Ensure that the large artifact is cached locally but NOT remotely
        assert states["large_element.bst"] == "cached"
        assert_not_shared(cli, share, project, "large_element.bst")
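These tests are excerpted from BuildStream's test suite and are shown without their imports. A runnable file would need roughly the following; exact module paths vary between BuildStream versions (for example, ErrorDomain has lived in both buildstream._exceptions and buildstream.exceptions), so treat the paths below as assumptions.

import os
import shutil

import pytest

from buildstream import _yaml
from buildstream.exceptions import ErrorDomain
from buildstream.testing.runcli import Cli  # `cli` is the pytest fixture form of this class

# Helpers from BuildStream's own test utilities; module paths are assumptions
from buildstream.testing._utils.site import have_subsecond_mtime
from tests.testutils import (
    assert_not_shared,
    assert_shared,
    create_artifact_share,
    create_element_size,
    wait_for_cache_granularity,
)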
Example #2
def test_source_cache_empty_artifact_cache(cli, tmpdir, datafiles):
    cachedir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)
    element_path = os.path.join(project_dir, "elements")

    with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cachedir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        create_element_size("repo.bst", project_dir, element_path, [], 10000)

        res = cli.run(project=project_dir, args=["source", "push", "repo.bst"])
        res.assert_success()
        assert "Pushed source " in res.stderr

        # Delete the local sources and check that the build pulls the
        # sources, builds, and then pushes the artifacts
        shutil.rmtree(os.path.join(cachedir, "cas"))
        shutil.rmtree(os.path.join(cachedir, "sources"))

        res = cli.run(project=project_dir, args=["build", "repo.bst"])
        res.assert_success()
        assert "Remote ({}) does not have artifact ".format(
            share.repo) in res.stderr
        assert "Pulled source" in res.stderr
        assert "Caching artifact" in res.stderr
        assert "Pushed artifact" in res.stderr
Example #3
def test_cleanup_first(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    cli.configure({"cache": {
        "quota": 10000000,
    }})

    # Create an element that uses almost the entire cache (an empty
    # ostree cache starts at about 10KiB, so we need a bit of a
    # buffer)
    create_element_size("target.bst", project, element_path, [], 8000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    assert cli.get_element_state(project, "target.bst") == "cached"

    # Now configure with a smaller quota, creating a situation
    # where the cache must be cleaned up before building anything else.
    #
    # Fix the fetchers and builders just to ensure a predictable
    # sequence of events (although it does not affect this test)
    cli.configure({
        "cache": {
            "quota": 5000000,
        },
        "scheduler": {
            "fetchers": 1,
            "builders": 1
        },
    })

    # Our cache is now more than full; BuildStream must make room
    # for the new artifact before it can be built.
    create_element_size("target2.bst", project, element_path, [], 4000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project, ["target.bst", "target2.bst"])
    assert states["target.bst"] != "cached"
    assert states["target2.bst"] == "cached"
Example #4
def test_artifact_expires(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    # The artifact expiry logic relies on mtime changes; in real life, second
    # precision should be enough for this to work almost all the time, but test
    # cases run very quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip(
            "Filesystem does not support subsecond mtime precision: {}".format(
                project))

    cli.configure({"cache": {
        "quota": 10000000,
    }})

    # Create an element that uses almost the entire cache (an empty
    # ostree cache starts at about 10KiB, so we need a bit of a
    # buffer)
    create_element_size("target.bst", project, element_path, [], 6000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    assert cli.get_element_state(project, "target.bst") == "cached"

    # Our cache should now be almost full. Let's create another
    # artifact and see if we can cause BuildStream to delete the old
    # one.
    create_element_size("target2.bst", project, element_path, [], 6000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project, ["target.bst", "target2.bst"])
    assert states["target.bst"] != "cached"
    assert states["target2.bst"] == "cached"
Example #5
def test_never_delete_required(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    # The artifact expiry logic relies on mtime changes; in real life, second
    # precision should be enough for this to work almost all the time, but test
    # cases run very quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip(
            "Filesystem does not support subsecond mtime precision: {}".format(
                project))

    cli.configure({
        "cache": {
            "quota": 10000000
        },
        "scheduler": {
            "fetchers": 1,
            "builders": 1
        }
    })

    # Create a linear build tree
    create_element_size("dep1.bst", project, element_path, [], 8000000)
    create_element_size("dep2.bst", project, element_path, ["dep1.bst"],
                        8000000)
    create_element_size("dep3.bst", project, element_path, ["dep2.bst"],
                        8000000)
    create_element_size("target.bst", project, element_path, ["dep3.bst"],
                        8000000)

    # Build dep1.bst, which should fit into the cache.
    res = cli.run(project=project, args=["build", "dep1.bst"])
    res.assert_success()

    # We try to build this pipeline, but it's too big for the
    # cache. Since all elements are required, the build should fail.
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_main_error(ErrorDomain.STREAM, None)
    res.assert_task_error(ErrorDomain.CAS, "cache-too-full")

    states = cli.get_element_states(project, ["target.bst"])
    assert states["dep1.bst"] == "cached"
    assert states["dep2.bst"] != "cached"
    assert states["dep3.bst"] != "cached"
    assert states["target.bst"] != "cached"
Example #6
def test_source_artifact_caches(cli, tmpdir, datafiles):
    cachedir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)
    element_path = os.path.join(project_dir, "elements")

    with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "url": share.repo,
                "push": True,
            },
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
            "cachedir": cachedir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        create_element_size("repo.bst", project_dir, element_path, [], 10000)

        res = cli.run(project=project_dir, args=["build", "repo.bst"])
        res.assert_success()
        assert "Pushed source " in res.stderr
        assert "Pushed artifact " in res.stderr

        # Delete the local sources and artifacts and check that the build pulls them
        shutil.rmtree(os.path.join(cachedir, "cas"))
        shutil.rmtree(os.path.join(cachedir, "sources"))

        # This should just fetch the artifacts
        res = cli.run(project=project_dir, args=["build", "repo.bst"])
        res.assert_success()
        assert "Pulled artifact " in res.stderr
        assert "Pulled source " not in res.stderr
Example #7
def test_artifact_too_large(cli, datafiles, size):
    project = str(datafiles)
    element_path = "elements"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    # The artifact expiry logic relies on mtime changes; in real life, second
    # precision should be enough for this to work almost all the time, but test
    # cases run very quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip("Filesystem does not support subsecond mtime precision: {}".format(project))

    cli.configure({"cache": {"quota": 400000}})

    # Create an element whose artifact is too large
    create_element_size("target.bst", project, element_path, [], size)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_main_error(ErrorDomain.STREAM, None)
    res.assert_task_error(ErrorDomain.CAS, "cache-too-full")
Example #8
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure(
            {"artifacts": {"url": share.repo, "push": True},}
        )

        # Create and build 2 elements, one 5 MB and one 15 MB.
        create_element_size("element1.bst", project, element_path, [], int(5e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        create_element_size("element2.bst", project, element_path, [], int(15e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Ensure they are cached locally
        states = cli.get_element_states(project, ["element1.bst", "element2.bst"])
        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        # Ensure that they have been pushed to the cache
        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Pull element1 from the remote cache (this should update its mtime).
        # Use a separate local cache for this to ensure the complete element is pulled.
        cli2_path = os.path.join(str(tmpdir), "cli2")
        os.mkdir(cli2_path)
        cli2 = Cli(cli2_path)
        result = cli2.run(project=project, args=["artifact", "pull", "element1.bst", "--remote", share.repo])
        result.assert_success()

        # Ensure element1 is cached locally
        assert cli2.get_element_state(project, "element1.bst") == "cached"

        wait_for_cache_granularity()

        # Create and build element3 (5 MB)
        create_element_size("element3.bst", project, element_path, [], int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Make sure it's cached locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure that element2 was deleted from the share and element1 remains
        assert_not_shared(cli, share, project, "element2.bst")
        assert_shared(cli, share, project, "element1.bst")
Example #9
def test_artifact_expires(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote artifact cache) in tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"),
                               quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure(
            {"artifacts": {
                "servers": [
                    {
                        "url": share.repo,
                        "push": True
                    },
                ]
            }})

        # Create and build an element of 15 MB
        create_element_size("element1.bst", project, element_path, [],
                            int(15e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        # Create and build an element of 5 MB
        create_element_size("element2.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Check that elements 1 and 2 are cached both locally and remotely
        states = cli.get_element_states(project,
                                        ["element1.bst", "element2.bst"])

        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Create and build another element of 5 MB (this will exceed the share's 22 MB quota)
        create_element_size("element3.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Ensure it is cached both locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure element1 has been removed from the share
        assert_not_shared(cli, share, project, "element1.bst")
        # Ensure that element2 remains
        assert_shared(cli, share, project, "element2.bst")
Example #10
def test_keep_dependencies(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    # The artifact expiry logic relies on mtime changes; in real life, second
    # precision should be enough for this to work almost all the time, but test
    # cases run very quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip(
            "Filesystem does not support subsecond mtime precision: {}".format(
                project))

    cli.configure({"cache": {"quota": 10000000}})

    # Create a pretty big dependency
    create_element_size("dependency.bst", project, element_path, [], 5000000)
    res = cli.run(project=project, args=["build", "dependency.bst"])
    res.assert_success()

    # Now create some other unrelated artifact
    create_element_size("unrelated.bst", project, element_path, [], 4000000)
    res = cli.run(project=project, args=["build", "unrelated.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project,
                                    ["dependency.bst", "unrelated.bst"])
    assert states["dependency.bst"] == "cached"
    assert states["unrelated.bst"] == "cached"

    # We try to build an element which depends on the LRU artifact,
    # and could therefore fail if we didn't make sure dependencies
    # aren't removed.
    #
    # Since some artifact caches may implement weak cache keys by
    # duplicating artifacts (bad!) we need to make this equal in size
    # or smaller than half the size of its dependencies.
    #
    create_element_size("target.bst", project, element_path,
                        ["dependency.bst"], 2000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    states = cli.get_element_states(project, ["target.bst", "unrelated.bst"])
    assert states["target.bst"] == "cached"
    assert states["dependency.bst"] == "cached"
    assert states["unrelated.bst"] != "cached"
Example #11
def test_never_delete_required(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    cli.configure({
        "cache": {
            "quota": 10000000
        },
        "scheduler": {
            "fetchers": 1,
            "builders": 1
        }
    })

    # Create a linear build tree
    create_element_size("dep1.bst", project, element_path, [], 8000000)
    create_element_size("dep2.bst", project, element_path, ["dep1.bst"],
                        8000000)
    create_element_size("dep3.bst", project, element_path, ["dep2.bst"],
                        8000000)
    create_element_size("target.bst", project, element_path, ["dep3.bst"],
                        8000000)

    # Build dep1.bst, which should fit into the cache.
    res = cli.run(project=project, args=["build", "dep1.bst"])
    res.assert_success()

    # We try to build this pipeline, but it's too big for the
    # cache. Since all elements are required, the build should fail.
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_main_error(ErrorDomain.STREAM, None)
    res.assert_task_error(ErrorDomain.CAS, "cache-too-full")

    states = cli.get_element_states(project, ["target.bst"])
    assert states["dep1.bst"] == "cached"
    assert states["dep2.bst"] != "cached"
    assert states["dep3.bst"] != "cached"
    assert states["target.bst"] != "cached"
Example #12
def test_keep_dependencies(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"

    cli.configure({"cache": {"quota": 10000000}})

    # Create a pretty big dependency
    create_element_size("dependency.bst", project, element_path, [], 5000000)
    res = cli.run(project=project, args=["build", "dependency.bst"])
    res.assert_success()

    # Now create some other unrelated artifact
    create_element_size("unrelated.bst", project, element_path, [], 4000000)
    res = cli.run(project=project, args=["build", "unrelated.bst"])
    res.assert_success()

    # Check that the correct element remains in the cache
    states = cli.get_element_states(project,
                                    ["dependency.bst", "unrelated.bst"])
    assert states["dependency.bst"] == "cached"
    assert states["unrelated.bst"] == "cached"

    # We try to build an element which depends on the LRU artifact,
    # and could therefore fail if we didn't make sure dependencies
    # aren't removed.
    #
    # Since some artifact caches may implement weak cache keys by
    # duplicating artifacts (bad!) we need to make this equal in size
    # or smaller than half the size of its dependencies.
    #
    create_element_size("target.bst", project, element_path,
                        ["dependency.bst"], 2000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    states = cli.get_element_states(project, ["target.bst", "unrelated.bst"])
    assert states["target.bst"] == "cached"
    assert states["dependency.bst"] == "cached"
    assert states["unrelated.bst"] != "cached"
Example #13
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Skip this test if we do not have support for subsecond precision mtimes
    #
    # The artifact expiry logic relies on mtime changes; in real life, second
    # precision should be enough for this to work almost all the time, but test
    # cases run very quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip(
            "Filesystem does not support subsecond mtime precision: {}".format(
                project))

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"),
                               quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }],
            }})

        # Create and build 2 elements, one 5 MB and one 15 MB.
        create_element_size("element1.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        create_element_size("element2.bst", project, element_path, [],
                            int(15e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Ensure they are cached locally
        states = cli.get_element_states(project,
                                        ["element1.bst", "element2.bst"])
        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        # Ensure that they have been pushed to the cache
        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Pull element1 from the remote cache (this should update its mtime).
        # Use a separate local cache for this to ensure the complete element is pulled.
        cli2_path = os.path.join(str(tmpdir), "cli2")
        cli2 = Cli(cli2_path)
        result = cli2.run(project=project,
                          args=[
                              "artifact", "pull", "element1.bst",
                              "--artifact-remote", share.repo
                          ])
        result.assert_success()

        # Ensure element1 is cached locally
        assert cli2.get_element_state(project, "element1.bst") == "cached"

        wait_for_cache_granularity()

        # Create and build element3 (5 MB)
        create_element_size("element3.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Make sure it's cached locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure that element2 was deleted from the share and element1 remains
        assert_not_shared(cli, share, project, "element2.bst")
        assert_shared(cli, share, project, "element1.bst")