Example #1
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        # Create and build 2 elements, one 5 MB and one 15 MB.
        create_element_size("element1.bst", project, element_path, [], int(5e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        create_element_size("element2.bst", project, element_path, [], int(15e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Ensure they are cached locally
        states = cli.get_element_states(project, ["element1.bst", "element2.bst"])
        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        # Ensure that they have been pushed to the cache
        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Pull element1 from the remote cache (this should update its mtime).
        # Use a separate local cache for this to ensure the complete element is pulled.
        cli2_path = os.path.join(str(tmpdir), "cli2")
        os.mkdir(cli2_path)
        cli2 = Cli(cli2_path)
        result = cli2.run(project=project, args=["artifact", "pull", "element1.bst", "--remote", share.repo])
        result.assert_success()

        # Ensure element1 is cached locally
        assert cli2.get_element_state(project, "element1.bst") == "cached"

        wait_for_cache_granularity()

        # Create and build element3 (5 MB)
        create_element_size("element3.bst", project, element_path, [], int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Make sure it's cached locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure that element2 was deleted from the share and element1 remains
        assert_not_shared(cli, share, project, "element2.bst")
        assert_shared(cli, share, project, "element1.bst")
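
The quota arithmetic is what drives this test: element1 (5 MB) and element2 (15 MB) fill 20 MB of the 22 MB share, so pushing the 5 MB element3 overflows the quota and forces an eviction. Because the pull refreshed element1's mtime, element2 is the least recently used artifact and is the one that expires. Helpers such as create_element_size, assert_shared and wait_for_cache_granularity come from the surrounding test suite's utilities. A minimal sketch of this kind of mtime-based LRU expiry, written purely for illustration (it is not BuildStream's server code, and the flat one-file-per-artifact layout is an assumption):

import os

def expire_lru(artifact_dir, quota_bytes):
    # Collect each artifact's size and last-used time (mtime).
    entries = []
    for name in os.listdir(artifact_dir):
        path = os.path.join(artifact_dir, name)
        entries.append((path, os.stat(path)))
    total = sum(st.st_size for _, st in entries)

    # Evict least recently used artifacts until we fit under quota.
    for path, st in sorted(entries, key=lambda e: e[1].st_mtime):
        if total <= quota_bytes:
            break
        os.unlink(path)
        total -= st.st_size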
Example #2
def test_expiry_order(cli, datafiles):
    project = str(datafiles)
    element_path = "elements"
    checkout = os.path.join(project, "workspace")

    cli.configure({"cache": {"quota": 9000000}})

    # Create an artifact
    create_element_size("dep.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "dep.bst"])
    res.assert_success()

    # Create another artifact
    create_element_size("unrelated.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "unrelated.bst"])
    res.assert_success()

    # And build something else
    create_element_size("target.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "target.bst"])
    res.assert_success()

    create_element_size("target2.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "target2.bst"])
    res.assert_success()

    wait_for_cache_granularity()

    # Now extract dep.bst
    res = cli.run(
        project=project,
        args=["artifact", "checkout", "dep.bst", "--directory", checkout])
    res.assert_success()

    # Finally, build something that will cause the cache to overflow
    create_element_size("expire.bst", project, element_path, [], 2000000)
    res = cli.run(project=project, args=["build", "expire.bst"])
    res.assert_success()

    # Although dep.bst was the first element to be created, it should
    # not have been removed, since the checkout above refreshed its mtime.
    # Note that BuildStream will reduce the cache to 50% of its original
    # size, so multiple elements are removed.
    check_elements = [
        "unrelated.bst", "target.bst", "target2.bst", "dep.bst", "expire.bst"
    ]
    states = cli.get_element_states(project, check_elements)
    assert tuple(states[element] for element in check_elements) == (
        "buildable",
        "buildable",
        "buildable",
        "cached",
        "cached",
    )
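
The numbers matter here too: five 2 MB artifacts cannot fit in the 9 MB quota, and because BuildStream trims the cache well below the quota rather than stopping just under it, several artifacts expire at once; the checkout refreshed dep.bst's mtime, so the three untouched elements are the ones evicted. The wait_for_cache_granularity() calls keep those mtime comparisons meaningful. A minimal sketch of what such a helper could look like, assuming it only needs to outwait filesystem timestamp granularity (the real test utility may differ):

import time

def wait_for_cache_granularity():
    # Sleep past the coarsest mtime resolution we expect (a full
    # second on some filesystems), so that files touched before and
    # after this call get distinct timestamps.
    time.sleep(1.1)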
Example #3
def test_incremental_partial(cli, datafiles):
    project = str(datafiles)
    workspace = os.path.join(cli.directory, "workspace")
    element_path = os.path.join(project, "elements")
    element_name = "workspace/incremental.bst"

    element = {
        "kind": "manual",
        "depends": [{
            "filename": "base.bst",
            "type": "build"
        }],
        "sources": [{
            "kind": "local",
            "path": "files/workspace-partial"
        }],
        "config": {
            "build-commands": ["make random", "make copy1", "make copy2"]
        },
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # We open a workspace on the above element
    res = cli.run(
        project=project,
        args=["workspace", "open", "--directory", workspace, element_name])
    res.assert_success()

    # Initial (non-incremental) build of the workspace
    res = cli.run(project=project, args=["build", element_name])
    res.assert_success()

    # Save the random hash
    random_hash = get_buildtree_file_contents(cli, project, element_name,
                                              "random")

    # Verify the expected output files of the initial build
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy1") == "1"
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy2") == "1"

    wait_for_cache_granularity()

    # Delete source1 and replace source2 file contents with '2'
    os.unlink(os.path.join(workspace, "source1"))
    with open(os.path.join(workspace, "source2"), "w", encoding="utf-8") as f:
        f.write("2")

    # Perform incremental build of the workspace
    # This should fail because of the missing source1 file.
    res = cli.run(project=project, args=["build", element_name])
    res.assert_main_error(ErrorDomain.STREAM, None)

    wait_for_cache_granularity()

    # Recreate source1 file
    with open(os.path.join(workspace, "source1"), "w", encoding="utf-8") as f:
        f.write("2")

    # Perform incremental build of the workspace
    res = cli.run(project=project, args=["build", element_name])
    res.assert_success()

    # Verify that this was an incremental build by comparing the random hash
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "random") == random_hash

    # Verify that both files got rebuilt
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy1") == "2"
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy2") == "2"
Example #4
def test_incremental(cli, datafiles):
    project = str(datafiles)
    workspace = os.path.join(cli.directory, "workspace")
    element_path = os.path.join(project, "elements")
    element_name = "workspace/incremental.bst"

    element = {
        "kind": "manual",
        "depends": [{
            "filename": "base.bst",
            "type": "build"
        }],
        "sources": [{
            "kind": "local",
            "path": "files/workspace-incremental"
        }],
        "config": {
            "build-commands": ["make"]
        },
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # We open a workspace on the above element
    res = cli.run(
        project=project,
        args=["workspace", "open", "--directory", workspace, element_name])
    res.assert_success()

    # Initial (non-incremental) build of the workspace
    res = cli.run(project=project, args=["build", element_name])
    res.assert_success()

    # Save the random hash
    random_hash = get_buildtree_file_contents(cli, project, element_name,
                                              "random")

    # Verify the expected output file of the initial build
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy") == "1"

    wait_for_cache_granularity()

    # Replace source file contents with '2'
    with open(os.path.join(workspace, "source"), "w", encoding="utf-8") as f:
        f.write("2")

    # Perform incremental build of the workspace
    res = cli.run(project=project, args=["build", element_name])
    res.assert_success()

    # Verify that this was an incremental build by comparing the random hash
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "random") == random_hash

    # Verify that the output file matches the new source file
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy") == "2"

    wait_for_cache_granularity()

    # Replace source file contents with '3', but set an old mtime so
    # that `make` will not pick up the change
    with open(os.path.join(workspace, "source"), "w", encoding="utf-8") as f:
        f.write("3")
    os.utime(os.path.join(workspace, "source"),
             (BST_ARBITRARY_TIMESTAMP, BST_ARBITRARY_TIMESTAMP))

    # Perform incremental build of the workspace
    res = cli.run(project=project, args=["build", element_name])
    res.assert_success()

    # Verify that this was an incremental build by comparing the random hash
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "random") == random_hash

    # Verify that the output file still matches the previous content '2'
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy") == "2"
Example #5
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # The artifact expiry logic relies on mtime changes. In real life, second
    # precision should be enough for this to work almost all the time, but test
    # cases happen very quickly, resulting in all artifacts having the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip(
            "Filesystem does not support subsecond mtime precision: {}".format(
                project))

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"),
                               quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }],
            }})

        # Create and build 2 elements, one 5 MB and one 15 MB.
        create_element_size("element1.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        create_element_size("element2.bst", project, element_path, [],
                            int(15e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Ensure they are cached locally
        states = cli.get_element_states(project,
                                        ["element1.bst", "element2.bst"])
        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        # Ensure that they have been pushed to the cache
        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Pull element1 from the remote cache (this should update its mtime).
        # Use a separate local cache for this to ensure the complete element is pulled.
        cli2_path = os.path.join(str(tmpdir), "cli2")
        cli2 = Cli(cli2_path)
        result = cli2.run(project=project,
                          args=[
                              "artifact", "pull", "element1.bst",
                              "--artifact-remote", share.repo
                          ])
        result.assert_success()

        # Ensure element1 is cached locally
        assert cli2.get_element_state(project, "element1.bst") == "cached"

        wait_for_cache_granularity()

        # Create and build element3 (5 MB)
        create_element_size("element3.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Make sure it's cached locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure that element2 was deleted from the share and element1 remains
        assert_not_shared(cli, share, project, "element2.bst")
        assert_shared(cli, share, project, "element1.bst")
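
This last example is the same scenario as Example #1 ported to a newer BuildStream configuration schema (an artifacts "servers" list and the --artifact-remote pull option), with one extra guard: it skips outright on filesystems without subsecond mtime precision, since back-to-back builds would otherwise all share one timestamp and the LRU ordering would be meaningless. A minimal sketch of what such a probe could look like, offered as an assumption rather than the suite's real have_subsecond_mtime():

import os
import tempfile

def have_subsecond_mtime(directory):
    # Set an mtime with a non-zero nanosecond component on a scratch
    # file and check whether the filesystem preserves it.
    fd, path = tempfile.mkstemp(dir=directory)
    os.close(fd)
    try:
        probe_ns = 1234567890123456789  # carries a .123456789s fraction
        os.utime(path, ns=(probe_ns, probe_ns))
        return os.stat(path).st_mtime_ns == probe_ns
    finally:
        os.unlink(path)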