Example #1
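These tests read as excerpts from BuildStream's artifact-cache test suite. They are not self-contained: the cli, tmpdir and datafiles arguments are pytest fixtures, and the helpers come from the suite's test utilities. A minimal sketch of the imports they presumably rely on (exact module paths differ between BuildStream versions):

import os

import pytest

from buildstream import _yaml
from buildstream._project import Project
from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

# Test-suite utilities (internal, not part of the public API; paths assumed)
from tests.testutils import (
    create_artifact_share,
    create_element_size,
    dummy_context,
    wait_for_cache_granularity,
)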
def test_push_after_pull(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Set up two artifact shares.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1, \
         create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:

        # Set the scene: share1 has the artifact, share2 does not.
        #
        cli.configure({
            "artifacts": {
                "servers": [
                    {"url": share1.repo, "push": True},
                ]
            }
        })

        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        cli.remove_artifact_from_cache(project, "target.bst")

        assert_shared(cli, share1, project, "target.bst")
        assert_not_shared(cli, share2, project, "target.bst")
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Now run the build again. Correct `bst build` behaviour is to download the
        # artifact from share1 but not push it back again.
        #
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert "target.bst" in result.get_pulled_elements()
        assert "target.bst" not in result.get_pushed_elements()

        # Delete the artifact locally again.
        cli.remove_artifact_from_cache(project, "target.bst")

        # Now we add share2 into the mix as a second push remote. This time,
        # `bst build` should push to share2 after pulling from share1.
        cli.configure({
            "artifacts": {
                "servers": [
                    {"url": share1.repo, "push": True},
                    {"url": share2.repo, "push": True},
                ]
            }
        })
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert "target.bst" in result.get_pulled_elements()
        assert "target.bst" in result.get_pushed_elements()
Example #2
def test_build_remote_option(caplog, cli, tmpdir, datafiles):
    project = str(datafiles)
    caplog.set_level(1)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser, \
         create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as shareproject, \
         create_artifact_share(os.path.join(str(tmpdir), "artifactshare3")) as sharecli:

        # Add shareproject repo url to project.conf
        with open(os.path.join(project, "project.conf"), "a") as projconf:
            projconf.write("artifacts:\n  url: {}\n  push: True".format(
                shareproject.repo))

        # Configure shareuser remote in user conf
        cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})

        # Push the artifacts to the shareuser and shareproject remotes.
        # Assert that shareuser and shareproject have the artifacts cached,
        # but sharecli doesn't, then delete the locally cached elements.
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        all_elements = ["target.bst", "import-bin.bst", "compose-all.bst"]
        for element_name in all_elements:
            assert element_name in result.get_pushed_elements()
            assert_not_shared(cli, sharecli, project, element_name)
            assert_shared(cli, shareuser, project, element_name)
            assert_shared(cli, shareproject, project, element_name)
            cli.remove_artifact_from_cache(project, element_name)

        # Now check that a build with --remote set to sharecli pulls nothing:
        # sharecli has no artifacts cached, and shareuser/shareproject should
        # be ignored. The artifacts are therefore built again and pushed to
        # sharecli.
        result = cli.run(
            project=project,
            args=["build", "--remote", sharecli.repo, "target.bst"])
        result.assert_success()
        for element_name in all_elements:
            assert element_name not in result.get_pulled_elements()
            assert_shared(cli, sharecli, project, element_name)
            cli.remove_artifact_from_cache(project, element_name)

        # Now check that a clean build with --remote set to sharecli pulls
        # everything from it, since it was provided on the command line and
        # is now populated.
        result = cli.run(
            project=project,
            args=["build", "--remote", sharecli.repo, "target.bst"])
        result.assert_success()
        for element_name in all_elements:
            assert cli.get_element_state(project, element_name) == "cached"
            assert element_name in result.get_pulled_elements()
        assert shareproject.repo not in result.stderr
        assert shareuser.repo not in result.stderr
        assert sharecli.repo in result.stderr
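Outside the test harness, the same override is simply bst build --remote <url> target.bst. The final three asserts confirm the precedence rule this test exercises: a remote passed on the command line is used instead of, not in addition to, the remotes from user and project configuration.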
Example #3
def test_pull(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure artifact share
        cache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
            "cachedir": cache_dir,
        }

        # Write out the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(
            cli.get_artifact_name(project_dir, "test", "target.bst"))

        # Delete the artifact locally
        cli.remove_artifact_from_cache(project_dir, "target.bst")

        # Assert that we are not cached locally anymore
        assert cli.get_element_state(project_dir, "target.bst") != "cached"

        with dummy_context(config=user_config_file) as context:
            # Load the project
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Assert that the element's artifact is **not** cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert not cli.artifact.is_cached(cache_dir, element, element_key)

            context.cachedir = cache_dir
            context.casdir = os.path.join(cache_dir, "cas")
            context.tmpdir = os.path.join(cache_dir, "tmp")

            # Load the project manually
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Create a local artifact cache handle
            artifactcache = context.artifactcache

            # Manually setup the CAS remote
            artifactcache.setup_remotes(use_config=True)

            assert artifactcache.has_push_remotes(plugin=element), \
                "No remote configured for element target.bst"
            assert artifactcache.pull(element, element_key), "Pull operation failed"

            assert cli.artifact.is_cached(cache_dir, element, element_key)
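dummy_context, used here and in Example #4, is another test utility; presumably it yields a Context loaded from the given config file, backed by a throwaway directory. A hypothetical sketch (Context is a BuildStream internal; the exact constructor and load signature may differ by version):

import tempfile
from contextlib import contextmanager

from buildstream._context import Context


@contextmanager
def dummy_context(*, config=None):
    with tempfile.TemporaryDirectory() as tmpdir:
        # Fall back to an empty config so Context.load() always has a file
        if not config:
            config = os.path.join(tmpdir, "empty.conf")
            _yaml.roundtrip_dump({}, config)
        context = Context()
        context.load(config=config)
        yield context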
Example #4
def test_pull_tree(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
            "cachedir": rootcache_dir,
        }

        # Write out the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(
            cli.get_artifact_name(project_dir, "test", "target.bst"))

        with dummy_context(config=user_config_file) as context:
            # Load the project and CAS cache
            project = Project(project_dir, context)
            project.ensure_fully_loaded()
            cas = context.get_cascache()

            # Assert that the element's artifact is cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert cli.artifact.is_cached(rootcache_dir, element, element_key)

            # Retrieve the Directory object from the cached artifact
            artifact_digest = cli.artifact.get_digest(rootcache_dir, element,
                                                      element_key)

            artifactcache = context.artifactcache
            # Manually setup the CAS remote
            artifactcache.setup_remotes(use_config=True)
            assert artifactcache.has_push_remotes()

            directory = remote_execution_pb2.Directory()

            with open(cas.objpath(artifact_digest), "rb") as f:
                directory.ParseFromString(f.read())

            # Build the Tree object while we are still cached
            tree = remote_execution_pb2.Tree()
            tree_maker(cas, tree, directory)

            # Push the Tree as a regular message
            tree_digest = artifactcache.push_message(project, tree)
            tree_hash, tree_size = tree_digest.hash, tree_digest.size_bytes
            assert tree_hash and tree_size

            # Now delete the artifact locally
            cli.remove_artifact_from_cache(project_dir, "target.bst")

            # Assert that we are not cached locally anymore
            artifactcache.close_grpc_channels()
            cas.close_grpc_channels()
            assert cli.get_element_state(project_dir, "target.bst") != "cached"

            tree_digest = remote_execution_pb2.Digest(hash=tree_hash,
                                                      size_bytes=tree_size)

            # Pull the artifact using the Tree digest we pushed earlier
            directory_digest = artifactcache.pull_tree(project, tree_digest)
            directory_hash, directory_size = directory_digest.hash, directory_digest.size_bytes

        # Directory size now zero with AaaP and stack element commit #1cbc5e63dc
        assert directory_hash and not directory_size

        directory_digest = remote_execution_pb2.Digest(
            hash=directory_hash, size_bytes=directory_size)

        # Ensure the entire Tree structure has been pulled
        assert os.path.exists(cas.objpath(directory_digest))
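tree_maker, called above, is defined elsewhere in the test module. Presumably it flattens a Directory hierarchy into a single REAPI Tree message by walking child digests through the CAS, along these lines (a sketch reusing the cas.objpath API shown above):

def tree_maker(cas, tree, directory):
    # The first call fills in the Tree root; every transitively referenced
    # Directory is appended to tree.children, so the whole hierarchy can be
    # reconstructed from this one message.
    if tree.root.ByteSize() == 0:
        tree.root.CopyFrom(directory)

    for directory_node in directory.directories:
        child_directory = tree.children.add()
        with open(cas.objpath(directory_node.digest), "rb") as f:
            child_directory.ParseFromString(f.read())
        tree_maker(cas, tree, child_directory)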
Example #5
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"),
                               quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure({
            "artifacts": {"url": share.repo, "push": True},
        })

        # Create and build 2 elements, one 5 MB and one 15 MB.
        create_element_size("element1.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        create_element_size("element2.bst", project, element_path, [],
                            int(15e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Ensure they are cached locally
        states = cli.get_element_states(project,
                                        ["element1.bst", "element2.bst"])
        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        # Ensure that they have been pushed to the cache
        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Remove element1 from the local cache
        cli.remove_artifact_from_cache(project, "element1.bst")
        assert cli.get_element_state(project, "element1.bst") != "cached"

        # Pull element1 from the remote cache (this should update its mtime)
        result = cli.run(
            project=project,
            args=["artifact", "pull", "element1.bst", "--remote", share.repo])
        result.assert_success()

        # Ensure element1 is cached locally
        assert cli.get_element_state(project, "element1.bst") == "cached"

        wait_for_cache_granularity()

        # Create and build element3 (5 MB)
        create_element_size("element3.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Make sure it's cached locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure that element2 was deleted from the share and element1 remains
        assert_not_shared(cli, share, project, "element2.bst")
        assert_shared(cli, share, project, "element1.bst")
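The quota arithmetic makes the expected eviction unambiguous: element1 (5 MB) and element2 (15 MB) fill 20 MB of the 22 MB share, so pushing element3 (5 MB) must expire something. Since the pull of element1 refreshed its mtime (wait_for_cache_granularity presumably just sleeps long enough for timestamps to differ), element2 is the least recently used artifact and is the one the share deletes.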