Example n. 1
0
def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
    # `bst artifact show` must report an element as available when its
    # artifact exists only in a remote share (local copy deleted).
    project_dir = str(datafiles)
    target = "target.bst"

    # Local cache directory plus a remote share to push into
    cache_dir = os.path.join(str(tmpdir), "artifacts")
    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as share:
        cli.configure({
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
            "cachedir": cache_dir,
        })

        # Build the element (pushes to the share as well)
        cli.run(project=project_dir, args=["build", target]).assert_success()

        # Confirm the artifact actually landed in the share
        artifact_ref = cli.get_artifact_name(project_dir, "test", target)
        assert share.get_artifact(artifact_ref)

        # Remove the artifact from the local cache only
        deletion = cli.run(project=project_dir,
                           args=["artifact", "delete", target])
        deletion.assert_success()
        assert cli.get_element_state(project_dir, target) != "cached"

        # The element should now be reported as available (remotely)
        shown = cli.run(project=project_dir, args=["artifact", "show", target])
        shown.assert_success()
        assert "available {}".format(target) in shown.output
Example n. 2
0
def test_push_split(cli, tmpdir, datafiles):
    # Pushing to a split remote stores the artifact proto in the index
    # share and the CAS files in the storage share.
    project_dir = str(datafiles)

    # Build first, with no artifact cache configured at all
    build_result = cli.run(project=project_dir, args=["build", "target.bst"])
    build_result.assert_success()

    # The element must now be cached locally
    assert cli.get_element_state(project_dir, "target.bst") == "cached"

    indexshare = os.path.join(str(tmpdir), "indexshare")
    storageshare = os.path.join(str(tmpdir), "storageshare")

    # Bring up a split artifact cache: separate index and storage services
    with create_split_share(indexshare, storageshare) as (index, storage):
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        index_server = {"url": index.repo, "push": True, "type": "index"}
        storage_server = {"url": storage.repo, "push": True, "type": "storage"}
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {"servers": [index_server, storage_server]},
            "cachedir": rootcache_dir,
        }
        config_path = str(tmpdir.join("buildstream.conf"))
        _yaml.roundtrip_dump(user_config, file=config_path)

        # Push, then check the proto is in the index and the files in storage
        element_key = _push(cli, rootcache_dir, project_dir, config_path,
                            "target.bst")
        artifact_ref = cli.get_artifact_name(project_dir, "test",
                                             "target.bst",
                                             cache_key=element_key)
        proto = index.get_artifact_proto(artifact_ref)
        assert storage.get_cas_files(proto) is not None
Example n. 3
0
def _test_pull_missing_blob(cli, project, index, storage):
    # Helper: when remote blobs disappear (simulated concurrent artifact
    # expiry) while refs remain, a rebuild must not pull anything.

    # Build the target element, which also pushes it to the remote
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()
    assert cli.get_element_state(project, "target.bst") == "cached"

    all_elements = [
        "target.bst", "import-bin.bst", "import-dev.bst", "compose-all.bst"
    ]

    # Every element must be fully present remotely: proto in the index,
    # files in the storage share.
    for element_name in all_elements:
        artifact_name = cli.get_artifact_name(project, "test", element_name)
        artifact_proto = index.get_artifact_proto(artifact_name)
        assert artifact_proto
        assert storage.get_cas_files(artifact_proto)

    # Wipe the user's local caches so everything would need re-downloading
    shutil.rmtree(os.path.join(cli.directory, "cas"))
    shutil.rmtree(os.path.join(cli.directory, "artifacts"))

    # Nothing may remain cached locally
    for element_name in all_elements:
        assert cli.get_element_state(project, element_name) != "cached"

    # Drop the remote blobs but keep the artifact refs, mimicking
    # concurrent artifact expiry on the server side.
    shutil.rmtree(os.path.join(storage.repodir, "cas", "objects"))

    # Rebuild: the broken remote must not contribute any pulls
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()
    assert not result.get_pulled_elements()
Example n. 4
0
def test_artifact_delete_pulled_artifact_without_buildtree(
        cli, tmpdir, datafiles):
    # Deleting an artifact that was pulled WITHOUT its buildtree must still
    # succeed, even though the artifact metadata points at a buildtree digest.
    project_dir = str(datafiles)
    target = "target.bst"

    # Local cache directory plus a remote share to push into
    cache_dir = os.path.join(str(tmpdir), "artifacts")
    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as share:
        cli.configure({
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
            "cachedir": cache_dir,
        })

        # Build the element (pushes to the share as well)
        cli.run(project=project_dir, args=["build", target]).assert_success()

        # Confirm the artifact actually landed in the share
        artifact_ref = cli.get_artifact_name(project_dir, "test", target)
        assert share.get_artifact(artifact_ref)

        # Delete locally, then pull it back (pull omits the buildtree)
        deletion = cli.run(project=project_dir,
                           args=["artifact", "delete", target])
        deletion.assert_success()
        assert cli.get_element_state(project_dir, target) != "cached"
        pull = cli.run(project=project_dir, args=["artifact", "pull", target])
        pull.assert_success()
        assert cli.get_element_state(project_dir, target) == "cached"

        # Delete again: the metadata still references a buildtree digest,
        # so deletion must cope with the buildtree being absent locally.
        deletion = cli.run(project=project_dir,
                           args=["artifact", "delete", target])
        deletion.assert_success()
        assert cli.get_element_state(project_dir, target) != "cached"
Example n. 5
0
def test_push(cli, tmpdir, datafiles):
    # A locally-built artifact pushed via the _push helper must be
    # retrievable from the share under its full artifact name.
    project_dir = str(datafiles)

    # Build without any artifact cache configured
    build_result = cli.run(project=project_dir, args=["build", "target.bst"])
    build_result.assert_success()

    # The element must now be cached locally
    assert cli.get_element_state(project_dir, "target.bst") == "cached"

    # Bring up an artifact share and describe it in a user config file
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
            "cachedir": rootcache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)

        # Push, then confirm the artifact is addressable in the share
        element_key = _push(cli, rootcache_dir, project_dir, user_config_file,
                            "target.bst")
        artifact_ref = cli.get_artifact_name(project_dir, "test",
                                             "target.bst",
                                             cache_key=element_key)
        assert share.get_artifact(artifact_ref)
Example n. 6
0
def create_built_artifact_share(tmpdir, cache_buildtrees, integration_cache):
    # Build 'build-shell/buildtree.bst' into a brand-new ArtifactShare and
    # return the populated share; the caller owns (and must close) it.
    element_name = "build-shell/buildtree.bst"

    # Work on a private copy of the data files inside the temp directory,
    # replicating what the datafiles fixture would normally provide.
    project = os.path.join(tmpdir, "project")
    shutil.copytree(DATA_DIR, project)

    # Host the share out of the same temp directory
    share = ArtifactShare(os.path.join(tmpdir, "artifactcache"))

    # Dedicated Cli instance used only to build and populate the share
    cli = Cli(os.path.join(tmpdir, "cache"))
    cli.configure({
        "artifacts": {"servers": [{"url": share.repo, "push": True}]},
        "sourcedir": integration_cache.sources,
    })

    # Optionally force build trees to be cached alongside the artifact
    build_args = ["build", element_name]
    if cache_buildtrees:
        build_args = ["--cache-buildtrees", "always"] + build_args

    # Build (this also pushes to the share)
    cli.run(project=project, args=build_args).assert_success()

    # Sanity-check: cached locally and present in the share
    assert cli.get_element_state(project, element_name) == "cached"
    artifact_name = cli.get_artifact_name(project, "test", element_name)
    assert share.get_artifact(artifact_name)

    return share
Example n. 7
0
def test_pull(cli, tmpdir, datafiles, deps, expect_cached, with_project):
    # Pull an artifact by its artifact name with various --deps values:
    # "all" and "run" are rejected, otherwise the expected artifact refs
    # must reappear in the local cache.
    project = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # Explicit local cache location so we can poke at it directly later
        local_cache = os.path.join(str(tmpdir), "cache")
        cli.configure({
            "cachedir": local_cache,
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
        })

        # Build (this also pushes to the share)
        cli.run(project=project, args=["build", "target.bst"]).assert_success()

        # Cached locally and shared remotely; keep the artifact name
        # around for the pull below.
        assert cli.get_element_state(project, "target.bst") == "cached"
        artifact_name = cli.get_artifact_name(project, "test", "target.bst")
        assert share.get_artifact(artifact_name)

        # Translate the expected element names into artifact names
        expect_cached_artifacts = [
            cli.get_artifact_name(project, "test", element_name)
            for element_name in expect_cached
        ]

        # Throw the local cache away entirely
        shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "cas")))
        shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "artifacts")))
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Optionally exercise the "no project.conf" code path
        if not with_project:
            os.remove(os.path.join(project, "project.conf"))

        # Run the pull under test
        result = cli.run(
            project=project,
            args=["artifact", "pull", "--deps", deps, artifact_name])

        if deps in ["all", "run"]:
            # Artifact-name pulls cannot follow these dependency scopes
            result.assert_main_error(ErrorDomain.STREAM, "deps-not-supported")
        else:
            result.assert_success()

        # Check the cache refs directly: project.conf may be gone, so
        # cli.get_element_states() is not usable for this assertion.
        for artifact_ref in expect_cached_artifacts:
            assert os.path.exists(
                os.path.join(local_cache, "artifacts", "refs", artifact_ref))
Example n. 8
0
def test_checkout(cli, tmpdir, datafiles, deps, expect_exist, expect_noexist,
                  with_project):
    # Check out an artifact by its artifact name with various --deps
    # values: "all" and "run" are rejected, otherwise the checkout must
    # contain exactly the expected files.
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure the share as a push remote
        cli.configure(
            {"artifacts": {"servers": [{"url": share.repo, "push": True}]}})

        # Build (this also pushes to the share)
        cli.run(project=project,
                args=["build", "target-import.bst"]).assert_success()

        # Cached locally and shared remotely; keep the artifact name
        # around for the checkout below.
        assert cli.get_element_state(project, "target-import.bst") == "cached"
        artifact_name = cli.get_artifact_name(project, "test",
                                              "target-import.bst")
        assert share.get_artifact(artifact_name)

        # Throw the local cache away entirely
        shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "cas")))
        shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "artifacts")))
        assert cli.get_element_state(project, "target-import.bst") != "cached"

        # Optionally exercise the "no project.conf" code path
        if not with_project:
            os.remove(os.path.join(project, "project.conf"))

        # Run the checkout under test
        result = cli.run(
            project=project,
            args=[
                "artifact", "checkout", "--directory", checkout, "--deps",
                deps, artifact_name
            ],
        )

        if deps in ["all", "run"]:
            # Artifact-name checkouts cannot follow these dependency scopes
            result.assert_main_error(ErrorDomain.STREAM, "deps-not-supported")
        else:
            result.assert_success()

        # Exactly the expected files, and none of the unexpected ones
        for relpath in expect_exist:
            assert os.path.exists(os.path.join(checkout, relpath))

        for relpath in expect_noexist:
            assert not os.path.exists(os.path.join(checkout, relpath))
Example n. 9
0
def test_pull(cli, tmpdir, datafiles):
    # Pull an artifact through the ArtifactCache API after deleting the
    # local copy, and verify it ends up cached locally again.
    project_dir = str(datafiles)

    # Bring up an artifact share and describe it in a user config file
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        cache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {"servers": [{"url": share.repo, "push": True}]},
            "cachedir": cache_dir,
        }

        # Persist the configuration and point the CLI at the same settings
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        # Build with the artifact cache configured so the result is pushed
        build_result = cli.run(project=project_dir,
                               args=["build", "target.bst"])
        build_result.assert_success()

        # Cached locally and shared remotely
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        assert share.get_artifact(
            cli.get_artifact_name(project_dir, "test", "target.bst"))

        # Drop the local copy; only the remote holds the artifact now
        cli.remove_artifact_from_cache(project_dir, "target.bst")
        assert cli.get_element_state(project_dir, "target.bst") != "cached"

        with dummy_context(config=user_config_file) as context:
            # Load the project
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # The element's artifact must NOT be cached at this point
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert not cli.artifact.is_cached(cache_dir, element, element_key)

            # Point the context at the same cache directories the CLI used
            context.cachedir = cache_dir
            context.casdir = os.path.join(cache_dir, "cas")
            context.tmpdir = os.path.join(cache_dir, "tmp")

            # Reload the project against the updated context
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Grab the local artifact cache handle and initialize remotes
            artifactcache = context.artifactcache
            context.initialize_remotes(True, True, None, None)

            # Pull via the API and confirm the artifact is cached again
            assert artifactcache.has_push_remotes(
                plugin=element), "No remote configured for element target.bst"
            assert artifactcache.pull(element,
                                      element_key), "Pull operation failed"

            assert cli.artifact.is_cached(cache_dir, element, element_key)
Example n. 10
0
def test_pull_tree(cli, tmpdir, datafiles):
    # Build and push an artifact, construct a Tree message from its root
    # Directory and push that as a message, delete the local artifact, then
    # pull the tree back through the ArtifactCache API and verify the root
    # Directory object exists in the local CAS again.
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(
            cli.get_artifact_name(project_dir, "test", "target.bst"))

        with dummy_context(config=user_config_file) as context:
            # Load the project and CAS cache
            project = Project(project_dir, context)
            project.ensure_fully_loaded()
            cas = context.get_cascache()

            # Assert that the element's artifact is cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert cli.artifact.is_cached(rootcache_dir, element, element_key)

            # Retrieve the Directory object from the cached artifact
            artifact_digest = cli.artifact.get_digest(rootcache_dir, element,
                                                      element_key)

            # Initialize remotes
            context.initialize_remotes(True, True, None, None)

            artifactcache = context.artifactcache
            assert artifactcache.has_push_remotes()

            directory = remote_execution_pb2.Directory()

            # artifact_digest addresses a serialized Directory proto in CAS
            with open(cas.objpath(artifact_digest), "rb") as f:
                directory.ParseFromString(f.read())

            # Build the Tree object while we are still cached
            tree = remote_execution_pb2.Tree()
            tree_maker(cas, tree, directory)

            # Push the Tree as a regular message; keep hash/size so the
            # digest can be reconstructed after local deletion below
            tree_digest = artifactcache.push_message(project, tree)
            tree_hash, tree_size = tree_digest.hash, tree_digest.size_bytes
            assert tree_hash and tree_size

            # Now delete the artifact locally
            cli.remove_artifact_from_cache(project_dir, "target.bst")

            # Assert that we are not cached locally anymore; the casd
            # channel is shut down first so cache state is re-read cleanly
            artifactcache.release_resources()
            cas._casd_channel.request_shutdown()
            cas.close_grpc_channels()
            assert cli.get_element_state(project_dir, "target.bst") != "cached"

            # NOTE(review): this reconstructed tree_digest is never used —
            # the pull below passes artifact_digest instead. Confirm whether
            # pull_tree was meant to receive tree_digest here.
            tree_digest = remote_execution_pb2.Digest(hash=tree_hash,
                                                      size_bytes=tree_size)

            # Pull the artifact using the Tree object
            directory_digest = artifactcache.pull_tree(project,
                                                       artifact_digest)
            directory_hash, directory_size = directory_digest.hash, directory_digest.size_bytes

        # Directory size now zero with AaaP and stack element commit #1cbc5e63dc
        assert directory_hash and not directory_size

        directory_digest = remote_execution_pb2.Digest(
            hash=directory_hash, size_bytes=directory_size)

        # Ensure the entire Tree structure has been pulled
        assert os.path.exists(cas.objpath(directory_digest))