Example 1
def test_update_artifact(tmpdir, files):
    """Exercise the UpdateArtifact RPC for three file-digest scenarios.

    Parametrized by *files*:
      - "present": a valid Directory blob exists in the CAS -> RPC succeeds.
      - "invalid": the blob exists but is not a parseable Directory -> RPC
        must fail with FAILED_PRECONDITION.
      - "absent": the digest refers to content never uploaded -> RPC must
        fail with FAILED_PRECONDITION.
    """
    sharedir = os.path.join(str(tmpdir), "share")
    with create_artifact_share(sharedir, casd=True) as share:
        # Seed the CAS according to the scenario under test.
        if files == "present":
            # A valid (empty) Directory message stored in the CAS.
            directory = re_pb2.Directory()
            digest = share.cas.add_object(buffer=directory.SerializeToString())
        elif files == "invalid":
            # A blob that exists in the CAS but is not a valid Directory.
            digest = share.cas.add_object(
                buffer="abcdefghijklmnop".encode("utf-8"))
        elif files == "absent":
            # A digest for content that was never uploaded.
            digest = utils._message_digest("abcdefghijklmnop".encode("utf-8"))

        url = urlparse(share.repo)

        with grpc.insecure_channel("{}:{}".format(url.hostname,
                                                  url.port)) as channel:
            artifact_stub = ArtifactServiceStub(channel)

            # initialise an artifact
            artifact = Artifact()
            artifact.version = 0
            artifact.build_success = True
            artifact.strong_key = "abcdefghijklmnop"
            # NOTE: the previous manual hash/size_bytes assignments were dead
            # code — CopyFrom() replaces the whole files sub-message anyway.
            artifact.files.CopyFrom(digest)

            # Put it in the artifact share with an UpdateArtifactRequest
            request = UpdateArtifactRequest()
            request.artifact.CopyFrom(artifact)
            request.cache_key = "a-cache-key"

            # should return the same artifact back
            if files == "present":
                response = artifact_stub.UpdateArtifact(request)
                assert response == artifact
            else:
                try:
                    artifact_stub.UpdateArtifact(request)
                except grpc.RpcError as e:
                    assert e.code() == grpc.StatusCode.FAILED_PRECONDITION
                    if files == "absent":
                        assert e.details(
                        ) == "Artifact files specified but no files found"
                    elif files == "invalid":
                        assert e.details(
                        ) == "Artifact files specified but directory not found"
                    return
                # BUG FIX: previously, if the expected RpcError was NOT
                # raised, execution silently fell through — fail explicitly.
                raise AssertionError(
                    "UpdateArtifact unexpectedly succeeded for files={!r}".format(files))

            # If we uploaded the artifact check GetArtifact
            request = GetArtifactRequest()
            request.cache_key = "a-cache-key"

            response = artifact_stub.GetArtifact(request)
            assert response == artifact
Example 2
    def _reachable_refs_dir(self, reachable, tree):
        """Recursively collect the hash of every blob reachable from *tree*.

        Args:
            reachable (set): accumulator of digest hashes seen so far;
                mutated in place.
            tree: a digest whose object in the local CAS is a serialized
                Directory message.

        Raises:
            FileNotFoundError: if a referenced file blob is missing on disk.
        """
        # Already visited this directory — nothing more to do.
        if tree.hash in reachable:
            return
        reachable.add(tree.hash)

        # Load and parse the Directory message backing this digest.
        node = remote_execution_pb2.Directory()
        with open(self.cas.objpath(tree), "rb") as dirfile:
            node.ParseFromString(dirfile.read())

        # Record every file entry, insisting its blob exists on disk.
        for entry in node.files:
            if not os.path.exists(self.cas.objpath(entry.digest)):
                raise FileNotFoundError
            reachable.add(entry.digest.hash)

        # Recurse into each subdirectory.
        for subdir in node.directories:
            self._reachable_refs_dir(reachable, subdir.digest)
Example 3
def test_pull_tree(cli, tmpdir, datafiles):
    """Build an element, push its Directory as a Tree message, purge the
    local cache, then pull the Tree back and verify the root Directory
    object lands in the local CAS.

    Round-trips ArtifactCache.push_message() / pull_tree() through a local
    artifact share.
    """
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(
            cli.get_artifact_name(project_dir, "test", "target.bst"))

        with dummy_context(config=user_config_file) as context:
            # Load the project and CAS cache
            project = Project(project_dir, context)
            project.ensure_fully_loaded()
            cas = context.get_cascache()

            # Assert that the element's artifact is cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert cli.artifact.is_cached(rootcache_dir, element, element_key)

            # Retrieve the Directory object from the cached artifact
            artifact_digest = cli.artifact.get_digest(rootcache_dir, element,
                                                      element_key)

            artifactcache = context.artifactcache
            # Manually setup the CAS remote
            artifactcache.setup_remotes(use_config=True)
            assert artifactcache.has_push_remotes()

            directory = remote_execution_pb2.Directory()

            with open(cas.objpath(artifact_digest), "rb") as f:
                directory.ParseFromString(f.read())

            # Build the Tree object while we are still cached
            tree = remote_execution_pb2.Tree()
            tree_maker(cas, tree, directory)

            # Push the Tree as a regular message
            tree_digest = artifactcache.push_message(project, tree)
            tree_hash, tree_size = tree_digest.hash, tree_digest.size_bytes
            assert tree_hash and tree_size

            # Now delete the artifact locally
            cli.remove_artifact_from_cache(project_dir, "target.bst")

            # Assert that we are not cached locally anymore
            artifactcache.close_grpc_channels()
            cas.close_grpc_channels()
            assert cli.get_element_state(project_dir, "target.bst") != "cached"

            # Reconstruct the digest of the Tree we pushed above
            tree_digest = remote_execution_pb2.Digest(hash=tree_hash,
                                                      size_bytes=tree_size)

            # Pull the artifact using the Tree object.
            # BUG FIX: the reconstructed tree_digest was built but never
            # used — the pull was issued with artifact_digest instead.
            # pull_tree() expects the digest of the pushed Tree message.
            directory_digest = artifactcache.pull_tree(project, tree_digest)
            directory_hash, directory_size = directory_digest.hash, directory_digest.size_bytes

        # Directory size now zero with AaaP and stack element commit #1cbc5e63dc
        assert directory_hash and not directory_size

        directory_digest = remote_execution_pb2.Digest(
            hash=directory_hash, size_bytes=directory_size)

        # Ensure the entire Tree structure has been pulled
        assert os.path.exists(cas.objpath(directory_digest))