Code example #1
0
def test_abs_symlink(tmpdir):
    """Descending through an absolute symlink should land in its target dir."""
    root = str(tmpdir)
    cache = CASCache(os.path.join(root, "cas"),
                     log_directory=os.path.join(root, "logs"))
    try:
        expected = "two step file"

        # "a/l" is an absolute symlink pointing at the sibling "target" dir.
        layout = [
            ("a", "D", ""),
            ("a/l", "S", "/target"),
            ("target", "D", ""),
            ("target/file", "F", expected),
        ]
        import_root = os.path.join(root, "importfrom")
        generate_import_root(import_root, layout)

        directory = CasBasedDirectory(cache)
        directory.import_files(import_root)

        subdir = directory.descend("a", "l", follow_symlinks=True)
        digest = subdir.index["file"].get_digest()

        with open(cache.objpath(digest)) as fp:
            assert fp.read() == expected
    finally:
        cache.release_resources()
Code example #2
0
def test_descend(tmpdir):
    """descend() across nested directories should reach the right file."""
    root = str(tmpdir)
    cache = CASCache(os.path.join(root, "cas"),
                     log_directory=os.path.join(root, "logs"))
    try:
        expected = "You got me"

        # Two nested directories with a single file at the bottom.
        layout = [
            ("a", "D", ""),
            ("a/l", "D", ""),
            ("a/l/g", "F", expected),
        ]
        import_root = os.path.join(root, "importfrom")
        generate_import_root(import_root, layout)

        directory = CasBasedDirectory(cache)
        directory.import_files(import_root)

        digest = directory.descend("a", "l").index["g"].get_digest()

        with open(cache.objpath(digest)) as fp:
            assert fp.read() == expected
    finally:
        cache.release_resources()
Code example #3
0
class ArtifactShare(BaseArtifactShare):
    """A local artifact share used as a test fixture.

    Blobs are stored in a ``CASCache`` under ``self.repodir``; artifact and
    source protos are looked up over the Remote Asset ``Fetch`` service at
    ``self.repo``.

    NOTE(review): ``self.repo`` is read in get_artifact_proto() /
    get_source_proto() but never assigned in this class — presumably set by
    ``BaseArtifactShare`` once the server is running; confirm in the base
    class.
    """

    def __init__(self, directory, *, quota=None, casd=False, index_only=False):

        # The working directory for the artifact share (in case it
        # needs to do something outside of its backend's storage folder).
        #
        self.directory = os.path.abspath(directory)

        # The directory the actual repo will be stored in.
        #
        # Unless this gets more complicated, just use this directly
        # in tests as a remote artifact push/pull configuration
        #
        self.repodir = os.path.join(self.directory, "repo")
        os.makedirs(self.repodir)

        # Only give buildbox-casd a log directory when it is in use.
        logdir = os.path.join(self.directory, "logs") if casd else None

        self.cas = CASCache(self.repodir, casd=casd, log_directory=logdir)

        # Stored for _create_server() below.
        self.quota = quota
        self.index_only = index_only

        super().__init__()

    # _create_server():
    #
    # Build the server instance serving this share's repository
    # (push enabled, honoring the configured quota / index-only mode).
    #
    def _create_server(self):
        return create_server(
            self.repodir,
            quota=self.quota,
            enable_push=True,
            index_only=self.index_only,
        )

    # has_object():
    #
    # Checks whether the object is present in the share
    #
    # Args:
    #    digest (Digest): The object's digest
    #
    # Returns:
    #    (bool): True if the object exists in the share, otherwise false.
    def has_object(self, digest):

        assert isinstance(digest, remote_execution_pb2.Digest)

        object_path = self.cas.objpath(digest)

        return os.path.exists(object_path)

    # get_artifact_proto():
    #
    # Fetch the blob digest of an artifact proto from the share over the
    # Remote Asset Fetch service.
    #
    # Args:
    #    artifact_name (str): The composed complete artifact name
    #
    # Returns:
    #    The artifact's blob digest, or None if the share does not have it
    #    (NOT_FOUND from the service, or a non-OK status in the response).
    def get_artifact_proto(self, artifact_name):
        url = urlparse(self.repo)
        channel = grpc.insecure_channel("{}:{}".format(url.hostname, url.port))
        try:
            fetch_service = remote_asset_pb2_grpc.FetchStub(channel)

            uri = REMOTE_ASSET_ARTIFACT_URN_TEMPLATE.format(artifact_name)

            request = remote_asset_pb2.FetchBlobRequest()
            request.uris.append(uri)

            try:
                response = fetch_service.FetchBlob(request)
            except grpc.RpcError as e:
                # NOT_FOUND simply means the share has no such artifact;
                # any other RPC failure is a real error.
                if e.code() == grpc.StatusCode.NOT_FOUND:
                    return None
                raise

            if response.status.code != code_pb2.OK:
                return None

            return response.blob_digest
        finally:
            # Always release the channel, even on error paths.
            channel.close()

    # get_source_proto():
    #
    # Fetch the root directory digest of a source proto from the share
    # over the Remote Asset Fetch service.
    #
    # Args:
    #    source_name (str): The source proto name
    #
    # Returns:
    #    The source's root directory digest, or None if the share does
    #    not have it.
    def get_source_proto(self, source_name):
        url = urlparse(self.repo)
        channel = grpc.insecure_channel("{}:{}".format(url.hostname, url.port))
        try:
            fetch_service = remote_asset_pb2_grpc.FetchStub(channel)

            uri = REMOTE_ASSET_SOURCE_URN_TEMPLATE.format(source_name)

            request = remote_asset_pb2.FetchDirectoryRequest()
            request.uris.append(uri)

            try:
                response = fetch_service.FetchDirectory(request)
            except grpc.RpcError as e:
                if e.code() == grpc.StatusCode.NOT_FOUND:
                    return None
                raise

            if response.status.code != code_pb2.OK:
                return None

            return response.root_directory_digest
        finally:
            channel.close()

    # get_cas_files():
    #
    # Load the artifact proto stored at the given digest and verify that
    # every blob it references (files tree, buildtree, public data, logs)
    # is present in the local CAS.
    #
    # Args:
    #    artifact_proto_digest (Digest): Digest of the stored artifact proto
    #
    # Returns:
    #    The artifact's files digest if everything is present, else None.
    def get_cas_files(self, artifact_proto_digest):

        reachable = set()

        # Walks a directory digest, raising if any referenced object
        # is missing (check_exists=True); mtimes are left untouched.
        def reachable_dir(digest):
            self.cas._reachable_refs_dir(reachable,
                                         digest,
                                         update_mtime=False,
                                         check_exists=True)

        try:
            artifact_proto_path = self.cas.objpath(artifact_proto_digest)
            if not os.path.exists(artifact_proto_path):
                return None

            artifact_proto = artifact_pb2.Artifact()
            try:
                with open(artifact_proto_path, "rb") as f:
                    artifact_proto.ParseFromString(f.read())
            except FileNotFoundError:
                return None

            # str(...) is truthy only for a non-empty (set) digest field.
            if str(artifact_proto.files):
                reachable_dir(artifact_proto.files)

            if str(artifact_proto.buildtree):
                reachable_dir(artifact_proto.buildtree)

            if str(artifact_proto.public_data):
                if not os.path.exists(
                        self.cas.objpath(artifact_proto.public_data)):
                    return None

            for log_file in artifact_proto.logs:
                if not os.path.exists(self.cas.objpath(log_file.digest)):
                    return None

            return artifact_proto.files

        except CASError:
            return None

        except FileNotFoundError:
            return None

    # get_artifact():
    #
    # Checks whether the artifact is present in the share
    #
    # Args:
    #    artifact_name (str): The composed complete artifact name
    #
    # Returns:
    #    The artifact's files digest if the artifact (and all blobs it
    #    references) exists in the share, otherwise None.
    def get_artifact(self, artifact_name):
        artifact_proto = self.get_artifact_proto(artifact_name)
        if not artifact_proto:
            return None
        return self.get_cas_files(artifact_proto)

    # close():
    #
    # Remove the artifact share.
    #
    def close(self):
        super().close()

        self.cas.release_resources()

        shutil.rmtree(self.directory)
Code example #4
0
File: artifactshare.py  Project: cphang99/buildstream
class ArtifactShare(BaseArtifactShare):
    """A local artifact share used as a test fixture.

    Blobs live in a ``CASCache`` under ``self.repodir``; artifact and
    source protos are plain files under ``self.artifactdir`` and
    ``self.sourcedir`` respectively.
    """

    def __init__(self, directory, *, quota=None, casd=False, index_only=False):

        # The working directory for the artifact share (in case it
        # needs to do something outside of its backend's storage folder).
        #
        self.directory = os.path.abspath(directory)

        # Backend repository layout.  Unless this gets more complicated,
        # tests can use self.repodir directly as a remote artifact
        # push/pull configuration.
        self.repodir = os.path.join(self.directory, "repo")
        self.artifactdir = os.path.join(self.repodir, "artifacts", "refs")
        self.sourcedir = os.path.join(self.repodir, "source_protos")
        for subdir in (self.repodir, self.artifactdir, self.sourcedir):
            os.makedirs(subdir)

        # Only give buildbox-casd a log directory when it is in use.
        logdir = os.path.join(self.directory, "logs") if casd else None
        self.cas = CASCache(self.repodir, casd=casd, log_directory=logdir)

        # Stored for _create_server() below.
        self.quota = quota
        self.index_only = index_only

        super().__init__()

    # _create_server():
    #
    # Build the server instance serving this share's repository
    # (push enabled, honoring the configured quota / index-only mode).
    #
    def _create_server(self):
        return create_server(
            self.repodir,
            quota=self.quota,
            enable_push=True,
            index_only=self.index_only,
        )

    # has_object():
    #
    # Checks whether the object is present in the share
    #
    # Args:
    #    digest (Digest): The object's digest
    #
    # Returns:
    #    (bool): True if the object exists in the share, otherwise false.
    def has_object(self, digest):
        assert isinstance(digest, remote_execution_pb2.Digest)
        return os.path.exists(self.cas.objpath(digest))

    # get_artifact_proto():
    #
    # Load the named artifact proto from the share's artifact directory.
    #
    # Args:
    #    artifact_name (str): The composed complete artifact name
    #
    # Returns:
    #    (Artifact): The parsed proto, or None if no such file exists.
    def get_artifact_proto(self, artifact_name):
        path = os.path.join(self.artifactdir, artifact_name)
        try:
            with open(path, "rb") as f:
                blob = f.read()
        except FileNotFoundError:
            return None

        proto = artifact_pb2.Artifact()
        proto.ParseFromString(blob)
        return proto

    # get_source_proto():
    #
    # Load the named source proto from the share's source directory.
    #
    # Args:
    #    source_name (str): The source proto name
    #
    # Returns:
    #    (Source): The parsed proto, or None if no such file exists.
    def get_source_proto(self, source_name):
        path = os.path.join(self.sourcedir, source_name)
        try:
            with open(path, "rb") as f:
                blob = f.read()
        except FileNotFoundError:
            return None

        proto = source_pb2.Source()
        proto.ParseFromString(blob)
        return proto

    # get_cas_files():
    #
    # Verify that every blob referenced by the artifact proto (files
    # tree, buildtree, public data, logs) is present in the local CAS.
    #
    # Args:
    #    artifact_proto (Artifact): The artifact proto to check
    #
    # Returns:
    #    The artifact's files digest if everything is present, else None.
    def get_cas_files(self, artifact_proto):
        reachable = set()

        # Walks a directory digest, raising if any referenced object
        # is missing (check_exists=True); mtimes are left untouched.
        def check_dir(digest):
            self.cas._reachable_refs_dir(
                reachable, digest, update_mtime=False, check_exists=True)

        try:
            # str(...) is truthy only for a non-empty (set) digest field.
            if str(artifact_proto.files):
                check_dir(artifact_proto.files)

            if str(artifact_proto.buildtree):
                check_dir(artifact_proto.buildtree)

            if str(artifact_proto.public_data):
                if not os.path.exists(self.cas.objpath(artifact_proto.public_data)):
                    return None

            for log_file in artifact_proto.logs:
                if not os.path.exists(self.cas.objpath(log_file.digest)):
                    return None
        except (CASError, FileNotFoundError):
            return None

        return artifact_proto.files

    # get_artifact():
    #
    # Checks whether the artifact is present in the share
    #
    # Args:
    #    artifact_name (str): The composed complete artifact name
    #
    # Returns:
    #    The artifact's files digest if the artifact (and all blobs it
    #    references) exists in the share, otherwise None.
    def get_artifact(self, artifact_name):
        proto = self.get_artifact_proto(artifact_name)
        if not proto:
            return None
        return self.get_cas_files(proto)

    # close():
    #
    # Remove the artifact share.
    #
    def close(self):
        super().close()
        self.cas.release_resources()
        shutil.rmtree(self.directory)