def _make_blob(self, container: Container, object_name: str) -> Blob:
    """Convert local file name to a Cloud Storage Blob.

    :param container: Container instance.
    :type container: :class:`.Container`

    :param object_name: Filename.
    :type object_name: str

    :return: Blob instance.
    :rtype: :class:`.Blob`

    :raises NotFoundError: If the file does not exist on disk.
    """
    full_path = os.path.join(self.base_path, container.name, object_name)
    object_path = pathlib.Path(full_path)

    try:
        file_stat = os.stat(str(object_path))
    except FileNotFoundError as err:
        # Chain the original error so the traceback shows the root cause
        # instead of "During handling of the above exception ..." noise.
        raise NotFoundError(
            messages.BLOB_NOT_FOUND % (object_name, container.name)) from err

    meta_data = {}
    content_type = None
    content_disposition = None
    cache_control = None

    # Loop-invariant prefix identifying user-set metadata attributes.
    meta_attr_prefix = self._OBJECT_META_PREFIX + 'metadata'

    try:
        attributes = xattr.xattr(full_path)

        for attr_key, attr_value in attributes.items():
            value_str = None

            # Best effort: keep None for values that are not valid UTF-8.
            try:
                value_str = attr_value.decode('utf-8')
            except UnicodeDecodeError:
                pass

            if attr_key.startswith(meta_attr_prefix):
                meta_key = attr_key.split('.')[-1]
                meta_data[meta_key] = value_str
            elif attr_key.endswith('content_type'):
                content_type = value_str
            elif attr_key.endswith('content_disposition'):
                content_disposition = value_str
            elif attr_key.endswith('cache_control'):
                cache_control = value_str
            else:
                logger.warning("Unknown file attribute '%s'", attr_key)
    except OSError:
        # Extended attributes are unsupported on some filesystems; the blob
        # is still returned, just without the stored attribute values.
        logger.warning(messages.LOCAL_NO_ATTRIBUTES)

    # TODO: QUESTION: Option to disable checksum for large files?
    # TODO: QUESTION: Save a .hash file for each file?
    file_hash = file_checksum(full_path, hash_type=self.hash_type)
    checksum = file_hash.hexdigest()

    # The etag is derived from the path only; content integrity is
    # covered by the checksum above.
    etag = hashlib.sha1(full_path.encode('utf-8')).hexdigest()

    created_at = datetime.fromtimestamp(file_stat.st_ctime, timezone.utc)
    modified_at = datetime.fromtimestamp(file_stat.st_mtime, timezone.utc)

    return Blob(name=object_path.name, checksum=checksum, etag=etag,
                size=file_stat.st_size, container=container, driver=self,
                acl=None, meta_data=meta_data,
                content_disposition=content_disposition,
                content_type=content_type, cache_control=cache_control,
                created_at=created_at, modified_at=modified_at)