def upload_blob(
    self,
    container: Container,
    filename: FileLike,
    blob_name: str = None,
    acl: str = None,
    meta_data: MetaData = None,
    content_type: str = None,
    content_disposition: str = None,
    cache_control: str = None,
    chunk_size: int = 1024,
    extra: ExtraOptions = None,
) -> Blob:
    """Upload a file path or file-like object via the Minio client.

    Paths go through ``fput_object``; streams go through ``put_object``,
    which requires an explicit byte length. The length may be supplied via
    ``extra['length']``; otherwise it is measured by seeking the stream.

    NOTE(review): ``acl``, ``content_disposition``, ``cache_control`` and
    ``chunk_size`` are accepted for interface parity but are not forwarded
    to the client here.

    :param container: Destination container.
    :param filename: Local file path or a seekable binary file object.
    :param blob_name: Object name; derived from ``filename`` when omitted.
    :param meta_data: Custom metadata attached to the object.
    :param content_type: MIME type; guessed from the path/name when omitted.
    :param extra: Backend options; only ``length`` is consumed.
    :return: The uploaded blob, re-fetched from the backend.
    """
    meta_data = {} if meta_data is None else meta_data
    extra = {} if extra is None else extra

    blob_name = blob_name or validate_file_or_path(filename)

    if not content_type:
        if isinstance(filename, str):
            content_type = file_content_type(filename)
        else:
            content_type = file_content_type(blob_name)

    if isinstance(filename, str):
        self.client.fput_object(
            container.name,
            blob_name,
            filename,
            content_type=content_type,
            metadata=meta_data,
        )
    else:
        # Determine the stream length without buffering the whole payload:
        # the previous extra.pop('length', len(filename.read())) read the
        # entire stream into memory even when 'length' was supplied,
        # because pop() evaluates its default eagerly.
        length = extra.pop('length', None)
        if length is None:
            filename.seek(0, 2)  # 2 == os.SEEK_END
            length = filename.tell()
        filename.seek(0)
        self.client.put_object(
            container.name,
            blob_name,
            filename,
            length,
            content_type=content_type,
            metadata=meta_data,
        )

    return self.get_blob(container, blob_name)
def upload_blob(
    self,
    container: Container,
    filename: FileLike,
    blob_name: str = None,
    acl: str = None,
    meta_data: MetaData = None,
    content_type: str = None,
    content_disposition: str = None,
    cache_control: str = None,
    chunk_size: int = 1024,
    extra: ExtraOptions = None,
) -> Blob:
    """Upload a file path or file-like object to an S3 bucket.

    Request options are collected into boto3 ``ExtraArgs`` (metadata,
    storage class, ACL, caching and disposition headers, content type)
    and the transfer is chunked via ``TransferConfig``.
    """
    if meta_data is None:
        meta_data = {}
    if extra is None:
        extra = {}

    upload_args = self._normalize_parameters(extra, self._PUT_OBJECT_KEYS)

    # setdefault: values supplied through `extra` take precedence.
    upload_args.setdefault("Metadata", meta_data)
    upload_args.setdefault("StorageClass", "STANDARD")
    if acl:
        upload_args.setdefault("ACL", acl.lower())
    if cache_control:
        upload_args.setdefault("CacheControl", cache_control)
    if content_disposition:
        upload_args["ContentDisposition"] = content_disposition

    blob_name = blob_name or validate_file_or_path(filename)

    # Without an explicit type, boto3 would default to
    # application/octet-stream, so always pick one.
    if content_type:
        upload_args["ContentType"] = content_type
    elif isinstance(filename, str):
        upload_args["ContentType"] = file_content_type(filename)
    else:
        upload_args["ContentType"] = file_content_type(blob_name)

    logger.debug("extra_args=%s", upload_args)

    transfer_config = boto3.s3.transfer.TransferConfig(io_chunksize=chunk_size)
    bucket = self.s3.Bucket(container.name)
    if isinstance(filename, str):
        bucket.upload_file(
            Filename=filename,
            Key=blob_name,
            ExtraArgs=upload_args,
            Config=transfer_config,
        )
    else:
        bucket.upload_fileobj(
            Fileobj=filename,
            Key=blob_name,
            ExtraArgs=upload_args,
            Config=transfer_config,
        )

    return self.get_blob(container, blob_name)
def upload_blob(
    self,
    container: Container,
    filename: FileLike,
    blob_name: str = None,
    acl: str = None,
    meta_data: MetaData = None,
    content_type: str = None,
    content_disposition: str = None,
    cache_control: str = None,
    chunk_size: int = 1024,
    extra: ExtraOptions = None,
) -> Blob:
    """Upload a file path or file-like object to an OpenStack container.

    The object is created through ``object_store.create_object``; custom
    metadata is attached in a follow-up call, then the object is re-fetched
    to build the returned blob.
    """
    # These options are not forwarded to the backend; warn instead of fail.
    if acl:
        logger.warning(messages.OPTION_NOT_SUPPORTED, "acl")
    if cache_control:
        logger.warning(messages.OPTION_NOT_SUPPORTED, "cache_control")

    if meta_data is None:
        meta_data = {}
    if extra is None:
        extra = {}

    obj_args = self._normalize_parameters(extra, self._OBJECT_META_KEYS)
    obj_args.setdefault("content_encoding", None)

    blob_name = blob_name or validate_file_or_path(filename)

    if not content_type:
        content_type = (
            file_content_type(filename)
            if isinstance(filename, str)
            else file_content_type(blob_name)
        )

    # Paths are opened here; caller-provided streams are used as given.
    stream = open(filename, "rb") if isinstance(filename, str) else filename  # type: FileLike

    with stream as data:
        obj_args.update(
            data=data,
            content_type=content_type,
            content_disposition=content_disposition,
            cache_control=cache_control,
        )
        obj = self.object_store.create_object(
            container=container.name, name=blob_name,
            **obj_args)  # type: OpenStackObject

    # Meta data can only be applied once the object exists.
    self._set_object_meta(obj, meta_data)

    obj = self._get_object(container.name, blob_name)
    return self._make_blob(container, obj)
def upload_blob(self, container: Container, filename: Union[str, FileLike],
                blob_name: str = None, acl: str = None,
                meta_data: MetaData = None, content_type: str = None,
                content_disposition: str = None, chunk_size: int = 1024,
                extra: ExtraOptions = None) -> Blob:
    """Upload a file path or file-like object via ``upload_object``.

    Custom metadata is attached in a second request after the upload, and
    the object is re-fetched to build the returned blob.
    """
    if acl:
        logger.warning(option_not_supported % 'acl')

    meta_data = {} if meta_data is None else meta_data
    extra = {} if extra is None else extra

    params = self._normalize_parameters(extra, self._OBJECT_META_KEYS)
    for option in ('content_encoding', 'delete_at', 'delete_after'):
        params.setdefault(option, None)

    blob_name = blob_name or validate_file_or_path(filename)

    if not content_type:
        content_type = (file_content_type(filename)
                        if isinstance(filename, str)
                        else file_content_type(blob_name))

    # Paths are opened here; caller-provided streams are used as given.
    stream = open(filename, 'rb') if isinstance(filename, str) else filename

    with stream as data:
        # The object returned by upload_object has empty bytes and
        # content-length, so it is not used; the blob is re-fetched below.
        self.object_store.upload_object(
            container=container.name,
            name=blob_name,
            data=data,
            content_type=content_type,
            content_disposition=content_disposition,
            content_encoding=params['content_encoding'],
            delete_after=params['delete_after'],
            delete_at=params['delete_at'])

    # Meta data can only be applied once the object exists.
    obj = self._get_object(container.name, blob_name)
    self._set_object_meta(obj, meta_data)

    obj = self._get_object(container.name, blob_name)
    return self._make_blob(container, obj)
def upload_blob(
    self,
    container: Container,
    filename: FileLike,
    blob_name: str = None,
    acl: str = None,
    meta_data: MetaData = None,
    content_type: str = None,
    content_disposition: str = None,
    cache_control: str = None,
    chunk_size: int = 1024,
    extra: ExtraOptions = None,
) -> Blob:
    """Store a file path or file-like object as a local-filesystem blob.

    Data is written to a ``.tmp`` sibling first and then atomically
    published over the final path, so readers never observe a partially
    written blob. Meta data and the other attributes are recorded via
    ``_set_file_attributes`` after the file is in place.

    :param container: Destination container (a local folder).
    :param filename: Local file path or an iterable binary file object.
    :param blob_name: Blob name; derived from ``filename`` when omitted.
    :param acl: Not supported by this driver; logged and ignored.
    :param extra: Backend attribute overrides, normalized against
        ``_PUT_OBJECT_KEYS``.
    :return: The stored blob.
    """
    if acl:
        logger.info(messages.OPTION_NOT_SUPPORTED, "acl")

    meta_data = {} if meta_data is None else meta_data
    extra = extra if extra is not None else {}

    attributes = self._normalize_parameters(extra, self._PUT_OBJECT_KEYS)
    attributes.setdefault("meta_data", meta_data)
    attributes.setdefault("content_disposition", content_disposition)
    attributes.setdefault("cache_control", cache_control)

    path = self._get_folder_path(container, validate=True)
    blob_name = blob_name or validate_file_or_path(filename)
    blob_path = os.path.join(path, blob_name)
    base_path = os.path.dirname(blob_path)
    self._make_path(base_path)

    tmp_blob_path = f'{blob_path}.tmp'
    with lock_local_file(blob_path):
        try:
            if isinstance(filename, str):
                shutil.copy(filename, tmp_blob_path)
            else:
                with open(tmp_blob_path, "wb") as blob_file:
                    for data in filename:
                        blob_file.write(data)
                    # Flush OS buffers so the publish below covers
                    # fully-persisted data.
                    os.fsync(blob_file.fileno())
            # os.replace (unlike os.rename) atomically overwrites an
            # existing target on all platforms; os.rename raises on
            # Windows when the blob already exists.
            os.replace(tmp_blob_path, blob_path)
            # Disable execute mode on file
            os.chmod(blob_path, 0o664)
        finally:
            # Never leave a stale temp file behind if copy/write/replace
            # failed part-way through.
            if os.path.exists(tmp_blob_path):
                os.remove(tmp_blob_path)

    if not content_type:
        attributes["content_type"] = file_content_type(blob_path)
    else:
        attributes["content_type"] = content_type

    # Set meta data and other attributes
    self._set_file_attributes(blob_path, attributes)

    return self.get_blob(container, blob_name)
def upload_blob(
    self,
    container: Container,
    filename: FileLike,
    blob_name: str = None,
    acl: str = None,
    meta_data: MetaData = None,
    content_type: str = None,
    content_disposition: str = None,
    cache_control: str = None,
    chunk_size: int = 1024,
    extra: ExtraOptions = None,
) -> Blob:
    """Upload a file path or file-like object to a Google Cloud bucket.

    The object is uploaded first, an optional predefined ACL is saved,
    and the remaining attributes are applied with a single ``PATCH``.

    :param container: Destination container (bucket).
    :param filename: Local file path or a binary file object.
    :param blob_name: Blob name; derived from ``filename`` when omitted.
    :param acl: Predefined ACL name saved after upload.
    :param extra: Extra attributes, normalized against ``_PUT_OBJECT_KEYS``.
    :return: The uploaded blob.
    """
    # Normalize optional mappings (consistent with the other drivers;
    # previously a None meta_data was passed through unchecked).
    meta_data = {} if meta_data is None else meta_data
    extra = extra if extra is not None else {}

    extra_args = self._normalize_parameters(extra, self._PUT_OBJECT_KEYS)
    extra_args.setdefault("metadata", meta_data)
    extra_args.setdefault("content_type", content_type)
    extra_args.setdefault("content_disposition", content_disposition)
    extra_args.setdefault("cache_control", cache_control)

    bucket = self._get_bucket(container.name)
    blob_name = blob_name or validate_file_or_path(filename)
    blob = bucket.blob(blob_name)

    # upload_from_file defaults Content-Type to application/octet-stream,
    # so resolve a concrete type up front.
    if not content_type:
        content_type = file_content_type(blob.name)

    if isinstance(filename, str):
        blob.upload_from_filename(filename=filename, content_type=content_type)
    else:
        blob.upload_from_file(file_obj=filename, content_type=content_type)

    if acl:
        blob.acl.save_predefined(acl)

    # Patch remaining attributes onto the object, skipping None values:
    # previously the None placeholders inserted by the setdefault calls
    # above (e.g. content_type when not passed) were PATCHed onto the
    # blob, clearing fields already set during the upload.
    for attr_name, attr_value in extra_args.items():
        if attr_name and attr_value is not None and hasattr(blob, attr_name):
            setattr(blob, attr_name, attr_value)
    blob.patch()

    return self._make_blob(container, blob)
def upload_blob(self, container: Container, filename: Union[str, FileLike],
                blob_name: str = None, acl: str = None,
                meta_data: MetaData = None, content_type: str = None,
                content_disposition: str = None, chunk_size: int = 1024,
                extra: ExtraOptions = None) -> Blob:
    """Store a file path or file-like object as a local-filesystem blob.

    The data is written under the container folder while holding the
    per-file lock; meta data and the other attributes are recorded via
    ``_set_file_attributes`` afterwards.
    """
    if acl:
        logger.info(option_not_supported % 'acl')

    meta_data = {} if meta_data is None else meta_data
    extra = {} if extra is None else extra

    attributes = self._normalize_parameters(extra, self._PUT_OBJECT_KEYS)
    attributes.setdefault('meta-data', meta_data)
    attributes.setdefault('content-disposition', content_disposition)

    folder = self._get_folder_path(container, validate=True)
    blob_name = blob_name or validate_file_or_path(filename)
    blob_path = os.path.join(folder, blob_name)
    self._make_path(os.path.dirname(blob_path))

    with lock_local_file(blob_path):
        if isinstance(filename, str):
            shutil.copy(filename, blob_path)
        else:
            with open(blob_path, 'wb') as target:
                for chunk in filename:
                    target.write(chunk)

    # Strip any execute bits from the stored file (rw-rw-r--).
    os.chmod(blob_path, 0o664)

    # Fall back to sniffing the written file when no type was given.
    attributes['content-type'] = content_type or file_content_type(blob_path)

    self._set_file_attributes(blob_path, attributes)
    return self.get_blob(container, blob_name)
def test_file_content_type(text_filename, binary_stream):
    """file_content_type resolves both path-based and stream-based inputs."""
    cases = [
        (text_filename, 'text/plain'),
        (binary_stream, 'image/png'),
    ]
    for source, expected in cases:
        assert file_content_type(source) == expected