Example #1
    def upload_blob(self, container: Container, filename: FileLike,
                    blob_name: str = None, acl: str = None,
                    meta_data: MetaData = None, content_type: str = None,
                    content_disposition: str = None, cache_control: str = None,
                    chunk_size: int = 1024,
                    extra: ExtraOptions = None) -> Blob:
        """Upload a file path or file-like object to a container and
        return the resulting blob."""
        meta_data = {} if meta_data is None else meta_data
        extra = {} if extra is None else extra

        # Derive the blob name from the path or file object if not given.
        blob_name = blob_name or validate_file_or_path(filename)

        # Guess the content type from the file path, falling back to the
        # blob name for file-like objects.
        if not content_type:
            if isinstance(filename, str):
                content_type = file_content_type(filename)
            else:
                content_type = file_content_type(blob_name)

        if isinstance(filename, str):
            # A path was given: let the client stream the file from disk.
            self.client.fput_object(container.name,
                                    blob_name,
                                    filename,
                                    content_type=content_type,
                                    metadata=meta_data)
        else:
            # A file-like object was given: determine its length (reading it
            # once if the caller did not supply one), then rewind and upload.
            length = extra.pop('length', len(filename.read()))
            filename.seek(0)
            self.client.put_object(container.name,
                                   blob_name,
                                   filename,
                                   length,
                                   content_type=content_type,
                                   metadata=meta_data)
        return self.get_blob(container, blob_name)
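
# A minimal usage sketch, assuming ``driver`` is an already-configured storage
# driver instance exposing the ``upload_blob`` method above together with a
# ``get_container`` lookup; the container name and file paths below are
# hypothetical placeholders, not part of the original example.
container = driver.get_container('avatars')

# Upload by path: the blob name and content type are derived automatically.
picture_blob = driver.upload_blob(container, '/path/to/avatar.png')

# Upload a file-like object under an explicit blob name.
with open('/path/to/avatar.png', 'rb') as picture_file:
    picture_blob = driver.upload_blob(container, picture_file,
                                      blob_name='avatar.png')
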
def read_in_chunks(file_object: FileLike,
                   block_size: int = 4096) -> Generator[bytes, None, None]:
    """Return a generator which yields data in chunks.

    Source: `read-file-in-chunks-ram-usage-read-strings-from-binary-file
    <https://stackoverflow.com/questions/17056382/
    read-file-in-chunks-ram-usage-read-strings-from-binary-files>`_

    :param file_object: File object to read in chunks.
    :type file_object: file object

    :param block_size: (optional) Chunk size.
    :type block_size: int

    :yield: The next chunk in file object.
    :yield type: `bytes`
    """
    for chunk in iter(lambda: file_object.read(block_size), b''):
        yield chunk
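
# A short usage sketch for read_in_chunks(); the file path and chunk size are
# hypothetical, and hashing is just one way to consume the generator without
# loading the whole file into memory.
import hashlib

digest = hashlib.sha256()
with open('/path/to/avatar.png', 'rb') as picture_file:
    for chunk in read_in_chunks(picture_file, block_size=4096):
        digest.update(chunk)
print(digest.hexdigest())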