Example #1
0
    def upload_blob(self, container: Container, filename: FileLike,
                    blob_name: str = None, acl: str = None,
                    meta_data: MetaData = None, content_type: str = None,
                    content_disposition: str = None, cache_control: str = None,
                    chunk_size: int = 1024,
                    extra: ExtraOptions = None) -> Blob:
        """Upload a file path or file-like object as a blob in *container*.

        :param container: Destination container.
        :param filename: File path (``str``) or an open binary stream.
        :param blob_name: (optional) Blob name; defaults to the name derived
            from *filename* via :func:`validate_file_or_path`.
        :param acl: (optional) Ignored by this driver (never read below).
        :param meta_data: (optional) Metadata stored with the blob.
        :param content_type: (optional) MIME type; guessed when omitted.
        :param content_disposition: (optional) Ignored by this driver.
        :param cache_control: (optional) Ignored by this driver.
        :param chunk_size: (optional) Ignored by this driver.
        :param extra: (optional) Driver options; ``extra['length']`` may give
            the stream's byte length to avoid pre-reading it.
        :return: The uploaded blob, fetched back via ``get_blob``.
        """
        meta_data = {} if meta_data is None else meta_data
        extra = {} if extra is None else extra

        # Derive the blob name from the path / stream name when not given.
        blob_name = blob_name or validate_file_or_path(filename)

        if not content_type:
            # Guess from the real path when we have one, otherwise from the
            # chosen blob name.
            if isinstance(filename, str):
                content_type = file_content_type(filename)
            else:
                content_type = file_content_type(blob_name)

        if isinstance(filename, str):
            self.client.fput_object(container.name,
                                    blob_name,
                                    filename,
                                    content_type=content_type,
                                    metadata=meta_data)
        else:
            # BUG FIX: the old `extra.pop('length', len(filename.read()))`
            # evaluated the default eagerly, consuming the whole stream even
            # when a length WAS supplied, and the unconditional seek(0) then
            # broke non-seekable streams. Only measure/rewind when needed.
            length = extra.pop('length', None)
            if length is None:
                length = len(filename.read())
                filename.seek(0)
            self.client.put_object(container.name,
                                   blob_name,
                                   filename,
                                   length,
                                   content_type=content_type,
                                   metadata=meta_data)
        return self.get_blob(container, blob_name)
Example #2
0
def file_checksum(filename: 'FileLike',
                  hash_type: str = 'md5',
                  block_size: int = 4096) -> 'HASH':
    """Returns checksum for file.

    .. code-block:: python

        from cloudstorage.helpers import file_checksum

        picture_path = '/path/picture.png'
        file_checksum(picture_path, hash_type='sha256')
        # '03ef90ba683795018e541ddfb0ae3e958a359ee70dd4fccc7e747ee29b5df2f8'

    Source: `get-md5-hash-of-big-files-in-python <https://stackoverflow.com/
    questions/1131220/get-md5-hash-of-big-files-in-python>`_

    :param filename: File path or stream.
    :type filename: str or FileLike

    :param hash_type: Hash algorithm function name.
    :type hash_type:  str

    :param block_size: (optional) Chunk size.
    :type block_size: int

    :return: Hash of file.
    :rtype: :class:`_hashlib.HASH`

    :raise RuntimeError: If the hash algorithm is not found in :mod:`hashlib`.

    .. versionchanged:: 0.4
      Returns :class:`_hashlib.HASH` instead of `HASH.hexdigest()`.
    """
    try:
        # Look up the constructor by name, e.g. hashlib.md5 / hashlib.sha256.
        file_hash = getattr(hashlib, hash_type)()
    except AttributeError as err:
        # Chain the original AttributeError so the cause survives (PEP 3134).
        raise RuntimeError(
            'Invalid or unsupported hash type: %s' % hash_type) from err

    if isinstance(filename, str):
        # Path given: open and stream the file ourselves.
        with open(filename, 'rb') as file_:
            for chunk in read_in_chunks(file_, block_size=block_size):
                file_hash.update(chunk)
    else:
        # Caller-supplied stream: hash from its current position.
        for chunk in read_in_chunks(filename, block_size=block_size):
            file_hash.update(chunk)
        # rewind the stream so it can be re-read later
        if filename.seekable():
            filename.seek(0)

    return file_hash