Example #1
import base64
import hashlib

from django.conf import settings
from django.core.files.base import ContentFile
from baidubce.auth.bce_credentials import BceCredentials
from baidubce.bce_client_configuration import BceClientConfiguration
from baidubce.services.bos.bos_client import BosClient

# BcsStorage and BCS_BUCKET come from the surrounding project and are not shown here.


class BOSStorage(BcsStorage):
    """
    A FileStorage base class that supports both BOS and local Django file storage.
    It overrides the storage location and the base URL of stored files.
    """
    bucket_name = BCS_BUCKET['BUCKET_NAME']

    def __init__(self,
                 location=settings.MEDIA_URL,
                 base_url=settings.MEDIA_URL):
        super(BOSStorage, self).__init__(location, base_url)
        config = BceClientConfiguration(credentials=BceCredentials(
            BCS_BUCKET['AK'], BCS_BUCKET['SK']),
                                        endpoint=BCS_BUCKET['END_POINT'])
        self.bos_client = BosClient(config)
        # check if bucket exists
        if not self.bos_client.does_bucket_exist(self.bucket_name):
            self.bos_client.create_bucket(self.bucket_name)

    def saveToBucket(self, name, content):
        data = None
        if hasattr(content, '_get_file'):  # admin entry
            data = content._get_file().read()
        elif isinstance(content, ContentFile):  # view entry (ContentFile)
            data = content.read()
        else:
            data = content
        md5 = hashlib.md5()
        md5.update(data)
        md5value = base64.standard_b64encode(md5.digest())
        self.bos_client.put_object(self.bucket_name, name, data, len(data),
                                   md5value)

    def delete(self, name):
        """
        Delete a file from bos.
        """
        self.bos_client.delete_object(self.bucket_name, name)
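
A rough sketch of how such a storage class is typically wired into Django; the `myapp.storage` module path and the `Report` model below are hypothetical, not from the original project:

# settings.py -- point Django's default storage at the class (hypothetical module path)
DEFAULT_FILE_STORAGE = 'myapp.storage.BOSStorage'

# or attach it to a single field
from django.db import models

from myapp.storage import BOSStorage  # hypothetical import path


class Report(models.Model):
    attachment = models.FileField(upload_to='reports/', storage=BOSStorage())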
Example #2
import base64
import hashlib
import io
import os

GENIOUS_BUCKET = 'genious'


def md5_obj(fp):
    buf_size = 8192
    md5 = hashlib.md5()
    while True:
        bytes_to_read = buf_size
        buf = fp.read(bytes_to_read)
        if not buf:
            break
        md5.update(buf)
    content_md5 = base64.standard_b64encode(md5.digest())
    return content_md5


if __name__ == '__main__':
    # Raw string avoids the backslash escapes in the Windows path
    crx_path = r"E:\workspace\pycharm\Exercise\spider\chrome_plugin\Clear Cache.crx"
    getsize = os.path.getsize(crx_path)
    bs = bytearray(getsize)
    with open(crx_path, 'rb') as fp:
        fp.readinto(bs)
    print(getsize)
    # md5_obj expects a file-like object, so wrap the bytes that were just read
    bos_client.put_object(GENIOUS_BUCKET, "chrome/crx/11.crx", io.BytesIO(bs),
                          getsize, md5_obj(io.BytesIO(bs)))
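
Example #2 assumes a module-level `bos_client`. A minimal construction sketch, mirroring the configuration code from Example #1; the access key, secret key, and endpoint below are placeholders:

from baidubce.auth.bce_credentials import BceCredentials
from baidubce.bce_client_configuration import BceClientConfiguration
from baidubce.services.bos.bos_client import BosClient

# Placeholder credentials and endpoint -- substitute your own values
config = BceClientConfiguration(
    credentials=BceCredentials('your-access-key-id', 'your-secret-access-key'),
    endpoint='https://bj.bcebos.com')
bos_client = BosClient(config)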
Example #3
# config is a BceClientConfiguration built as in Example #1; disable retries here
config.retry_policy = NoRetryPolicy()
bos_client = BosClient(config)

# One-time bucket setup and listing, kept commented out:
'''

if not bos_client.does_bucket_exist(bucket_name):
    bos_client.create_bucket(bucket_name)
    # Private ACL: only the owner has read/write access, everyone else has none
    bos_client.set_bucket_canned_acl(bucket_name, canned_acl.PRIVATE)

response = bos_client.list_buckets()
owner = response.owner
print('user id:%s, user name:%s' % (owner.id, owner.display_name))
for bucket in response.buckets:
    print(bucket.name)

# Show which region the bucket belongs to
print(bos_client.get_bucket_location(bucket_name))

# bos_client.delete_bucket(bucket_name)  # deletion fails if the bucket is not empty
'''
# Upload
bos_client.put_object(bucket_name, object_key, data)  # data is a stream (file-like) object
bos_client.put_object_from_string(bucket_name, object_key, string)  # string is a str object
bos_client.put_object_from_file(bucket_name, object_key, file_name)  # file_name is a local file path
# List the objects in the bucket
response = bos_client.list_objects(bucket_name)
for obj in response.contents:
    print(obj.key)
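
list_objects returns at most 1000 keys per request. A minimal paging sketch, assuming the listing response exposes `is_truncated` and `next_marker` as in the BOS object-listing API:

# Page through every object, 1000 keys at a time (sketch)
marker = None
while True:
    response = bos_client.list_objects(bucket_name, marker=marker)
    for obj in response.contents:
        print(obj.key)
    if not response.is_truncated:
        break
    marker = response.next_marker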
Example #4
import base64
import hashlib
import io
import os

from baidubce.services.bos.bos_client import BosClient

# _config (a BceClientConfiguration) and LOGGER are module-level objects defined
# elsewhere in the project (see the usage sketch after this example).


class BaiduBos:
    def __init__(self, bucket):
        self._bucket = bucket
        self._bos_client = BosClient(_config)

    def upload_file(self,
                    fn,
                    key,
                    get_url=False,
                    absent=True,
                    expiration_in_seconds=-1):
        """
        上传文件,如果文件超过25兆,将采用分块上传
        如果key已存在,则返回key的url
        :param fn:
        :param key:
        :param get_url:是否需要获取key对应的url
        :param absent: True时,如果bos已存在该文件  则不上传
        :return:
        """
        exists = False
        if absent:
            for obj in self._bos_client.list_all_objects(self._bucket):
                if obj.key == key:
                    LOGGER.warning(
                        "the key '{0}' already exists, upload canceled".format(key))
                    exists = True
                    break
        if not exists:
            fs = os.path.getsize(fn)
            with open(fn, mode='rb') as f:
                if fs > 25 * 1024 * 1024:
                    self._multipart_upload(fn, key)
                else:
                    self._bos_client.put_object(self._bucket, key, f, fs,
                                                self.md5_file(fn))
        if get_url:
            url = self._bos_client.generate_pre_signed_url(
                self._bucket, key, expiration_in_seconds=expiration_in_seconds)
            return url.decode("utf-8")
        return None

    def upload_bytes(self,
                     byte_arr,
                     key,
                     get_url=False,
                     absent=True,
                     expiration_in_seconds=-1):
        """
        上传字节
        如果key已存在,则返回key的url
        :param byte_arr:
        :param key:
        :return:
        """
        exists = False
        if absent:
            for obj in self._bos_client.list_all_objects(self._bucket):
                if obj.key == key:
                    LOGGER.warning(
                        "the key '{0}' already exists, upload canceled".format(key))
                    exists = True
                    break
        if not exists:
            self._bos_client.put_object(self._bucket, key,
                                        io.BytesIO(byte_arr), len(byte_arr),
                                        self.md5_obj(byte_arr))
        if get_url:
            url = self._bos_client.generate_pre_signed_url(
                self._bucket, key, expiration_in_seconds=expiration_in_seconds)
            return url.decode("utf-8")
        return None

    def _multipart_upload(self, fn, key):
        """
        文件分块上传
        如果key已存在,则返回key的url
        :arg key
        :arg fn
        """
        upload_id = self._bos_client.initiate_multipart_upload(
            GENIOUS_BUCKET, key).upload_id
        left_size = os.path.getsize(fn)
        # left_size用于设置分块开始位置
        # 设置分块的开始偏移位置
        offset = 0

        part_number = 1
        part_list = []
        index = 0
        while left_size > 0:
            # Each part is 5 MB; the final part may be smaller
            part_size = 5 * 1024 * 1024
            if left_size < part_size:
                part_size = left_size

            response = self._bos_client.upload_part_from_file(
                self._bucket, key, upload_id, part_number, part_size, fn,
                offset)
            index += 1
            print(index)
            left_size -= part_size
            offset += part_size
            part_list.append({
                "partNumber": part_number,
                "eTag": response.metadata.etag
            })

            part_number += 1
        location = self._bos_client.complete_multipart_upload(
            self._bucket, key, upload_id, part_list)
        print(location.location)
        return location

    def md5_file(self, fn):
        buf_size = 8192
        md5 = hashlib.md5()
        with open(fn, mode='rb') as fp:
            while True:
                bytes_to_read = buf_size
                buf = fp.read(bytes_to_read)
                if not buf:
                    break
                md5.update(buf)
            content_md5 = base64.standard_b64encode(md5.digest())
        return content_md5

    def md5_obj(self, bs):
        md5 = hashlib.md5()
        md5.update(bs)
        return base64.standard_b64encode(md5.digest())

    def list_uploaded_objects(self, prefix=None):
        """
        列出桶中的文件,如果提供了prefix,则最多返回1000条记录
        若无法满足需要,可以使用sdk的api进行获取
        :arg 指定返回key的前缀"""
        keys = []
        if prefix is not None:
            response = self._bos_client.list_objects(self._bucket,
                                                     prefix=prefix,
                                                     max_keys=1000)
            for obj in response.contents:
                keys.append(obj.key)
            return keys
        # list_all_objects is a generator that pages through every key in the bucket
        for obj in self._bos_client.list_all_objects(self._bucket):
            keys.append(obj.key)
        return keys

    def file_exists(self, fn):
        """
        :arg 文件名是否存在,服务器上的文件名为key去掉前缀(带slash)后的
        :return 如果文件存在,返回文件url,否则返回None
        """
        keys = self.list_uploaded_objects()
        for key in keys:
            slash_index = key.rfind("/")
            if slash_index > 0:
                file_name = key[slash_index + 1:]
                if file_name == fn:
                    url = self._bos_client.generate_pre_signed_url(
                        bucket_name=self._bucket,
                        key=key,
                        expiration_in_seconds=-1)
                    return url.decode("utf-8")
        return None

    def key_exists(self, key):
        keys = self.list_uploaded_objects()
        return key in keys
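
A minimal usage sketch for Example #4, assuming `_config` and `LOGGER` are the module-level objects the class refers to and live in the same module; the credentials, endpoint, bucket, and file paths below are placeholders:

import logging

from baidubce.auth.bce_credentials import BceCredentials
from baidubce.bce_client_configuration import BceClientConfiguration

LOGGER = logging.getLogger(__name__)
# Placeholder credentials and endpoint -- substitute your own values
_config = BceClientConfiguration(
    credentials=BceCredentials('your-access-key-id', 'your-secret-access-key'),
    endpoint='https://bj.bcebos.com')

bos = BaiduBos('genious')
url = bos.upload_file('local/path/report.pdf', 'reports/report.pdf',
                      get_url=True, expiration_in_seconds=3600)
print(url)
print(bos.key_exists('reports/report.pdf'))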