Example #1
import traceback

from baidubce.auth.bce_credentials import BceCredentials
from baidubce.bce_client_configuration import BceClientConfiguration
from baidubce.services.bos.bos_client import BosClient


def file_upload_bos(config, local_filename):
    """Upload a local file to BOS using the AK/SK/HOST/bucket_name entries in config."""
    try:
        bucket_name = str(config['bucket_name'])
        AK = config['AK']
        SK = config['SK']
        HOST = config['HOST']

        bos_config = BceClientConfiguration(credentials=BceCredentials(
            str(AK), str(SK)),
                                            endpoint=str(HOST))
        bos_client = BosClient(bos_config)

        # Create the bucket if it does not exist yet.
        if not bos_client.does_bucket_exist(bucket_name):
            bos_client.create_bucket(bucket_name)

        # Use the file's base name as the object key.
        key = str(local_filename.split('/')[-1])
        bos_client.put_object_from_file(bucket_name, key, str(local_filename))
        print(local_filename + ' has been uploaded to bucket: ' + bucket_name)
        return 0
    except Exception as e:
        print('file_upload_bos exception: ' + str(e))
        traceback.print_exc()
        return -1
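
The helper expects a plain dict with the credential and bucket entries read above. A minimal usage sketch, with placeholder values for the credentials, endpoint, bucket, and file path (none of these come from the original snippet):

# Placeholder values for illustration only; substitute real credentials.
config = {
    'AK': 'your-access-key-id',
    'SK': 'your-secret-access-key',
    'HOST': 'bj.bcebos.com',           # example BOS endpoint for the target region
    'bucket_name': 'my-upload-bucket',
}

if file_upload_bos(config, '/tmp/report.csv') == 0:
    print('upload succeeded')
else:
    print('upload failed')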
Example #2
class BOSStorage(BcsStorage):
    """
    A FileStorage base class that supports both BOS and local Django storage.
    It overrides the storage file location and the base URL.
    """
    bucket_name = BCS_BUCKET['BUCKET_NAME']

    def __init__(self,
                 location=settings.MEDIA_URL,
                 base_url=settings.MEDIA_URL):
        super(BOSStorage, self).__init__(location, base_url)
        config = BceClientConfiguration(credentials=BceCredentials(
            BCS_BUCKET['AK'], BCS_BUCKET['SK']),
                                        endpoint=BCS_BUCKET['END_POINT'])
        self.bos_client = BosClient(config)
        # check if bucket exists
        if not self.bos_client.does_bucket_exist(self.bucket_name):
            self.bos_client.create_bucket(self.bucket_name)

    def saveToBucket(self, name, content):
        """Read the uploaded content and put it into the bucket with an MD5 checksum."""
        if hasattr(content, '_get_file'):  # admin entry
            data = content._get_file().read()
        elif isinstance(content, ContentFile):  # view entry (ContentFile)
            data = content.read()
        else:
            data = content
        md5 = hashlib.md5()
        md5.update(data)
        md5value = base64.standard_b64encode(md5.digest())
        self.bos_client.put_object(self.bucket_name, name, data, len(data),
                                   md5value)

    def delete(self, name):
        """
        Delete a file from bos.
        """
        self.bos_client.delete_object(self.bucket_name, name)
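
Once a storage class like this exists, Django can be pointed at it globally or per field. A minimal sketch, assuming the class lives in a hypothetical module myapp/storage.py and that BcsStorage wires Django's Storage API (e.g. _save/delete) through to the methods above:

# settings.py -- assumed module path, adjust to wherever BOSStorage is defined
DEFAULT_FILE_STORAGE = 'myapp.storage.BOSStorage'

# or attach it to a single field instead of changing the global default
from django.db import models
from myapp.storage import BOSStorage

class Report(models.Model):
    attachment = models.FileField(storage=BOSStorage())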
Example #3
    __logger = logging.getLogger(__name__)

    bucket_name = 'samplebucket'
    key = 'samplekey' + _random_string(6)
    file_name = 'samplefile'
    download = 'download'

    ######################################################################################################
    #            bucket operation samples
    ######################################################################################################

    # create a bos client
    bos_client = BosClient(bos_sample_conf.config)

    # check if bucket exists
    if not bos_client.does_bucket_exist(bucket_name):
        bos_client.create_bucket(bucket_name)

    # delete a bucket (you can't delete a bucket that is not empty,
    # so clear it out first)
    for obj in bos_client.list_all_objects(bucket_name):
        bos_client.delete_object(bucket_name, obj.key)
    bos_client.delete_bucket(bucket_name)

    # create the bucket again
    bos_client.create_bucket(bucket_name)

    # list your buckets
    response = bos_client.list_buckets()
    for bucket in response.buckets:
        __logger.debug("[Sample] list buckets:%s", bucket.name)
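
These samples take their client configuration from a bos_sample_conf module. A minimal sketch of what such a module typically provides, with placeholder credentials and endpoint:

# bos_sample_conf.py -- placeholder credentials/endpoint for illustration only
from baidubce.auth.bce_credentials import BceCredentials
from baidubce.bce_client_configuration import BceClientConfiguration

bos_host = 'bj.bcebos.com'                  # BOS endpoint of your region
access_key_id = 'your-access-key-id'
secret_access_key = 'your-secret-access-key'

config = BceClientConfiguration(
    credentials=BceCredentials(access_key_id, secret_access_key),
    endpoint=bos_host)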
Example #4
    __logger = logging.getLogger(__name__)

    bucket_name = 'samplebucket-' + _random_string(6)
    key = 'samplekey' + _random_string(6)
    file_name = 'samplefile'
    download = 'download'

    ######################################################################################################
    #            bucket operation samples
    ######################################################################################################

    # create a bos client
    bos_client = BosClient(bos_sample_conf.config)

    # check if bucket exists
    if not bos_client.does_bucket_exist(bucket_name):
        bos_client.create_bucket(bucket_name)

    # delete a bucket (you can't delete a bucket that is not empty,
    # so clear it out first)
    for obj in bos_client.list_all_objects(bucket_name):
        bos_client.delete_object(bucket_name, obj.key)
    bos_client.delete_bucket(bucket_name)

    # create the bucket again
    bos_client.create_bucket(bucket_name)

    # list your buckets
    response = bos_client.list_buckets()
    for bucket in response.buckets:
        __logger.debug("[Sample] list buckets:%s", bucket.name)
Example #5
def getClient():
    """Return a BosClient, creating the module-level bucket_name bucket if it is missing."""
    bos_client = BosClient(bos_sample_conf.config)
    if not bos_client.does_bucket_exist(bucket_name):
        bos_client.create_bucket(bucket_name)
    return bos_client
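
A short usage sketch for the helper above; the object key and payload are placeholders, and put_object_from_string / list_all_objects are the same BosClient calls used in the other examples:

client = getClient()
# Upload a small text object under a placeholder key.
client.put_object_from_string(bucket_name, 'hello.txt', 'hello, bos')
# List what is now in the bucket.
for obj in client.list_all_objects(bucket_name):
    print(obj.key, obj.size)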
Example #6
import os

from baidubce.auth.bce_credentials import BceCredentials
from baidubce.bce_client_configuration import BceClientConfiguration
from baidubce.services.bos.bos_client import BosClient


class UBosClient(object):
    """
    Thin wrapper around BosClient for working with a single bucket.
    """
    def __init__(self,
                 access_key_id,
                 secret_access_key,
                 bucket_name='',
                 endpoint=''):
        """
        Initialize the client.
        """
        super(UBosClient, self).__init__()

        # Build the BceClientConfiguration from the credentials and endpoint.
        config = BceClientConfiguration(credentials=BceCredentials(
            access_key_id, secret_access_key),
                                        endpoint=endpoint)
        # Request connection timeout in milliseconds.
        config.connection_timeout_in_mills = 3000
        # Create the underlying BosClient.
        self.client = BosClient(config)
        self.bucket = bucket_name

    def check_bucket(self):
        """
        Check whether the bucket exists.
        """
        return bool(self.client.does_bucket_exist(self.bucket))

    def check_object_key(self, object_key):
        """
        Check whether an object with the given key exists.
        """
        if not self.check_bucket():
            return False
        try:
            self.client.get_object_meta_data(self.bucket, object_key)
            return True
        except Exception:
            return False

    def mkdir(self, dir_name):
        """
        Create a "directory" by writing an empty object whose key ends with '/'.
        """
        if not self.check_bucket():
            return False
        try:
            self.client.put_object_from_string(self.bucket,
                                               '{}/'.format(dir_name), '')
            return True
        except Exception:
            return False

    def get_all_files(self):
        """
        Return every object in the bucket as a list of {'name', 'size'} dicts.
        """
        file_list = []
        if not self.check_bucket():
            return file_list
        for fobj in self.client.list_all_objects(self.bucket):
            file_list.append({'name': fobj.key, 'size': fobj.size})
        return file_list

    def get_files_by_dir(self, dir_name):
        """
        Return the objects under a "directory" prefix, excluding the marker object itself.
        """
        file_list = []
        if not self.check_bucket():
            return file_list
        prefix = '{}/'.format(dir_name)
        response = self.client.list_objects(self.bucket, prefix=prefix)
        for fobj in response.contents:
            if fobj.key == prefix:
                continue
            file_list.append({'name': fobj.key, 'size': fobj.size})
        return file_list

    def rmfile(self, object_key):
        """
        Delete a single object.
        """
        if not self.check_bucket():
            return False
        self.client.delete_object(self.bucket, object_key)
        return True

    def rmfiles(self, object_keys):
        """
        Delete multiple objects in one request.
        """
        if not self.check_bucket():
            return False
        self.client.delete_multiple_objects(self.bucket, object_keys)
        return True

    def rmdir(self, dir_name):
        """
        Delete a "directory": remove its child objects first, then the marker object.
        """
        if not self.check_bucket():
            return False
        prefix = '{}/'.format(dir_name)
        file_list = self.get_files_by_dir(dir_name)
        object_keys = [fobj['name'] for fobj in file_list if 'name' in fobj]
        self.rmfiles(object_keys)
        self.client.delete_object(self.bucket, prefix)
        return True

    def single_upload(self, object_key, file_path):
        """
        Upload the file in a single request, picking a content type from its suffix.
        """
        if not self.check_bucket():
            return False
        suffix = file_path.split('.')[-1].lower()
        if suffix == 'mp4':
            ret = self.client.put_object_from_file(self.bucket,
                                                   object_key,
                                                   file_path,
                                                   content_type='video/mp4')
        elif suffix in ['jpg', 'jpeg']:
            ret = self.client.put_object_from_file(self.bucket,
                                                   object_key,
                                                   file_path,
                                                   content_type='image/jpeg')
        else:
            ret = self.client.put_object_from_file(self.bucket, object_key,
                                                   file_path)
        print(ret)
        return True

    def get_upload_id(self, object_key):
        """
        Get the upload_id for a resumable multipart upload, reusing an
        in-progress upload of the same key if one exists.
        """
        upload_id = None
        response = self.client.list_multipart_uploads(self.bucket)
        for item in response.uploads:
            if item.key == object_key:
                upload_id = item.upload_id
                break
        if not upload_id:
            upload_id = self.client.initiate_multipart_upload(
                self.bucket, object_key, content_type='video/mp4').upload_id
        return upload_id

    def multipart_upload(self, object_key, file_path):
        """
        Resumable multipart upload.
        """
        upload_id = self.get_upload_id(object_key)
        if not upload_id:
            return False
        left_size = os.path.getsize(file_path)
        offset, part_number, part_list = 0, 1, []
        while left_size > 0:
            # Upload in 5 MB parts (the final part may be smaller).
            part_size = 5 * 1024 * 1024
            if left_size < part_size:
                part_size = left_size
            # Retry each part upload up to 300 times.
            for _ in range(300):
                try:
                    response = self.client.upload_part_from_file(
                        self.bucket, object_key, upload_id, part_number,
                        part_size, file_path, offset)
                    break
                except Exception:
                    pass
            left_size -= part_size
            offset += part_size
            part_list.append({
                "partNumber": part_number,
                "eTag": response.metadata.etag
            })
            part_number += 1

        # Retry completing the upload up to 300 times.
        for _ in range(300):
            try:
                ret = self.client.complete_multipart_upload(
                    self.bucket, object_key, upload_id, part_list)
                print(ret)
                return True
            except Exception:
                pass
        return False

    def upload_file(self, object_key, file_path):
        """
        Upload a file, choosing the strategy based on its size.
        """
        file_size = os.path.getsize(file_path)
        if file_size > 5 * 1024 * 1024:
            # Larger than 5 MB: resumable multipart upload.
            self.multipart_upload(object_key, file_path)
        else:
            # Small file: single-shot upload.
            self.single_upload(object_key, file_path)
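
A minimal usage sketch for the wrapper above; the credentials, endpoint, bucket, and file paths are placeholders:

# Placeholder credentials and paths for illustration only.
client = UBosClient('your-access-key-id',
                    'your-secret-access-key',
                    bucket_name='my-media-bucket',
                    endpoint='bj.bcebos.com')

if client.check_bucket():
    client.mkdir('videos')
    # Files over 5 MB go through multipart_upload(), smaller ones through single_upload().
    client.upload_file('videos/demo.mp4', '/tmp/demo.mp4')
    print(client.get_files_by_dir('videos'))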