Example #1
def clear_qiniu():
    global DELETE_COUNT
    q = Auth(access_key, secret_key)
    bucket = BucketManager(q)
    ret = bucket.list(bucket_name)
    items = ret[0].get('items') or []
    keys = [item.get('key') for item in items]
    DELETE_COUNT = len(keys)
    ops = build_batch_delete(bucket_name, keys)
    bucket.batch(ops)
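Note that clear_qiniu above only deletes the keys returned by the first bucket.list call (a single page of results). Below is a minimal sketch of a paginated variant; it assumes the same module-level access_key, secret_key and bucket_name, and the helper name clear_qiniu_all is hypothetical.

from qiniu import Auth, BucketManager, build_batch_delete

def clear_qiniu_all(page_size=1000):
    # paginated variant: keep listing and deleting until no marker is returned
    q = Auth(access_key, secret_key)
    bucket = BucketManager(q)
    deleted = 0
    marker = None
    while True:
        # the first element of the return value is the listing dict,
        # regardless of whether the SDK version also returns eof/info
        listing = bucket.list(bucket_name, marker=marker, limit=page_size)[0]
        keys = [item['key'] for item in listing.get('items', [])]
        if keys:
            bucket.batch(build_batch_delete(bucket_name, keys))
            deleted += len(keys)
        marker = listing.get('marker')
        if not marker:  # no marker means the listing is exhausted
            return deleted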
def multi_copy(private_key_list: List[str]) -> tuple:
    access_key = Facade.config["qiniu"]["access_key"]
    secret_key = Facade.config["qiniu"]["secret_key"]
    # original code
    q = Auth(access_key, secret_key)
    bucket = BucketManager(q)

    # 1. prepare the source -> target key mapping
    target_copy_dict = {}
    copy_key_list = []

    # 2. compute the renamed target key once per source key
    for private_key in private_key_list:
        new_key = rename(private_key)
        target_copy_dict[private_key] = new_key
        copy_key_list.append(new_key)

    public_bucket_name = Facade.config["qiniu"]["category"]["public"]["bucket"]
    private_bucket_name = Facade.config["qiniu"]["category"]["private"][
        "bucket"]

    logworker.warning('------multi_copy---------start')
    logworker.warning(public_bucket_name)
    logworker.warning(private_bucket_name)
    logworker.warning(copy_key_list)
    logworker.warning('end------multi_copy---------')

    # When force is 'true', files with the same name are overwritten; dict keys are source files, values are target files
    ops = build_batch_copy(private_bucket_name,
                           target_copy_dict,
                           public_bucket_name,
                           force='false')
    ret, info = bucket.batch(ops)

    return ret, info, copy_key_list
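The multi_copy helper above depends on project-specific objects (Facade, rename, logworker). For reference, here is a minimal standalone sketch of a batch copy with build_batch_copy; the credentials, bucket names and keys are placeholders.

from qiniu import Auth, BucketManager, build_batch_copy

access_key = ''  # placeholder
secret_key = ''  # placeholder
q = Auth(access_key, secret_key)
bucket = BucketManager(q)

# dict keys are the source keys, values are the target keys
copy_pairs = {'a/src-1.jpg': 'b/dst-1.jpg', 'a/src-2.jpg': 'b/dst-2.jpg'}
# force='false' keeps existing target files instead of overwriting them
ops = build_batch_copy('source-bucket-name', copy_pairs, 'target-bucket-name',
                       force='false')
ret, info = bucket.batch(ops)
print(ret)
print(info)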
class QiniuWrapper():

    policy = {'returnBody': '{"key": $(key), "type": $(mimeType), "name": $(fname), "size": $(fsize), "hash": $(etag)}'}
    bucket_name = QINIU_BUCKET_NAME
    domain = 'http://%s.qiniudn.com/' % bucket_name

    def __init__(self):
        self.q = Auth(QINIU_ACCESS_KEY, QINIU_SECRET_KEY)
        self.bucket_manager = BucketManager(self.q)

    def get_upload_token(self, key, expires=3600):
        return self.q.upload_token(self.bucket_name, key, expires, self.policy)

    def upload_file(self, key, filename, mime_type="application/octet-stream"):
        '''
        Upload a file to Qiniu; if a file already exists under the given key, it is overwritten.
        '''
        ret, info = put_file(self.get_upload_token(key), key, filename, mime_type=mime_type, check_crc=True)
        if info.status_code != 200:
            return (False, info)
        return (True, info)

    def upload_stream(self, key, input_stream, data_size, mime_type="application/octet-stream"):
        '''
        Upload a file to Qiniu; if a file already exists under the given key, it is overwritten.
        '''
        ret, info = put_stream(self.get_upload_token(key), key, input_stream, data_size, mime_type=mime_type, check_crc=True)
        if info.status_code != 200:
            return (False, info)
        return (True, info)

    def move(self, old_key, new_key):
        ret, info = self.bucket_manager.move(self.bucket_name, old_key, self.bucket_name, new_key)
        if info.status_code != 200:
            return (False, info)
        return (True, info)

    def delete(self, key):
        ret, info = self.bucket_manager.delete(self.bucket_name, key)
        if info.status_code != 200:
            return (False, info)
        return (True, info)

    def batch_delete(self, keys):
        '''
        keys = ['key1', 'key2', 'key3']
        '''
        ops = build_batch_delete(self.bucket_name, keys)
        ret, info = self.bucket_manager.batch(ops)
        if info.status_code != 200:
            return (False, info)
        return (True, info)

    def list(self, prefix=None, limit=1000, marker=None):
        return self.bucket_manager.list(self.bucket_name, prefix=prefix, marker=marker, limit=limit)

    @classmethod
    def get_url(cls, key):
        return cls.domain + key
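A possible way to use QiniuWrapper, assuming QINIU_ACCESS_KEY, QINIU_SECRET_KEY and QINIU_BUCKET_NAME are configured as above; the key and file names are placeholders.

wrapper = QiniuWrapper()

# upload a local file under a chosen key
ok, info = wrapper.upload_file('docs/readme.txt', '/tmp/readme.txt',
                               mime_type='text/plain')
if ok:
    print(QiniuWrapper.get_url('docs/readme.txt'))

# remove a batch of keys
ok, info = wrapper.batch_delete(['docs/old-1.txt', 'docs/old-2.txt'])
print(ok, info)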
Example #4
 def get_file_info(self, bucket_name, keys=[]):
     """Args:
     bucket_name:'bucket_name'
     keys:  ['fileName1','fileName2']
     """
     bucket = BucketManager(self.__auth)
     ops = build_batch_stat(bucket_name, keys)
     ret, info = bucket.batch(ops)
     return ret, info
Example #5
 def delete_files(self, source_bucket, pathlist=[]):
     """Args:
     source_bucket: 'source_bucket'
     pathlist: ['source_file_name',...]
     """
     bucket = BucketManager(self.__auth)
     ops = build_batch_delete(source_bucket, pathlist)
     ret, info = bucket.batch(ops)
     return ret, info
Example #8
 def move_files(self, source_bucket, target_bucket, pathdict={}):
     """Args:
     source_bucket: 'source_bucket'
     target_bucket:  'target_bucket'
     pathdict: {'source_file_name':'target_file_name',...}
     """
     bucket = BucketManager(self.__auth)
     ops = build_batch_move(source_bucket, pathdict, target_bucket)
     ret, info = bucket.batch(ops)
     return ret, info
Example #10
    def multi_delete(self, key_list):
        # key_list: [key1, key2, key3, ...]
        from qiniu import build_batch_delete
        bucket = BucketManager(self.q)
        ops = build_batch_delete(self.bucket_name, key_list)
        ret, info = bucket.batch(ops)
        # print('QiniuStorage - multi_delete: %s' % info)

        json_info = json.loads(info.text_body)
        for m_info in json_info:
            # "code":612,"data":{"error":"no such file or directory"
            assert m_info[u'code'] == 200 or m_info[u'code'] == 612
Example #12
def delete_data(urls, bucket='lucky-web', key_prefix=''):
    keys = []
    for url in urls:
        if not url.endswith('/'):
            url += '/'
        key = url.split('/')[-2]
        if key_prefix:
            key = '%s/%s' % (key_prefix, key)
        keys.append(key)

    ops = build_batch_delete(bucket, keys)
    b = BucketManager(Q)
    ret, info = b.batch(ops)
def delete_files(bucket_name, files_key, bucket=None):
    if bucket is None:
        bucket = BucketManager(q)
    if len(files_key) == 0:
        logging.info('nothing to delete')
        return
    ops = build_batch_delete(bucket_name, files_key)

    ret, info = bucket.batch(ops)

    if ret[0]['code'] == 200:
        logging.info('all files deleted successfully')
    else:
        logging.error('delete failed!')
Example #14
    def delete_queryset(self, request, queryset):
        access_key = settings.QINIU_ACCESS_KEY
        secret_key = settings.QINIU_SECRET_KEY
        bucket = settings.QINIU_BUCKET
        host = settings.QINIU_HOST

        auth = Auth(access_key, secret_key)

        bucketManager = BucketManager(auth)

        keys = []

        for item in queryset:
            filename = str(item.link)
            if (filename.startswith(host)):
                filename = filename.replace(host + '/', '')
            else:
                if re.match(r'^https?:/{2}\w.+$', filename):
                    continue
            keys.append(filename)

        bucketManager.batch(build_batch_delete(bucket, keys))

        queryset.delete()
Example #15
def main():
    items = []
    keys = dict()
    q = Auth(accessKey, secretKey)
    bucketManager = BucketManager(q)
    data = bucketManager.list(bucket)
    if data[0].get("items"):
        items = data[0].get("items")
    for value in items:
        keys[value.get("key")] = value.get("key")
    # When force is 'true', files with the same name are overwritten; dict keys are source files, values are target files
    ops = build_batch_move(bucket, keys, new, force='true')
    ret, info = bucketManager.batch(ops)
    print(ret)
    print(info)
    pass
def multi_delete(where: str, delete_key_list: List[str]) -> None:
    access_key = Facade.config["qiniu"]["access_key"]
    secret_key = Facade.config["qiniu"]["secret_key"]
    # original code
    q = Auth(access_key, secret_key)
    bucket = BucketManager(q)

    # 1. resolve the bucket for the given category
    bucket_name = Facade.config["qiniu"]["category"][where]

    logworker.warning('------multi_delete---------start')
    logworker.warning(delete_key_list)
    logworker.warning(bucket_name)
    logworker.warning('end------multi_delete---------')

    # original code
    ops = build_batch_delete(bucket_name, delete_key_list)
    ret, info = bucket.batch(ops)
Example #17
    def rename_static_file_in_cdn(self):
        exclude_files = [
            r'.DS_Store',
        ]
        cdn_file_path = configs.CDN_STATIC_ROOT
        file_list = {}
        for x in os.listdir(cdn_file_path):
            if x in exclude_files:
                continue
            file_list[x] = x.replace(Command.SLASH_SUBSTITUTE_BY, '/')

        q = Auth(self.access_key, self.secret_key)
        bucket = BucketManager(q)

        # When force is 'true', files with the same name are overwritten; dict keys are the original filenames, values are the new filenames
        ops = build_batch_rename(self.bucket_name, file_list, force='false')
        ret, info = bucket.batch(ops)
        print(ret)
        print(info)
Example #18
def delete_files(keys=None):
    """
    Batch-delete files.
    :param keys:
    :return:
    """
    if keys is None:
        keys = []
    config = get_qiniu_config()

    # Qiniu credentials
    q = Auth(config['access_key'], config['secret_key'])
    bucket = BucketManager(q)
    ops = build_batch_delete(config['bucket_name'], keys)
    ret, info = bucket.batch(ops)
    if info.status_code == 200:
        return True
    else:
        print(ret)
        return False
Example #19
# -*- coding: utf-8 -*-
"""
Batch-delete files.

https://developer.qiniu.com/kodo/api/1250/batch
"""


from qiniu import build_batch_delete, Auth, BucketManager

access_key = ''

secret_key = ''

q = Auth(access_key, secret_key)

bucket = BucketManager(q)

bucket_name = ''

keys = ['1.gif', '2.txt', '3.png', '4.html']

ops = build_batch_delete(bucket_name, keys)
ret, info = bucket.batch(ops)
print(info)
Example #20
def delete_data_by_key(keys, bucket):
    ops = build_batch_delete(bucket, keys)
    b = BucketManager(Q)
    b.batch(ops)
Example #21
class QiniuBucketManager:
    def __init__(self, auth, bucket):
        """
        @param auth: qiniu auth object
        @param bucket: bucket name
        """
        self.auth = auth
        self.bucket = bucket
        self.bktmanager = BucketManager(auth)

        self.upload_token = auth.upload_token(bucket)
        
    def data_info(self, key):
        """
        Return stat info with keys {fsize, hash, mimeType, putTime};
        raise `QiniuReadDataError` with status_code and error message on failure.
        """
        r, info = self.bktmanager.stat(self.bucket, key)
        if not r:
            raise QiniuReadDataError('status_code:{0}, error:{1}'.format(info.status_code,
                            info.text_body))
        else:
            return r
    
    def push_data(self, key, data):
        """
        Return the hash of the uploaded data on success; otherwise raise
        `QiniuPushDataError` with status_code and error message.
        """
        ret, info = put_data(self.upload_token, key, data)
        if not ret:
            raise QiniuPushDataError('status_code:{0}, error:{1}'.format(info.status_code,
                            info.text_body))
        else:
            return ret['hash']
    
    def delete_data(self, key):
        """
        delete data `key`
        """
        _, info = self.bktmanager.delete(self.bucket, key)
        if info.status_code != 200:
            raise QiniuError('status_code:{0}, error:{1}'.format(info.status_code,
                            info.text_body))
    
    def batch_delete_data(self, keys):
        if keys:
            ops = build_batch_delete(self.bucket, keys)
            _, info = self.bktmanager.batch(ops)
            if info.status_code != 200:
                # info.text_body holds the raw response; report it as-is
                raise QiniuError('status_code:{0}, error:{1}'.format(info.status_code,
                                info.text_body))
        
    def copy_data_to(self, sdata, dbucket, ddata):
        """
        copy data `sdata` in this bucket to destination bucket `dbucket` with name `ddata`
        """
        _, info = self.bktmanager.copy(self.bucket, sdata, dbucket, ddata)
        if info.status_code != 200:
            raise QiniuError('status_code:{0}, error:{1}'.format(info.status_code,
                            info.text_body))
    
    def copy_data_from(self, sbucket, sdata, ddata):
        """
        copy data from `sdata` in bucket `sbucket` to this bucket with name `ddata`
        """
        _, info = self.bktmanager.copy(sbucket, sdata, self.bucket, ddata)
        if info.status_code != 200:
            raise QiniuError('status_code:{0}, error:{1}'.format(info.status_code,
                            info.text_body))
    
    def move_data_to(self, sdata, dbucket, ddata):
        """
        move data `sdata` in this bucket to destination bucket `dbucket` with name `ddata`
        """
        _, info = self.bktmanager.move(self.bucket, sdata, dbucket, ddata)
        if info.status_code != 200:
            raise QiniuError('status_code:{0}, error:{1}'.format(info.status_code,
                            info.text_body))
    
    def move_data_from(self, sbucket, sdata, ddata):
        """
        move data from `sdata` in bucket `sbucket` to this bucket with name `ddata`
        """
        _, info = self.bktmanager.move(sbucket, sdata, self.bucket, ddata)
        if info.status_code != 200:
            raise QiniuError('status_code:{0}, error:{1}'.format(info.status_code,
                            info.text_body))
    
    def datas(self, prefix=None, limit=None, marker=None):
        """
        List objects in the bucket; each item carries fsize, hash, key, mimeType and putTime.
        """
        def list_files(marker, limit):
            k = {'bucket':self.bucket}
            if limit:
                k['limit'] = limit
            if prefix:
                k['prefix'] = prefix
            if marker:
                k['marker'] = marker
            return  self.bktmanager.list(**k)

        eof = False
        res = []
        mk = marker
        lm = limit if limit else MAX_INT64
        while not eof and lm:
            _r, eof, info = list_files(mk, lm)
            if info.status_code != 200:
                raise QiniuError('status_code:{0}, error:{1}'.format(info.status_code,
                            info.text_body))
                
            mk = _r.get('marker', None)
            lm = lm-len(_r['items'])
            res += _r['items']
        return res
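A possible usage sketch for QiniuBucketManager; the credentials and bucket name are placeholders, and the custom exceptions (QiniuError, QiniuReadDataError, QiniuPushDataError) and MAX_INT64 are assumed to be defined alongside the class.

from qiniu import Auth

auth = Auth('<access_key>', '<secret_key>')        # placeholders
mgr = QiniuBucketManager(auth, 'my-bucket')        # placeholder bucket name

mgr.push_data('hello.txt', b'hello qiniu')         # returns the content hash
print(mgr.data_info('hello.txt'))                  # fsize, hash, mimeType, putTime
print([item['key'] for item in mgr.datas(limit=10)])
mgr.batch_delete_data(['hello.txt'])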
 def delete(self, bucket_name, del_list):
     mbucket = BucketManager(self.mauth)
     del_item = build_batch_delete(bucket_name, del_list)
     ret, info = mbucket.batch(del_item)
     print(info)
Example #23
class Sync:
    """
    Sync a local directory to Qiniu cloud storage.
    """
    def __init__(self, access_key: str, secret_key: str, bucket_name: str,
                 sync_dir: str, exclude: List, cover: bool,
                 remove_redundant: bool):
        self.bucket_name = bucket_name
        self.q = Auth(access_key, secret_key)
        self.bucket = BucketManager(self.q)
        self.sync_dir = sync_dir
        self.exclude = exclude
        self.cover = cover
        self.remove_redundant = remove_redundant

        self.sync()

    def sync(self):
        """
        Run the sync operation.
        :return:
        """
        remote_files = self.list_remote()
        local_files = self.list_local()

        # First, delete remote files that are no longer present locally
        remove_remote_files = []
        for remote_filename in remote_files:
            if remote_filename not in local_files:
                remove_remote_files.append(remote_filename)
        self.bucket.batch(
            build_batch_delete(self.bucket_name, remove_remote_files))

        # Upload local files to the remote (new or changed ones)
        for local_filename in local_files:
            if local_filename not in remote_files or local_files[
                    local_filename]['hash'] != remote_files[local_filename][
                        'hash']:
                print('putting ' + local_filename)
                ret, info = put_file(
                    self.q.upload_token(self.bucket_name, local_filename,
                                        3600), local_filename,
                    local_files[local_filename]['fullpath'])
        pass

    def list_remote(self) -> Dict:
        """
        List all file info in the remote bucket.
        :return: Dict
        """
        result = {}
        for file in self.bucket.list(self.bucket_name)[0]['items']:
            result[file['key']] = file

        return result

    def list_local(self) -> Dict:
        """
        List all file info under the local directory.
        """
        files = {}

        def get_files(path):
            for filename in os.listdir(path):
                if filename in self.exclude:
                    continue
                fullpath = os.path.join(path, filename)
                if os.path.isfile(fullpath):
                    key = fullpath.split(self.sync_dir)[1]
                    files[key] = {'fullpath': fullpath, 'hash': etag(fullpath)}
                    #
                else:
                    get_files(fullpath)

        get_files(self.sync_dir)
        return files
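A possible way to invoke the Sync helper above; every argument is a placeholder, and the constructor runs the sync immediately. Note that sync_dir should end with a path separator, since keys are derived by splitting full paths on sync_dir in list_local().

Sync(
    access_key='<access_key>',
    secret_key='<secret_key>',
    bucket_name='my-bucket',
    sync_dir='/path/to/site/',   # trailing slash, see list_local()
    exclude=['.git', '.DS_Store'],
    cover=True,
    remove_redundant=True,
)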
Example #25
class Sync:
    """
    Sync a local directory to Qiniu cloud storage.
    """
    def __init__(
        self,
        access_key: str,
        secret_key: str,
        bucket_name: str,
        sync_dir: str,
        exclude: List,
        cover: bool,
        remove_redundant: bool,
    ):
        self.bucket_name = bucket_name
        self.q = Auth(access_key, secret_key)
        self.bucket = BucketManager(self.q)
        self.sync_dir = sync_dir
        self.exclude = exclude
        self.cover = cover
        self.remove_redundant = remove_redundant
        self.sync()

    def sync(self):
        """
        Run the sync operation.
        :return:
        """
        remote_files = self.list_remote()
        local_files = self.list_local()
        # First, delete remote files that are no longer present locally
        remove_remote_files = []
        for remote_filename in remote_files:
            if remote_filename not in local_files:
                remove_remote_files.append(remote_filename)
        self.bucket.batch(
            build_batch_delete(self.bucket_name, remove_remote_files))
        # Upload local files to the remote (only new or modified ones)
        for local_filename in local_files:
            win_path = local_filename.replace('\\', '/')
            if (win_path not in remote_files or local_files[win_path]["hash"]
                    != remote_files[win_path]["hash"]):
                print("putting " + win_path)
                ret, info = put_file(
                    self.q.upload_token(self.bucket_name, win_path, 3600),
                    win_path,
                    local_files[local_filename]["fullpath"],
                )

    def list_remote(self) -> Dict:
        """
        List all file info in the remote bucket.
        :return: Dict
        """
        result = {}
        for file in self.bucket.list(self.bucket_name)[0]["items"]:
            result[file["key"]] = file
        return result

    def list_local(self) -> Dict:
        """
        List all file info under the local directory.
        """
        files = {}

        def get_files(path):
            for filename in os.listdir(path):
                if filename in self.exclude:
                    continue
                if filename.startswith('.git'):
                    continue
                fullpath = os.path.join(path, filename)
                if os.path.isfile(fullpath):
                    key = fullpath.split(self.sync_dir)[1]
                    files[key.replace('\\', '/')] = {
                        "fullpath": fullpath,
                        "hash": etag(fullpath)
                    }
                else:
                    get_files(fullpath)

        get_files(self.sync_dir)
        return files
Example #26
class Sync:
    """
    Sync a local directory to Qiniu cloud storage.
    """
    def __init__(
        self,
        access_key: str,
        secret_key: str,
        bucket_name: str,
        sync_dir: str,
        exclude: List,
        cover: bool,
        remove_redundant: bool,
    ):
        self.bucket_name = bucket_name
        self.q = Auth(access_key, secret_key)
        self.bucket = BucketManager(self.q)
        self.sync_dir = sync_dir
        self.exclude = exclude
        self.cover = cover
        self.remove_redundant = remove_redundant
        self.sync()

    def sync(self):
        """
        Run the sync operation.
        :return:
        """
        remote_files = self.list_remote()
        local_files = self.list_local()
        # First, delete remote files that are no longer present locally
        remove_remote_files = []
        remove_count = 0
        put_count = 0
        for remote_filename in remote_files:
            if remote_filename not in local_files:
                remove_remote_files.append(remote_filename)
                remove_count += 1
                print("remove " + str(remove_count) + " => " + remote_filename)
        self.bucket.batch(
            build_batch_delete(self.bucket_name, remove_remote_files))
        # Upload local files to the remote (only new or modified ones)
        for local_filename in local_files:
            if (local_filename not in remote_files
                    or local_files[local_filename]["hash"] !=
                    remote_files[local_filename]["hash"]):
                ret, info = put_file(
                    self.q.upload_token(self.bucket_name, local_filename,
                                        3600),
                    local_filename,
                    local_files[local_filename]["fullpath"],
                )
                put_count += 1
                print("putting " + str(put_count) + " => " + local_filename)

    def list_remote(self) -> Dict:
        """
        List all file info in the remote bucket.
        :return: Dict
        """
        result = {}
        for file in self.bucket.list(self.bucket_name)[0]["items"]:
            if (file["mimeType"] not in [
                    "image/png", "image/jpg", "image/jpeg", "image/gif"
            ]):
                result[file["key"]] = file
        return result

    def list_local(self) -> Dict:
        """
        List all file info under the local directory.
        """
        files = {}

        def get_files(path):
            for filename in os.listdir(path):
                if filename in self.exclude:
                    continue
                fullpath = os.path.join(path, filename)
                if os.path.isfile(fullpath):
                    key = fullpath.split(self.sync_dir)[1]
                    files[key] = {"fullpath": fullpath, "hash": etag(fullpath)}
                else:
                    get_files(fullpath)

        get_files(self.sync_dir)
        return files
Example #28
class QiniuClient(object):
    def __init__(self, access_key, secret_key, bucket_name):
        self.client = Auth(access_key, secret_key)
        self.bucket_name = bucket_name
        self.bucket = BucketManager(self.client)

    def set_bucket_name(self, bucket_name):
        """重设bucketname"""
        self.bucket_name = bucket_name

    def upload_file(self, filename, file_io=None):
        """上传文件
        接受文件名或者IO形式的数据

        :filename:
            可以是文件系统中的文件路径,也可以直接使用文件名称,
            最后会根据是否带`file_io`参数来决定是否需要读取文件。
        :file_io:
            文件IO对象,如果带有这个参数则直接将其上传。
        :return:
            - error: 是否有错误
            - exception: 错误原因
            - hash: 上传文件的hash
            - key: 上传文件的key
        """
        if file_io is None:
            file_io = BytesIO()
            with open(filename, 'rb') as f:
                file_io.write(f.read())
        filename = filename.rsplit("/")[-1]

        try:
            token = self.client.upload_token(self.bucket_name, filename)
            result, info = put_data(token, filename, file_io.getvalue())
        except Exception as exc:
            return {'error': True, 'exception': str(exc)}
        else:
            return {
                "error": info.status_code != 200,
                "exception": info.exception,
                "hash": result.get('hash', None),
                "key": result.get('key', None)
            }

    def file_delete(self, filename):
        """删除指定的文件"""
        try:
            result, info = self.bucket.delete(self.bucket_name, filename)
        except Exception as exc:
            return {'error': True, 'exception': str(exc)}
        else:
            return {
                'error': info.status_code != 200,
                'exception': info.exception
            }

    def file_list(self, prefix=None, delimiter=None, marker=None, limit=None):
        """获取文件列表,提供了若干选项供筛选文件

        :param prefix: 前缀
        :param delimiter: 分隔符
        :param marker: 标记
        :param limit: 条目数量
        :return:
            - error: 是否有错误
            - exception: 错误的原因
            - items: 文件数据列表
                - key
                - hash
                - fsize
                - mimeType
                - putTime
                - type
                - status
        """
        try:
            result, info = self.bucket.list(self.bucket_name, prefix, marker,
                                            limit, delimiter)
        except Exception as exc:
            return {'error': True, 'exception': str(exc)}
        else:
            return {
                'error': info.status_code != 200,
                'exception': info.exception,
                'items': result.get('items', None)
            }

    def stat_info(self, filename):
        """获取文件信息

        :return: 同上
        """
        try:
            result, info = self.bucket.stat(self.bucket_name, filename)
        except Exception as exc:
            return {'error': True, 'exception': str(exc)}
        return {
            'error': info.status_code != 200,
            'exception': info.exception,
            'fsize': result.get('fsize', None),
            'hash': result.get('hash', None),
            'mimeType': result.get("mimeType", None),
            'putTime': result.get("putTime", None),
            'type': result.get('type', None)
        }

    def batch_stat(self, filenames):
        """批量获取文件信息

        :return: 同上
        """
        try:
            ops = build_batch_stat(self.bucket_name, filenames)
            result, info = self.bucket.batch(ops)
        except Exception as exc:
            return {'error': True, 'exception': str(exc)}
        else:
            return {
                'error': info.status_code != 200,
                'exception': info.exception,
                'items': result.get('items', None)
            }

    def batch_delete(self, filenames):
        """批量删除文件
        """
        try:
            ops = build_batch_delete(self.bucket_name, filenames)
            result, info = self.bucket.batch(ops)
        except Exception as exc:
            return {'error': True, 'exception': str(exc)}
        else:
            return {
                'error': info.status_code != 200,
                'exception': info.exception
            }

    def fetch(self, url, filename):
        """抓去网络资源到空间"""
        try:
            result, info = self.bucket.fetch(url, self.bucket_name, filename)
        except Exception as exc:
            return {'error': True, 'exception': str(exc)}
        return {'error': info.status_code != 200, 'exception': info.exception}
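A possible usage sketch for QiniuClient; the credentials, bucket and file names are placeholders.

client = QiniuClient('<access_key>', '<secret_key>', 'my-bucket')

print(client.upload_file('/tmp/logo.png'))          # read from disk and upload
print(client.file_list(prefix='logo', limit=10))    # list matching keys
print(client.stat_info('logo.png'))                 # single-file metadata
print(client.batch_delete(['logo.png']))            # remove the test object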