def get_file_info(self, bucket_name, keys=None):
    """Batch-fetch stat info for a list of files in a bucket.

    Args:
        bucket_name: name of the bucket, e.g. 'bucket_name'
        keys: list of file names, e.g. ['fileName1', 'fileName2'];
            defaults to an empty list when omitted.

    Returns:
        (ret, info) tuple as produced by BucketManager.batch().
    """
    # None sentinel instead of a mutable [] default: a shared list default
    # would persist mutations across calls.
    if keys is None:
        keys = []
    bucket = BucketManager(self.__auth)
    ops = build_batch_stat(bucket_name, keys)
    ret, info = bucket.batch(ops)
    return ret, info
def get_file_info(self, bucket_name, keys=None):
    """Batch-query stat information for files in a bucket.

    Args:
        bucket_name: 'bucket_name'
        keys: ['fileName1', 'fileName2'] (treated as [] when omitted)

    Returns:
        (ret, info) from BucketManager.batch().
    """
    # Avoid the mutable-default-argument pitfall (keys=[] is shared
    # across every call of the function).
    keys = [] if keys is None else keys
    bucket = BucketManager(self.__auth)
    ops = build_batch_stat(bucket_name, keys)
    ret, info = bucket.batch(ops)
    return ret, info
def update_file(k2f, ulist): ops = qiniu.build_batch_stat(bucket_name, ulist) rets, infos = bucket.batch(ops) for i in xrange(len(ulist)): k = ulist[i] f = k2f.get(k) ret = rets[i]["data"] remote_hash = ret.get("hash", None) local_f = os.path.join(basedir, f) local_hash = etag(local_f) if local_hash != remote_hash: print "update_file: %s" % f upload_file(k, os.path.join(basedir, f), flag=True)
def batch_stat(self, filenames):
    """Fetch stat info for several files in one batch request.

    :param filenames: iterable of file keys to stat
    :return: dict with 'error' / 'exception' keys (plus 'items' when the
        request completed), mirroring the other batch helpers
    """
    try:
        operations = build_batch_stat(self.bucket_name, filenames)
        result, info = self.bucket.batch(operations)
    except Exception as exc:
        # Surface unexpected client-side failures as an error payload
        # rather than raising to the caller.
        return {'error': True, 'exception': str(exc)}
    return {
        'error': info.status_code != 200,
        'exception': info.exception,
        'items': result.get('items', None),
    }
def update_file(k2f, ulist):
    """Upload files whose size differs from the remote copy and whose
    remote put-time is older than the local timestamp (minus diff_time).
    """
    results, _infos = bucket.batch(qiniu.build_batch_stat(bucket_name, ulist))
    for key, result in zip(ulist, results):
        fname = k2f.get(key)
        data = result["data"]
        remote_size = data.get("fsize", None)
        # putTime is expressed in 100-nanosecond ticks; convert to seconds.
        remote_put_time = int(data.get("putTime") / 10000000)
        # NOTE(review): size is read from fname without joining basedir,
        # while the upload path does join it — confirm basedir handling.
        local_size = os.path.getsize(fname)
        # NOTE(review): compares against atime, not mtime — confirm intended.
        local_time = int(os.path.getatime(fname))
        if local_size == remote_size:
            continue
        if remote_put_time >= local_time - diff_time:
            continue  # remote copy is at least as new: skip
        upload_file(key, os.path.join(basedir, fname))
def update_file(k2f, ulist):
    """Re-upload files whose size differs and whose remote copy is stale."""
    stat_ops = qiniu.build_batch_stat(bucket_name, ulist)
    stat_results, _unused_info = bucket.batch(stat_ops)
    for idx in xrange(len(ulist)):
        key = ulist[idx]
        fname = k2f.get(key)
        entry = stat_results[idx]["data"]
        remote_size = entry.get("fsize", None)
        # putTime comes back in 100 ns ticks -> whole seconds.
        remote_seconds = int(entry.get("putTime") / 10000000)
        local_size = os.path.getsize(fname)
        # NOTE(review): uses access time, not modification time — confirm.
        local_seconds = int(os.path.getatime(fname))
        if local_size == remote_size:
            continue
        if remote_seconds >= local_seconds - diff_time:
            # Remote object is at least as new; nothing to do.
            continue
        upload_file(key, os.path.join(basedir, fname))
def diff_file(rlist, llist, basedir=""):
    """Return remote keys that are missing locally or whose local etag
    differs from the remote hash.
    """
    changed = []
    # Normalize unicode keys to byte strings under the module charset.
    encoded = [item.encode(charset) if isinstance(item, unicode) else item
               for item in rlist]
    stat_results, _infos = bucket.batch(
        qiniu.build_batch_stat(bucket_name, encoded))
    for idx, key in enumerate(encoded):
        if key not in llist:
            changed.append(key)
            continue
        local_path = os.path.join(basedir, key)
        remote_hash = stat_results[idx]["data"].get("hash", None)
        if etag(local_path) != remote_hash:
            changed.append(key)
    return changed
def test_batch_stat(self):
    """Stat a single known key via batch and expect a per-item 200 code."""
    results, response = self.bucket.batch(
        build_batch_stat(bucket_name, ['python-sdk.html']))
    print(response)
    first = results[0]
    assert first['code'] == 200
# -*- coding: utf-8 -*- # flake8: noqa """ 批量查询文件信息 https://developer.qiniu.com/kodo/api/1250/batch """ from qiniu import build_batch_stat, Auth, BucketManager access_key = '' secret_key = '' q = Auth(access_key, secret_key) bucket = BucketManager(q) bucket_name = '' # 需要查询的文件名 keys = ['1.gif', '2.txt', '3.png', '4.html'] ops = build_batch_stat(bucket_name, keys) ret, info = bucket.batch(ops) print(info)
def check(key):
    """Issue a batch stat for *key* and print the raw response info."""
    # NOTE(review): build_batch_stat is elsewhere called with a list of
    # keys; the singular parameter name suggests callers may pass one —
    # confirm upstream usage.
    ret, info = bucket.batch(build_batch_stat(bucket_name, key))
    print(info)
# -*- coding: utf-8 -*- """ 批量查询文件信息 https://developer.qiniu.com/kodo/api/1250/batch """ from qiniu import build_batch_stat, Auth, BucketManager access_key = '' secret_key = '' q = Auth(access_key, secret_key) bucket = BucketManager(q) bucket_name = '' # 需要查询的文件名 keys = ['1.gif', '2.txt', '3.png', '4.html'] ops = build_batch_stat(bucket_name, keys) ret, info = bucket.batch(ops) print(info)