def flush_resource(url):
    """Purge a single file URL from the Qiniu CDN cache and print the response.

    Relies on a module-level auth object ``q`` defined elsewhere in this file.
    """
    manager = CdnManager(q)
    # submit the one-element refresh request
    result = manager.refresh_urls([url])
    print(result)
def update(auth):
    """Refresh the configured ``Config.link/Config.file`` URL on the Qiniu CDN.

    auth: a qiniu Auth credential object.
    """
    print('刷新')
    manager = CdnManager(auth)
    target = '{}/{}'.format(Config.link, Config.file)
    result = manager.refresh_urls([target])
    print(result)
class QiNiuCDNTool(QiNiuTool):
    """Qiniu CDN helper: classifies URLs as files or directories and
    submits the matching refresh request."""

    def __init__(self):
        self.manager = CdnManager(
            Auth(access_key=settings.QINIU_ACCESSKEY,
                 secret_key=settings.QINIU_ACCESSSECRET))

    def get_log_data(self):
        pass

    def check(self, url):
        """Classify `url` as 'FILE' (path after the host contains a dot)
        or 'DIR' (no dot in the path part)."""
        if 'http' in url:
            index = url.find('//')
            header = url.find('/', index + 2)
            if '.' in url[header + 1:]:  # uri
                return 'FILE'
            return 'DIR'
        index = url.find('/')
        if '.' in url[index + 1:]:
            return 'FILE'
        return 'DIR'

    def sort(self, urls):
        """Split `urls` into (file_urls, dir_urls) using check()."""
        file_urls = []
        dir_urls = []
        for url in urls:
            if self.check(url) == 'FILE':
                file_urls.append(url)
            else:
                dir_urls.append(url)
        return file_urls, dir_urls

    def refreshs(self, urls):
        """Refresh a mixed list of file and directory URLs.

        BUG FIX: the original guarded refresh_dirs(dir_urls) with
        `if file_urls` and refresh_urls(file_urls) with `if dir_urls`
        (conditions swapped), so a list of only dirs or only files was
        never refreshed.
        """
        file_urls, dir_urls = self.sort(urls)
        print(file_urls, dir_urls)
        if dir_urls:
            ret, info = self.manager.refresh_dirs(dir_urls)
            print(ret, 'AAA', info)
        if file_urls:
            ret, info = self.manager.refresh_urls(file_urls)
            print(ret, 'BBB', info)

    def refresh(self, url):
        """Refresh one URL; return True when the API answers code 200."""
        if self.check(url) == 'FILE':
            ret, info = self.manager.refresh_urls([url])
        else:
            ret, info = self.manager.refresh_dirs([url])
        return ret['code'] == 200
def generate(card):
    """Render a movie for `card` into memory, upload it to the 'card'
    bucket on Qiniu, and save a Photo record pointing at the object.

    Returns 'ok' or 'failed' based on the upload response, or an
    HttpResponse on LeanCloudError.
    NOTE(review): `return 'already'` is unreachable — both branches of
    `if (info.ok())` return before it.
    """
    try:
        msstream = BytesIO()
        data = {
            'copyright': 'open',
            'id': card.id,
            'title': card.get('name'),
            'content': card.get('content'),
            'author': card.get('author'),
            'url': card.get('img_url')
        }
        Movie.movie(data, msstream)
        url = os.environ["QINIU_ACCESS_URL"]
        access_key = os.environ["QINIU_ACCESS_KEY"]
        secret_key = os.environ["QINIU_SECRET_KEY"]
        # build the credential object
        q = Auth(access_key, secret_key)
        # target bucket
        bucket_name = 'card'
        key = card.get('objectId')
        if card.get('photo') is None:
            # first upload: nothing cached on the CDN yet
            token = q.upload_token(bucket_name)
            ret, info = put_data(token, key, msstream.getvalue())
        else:
            # replacing an existing object: purge its CDN cache after upload
            cdn_manager = CdnManager(q)
            urls = ['http://oppyrwj3t.bkt.clouddn.com/' + key]
            token = q.upload_token(bucket_name, key)
            ret, info = put_data(token, key, msstream.getvalue())
            refresh_url_result = cdn_manager.refresh_urls(urls)
        if (info.ok()):
            metaData = {'owner': card.get('username')}
            photo = Photo()
            photo.set('mine_type', 'image/jpeg')
            photo.set('key', key)
            photo.set('name', key)
            photo.set('url', url + '/' + key)
            photo.set('provider', 'qiniu')
            photo.set('metaData', metaData)
            photo.set('bucket', bucket_name)
            photo.save()
            update = Card.create_without_data(key)
            update.set('photo', photo)
            update.save()
            return 'ok'
        else:
            return 'failed'
        return 'already'  # unreachable, see docstring
    except LeanCloudError as e:
        if e.code == 101:  # the server-side Class has not been created yet
            card = ''
            return HttpResponse(e, content_type="text/plain")
        else:
            raise e
        return HttpResponse(e, content_type="text/plain")  # unreachable
def __init__(self, access_key, secret_key, bucket_name, bucket_domain):
    """Store bucket info and build the Qiniu auth, CDN and bucket managers."""
    self.bucket_name = bucket_name
    self.bucket_domain = bucket_domain
    # single credential object shared by all managers
    credential = Auth(access_key, secret_key)
    self.q = credential
    self.cdn_manager = CdnManager(credential)
    self.bucket = BucketManager(credential)
    # URLs accumulated here can be refreshed in a later pass
    self.file2refresh = []
def refresh_data(q, url):
    """Purge `url` from the Qiniu CDN cache; return the SDK's refresh result.

    q: a qiniu Auth credential object.
    """
    manager = CdnManager(q)
    # one link to refresh
    # ('https://tmn07.com/rank_v1911/events_list.json' was another candidate)
    targets = [url]
    return manager.refresh_urls(targets)
def clear_qiniu(url):
    """Refresh one URL on the Qiniu CDN and return the request id."""
    # account ak/sk
    # NOTE(review): credentials are hard-coded; they belong in env/config.
    access_key = 'MeUQwQAvlOJ7C-wVasdfcCsJIvq0PppRT3VrYKog34xv'
    secret_key = 'wAwXfUY12YD3vkHa_Qqi9S3F31RF6F8reIP7cWj-jj'
    credential = qiniu.Auth(access_key=access_key, secret_key=secret_key)
    manager = CdnManager(credential)
    # submit the single-link refresh and pull the request id out of ret
    result = manager.refresh_urls([url])
    return result[0]['requestId']
def publish(self, target_url, credentials=None, **extra):
    """Upload the locally generated files to a Qiniu bucket, then
    optionally refresh the CDN directory.

    Auth keys are read from a config file named "qiniu.ini".
    Yields human-readable progress messages.
    """
    config = self.get_config('qiniu.ini')
    access_key = config.get('auth.Access_Key')
    secret_key = config.get('auth.Secret_Key')
    bucket_name, key_prefix = self.split_bucket_uri(target_url)
    # prepare local files and upload status variables
    local = self.list_local()
    qn = Auth(access_key, secret_key)
    start_time = datetime.now()
    yield "### start to upload to bucket %s %s " % (bucket_name, key_prefix)
    counter = 0
    num_files = len(local)
    # upload files one by one
    for filename in local:
        counter = counter + 1
        abs_path = os.path.join(self.output_path, filename)
        yield '### uploading %s ###' % filename
        key_name = key_prefix + filename
        token = qn.upload_token(bucket_name, key_name, 120)
        ret, info = put_file(token, key_name, abs_path)
        if info.status_code == 200:
            yield '-- Successfully uploaded %s of %s files --' % (str(counter), num_files)
        else:
            yield 'something wrong %s' % info.status_code
            yield 'Full error message: %s' % info
    last_time = datetime.now() - start_time
    yield "$$$ Upload Done in %s seconds!" % last_time.seconds
    # refresh the CDN directory if enabled in the config
    if config.get('cdn.refresh_enable') == "yes":
        yield "start to refresh"
        # FIX: local renamed from the misspelled `cdn_namager`
        cdn_manager = CdnManager(qn)
        refresh_url = config.get('cdn.refresh_url')
        # Qiniu treats a trailing slash as "directory"
        if refresh_url[-1] != '/':
            refresh_url = refresh_url + "/"
        # FIX: renamed `dir`, which shadowed the builtin
        dirs = [refresh_url]
        refresh_dir_result = cdn_manager.refresh_dirs(dirs)
        print(refresh_dir_result)
        if refresh_dir_result[0]['code'] == 200:
            yield "refresh complete!"
        else:
            yield "Error refreshing: %s" % refresh_dir_result[0]
    yield "ALL TASK ARE COMPLETED!"
def upload(local_file, bucket_name, ak, sk, bucket_host=None, prefix=None, key=None):
    """Upload a local file to the given bucket and return its public URL.

    Args:
    ----
        local_file: path of the file to upload
        bucket_name: target bucket
        ak: access key of the bucket's account
        sk: secret key of the bucket's account
        bucket_host: public domain of the bucket; fetched dynamically if None
        prefix: key prefix; no prefix when None
        key: object name after upload; defaults to the local file name

    Return:
    ------
        url: public URL of the uploaded file
    """
    auth = Auth(ak, sk)
    if bucket_host is None:
        # fetch the bucket's public domain
        bucket_host = get_bucket_host(bucket_name, auth)
    if key is None:
        key = os.path.split(local_file)[-1]
    if prefix is not None:
        # BUG FIX: os.path.join uses '\\' on Windows, producing an invalid
        # object-storage key; object keys always use '/'.
        key = prefix.rstrip('/') + '/' + key
    token = auth.upload_token(bucket_name, key, 3600)
    ret, info = put_file(token, key, local_file)
    assert ret['key'] == key
    assert ret['hash'] == etag(local_file)
    url = bucket_host + '/' + key
    # force-refresh the CDN cache so the new content is served immediately
    cdn_manager = CdnManager(auth)
    cdn_manager.refresh_urls([url])
    return url
def qiniu_flux_data(urls):
    """Return this month's total CDN traffic for `urls`, in GiB.

    Logs an error and returns None when the flux query does not answer 200.
    """
    credential = Auth(qiniu_ak, qiniu_sk)
    manager = CdnManager(credential)
    # window: first day of the current month up to today
    start_date = time.strftime('%Y-%m-01', time.localtime())
    end_date = time.strftime('%Y-%m-%d', time.localtime())
    ret, info = manager.get_flux_data(urls, start_date, end_date, 'day')
    if info.status_code != 200:
        logger.error(f"流量查询失败:{info}")
        return None
    # sum every per-area series of every domain
    total_bytes = 0
    for domain_series in ret['data'].values():
        for area_series in domain_series.values():
            total_bytes += sum(area_series)
    return total_bytes / 1024 / 1024 / 1024
def sync(self, request, queryset):
    """Admin action: upload the mirrors index page and JSON listing to
    Qiniu, then purge both objects (and the mirrors/ dir) from the CDN."""
    access_key = settings.QINIU_ACCESS_KEY
    secret_key = settings.QINIU_SECRET_KEY
    bucket = settings.QINIU_BUCKET
    host = settings.QINIU_HOST
    auth = Auth(access_key, secret_key)
    # FIX: build the template path with dirname/join instead of stripping
    # the basename from __file__, which produced a doubled slash.
    filename = os.path.join(os.path.dirname(__file__), 'templates', 'index.html')
    uploads = [
        'mirrors/index.html',
        'mirrors.json',
    ]
    token = auth.upload_token(bucket, uploads[0], 3600)
    put_file(token, uploads[0], filename)
    mirrors = Mirror.objects.all()
    serializer = MirrorSerializer(instance=mirrors, many=True)
    token = auth.upload_token(bucket, uploads[1], 3600)
    put_data(
        token, uploads[1],
        json.dumps({
            'title': settings.APP_NAME,
            'url': settings.MEDIA_URL,
            'data': serializer.data,
        }))
    cdn_manager = CdnManager(auth)
    # FIX: comprehension instead of index-mutating range(len(...)) loop
    refresh_targets = [host + '/' + upload for upload in uploads]
    refresh_targets.append(host + '/mirrors/')
    cdn_manager.refresh_urls(refresh_targets)
    return JsonResponse({'status': 'success', 'msg': '处理成功!'})
def del_fig(ak, sk, png_files, fig_urls, delete=True):
    """Optionally delete each figure object from the 'report' bucket,
    then purge `fig_urls` from the CDN cache."""
    BASEURL = "http://p7d7ismcm.bkt.clouddn.com"  # kept as-is (unused here)
    credential = Auth(ak, sk)
    bucket_name = 'report'
    store = BucketManager(credential)
    if delete:
        for path in png_files:
            name = os.path.basename(path)
            ret, info = store.delete(bucket_name, name)
            outcome = ('Figure has been deleted.' if ret == {}
                       else 'Error when deleting figure.')
            print(path, outcome)
    # refresh cdn cache
    cdn = CdnManager(credential)
    result = cdn.refresh_urls(fig_urls)
    if result[0]['error'] == u'success':
        print('CDN has been refreshed')
    return
class QiniuClient(object):
    """Thin Qiniu wrapper: skips uploads whose etag matches the remote
    copy and records the URLs that need a CDN refresh."""

    def __init__(self, access_key, secret_key, bucket_name, bucket_domain):
        self.bucket_name = bucket_name
        self.bucket_domain = bucket_domain
        # build the credential object
        q = Auth(access_key, secret_key)
        self.q = q
        self.cdn_manager = CdnManager(q)
        # BucketManager for stat/list operations
        self.bucket = BucketManager(q)
        # URLs of uploaded files, to be refreshed later
        self.file2refresh = []

    def is_diff(self, key, localfile):
        """Return True when the remote object differs from (or lacks)
        the local file.

        FIX: the original bound `hash = ret` (shadowing the builtin) and
        left dead `diff` assignments; the logic is unchanged.
        """
        ret, info = self.bucket.stat(self.bucket_name, key)
        if ret:
            # stat succeeded: compare the remote hash with the local etag
            return ret['hash'] != etag(localfile)
        # no remote metadata: the file was never uploaded
        return True

    def refresh(self, urls):
        """Purge `urls` on the Qiniu CDN.

        https://github.com/qiniu/python-sdk/blob/master/examples/cdn_manager.py
        """
        refresh_url_result = self.cdn_manager.refresh_urls(urls)

    def upload(self, key, localfile):
        """Upload `localfile` as `key` when it differs from the remote copy.

        key: object name on Qiniu (drives the final URL); see
             http://qiniu-developer.u.qiniudn.com/docs/v6/api/overview/faq.html
        localfile: local file path
        """
        if self.is_diff(key, localfile):
            token = self.q.upload_token(self.bucket_name, key, 3600)
            ret, info = put_file(token, key, localfile)
            # remember the URL so it can be refreshed at
            # https://portal.qiniu.com/refresh
            url = 'http://{}/{}'.format(self.bucket_domain, key)
            print("refresh url:", url)
            self.file2refresh.append(url)
# -*- coding: utf-8 -*- # flake8: noqa import qiniu from qiniu import CdnManager """ 日志分析 https://developer.qiniu.com/fusion/api/4081/cdn-log-analysis """ """1、区域运营商流量查询:https://developer.qiniu.com/fusion/api/4081/cdn-log-analysis#4""" # 账户ak,sk access_key = "<access_key>" secret_key = "<secret_key>" auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) # 域名列表,总数不超过100条 domains = ['pcppc4i3f.bkt.clouddn.com'] # 粒度,可选项为 5min、1hour、1day freq = '1day' # 区域,选项见 Region 参数列表:https://developer.qiniu.com/fusion/api/4081/cdn-log-analysis#region regions = [""] # ISP运营商,比如all(所有 ISP),telecom(电信),unicom(联通),mobile(中国移动),drpeng(鹏博士),tietong(铁通),cernet(教育网) isp = "" # 开始时间,格式为:2006-01-02。起止最大间隔为31天 startDate = ""
def post(self, request):
    """Handle an avatar upload: validate, shrink to 300x300, push to the
    'avatar' bucket on Qiniu, drop the previous avatar object, store the
    new URL in redis and refresh the CDN cache.

    Returns a JsonResponse whose 'code' encodes the outcome
    (1 = success, others = specific failures).
    """
    if not request.user.is_authenticated:
        return JsonResponse({'code': 7, 'msg': '请先登录'})
    avatar_file = request.FILES.get('avatar')
    if not avatar_file:
        return JsonResponse({'code': 2, 'msg': '请选择需要上传的图片'})
    # only common image types are accepted
    allow_types = ['image/jpeg', 'image/png', 'image/gif']
    if avatar_file.content_type not in allow_types:
        return JsonResponse({'code': 3, 'msg': '上传失败,文件类型错误'})
    file_name = '{}_avatar_{}_{}'.format(request.user.username, time.time(), avatar_file.name)
    fs = FileSystemStorage()
    fs.save(file_name, avatar_file)
    file_path = os.path.join(MEDIA_ROOT, file_name)
    # compress the image
    i = Image.open(file_path)
    i.thumbnail((300, 300))
    i.save(file_path)
    # NOTE(review): hard-coded credentials; should live in settings/env.
    access_key = 'M2TrolxfManTFNP4Clr3M12JW0tvAaCV0xIbrZk5'
    secret_key = 'Llh0byt0KDHwiFlcNVvPiTpQSrH8IrZSt5puu1zS'
    q = qiniu_auth(access_key, secret_key)
    bucket_name = 'avatar'
    redis = get_redis()
    try:
        token = q.upload_token(bucket_name, file_name, 3600)
        ret, info = put_file(token, file_name, file_path)
        assert ret['key'] == file_name
        assert ret['hash'] == etag(file_path)
    except Exception as e:
        return JsonResponse({'code': 4, 'msg': '上传文件出错'})
    finally:
        # always remove the local temp copy
        fs.delete(file_name)
    # delete the previous avatar object (unless it is the default one)
    # NOTE(review): .decode() raises if the redis field is missing — TODO confirm
    used_avatar = redis.hget(
        'user:{}:detail'.format(request.user.username), 'avatar').decode()
    if used_avatar != 'http://avatar.cdn.henji.xyz/default.jpg':
        try:
            bucket = BucketManager(q)
            key = os.path.basename(used_avatar)
            ret, info = bucket.delete(bucket_name, key)
            assert ret == {}
        except Exception as e:
            pass
    # store the new avatar URL
    url = 'http://avatar.cdn.henji.xyz/{}'.format(file_name)
    redis.hset('user:{}:detail'.format(request.user.username), 'avatar',
               "{}-avatar".format(url))
    # refresh the CDN cache so the new avatar is served
    cdn_manager = CdnManager(q)
    urls = [url]
    cdn_manager.refresh_urls(urls)
    return JsonResponse({'code': 1, 'msg': url})
# -*- coding: utf-8 -*- import qiniu from qiniu import CdnManager # 账户ak,sk access_key = '...' secret_key = '...' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) # 需要刷新的文件链接 urls = [ 'http://aaa.example.com/a.gif', 'http://bbb.example.com/b.jpg' ] # 刷新链接 refresh_url_result = cdn_manager.refresh_urls(urls) print(refresh_url_result)
def __init__(self):
    """Build the CdnManager from the Qiniu credentials in settings."""
    credential = Auth(access_key=settings.QINIU_ACCESSKEY,
                      secret_key=settings.QINIU_ACCESSSECRET)
    self.manager = CdnManager(credential)
# 演示函数调用结果 def print_result(result): if result[0] is not None: print(type(result[0])) print(result[0]) print(type(result[1])) print(result[1]) # 账户ak,sk access_key = '...' secret_key = '...' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) urls = [ 'http://if-pbl.qiniudn.com/qiniu.jpg', 'http://if-pbl.qiniudn.com/qiniu2.jpg' ] # 注意链接最后的斜杠表示目录 dirs = [ 'http://if-pbl.qiniudn.com/test1/', 'http://if-pbl.qiniudn.com/test2/' ] """刷新文件,目录""" # 刷新链接 print('刷新文件')
    # NOTE(review): this chunk begins mid-function — the `def` header of the
    # upload helper that set `res`/`ret` lies above this view.
    if res.status_code != 200:
        raise Exception("upload failed")
    return ret, res


if __name__ == "__main__":
    # do not run behind a proxy
    access_key = sys.argv[1]
    secret_key = sys.argv[2]
    sync = Sync(
        access_key=access_key,  # access_key
        secret_key=secret_key,  # secret_key
        bucket_name="bi-she",  # bucket_name
        sync_dir="./",  # static-file directory (must end with a slash /)
        exclude=[".DS_Store"],
        cover=True,
        remove_redundant=True,
    )
    # refresh the CDN caches after the sync
    cdn_manager = CdnManager(sync.q)
    # file URLs to refresh
    urls = ['http://aaa.example.com/a.gif', 'http://bbb.example.com/b.jpg']
    # URL refresh
    refresh_url_result = cdn_manager.refresh_urls(urls)
    # directory refresh
    refresh_dir_result = cdn_manager.refresh_dirs(['xxx'])
# 演示函数调用结果 def print_result(result): if result[0] is not None: print(type(result[0])) print(result[0]) print(type(result[1])) print(result[1]) # 账户ak,sk access_key = '...' secret_key = '...' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) urls = [ 'http://if-pbl.qiniudn.com/qiniu.jpg', 'http://if-pbl.qiniudn.com/qiniu2.jpg' ] # 注意链接最后的斜杠表示目录 dirs = ['http://if-pbl.qiniudn.com/test1/', 'http://if-pbl.qiniudn.com/test2/'] """刷新文件,目录""" # 刷新链接 print('刷新文件') refresh_url_result = cdn_manager.refresh_urls(urls) print_result(refresh_url_result)
from qiniu import Auth, CdnManager

# Account credentials
# NOTE(review): hard-coded keys; these belong in env vars or config.
access_key = 'v_s4L4kwQ-er524cv0ByjdiU7KtwzcaTgb7-y_nU'
secret_key = 'e-m9s9sDqfVhFFblA8Xq9eQaMdBPoPJ9AMtrfFLm'

q = Auth(access_key, secret_key)
cdn = CdnManager(q)

# file URLs to purge from the CDN cache
urls = ['http://phd6u0gel.bkt.clouddn.com/Geekbench-4.3.0-Linux.tar.gz',
        'http://phd6u0gel.bkt.clouddn.com/super_pi.tgz']

# submit the refresh request and print the (ret, info) response
refresh_urls_response = cdn.refresh_urls(urls=urls)
print(refresh_urls_response)
def __init__(self, **kwargs):
    """Initialize base CDN state, then build the Qiniu auth, bucket
    and CDN managers from self.ak / self.sk."""
    CDN.__init__(self, **kwargs)
    credential = Auth(self.ak, self.sk)
    self.auth = credential
    self.bucket_mgr = BucketManager(credential)
    self.cdn_mgr = CdnManager(credential)
    # remote name -> etag cache, filled elsewhere
    self.old_etags = dict()
#需要填写你的 Access Key 和 Secret Key #export QINIU_ACCESS_KEY=xxx access_key = os.environ["QINIU_ACCESS_KEY"] #export QINIU_SECRET_KEY=xxx secret_key = os.environ["QINIU_SECRET_KEY"] #要上传的空间 #export QINIU_BUCKET=xxx bucket_name = os.environ["QINIU_BUCKET"] bucket_domain = os.environ["QINIU_BUCKET_DOMAIN"] # 构建鉴权对象 q = Auth(access_key, secret_key) cdn_manager = CdnManager(q) #初始化BucketManager bucket = BucketManager(q) file2refresh = [] def is_diff(key, localfile): ''' 比较本地文件与文件文件是否一致 ''' ret, info = bucket.stat(bucket_name, key) #print(ret,info) hash = ret if hash: diff = ret['hash'] != etag(localfile) # diff==False,说明文件相同,这个有点绕
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Query the bandwidth of the given domains over a time range.
"""

import qiniu
from qiniu import CdnManager
from configobj import ConfigObj

# read the account keys from the config file
config = ConfigObj("../../config.ini", encoding='UTF8')
access_key = config['account']['access_key']
secret_key = config['account']['secret_key']

auth = qiniu.Auth(access_key=access_key, secret_key=secret_key)
cdn_manager = CdnManager(auth)

startDate = '2018-10-28'
endDate = '2018-10-29'
granularity = '5min'
urls = ['x.xxx.com']

# BUG FIX: the original unpacked the CdnManager object itself
# (`ret, info = cdn_manager`); actually call the bandwidth query.
ret, info = cdn_manager.get_bandwidth_data(urls, startDate, endDate, granularity)
print(ret)
print(info)
# -*- coding: utf-8 -*- import qiniu from qiniu import CdnManager # 账户ak,sk access_key = '...' secret_key = '...' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) # 需要刷新的目录链接 dirs = [ 'http://aaa.example.com/doc/img/', 'http://bbb.example.com/doc/video/' ] # 刷新链接 refresh_dir_result = cdn_manager.refresh_dirs(dirs)
# -*- coding: utf-8 -*- import qiniu from qiniu import CdnManager # 账户ak,sk access_key = '...' secret_key = '...' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) # 需要刷新的文件链接 urls = ['http://aaa.example.com/a.gif', 'http://bbb.example.com/b.jpg'] # 刷新链接 refresh_url_result = cdn_manager.refresh_urls(urls) print(refresh_url_result)
# -*- coding: utf-8 -*- """ 获取指定域名指定时间内的日志链接 """ import qiniu from qiniu import CdnManager # 账户ak,sk access_key = '' secret_key = '' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) log_date = '2017-07-20' urls = [ 'a.example.com', 'b.example.com' ] ret, info = cdn_manager.get_log_list_data(urls, log_date) print(ret) print(info)
class QN(CDN):
    """Qiniu backend of the CDN distribution framework.

    Pulls the remote listing into a name->etag map, pushes changed local
    files, and can refresh / prefetch / delete individual objects.
    """

    def __init__(self, **kwargs):
        CDN.__init__(self, **kwargs)
        self.auth = Auth(self.ak, self.sk)
        self.bucket_mgr = BucketManager(self.auth)
        self.cdn_mgr = CdnManager(self.auth)
        # remote object name -> etag, filled by _pull/_process_data
        self.old_etags = dict()

    def _pull(self, bucket):
        """List every object in `bucket`, page by page, into old_etags."""
        try:
            eof = False
            marker = None
            while not eof:
                if marker is not None:
                    log.debug("more files on CDN")
                ret, eof, info = self.bucket_mgr.list(bucket,
                                                      prefix=None,
                                                      marker=marker,
                                                      limit=None,
                                                      delimiter=None)
                # marker drives pagination; None once the listing ends
                marker = ret.get("marker")
                self._process_data(ret)
        except Exception as e:
            log.error("QN:_pull error: {error}".format(error=str(e)))
            raise Exception(
                "Distribute_QN_pull_error: {error}".format(error=str(e)))
        log.debug("{d} items on QINIU CDN".format(d=len(self.old_etags)))

    def _process_data(self, ret):
        """Record each listed item's name and hash; skip empty names."""
        try:
            if ret.get("items") is None:
                return
            items = ret.get("items")
            for dic in items:
                rname = dic["key"]
                if rname == "":
                    log.warning("illegal name: null")
                    continue
                self.online_files.add(self.to_unicode(rname))
                self.old_etags[rname] = dic["hash"]
        except Exception as e:
            log.error("QN:_process_data error: {error}".format(error=str(e)))
            raise Exception("Distribute_QN_process_data_error: {error}".format(
                error=str(e)))

    def _set_file_info(self, parent, rpath, bucket, rname):
        # factory hook: build the Qiniu-specific file-info object
        return QN_FileInfo(parent, rpath, bucket, rname)

    def _filter_push_file(self, fi):
        """Decide whether `fi` should be pushed.

        Presumably True = upload; the decision mixes the etag comparison
        with the no_check_* / check_prefix_filter kwargs — confirm with
        the caller of this hook.
        """
        # log.debug("filter_push_file {fi}".format(fi=fi))
        if fi.rname in self.old_etags:
            if self.kwargs.get("check_prefix_filter"):
                fb, fn = os.path.split(fi.rname)
                if fn.startswith(self.kwargs.get("check_prefix_filter")):
                    return not fi.etag == self.old_etags[fi.rname]
                else:
                    if self.kwargs.get("no_check_overwrite"):
                        return True
                    if self.kwargs.get("no_check_no_overwrite"):
                        return False
                    return False
            else:
                if self.kwargs.get("no_check_overwrite"):
                    return True
                if self.kwargs.get("no_check_no_overwrite"):
                    return False
                return not fi.etag == self.old_etags[fi.rname]
        # unknown remote name: always push
        return True

    def _push(self, fi):
        """Upload fi.rpath as fi.bucket/fi.rname and verify key + etag."""
        try:
            token = self.auth.upload_token(fi.bucket, fi.rname, 7200)
            ret, info = put_file(token, fi.rname, fi.rpath,
                                 check_crc=True)
            log.debug("{ret} check".format(ret=ret, info=info))
            if ret is None:
                log.error("QN:_push: {file} error".format(file=fi.rpath))
                raise Exception("QN:_push: {file} error".format(file=fi.rpath))
            if ret["key"] != fi.rname or ret["hash"] != fi.etag:
                log.error("QN:_push check: {file} error".format(file=fi.rpath))
                raise Exception(
                    "QN:_push check: {file} error".format(file=fi.rpath))
            log.info("{fi} distributed".format(fi=fi))
            self.upload_files.add(fi.rname)
        except Exception as e:
            log.error("QN:_push error: {error}".format(error=str(e)))
            raise Exception(
                "Distribute_QN_push_error :{error}".format(error=str(e)))

    def _refresh(self, fi):
        """Purge the CDN cache entry for `fi`."""
        try:
            urls = [os.path.join(self.cdn_uri, fi.rpath)]
            ret, info = self.cdn_mgr.refresh_urls(urls)
            if ret["code"] == 200:
                log.info("QN:_refresh success:{0}--{1}".format(
                    ret["urlSurplusDay"], ret["requestId"]))
            else:
                log.warn("QN:_refresh failed:{0}--{1}".format(
                    ret["code"], ret["error"]))
        except Exception as e:
            log.error("QN:_refresh error: {error}".format(error=str(e)))
            raise Exception(
                "Distribute_QN_refresh_error :{error}".format(error=str(e)))

    def _prefetch(self, fi):
        """Ask the CDN to pre-fetch `fi` into its cache."""
        try:
            urls = [os.path.join(self.cdn_uri, fi.rpath)]
            ret, info = self.cdn_mgr.prefetch_urls(urls)
            if ret["code"] == 200:
                log.info("QN:_prefetch success:{0}--{1}".format(
                    ret["urlSurplusDay"], ret["requestId"]))
            else:
                log.warn("QN:_prefetch failed:{0}--{1}".format(
                    ret["code"], ret["error"]))
        except Exception as e:
            log.error("QN:_prefetch error: {error}".format(error=str(e)))
            raise Exception(
                "Distribute_QN_prefetch_error :{error}".format(error=str(e)))

    def _filter_delete_files(self, rname):
        """Return True for names matching the delete filters, dotfiles,
        names with spaces, or absolute dirs — presumably True = keep
        (exclude from deletion); confirm with the caller."""
        fb, fn = os.path.split(rname)
        if self.kwargs.get("delete_filter_dir") and fb.startswith(
                self.kwargs.get("delete_filter_dir")):
            return True
        if self.kwargs.get("delete_filter") and fn.startswith(
                self.kwargs.get("delete_filter")):
            return True
        if fn.startswith(".") or " " in fn or fb.startswith("/"):
            return True
        return False

    def _delete_online_files(self, fi):
        # remove the remote object and log the removal
        self.bucket_mgr.delete(self.bucket, fi.rname)
        log.info("{bucket} {rname} removed".format(bucket=self.bucket,
                                                   rname=fi.rname))
        pass
# -*- coding: utf-8 -*- """ 预取资源到cdn节点 https://developer.qiniu.com/fusion/api/1227/file-prefetching """ import qiniu from qiniu import CdnManager # 账户ak,sk access_key = '...' secret_key = '...' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) # 需要刷新的文件链接 urls = ['http://aaa.example.com/doc/img/', 'http://bbb.example.com/doc/video/'] # 刷新链接 refresh_dir_result = cdn_manager.prefetch_urls(urls)
# -*- coding: utf-8 -*- """ 查询指定域名指定时间段内的流量 """ import qiniu from qiniu import CdnManager # 账户ak,sk access_key = '' secret_key = '' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) startDate = '2017-07-20' endDate = '2017-08-20' granularity = 'day' urls = [ 'a.example.com', 'b.example.com' ] # 获得指定域名流量 ret, info = cdn_manager.get_flux_data(urls, startDate, endDate, granularity) print(ret) print(info)
# -*- coding: utf-8 -*- """ 获取指定域名指定时间内的日志链接 """ import qiniu from qiniu import CdnManager # 账户ak,sk access_key = '' secret_key = '' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) log_date = '2017-07-20' urls = ['a.example.com', 'b.example.com'] ret, info = cdn_manager.get_log_list_data(urls, log_date) print(ret) print(info)
def refresh():
    """Purge every URL listed in Config.urls and return the API result."""
    manager = CdnManager(Config.q)
    result = manager.refresh_urls(Config.urls)
    print(result)
    return result
# -*- coding: utf-8 -*- # flake8: noqa import qiniu from qiniu import CdnManager # 账户ak,sk access_key = '...' secret_key = '...' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) # 需要刷新的目录链接 dirs = ['http://aaa.example.com/doc/img/', 'http://bbb.example.com/doc/video/'] # 刷新链接 refresh_dir_result = cdn_manager.refresh_dirs(dirs)
# -*- coding: utf-8 -*- """ 预取资源到cdn节点 https://developer.qiniu.com/fusion/api/1227/file-prefetching """ import qiniu from qiniu import CdnManager # 账户ak,sk access_key = '...' secret_key = '...' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) # 需要刷新的文件链接 urls = [ 'http://aaa.example.com/doc/img/', 'http://bbb.example.com/doc/video/' ] # 刷新链接 refresh_dir_result = cdn_manager.prefetch_urls(urls)
# -*- coding: utf-8 -*- # flake8: noqa """ 查询指定域名指定时间段内的流量 """ import qiniu from qiniu import CdnManager # 账户ak,sk access_key = '' secret_key = '' auth = qiniu.Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) startDate = '2017-07-20' endDate = '2017-08-20' granularity = 'day' urls = ['a.example.com', 'b.example.com'] # 获得指定域名流量 ret, info = cdn_manager.get_flux_data(urls, startDate, endDate, granularity) print(ret) print(info)
# -*- coding: utf-8 -*- from qiniu import Auth, QiniuMacAuth, CdnManager from qiniu.services.cdn.manager import create_timestamp_anti_leech_url """批量查询动态加速之动态请求数:https://developer.qiniu.com/fusion/api/1230/traffic-bandwidth#5""" # 账户ak,sk access_key = "<access_key>" secret_key = "<secret_key>" auth = Auth(access_key=access_key, secret_key=secret_key) cdn_manager = CdnManager(auth) # 起始日期,例如:2016-07-01 startDate = '2017-07-20' # 结束日期,例如:2016-07-03 endDate = '2017-08-20' # 粒度,取值:5min / hour /day granularity = 'day' # 域名列表 domains = ['a.example.com', 'b.example.com'] ret, info = cdn_manager.get_bandwidth_data(domains, startDate, endDate, granularity) print(ret) print(info)