def amerge(ufopCmd, srcBucket, srcKey, dstFormat, dstMime, urlBucket, url,
           duration="first", usePipeline="", notifyUrl=None, saveBucket=None,
           saveKey=None):
    """Submit an async audio-merge ufop on an uploaded file.

    Builds a command of the form
    ``<ufopCmd>/format/<fmt>/mime/<b64>/bucket/<b64>/url/<b64>``, optionally
    appends a saveas target, and runs it as a persistent fop.

    :param ufopCmd: ufop command name/prefix.
    :param srcBucket: bucket holding the source file.
    :param srcKey: key of the source file.
    :param dstFormat: output format segment (used verbatim).
    :param dstMime: output MIME type (base64url-encoded into the command).
    :param urlBucket: bucket of the second audio resource.
    :param url: URL of the second audio resource.
    :param duration: unused here; kept for backward-compatible signature.
    :param usePipeline: dedicated processing queue ('' = shared queue).
    :param notifyUrl: optional callback URL for the pfop result.
    :param saveBucket: optional saveas bucket for the result.
    :param saveKey: optional saveas key for the result.
    """
    # prepare fop
    fop = "{0}/format/{1}/mime/{2}/bucket/{3}/url/{4}".format(
        ufopCmd, dstFormat,
        qiniu.urlsafe_base64_encode(dstMime),
        qiniu.urlsafe_base64_encode(urlBucket),
        qiniu.urlsafe_base64_encode(url))
    # identity comparison with None is the idiomatic (and correct) form
    if saveBucket is not None and saveKey is not None:
        fop = qiniu.op_save(fop, saveBucket, saveKey)
    # do pfop (accessKey/secretKey are module-level credentials)
    auth = qiniu.Auth(accessKey, secretKey)
    pfop = qiniu.PersistentFop(auth, srcBucket, usePipeline, notifyUrl)
    retData, respInfo = pfop.execute(srcKey, [fop], force=None)
    if retData is not None:
        print("PersistentId:" + retData["persistentId"])
    else:
        print("Error:")
        print("--StatusCode:" + str(respInfo.status_code))
        print("--Reqid:" + respInfo.req_id)
        print("--Message:" + respInfo.error)
def transcoder(q, bucket_name, pipeline, key, trns_model):
    """Submit an HLS (m3u8) transcode pfop for *key* in *bucket_name*.

    :param q: qiniu Auth object.
    :param bucket_name: source bucket.
    :param pipeline: processing queue name.
    :param key: source file key (may contain a directory path).
    :param trns_model: preset selector — 0 = 240p, 1 = 480p, 2 = 720p.

    The result is saved to the module-level ``saved_bucket_name`` as
    ``<dir>/<basename>_<preset>.m3u8``.
    """
    key_path = os.path.split(key)[0]
    key_name = os.path.splitext(os.path.split(key)[1])[0]
    # preset table: transcode fop + filename suffix for each model
    presets = {
        0: ('avthumb/m3u8/segtime/10/ab/32k/ar/44100/acodec/libfaac/r/15/vb/200k/vcodec/libx264/s/424x240/autoscale/1/stripmeta/0/noDomain/1',
            '_240p.m3u8'),
        1: ('avthumb/m3u8/segtime/10/ab/64k/ar/44100/acodec/libfaac/r/18/vb/500k/vcodec/libx264/s/848x480/autoscale/1/stripmeta/0/noDomain/1',
            '_480p.m3u8'),
        2: ('avthumb/m3u8/segtime/10/ab/96k/ar/44100/acodec/libfaac/r/24/vb/1000k/vcodec/libx264/s/1280x720/autoscale/1/stripmeta/0/noDomain/1',
            '_720p.m3u8'),
    }
    if trns_model not in presets:
        print('ERROR: unexpected transcoding model!')
        # bug fix: the original fell through and submitted an empty fop
        return
    fops, suffix = presets[trns_model]
    saveas_key = urlsafe_base64_encode(
        saved_bucket_name + ':' + key_path + '/' + key_name + suffix)
    fops = fops + '|saveas/' + saveas_key
    pfop = PersistentFop(q, bucket_name, pipeline)
    ret, info = pfop.execute(key, [fops], 1)
    print(info)
    assert ret['persistentId'] is not None
def waterMark():
    """Watermark an already-uploaded image via a persistent fop.

    Builds a ``watermark`` fop, chains a ``saveas`` target, signs the full
    style string, and submits it on a dedicated image pipeline.
    """
    # Bound domain, watermark image URL, bucket, source key and queue.
    domain = "test.zhaojianfeng.cn"
    wmImg = "https://olhvkds73.qnssl.com/logo.png"
    bucket = "test-bucket"
    key = "a0.jpg"
    pipeline = "image-pipeline"
    encodeWmImg = urlsafe_base64_encode(wmImg)
    # Auth, watermark fop, saveas target and the style signature.
    auth = Auth(access_key=accessKey, secret_key=secretKey)
    saveKey = urlsafe_base64_encode(bucket + ":" + "p111_a0.jpg")
    fops = "watermark/1/image/" + encodeWmImg + "/gravity/SouthWest"
    fops = fops + "|saveas/" + saveKey
    sign = urlsafe_base64_encode(domain + "/" + key + "?" + fops)
    fops = fops + "/sign/" + sign
    # Execute the persistent fop and report the outcome.
    pfop = PersistentFop(auth=auth, bucket=bucket, pipeline=pipeline)
    ret, info = pfop.execute(key, [fops])
    print(info)
    assert ret['persistentId'] is not None
def trigger_img_persistent_fop(img_src,
                               access_key=settings.QINIU_ACCESS_KEY,
                               secret_key=settings.QINIU_SECRET_KEY,
                               bucket=settings.QINIU_BUCKET_NAME):
    '''
    Trigger persistent image fops for an image URL hosted on our bucket,
    producing "-large" and "-medium" renditions next to the original key.

    Fixed here: Python-2-only ``print``/``except Exception, e`` syntax
    (the module could not even parse under Python 3), and ``key`` being
    unbound in the except path when the failure happens early.
    '''
    key = img_src  # fallback so the except-path log never hits an unbound name
    try:
        if settings.QINIU_BUCKET_DOMAIN in img_src:
            # strip scheme + domain, e.g. http://7xsx9g.com1.z0.glb.clouddn.com/
            key = '/'.join(img_src.split("//")[1].split("/")[1:])
            q = Auth(access_key, secret_key)
            large_saveas_key = urlsafe_base64_encode(bucket + ":" + key + "-large")
            medium_saveas_key = urlsafe_base64_encode(bucket + ":" + key + "-medium")
            # styles must match the ones configured in the qiniu console;
            # see http://blog.csdn.net/netdxy/article/details/50223733
            large_ops = "imageView2/2/w/970/h/970/q/100|saveas/" + large_saveas_key
            medium_ops = "imageView2/2/w/580/h/350/q/50|saveas/" + medium_saveas_key
            pfop = PersistentFop(q, bucket)
            ret, info = pfop.execute(key, [large_ops, medium_ops], 1)
            if info.exception is None:
                print(key + " persistent fop to success")
            else:
                print(key + " persistent fop to failed")
    except Exception as e:
        print(str(e))
        print(key + " persistent fop to failed")
def qiniu_download(data):
    """Build a signed, styled download URL from *data* and probe it with a GET.

    Returns ``{"result": True, "private_url": url}`` on HTTP 200, otherwise
    ``{"result": False, "private_url": ""}``.
    """
    q = qiniu_q()  # authenticated client
    bucket_domain = settings.BUCKET_DOMAIN
    # Watermark logo: caller-supplied URL or the preset one.
    if data["url"] == "diy_url":
        logo_url = urlsafe_base64_encode(data["diy_url"])
    else:
        logo_url = urlsafe_base64_encode(data["url"])
    style = "?imageView2/1/w/%d/h/%d/q/%d/format/%s|watermark/1/image/%s/dissolve/%d/gravity/%s/dx/30/dy/30" % (
        int(data["width"]), int(data["height"]), int(data["quality"]),
        data['format'], logo_url, int(data["dissolve"]), data["place"])
    key = data["key"] + style
    base_url = 'http://%s/%s' % (bucket_domain, key)
    # Signed URL valid for one hour.
    private_url = q.private_download_url(base_url, expires=3600)
    res, _content = httplib2.Http().request(private_url, "GET")
    if res['status'] == '200':
        return {"result": True, "private_url": private_url}
    return {"result": False, "private_url": ""}
def trigger_img_persistent_fop(
    img_src,
    access_key=settings.QINIU_ACCESS_KEY,
    secret_key=settings.QINIU_SECRET_KEY,
    bucket=settings.QINIU_BUCKET_NAME,
):
    """
    Trigger persistent image fops for an image URL hosted on our bucket,
    producing "-large" and "-medium" renditions next to the original key.

    Fixed here: Python-2-only ``print``/``except Exception, e`` syntax
    (unparseable under Python 3) and ``key`` being unbound in the except
    path when the failure happens before it is assigned.
    """
    key = img_src  # fallback so the except-path log never hits an unbound name
    try:
        if settings.QINIU_BUCKET_DOMAIN in img_src:
            # strip scheme + domain, e.g. http://7xsx9g.com1.z0.glb.clouddn.com/
            key = "/".join(img_src.split("//")[1].split("/")[1:])
            q = Auth(access_key, secret_key)
            large_saveas_key = urlsafe_base64_encode(bucket + ":" + key + "-large")
            medium_saveas_key = urlsafe_base64_encode(bucket + ":" + key + "-medium")
            # styles must match the ones configured in the qiniu console;
            # see http://blog.csdn.net/netdxy/article/details/50223733
            large_ops = "imageView2/2/w/970/h/970/q/100|saveas/" + large_saveas_key
            medium_ops = (
                "imageView2/2/w/580/h/350/q/50|saveas/" + medium_saveas_key
            )
            ops = [large_ops, medium_ops]
            pfop = PersistentFop(q, bucket)
            ret, info = pfop.execute(key, ops, 1)
            if info.exception is None:
                print(key + " persistent fop to success")
            else:
                print(key + " persistent fop to failed")
    except Exception as e:
        print(str(e))
        print(key + " persistent fop to failed")
def upload_video(bucket, local):
    """Upload a local video under a policy that chains two persistent fops:
    an mp4 transcode saved to one bucket and a 1-second cover frame saved
    to another. Magic variables ($(endUser), $(etag), ...) are expanded
    server-side.
    """
    bucket_mp4_video = 'video-mp4-test'
    bucket_video_cover_image = 'vframe-test'
    pipeline = 'video-transcode'
    q = Auth(access_key, secret_key)
    # transcode fop + its saveas target
    transcode_save = urlsafe_base64_encode(
        '{}:{}'.format(bucket_mp4_video, '$(endUser)/$(year)/$(mon)/$(day)/$(etag).mp4'))
    fops_transcode = '{}|saveas/{}'.format('avthumb/mp4/vb/1.25m', transcode_save)
    # cover-frame fop + its saveas target
    vframe_save = urlsafe_base64_encode(
        '{}:{}'.format(bucket_video_cover_image, '$(endUser)/$(year)/$(mon)/$(day)/$(etag).jpg'))
    fops_vframe = '{}|saveas/{}'.format('vframe/jpg/offset/1', vframe_save)
    persistentOps = ';'.join([fops_transcode, fops_vframe])
    print('persistentOps: {}'.format(persistentOps))
    policy = {
        'scope': '{}:$(endUser)'.format(bucket),
        'saveKey': '$(endUser)/$(year)/$(mon)/$(day)/$(etag)$(ext)',
        'endUser': '******',
        'persistentOps': persistentOps,
        'persistentPipeline': pipeline,
    }
    token = q.upload_token(bucket, policy=policy)
    ret, info = put_file(token, None, local)
    print(ret)
    print(info)
    assert ret['key'] == etag(local)
def qiniu_download(data):
    """Compose a signed, styled download URL from *data*, fetch it once,
    and report whether the resource is reachable.

    :return: dict with ``result`` (bool) and ``private_url`` ('' on failure).
    """
    q = qiniu_q()  # authenticated qiniu client
    bucket_domain = settings.BUCKET_DOMAIN
    # pick the watermark logo source, base64url-encoded for the style string
    src = data["diy_url"] if data["url"] == "diy_url" else data["url"]
    logo_url = urlsafe_base64_encode(src)
    style_args = (
        int(data["width"]),
        int(data["height"]),
        int(data["quality"]),
        data['format'],
        logo_url,
        int(data["dissolve"]),
        data["place"],
    )
    style = "?imageView2/1/w/%d/h/%d/q/%d/format/%s|watermark/1/image/%s/dissolve/%d/gravity/%s/dx/30/dy/30" % style_args
    key = data["key"] + style
    base_url = 'http://%s/%s' % (bucket_domain, key)
    # token expires after one hour
    private_url = q.private_download_url(base_url, expires=3600)
    res, _ = httplib2.Http().request(private_url, "GET")
    ok = res['status'] == '200'
    return {"result": ok, "private_url": private_url if ok else ""}
def move_res(self, key, new_key):
    """Rename *key* to *new_key* within self.bucket via the rs /move API."""
    src = urlsafe_base64_encode('%s:%s' % (self.bucket, key))
    dst = urlsafe_base64_encode('%s:%s' % (self.bucket, new_key))
    target = '/move/%s/%s' % (src, dst)
    token = self.auth.token_of_request(target, content_type='application/json')
    return self.deal_manage_res(target, token)
def watermark_with_image_and_text(srcBucket, srcKey, destFormat, wmText, wmImage,
                                  wmGravity="NorthWest", wmGravityText="NorthEast",
                                  wmFont=None, wmFontColor=None, wmFontSize=None,
                                  saveBucket=None, saveKey=None,
                                  usePipeline=None, notifyUrl=None):
    """Transcode srcBucket:srcKey to *destFormat* while burning in both an
    image watermark and a text watermark, via an async persistent fop.

    :param wmText: watermark text (base64url-encoded into the fop).
    :param wmImage: watermark image URL (base64url-encoded into the fop).
    :param wmGravity / wmGravityText: placement of image / text watermark.
    :param wmFont, wmFontColor, wmFontSize: optional text styling; font name
        and color travel base64url-encoded, size as-is.
    :param saveBucket, saveKey: optional saveas target for the result.
    :param usePipeline: dedicated queue; :param notifyUrl: result callback.
    """
    params = {
        "wmText": qiniu.urlsafe_base64_encode(wmText),
        "wmGravityText": wmGravityText,
        "wmImage": qiniu.urlsafe_base64_encode(wmImage),
        "wmGravity": wmGravity,
    }
    # `is not None` identity checks; plain item assignment instead of
    # single-key dict.update() calls
    if wmFont is not None:
        params["wmFont"] = qiniu.urlsafe_base64_encode(wmFont)
    if wmFontColor is not None:
        params["wmFontColor"] = qiniu.urlsafe_base64_encode(wmFontColor)
    if wmFontSize is not None:
        params["wmFontSize"] = wmFontSize
    fop = qiniu.build_op("avthumb", destFormat, **params)
    # saveas
    if saveBucket is not None and saveKey is not None:
        fop = qiniu.op_save(fop, saveBucket, saveKey)
    # pfop (accessKey/secretKey are module-level credentials)
    auth = qiniu.Auth(accessKey, secretKey)
    pfop = qiniu.PersistentFop(auth, srcBucket, pipeline=usePipeline,
                               notify_url=notifyUrl)
    retData, respInfo = pfop.execute(srcKey, [fop], force=None)
    if retData is not None:
        print("PersistentId:" + retData["persistentId"])
    else:
        print("Error:")
        print("--StatusCode:" + str(respInfo.status_code))
        print("--Reqid:" + respInfo.req_id)
        print("--Message:" + respInfo.error)
def batch_data(filename, freezeAfterDays):
    """Build the request body of a batch ``restoreAr`` (archive-thaw) call.

    :param filename: iterable of keys in the module-level ``bucket_name``.
    :param freezeAfterDays: days before the restored copy refreezes.
    :return: ``op=/restoreAr/<entry>/freezeAfterDays/<n>`` segments joined
        with ``&`` ('' for an empty input, matching the old behavior).
    """
    ops = []
    for key in filename:
        encodedEntryURI = urlsafe_base64_encode(f"{bucket_name}:{key}")
        ops.append(f"op=/restoreAr/{encodedEntryURI}/freezeAfterDays/{freezeAfterDays}")
    # join() replaces the old quadratic +=-in-loop and the [0:-1] trim of
    # the trailing '&'
    return "&".join(ops)
def save_mp3():
    """Upload a local mp4 under a policy that transcodes it to mp3
    (48 kHz, video stream stripped) and saves the result in the same bucket.
    """
    # NOTE(review): real-looking credentials are hard-coded here — they
    # should live in configuration and be rotated.
    access_key = '0ZT-Rd0AswhPQti5lX2Ytt1T6XkyM80eY_4w9Pm9'
    secret_key = 'MbscrgLx_FefkUZ21SjY-GRE1oPJcvP2vvN6oXgW'
    q = Auth(access_key, secret_key)
    # target bucket/key for the upload
    bucket_name = 'live-bucket'
    key = 'mayun12_13.mp4'
    pipeline = 'image-pipeline'  # transcode queue
    # avthumb fop chained with a saveas target in the same bucket
    saveas_key = urlsafe_base64_encode(bucket_name + ':mayun_mp3_test.mp3')
    fops = 'avthumb/mp3/ar/48000/vn/1|saveas/' + saveas_key
    # policy carries the fop and the pipeline
    policy = {'persistentOps': fops, 'persistentPipeline': pipeline}
    token = q.upload_token(bucket_name, key, 3600, policy)
    localfile = '/Users/ryanxu/Downloads/mayun.mp4'
    ret, info = put_file(token, key, localfile)
    print(info)
    assert ret['key'] == key
    assert ret['hash'] == etag(localfile)
def get_qiniu_uptoken(key, access_key=settings.QINIU_ACCESS_KEY,
                      secret_key=settings.QINIU_SECRET_KEY,
                      bucket=settings.QINIU_BUCKET_NAME):
    '''
    Build a qiniu upload token whose policy persists "-large" and
    "-medium" renditions of the uploaded image.
    '''
    # saveas targets for the two renditions; styles must match the ones
    # configured in the console — see
    # http://blog.csdn.net/netdxy/article/details/50223733
    ops = []
    for style, suffix in (("imageView2/2/w/970/h/970/q/100", "-large"),
                          ("imageView2/2/w/580/h/350/q/50", "-medium")):
        target = urlsafe_base64_encode(bucket + ":" + key + suffix)
        ops.append(style + "|saveas/" + target)
    policy = {"persistentOps": ";".join(ops)}
    auth = Auth(access_key, secret_key)
    return auth.upload_token(bucket, policy=policy)
def disableStreams(access_key, secret_key, hub, streamTitle, disabledTill):
    """Disable (ban) a live stream until *disabledTill*.

    https://developer.qiniu.com/pili/api/2775/off-the-air-flow

    :param disabledTill: Unix seconds; -1 = permanent ban, 0 = lift the ban.
    :return: (pretty-printed response headers, pretty-printed body) as
        JSON strings. Typical errors: 612 {"error": "stream not found"}.
    """
    auth = QiniuMacAuth(access_key, secret_key)
    # the stream name travels base64url-encoded in the path
    EncodedStreamTitle = urlsafe_base64_encode(streamTitle)
    url = f'http://pili.qiniuapi.com/v2/hubs/{hub}/streams/{EncodedStreamTitle}/disabled'
    ret, res = http._post_with_qiniu_mac(url, {"disabledTill": disabledTill}, auth)
    meta = {"code": res.status_code, "reqid": res.req_id, "xlog": res.x_log}
    return (json.dumps(meta, indent=4, ensure_ascii=False),
            json.dumps(ret, indent=4, ensure_ascii=False))
def streamsInfo(access_key, secret_key, hub, streamTitle):
    """Query a live stream's metadata (createdAt / updatedAt / expireAt /
    converts / disabledTill).

    https://developer.qiniu.com/pili/api/2773/query-stream

    :return: (pretty-printed response headers, pretty-printed body) as
        JSON strings.
    """
    auth = QiniuMacAuth(access_key, secret_key)
    # the stream name travels base64url-encoded in the path
    EncodedStreamTitle = urlsafe_base64_encode(streamTitle)
    url = f'http://pili.qiniuapi.com/v2/hubs/{hub}/streams/{EncodedStreamTitle}'
    ret, res = http._get_with_qiniu_mac(url=url, params=None, auth=auth)
    meta = {"code": res.status_code, "reqid": res.req_id, "xlog": res.x_log}
    return (json.dumps(meta, indent=4, ensure_ascii=False),
            json.dumps(ret, indent=4, ensure_ascii=False))
def bangumi_cover_transcoding(key, sizi):
    """Create a bangumi cover rendition for *key* in the cover-cache bucket.

    ``sizi == 'min'`` produces the 450x600 cover saved under
    ``bangumi/cover/<key>``; any other value produces the 120x160 version
    under ``bangumi/cover/min/<key>``. Results land in the video-cover bucket.
    """
    q = Auth(access_key, secret_key)
    bucket = 'cover-cache'  # source bucket of the original cover
    if sizi == 'min':
        fops = 'imageView2/2/w/450/h/600/format/jpg/q/75|imageslim'
        newkey = 'bangumi/cover/' + key
    else:
        fops = 'imageView2/2/w/120/h/160/format/jpg/q/75|imageslim'
        newkey = 'bangumi/cover/min/' + key
    # chain the saveas target into the video-cover bucket
    fops = fops + '|saveas/' + urlsafe_base64_encode('video-cover:' + newkey)
    pfop = PersistentFop(q, bucket)  # default (public) queue; no pipeline arg
    ret, info = pfop.execute(key, [fops], 1)
    print(info)
def saveasStreams(access_key, secret_key, hub, streamTitle, body):
    """Save a live snapshot for a stream.

    https://developer.qiniu.com/pili/api/2520/save-the-live-capture

    :param body: request-body dict forwarded to the snapshot endpoint.
    :return: (pretty-printed response headers, pretty-printed body) as
        JSON strings; success body looks like {"fname": "<Fname>"}.
    """
    auth = QiniuMacAuth(access_key, secret_key)
    # the stream name travels base64url-encoded in the path
    EncodedStreamTitle = urlsafe_base64_encode(streamTitle)
    url = f'http://pili.qiniuapi.com/v2/hubs/{hub}/streams/{EncodedStreamTitle}/snapshot'
    ret, res = http._post_with_qiniu_mac(url, body, auth)
    meta = {"code": res.status_code, "reqid": res.req_id, "xlog": res.x_log}
    return (json.dumps(meta, indent=4, ensure_ascii=False),
            json.dumps(ret, indent=4, ensure_ascii=False))
def delete_res(self, key):
    """Delete *key* from self.bucket through the rs /delete API."""
    encoded = urlsafe_base64_encode('%s:%s' % (self.bucket, key))
    target = '/delete/%s' % encoded
    token = self.auth.token_of_request(target, content_type='application/json')
    return self.deal_manage_res(target, token)
def water_mark(self, key):
    """Run the instance's preconfigured watermark fop (self.fops) on *key*,
    saving the result over the same key in self.bucket_name."""
    target = urlsafe_base64_encode('{}:{}'.format(self.bucket_name, key))
    ops = [self.fops + '|saveas/' + target]
    pfop = PersistentFop(self.q, self.bucket_name, self.pipeline)
    pfop.execute(key, ops, 1)
def roundPic(self, bn, key):
    """Round the corners of an uploaded image (radius 50%) via a
    persistent fop, saving the result as ``round_<key>``.

    :param bn: bucket name
    :param key: source file name
    """
    auth = Auth(self.AK, self.SK)
    pipeline = "image-pipeline"  # dedicated image queue
    # rounded-corner fop chained with its saveas target
    saveAs = urlsafe_base64_encode(bn + ":" + "round_" + key)
    fops = "roundPic/radius/!50p" + "|saveas/" + saveAs
    # submit and print the processing info
    pfop = PersistentFop(auth=auth, bucket=bn, pipeline=pipeline)
    r, i = pfop.execute(key, [fops], 1)
    print(i)
def download_weixin_media(cls, data):
    """Fetch a WeChat voice file, upload it to qiniu with an amr→mp3
    persistent fop, and return the resulting mp3 URL.

    Fixed here: Python-2 ``print`` statements, and the media file being
    written in text mode — the API payload is binary AMR audio, so it is
    now written with ``'wb'``.
    """
    media_id = data.get('media_id')
    result = weixin_api.get_media(media_id)
    localfile = env.STORAGE_FILE_DIR + '/weixin_media_' + media_id + '.amr'
    # AMR audio is binary — 'w' (text mode) raises on bytes under Python 3
    with open(localfile, 'wb') as f:
        f.write(result)
    # target bucket/key for the raw upload
    bucket_name = 'weixin-media-resource'
    key = media_id + '.amr'
    # transcode fop and its queue
    fops = 'avthumb/mp3/ab/128k/ar/44100/acodec/libmp3lame'
    pipeline = 'weixin_media'
    q = Auth(qiniu_config.access_key, qiniu_config.secret_key)
    # saveas target for the transcoded mp3 (same bucket)
    saveas_key = urlsafe_base64_encode(bucket_name + ':' + media_id + '.mp3')
    fops = fops + '|saveas/' + saveas_key
    # upload policy carries the fop and pipeline
    policy = {'persistentOps': fops, 'persistentPipeline': pipeline}
    token = q.upload_token(bucket_name, key, 3600, policy)
    ret, info = put_file(token, key, localfile)
    print(ret, info)
    return cls.success_with_result(
        'http://' + qiniu_config.resource_domain + '/' + media_id + '.mp3')
def video_transcoding(key, pxtype):
    """Transcode a cached video to mp4 at the preset matching *pxtype*
    ('360p', '480p', '720p', '1080p', '2k', '4k'), saving the result as
    ``video-store:bangumi/<name>-<pxtype>.mp4``.

    :raises ValueError: for an unknown *pxtype* (previously this crashed
        later with a NameError on the unset preset variables).
    """
    # preset table: (frame size, video bitrate, audio bitrate)
    presets = {
        '360p': ('640x360', '250k', '128k'),
        '480p': ('852x480', '550k', '192k'),
        '720p': ('1280x720', '850k', '230k'),
        '1080p': ('1920x1080', '1000k', '320k'),
        '2k': ('2560x1440', '3200k', '320k'),
        '4k': ('3840x2160', '4000k', '320k'),
    }
    try:
        video_sizi, video_kbps, video_mp3kbps = presets[pxtype]
    except KeyError:
        raise ValueError('unknown pxtype: %s' % pxtype)
    q = Auth(access_key, secret_key)
    bucket = 'video-cache'  # source bucket
    filekey = str(key)      # original key used for pfop.execute
    pipeline = 'transcoding'
    fops = 'avthumb/mp4/s/' + video_sizi + '/vb/' + video_kbps + '/ab/' + video_mp3kbps + '/acodec/libmp3lame'
    # output name: <basename>-<pxtype>.mp4 (extension always mp4)
    name = filekey.split(".")[0]
    out_key = name + '-' + pxtype + '.' + 'mp4'
    saveas_key = urlsafe_base64_encode('video-store:' + "bangumi/" + out_key)
    fops = fops + '|saveas/' + saveas_key
    pfop = PersistentFop(q, bucket, pipeline)
    ret, info = pfop.execute(filekey, [fops], 1)
    print(info)
def get_qiniu_uptoken(
    key,
    access_key=settings.QINIU_ACCESS_KEY,
    secret_key=settings.QINIU_SECRET_KEY,
    bucket=settings.QINIU_BUCKET_NAME,
):
    """
    Build a qiniu upload token whose policy persists "-large" and
    "-medium" renditions of the uploaded image.
    """
    def _op(style, suffix):
        # one persistentOps entry: style fop + saveas target; styles must
        # match the console configuration — see
        # http://blog.csdn.net/netdxy/article/details/50223733
        return style + "|saveas/" + urlsafe_base64_encode(bucket + ":" + key + suffix)

    large_ops = _op("imageView2/2/w/970/h/970/q/100", "-large")
    medium_ops = _op("imageView2/2/w/580/h/350/q/50", "-medium")
    policy = {"persistentOps": large_ops + ";" + medium_ops}
    return Auth(access_key, secret_key).upload_token(bucket, policy=policy)
def bigfileupload(request):
    """
    Resumable (chunked) upload endpoint.

    Takes the first file from ``request.FILES``, uploads it to the qiniu
    bucket named by the ``bucket`` query parameter under a fresh
    timestamp+random key, and — for non-pdf, non-image uploads — attaches
    a persistent fop converting the file to pdf. Returns the stored key
    and download URL wrapped in a JSONResponse.

    Fixed here: Python-2 ``print key`` statement, which made the module
    unparseable under Python 3; logic is otherwise unchanged.
    """
    try:
        bucket_name = request.GET.get('bucket')
        if bucket_name not in qiniu_url.keys():
            raise InvestError(2020, msg='bucket error')
        data_dict = request.FILES
        uploaddata = None
        for key in data_dict.keys():
            uploaddata = data_dict[key]
        q = qiniu.Auth(ACCESS_KEY, SECRET_KEY)
        filetype = str(uploaddata.name).split('.')[-1]
        # timestamp + 6 random lowercase letters keeps keys collision-free
        key = datetime.datetime.now().strftime('%Y%m%d%H%M%s') + ''.join(
            random.sample(string.ascii_lowercase, 6)) + '.' + filetype
        if filetype != 'pdf' and bucket_name not in ['image', u'image']:
            # non-pdf documents get an async pdf conversion fop; the
            # temporary original is dropped after one day
            saveas_key = qiniu.urlsafe_base64_encode(
                'file:%s' % (key.split('.')[0] + '.pdf'))
            persistentOps = fops + '|saveas/' + saveas_key
            policy = {
                'persistentOps': persistentOps,
                # 'persistentPipeline': pipeline,
                'deleteAfterDays': 1,
            }
        else:
            policy = None
        print(key)
        params = {'x:a': 'a'}
        mime_type = uploaddata.content_type
        token = q.upload_token(bucket_name, key, 3600, policy=policy)
        progress_handler = lambda progress, total: progress / total
        uploader = _Resume(token, key, uploaddata, uploaddata.size, params,
                           mime_type, progress_handler,
                           upload_progress_recorder=MyUploadProgressRecorder(),
                           modify_time=None, file_name=key)
        ret, info = uploader.upload()
        if info is not None:
            if info.status_code == 200:
                return_url = getUrlWithBucketAndKey(bucket_name, ret['key'])
            else:
                raise InvestError(2020, msg=str(info))
        else:
            raise InvestError(2020, msg=str(ret))
        if policy:
            # the pdf conversion renames the effective key
            key = key.split('.')[0] + '.pdf'
        return JSONResponse(SuccessResponse({'key': key, 'url': return_url}))
    except InvestError as err:
        return JSONResponse(InvestErrorResponse(err))
    except Exception:
        return JSONResponse(
            ExceptionResponse(traceback.format_exc().split('\n')[-2]))
def saveas(url_with_fop, save_bucket, save_key, ak, sk):
    """Append a signed ``|saveas/<entry>`` directive to a fop-style URL.

    :param url_with_fop: download URL already carrying a fop query.
    :param save_bucket, save_key: target entry for the processed result.
    :param ak, sk: access/secret key used to sign the directive.
    :return: the URL with ``|saveas/<entry>/sign/<token>`` appended.
    """
    encoded_entry = qiniu.urlsafe_base64_encode(save_bucket + ":" + save_key)
    new_url_with_fop = "{0}|saveas/{1}".format(url_with_fop, encoded_entry)
    # Bug fix: str.strip(scheme + "://") removed any of those *characters*
    # from both ends of the URL (it is a character-set strip, not a prefix
    # strip), which could eat trailing '/', ':' or scheme letters. Remove
    # exactly the scheme prefix instead.
    scheme = urlparse.urlparse(new_url_with_fop).scheme
    prefix = scheme + "://"
    if new_url_with_fop.startswith(prefix):
        new_url_without_scheme = new_url_with_fop[len(prefix):]
    else:
        new_url_without_scheme = new_url_with_fop
    auth = qiniu.Auth(ak, sk)
    encoded_sign = auth.token(new_url_without_scheme)
    final_url = "{0}/sign/{1}".format(new_url_with_fop, encoded_sign)
    return final_url
def historyactivity(access_key, secret_key, hub, streamTitle, startTime, endTime):
    """Query a stream's live-history segments and return the total live
    duration in minutes.

    https://developer.qiniu.com/pili/api/2778/live-history

    :param startTime, endTime: local-time strings 'YYYY-MM-DD HH:MM:SS';
        converted to Unix seconds for the API.
    :return: sum of (end - start) over all segments, divided by 60.
    """
    auth = QiniuMacAuth(access_key, secret_key)
    # the stream name travels base64url-encoded in the path
    EncodedStreamTitle = urlsafe_base64_encode(streamTitle)

    def to_timestamp(text):
        # 'YYYY-MM-DD HH:MM:SS' (local time) -> Unix seconds; the old
        # parameter name `datetime` shadowed the stdlib module name
        return int(time.mktime(time.strptime(text, "%Y-%m-%d %H:%M:%S")))

    start = to_timestamp(startTime)
    end = to_timestamp(endTime)
    url = f'http://pili.qiniuapi.com/v2/hubs/{hub}/streams/{EncodedStreamTitle}/historyactivity?start={start}&end={end}'
    ret, res = http._get_with_qiniu_mac(url=url, params=None, auth=auth)
    # per-segment durations in seconds (comprehension replaces the manual
    # append loop into a list named `l`)
    durations = [item["end"] - item["start"] for item in ret["items"]]
    return sum(durations) / 60
def min_url(self, filename):
    """Build a map-marker image for *filename* via two chained pfops.

    1. Shrink user-img/<filename> to a 50x50 thumbnail saved as
       min-img/min_<filename>.
    2. Composite that thumbnail onto the st_out.png marker template,
       saved as min-img/map_<filename>.

    :return: URL of the finished marker image.
    """
    q = QiniuClass.Auth(self._access_key, self._secret_key)
    new_src = 'http://opkrd0ovy.bkt.clouddn.com'

    # --- step 1: thumbnail -------------------------------------------------
    from_bucket_name = 'user-img'
    to_bucket_name = 'min-img'
    from_key = filename
    to_key = 'min_' + from_key
    self.min_delete(to_key)  # drop any stale thumbnail first
    pipeline = ''  # '' = shared (public) queue
    fops = 'imageView2/1/w/50/h/50/q/100|imageslim'
    fops = fops + '|saveas/' + QiniuClass.urlsafe_base64_encode(to_bucket_name + ':' + to_key)
    pfop = QiniuClass.PersistentFop(q, from_bucket_name, pipeline)
    ret, info = pfop.execute(from_key, [fops], 1)
    print(info)
    new_url = urljoin(new_src, to_key)

    # --- step 2: composite onto the marker template ------------------------
    from_bucket_name = 'min-img'
    to_bucket_name = 'min-img'
    map_from_key = 'st_out.png'
    map_to_key = 'map_' + from_key
    self.min_delete(map_to_key)  # drop any stale marker first
    pipeline = ''
    bg_img = QiniuClass.urlsafe_base64_encode(new_url)
    fops = 'imageView2/2/w/65/h/90/q/75|watermark/1/image/' + bg_img + '/dissolve/100/gravity/North/dx/0/dy/5|imageslim'
    print(fops)
    fops = fops + '|saveas/' + QiniuClass.urlsafe_base64_encode(to_bucket_name + ':' + map_to_key)
    pfop = QiniuClass.PersistentFop(q, from_bucket_name, pipeline)
    ret, info = pfop.execute(map_from_key, [fops], 1)
    print(info)
    return urljoin(new_src, map_to_key)
def rename(old_name):
    """Derive an obfuscated, collision-resistant file name for *old_name*,
    keeping the original extension. The new stem is the url-safe base64 of
    a salted SHA-1 over name + current time, with '=' padding removed."""
    extension = old_name.split('.')[-1]
    stem = old_name.split('.')[0]
    salted = '{}-{}-{}'.format(stem, time.time(), 'fewihsdhwidw')
    digest = hashlib.sha1(salted.encode('utf-8')).digest()
    hashed_stem = qiniu.urlsafe_base64_encode(digest).replace('=', '')
    return '.'.join([hashed_stem, extension])
def pack_large_number_of_files(self, bucket_name: str, index_file_key: str,
                               encoding: str = 'utf-8',
                               save_as_bucket_name: str = None,
                               save_as_key: str = None,
                               delete_after_days: int = None,
                               pipeline: str = None):
    """Zip a large set of files (mkzip mode=4) listed in an index file.

    The index file, already uploaded to *bucket_name*, has one line per
    source resource:

        /url/<Base64EncodedURL>[/alias/<Base64EncodedAlias>]

    :param bucket_name: bucket holding the index file
    :param index_file_key: key of the index file
    :param encoding: entry-name encoding in the zip ('utf-8' or 'gbk')
    :param save_as_bucket_name: bucket for the resulting archive
    :param save_as_key: key for the resulting archive
    :param delete_after_days: auto-delete the archive after N days
        (None = keep forever)
    :param pipeline: processing queue name (None = default queue)
    :return: (ret, info) from PersistentFop.execute
    """
    fops = 'mkzip/4/encoding/' + urlsafe_base64_encode(encoding)
    if save_as_bucket_name is not None and save_as_key is not None:
        fops = fops + '|saveas/' + urlsafe_base64_encode(
            save_as_bucket_name + ':' + save_as_key)
    if delete_after_days is not None:
        fops = fops + '/deleteAfterDays/' + str(delete_after_days)
    executor = PersistentFop(self._qiniu_auth, bucket_name, pipeline)
    return executor.execute(index_file_key, [fops], 1)
def upload_token(self, key):
    """Issue an upload token whose policy runs the thumbnail fop and saves
    the result back over *key* in the configured file bucket."""
    target = '{bucket}:{key}'.format(bucket=settings['qiniu_file_bucket'], key=key)
    policy = QINIU_POLICY.copy()
    policy['persistentOps'] = QINIU_THUMB + '|saveas/' + urlsafe_base64_encode(target)
    return self.qiniu.upload_token(
        bucket=settings['qiniu_file_bucket'],
        expires=settings['qiniu_token_timeout'],
        policy=policy)
def get_dt_url(klist, pic_host="", dur=50):
    """Build an ``animate/duration/.../merge`` style URL that merges the
    given keys into one animation.

    :param klist: sequence of source keys; first one anchors the URL.
    :param pic_host: image host; falls back to app.config["PIC_HOST"].
    :param dur: per-frame duration for the animate fop.
    :return: the composed URL, or None for an empty *klist* (unchanged
        behavior of the bare ``return``).
    """
    if not klist:
        return None
    # one '/key/<b64>' segment per source key; join() replaces the old
    # quadratic +=-in-loop concatenation
    part_str = "".join("/key/" + urlsafe_base64_encode(str(k)) for k in klist)
    if not pic_host:
        pic_host = app.config["PIC_HOST"]
    url = pic_host + "/" + klist[0] + '?animate/duration/%s/merge%s' % (
        dur, part_str)
    return url
def picture_thu(self, file_name):
    """Thumbnail *file_name* (600x600 max, slight blur, quality 75,
    slimmed) and save the result over the same key in self.bucket_name."""
    saveas_key = urlsafe_base64_encode('{}:{}'.format(self.bucket_name, file_name))
    fop = 'imageMogr2/auto-orient/thumbnail/600x600>/blur/1x0/quality/75|imageslim' + '|saveas/' + saveas_key
    executor = PersistentFop(self.q, self.bucket_name)
    ret, info = executor.execute(file_name, [fop], 1)
    print(ret)
def main():
    """Upload a local mp4 under a policy chaining three persistent fops:
    a watermarked mp4, a single cover frame, and periodic sample frames."""
    bucket = "if-pbl"
    key = "qiniu_temp.mp4"
    filePath = "/Users/jemy/Documents/qiniu.mp4"
    auth = qiniu.Auth(accessKey, secretKey)
    # pre-encode every base64url argument once
    wmImage = qiniu.urlsafe_base64_encode(
        "http://if-pbl.qiniudn.com/qiubai.jpg")
    saveAs = qiniu.urlsafe_base64_encode(bucket + ":qiniu_tree.mp4")
    frame_save = qiniu.urlsafe_base64_encode(bucket + ":first_tree.png")
    sample_pattern = qiniu.urlsafe_base64_encode("sample_$(count).png")
    fops = [
        "avthumb/mp4/wmImage/{0}|saveas/{1}".format(wmImage, saveAs),
        "vframe/png/offset/10/w/400/h/224/rotate/auto|saveas/{0}".format(frame_save),
        "vsample/png/ss/0/t/180/s/400x224/rotate/auto/interval/10/pattern/{0}".format(sample_pattern),
    ]
    policy = {"persistentOps": ";".join(fops), "persistentPipeline": "fff"}
    upToken = auth.upload_token(bucket, key=key, policy=policy)
    upload(upToken, key, filePath)
def avconcat(self, save_bucket_name, saveas, base_bucket_name, base, urls,
             mode='2', format='mp4', pipeline='concatevideo', notify_url=None):
    """Concatenate up to five videos (by URL) after *base* and persist
    the result to save_bucket_name:saveas.

    :raises ValueError: when more than five URLs are supplied.
    :return: (ret, info) from PersistentFop.execute
    """
    pfop = qiniu.PersistentFop(
        auth=self._auth,
        bucket=base_bucket_name,
        pipeline=pipeline,
        notify_url=notify_url,
    )
    head = qiniu.build_op('avconcat', mode, format=format)
    if len(urls) > 5:
        raise ValueError('cannot append more then 5 videos')
    # fop layout: avconcat/.../<b64 url1>/<b64 url2>/...
    segments = [head] + [qiniu.urlsafe_base64_encode(u) for u in urls]
    op = qiniu.op_save('/'.join(segments), save_bucket_name, saveas)
    logging.debug('[op] {}'.format(op))
    return pfop.execute(base, [op], 1)
# -*- coding: utf-8 -*-
# flake8: noqa
from qiniu import Auth, PersistentFop, build_op, op_save, urlsafe_base64_encode

# Async frame capture for a video already stored in qiniu.
access_key = 'Access_Key'
secret_key = 'Secret_Key'
q = Auth(access_key, secret_key)

# Source bucket and key of the video to process.
bucket = 'Bucket_Name'
key = '1.mp4'

# Dedicated processing queue.
pipeline = 'mpsdemo'

# Grab one jpg frame at 1s, 480x360, rotated 90 degrees.
fops = 'vframe/jpg/offset/1/w/480/h/360/rotate/90'

# Optional saveas target; without it the result gets a default name in
# the source bucket.
saveas_key = urlsafe_base64_encode('目标Bucket_Name:自定义文件key')
fops = fops + '|saveas/' + saveas_key

pfop = PersistentFop(q, bucket, pipeline)
ops = [fops]
ret, info = pfop.execute(key, ops, 1)
print(info)
assert ret['persistentId'] is not None
def test_urlsafe(self):
    """Round-trip: urlsafe_base64_decode inverts urlsafe_base64_encode
    for a string containing non-ASCII and a raw control byte."""
    raw = '你好\x96'
    encoded = urlsafe_base64_encode(raw)
    assert b(raw) == urlsafe_base64_decode(encoded)
# Initialize the Auth client.
q = Auth(access_key, secret_key)

# Target bucket and a key that exists in it.
bucket_name = 'Bucket_Name'
key = 'python_video.flv'

# Transcode queue name.
pipeline = 'your_pipeline'

# Video transcode fop chained with a '|saveas' target
# (bucket_saved:bucket_saved); without saveas the output lands in the
# source bucket under a default name.
saveas_key = urlsafe_base64_encode('bucket_saved:bucket_saved')
fops = 'avthumb/mp4/vcodec/libx264' + '|saveas/' + saveas_key

# The upload policy carries the fops and the pipeline.
policy = {
    'persistentOps': fops,
    'persistentPipeline': pipeline,
}
token = q.upload_token(bucket_name, key, 3600, policy)

localfile = './python_video.flv'
ret, info = put_file(token, key, localfile)
print(info)
# flake8: noqa
from qiniu import Auth, PersistentFop, build_op, op_save, urlsafe_base64_encode

# Async watermarking of a video already stored in qiniu.
access_key = 'Access_Key'
secret_key = 'Secret_Key'
q = Auth(access_key, secret_key)

# Source bucket and key.
bucket = 'Bucket_Name'
key = '1.mp4'

# Processing queue.
pipeline = 'mpsdemo'

# Watermark image as UrlSafeBase64 — see
# http://developer.qiniu.com/code/v6/api/dora-api/av/video-watermark.html
base64URL = urlsafe_base64_encode('http://developer.qiniu.com/resource/logo-2.jpg')

# Video watermark fop.
fops = 'avthumb/mp4/' + base64URL

# saveas target for the result.
saveas_key = urlsafe_base64_encode('目标Bucket_Name:自定义文件key')
fops = fops + '|saveas/' + saveas_key

pfop = PersistentFop(q, bucket, pipeline)
ops = [fops]
ret, info = pfop.execute(key, ops, 1)
print(info)
assert ret['persistentId']
def test_urlsafe(self):
    """Round-trip: urlsafe_base64_decode inverts urlsafe_base64_encode
    for an ASCII string carrying a raw control byte."""
    raw = "hello\x96"
    encoded = urlsafe_base64_encode(raw)
    assert b(raw) == urlsafe_base64_decode(encoded)