def describebucket(self, bucket=None, offset=0, limit=10, header=None):
    """
    Get bucket information; when no bucket name is given, list all buckets.

    @param bucket: string, bucket name (the previous docstring documented
        'bucketname', which did not match the actual parameter name)
    @param offset: integer, index of the first bucket to return; ignored
        when a bucket name is supplied
    @param limit: integer, number of buckets to return; ignored when a
        bucket name is supplied
    @param header: dict, extra HTTP request headers with string keys and
        values, e.g. {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = dict()
    else:
        _check_dict(header)
    if 'User-Agent' not in header:
        header['User-Agent'] = config.get_default('user_agent')

    param = dict()
    param['Action'] = 'DescribeBucket'
    if bucket is not None:
        param['BucketName'] = bucket
    param['Offset'] = s(str(offset))
    param['Limit'] = s(str(limit))

    signature = self.__auth.bucket_signature(param)
    param['Signature'] = signature

    logger.info('start request the bucket {0} details'.format(bucket))
    return _bucket_request(UCLOUD_API_URL, param, header)
def createbucket(self, bucket, buckettype='private', domainlist=None, header=None):
    """
    Create a new bucket.

    @param bucket: string, bucket name
    @param buckettype: string, either 'private' or 'public'
    @param domainlist: list, domain names to bind to the bucket
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = {}
    else:
        _check_dict(header)
    header.setdefault('User-Agent', config.get_default('user_agent'))

    param = {
        'Action': 'CreateBucket',
        'BucketName': bucket,
        'Type': buckettype,
    }
    if domainlist is None:
        domainlist = []
    # each bound domain becomes a numbered Domain.N parameter
    for idx, domain in enumerate(domainlist):
        param['Domain.{0}'.format(idx)] = domain

    param['Signature'] = self.__auth.bucket_signature(param)

    logger.info('start create bucket {0}'.format(bucket))
    return _bucket_request(UCLOUD_API_URL, param, header)
def getfilelist(self, bucket, offset=0, limit=20, header=None):
    """
    List the files stored in a bucket (management API variant).

    @param bucket: string, bucket name
    @param offset: integer, offset into the file list
    @param limit: integer, number of entries to return
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = {}
    else:
        _check_dict(header)
    header.setdefault('User-Agent', config.get_default('user_agent'))

    param = {
        'Action': 'GetFileList',
        'BucketName': bucket,
        'Offset': s(str(offset)),
        'Limit': s(str(limit)),
    }
    param['Signature'] = self.__auth.bucket_signature(param)

    logger.info('start request the file list of bucket {0}'.format(bucket))
    return _bucket_request(UCLOUD_API_URL, param, header)
def __initialsharding(self):
    """
    Send the request that initializes a multipart (sharding) upload.

    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if self.__header is None:
        self.__header = {}
    else:
        _check_dict(self.__header)
    self.__header.setdefault('User-Agent', config.get_default('user_agent'))

    # the initiation request carries an empty plain-text body
    self.__header['Content-Length'] = 0
    self.__header['Content-Type'] = 'text/plain'
    self.__header['Authorization'] = self.authorization(
        'post', self.__bucket, self.__key, self.__header)

    url = initialsharding_url(self.__bucket, self.__key)
    logger.info('start initialize sharding')
    logger.info('initial sharding url: {0}'.format(url))
    return _initialsharding(url, self.__header)
def download_file(self, bucket, key, localfile, isprivate=True, expires=config.get_default('expires'), content_range=None, header=None):
    """
    Download a UFile object and save it as a local file.

    @param bucket: string, UFile bucket name
    @param key: string, name of the object to download
    @param localfile: string, path of the local file to save to
    @param isprivate: boolean, True when the bucket is private
    @param expires: integer, lifetime of the signed URL for private objects
    @param content_range: tuple of two integers, byte range to download
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = {}
    else:
        _check_dict(header)
    header.setdefault('User-Agent', config.get_default('user_agent'))

    # a well-formed (begin, end) pair becomes an HTTP Range header
    if isinstance(content_range, tuple) and len(content_range) == 2:
        begin, end = content_range
        header['Range'] = 'bytes={0}-{1}'.format(begin, end)

    if isprivate:
        url = self.private_download_url(bucket, key, expires, header, True)
    else:
        url = self.public_download_url(bucket, key)

    logger.info('get ufile url:{0}'.format(url))
    return _download_file(url, header, localfile)
def describeProject(self):
    """
    List all projects of the account.

    @return jsonbody: raises when the HTTP status code is not 200 or
        RetCode is not 0; otherwise a dict
        {Action, RetCode, ProjectCount, ProjectSet}, where each ProjectSet
        entry contains ProjectId, ProjectName, ParentId, ParentName,
        CreateTime (unix timestamp), IsDefault (bool), ResourceCount and
        MemberCount.
    """
    payload = {
        'Action': 'GetProjectList',
        'PublicKey': self.getPublicKey(),
    }
    payload['Signature'] = self.signature(payload)
    logger.info('describe project')
    return _post(payload)
def inviteSubaccount(self, email, password, phone, userName, isFinance="false"):
    """
    Invite a sub-account (project member).

    @param email: string, user email
    @param password: string, password
    @param phone: string, phone number, e.g. (86)15012344321
    @param userName: string, user name
    @param isFinance: string, whether the user is finance staff (may
        request invoices etc.); defaults to 'false'
    @return jsonbody: raises when the HTTP status code is not 200 or
        RetCode is not 0; otherwise a dict {Action: ..., RetCode: ...}
    """
    payload = dict()
    payload['Action'] = 'InviteSubaccount'
    payload['UserEmail'] = email
    payload['UserPwd'] = password
    payload['UserPhone'] = phone
    payload['UserName'] = userName
    payload['IsFinance'] = isFinance
    payload['PublicKey'] = self.getPublicKey()
    payload['Signature'] = self.signature(payload)
    # fixed: previously logged 'describe project' (copy-pasted from
    # describeProject), which made the logs misleading
    logger.info('invite subaccount')
    return _post(payload)
def deletebucket(self, bucket, header=None, projectid=None):
    """
    Delete a bucket.

    @param bucket: string, bucket name
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @param projectid: string, project ID; omitted from the request when None
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = {}
    else:
        _check_dict(header)
    header.setdefault('User-Agent', config.get_default('user_agent'))

    param = {
        'Action': 'DeleteBucket',
        'BucketName': bucket,
    }
    if projectid is not None:
        param['ProjectId'] = projectid
    param['Signature'] = self.__auth.bucket_signature(param)

    logger.info('start delete bucket {0}'.format(bucket))
    return _bucket_request(UCLOUD_API_URL, param, header)
def test_downloadwrongkey(self):
    """Downloading a nonexistent key should yield HTTP 404."""
    handler = self.downloadufile_handler
    handler.set_keys(public_key, private_key)
    logger.info('start download with nonexist key')
    ret, resp = handler.download_file(public_bucket, wrong_key,
                                      public_small_download,
                                      isprivate=False)
    assert resp.status_code == 404
def test_postufiletowrongbucket(self):
    """Posting a file to a nonexistent bucket should yield HTTP 400."""
    handler = self.postfile_handler
    handler.set_keys(public_key, private_key)
    logger.info('\nstart post small file to wrong bucket')
    ret, resp = handler.postfile(wrong_bucket, post_small_key, small_local_file)
    logger.error(resp.error)
    assert resp.status_code == 400
def test_postufilewithwrongkey(self):
    """Posting with a wrong API key pair should be rejected with 403."""
    handler = self.postfile_handler
    handler.set_keys(wrong_public_key, wrong_private_key)
    logger.info('\nstart post small file to public bucket with wrong api keys pair')
    ret, resp = handler.postfile(public_bucket, post_small_key, small_local_file)
    logger.error(resp.error)
    assert resp.status_code == 403
def test_downloadwithwrongapikeys(self):
    """Downloading from the private bucket with wrong keys should yield 403."""
    handler = self.downloadufile_handler
    handler.set_keys(wrong_public_key, wrong_private_key)
    logger.info('start download from private bucket with wrong api keys')
    ret, resp = handler.download_file(private_bucket, put_small_key,
                                      private_small_download)
    assert resp.status_code == 403
def test_downloadwithrange(self):
    """Range downloads (public URL and signed URL) should return 206."""
    handler = self.downloadufile_handler
    handler.set_keys(public_key, private_key)
    byte_range = (0, 5)

    logger.info('start download with range condition from public bucket')
    ret, resp = handler.download_file(public_bucket, put_range_key,
                                      public_range_download, isprivate=False,
                                      expires=get_default('expires'),
                                      content_range=byte_range, header=None)
    assert resp.status_code == 206

    # NOTE(review): this call also targets public_bucket, but with a signed
    # (isprivate=True) URL — kept as in the original test
    logger.info('start download with range condition from private bucket')
    ret, resp = handler.download_file(public_bucket, put_range_key,
                                      private_range_download, isprivate=True,
                                      expires=get_default('expires'),
                                      content_range=byte_range, header=None)
    assert resp.status_code == 206
def test_uploadtowrongbucket(self):
    """Sharding upload to a nonexistent bucket should yield HTTP 400."""
    handler = self.multipartuploadufile_handler
    handler.set_keys(public_key, private_key)
    logger.info('start upload file to wrong bucket')
    ret, resp = handler.uploadfile(wrong_bucket, sharding_small_key,
                                   small_local_file)
    assert resp.status_code == 400
    print(resp.error)
def test_putfiletowrongbucket(self):
    """Putting a file to a nonexistent bucket should yield HTTP 400."""
    handler = self.putufile_handler
    handler.set_keys(public_key, private_key)
    logger.info('\nput file to wrong bucket')
    ret, resp = handler.putfile(wrong_bucket, put_small_key, small_local_file)
    assert resp.status_code == 400
    logger.info(resp.error)
def test_putstream(self):
    """Putting a binary stream to both buckets should return 200."""
    handler = self.putufile_handler
    handler.set_keys(public_key, private_key)

    logger.info('\nput stream to public bucket')
    ret, resp = handler.putstream(public_bucket, put_stream_key, bio)
    assert resp.status_code == 200

    # rewind the shared stream before reusing it
    bio.seek(0, os.SEEK_SET)
    logger.info('\nput stream to private bucket')
    ret, resp = handler.putstream(private_bucket, put_stream_key, bio)
    assert resp.status_code == 200
def test_uploadtowrongbucket(self):
    """A multipart upload to a nonexistent bucket should yield HTTP 400."""
    handler = self.multipartuploadufile_handler
    handler.set_keys(public_key, private_key)
    logger.info('start upload file to wrong bucket')
    ret, resp = handler.uploadfile(wrong_bucket, sharding_small_key,
                                   small_local_file)
    assert resp.status_code == 400
    print(resp.error)
def test_downloadfromwrongbucket(self):
    """Downloading from a nonexistent bucket should yield HTTP 400."""
    handler = self.downloadufile_handler
    handler.set_keys(public_key, private_key)
    logger.info('start download from wrong bucket')
    ret, resp = handler.download_file(wrong_bucket, put_small_key,
                                      public_small_download,
                                      isprivate=False)
    assert resp.status_code == 400
def test_downloadprivate(self):
    """Downloading small and big files from the private bucket; expect 200."""
    handler = self.downloadufile_handler
    handler.set_keys(public_key, private_key)

    logger.info('start download small file from private bucket')
    ret, resp = handler.download_file(private_bucket, put_small_key,
                                      private_small_download)
    assert resp.status_code == 200

    # NOTE(review): 'pirvate' typo kept verbatim in the original log message
    logger.info('start download big file from pirvate bucket')
    ret, resp = handler.download_file(private_bucket, put_big_key,
                                      private_big_download)
    assert resp.status_code == 200
def test_downloadprivate(self):
    """Small and big downloads from the private bucket should return 200."""
    handler = self.downloadufile_handler
    handler.set_keys(public_key, private_key)

    logger.info('start download small file from private bucket')
    ret, resp = handler.download_file(private_bucket, put_small_key,
                                      private_small_download)
    assert resp.status_code == 200

    # NOTE(review): 'pirvate' typo kept verbatim in the original log message
    logger.info('start download big file from pirvate bucket')
    ret, resp = handler.download_file(private_bucket, put_big_key,
                                      private_big_download)
    assert resp.status_code == 200
def test_postufilewithwrongkey(self):
    """A form POST with a wrong API key pair should be rejected with 403."""
    handler = self.postfile_handler
    handler.set_keys(wrong_public_key, wrong_private_key)
    logger.info(
        '\nstart post small file to public bucket with wrong api keys pair'
    )
    ret, resp = handler.postfile(public_bucket, post_small_key,
                                 small_local_file)
    logger.error(resp.error)
    assert resp.status_code == 403
def test_postufile(self):
    """Posting small and big files to both buckets should return 200."""
    handler = self.postfile_handler
    handler.set_keys(public_key, private_key)
    cases = [
        ('\nstart post small file to public bucket', public_bucket, post_small_key, small_local_file),
        ('\nstart post big file to public bucket', public_bucket, post_big_key, big_local_file),
        ('\nstart post small file to private bucket', private_bucket, post_small_key, small_local_file),
        ('\nstart post big file to private bucket', private_bucket, post_big_key, big_local_file),
    ]
    for message, target_bucket, key, localfile in cases:
        logger.info(message)
        ret, resp = handler.postfile(target_bucket, key, localfile)
        logger.error(resp.error)
        assert resp.status_code == 200
def test_uploadfile(self):
    """Sharding-upload small and big files to both buckets; expect 200."""
    handler = self.multipartuploadufile_handler
    handler.set_keys(public_key, private_key)
    cases = [
        ('start sharding small file to public bucket', public_bucket, sharding_small_key, small_local_file),
        ('start sharding upload big file to public bucket', public_bucket, sharding_big_key, big_local_file),
        ('start sharding upload small file to private bucket', private_bucket, sharding_small_key, small_local_file),
        ('start sharding upload big file to private bucket', private_bucket, sharding_big_key, big_local_file),
    ]
    for message, target_bucket, key, localfile in cases:
        logger.info(message)
        ret, resp = handler.uploadfile(target_bucket, key, localfile)
        print(resp.error)
        assert resp.status_code == 200
def __finishupload(self):
    """
    Send the request that completes a multipart (sharding) upload by
    posting the comma-joined etag list.

    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if self.__header is None:
        self.__header = {}
    else:
        _check_dict(self.__header)
    self.__header.setdefault('User-Agent', config.get_default('user_agent'))

    self.__header['Content-Type'] = 'text/plain'
    logger.info(self.etaglist)
    # the finish request body is the comma-joined list of part etags
    body = ','.join(self.etaglist)
    logger.info(body)
    self.__header['Content-Length'] = len(body)
    self.__header['Authorization'] = self.authorization(
        'post', self.__bucket, self.__key, self.__header)
    logger.info(json.dumps(self.__header, indent=4))

    params = {'uploadId': self.uploadid}
    url = finishsharding_url(self.__bucket, self.__key)
    logger.info('start finish sharding request')
    return _finishsharding(url, params, self.__header, body)
def test_uploadstream(self):
    """Sharding-upload a binary stream to both buckets; expect 200."""
    handler = self.multipartuploadufile_handler
    handler.set_keys(public_key, private_key)

    logger.info('start upload stream to public bucket')
    ret, resp = handler.uploadstream(public_bucket, sharding_stream_key, bio)
    print(resp.error)
    assert resp.status_code == 200

    logger.info('start upload stream to private bucket')
    # rewind the shared stream before reusing it
    bio.seek(0, os.SEEK_SET)
    ret, resp = handler.uploadstream(private_bucket, sharding_stream_key, bio)
    print(resp.error)
    assert resp.status_code == 200
def test_uploadwithwrongkeys(self):
    """Multipart uploads with a wrong key pair should be rejected with 403."""
    handler = self.multipartuploadufile_handler
    handler.set_keys(wrong_public_key, wrong_private_key)

    logger.info('start upload file to bucket with wrong api keys')
    ret, resp = handler.uploadfile(public_bucket, sharding_small_key,
                                   small_local_file)
    assert resp.status_code == 403
    print(resp.error)

    logger.info('start upload file to private bucket with wrong api keys')
    ret, resp = handler.uploadfile(private_bucket, sharding_small_key,
                                   small_local_file)
    assert resp.status_code == 403
    print(resp.error)
def test_getfilelist(self):
    """Listing files in a bucket should return 200 and iterate the DataSet."""
    handler = self.getfilelist_hander
    handler.set_keys(public_key, private_key)
    ret, resp = handler.getfilelist(bucket, prefix='', limit=100, marker='')
    assert resp.status_code == 200
    for entry in ret['DataSet']:
        key = entry['FileName'].encode('utf-8')
        logger.info(key)
    nextMarker = ret['NextMarker']
    logger.info('NextMarker is {0}'.format(nextMarker))
def test_uploadstream(self):
    """Streaming multipart uploads to both buckets should return 200."""
    handler = self.multipartuploadufile_handler
    handler.set_keys(public_key, private_key)

    logger.info('start upload stream to public bucket')
    ret, resp = handler.uploadstream(public_bucket, sharding_stream_key, bio)
    print(resp.error)
    assert resp.status_code == 200

    logger.info('start upload stream to private bucket')
    # rewind the shared stream before the second upload
    bio.seek(0, os.SEEK_SET)
    ret, resp = handler.uploadstream(private_bucket, sharding_stream_key, bio)
    print(resp.error)
    assert resp.status_code == 200
def putfile(dir, file):
    """
    Upload one local file to the configured bucket.

    @param dir: string, directory containing the file (name shadows the
        builtin 'dir'; kept for backward compatibility with callers)
    @param file: string, file name; also used as the object key
    """
    import os.path  # local import so the module needs no new top-level deps

    # construct the uploader with the public/private key pair
    handler = putufile.PutUFile(public_key, private_key)
    logger.info('start upload file to public bucket')
    # target bucket
    bucket = bucketname
    # object key in the bucket after upload
    key = file
    # portable path join instead of manual "dir + '/' + file" concatenation
    local_file = os.path.join(dir, file)
    print(local_file)
    # issue the upload request
    ret, resp = handler.putfile(bucket, key, local_file)
    assert resp.status_code == 200
def test_uploadwithwrongkeys(self):
    """A wrong key pair must be rejected with 403 on both buckets."""
    handler = self.multipartuploadufile_handler
    handler.set_keys(wrong_public_key, wrong_private_key)

    logger.info('start upload file to bucket with wrong api keys')
    ret, resp = handler.uploadfile(public_bucket, sharding_small_key,
                                   small_local_file)
    assert resp.status_code == 403
    print(resp.error)

    logger.info('start upload file to private bucket with wrong api keys')
    ret, resp = handler.uploadfile(private_bucket, sharding_small_key,
                                   small_local_file)
    assert resp.status_code == 403
    print(resp.error)
def terminateMember(self, memberEmail):
    """
    Terminate (delete) an account. After termination the account no longer
    exists, and its phone/email may register (or be invited) as a new
    account.

    @param memberEmail: string, email of the account to terminate
    @return jsonbody: raises when the HTTP status code is not 200 or
        RetCode is not 0; otherwise a dict {Action: ..., RetCode: ...}
    """
    payload = {
        'Action': 'TerminateMember',
        'MemberEmail': memberEmail,
        'PublicKey': self.getPublicKey(),
    }
    payload['Signature'] = self.signature(payload)
    logger.info('terminate member')
    return _post(payload)
def removeProject(self, projectId):
    """
    Delete a project.

    @param projectId: string, project ID
    @return jsonbody: raises when the HTTP status code is not 200 or
        RetCode is not 0; otherwise a dict {Action: ..., RetCode: ...}
    """
    # the backing API action for project removal is 'TerminateProject'
    payload = {
        'Action': 'TerminateProject',
        'ProjectId': projectId,
        'PublicKey': self.getPublicKey(),
    }
    payload['Signature'] = self.signature(payload)
    logger.info('remove project')
    return _post(payload)
def getfilelist(self, bucket, prefix=None, marker=None, limit=None, header=None):
    """
    List the files stored under a bucket.

    @param bucket: string, bucket name
    @param prefix: string, key-prefix filter; server default is ''
    @param marker: string, key to start listing from; server default is ''
    @param limit: integer, maximum number of keys to return; server
        default is 20
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = dict()
    else:
        _check_dict(header)
    if 'User-Agent' not in header:
        header['User-Agent'] = config.get_default('user_agent')
    header['Content-Length'] = 0
    authorization = self.authorization('get', bucket, '', header)
    header['Authorization'] = authorization
    param = dict()
    # NOTE(review): the 'isinstance(x, str) or isinstance(x, unicode)' checks
    # rely on short-circuit evaluation so the Python-2-only name 'unicode' is
    # never looked up for plain str values on Python 3 — do not fold these
    # into a single isinstance(x, (str, unicode)) tuple check.
    if marker is not None and (isinstance(marker, str) or isinstance(marker, unicode)):
        param['marker'] = s(marker)
    if prefix is not None and (isinstance(prefix, str) or isinstance(prefix, unicode)):
        param['prefix'] = s(prefix)
    if limit is not None and isinstance(limit, int):
        param['limit'] = s(str(limit))
    # build one log line describing which optional filters were supplied
    info_message = ''.join([
        'start get file list from bucket {0}'.format(bucket),
        '' if marker is None else ', marker: {0}'.format(
            marker if isinstance(marker, str) else marker.encode('utf-8')),
        '' if limit is None else ', limit: {0}'.format(limit),
        '' if prefix is None else ', prefix: {0}'.format(prefix)
    ])
    logger.info(info_message)
    url = ufile_getfilelist_url(bucket)
    return _getfilelist(url, header, param)
def postfile(self, bucket, key, localfile, header=None):
    """
    Upload a local file to a UFile bucket via a multipart/form-data POST.

    @param bucket: string, bucket name
    @param key: string, object name in the bucket
    @param localfile: string, local file path
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = dict()
    else:
        # fixed: only validate caller-supplied headers — previously
        # _check_dict also ran on the freshly created dict, inconsistent
        # with every sibling method in this module
        _check_dict(header)
    if 'User-Agent' not in header:
        header['User-Agent'] = config.get_default('user_agent')

    mime_type = s(Mimetype.from_file(localfile))

    # the multipart boundary is embedded in the content-type header
    boundary = self.__make_boundary()
    header['Content-Type'] = 'multipart/form-data; boundary={0}'.format(
        boundary)

    # form fields signed together with the mime type
    authorization = self.authorization('post', bucket, key, header, mime_type)
    fields = dict()
    fields['FileName'] = key
    fields['Authorization'] = authorization

    with open(localfile, 'rb') as stream:
        postdata = self.__make_postbody(boundary, fields, stream, mime_type,
                                        localfile)

    # update the request header content-length
    header['Content-Length'] = str(len(postdata))

    # post url
    url = ufile_post_url(bucket)

    logger.info('start post file {0} to bucket {1} as {2}'.format(
        localfile, bucket, key))
    logger.info('post url is {0}'.format(url))
    return _post_file(url, header, postdata)
def createProject(self, projectname):
    """
    Create a new project.

    @param projectname: string, name of the project to create
    @return jsonbody: raises when the HTTP status code is not 200 or
        RetCode is not 0; otherwise a dict {Action: ..., RetCode: ...,
        ProjectId: id of the created project}
    """
    payload = dict()
    payload['Action'] = 'CreateProject'
    payload['ProjectName'] = projectname
    payload['PublicKey'] = self.getPublicKey()
    signature = self.signature(payload)
    payload['Signature'] = signature
    logger.info('create project {0}'.format(projectname))
    # fixed: removed leftover debug print(payload), which wrote the
    # PublicKey and Signature to stdout
    return _post(payload)
def removeMemberFromProject(self, projectId, memberEmail):
    """
    Remove a member from a project. The account itself survives but loses
    access to the project's resources; it can be added to another project.

    @param projectId: string, project ID
    @param memberEmail: string, email of the account to remove
    @return jsonbody: raises when the HTTP status code is not 200 or
        RetCode is not 0; otherwise a dict {Action: ..., RetCode: ...}
    """
    payload = {
        'Action': 'RemoveMemberFromProject',
        'ProjectId': projectId,
        'MemberEmail': memberEmail,
        'PublicKey': self.getPublicKey(),
    }
    payload['Signature'] = self.signature(payload)
    logger.info('remove member from project')
    return _post(payload)
def postfile(self, bucket, key, localfile, header=None):
    """
    Upload a local file to a UFile bucket via a multipart/form-data POST.

    @param bucket: string, bucket name
    @param key: string, object name in the bucket
    @param localfile: string, local file path
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = dict()
    else:
        # fixed: only validate caller-supplied headers — previously
        # _check_dict also ran on the freshly created dict, inconsistent
        # with the sibling methods
        _check_dict(header)
    if 'User-Agent' not in header:
        header['User-Agent'] = config.get_default('user_agent')

    mime_type = s(Mimetype.from_file(localfile))

    # the multipart boundary is embedded in the content-type header
    boundary = self.__make_boundary()
    header['Content-Type'] = 'multipart/form-data; boundary={0}'.format(
        boundary)

    # form fields signed together with the mime type
    authorization = self.authorization('post', bucket, key, header, mime_type)
    fields = dict()
    fields['FileName'] = key
    fields['Authorization'] = authorization

    with open(localfile, 'rb') as stream:
        postdata = self.__make_postbody(boundary, fields, stream, mime_type,
                                        localfile)

    # fixed: send content-length as a string, matching the sibling
    # implementation (it was previously an int here)
    header['Content-Length'] = str(len(postdata))

    # post url
    url = ufile_post_url(bucket)

    logger.info('start post file {0} to bucket {1} as {2}'.format(
        localfile, bucket, key))
    logger.info('post url is {0}'.format(url))
    return _post_file(url, header, postdata)
def addMemberToProject(self, projectId, memberEmail, characterId='Admin'):
    """
    Add an account to a specific project.

    @param projectId: string, project ID (from GetProjectList)
    @param memberEmail: string, email of the account to add
    @param characterId: string, role ID; defaults to 'Admin'. The Admin
        role exists by default with access to all products; custom roles
        with narrower permissions can be created.
    @return jsonbody: raises when the HTTP status code is not 200 or
        RetCode is not 0; otherwise a dict {Action: ..., RetCode: ...}
    """
    payload = {
        'Action': 'AddMemberToProject',
        'ProjectId': projectId,
        'CharacterId': characterId,
        'MemberEmail': memberEmail,
        'PublicKey': self.getPublicKey(),
    }
    payload['Signature'] = self.signature(payload)
    logger.info('add member to project')
    return _post(payload)
def putstream(self, bucket, key, stream, mime_type=None, header=None):
    """
    Upload a binary stream to a bucket. Data is sent from the stream's
    current position; the caller is responsible for seeking beforehand.

    @param bucket: string, bucket name
    @param key: string, object name in the bucket
    @param stream: binary data stream to upload
    @param mime_type: MIME type of the stream; defaults to
        'application/octet-stream'
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = {}
    else:
        _check_dict(header)
    header.setdefault('User-Agent', config.get_default('user_agent'))

    header['Content-Type'] = (mime_type if mime_type is not None
                              else 'application/octet-stream')
    header['Authorization'] = self.authorization('put', bucket, key, header)

    url = ufile_put_url(bucket, key)
    logger.info('start put stream to bucket {0} as {1}'.format(bucket, key))
    logger.info('put UFile url: {0}'.format(url))
    logger.info('request header:\n{0}'.format(json.dumps(header, indent=4)))
    return _put_stream(url, header, stream)
def putfile(self, bucket, key, localfile, header=None):
    """
    Upload a local file to a bucket under the given key.

    @param bucket: string, bucket name
    @param key: string, object name in the bucket
    @param localfile: string, local file path
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = {}
    else:
        _check_dict(header)
    header.setdefault('User-Agent', config.get_default('user_agent'))

    # content-type is signed; content-length is added after signing,
    # matching the original statement order
    header['Content-Type'] = s(Mimetype.from_file(localfile))
    header['Authorization'] = self.authorization('put', bucket, key, header)
    header['Content-Length'] = os.path.getsize(localfile)

    url = ufile_put_url(bucket, key)
    logger.info('start put file {0} to bucket {1} as {2}'.format(localfile, bucket, key))
    logger.info('put UFile url: {0}'.format(url))
    logger.info('request header:\n{0}'.format(json.dumps(header, indent=4)))
    return _put_file(url, header, localfile)
def uploadhit(self, bucket, key, localfile, header=None):
    """
    Try an 'upload hit' (instant upload by content hash) to a UFile bucket:
    if the server already stores content with the same etag, no data needs
    to be transferred.

    @param bucket: string, bucket name
    @param key: string, object name in the bucket
    @param localfile: string, local file path
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = dict()
    else:
        # fixed: only validate caller-supplied headers — previously
        # _check_dict also ran on the freshly created dict, inconsistent
        # with the sibling methods
        _check_dict(header)
    if 'User-Agent' not in header:
        header['User-Agent'] = config.get_default('user_agent')

    filesize = os.path.getsize(localfile)
    fileetags = file_etag(localfile, BLOCKSIZE)
    mimetype = s(Mimetype.from_file(localfile))

    # update request header; the request itself carries no body
    header['Content-Type'] = mimetype
    header['Content-Length'] = 0
    authorization = self.authorization('post', bucket, key, header)
    header['Authorization'] = authorization

    # query parameters identifying the content by hash
    params = {'Hash': fileetags, 'FileName': key, 'FileSize': filesize}

    url = ufile_uploadhit_url(bucket)
    logger.info('start upload hit localfile {0} as {1} in bucket {2}'.format(localfile, key, bucket))
    logger.info('request url: {0}'.format(url))
    return _uploadhit_file(url, header, params)
def test_putufilewithwrongkey(self):
    """Puts with a wrong API key pair should be rejected with 403."""
    handler = self.putufile_handler
    handler.set_keys(wrong_public_key, wrong_private_key)

    logger.info('\nput small file to public bucket with wrong api keys pair')
    ret, resp = handler.putfile(public_bucket, put_small_key, small_local_file)
    assert resp.status_code == 403
    logger.info(resp.error)

    logger.info('\nput small file to private bucket with wrong api keys pair')
    ret, resp = handler.putfile(private_bucket, put_small_key, small_local_file)
    logger.error('status_code:{0}'.format(resp.status_code))
    assert resp.status_code == 403
    logger.info(resp.error)
def deletefile(self, bucket, key, header=None):
    """
    Delete a file from a bucket.

    @param bucket: string, bucket name
    @param key: string, name of the object to delete
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = {}
    else:
        _check_dict(header)
    header.setdefault('User-Agent', config.get_default('user_agent'))

    header['Authorization'] = self.authorization('delete', bucket, key, header)

    logger.info('start delete file {0} in bucket {1}'.format(key, bucket))
    # the delete endpoint uses the same URL layout as put
    url = ufile_put_url(bucket, key)
    return _delete_file(url, header)
def deletebucket(self, bucket, header=None, projectid=None):
    """
    Delete a bucket.

    @param bucket: string, bucket name
    @param header: dict, extra HTTP request headers, e.g.
        {'User-Agent': 'Google Chrome'}
    @param projectid: string, optional project ID; omitted from the request
        when None (new parameter, default keeps the previous behavior and
        matches the projectid-aware variant of this method)
    @return ret: None when the HTTP status code is one of [200, 204, 206];
        otherwise the server's JSON body as a dict (empty dict when the
        body is not JSON)
    @return ResponseInfo: detailed response information from the UCloud
        UFile server, or the network error encountered
    """
    if header is None:
        header = dict()
    else:
        _check_dict(header)
    if 'User-Agent' not in header:
        header['User-Agent'] = config.get_default('user_agent')

    param = dict()
    param['Action'] = 'DeleteBucket'
    param['BucketName'] = bucket
    if projectid is not None:
        param['ProjectId'] = projectid

    signature = self.__auth.bucket_signature(param)
    param['Signature'] = signature

    logger.info('start delete bucket {0}'.format(bucket))
    return _bucket_request(UCLOUD_API_URL, param, header)
def test_putufile(self):
    """Putting small and big files to both buckets should return 200."""
    handler = self.putufile_handler
    handler.set_keys(public_key, private_key)
    cases = [
        ('\nput small file to public bucket', public_bucket, put_small_key, small_local_file),
        ('\nput big file to public bucket', public_bucket, put_big_key, big_local_file),
        ('\nput small file to private bucket', private_bucket, put_small_key, small_local_file),
        ('\nput big file to private bucket', private_bucket, put_big_key, big_local_file),
    ]
    for message, target_bucket, key, localfile in cases:
        logger.info(message)
        ret, resp = handler.putfile(target_bucket, key, localfile)
        assert resp.status_code == 200
def test_uploadfile(self):
    # Exercise multipart (sharding) upload against every bucket/size combination.
    self.multipartuploadufile_handler.set_keys(public_key, private_key)
    cases = [
        ('start sharding small file to public bucket', public_bucket, sharding_small_key, small_local_file),
        ('start sharding upload big file to public bucket', public_bucket, sharding_big_key, big_local_file),
        ('start sharding upload small file to private bucket', private_bucket, sharding_small_key, small_local_file),
        ('start sharding upload big file to private bucket', private_bucket, sharding_big_key, big_local_file),
    ]
    for message, bucket, key, localfile in cases:
        logger.info(message)
        ret, resp = self.multipartuploadufile_handler.uploadfile(bucket, key, localfile)
        print(resp.error)
        assert resp.status_code == 200
def test_uploadhitunexistfile(self):
    # An upload-hit against content the server has never seen must miss (404).
    self.uploadhitufile_handler.set_keys(public_key, private_key)
    logger.info('start uploadhit nonexistfile')
    result, response = self.uploadhitufile_handler.uploadhit(public_bucket, instead_key, nonexistfile)
    assert response.status_code == 404
def test_downloadwithwrongapikeys(self):
    # Downloading from a private bucket with bad credentials must be rejected (403).
    self.downloadufile_handler.set_keys(wrong_public_key, wrong_private_key)
    logger.info('start download from private bucket with wrong api keys')
    result, response = self.downloadufile_handler.download_file(private_bucket, put_small_key, private_small_download)
    assert response.status_code == 403
def test_uploadhittowrongbucket(self):
    # An upload-hit targeting a nonexistent bucket must fail with a 400.
    self.uploadhitufile_handler.set_keys(public_key, private_key)
    logger.info('start uploadhit to wrong bucket')
    result, response = self.uploadhitufile_handler.uploadhit(wrong_bucket, instead_key, existfile)
    assert response.status_code == 400
def test_uploadhitwithwrongkeys(self):
    # An upload-hit signed with bad credentials must be rejected (403).
    self.uploadhitufile_handler.set_keys(wrong_public_key, wrong_private_key)
    logger.info('start uploadhit with wrong api keys')
    result, response = self.uploadhitufile_handler.uploadhit(public_bucket, instead_key, existfile)
    assert response.status_code == 403
def uploadstream(self, bucket, key, stream, retrycount=3, retryinterval=5, mime_type=None, header=None):
    """
    Multipart-upload a binary data stream to a UFile bucket.

    @param bucket: string, bucket name
    @param key: string, name of the uploaded data within the bucket
    @param stream: file-like object or binary data stream
    @param retrycount: integer, number of attempts per part (values < 1 are treated as 1)
    @param retryinterval: integer, seconds to wait between retries of the same part
    @param mime_type: string, MIME type of the data; sniffed from the stream when None
    @param header: dict, HTTP request headers (string keys/values), e.g. {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206]; otherwise a dict
                 parsed from the server's JSON reply (empty dict when there is none)
    @return ResponseInfo: detailed response info from the UCloud UFile server, or a network error
    """
    self.__bucket = bucket
    self.__key = key
    self.etaglist = []
    self.uploadid = None
    self.blocksize = None
    self.__header = header
    self.__stream = stream
    self.pausepartnumber = 0
    if self.__header is None:
        self.__header = dict()
    else:
        _check_dict(self.__header)
    if 'User-Agent' not in self.__header:
        self.__header['User-Agent'] = config.get_default('user_agent')

    # Guard: always make at least one attempt per part; otherwise resp below
    # would remain None and resp.ok() would raise AttributeError.
    retrycount = max(1, retrycount)

    # initial sharding request
    ret, resp = self.__initialsharding()
    if resp.ok():
        self.uploadid = ret.get('UploadId')
        self.blocksize = ret.get('BlkSize')
        logger.info('multipart upload id: {0}'.format(self.uploadid))
    else:
        # fixed typo: 'multipar' -> 'multipart'
        logger.error('multipart upload init failed. \nerror message: {0}'.format(resp.error))
        return ret, resp

    # Determine the MIME type: sniff the first 1KB when the stream is seekable,
    # otherwise fall back to the generic binary type.
    if mime_type is None:
        if hasattr(self.__stream, 'seek') and hasattr(self.__stream, 'read'):
            self.__mimetype = s(Mimetype.from_buffer(self.__stream.read(1024)))
            self.__stream.seek(0, os.SEEK_SET)
        else:
            # fixed typo: was 'application/octec-stream' (invalid MIME type)
            self.__mimetype = 'application/octet-stream'
    else:
        self.__mimetype = mime_type
    self.__header['Content-Type'] = self.__mimetype
    authorization = self.authorization('put', self.__bucket, self.__key, self.__header)
    self.__header['Authorization'] = authorization

    # Upload each block, retrying a failed part up to retrycount times.
    for data in _file_iter(self.__stream, self.blocksize):
        url = shardingupload_url(self.__bucket, self.__key, self.uploadid, self.pausepartnumber)
        ret = None
        resp = None
        for index in range(retrycount):
            logger.info('try {0} time sharding upload sharding {1}'.format(index + 1, self.pausepartnumber))
            logger.info('sharding url:{0}'.format(url))
            ret, resp = _shardingupload(url, data, self.__header)
            if not resp.ok():
                logger.error('failed {0} time when upload sharding {1}.error message: {2}, uploadid: {3}'.format(index + 1, self.pausepartnumber, resp.error, self.uploadid))
                if index < retrycount - 1:
                    time.sleep(retryinterval)
            else:
                break
        if not resp.ok():
            logger.error('upload sharding {0} failed. uploadid: {1}'.format(self.pausepartnumber, self.uploadid))
            return ret, resp
        logger.info('upload sharding {0} succeed.etag:{1}, uploadid: {2}'.format(self.pausepartnumber, resp.etag, self.uploadid))
        self.pausepartnumber += 1
        self.etaglist.append(resp.etag)

    # Finish the multipart upload.
    logger.info('start finish sharding request.')
    ret, resp = self.__finishupload()
    if not resp.ok():
        logger.error('multipart upload failed. uploadid:{0}, pausepartnumber: {1}, key: {2} FAIL!!!'.format(self.uploadid, self.pausepartnumber, self.__key))
    else:
        # fixed typo: 'mulitpart' -> 'multipart'
        logger.info('multipart upload succeed. uploadid: {0}, key: {1} SUCCEED!!!'.format(self.uploadid, self.__key))
    return ret, resp
def resumeuploadstream(self, retrycount=3, retryinterval=5, bucket=None, key=None, uploadid=None, blocksize=None, etaglist=None, stream=None, pausepartnumber=None, mime_type=None, header=None):
    """
    Resume a failed multipart upload from the first failed part.

    Can be called right after a failed uploadstream to continue it, or as a
    standalone call by supplying every required parameter explicitly.

    @param retrycount: integer, number of attempts per part (values < 1 are treated as 1)
    @param retryinterval: integer, seconds to wait between retries of the same part
    @param bucket: string, bucket name
    @param key: string, name of the file or data within the bucket
    @param uploadid: string, upload id obtained from the initial sharding request
    @param blocksize: integer, part size
    @param etaglist: list, etags of the parts already uploaded successfully
    @param pausepartnumber: integer, number of the first failed part (0-based)
    @param stream: file-like object or binary data stream to re-upload
    @param mime_type: string, MIME type of the data
    @param header: dict, HTTP request headers (string keys/values), e.g. {'User-Agent': 'Google Chrome'}
    @return ret: None when the HTTP status code is one of [200, 204, 206]; otherwise a dict
                 parsed from the server's JSON reply (empty dict when there is none)
    @return ResponseInfo: detailed response info from the UCloud UFile server, or a network error
    """
    # Any explicitly supplied parameter overrides the state left behind by a
    # previous uploadstream call.
    if bucket:
        self.__bucket = bucket
    if key:
        self.__key = key
    if uploadid:
        self.uploadid = uploadid
    if blocksize:
        self.blocksize = blocksize
    if stream:
        self.__stream = stream
    if etaglist:
        self.etaglist = etaglist
    if pausepartnumber:
        self.pausepartnumber = pausepartnumber
    if header:
        self.__header = header

    if mime_type is not None:
        self.__mimetype = mime_type
    else:
        # On a standalone resume the attribute may never have been set, which
        # previously raised AttributeError; treat "unset" like "None".
        try:
            current_mimetype = self.__mimetype
        except AttributeError:
            current_mimetype = None
        if current_mimetype is None:
            # fixed typo: was 'application/octec-stream' (invalid MIME type)
            self.__mimetype = 'application/octet-stream'

    if self.__header is None:
        self.__header = dict()
    else:
        _check_dict(self.__header)
    if 'User-Agent' not in self.__header:
        self.__header['User-Agent'] = config.get_default('user_agent')

    # Guard: always make at least one attempt per part; otherwise resp below
    # would remain None and resp.ok() would raise AttributeError.
    retrycount = max(1, retrycount)

    # initial sharding request (only when no uploadid was carried over/supplied)
    if self.uploadid is None:
        ret, resp = self.__initialsharding()
        if resp.ok():
            self.uploadid = ret.get('UploadId')
            self.blocksize = ret.get('BlkSize')
            logger.info('multipart upload id: {0}'.format(self.uploadid))
        else:
            # fixed typo: 'multipar' -> 'multipart'
            logger.error('multipart upload init failed. \nerror message: {0}'.format(resp.error))
            return ret, resp

    self.__header['Content-Type'] = self.__mimetype
    authorization = self.authorization('put', self.__bucket, self.__key, self.__header)
    self.__header['Authorization'] = authorization

    # Re-upload each remaining block, retrying a failed part up to retrycount times.
    for data in _file_iter(self.__stream, self.blocksize):
        url = shardingupload_url(self.__bucket, self.__key, self.uploadid, self.pausepartnumber)
        ret = None
        resp = None
        for index in range(retrycount):
            # fixed log format: was 'retry {0} time sharding upload sharing {0}',
            # which repeated the attempt count and dropped the part number
            logger.info('retry {0} time sharding upload sharding {1}'.format(index + 1, self.pausepartnumber))
            logger.info('sharding url:{0}'.format(url))
            ret, resp = _shardingupload(url, data, self.__header)
            if not resp.ok():
                logger.error('failed {0} time when retry upload sharding {1},error message: {2}, uploadid: {3}'.format(index + 1, self.pausepartnumber, resp.error, self.uploadid))
                if index < retrycount - 1:
                    time.sleep(retryinterval)
            else:
                break
        if not resp.ok():
            logger.error('retry upload sharding {0} failed, uploadid: {1}'.format(self.pausepartnumber, self.uploadid))
            return ret, resp
        logger.info('retry upload sharding {0} succeed. etag: {1}, uploadid: {2}'.format(self.pausepartnumber, resp.etag, self.uploadid))
        self.pausepartnumber += 1
        self.etaglist.append(resp.etag)

    # finish sharding upload
    logger.info('start finish upload request')
    ret, resp = self.__finishupload()
    if not resp.ok():
        logger.error('multipart upload failed. uploadid:{0}, pausepartnumber: {1}, key: {2} FAIL!!!'.format(self.uploadid, self.pausepartnumber, self.__key))
    else:
        # fixed typo: 'mulitpart' -> 'multipart'
        logger.info('multipart upload succeed. uploadid: {0}, key: {1} SUCCEED!!!'.format(self.uploadid, self.__key))
    return ret, resp