def test_downloadprivate(self):
    """Download the small file from the private bucket and expect HTTP 200."""
    self.downloadufile_handler.set_keys(public_key, private_key)
    # download the small file
    logger.info('start download small file from private bucket')
    # NOTE(review): bucket is private but `isprivate` is not passed here,
    # unlike test_downloadwithrange which passes isprivate=True — confirm
    # the handler's default signs private-bucket requests correctly.
    ret, resp = self.downloadufile_handler.download_file(
        private_bucket, put_key, private_download)
    assert resp.status_code == 200
def test_compareetag(self):
    """Compare the local file's etag against the stored object's etag."""
    result = self.putufile_handler.compare_file_etag(
        PUBLIC_BUCKET, put_small_key, small_local_file)
    # Truthiness test instead of `result == True` (PEP 8 / E712).
    if result:
        logger.info('\netag are the same!')
    else:
        logger.info('\netag are different!')
def test_restore_file(self):
    """Restore (thaw) an archived file in the public bucket."""
    self.restorefile_handler.set_keys(public_key, private_key)
    # restore archive file in public bucket
    logger.info('start restore archive file to public bucket')
    ret, resp = self.restorefile_handler.restore_file(
        public_bucket, mput_archive_key)
    # Use logger instead of print, consistent with the other tests.
    logger.info(resp.error)
    # NOTE(review): a same-named test using UPPER-case credential globals
    # also exists in this file; the later definition shadows the earlier.
    assert resp.status_code == 200
def test_classswitch_file(self):
    """Switch a file's storage class to IA in the private bucket."""
    self.classswitch_handler.set_keys(PUBLIC_KEY, PRIVATE_KEY)
    # file storage class switch to IA in private bucket
    logger.info('start switch file storage class in private bucket')
    ret, resp = self.classswitch_handler.class_switch_file(
        PRIVATE_BUCKET, post_standard_key, IA)
    # Use logger instead of print, consistent with the other tests.
    logger.info(resp.error)
    # NOTE(review): a same-named test using lower-case credential globals
    # also exists in this file; the later definition shadows the earlier.
    assert resp.status_code == 200
def test_classswitch_file(self):
    """Switch a file's storage class to IA in the private bucket."""
    self.classswitch_handler.set_keys(public_key, private_key)
    # file storage class switch to IA in private bucket
    logger.info('start switch file storage class in private bucket')
    ret, resp = self.classswitch_handler.class_switch_file(
        private_bucket, post_standard_key, IA)
    # Use logger instead of print, consistent with the other tests.
    logger.info(resp.error)
    # NOTE(review): a same-named test using UPPER-case credential globals
    # also exists in this file; the later definition shadows the earlier.
    assert resp.status_code == 200
def test_restore_file(self):
    """Restore (thaw) an archived file in the public bucket."""
    self.restorefile_handler.set_keys(PUBLIC_KEY, PRIVATE_KEY)
    # restore archive file in public bucket
    logger.info('start restore archive file to public bucket')
    ret, resp = self.restorefile_handler.restore_file(
        PUBLIC_BUCKET, mput_archive_key)
    # Use logger instead of print, consistent with the other tests.
    logger.info(resp.error)
    # NOTE(review): a same-named test using lower-case credential globals
    # also exists in this file; the later definition shadows the earlier.
    assert resp.status_code == 200
def test_getfilelist(self):
    """List up to 100 files in the bucket; log each key and the next marker."""
    self.getfilelist_hander.set_keys(public_key, private_key)
    prefix = ''
    limit = 100
    marker = ''
    ret, resp = self.getfilelist_hander.getfilelist(
        bucket, prefix=prefix, limit=limit, marker=marker)
    assert resp.status_code == 200
    for item in ret['DataSet']:
        # NOTE(review): encode() logs a bytes object — looks like a
        # Python 2 leftover; confirm whether plain str logging is intended.
        key = item['FileName'].encode('utf-8')
        logger.info(key)
    nextMarker = ret['NextMarker']
    logger.info('NextMarker is {0}'.format(nextMarker))
def test_putufile(self):
    """PUT the small file into both the public and the private bucket."""
    self.putufile_handler.set_keys(public_key, private_key)
    # put small file to public bucket
    logger.info('\nput small file to public bucket')
    ret, resp = self.putufile_handler.putfile(
        public_bucket, put_small_key, small_local_file)
    assert resp.status_code == 200
    # put small file to private bucket
    logger.info('\nput small file to private bucket')
    ret, resp = self.putufile_handler.putfile(
        private_bucket, put_small_key, small_local_file)
    assert resp.status_code == 200
def test_uploadfile(self):
    """Multipart-upload the big file to both buckets with 2 worker threads."""
    self.multipartuploadufile_handler.set_keys(public_key, private_key)
    # upload big file to public bucket
    logger.info('start sharding upload big file to public bucket')
    ret, resp = self.multipartuploadufile_handler.uploadfile(
        public_bucket, sharding_file_key, local_file, maxthread=2)
    # Use logger instead of print, consistent with the other tests.
    logger.info(resp.error)
    assert resp.status_code == 200
    # upload big file to private bucket
    logger.info('start sharding upload big file to private bucket')
    ret, resp = self.multipartuploadufile_handler.uploadfile(
        private_bucket, sharding_file_key, local_file, maxthread=2)
    logger.info(resp.error)
    assert resp.status_code == 200
def test_putufile(self):
    """PUT the small file into both the public and the private bucket."""
    self.putufile_handler.set_keys(PUBLIC_KEY, PRIVATE_KEY)
    # put small file to public bucket
    logger.info('\nput small file to public bucket')
    ret, resp = self.putufile_handler.putfile(
        PUBLIC_BUCKET, put_small_key, small_local_file)
    assert resp.status_code == 200
    # put small file to private bucket
    logger.info('\nput small file to private bucket')
    ret, resp = self.putufile_handler.putfile(
        PRIVATE_BUCKET, put_small_key, small_local_file)
    # NOTE(review): a same-named test using lower-case credential globals
    # also exists in this file; the later definition shadows the earlier.
    assert resp.status_code == 200
def test_uploadstream(self):
    """Multipart-upload the binary stream `bio` to both buckets."""
    self.multipartuploadufile_handler.set_keys(public_key, private_key)
    # upload binary data stream to public bucket
    logger.info('start upload stream to public bucket')
    ret, resp = self.multipartuploadufile_handler.uploadstream(
        public_bucket, sharding_stream_key, bio, maxthread=2)
    # Use logger instead of print, consistent with the other tests.
    logger.info(resp.error)
    assert resp.status_code == 200
    # upload binary data stream to private bucket
    logger.info('start upload stream to private bucket')
    # rewind the shared stream before re-uploading it
    bio.seek(0, os.SEEK_SET)
    ret, resp = self.multipartuploadufile_handler.uploadstream(
        private_bucket, sharding_stream_key, bio, maxthread=2)
    logger.info(resp.error)
    assert resp.status_code == 200
def putfile_thread(self, sem, bucket, key, local_file, header=None):
    """Upload `local_file` as `key`, retrying up to 3 times.

    Releases `sem` when finished regardless of outcome, so the caller
    waiting on the semaphore can never dead-wait on a failed worker.
    """
    try:
        for attempt in range(1, 4):
            try:
                ret, resp = self.putufile_handler.putfile(
                    bucket, key, local_file, header)
                if resp.status_code != 200:
                    logger.error(
                        'put file {0} failed. err: {1}, retry {2}'.format(
                            key, resp, attempt))
                    continue
                logger.info('put file {0} succeed.'.format(key))
                break
            except Exception as e:
                # Best-effort retry: log and fall through to the next attempt.
                logger.error('put file {0} failed. exception: {1}'.format(
                    key, e))
    finally:
        # Always release, even if an unexpected error escapes the loop.
        sem.release()
def test_upload_ia_ufile(self):
    """PUT an infrequent-access (IA) file into both buckets."""
    self.putufile_handler.set_keys(public_key, private_key)
    # request the IA storage class via the upload header
    header = dict()
    header['X-Ufile-Storage-Class'] = IA
    # upload file to public bucket
    logger.info('\nstart put small file to public bucket')
    ret, resp = self.putufile_handler.putfile(
        public_bucket, put_ia_key, small_local_file, header)
    assert resp.status_code == 200
    # put small file to private bucket
    logger.info('\nstart put small file to private bucket')
    ret, resp = self.putufile_handler.putfile(
        private_bucket, put_ia_key, small_local_file, header)
    assert resp.status_code == 200
def test_upload_standard_ufile(self):
    """POST a STANDARD-class file into both buckets."""
    self.postfile_handler.set_keys(public_key, private_key)
    # request the STANDARD storage class via the upload header
    header = dict()
    header['X-Ufile-Storage-Class'] = STANDARD
    # post small file to public bucket
    logger.info('\nstart post small file to public bucket')
    ret, resp = self.postfile_handler.postfile(
        public_bucket, post_standard_key, small_local_file, header)
    assert resp.status_code == 200
    # post small file to private bucket
    logger.info('\nstart post small file to private bucket')
    ret, resp = self.postfile_handler.postfile(
        private_bucket, post_standard_key, small_local_file, header)
    assert resp.status_code == 200
def test_upload_archive_ufile(self):
    """Multipart-upload an ARCHIVE-class (cold storage) file into both buckets."""
    self.multipartuploadufile_handler.set_keys(public_key, private_key)
    # request the ARCHIVE storage class via the upload header
    header = dict()
    header['X-Ufile-Storage-Class'] = ARCHIVE
    # upload big file to public bucket
    logger.info('start sharding upload big file to public bucket')
    ret, resp = self.multipartuploadufile_handler.uploadfile(
        public_bucket, mput_archive_key, big_local_file, header=header)
    # Use logger instead of print, consistent with the other tests.
    logger.info(resp.error)
    assert resp.status_code == 200
    # upload big file to private bucket
    logger.info('start sharding upload big file to private bucket')
    ret, resp = self.multipartuploadufile_handler.uploadfile(
        private_bucket, mput_archive_key, big_local_file, header=header)
    logger.info(resp.error)
    assert resp.status_code == 200
def test_listobjects(self):
    """List objects with a '/' delimiter; log keys, common prefixes, marker."""
    self.listobjects_hander.set_keys(public_key, private_key)
    prefix = ''
    marker = ''
    maxkeys = 100
    ret, resp = self.listobjects_hander.listobjects(
        bucket, prefix=prefix, maxkeys=maxkeys, marker=marker, delimiter='/')
    assert resp.status_code == 200
    logger.info('Contents:')
    for item in ret['Contents']:
        # NOTE(review): encode() logs bytes — Python 2 leftover? confirm.
        key = item['Key'].encode('utf-8')
        logger.info('key: {0}'.format(key))
    logger.info('CommonPrefixes: ')
    for item in ret['CommonPrefixes']:
        pre = item['Prefix'].encode('utf-8')
        logger.info('prefix: {0}'.format(pre))
    nextMarker = ret['NextMarker']
    logger.info('NextMarker is {0}'.format(nextMarker))
def test_downloadwithrange(self):
    """Range-download bytes 0-15 from both buckets; expect HTTP 206."""
    self.downloadufile_handler.set_keys(public_key, private_key)
    logger.info('start download with range condition from public bucket')
    ret, resp = self.downloadufile_handler.download_file(
        public_bucket, put_range_key, public_range_download,
        isprivate=False, expires=get_default('expires'),
        content_range=(0, 15), header=None)
    # 206 Partial Content is the expected status for a range request.
    assert resp.status_code == 206
    logger.info('start download with range condition from private bucket')
    ret, resp = self.downloadufile_handler.download_file(
        private_bucket, put_range_key, private_range_download,
        isprivate=True, expires=get_default('expires'),
        content_range=(0, 15), header=None)
    assert resp.status_code == 206
def test_putstream(self):
    """PUT the binary stream `bio` into both buckets."""
    self.putufile_handler.set_keys(public_key, private_key)
    logger.info('\nput stream to public bucket')
    ret, resp = self.putufile_handler.putstream(
        public_bucket, put_stream_key, bio)
    assert resp.status_code == 200
    # rewind the shared stream before re-uploading it
    bio.seek(0, os.SEEK_SET)
    logger.info('\nput stream to private bucket')
    ret, resp = self.putufile_handler.putstream(
        private_bucket, put_stream_key, bio)
    logger.info('response code:{0}'.format(resp.status_code))
    assert resp.status_code == 200
def test_putstream(self):
    """PUT the binary stream `bio` into both buckets."""
    self.putufile_handler.set_keys(PUBLIC_KEY, PRIVATE_KEY)
    logger.info('\nput stream to public bucket')
    ret, resp = self.putufile_handler.putstream(
        PUBLIC_BUCKET, put_stream_key, bio)
    assert resp.status_code == 200
    # rewind the shared stream before re-uploading it
    bio.seek(0, os.SEEK_SET)
    logger.info('\nput stream to private bucket')
    ret, resp = self.putufile_handler.putstream(
        PRIVATE_BUCKET, put_stream_key, bio)
    logger.info('response code:{0}'.format(resp.status_code))
    # NOTE(review): a same-named test using lower-case credential globals
    # also exists in this file; the later definition shadows the earlier.
    assert resp.status_code == 200
def test_deleteufile(self):
    """Delete `delete_key` from both buckets, logging the outcome.

    The status-code assertion is intentionally disabled — the test only
    logs the response, presumably because the key may already be absent.
    """
    self.deleteufile_handler.set_keys(public_key, private_key)
    # delete file from public bucket
    logger.info('\ndelete file from public bucket')
    ret, resp = self.deleteufile_handler.deletefile(
        public_bucket, delete_key)
    logger.info(resp.error)
    logger.info(resp.status_code)
    # assert resp.status_code == 204  # disabled; see docstring
    # delete file from private bucket
    logger.info('\ndelete file from private bucket')
    ret, resp = self.deleteufile_handler.deletefile(
        private_bucket, delete_key)
    logger.info(resp.error)
    logger.info(resp.status_code)
def test_renamefile(self):
    """Rename `key` to `newkey` in the public bucket; expect HTTP 200."""
    self.renameufile_handler.set_keys(public_key, private_key)
    logger.info('start rename file')
    ret, resp = self.renameufile_handler.rename(
        public_bucket, key, newkey, force)
    assert resp.status_code == 200
def test_uploadhitnonexistfile(self):
    """Upload-hit (dedup check) on a non-existent file must return 404."""
    self.uploadhitufile_handler.set_keys(public_key, private_key)
    logger.info('start uploadhit existfile')
    ret, resp = self.uploadhitufile_handler.uploadhit(
        public_bucket, nonexistkey, nonexistfile)
    # 404 means no existing object matched, which is the expected miss.
    assert resp.status_code == 404
def test_copyfile(self):
    """Server-side copy of srcbucket/srckey to public_bucket/key."""
    self.copyufile_handler.set_keys(public_key, private_key)
    logger.info('start copy file')
    ret, resp = self.copyufile_handler.copy(
        public_bucket, key, srcbucket, srckey)
    assert resp.status_code == 200