def bucketlist(self, bucket, prefix=None, marker=None, limit=None, mode=None, starttime=None, endtime=None):
    """List resources in *bucket*, with optional filters.

    Optional query parameters (marker, limit, prefix, mode, start/end
    time) are added only when supplied; limit is validated against
    self._limit_check() and rejected with ValueError otherwise.
    """
    options = {'bucket': bucket}
    if marker:
        options['marker'] = marker
    if limit:
        # Reject any limit outside the range the backend accepts.
        if limit not in self._limit_check():
            error('Invalid limit ! Please redefine limit')
            raise ValueError("Invalid limit")
        options['limit'] = limit
    if prefix:
        options['prefix'] = urlsafe_base64_encode(prefix)
    if mode in (0, 1):
        options['mode'] = mode
    if starttime:
        options['startTime'] = starttime
    if endtime:
        options['endTime'] = endtime
    url = https_check(self._make_url('list', options))
    if options:
        debug('List options is %s' % options)
    debug('List bucket %s' % bucket)
    return _get(url=url, headers=super(BucketManager, self)._gernerate_headers(url))
def _make_block(self, offset):
    # Upload the file block that starts at byte *offset*: POST the first
    # bput-sized chunk to the mkblk URL, retrying retryable failures up
    # to cfg.mkblk_retries times (default 3 when the setting is not an
    # int), then upload the rest of the block via _make_bput on success.
    url, size = self._mlk_url(offset)
    url = https_check(url)
    headers = self.__generate_headers()
    try:
        mkblk_retries = int(self.cfg.mkblk_retries)
    except ValueError as e:
        warning(
            'parameter mkblk_retries is invalid, so use default value 3')
        mkblk_retries = 3
    with open(self.path, 'rb') as f:
        # First chunk of this block, read at the block's offset.
        bput = readfile(f, offset, self.bput_size)
        blkcode, blktext, _ = _post(url=url, headers=headers, data=bput)
        while mkblk_retries and self.__need_retry(blkcode):
            blkcode, blktext, _ = _post(url=url, headers=headers, data=bput)
            mkblk_retries -= 1
        if blkcode != 200:
            # NOTE(review): assumes the failure payload is a dict with a
            # 'message' key — TODO confirm _post's error body shape.
            result = [offset, blkcode, blktext['message']]
            debug('make block fail,code :{0},message :{1}'.format(
                blkcode, blktext))
        else:
            # Continue uploading the remaining chunks of this block.
            result = self._make_bput(f, blktext['ctx'], offset)
    self._record_upload_progress(result, size)
    return blkcode
def _fmgr_commons(self, reqdata, method):
    """POST *reqdata* to the fmgr endpoint named *method*.

    Builds '<mgr_host>/fmgr/<method>', signs the request body via the
    parent class's header generator, and returns _post's result.
    """
    url = https_check('{0}/fmgr/{1}'.format(self.mgr_host, method))
    debug('Request body is: %s' % (reqdata))
    # Fixed typo in the log message ('opration' -> 'operation').
    debug('Start to execute operation: %s' % method)
    return _post(url=url, data=reqdata,
                 headers=super(Fmgr, self)._gernerate_headers(url, body=reqdata))
def setdeadline(self, bucket, key, deadline):
    """Set the lifecycle deadline of *key* in *bucket*."""
    url = https_check('{0}/setdeadline'.format(self.mgr_host))
    param = {
        'bucket': urlsafe_base64_encode(bucket),
        'key': urlsafe_base64_encode(key),
        'deadline': deadline,
    }
    body = super(BucketManager, self)._params_parse(param)
    debug('Set deadline of %s to %s' % (key, deadline))
    request_headers = super(BucketManager, self)._gernerate_headers(url, body)
    return _post(url=url, data=body, headers=request_headers)
def image_detect(self, image, dtype, bucket):
    """Run detection of type *dtype* on *image* stored in *bucket*."""
    url = https_check('{0}/imageDetect'.format(self.mgr_host))
    param = {
        'image': urlsafe_base64_encode(image),
        'type': dtype,
        'bucket': bucket,
    }
    body = super(BucketManager, self)._params_parse(param)
    debug('image detect for %s to %s' % (image, dtype))
    request_headers = super(BucketManager, self)._gernerate_headers(url, body)
    return _post(url=url, data=body, headers=request_headers)
def _upload(self, url, encoder, headers, f):
    """POST *encoder* to *url*, closing *f* in every outcome.

    Returns:
        (-1, exception)                  on connection or other errors
        (status, location, {x-reqid})    on a 303 redirect
        (status, text, {x-reqid})        on any other response
        (status, text)                   when x-reqid (or Location) is absent

    Fixes vs. original: the file handle is now closed on the
    ConnectionError path too (it was leaked); the trailing bare
    ``except:`` is narrowed to KeyError (only the header lookups in
    that suite can raise it); the four duplicated post calls are
    collapsed into one keyword-argument dict.
    """
    url = https_check(url)
    # Idiomatic truthiness test (was `== True`); when keepalive is off,
    # also ask the server to close the connection.
    if not self.cfg.keepalive:
        _session.keep_alive = False
        headers['Connection'] = 'close'
    post_kwargs = {
        'url': url,
        'headers': headers,
        'data': encoder,
        'verify': bool(self.cfg.isverify),
    }
    if self.cfg.returnUrl:
        # Caller wants the redirect target, not the followed response.
        post_kwargs['allow_redirects'] = False
    try:
        r = _session.post(**post_kwargs)
    except requests.ConnectionError as conn_error:
        debug('Url connection abnormal,please check url!')
        f.close()  # bug fix: this path previously leaked the handle
        return -1, conn_error
    except Exception as e:
        f.close()
        debug('Request url:' + url)
        debug('Headers:')
        debug(headers)
        debug('Exception:')
        debug(e)
        return -1, e
    f.close()
    try:
        r_header = {'x-reqid': r.headers['x-reqid']}
        if r.status_code == 303:
            return r.status_code, r.headers['Location'], r_header
        return r.status_code, r.text, r_header
    except KeyError:
        # Missing x-reqid/Location header: fall back to the two-tuple
        # form, as the original bare except did.
        return r.status_code, r.text
def _make_file(self, ctx_string):
    """POST the mkfile request with *ctx_string* as the body, retrying
    retryable failures up to cfg.mkfile_retries times (default 3)."""
    try:
        retries_left = int(self.cfg.mkfile_retries)
    except ValueError as e:
        warning(
            u"parameter mkfile_retries is invalid, so use default value 3")
        retries_left = 3
    url = https_check(self.__file_url())
    headers = self.__generate_headers()
    code, text, logid = _post(url=url, headers=headers, data=ctx_string)
    while retries_left and self.__need_retry(code):
        debug('make file fail.retry upload')
        code, text, logid = _post(url=url, headers=headers, data=ctx_string)
        retries_left -= 1
    return code, text, logid
def execute(self, fops, bucket, key, force=0, separate=0, notifyurl=None):
    """Submit a persistent fops job against bucket:key."""
    data = {
        'bucket': urlsafe_base64_encode(bucket),
        'key': urlsafe_base64_encode(key),
        'fops': urlsafe_base64_encode(fops),
    }
    if notifyurl is not None:
        data['notifyURL'] = urlsafe_base64_encode(notifyurl)
    # Optional flags are only sent when explicitly set to 1.
    for flag_name, flag_value in (('force', force), ('separate', separate)):
        if flag_value == 1:
            data[flag_name] = 1
    url = https_check('{0}/fops'.format(self.mgr_host))
    headers, reqdata = self._gernerate_headers(url, data)
    debug('PersistentFops is %s' % fops)
    debug('Start to post persistentFops')
    return _post(url=url, data=reqdata, headers=headers)
def _make_file(self):
    """Assemble the uploaded blocks into the final file (mkfile) and
    clear the local upload-progress record on completion."""
    try:
        attempts = int(self.cfg.mkfile_retries)
    except ValueError as e:
        warning(
            u"parameter mkfile_retries is invalid, so use default value 3")
        attempts = 3
    blkstatus = self._get_blkstatus()
    url = https_check(self.__file_url())
    payload = ','.join(blkstatus)
    headers = self.__generate_headers()
    code, text, logid = _post(url=url, headers=headers, data=payload)
    while attempts and self.__need_retry(code):
        code, text, logid = _post(url=url, headers=headers, data=payload)
        attempts -= 1
    self.recorder.delete_upload_record()
    return code, text, logid
def bucket_statistics(self, name, stype, startdate, enddate, isListDetails='false'):
    """Query statistics of type *stype* for bucket *name* over a date range.

    Bug fix: the isListDetails parameter was accepted but silently
    dropped from the query; it is now sent, consistent with the
    sibling bucket_stat method.
    """
    options = {
        'name': urlsafe_base64_encode(name),
        'type': stype,
        'startdate': startdate,
        'enddate': enddate,
        'isListDetails': isListDetails,
    }
    url = https_check(self._make_url('bucket/statistics', options))
    debug('Now get bucket %s of %s from %s to %s' %
          (stype, name, startdate, enddate))
    return _get(url=url, headers=super(BucketManager, self)._gernerate_headers(url))
def bucket_stat(self, name, startdate, enddate, isListDetails='false', storageType=None):
    """Query storage statistics of bucket *name* between two dates."""
    options = {
        'name': urlsafe_base64_encode(name),
        'startdate': startdate,
        'enddate': enddate,
        'isListDetails': isListDetails,
    }
    if storageType:
        options['storageType'] = str(storageType)
    url = https_check(self._make_url('bucket/stat', options))
    debug('Now check storage of %s from %s to %s' %
          (name, startdate, enddate))
    return _get(url=url, headers=super(BucketManager, self)._gernerate_headers(url))
def wslive_list(self, channelname, startTime, endTime, bucket, start=None, limit=None):
    """List recorded live files of *channelname* in *bucket* within a time window."""
    query = {
        'channelname': channelname,
        'startTime': startTime,
        'endTime': endTime,
        'bucket': bucket,
    }
    # Pagination parameters are optional and omitted when None.
    for key, value in (('start', start), ('limit', limit)):
        if value is not None:
            query[key] = value
    url = https_check(self._make_list_url(query))
    if query is not None:
        debug('List params is %s' % query)
    debug('List bucket %s' % bucket)
    return _get(url=url, headers=super(WsLive, self)._gernerate_headers(url))
def _make_move_url(self, srcbucket, srckey, dstbucket, dstkey):
    """Build the move URL for src bucket:key -> dst bucket:key."""
    src, dst = (
        urlsafe_base64_encode('%s:%s' % pair)
        for pair in ((srcbucket, srckey), (dstbucket, dstkey))
    )
    return https_check('{0}/move/{1}/{2}'.format(self.mgr_host, src, dst))
def status(self, persistentId):
    """Fetch the status of an fmgr task by its persistentId."""
    raw_url = '{0}/fmgr/status?persistentId={1}'.format(
        self.mgr_host, persistentId)
    checked_url = https_check(raw_url)
    debug('Start to get status of persistentId: %s' % persistentId)
    return _get(url=checked_url)
def stat(self, bucket, key):
    """Retrieve the metadata (stat) of bucket:key."""
    target = https_check(self._make_filestat_url(bucket, key))
    debug('Start to get the stat of %s:%s' % (bucket, key))
    request_headers = super(BucketManager, self)._gernerate_headers(target)
    return _get(url=target, headers=request_headers)
def delete(self, bucket, key):
    """Delete the resource bucket:key."""
    target = https_check(self._make_delete_url(bucket, key))
    debug('Start to post request of delete %s:%s' % (bucket, key))
    request_headers = super(BucketManager, self)._gernerate_headers(target)
    return _post(url=target, headers=request_headers)
def bucket_list(self):
    """List all buckets owned by the current account."""
    listing_url = https_check('{0}/bucket/list'.format(self.mgr_host))
    debug('Now start to list buckets')
    return _get(url=listing_url,
                headers=super(BucketManager, self)._gernerate_headers(listing_url))
def _make_bput_post(self, ctx, bputnum, bput_next):
    """POST the next chunk (bput) of the block identified by *ctx*."""
    # Byte offset of this chunk within its block.
    offset_in_block = bputnum * self.bput_size
    url = https_check(self.__bput_url(ctx, offset_in_block))
    return _post(url=url, headers=self.__generate_headers(), data=bput_next)
def fops_status(self, persistentId):
    """Fetch the status of a persistent fops task by its persistentId."""
    raw_url = '{0}/status/get/prefop?persistentId={1}'.format(
        self.mgr_host, persistentId)
    checked_url = https_check(raw_url)
    debug('Start to get status of persistentId: %s' % persistentId)
    return _get(url=checked_url)