def _init():
    session = requests.Session()
    adapter = requests.adapters.HTTPAdapter(
        pool_connections=config.get_default('connection_pool'),
        pool_maxsize=config.get_default('connection_pool'),
        max_retries=config.get_default('connection_retries'))
    session.mount('http://', adapter)
    global _session
    _session = session

def _form_put(up_token, key, data, params, mime_type, crc, is_file=False, progress_handler=None):
    fields = {}
    if params:
        for k, v in params.items():
            fields[k] = str(v)
    if crc:
        fields['crc32'] = crc
    if key is not None:
        fields['key'] = key
    fields['token'] = up_token

    url = 'http://' + config.get_default('default_up_host') + '/'
    name = key if key else 'filename'

    r, info = http._post_file(url, data=fields, files={'file': (name, data, mime_type)})
    # Retry once when the first attempt is retryable; on a connection failure,
    # switch to the backup upload host.
    if r is None and info.need_retry():
        if info.connect_failed():
            url = 'http://' + config.UPBACKUP_HOST + '/'
        # Rewind the stream before re-sending its content.
        if hasattr(data, 'seek'):
            data.seek(0)
        r, info = http._post_file(url, data=fields, files={'file': (name, data, mime_type)})
    return r, info

def _get(url, params, auth):
    try:
        r = requests.get(
            url,
            params=params,
            auth=RequestsAuth(auth) if auth is not None else None,
            timeout=config.get_default('connection_timeout'),
            headers=_headers)
    except Exception as e:
        return None, ResponseInfo(None, e)
    return __return_wrapper(r)

def _post(url, data, files, auth):
    if _session is None:
        _init()
    try:
        r = _session.post(
            url,
            data=data,
            files=files,
            auth=auth,
            headers=_headers,
            timeout=config.get_default('connection_timeout'))
    except Exception as e:
        return None, ResponseInfo(None, e)
    return __return_wrapper(r)

def list(self, bucket, prefix=None, marker=None, limit=None, delimiter=None):
    """List files by prefix.

    1. On the first request, pass marker=None.
    2. Regardless of the error value, always check whether ret.get('items') has content first.
    3. When there is no more data, err returns EOF and marker comes back as None
       (but do not rely on this to decide whether the listing has ended).

    See the specification at:
    http://developer.qiniu.com/docs/v6/api/reference/rs/list.html

    Args:
        bucket:    bucket name
        prefix:    key prefix to list
        marker:    marker returned by the previous request, used for pagination
        limit:     maximum number of entries returned per request
        delimiter: directory delimiter

    Returns:
        a tuple (ret, eof, info):
        ret  - a dict such as {"hash": "<Hash string>", "key": "<Key string>"}
        eof  - True when the listing is complete
        info - a ResponseInfo object
    """
    options = {
        'bucket': bucket,
    }
    if marker is not None:
        options['marker'] = marker
    if limit is not None:
        options['limit'] = limit
    if prefix is not None:
        options['prefix'] = prefix
    if delimiter is not None:
        options['delimiter'] = delimiter

    url = 'http://{0}/list'.format(config.get_default('default_rsf_host'))
    ret, info = self.__get(url, options)
    eof = False
    if ret and not ret.get('marker'):
        eof = True
    return ret, eof, info

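# Usage sketch (assumption: `bucket_manager` is an instance of the class defining
# list(); the bucket name and prefix are placeholders). It pages through a bucket
# by feeding each returned marker into the next call until eof is True.
def list_all_keys(bucket_manager, bucket_name, prefix=None):
    marker = None
    keys = []
    while True:
        ret, eof, info = bucket_manager.list(bucket_name, prefix=prefix, marker=marker, limit=100)
        # Always inspect ret.get('items') first, regardless of the error value.
        if ret is not None:
            keys.extend(item['key'] for item in ret.get('items', []))
            marker = ret.get('marker')
        if eof or ret is None:
            break
    return keys
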
def upload(self):
    """Upload the stream block by block, then assemble the blocks into a file."""
    self.blockStatus = []
    host = config.get_default('default_up_host')
    for block in _file_iter(self.input_stream, config._BLOCK_SIZE):
        length = len(block)
        crc = crc32(block)
        ret, info = self.make_block(block, length, host)
        # Give up only when the block failed and the error is not retryable.
        if ret is None and not info.need_retry():
            return ret, info
        # On a connection failure, switch to the backup upload host.
        if info.connect_failed():
            host = config.UPBACKUP_HOST
        # Retry once if the request is retryable or the server-side CRC does not match.
        if info.need_retry() or crc != ret['crc32']:
            ret, info = self.make_block(block, length, host)
            if ret is None or crc != ret['crc32']:
                return ret, info
        self.blockStatus.append(ret)
        if callable(self.progress_handler):
            self.progress_handler(
                (len(self.blockStatus) - 1) * config._BLOCK_SIZE + length,
                self.size)
    return self.make_file(host)

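# Minimal sketch of a block iterator like the _file_iter consumed above (assumption:
# the real helper is defined elsewhere in the SDK). It yields fixed-size chunks from
# a file-like object until the stream is exhausted.
def _file_iter_sketch(input_stream, size):
    block = input_stream.read(size)
    while block:
        yield block
        block = input_stream.read(size)
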
def execute(self, key, fops, force=None):
    """Trigger persistent data processing (pfop).

    Args:
        key:   source file to process
        fops:  list of processing operations; see
               http://developer.qiniu.com/docs/v6/api/reference/fop/
        force: whether to force re-execution of the persistent processing

    Returns:
        a dict holding the persistentId of the job, such as
        {"persistentId": "5476bedf7823de4068253bae"},
        and a ResponseInfo object
    """
    ops = ';'.join(fops)
    data = {'bucket': self.bucket, 'key': key, 'fops': ops}
    if self.pipeline:
        data['pipeline'] = self.pipeline
    if self.notify_url:
        data['notifyURL'] = self.notify_url
    if force == 1:
        data['force'] = 1
    url = 'http://{0}/pfop'.format(config.get_default('default_api_host'))
    return http._post_with_auth(url, data, self.auth)

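# Usage sketch (assumption: `pfop` is an instance of the class defining execute(),
# constructed elsewhere with an Auth object, a bucket and optionally a pipeline).
# The fops string follows the fop specification linked in the docstring; the key
# and the avthumb operation below are illustrative placeholders.
def transcode_to_mp4(pfop, key):
    fops = ['avthumb/mp4/s/640x360/vb/1.25m']
    ret, info = pfop.execute(key, fops, force=1)
    if ret is None:
        return None
    return ret['persistentId']  # poll this id to track the job status
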
def batch(self, operations):
    """Batch operation.

    Perform multiple resource management operations in a single request;
    see the specification at:
    http://developer.qiniu.com/docs/v6/api/reference/rs/batch.html

    Args:
        operations: list of resource management operation strings

    Returns:
        the decoded response, a list of per-operation results such as:
        [
            { "code": <HttpCode int>, "data": <Data> },
            { "code": <HttpCode int> },
            { "code": <HttpCode int> },
            { "code": <HttpCode int> },
            { "code": <HttpCode int>, "data": { "error": "<ErrorMessage string>" } },
            ...
        ]
        and a ResponseInfo object
    """
    url = 'http://{0}/batch'.format(config.get_default('default_rs_host'))
    return self.__post(url, dict(op=operations))

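# Usage sketch (assumption: `bucket_manager` is an instance of the class defining
# batch(); 'my-bucket' and the keys are placeholders). Each operation is a string
# such as "stat/<EncodedEntryURI>", where the entry URI is the URL-safe base64
# encoding of "bucket:key" as described in the batch specification linked above.
import base64

def build_stat_op(bucket_name, key):
    entry = '{0}:{1}'.format(bucket_name, key).encode('utf-8')
    return 'stat/' + base64.urlsafe_b64encode(entry).decode('utf-8')

def batch_stat(bucket_manager, bucket_name, keys):
    ops = [build_stat_op(bucket_name, k) for k in keys]
    ret, info = bucket_manager.batch(ops)
    # Each entry carries its own HTTP code; data holds the stat result or the error detail.
    return [(r.get('code'), r.get('data')) for r in (ret or [])]
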
def __io_do(self, operation, *args):
    return self.__server_do(config.get_default('default_io_host'), operation, *args)