def chunk_delete_many(self, chunks, cid=None,
                      concurrency=PARALLEL_CHUNKS_DELETE,
                      **kwargs):
    """
    Delete several chunks in parallel (up to `concurrency` at a time).

    :param chunks: iterable of chunk descriptions (dicts with a 'url' key).
    :param cid: optional container ID, forwarded as a header purely so the
        rawx access log is more informative.
    :param concurrency: maximum number of simultaneous DELETE requests.
    :rtype: `list` of either `urllib3.response.HTTPResponse`
        or `urllib3.exceptions.HTTPError`, with an extra
        "chunk" attribute.
    """
    headers = kwargs['headers'].copy()
    if cid is not None:
        # This is only to get a nice access log
        headers['X-oio-chunk-meta-container-id'] = cid
    # Fall back to the default chunk timeout when none (or a falsy one)
    # was provided by the caller.
    timeout = kwargs.get('timeout') or urllib3.Timeout(CHUNK_TIMEOUT)

    def _delete_one(chunk_):
        """Delete a single chunk; never raise, return the error instead."""
        try:
            resp = self.http_pool.request(
                "DELETE", self.resolve_url(chunk_['url']),
                headers=headers, timeout=timeout)
            resp.chunk = chunk_
            return resp
        except urllib3.exceptions.HTTPError as ex:
            ex.chunk = chunk_
            return ex

    pile = GreenPile(concurrency)
    for chunk in chunks:
        pile.spawn(_delete_one, chunk)
    # Keep only truthy results (same filtering as the original loop).
    return [result for result in pile if result]
def _request(self, method, url, connection_timeout=None, read_timeout=None,
             **kwargs):
    """
    Send a request to a chunk through the internal connection pool.

    When the caller did not supply a `timeout` keyword, build a
    `urllib3.Timeout` from `connection_timeout` and `read_timeout`,
    defaulting to the module-level CONNECTION_TIMEOUT and CHUNK_TIMEOUT.
    """
    if 'timeout' not in kwargs:
        # Resolve each half of the timeout separately, then combine.
        connect_to = (CONNECTION_TIMEOUT if connection_timeout is None
                      else connection_timeout)
        read_to = CHUNK_TIMEOUT if read_timeout is None else read_timeout
        kwargs['timeout'] = urllib3.Timeout(connect=connect_to,
                                            read=read_to)
    return self.http_pool.request(
        method, self.resolve_url(url), **kwargs)
def chunk_head(self, url, **kwargs):
    """
    Perform a HEAD request on a chunk.

    :param url: URL of the chunk to request.
    :keyword xattr: when False, ask the rawx not to read
        extended attributes of the chunk.
    :keyword check_hash: when True, ask the rawx to validate
        checksum of the chunk.

    :returns: a `dict` with chunk metadata (empty when xattr is False).
    :raises: an exception translated from the HTTP error or from the
        non-200 response status.
    """
    _xattr = bool(kwargs.get('xattr', True))
    url = self.resolve_url(url)
    headers = kwargs['headers'].copy()
    headers[FETCHXATTR_HEADER] = _xattr
    if bool(kwargs.get('check_hash', False)):
        headers[CHECKHASH_HEADER] = True
    timeout = kwargs.get('timeout')
    if not timeout:
        timeout = urllib3.Timeout(CHUNK_TIMEOUT)
    try:
        resp = self.http_pool.request('HEAD', url,
                                      headers=headers,
                                      timeout=timeout)
    except urllib3.exceptions.HTTPError as ex:
        oio_exception_from_httperror(ex,
                                     reqid=headers[REQID_HEADER],
                                     url=url)
        # Defensive fix: oio_exception_from_httperror is expected to
        # raise. If it ever returns, re-raise the original error here
        # instead of falling through to an UnboundLocalError on `resp`.
        raise
    if resp.status == 200:
        if not _xattr:
            return dict()
        return extract_headers_meta(resp.headers)
    else:
        raise exc.from_response(resp)
def _direct_request(self, method, url, headers=None, data=None, json=None,
                    params=None, admin_mode=False, pool_manager=None,
                    **kwargs):
    """
    Make an HTTP request.

    :param method: HTTP method to use (e.g. "GET")
    :type method: `str`
    :param url: URL to request
    :type url: `str`
    :keyword admin_mode: allow operations on slave or worm namespaces
    :type admin_mode: `bool`
    :keyword deadline: deadline for the request, in monotonic time.
        Supersedes `read_timeout`.
    :type deadline: `float` seconds
    :keyword timeout: optional timeout for the request (in seconds).
        May be a `urllib3.Timeout(connect=connection_timeout,
        read=read_timeout)`.
        This method also accepts `connection_timeout` and `read_timeout`
        as separate arguments.
    :type timeout: `float` or `urllib3.Timeout`
    :keyword headers: optional headers to add to the request
    :type headers: `dict`

    :raise oio.common.exceptions.OioTimeout: in case of read, write
    or connection timeout
    :raise oio.common.exceptions.OioNetworkException: in case of
    connection error
    :raise oio.common.exceptions.OioException: in other case of HTTP error
    :raise oio.common.exceptions.ClientException: in case of HTTP status
    code >= 400
    """
    # Filter arguments that are not recognized by Requests
    out_kwargs = {k: v for k, v in iteritems(kwargs)
                  if k in URLLIB3_REQUESTS_KWARGS}

    # Ensure headers are all strings
    if headers:
        out_headers = {k: text_type(v) for k, v in headers.items()}
    else:
        out_headers = dict()
    if self.admin_mode or admin_mode:
        out_headers[ADMIN_HEADER] = '1'

    # Look for a request deadline, deduce the timeout from it.
    # The explicit `read_timeout` keyword, if smaller, still wins.
    if kwargs.get('deadline', None) is not None:
        to = deadline_to_timeout(kwargs['deadline'], True)
        to = min(to, kwargs.get('read_timeout', to))
        out_kwargs['timeout'] = urllib3.Timeout(
            connect=kwargs.get('connection_timeout', CONNECTION_TIMEOUT),
            read=to)
        # Shorten the deadline by 1% to compensate for the time spent
        # connecting and reading response.
        # (990000.0 = 0.99 * 1e6: the header carries microseconds.)
        out_headers[TIMEOUT_HEADER] = int(to * 990000.0)

    # Ensure there is a timeout
    if 'timeout' not in out_kwargs:
        out_kwargs['timeout'] = urllib3.Timeout(
            connect=kwargs.get('connection_timeout', CONNECTION_TIMEOUT),
            read=kwargs.get('read_timeout', READ_TIMEOUT))
    if TIMEOUT_HEADER not in out_headers:
        to = out_kwargs['timeout']
        if isinstance(to, urllib3.Timeout):
            to = to.read_timeout
        else:
            to = float(to)
        # Advertise the read timeout to the service, in microseconds.
        out_headers[TIMEOUT_HEADER] = int(to * 1000000.0)

    # Convert json and add Content-Type
    # NOTE: `json` is a parameter name here; the json module is
    # imported as `jsonlib` to avoid the clash.
    if json:
        out_headers["Content-Type"] = "application/json"
        data = jsonlib.dumps(json)

    # Trigger performance measurments
    perfdata = kwargs.get('perfdata', self.perfdata)
    if perfdata is not None:
        out_headers[PERFDATA_HEADER] = 'enabled'

    out_kwargs['headers'] = out_headers
    out_kwargs['body'] = data

    # Add query string
    if params:
        out_param = []
        for k, v in params.items():
            if v is not None:
                # Encode text values as UTF-8 bytes before urlencode
                # (Python 2/3 compatibility via text_type).
                if isinstance(v, text_type):
                    v = text_type(v).encode('utf-8')
                out_param.append((k, v))
        encoded_args = urlencode(out_param)
        url += '?' + encoded_args

    if not pool_manager:
        pool_manager = self.pool_manager

    def _reraise(exc_type, exc_value):
        # Re-raise as an oio exception, tagging the request ID so the
        # failure can be correlated with server-side logs.
        reqid = out_headers.get('X-oio-req-id')
        exceptions.reraise(exc_type, exc_value, "reqid=%s" % reqid)

    try:
        resp = pool_manager.request(method, url, **out_kwargs)
        body = resp.data
        if body:
            # Best effort: keep the raw body when it is not JSON.
            try:
                body = jsonlib.loads(body)
            except ValueError:
                pass
        if perfdata is not None and PERFDATA_HEADER in resp.headers:
            # Header format: comma-separated "key=microseconds" pairs;
            # accumulate each value (converted to seconds) in perfdata.
            for header_val in resp.headers[PERFDATA_HEADER].split(','):
                kv = header_val.split('=', 1)
                pdat = perfdata.get(kv[0], 0.0) + float(kv[1]) / 1000000.0
                perfdata[kv[0]] = pdat
    except MaxRetryError as exc:
        # Inspect the underlying reason to map to the right oio error.
        if isinstance(exc.reason, NewConnectionError):
            _reraise(exceptions.OioNetworkException, exc)
        if isinstance(exc.reason, TimeoutError):
            _reraise(exceptions.OioTimeout, exc)
        _reraise(exceptions.OioNetworkException, exc)
    except (ProtocolError, ProxyError, ClosedPoolError) as exc:
        _reraise(exceptions.OioNetworkException, exc)
    except TimeoutError as exc:
        _reraise(exceptions.OioTimeout, exc)
    except HTTPError as exc:
        _reraise(exceptions.OioException, exc)
    if resp.status >= 400:
        raise exceptions.from_response(resp, body)
    return resp, body
def _direct_request(self, method, url, headers=None, data=None, json=None,
                    params=None, admin_mode=False, pool_manager=None,
                    force_master=False, **kwargs):
    """
    Make an HTTP request.

    :param method: HTTP method to use (e.g. "GET")
    :type method: `str`
    :param url: URL to request
    :type url: `str`
    :keyword admin_mode: allow operations on slave or worm namespaces
    :type admin_mode: `bool`
    :keyword deadline: deadline for the request, in monotonic time.
        Supersedes `read_timeout`.
    :type deadline: `float` seconds
    :keyword timeout: optional timeout for the request (in seconds).
        May be a `urllib3.Timeout(connect=connection_timeout,
        read=read_timeout)`.
        This method also accepts `connection_timeout` and `read_timeout`
        as separate arguments.
    :type timeout: `float` or `urllib3.Timeout`
    :keyword headers: optional headers to add to the request
    :type headers: `dict`
    :keyword force_master: request will run on master service only.
    :type force_master: `bool`

    :raise oio.common.exceptions.OioTimeout: in case of read, write
    or connection timeout
    :raise oio.common.exceptions.OioNetworkException: in case of
    connection error
    :raise oio.common.exceptions.OioException: in other case of HTTP error
    :raise oio.common.exceptions.ClientException: in case of HTTP status
    code >= 400
    """
    out_kwargs = dict(kwargs)

    # Ensure headers are all strings
    if headers:
        out_headers = {k: str(v) for k, v in headers.items()}
    else:
        out_headers = dict()
    if self.admin_mode or admin_mode:
        out_headers[ADMIN_HEADER] = '1'
    if self.force_master or force_master:
        out_headers[FORCEMASTER_HEADER] = '1'

    # Look for a request deadline, deduce the timeout from it.
    # The explicit `read_timeout` keyword, if smaller, still wins.
    if kwargs.get('deadline', None) is not None:
        to = deadline_to_timeout(kwargs['deadline'], True)
        to = min(to, kwargs.get('read_timeout', to))
        out_kwargs['timeout'] = urllib3.Timeout(
            connect=kwargs.get('connection_timeout', CONNECTION_TIMEOUT),
            read=to)
        # Shorten the deadline by 1% to compensate for the time spent
        # connecting and reading response.
        # (990000.0 = 0.99 * 1e6: the header carries microseconds.)
        out_headers[TIMEOUT_HEADER] = int(to * 990000.0)

    # Ensure there is a timeout
    if 'timeout' not in out_kwargs:
        out_kwargs['timeout'] = urllib3.Timeout(
            connect=kwargs.get('connection_timeout', CONNECTION_TIMEOUT),
            read=kwargs.get('read_timeout', READ_TIMEOUT))
    if TIMEOUT_HEADER not in out_headers:
        to = out_kwargs['timeout']
        if isinstance(to, urllib3.Timeout):
            to = to.read_timeout
        else:
            to = float(to)
        # Advertise the read timeout to the service, in microseconds.
        out_headers[TIMEOUT_HEADER] = int(to * 1000000.0)

    # Look for a request ID
    if 'reqid' in kwargs:
        out_headers[REQID_HEADER] = str(kwargs['reqid'])

    # Request IDs longer than STRLEN_REQID are truncated (and logged),
    # whether they came from `reqid` or directly from `headers`.
    if len(out_headers.get(REQID_HEADER, '')) > STRLEN_REQID:
        out_headers[REQID_HEADER] = \
            out_headers[REQID_HEADER][:STRLEN_REQID]
        self.__logger().warn('Request ID truncated to %d characters',
                             STRLEN_REQID)

    # Convert json and add Content-Type
    # NOTE: `json` is a parameter name here; the json module is
    # imported as `jsonlib` to avoid the clash.
    if json:
        out_headers["Content-Type"] = "application/json"
        data = jsonlib.dumps(json)

    # Trigger performance measurments
    perfdata = kwargs.get('perfdata', None)
    if perfdata is not None:
        out_headers[PERFDATA_HEADER] = 'enabled'

    # Explicitly keep or close the connection
    if 'Connection' not in out_headers:
        out_headers['Connection'] = self.connection

    out_kwargs['headers'] = out_headers
    out_kwargs['body'] = data

    # Add query string
    if params:
        out_param = []
        for k, v in params.items():
            if v is not None:
                # Python 2 `unicode` values are encoded as UTF-8 bytes
                # before urlencode.
                if isinstance(v, unicode):
                    v = unicode(v).encode('utf-8')
                out_param.append((k, v))
        encoded_args = urlencode(out_param)
        url += '?' + encoded_args

    if not pool_manager:
        pool_manager = self.pool_manager

    try:
        if perfdata is not None:
            request_start = monotonic_time()
        resp = pool_manager.request(method, url, **out_kwargs)
        if perfdata is not None:
            # Accumulate wall-clock time of the request under
            # perfdata[service_type]['total'].
            request_end = monotonic_time()
            service_perfdata = perfdata.setdefault(
                self.service_type, dict())
            service_perfdata['total'] = service_perfdata.get(
                'total', 0.0) + request_end - request_start
        body = resp.data
        if body:
            # Best effort: keep the raw body when it is not JSON.
            try:
                body = jsonlib.loads(body)
            except ValueError:
                pass
        if perfdata is not None and PERFDATA_HEADER in resp.headers:
            # Header format: comma-separated "key=microseconds" pairs;
            # accumulate each value (converted to seconds).
            service_perfdata = perfdata[self.service_type]
            for header_val in resp.headers[PERFDATA_HEADER].split(','):
                kv = header_val.split('=', 1)
                service_perfdata[kv[0]] = service_perfdata.get(
                    kv[0], 0.0) + float(kv[1]) / 1000000.0
    except urllib3.exceptions.HTTPError as exc:
        # NOTE(review): this helper is expected to always raise; if it
        # ever returns, `resp`/`body` below would be unbound — confirm.
        oio_exception_from_httperror(exc,
                                     reqid=out_headers.get(REQID_HEADER),
                                     url=url)
    if resp.status >= 400:
        raise exceptions.from_response(resp, body)
    return resp, body
def _direct_request(self, method, url, headers=None, data=None, json=None,
                    params=None, admin_mode=False, pool_manager=None,
                    **kwargs):
    """
    Make an HTTP request.

    :param method: HTTP method to use (e.g. "GET")
    :type method: `str`
    :param url: URL to request
    :type url: `str`
    :keyword admin_mode: allow operations on slave or worm namespaces
    :type admin_mode: `bool`
    :keyword timeout: optional timeout for the request (in seconds).
        May be a `urllib3.Timeout(connect=connection_timeout,
        read=read_timeout)`.
        This method also accepts `connection_timeout` and `read_timeout`
        as separate arguments.
    :type timeout: `float` or `urllib3.Timeout`
    :keyword headers: optional headers to add to the request
    :type headers: `dict`

    :raise oio.common.exceptions.OioTimeout: in case of read, write
    or connection timeout
    :raise oio.common.exceptions.OioNetworkException: in case of
    connection error
    :raise oio.common.exceptions.OioException: in other case of HTTP error
    :raise oio.common.exceptions.ClientException: in case of HTTP status
    code >= 400
    """
    # NOTE(review): this variant uses the Python 2-only three-expression
    # `raise` syntax below (`raise E, None, traceback`) and the `unicode`
    # builtin — it cannot run under Python 3 as written.

    # Filter arguments that are not recognized by Requests
    out_kwargs = {k: v for k, v in kwargs.items()
                  if k in URLLIB3_REQUESTS_KWARGS}

    # Ensure headers are all strings
    if headers:
        out_headers = {k: str(v) for k, v in headers.items()}
    else:
        out_headers = dict()
    if self.admin_mode or admin_mode:
        out_headers[ADMIN_HEADER] = '1'

    # Ensure there is a timeout
    if 'timeout' not in out_kwargs:
        out_kwargs['timeout'] = urllib3.Timeout(
            connect=kwargs.get('connection_timeout', CONNECTION_TIMEOUT),
            read=kwargs.get('read_timeout', READ_TIMEOUT))

    # Convert json and add Content-Type
    # NOTE: `json` is a parameter name here; the json module is
    # imported as `jsonlib` to avoid the clash.
    if json:
        out_headers["Content-Type"] = "application/json"
        data = jsonlib.dumps(json)

    out_kwargs['headers'] = out_headers
    out_kwargs['body'] = data

    # Add query string
    if params:
        out_param = []
        for k, v in params.items():
            if v is not None:
                # Python 2 `unicode` values are encoded as UTF-8 bytes
                # before urlencode.
                if isinstance(v, unicode):
                    v = unicode(v).encode('utf-8')
                out_param.append((k, v))
        encoded_args = urlencode(out_param)
        url += '?' + encoded_args

    if not pool_manager:
        pool_manager = self.pool_manager

    try:
        resp = pool_manager.request(method, url, **out_kwargs)
        body = resp.data
        if body:
            # Best effort: keep the raw body when it is not JSON.
            try:
                body = jsonlib.loads(body)
            except ValueError:
                pass
    except MaxRetryError as exc:
        # Inspect the underlying reason to map to the right oio error,
        # keeping the original traceback (py2 three-expression raise).
        if isinstance(exc.reason, NewConnectionError):
            raise exceptions.OioNetworkException(exc), None, \
                sys.exc_info()[2]
        if isinstance(exc.reason, TimeoutError):
            raise exceptions.OioTimeout(exc), None, sys.exc_info()[2]
        raise exceptions.OioNetworkException(exc), None, sys.exc_info()[2]
    except (ProtocolError, ProxyError, ClosedPoolError) as exc:
        raise exceptions.OioNetworkException(exc), None, sys.exc_info()[2]
    except TimeoutError as exc:
        raise exceptions.OioTimeout(exc), None, sys.exc_info()[2]
    except HTTPError as exc:
        raise exceptions.OioException(exc), None, sys.exc_info()[2]
    if resp.status >= 400:
        raise exceptions.from_response(resp, body)
    return resp, body