def _debug(method, url, params, payload, request_headers, response_headers,
           response, status_code, elapsed, cached=None):
    """Log the full detail of an HTTP exchange when debugging is enabled.

    Debug mode follows ``g.app.debug`` when an application context exists;
    outside of any context (NoContextError) logging defaults to on. All
    lines for one exchange share a random ``log_id`` so they can be
    correlated in the log output.
    """
    try:
        enabled = g.app.debug if g.app.debug else False
    except NoContextError:
        # No application context: assume a tool/script run and log anyway.
        enabled = True

    if not enabled:
        return

    log_id = string_id(length=6)

    def _normalise(document):
        # Round-trip through the JSON codec so bodies log in a consistent
        # form; anything that is not valid JSON passes through untouched.
        try:
            return js.dumps(js.loads(document))
        except Exception:
            return document

    payload = _normalise(payload)
    response = _normalise(response)

    log.debug('Method: %s' % method +
              ', URL: %s' % url +
              ', Params: %s' % params +
              ' (%s %s)' % (status_code, HTTP_STATUS_CODES[status_code]) +
              ' Cache: %s' % cached,
              timer=elapsed,
              log_id=log_id)

    for name in request_headers:
        log.debug('Request Header: %s="%s"' % (name, request_headers[name]),
                  log_id=log_id)
    for name in response_headers:
        log.debug('Response Header: %s="%s"' % (name, response_headers[name]),
                  log_id=log_id)

    log.debug(payload, prepend='Request Payload', log_id=log_id)
    log.debug(response, prepend='Response Payload', log_id=log_id)
def parse(self, value):
    """Normalise/validate a JSON field value.

    dict/list values are serialised to a compact single-line JSON string;
    string values are parsed (and the decoded object returned); invalid
    JSON strings are reported via ``self.error``. Any other type yields
    None.
    """
    if isinstance(value, (dict, list)):
        # Serialise, then strip the newlines indent=0 introduces so the
        # result is a single line.
        flattened = js.dumps(value, indent=0)
        return flattened.replace('\n', '')

    if not isinstance(value, str):
        return None

    try:
        return js.loads(value)
    except Exception:
        self.error("Invalid json in '%s'" % value, value)
def distribute(self, queue, **kwargs):
    """Publish ``kwargs`` as a persistent JSON message onto ``queue``.

    Transient AMQP failures (closed channel or connection) trigger a
    reconnect and retry, up to 5 attempts; the final failure is raised
    as MessageBusError.
    """
    attempts = 5
    for attempt in range(attempts):
        try:
            body = js.dumps(kwargs)
            channel = self.channel()
            channel.queue_declare(queue=queue, durable=True)
            properties = pika.BasicProperties(
                delivery_mode=2,  # msg persistent
                content_type='application/json',
                content_encoding='utf-8')
            channel.basic_publish(exchange='',
                                  routing_key=queue,
                                  body=body,
                                  properties=properties)
            return True
        except (pika.exceptions.ChannelClosed,
                pika.exceptions.ConnectionClosed) as e:
            # Out of retries: surface the broker failure to the caller.
            if attempt == attempts - 1:
                raise MessageBusError(e)
            # Otherwise re-establish the connection and try again.
            self.connection = pika.BlockingConnection(self._params)
def request(client, method, url, params={}, data=None, headers={},
            stream=False, **kwargs):
    """Perform an HTTP request through the client's session.

    Adds context auth headers (token/domain/tenant) when a request context
    exists, serialises dict/list bodies to JSON, and transparently caches
    GET responses (with ETag revalidation) when a cache engine is
    available.

    Args:
        client: API client whose ``_s`` requests session is used.
        method (str): HTTP method (case-insensitive).
        url (str): Target URL.
        params (dict): Query-string parameters.
        data: Request body (object with .json, dict/list, str or bytes).
        headers (dict): Extra request headers.
        stream (bool): Stream the response body instead of loading it.
        kwargs: Additional headers; underscores become dashes.

    Returns:
        Response object (possibly served from cache).

    Raises:
        HTTPError and HTTPClient* errors mapped from requests exceptions.
    """
    with Timer() as elapsed:
        method = method.upper()
        # Copy immediately so the caller's dicts (and the shared default
        # arguments) are never mutated.
        headers = headers.copy()
        params = params.copy()

        try:
            _cache_engine = Cache()
        except NoContextError:
            _cache_engine = None

        # Propagate authentication / scoping headers from the current
        # request context, when one exists.
        try:
            if g.current_request.user_token:
                headers['X-Auth-Token'] = g.current_request.user_token
            if g.current_request.context_domain:
                headers['X-Domain'] = g.current_request.context_domain
            if g.current_request.context_tenant_id:
                headers['X-Tenant-Id'] = g.current_request.context_tenant_id
        except NoContextError:
            pass

        # BUG FIX: previously this assigned the whole kwargs dict as every
        # header's value ('headers[kwarg] = kwargs'). Forward each value,
        # converting '_' to '-' since kwargs cannot contain '-' but headers
        # conventionally do (matches the sibling request() implementation).
        for kwarg in kwargs:
            if kwargs[kwarg] is not None:
                headers[kwarg.replace('_', '-')] = str(kwargs[kwarg])

        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)
            data = if_unicode_to_bytes(data)
        if isinstance(data, bytes):
            headers['Content-Length'] = str(len(data))

        cached = None
        if (_cache_engine and stream is False
                and method == 'GET' and data is None):
            if isinstance(params, dict):
                cache_params = list(orderdict(params).values())
            if isinstance(headers, dict):
                cache_headers = list(orderdict(headers).values())
            # Key on method + url + ordered params/headers.
            cache_key = (method, url, cache_params, cache_headers)
            cache_key = str(md5sum(pickle.dumps(cache_key)))
            cached = _cache_engine.load(cache_key)
            if cached is not None:
                cache_control = parse_cache_control_header(
                    cached.headers.get('Cache-Control'))
                max_age = cache_control.max_age
                date = cached.headers.get('Date')
                etag = cached.headers.get('Etag')
                date = utc(date)
                current = now()
                diff = (current - date).total_seconds()
                if cache_control.no_cache:
                    # If no-cache revalidate.
                    headers['If-None-Match'] = etag
                elif max_age and diff < int(max_age):
                    # If not expired, use cache.
                    _debug(method, url, params, data, headers,
                           cached.headers, cached.content,
                           cached.status_code, elapsed(), 'Memory')
                    return cached
                else:
                    # If expired, revalidate..
                    headers['If-None-Match'] = etag

        try:
            response = Response(client._s.request(method.upper(),
                                                  url,
                                                  params=params,
                                                  data=data,
                                                  headers=headers,
                                                  stream=stream))
            if (_cache_engine and cached is not None
                    and response.status_code == 304):
                # Origin confirmed the cached copy is still valid.
                _debug(method, url, params, data, headers, cached.headers,
                       cached.content, cached.status_code, elapsed(),
                       'Validated (304)')
                return cached

            if response.status_code >= 400:
                try:
                    title = None
                    description = None
                    if 'error' in response.json:
                        error = response.json['error']
                        try:
                            title = error.get('title')
                            description = error.get('description')
                        except AttributeError:
                            # 'error' payload was not a mapping.
                            pass
                    raise HTTPError(response.status_code, description, title)
                except HTTPClientContentDecodingError:
                    # Body was not decodable; raise with status only.
                    raise HTTPError(response.status_code)

            if _cache_engine and stream is False and method == 'GET':
                if response.status_code == 200:
                    cache_control = parse_cache_control_header(
                        response.headers.get('Cache-Control'))
                    if (not cache_control.no_store
                            and cache_control.max_age
                            and response.headers.get('Etag')
                            and response.headers.get('Date')
                            and data is None):
                        _cache_engine.store(cache_key, response, 604800)
        except requests.exceptions.InvalidHeader as e:
            raise HTTPClientInvalidHeader(e)
        except requests.exceptions.InvalidURL as e:
            raise HTTPClientInvalidURL(e)
        except requests.exceptions.InvalidSchema as e:
            raise HTTPClientInvalidSchema(e)
        except requests.exceptions.MissingSchema as e:
            raise HTTPClientMissingSchema(e)
        except requests.exceptions.ConnectionError as e:
            raise HTTPClientConnectionError(e)
        except requests.exceptions.ProxyError as e:
            raise HTTPClientProxyError(e)
        except requests.exceptions.SSLError as e:
            raise HTTPClientSSLError(e)
        except requests.exceptions.Timeout as e:
            raise HTTPClientTimeoutError(e)
        except requests.exceptions.ConnectTimeout as e:
            raise HTTPClientConnectTimeoutError(e)
        except requests.exceptions.ReadTimeout as e:
            raise HTTPClientReadTimeoutError(e)
        except requests.exceptions.HTTPError as e:
            raise HTTPError(e.response.status_code, e)

        _debug(method, url, params, data, headers, response.headers,
               response.content, response.status_code, elapsed())
        return response
def json(self):
    """Return as serialized JSON. """
    # Delegate serialisation of the transaction to the JSON codec.
    document = js.dumps(self.transaction)
    return document
def request(method, uri, data, headers={}, auth=None,
            timeout=(2, 8), verify=True, cert=None):
    """Perform an HTTP request with per-host session reuse and caching.

    Responses are cached keyed on method+uri+body, honouring the
    response's Cache-Control max-age. One requests.Session is kept per
    host in the module-level ``sessions`` map.

    Args:
        method (str): HTTP method (case-insensitive).
        uri (str): Target URI.
        data: Request body (object with .json, dict/list, str or bytes).
        headers (dict): Extra request headers.
        auth: requests-style auth object/tuple.
        timeout (tuple): (connect, read) timeouts in seconds.
        verify (bool): Verify TLS certificates.
        cert: Client certificate for requests.

    Returns:
        Response wrapper (possibly served from cache).
    """
    with Timer() as elapsed:
        method = method.upper()
        # BUG FIX: headers defaults to a shared mutable dict and was
        # mutated below (User-Agent / Content-Length), leaking values
        # across calls. Work on a copy instead.
        headers = headers.copy()
        cache = Cache()
        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)
            data = if_unicode_to_bytes(data)
        host = host_from_uri(uri)
        cache_obj = str(method) + str(uri) + str(data)
        cached = cache.load(cache_obj)
        if cached is not None:
            return Response(cached)
        try:
            session = sessions[host]
            log.debug("Using existing session: '%s'" % host)
        except KeyError:
            # First request to this host: create and remember a session.
            session = sessions[host] = requests.Session()
        if data is None:
            data = ''
        headers['User-Agent'] = __identity__
        headers['Content-Length'] = str(len(data))
        request = requests.Request(method,
                                   uri,
                                   data=data,
                                   headers=headers,
                                   auth=auth)
        session_request = session.prepare_request(request)
        response = session.send(session_request,
                                timeout=timeout,
                                verify=verify,
                                cert=cert)
        # BUG FIX: _debug takes params as its third argument; it was
        # omitted, shifting every later argument and leaving the required
        # 'elapsed' parameter unfilled (TypeError). This API has no query
        # params, so pass None explicitly.
        _debug(method, uri, None, data, headers, response.headers,
               response.content, response.status_code, elapsed())
        if 'Cache-Control' in response.headers:
            cache_control = parse_cache_control_header(
                response.headers['cache-control'])
            if cache_control.max_age is not None:
                cache.store(cache_obj, response, int(cache_control.max_age))
        return Response(response)
def request(client, method, url, params={}, data=None, headers={},
            stream=False, endpoint=None, **kwargs):
    """Perform an HTTP request through the client's session.

    Serialises dict/list bodies to JSON, forwards kwargs as headers,
    transparently caches GET responses (with ETag revalidation) when a
    cache engine is available, and annotates errors with the endpoint.

    Args:
        client: API client whose ``_s`` requests session is used.
        method (str): HTTP method (case-insensitive).
        url (str): Target URL.
        params (dict): Query-string parameters.
        data: Request body (object with .json, dict/list, str or bytes).
        headers (dict): Extra request headers.
        stream (bool): Stream the response body instead of loading it.
        endpoint (str): Logical endpoint name for error messages;
            defaults to the URL.
        kwargs: Additional headers; underscores become dashes.

    Returns:
        Response object (possibly served from cache).

    Raises:
        TokenExpiredError, HTTPError and HTTPClient* errors.
    """
    if endpoint is None:
        endpoint = url

    with Timer() as elapsed:
        method = method.upper()
        # Copy immediately so the caller's dicts (and the shared default
        # arguments) are never mutated.
        headers = headers.copy()
        params = params.copy()

        try:
            _cache_engine = Cache()
        except NoContextError:
            _cache_engine = None

        for kwarg in kwargs:
            # NOTE(cfrademan):
            # Generally headers have '-' not '_'. Also kwargs
            # cannot contain '-'.
            if kwargs[kwarg] is not None:
                header = kwarg.replace('_', '-')
                headers[header] = str(kwargs[kwarg])

        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)
            data = if_unicode_to_bytes(data)
        if isinstance(data, bytes):
            headers['Content-Length'] = str(len(data))

        cached = None
        if (_cache_engine and stream is False
                and method == 'GET' and data is None):
            if isinstance(params, dict):
                cache_params = list(orderdict(params).values())
            if isinstance(headers, dict):
                cache_headers = list(orderdict(headers).values())
            # Key on method + url + ordered params/headers.
            cache_key = (method, url, cache_params, cache_headers)
            cache_key = str(md5sum(pickle.dumps(cache_key)))
            cached = _cache_engine.load(cache_key)
            if cached is not None:
                cache_control = parse_cache_control_header(
                    cached.headers.get('Cache-Control'))
                max_age = cache_control.max_age
                date = cached.headers.get('Date')
                etag = cached.headers.get('Etag')
                date = utc(date)
                current = now()
                diff = (current - date).total_seconds()
                if cache_control.no_cache:
                    # If no-cache revalidate.
                    headers['If-None-Match'] = etag
                elif max_age and diff < int(max_age):
                    # If not expired, use cache.
                    _debug(method, url, params, data, headers,
                           cached.headers, cached.content,
                           cached.status_code, elapsed(), 'Memory')
                    return cached
                else:
                    # If expired, revalidate..
                    headers['If-None-Match'] = etag

        try:
            # NOTE(cfrademan): Using prepared requests, because we need to
            # no Transfer Encoding chunked, and expect Content-Length...
            # Chunked encoding is not well supported uploading to WSGI app.
            prepped = client._s.prepare_request(
                requests.Request(method.upper(),
                                 url,
                                 params=params,
                                 data=data,
                                 headers=headers))
            if 'Content-Length' in prepped.headers:
                if 'Transfer-Encoding' in prepped.headers:
                    del prepped.headers['Transfer-Encoding']
            response = Response(client._s.send(prepped, stream=stream))

            if (_cache_engine and cached is not None
                    and response.status_code == 304):
                # Origin confirmed the cached copy is still valid.
                _debug(method, url, params, data, headers, cached.headers,
                       cached.content, cached.status_code, elapsed(),
                       'Validated (304)')
                return cached

            if response.status_code >= 400:
                if 'X-Expired-Token' in response.headers:
                    raise TokenExpiredError()
                try:
                    title = None
                    description = None
                    if ('json' in response.content_type.lower()
                            and 'error' in response.json):
                        error = response.json['error']
                        try:
                            title = error.get('title')
                            description = error.get('description')
                            # BUG FIX: error.get('title') may be None, and
                            # 'None += str' raises an uncaught TypeError
                            # (only AttributeError is handled below).
                            if title is not None and endpoint is not None:
                                title += " (%s)" % endpoint
                        except AttributeError:
                            # 'error' payload was not a mapping.
                            if endpoint is not None:
                                description = " Endpoint: %s" % endpoint
                    else:
                        if endpoint is not None:
                            description = " Endpoint: %s" % endpoint
                    if stream is True:
                        # Streamed body cannot be read for logging.
                        _debug(method, url, params, data, headers,
                               response.headers, None,
                               response.status_code, elapsed())
                    else:
                        _debug(method, url, params, data, headers,
                               response.headers, response.content,
                               response.status_code, elapsed())
                    raise HTTPError(response.status_code, description, title)
                except HTTPClientContentDecodingError:
                    if endpoint is not None:
                        # BUG FIX: the format string was never interpolated
                        # ('Endpoint: %s' was raised verbatim).
                        description = 'Endpoint: %s' % endpoint
                        raise HTTPError(response.status_code,
                                        description=description) from None
                    else:
                        raise HTTPError(response.status_code) from None

            if _cache_engine and stream is False and method == 'GET':
                if response.status_code == 200:
                    cache_control = parse_cache_control_header(
                        response.headers.get('Cache-Control'))
                    if (not cache_control.no_store
                            and cache_control.max_age
                            and response.headers.get('Etag')
                            and response.headers.get('Date')
                            and data is None):
                        _cache_engine.store(cache_key, response, 604800)
        except requests.exceptions.InvalidHeader as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidHeader(e)
        except requests.exceptions.InvalidURL as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidURL(e)
        except requests.exceptions.InvalidSchema as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidSchema(e)
        except requests.exceptions.MissingSchema as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientMissingSchema(e)
        except requests.exceptions.ConnectionError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientConnectionError(
                "API Connection error to '%s' (%s)" % (
                    url,
                    endpoint,
                ))
        except requests.exceptions.ProxyError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientProxyError("API proxy error to '%s' (%s)" % (
                url,
                endpoint,
            ))
        except requests.exceptions.SSLError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientSSLError("API SSL error to '%s' (%s)" % (
                url,
                endpoint,
            ))
        except requests.exceptions.Timeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientTimeoutError(
                "API connection timeout to '%s' (%s)" % (
                    url,
                    endpoint,
                ))
        except requests.exceptions.ConnectTimeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientConnectTimeoutError(
                "API connect timeout to '%s' (%s)" % (
                    url,
                    endpoint,
                ))
        except requests.exceptions.ReadTimeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientReadTimeoutError(
                "API read timeout to '%s' (%s)" % (
                    url,
                    endpoint,
                ))
        except requests.exceptions.HTTPError as e:
            e = append_to_error(e, endpoint)
            raise HTTPError(e.response.status_code, e)

        if stream is True:
            # Streamed body cannot be read for logging.
            _debug(method, url, params, data, headers, response.headers,
                   None, response.status_code, elapsed())
        else:
            _debug(method, url, params, data, headers, response.headers,
                   response.content, response.status_code, elapsed())
        return response