def valid(password, hashed):
    """Validate password against hash.

    Args:
        password (str): Clear Text Password
        hashed (str): Hashed value of Password

    Return:
        True if password matches.
    """
    # PASSLIB is slow to import... so only do so when necessary.
    import passlib.hash
    import passlib.context

    # Initialize pwd_context globally per process.
    # Purpose is faster loading on subsequent calls.
    global pwd_context
    try:
        pwd_context
    except NameError:
        # First call in this process: build the context once.
        # NOTE: an unbound global raises NameError specifically; the
        # original caught broad Exception, which could mask real errors.
        schemes = [
            "md5_crypt",
            "bcrypt",
            "sha256_crypt",
            "sha512_crypt",
            "ldap_md5",
            "ldap_salted_md5",
            "ldap_sha1",
            "ldap_salted_sha1",
            "ldap_bcrypt",
            "ldap_sha256_crypt",
            "ldap_sha512_crypt",
            "plaintext"
        ]
        pwd_context = passlib.context.CryptContext(schemes=schemes)

    # Validate Password using pwd_context
    with Timer() as elapsed:
        val = pwd_context.verify(password, hashed)

    log.debug('Hash Validated %s' % val +
              ' (DURATION: %.4fs)' % elapsed())

    return val
def validate(self, rule):
    """Validate access against a named policy rule.

    Args:
        rule (str): Rule name within the compiled rule set.

    Returns:
        bool: True if the rule evaluated truthy, otherwise False.
    """
    # Default Value
    val = False

    if rule not in self._rule_set:
        log.error("No such rule '%s'" % rule)
        return val

    # Globals handed to the compiled rule set; '_validate_rule' selects
    # which rule the compiled code evaluates.
    # NOTE(review): the original built {'__builtins__': {}} and then
    # immediately replaced it with this dict, so the builtins
    # restriction never applied — dead code removed here. Restoring an
    # empty __builtins__ would change behaviour of rule expressions
    # that use builtins; confirm intent before sandboxing. Rule sets
    # are trusted configuration — never exec() rules from untrusted
    # input.
    exec_globals = {'_validate_rule': rule}

    # Add Policy environment Kwargs to be referenced in rule_set.
    exec_globals.update(self._kwargs)

    try:
        with Timer() as elapsed:
            # Execute compiled rule_set code.
            exec(self._compiled, exec_globals, exec_globals)
            # Value from compiled code.
            val = exec_globals['_validate_result']
        log.info('Rule %s validated to %s.' % (rule, val),
                 timer=elapsed())
    except AccessDeniedError:
        # Rules may deny explicitly; let that propagate to the caller.
        raise
    except Exception as e:
        log.error("Failed validating '%s' %s:%s"
                  % (rule, e.__class__.__name__, e))

    return val
def __init__(self, name, app_root=None, ini=None):
    """Initialise the application and shared global state.

    Sets this instance as the process-wide application (``g.app``),
    resolves the application root, loads configuration and configures
    the logger. Any failure is logged with a full traceback and
    re-raised.

    Args:
        name (str): Application name.
        app_root (str): Application root path; auto-detected via
            determine_app_root() when None.
        ini (str): Optional explicit path to the settings ini file;
            defaults to '<app_root>/settings.ini'.
    """
    try:
        with Timer() as elapsed:
            # Set current app as global
            g.app = self

            # Attempt to determine application root.
            g.app_root = self.app_root = app_root = determine_app_root(
                name, app_root)

            # Set Application Name
            self.name = name

            # Load Configuration
            if ini is None:
                g.config = self.config = load_config(
                    self.app_root + '/settings.ini')
            else:
                g.config = self.config = load_config(ini)

            # Configure Logger. Must happen after config load so the
            # logger can read its settings.
            GetLogger().app_configure()

            # Started Application
            log.info('Started Application'
                     ' %s' % name +
                     ' app_root: %s' % app_root,
                     timer=elapsed())
    except Exception:
        # Startup failures are fatal: log the traceback and re-raise.
        trace = str(traceback.format_exc())
        log.critical("%s" % trace)
        raise
def get_source(self, environment, template):
    """Get raw template for environment.

    First attempts to load overriding template then uses template
    within specified package. For example "package/template.html"

    Args:
        environment: Jinja-style environment passed to the loaders.
        template (str): Template path, first segment is the package.

    Returns:
        Loader source tuple for the template.

    Raises:
        TemplateNotFound: If neither the override location nor the
            package provides the template.
    """
    with Timer() as elapsed:
        # Override location wins over packaged templates.
        try:
            source = self._fsl.get_source(environment, template)
            log.info("Loaded Override Template %s" % template,
                     timer=elapsed())
            return source
        except TemplateNotFound:
            pass

        try:
            package_path = split_template_path(template)
            package = package_path[0]
            # NOTE(review): remaining path segments are joined with '.'
            # — presumably the package loader expects dotted names, but
            # this looks unusual for nested template paths; confirm
            # against PackageLoader's expectations.
            template = ".".join(package_path[1:])
            # Package loaders are cached per package for reuse.
            if package not in self._pkgloaders:
                self._pkgloaders[package] = PackageLoader(
                    package,
                    package_path='/templates',
                    encoding='UTF-8')
            source = self._pkgloaders[package].get_source(
                environment, template)
            log.info("Loaded Package Template %s/%s"
                     % (package, template), timer=elapsed())
        except ModuleNotFoundError:
            # Package itself does not exist: surface as template miss.
            raise TemplateNotFound(package + '/' + template) from None

    return source
def test_Timer():
    """Timer context yields a callable reporting elapsed seconds."""
    with Timer() as elapsed:
        # Fresh timer: essentially no time has passed yet.
        assert elapsed() < 0.001
        time.sleep(0.01)
        # After sleeping ~10ms the reading lands inside (0.01, 0.02).
        assert 0.01 < elapsed() < 0.02
def execute(self, query, args=None):
    """Prepare and execute a database operation (query or command).

    Parameters may be provided as a sequence or mapping and are bound
    to variables in the operation using the database-specific notation
    (see the module's paramstyle attribute). A bare scalar argument is
    wrapped in a list before binding.

    Per PEP-0249 the return value is not defined by the standard;
    this implementation returns the cursor itself, whose results are
    rows as dictionaries of column/key values. For repeated inserts
    prefer .executemany() over passing a list of tuples here.

    Args:
        query (str): SQL operation to execute.
        args: Bind values (dict, list, tuple or scalar).

    Returns:
        The cursor instance (self).
    """
    with Timer() as elapsed:
        self._rownumber = 0
        try:
            # Normalise a bare scalar into a single-item sequence.
            if args is not None and not isinstance(args,
                                                   (dict, list, tuple)):
                args = [args]

            # Translate query/args to the destination paramstyle and
            # cast values per the connection's mapping.
            query, args = args_to(query, args, self._conn.DEST_FORMAT,
                                  self._conn.CAST_MAP)

            if self._debug:
                _log(self, "Start " + query, elapsed(), values=args)

            # A transaction is now open and a statement has been run.
            self._uncommited = True
            self._executed = True
            if args is not None:
                self._crsr.execute(query, args)
            else:
                self._crsr.execute(query)

            return self
        except Exception as e:
            # Map driver-specific errors to this layer's exceptions.
            self._error_handler(self, e, self._conn.ERROR_MAP)
        finally:
            if self._debug:
                _log(self, "Completed " + query, elapsed(), values=args)
def rollback(self):
    """Rollback Transactional Queries.

    If the database and tables support transactions, this rolls back
    (cancels) the current transaction; otherwise a NotSupportedError is
    raised by the underlying driver.
    """
    if self._uncommited is True:
        with Timer() as elapsed:
            try:
                self._crsr.rollback()
            except AttributeError:
                # Some drivers expose rollback on the connection only.
                self._conn._conn.rollback()
            _log(self, "Rollback", elapsed())
        # Nothing is pending after a rollback — clear the flag, mirroring
        # commit(). The original omitted this, so a later commit() would
        # issue a redundant COMMIT for an already-cancelled transaction.
        self._uncommited = False
def commit(self):
    """Commit Transactional Queries.

    If the database and the tables support transactions, this commits
    the current transaction; otherwise this method successfully does
    nothing.
    """
    # Nothing pending: nothing to do.
    if self._uncommited is not True:
        return
    with Timer() as elapsed:
        try:
            self._crsr.commit()
        except AttributeError:
            # Some drivers expose commit on the connection object only.
            self._conn._conn.commit()
        _log(self, "Commit", elapsed())
    self._uncommited = False
def import_module(module):
    """Import module.

    Args:
        module: Dotted import path of the module.

    Returns the module object from sys.modules.
    """
    # Already loaded: hand back the cached module immediately.
    if module in sys.modules:
        return sys.modules[module]
    with Timer() as elapsed:
        log.info('Importing module: %s' % module)
        __import__(module)
        log.info('Importing module: %s (Completed) (DURATION %.4fs)'
                 % (module, elapsed()))
    return sys.modules[module]
def import_module(module):
    """Import module.

    Args:
        module: Module import path definition.

    Returns:
        Module in given path.
    """
    # Fast path: module already present in the import cache.
    if module in sys.modules:
        return sys.modules[module]
    with Timer() as elapsed:
        log.info('Importing module: %s' % module)
        __import__(module)
        log.info('Importing module: %s (Completed)' % module,
                 timer=elapsed())
    return sys.modules[module]
def proc(self, method, route):
    """Process a single request for the given method and route.

    Builds request/response objects, runs pre / resource / post
    middleware around the routed view, enforces the route's policy tag,
    and delegates failures to handle_error.

    Args:
        method (str): Request method.
        route (str): Request route.
    """
    try:
        with Timer() as elapsed:
            # Request Object.
            request = g.current_request = Request(method, route)
            # Response Object.
            response = Response()
            # Set Response object for request.
            request.response = response

            # Debug output
            if g.app.debug is True:
                log.info('Request %s' % request.route +
                         ' Method %s\n' % request.method)

            # Process the middleware 'pre' method before routing it
            for middleware in register._middleware_pre:
                middleware(request, response)

            # Route Object.
            resource, method, r_kwargs, target, tag, cache = router.find(
                request.method,
                request.route)

            # Route Kwargs in requests.
            request.route_kwargs = r_kwargs

            # Set route tag in requests.
            request.tag = tag

            # If route tagged validate with policy
            if tag is not None:
                if not request.policy.validate(tag):
                    raise AccessDeniedError("Access Denied by" +
                                            " policy '%s'" % tag)

            # Execute Routed View.
            try:
                # Process the middleware 'resource' after routing it
                for middleware in register._middleware_resource:
                    middleware(request, response)

                # Run View method.
                if resource is not None:
                    view = resource(request, response, **r_kwargs)
                    if view is not None:
                        response.write(view)
                else:
                    raise NotFoundError("Route not found" +
                                        " Method '%s'" % request.method +
                                        " Route '%s'" % request.route)
            finally:
                # Process the middleware 'post' at the end — runs even
                # when the view raised.
                for middleware in register._middleware_post:
                    middleware(request, response)
    except KeyboardInterrupt:
        response.write('CTRL-C / KeyboardInterrupt')
    except Exception as exception:
        # NOTE(review): if Request()/Response() construction itself
        # raised, 'request'/'response' are unbound here and this
        # handler would fail with NameError — confirm constructors
        # cannot raise, or guard this path.
        trace = str(traceback.format_exc())
        self.handle_error(request, response, exception, trace)
    finally:
        # Completed Request
        log.info('Completed CMD', timer=elapsed())
def compiler(dict_rule_set):
    """Policy Rules Compiler.

    Compiles rule set into python executable machine code for enhanced
    performance during conditional matching.

    Example of rule_set in dict format:
    {
        'role:admin': '"admin" in role_kwarg',
        'user:login': '******',
        'match:both': '$role:admin or $system:login'
    }

    The syntax for the rule is exactly as per python conditional
    statements. Note these statements can be nested using braces ().

    Args:
        dict_rule_set (dict): Rule Set loaded from JSON file for example.

    Returns:
        tuple: (compiled code object, original dict_rule_set).

    Raises:
        ValueError: If the generated rule source fails to compile.

    NOTE(review): the compiled object is exec()'d elsewhere — rule sets
    are trusted configuration; never compile rules from untrusted input.
    """
    with Timer() as elapsed:
        # MATCH : expression used within rule statements.
        # Matches '$rule:name' style interpolation tokens.
        interpolation_match = re.compile(r"\$[a-z_\-:]+", re.IGNORECASE)

        # Build Rules - Need todo this before compiling.
        # Some rules reference others.
        # Each rule becomes a zero-arg function; '_rules' maps rule name
        # to function and '_validate_rule' selects which one runs when
        # the compiled module is executed.
        rule_set = '_validate_result = False\n'
        rule_set += '_rules = {}\n\n'
        for rule in dict_rule_set:
            # Validate Rules.
            # Rule names become python identifiers (':' -> '_'), so
            # spaces cannot be tolerated.
            if ' ' in rule:
                log.error("Error in rule name '" + rule +
                          "' skipping. (expected value with no spaces)")
                continue
            log.info('Build rule %s = %s' % (rule, dict_rule_set[rule]))
            condition = dict_rule_set[rule]
            build_rule = ('def ' + rule.replace(':', '_') +
                          '():\n return ' + condition + '\n')
            # Correct build_rule for interpolation.
            # Any string with '$value' is an expression.
            for expr in interpolation_match.findall(build_rule):
                if expr[1:] not in dict_rule_set:
                    # Unknown reference: degrade to a literal False.
                    log.error("Missing rule for interpolation of '" +
                              expr + "' in rule '" + rule +
                              "' skipping.")
                    build_rule = build_rule.replace(expr, 'False')
                    continue
                # '$role:admin' -> call of generated 'role_admin()'.
                build_rule = build_rule.replace(
                    expr, expr.replace(':', '_')[1:] + '()')
            # Add Rule to _rules dictionary for validation to select rule.
            build_rule += "_rules['" + rule + "'] = " + rule.replace(':', '_')
            build_rule += '\n\n'
            rule_set += build_rule
        rule_set += '_validate_result = _rules[_validate_rule]()\n'

        # Compile Rules
        try:
            compiled = compile(rule_set, 'policy.json.compiled', 'exec')
            log.info('%s Rules compile completed.'
                     % len(dict_rule_set), timer=elapsed())
            return (compiled, dict_rule_set)
        except Exception:
            raise ValueError("Failed compiling rule_set")
def __call__(self, *args, **kwargs):
    """Application Request Interface.

    A clean request and response object is provided to the interface
    that is unique this to this thread. It passes any args and kwargs
    to the interface.

    Response object is returned.
    """
    try:
        with Timer() as elapsed:
            # Request Object.
            request = g.current_request = self._REQUEST_CLASS(
                *args, **kwargs)
            # Response Object.
            response = self._RESPONSE_CLASS(*args, **kwargs)
            # Set Response object for request.
            request.response = response

            # Process the middleware 'pre' method before routing it
            for middleware in g.middleware_pre:
                middleware(request, response)

            # Route Object.
            resource, method, r_kwargs, target, tag = g.router.find(
                request.method,
                request.route)

            # Set route tag in requests.
            request.tag = tag

            # Execute Routed View.
            try:
                # Run View method.
                if resource is not None:
                    # Process the middleware 'resource' after routing it
                    for middleware in g.middleware_resource:
                        middleware(request, response)
                    view = resource(request, response, **r_kwargs)
                    if view is not None:
                        response.body(view)
                else:
                    raise NotFound('Route not found' +
                                   " Method '%s'" % request.method +
                                   " Route '%s'" % request.route)
            finally:
                # Process the middleware 'post' at the end — runs even
                # when the view raised.
                for middleware in g.middleware_post:
                    middleware(request, response)

            # Return response object.
            return response
    except HTTPError as exception:
        # Expected HTTP-level failure: informational logging only
        # unless debug mode is on.
        trace = str(traceback.format_exc())
        if log.debug_mode():
            log.debug('%s' % (trace))
        else:
            log.info('%s: %s' % (object_name(exception), exception))
        # NOTE(review): 'request'/'response' are unbound here if their
        # constructors raised — confirm they cannot fail.
        self._proxy_handle_error(request, response, exception, trace)
        # Return response object.
        return response
    except Error as exception:
        # Framework-level error.
        trace = str(traceback.format_exc())
        if log.debug_mode():
            log.debug('%s' % (trace))
        else:
            log.error('%s: %s' % (object_name(exception), exception))
        self._proxy_handle_error(request, response, exception, trace)
        # Return response object.
        return response
    except Exception as exception:
        # Anything else is unexpected: log as error.
        trace = str(traceback.format_exc())
        if log.debug_mode():
            log.debug('%s' % (trace))
        else:
            log.error('%s: %s' % (object_name(exception), exception))
        self._proxy_handle_error(request, response, exception, trace)
        # Return response object.
        return response
    finally:
        # Completed Request
        log.info('Completed Request', timer=elapsed())
def request(method, uri, data,
            headers=None, auth=None,
            timeout=(2, 8), verify=True,
            cert=None):
    """Perform an HTTP request with per-host session reuse and caching.

    Args:
        method (str): HTTP method (case-insensitive).
        uri (str): Target URI.
        data: Request body; objects exposing '.json', dicts and lists
            are serialised to JSON.
        headers (dict): Optional extra request headers. The dict is
            copied — the caller's object is never mutated.
        auth: Optional requests-style auth handler.
        timeout (tuple): (connect, read) timeouts in seconds.
        verify (bool): Verify TLS certificates.
        cert: Optional client certificate.

    Returns:
        Response object (possibly served from cache).
    """
    with Timer() as elapsed:
        method = method.upper()
        # BUG FIX: original used a mutable default 'headers={}' and then
        # mutated it below (User-Agent / Content-Length), polluting the
        # shared default across calls and the caller's dict. Copy instead.
        headers = {} if headers is None else headers.copy()
        cache = Cache()

        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)
            data = if_unicode_to_bytes(data)

        host = host_from_uri(uri)

        # Cache key covers method, uri and body.
        cache_obj = str(method) + str(uri) + str(data)
        cached = cache.load(cache_obj)
        if cached is not None:
            return Response(cached)

        # Reuse one session per host for connection pooling.
        try:
            session = sessions[host]
            log.debug("Using exisiting session: '%s'" % host)
        except KeyError:
            session = sessions[host] = requests.Session()

        if data is None:
            data = ''

        headers['User-Agent'] = __identity__
        headers['Content-Length'] = str(len(data))

        request = requests.Request(method, uri,
                                   data=data,
                                   headers=headers,
                                   auth=auth)

        session_request = session.prepare_request(request)

        response = session.send(session_request,
                                timeout=timeout,
                                verify=verify,
                                cert=cert)

        _debug(method, uri, data, headers, response.headers,
               response.content, response.status_code, elapsed())

        # Honour Cache-Control max-age on the response.
        if 'Cache-Control' in response.headers:
            cache_control = parse_cache_control_header(
                response.headers['cache-control'])
            if cache_control.max_age is not None:
                cache.store(cache_obj, response,
                            int(cache_control.max_age))

        return Response(response)
def __call__(self, *args, **kwargs):
    """Application Request Interface.

    A clean request and response object is provided to the interface
    that is unique this to this thread. It passes any args and kwargs
    to the interface.

    Response object is returned.
    """
    try:
        with Timer() as elapsed:
            # Request Object.
            request = g.current_request = Request(*args, **kwargs)
            # Allow configuration / X-Script-Name header to override
            # the WSGI SCRIPT_NAME.
            request.env['SCRIPT_NAME'] = g.app.config.get(
                'application',
                'script',
                fallback=request.env['SCRIPT_NAME'])
            script_name = request.get_header('X-Script-Name')
            if script_name:
                request.env['SCRIPT_NAME'] = script_name

            # Response Object.
            response = Response(*args, **kwargs)
            # Set Response object for request.
            request.response = response

            # Debug output
            if g.app.debug is True:
                log.info('Request %s' % request.route +
                         ' Method %s\n' % request.method)

            # Process the middleware 'pre' method before routing it
            for middleware in register._middleware_pre:
                middleware(request, response)

            # Route Object.
            resource, method, r_kwargs, target, tag, cache = router.find(
                request.method,
                request.route)

            # Route Kwargs in requests.
            request.route_kwargs = r_kwargs

            # Set route tag in requests.
            request.tag = tag

            # If route tagged validate with policy
            if tag is not None:
                if not request.policy.validate(tag,
                                               access_denied_raise=True):
                    raise AccessDeniedError("Access Denied by" +
                                            " policy '%s'" % tag)

            # Execute Routed View.
            try:
                # Process the middleware 'resource' after routing it
                for middleware in register._middleware_resource:
                    middleware(request, response)

                # Run View method.
                if resource is not None:
                    view = resource(request, response, **r_kwargs)
                    if view is not None:
                        response.body(view)
                else:
                    raise NotFoundError("Route not found" +
                                        " Method '%s'" % request.method +
                                        " Route '%s'" % request.route)
            finally:
                # Process the middleware 'post' at the end
                self.post_middleware(request, response, False)

            # Cache GET Response.
            # Only cache for GET responses!
            # NOTE(review): assumes 'cache' from router.find() is an
            # int duration in seconds — confirm router contract.
            if cache > 0 and request.method == 'GET':
                # Get session_id if any for Caching
                session_id = request.cookies.get(request.host)
                # NOTE(cfrademan): Instruct to use cache but revalidate
                # on, stale cache entry. Expire remote cache in same
                # duration as internal cache.
                if session_id:
                    response.set_header(
                        "cache-control",
                        "must-revalidate, private, max-age=" +
                        str(cache))
                else:
                    response.set_header(
                        "cache-control",
                        "must-revalidate, max-age=" + str(cache))
                # Set Vary Header
                # NOTE(cfrademan): Client should uniquely cache
                # based these request headers.
                response.set_header(
                    'Vary', 'Cookie, Accept-Encoding' +
                    ', Content-Type')
                # Set Etag
                # NOTE(cfrademan): Needed Encoding for Different Etag.
                if isinstance(response._stream, bytes):
                    encoding = request.get_header('Accept-Encoding')
                    response.etag.set(etagger(response._stream,
                                              encoding))
                # If Etag matches do not return full body use
                # external/user-agent cache.
                if (len(request.if_none_match) > 0 and
                        request.if_none_match in response.etag):
                    # Etag matches do not return full body.
                    response.not_modified()
                # NOTE(cfrademan): Use last_modified as last resort for
                # external/user-agent cache.
                elif (request.if_modified_since and
                        response.last_modified and
                        request.if_modified_since <=
                        response.last_modified):
                    # Last-Modified matches do not return full body.
                    response.not_modified()
            else:
                response.set_header("cache-control",
                                    "no-store, no-cache, max-age=0")

            # Return response object.
            return response()
    except HTTPError as exception:
        # NOTE(review): 'request'/'response' are unbound here if their
        # constructors raised — confirm they cannot fail.
        trace = str(traceback.format_exc())
        self.handle_error(request, response, exception, trace)
        self.post_middleware(request, response, True)
        # Return response object.
        return response()
    except Error as exception:
        trace = str(traceback.format_exc())
        self.handle_error(request, response, exception, trace)
        self.post_middleware(request, response, True)
        # Return response object.
        return response()
    except Exception as exception:
        trace = str(traceback.format_exc())
        self.handle_error(request, response, exception, trace)
        self.post_middleware(request, response, True)
        # Return response object.
        return response()
    finally:
        # Completed Request
        log.info('Completed Request', timer=elapsed())
def request(client, method, url, params={},
            data=None, headers={}, stream=False,
            **kwargs):
    """Perform an HTTP request via the client's session, with caching.

    GET responses may be served from, revalidated against, or stored
    into the context cache engine based on Cache-Control headers.
    Context credentials (token/domain/tenant) are forwarded as headers
    when a request context exists.

    Args:
        client: API client whose requests session ('_s') is used.
        method (str): HTTP method (case-insensitive).
        url (str): Target URL.
        params (dict): Query parameters (copied, caller not mutated).
        data: Request body; '.json' objects, dicts and lists are
            serialised to JSON.
        headers (dict): Extra headers (copied, caller not mutated).
        stream (bool): Stream the response body.
        **kwargs: Additional headers given as keyword arguments.

    Returns:
        Response object (possibly a cached one).
    """
    with Timer() as elapsed:
        method = method.upper()
        headers = headers.copy()
        params = params.copy()

        try:
            _cache_engine = Cache()
        except NoContextError:
            _cache_engine = None

        # Forward context credentials when a request context exists.
        try:
            if g.current_request.user_token:
                headers['X-Auth-Token'] = g.current_request.user_token
            if g.current_request.context_domain:
                headers['X-Domain'] = g.current_request.context_domain
            if g.current_request.context_tenant_id:
                headers['X-Tenant-Id'] = \
                    g.current_request.context_tenant_id
        except NoContextError:
            pass

        # BUG FIX: original assigned the whole kwargs dict
        # ('headers[kwarg] = kwargs') instead of the keyword's value.
        for kwarg in kwargs:
            headers[kwarg] = kwargs[kwarg]

        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)

            data = if_unicode_to_bytes(data)

        if isinstance(data, bytes):
            headers['Content-Length'] = str(len(data))

        cached = None
        if (_cache_engine and stream is False
                and method == 'GET' and data is None):
            if isinstance(params, dict):
                cache_params = list(orderdict(params).values())
            if isinstance(headers, dict):
                cache_headers = list(orderdict(headers).values())
            cache_key = (method, url, cache_params, cache_headers)
            cache_key = str(md5sum(pickle.dumps(cache_key)))
            cached = _cache_engine.load(cache_key)
            if cached is not None:
                cache_control = parse_cache_control_header(
                    cached.headers.get('Cache-Control'))
                max_age = cache_control.max_age
                date = cached.headers.get('Date')
                etag = cached.headers.get('Etag')
                date = utc(date)
                current = now()
                diff = (current - date).total_seconds()
                if cache_control.no_cache:
                    # If no-cache revalidate.
                    headers['If-None-Match'] = etag
                elif max_age and diff < int(max_age):
                    # If not expired, use cache.
                    _debug(method, url, params, data, headers,
                           cached.headers, cached.content,
                           cached.status_code, elapsed(), 'Memory')
                    return cached
                else:
                    # If expired, revalidate..
                    headers['If-None-Match'] = etag

        try:
            response = Response(
                client._s.request(method.upper(),
                                  url,
                                  params=params,
                                  data=data,
                                  headers=headers,
                                  stream=stream))

            # 304 Not Modified: our cached copy is still valid.
            if (_cache_engine and cached is not None
                    and response.status_code == 304):
                _debug(method, url, params, data, headers,
                       cached.headers, cached.content,
                       cached.status_code, elapsed(), 'Validated (304)')
                return cached

            if response.status_code >= 400:
                try:
                    title = None
                    description = None
                    if 'error' in response.json:
                        error = response.json['error']
                        try:
                            title = error.get('title')
                            description = error.get('description')
                        except AttributeError:
                            pass
                    raise HTTPError(response.status_code, description,
                                    title)
                except HTTPClientContentDecodingError:
                    raise HTTPError(response.status_code)

            # Store cacheable 200 GET responses.
            if _cache_engine and stream is False and method == 'GET':
                if response.status_code == 200:
                    cache_control = parse_cache_control_header(
                        response.headers.get('Cache-Control'))
                    if (not cache_control.no_store
                            and cache_control.max_age
                            and response.headers.get('Etag')
                            and response.headers.get('Date')
                            and data is None):
                        _cache_engine.store(cache_key, response, 604800)
        except requests.exceptions.InvalidHeader as e:
            raise HTTPClientInvalidHeader(e)
        except requests.exceptions.InvalidURL as e:
            raise HTTPClientInvalidURL(e)
        except requests.exceptions.InvalidSchema as e:
            raise HTTPClientInvalidSchema(e)
        except requests.exceptions.MissingSchema as e:
            raise HTTPClientMissingSchema(e)
        except requests.exceptions.ConnectionError as e:
            raise HTTPClientConnectionError(e)
        except requests.exceptions.ProxyError as e:
            raise HTTPClientProxyError(e)
        except requests.exceptions.SSLError as e:
            raise HTTPClientSSLError(e)
        except requests.exceptions.Timeout as e:
            raise HTTPClientTimeoutError(e)
        except requests.exceptions.ConnectTimeout as e:
            raise HTTPClientConnectTimeoutError(e)
        except requests.exceptions.ReadTimeout as e:
            raise HTTPClientReadTimeoutError(e)
        except requests.exceptions.HTTPError as e:
            raise HTTPError(e.response.status_code, e)

        _debug(method, url, params, data,
               headers, response.headers, response.content,
               response.status_code, elapsed())

        return response
def request(client, method, url, params={},
            data=None, headers={}, stream=False,
            endpoint=None, **kwargs):
    """Perform an HTTP request via the client's session, with caching.

    GET responses may be served from, revalidated against, or stored
    into the context cache engine based on Cache-Control headers.
    Keyword arguments become request headers ('_' mapped to '-').

    Args:
        client: API client whose requests session ('_s') is used.
        method (str): HTTP method (case-insensitive).
        url (str): Target URL.
        params (dict): Query parameters (copied, caller not mutated).
        data: Request body; '.json' objects, dicts and lists are
            serialised to JSON.
        headers (dict): Extra headers (copied, caller not mutated).
        stream (bool): Stream the response body.
        endpoint (str): Endpoint name for error messages; defaults to
            the url.
        **kwargs: Additional headers given as keyword arguments.

    Returns:
        Response object (possibly a cached one).
    """
    if endpoint is None:
        endpoint = url

    with Timer() as elapsed:
        method = method.upper()
        headers = headers.copy()
        params = params.copy()

        try:
            _cache_engine = Cache()
        except NoContextError:
            _cache_engine = None

        for kwarg in kwargs:
            # NOTE(cfrademan):
            # Generally headers have '-' not '_'. Also kwargs
            # cannot contain '-'.
            if kwargs[kwarg] is not None:
                header = kwarg.replace('_', '-')
                headers[header] = str(kwargs[kwarg])

        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)

            data = if_unicode_to_bytes(data)

        if isinstance(data, bytes):
            headers['Content-Length'] = str(len(data))

        cached = None
        if (_cache_engine and stream is False
                and method == 'GET' and data is None):
            if isinstance(params, dict):
                cache_params = list(orderdict(params).values())
            if isinstance(headers, dict):
                cache_headers = list(orderdict(headers).values())
            cache_key = (method, url, cache_params, cache_headers)
            cache_key = str(md5sum(pickle.dumps(cache_key)))
            cached = _cache_engine.load(cache_key)
            if cached is not None:
                cache_control = parse_cache_control_header(
                    cached.headers.get('Cache-Control'))
                max_age = cache_control.max_age
                date = cached.headers.get('Date')
                etag = cached.headers.get('Etag')
                date = utc(date)
                current = now()
                diff = (current - date).total_seconds()
                if cache_control.no_cache:
                    # If no-cache revalidate.
                    headers['If-None-Match'] = etag
                elif max_age and diff < int(max_age):
                    # If not expired, use cache.
                    _debug(method, url, params, data, headers,
                           cached.headers, cached.content,
                           cached.status_code, elapsed(), 'Memory')
                    return cached
                else:
                    # If expired, revalidate..
                    headers['If-None-Match'] = etag

        try:
            # NOTE(cfrademan): Using prepared requests, because we need
            # no Transfer-Encoding chunked, and expect Content-Length...
            # Chunked encoding is not well supported uploading to a
            # WSGI app.
            prepped = client._s.prepare_request(
                requests.Request(method.upper(),
                                 url,
                                 params=params,
                                 data=data,
                                 headers=headers))
            if 'Content-Length' in prepped.headers:
                if 'Transfer-Encoding' in prepped.headers:
                    del prepped.headers['Transfer-Encoding']
            response = Response(client._s.send(prepped, stream=stream))

            # 304 Not Modified: our cached copy is still valid.
            if (_cache_engine and cached is not None
                    and response.status_code == 304):
                _debug(method, url, params, data, headers,
                       cached.headers, cached.content,
                       cached.status_code, elapsed(), 'Validated (304)')
                return cached

            if response.status_code >= 400:
                if 'X-Expired-Token' in response.headers:
                    raise TokenExpiredError()
                try:
                    title = None
                    description = None
                    if ('json' in response.content_type.lower()
                            and 'error' in response.json):
                        error = response.json['error']
                        try:
                            title = error.get('title')
                            description = error.get('description')
                            if endpoint is not None:
                                title += " (%s)" % endpoint
                        except AttributeError:
                            if endpoint is not None:
                                description = " Endpoint: %s" % endpoint
                    else:
                        if endpoint is not None:
                            description = " Endpoint: %s" % endpoint
                    if stream is True:
                        _debug(method, url, params, data, headers,
                               response.headers, None,
                               response.status_code, elapsed())
                    else:
                        _debug(method, url, params, data, headers,
                               response.headers, response.content,
                               response.status_code, elapsed())
                    raise HTTPError(response.status_code, description,
                                    title)
                except HTTPClientContentDecodingError:
                    if endpoint is not None:
                        # BUG FIX: original never interpolated the
                        # endpoint ("description = 'Endpoint: %s'"),
                        # emitting the literal placeholder.
                        description = 'Endpoint: %s' % endpoint
                        raise HTTPError(
                            response.status_code,
                            description=description) from None
                    else:
                        raise HTTPError(response.status_code) from None

            # Store cacheable 200 GET responses.
            if _cache_engine and stream is False and method == 'GET':
                if response.status_code == 200:
                    cache_control = parse_cache_control_header(
                        response.headers.get('Cache-Control'))
                    if (not cache_control.no_store
                            and cache_control.max_age
                            and response.headers.get('Etag')
                            and response.headers.get('Date')
                            and data is None):
                        _cache_engine.store(cache_key, response, 604800)
        except requests.exceptions.InvalidHeader as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidHeader(e)
        except requests.exceptions.InvalidURL as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidURL(e)
        except requests.exceptions.InvalidSchema as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidSchema(e)
        except requests.exceptions.MissingSchema as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientMissingSchema(e)
        except requests.exceptions.ConnectionError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientConnectionError(
                "API Connection error to '%s' (%s)" % (
                    url,
                    endpoint,
                ))
        except requests.exceptions.ProxyError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientProxyError("API proxy error to '%s' (%s)" % (
                url,
                endpoint,
            ))
        except requests.exceptions.SSLError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientSSLError("API SSL error to '%s' (%s)" % (
                url,
                endpoint,
            ))
        except requests.exceptions.Timeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientTimeoutError(
                "API connection timeout to '%s' (%s)" % (
                    url,
                    endpoint,
                ))
        except requests.exceptions.ConnectTimeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientConnectTimeoutError(
                "API connect timeout to '%s' (%s)" % (
                    url,
                    endpoint,
                ))
        except requests.exceptions.ReadTimeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientReadTimeoutError(
                "API read timeout to '%s' (%s)" % (
                    url,
                    endpoint,
                ))
        except requests.exceptions.HTTPError as e:
            e = append_to_error(e, endpoint)
            raise HTTPError(e.response.status_code, e)

        if stream is True:
            _debug(method, url, params, data, headers,
                   response.headers, None,
                   response.status_code, elapsed())
        else:
            _debug(method, url, params, data, headers,
                   response.headers, response.content,
                   response.status_code, elapsed())

        return response