def acct(msg):
    """Process a raw RADIUS accounting message (decoded-packet variant).

    Decodes the packet attributes from ``msg``, warns when the message is
    stale, then records session accounting and usage for the matched
    subscriber.

    Returns:
        None when the message is unusable, True when processing completed
        (or attributes were incomplete), False when the subscriber was
        not found.
    """
    try:
        pkt = msg['attributes']
    except KeyError:
        return
    try:
        pkt = decode_packet(pkt)
    except Exception:
        # Malformed packet -- nothing further we can do with it.
        return
    try:
        nas_session_id = pkt.get('Acct-Session-Id', [None])[0]
        unique_session_id = pkt.get('Acct-Unique-Session-Id')[0]
        status = pkt.get('Acct-Status-Type', [''])[0].lower()
        username = pkt.get('User-Name', [None])[0]
        client = pkt.get('Client-IP-Address')[0]
        nas = pkt.get('NAS-IP-Address', ['0.0.0.0'])[0]
    except (IndexError, TypeError):
        # BUGFIX: ``pkt.get(key)`` without a default returns None when the
        # attribute is absent, so ``None[0]`` raises TypeError -- which the
        # previous ``except IndexError`` did not catch and the worker
        # crashed. Treat a missing attribute like an empty value list.
        return True
    dt = utc(parse_datetime(msg.get('datetime', None)))
    diff = (now() - dt).total_seconds()
    if diff > 60:
        log.error('Processing radius accounting message older' +
                  ' than 60 seconds. Age(%s)' % diff)
    with db() as conn:
        with dbw() as connw:
            with conn.cursor() as crsr:
                user = get_user(crsr, client, nas, username)
                crsr.commit()
                if not user:
                    log.debug("user '%s' not found" % username)
                    return False
                input_octets, output_octets = do_acct(
                    connw, pkt, client, nas, nas_session_id,
                    unique_session_id, dt, user, status)
                usage(connw, pkt, client, nas, nas_session_id,
                      unique_session_id, user, input_octets,
                      output_octets, status)
                return True
def acct(msg):
    """Process a parsed RADIUS accounting message.

    Validates the required attributes, records session accounting and
    usage for the subscriber, and refreshes the pool-assigned IP when the
    subscriber has no static address.

    Returns:
        True on success, False when validation fails or the subscriber
        is unknown.
    """
    attrs = parse_fr(msg.get('fr', ()))
    status = attrs.get('Acct-Status-Type', 'start').lower()
    dt = utc(parse_datetime(msg.get('datetime', None)))
    age = (now() - dt).total_seconds()
    if age > 60:
        log.error('Processing radius accounting message older'
                  ' than 60 seconds. Age(%s)' % age)
    required = ['User-Name',
                'NAS-IP-Address',
                'Acct-Status-Type',
                'Acct-Session-Id',
                'Acct-Unique-Session-Id',
                'Acct-Input-Octets64',
                'Acct-Output-Octets64']
    if not require_attributes('accounting', attrs, required):
        return False
    with db() as conn:
        with dbw() as connw:
            user = get_user(conn, attrs['NAS-IP-Address'],
                            attrs['User-Name'])
            if not user:
                log.debug("user '%s' not found" % (attrs['User-Name'], ))
                return False
            rx_octets, tx_octets = do_acct(connw, attrs, dt, user, status)
            usage(connw, attrs, user, rx_octets, tx_octets, status)
            # Pool-managed subscribers get their IP assignment refreshed.
            if not user['static_ip4'] and user['pool_id']:
                update_ip(connw, user, attrs)
            return True
def validate(self):
    """Validate the currently loaded credentials.

    Raises AccessDeniedError when no credentials are loaded, or when the
    credentials carry an 'expire' timestamp that has passed (in which
    case the credentials are cleared first).
    """
    if not self.authenticated:
        raise AccessDeniedError("Credentials token missing") from None
    if 'expire' not in self._credentials:
        return
    if now() > utc(self._credentials['expire']):
        self.clear()
        raise AccessDeniedError('Credentials token expired') from None
def parse_token(self, token):
    """Parse and verify a serialized auth token.

    Wire format is ``<signature>!!!!<base64 json payload>``. The
    signature is verified via ``_check_token`` before the payload is
    decoded, and the token is rejected when its 'expire' timestamp has
    passed.

    Fixes: ``self._token_sig`` was assigned twice (the first value was a
    dead store immediately clobbered by the second), and expiry raised
    ``AccessDenied`` while every sibling method raises
    ``AccessDeniedError`` -- made consistent.
    """
    self._initial()
    token = if_unicode_to_bytes(token)
    signature, token = token.split(b'!!!!')
    # Verify the signature before trusting the payload; the raised
    # exception (if any) is the check's only useful outcome here.
    self._check_token(signature, token)
    self._token = js.loads(base64.b64decode(token))
    self._token_sig = signature
    utc_now = now()
    utc_expire = utc(self._token['expire'])
    if utc_now > utc_expire:
        raise AccessDeniedError('Token Expired')
def json(self):
    """Serialize the credentials (plus their signed token) as JSON.

    Raises AccessDeniedError when no credentials are loaded or when
    they have expired.
    """
    if not self.authenticated:
        raise AccessDeniedError("Credentials token missing")
    if now() > utc(self._credentials['expire']):
        raise AccessDeniedError('Auth Token Expired')
    payload = {'token': self.token}
    payload.update(self._credentials)
    return js.dumps(payload)
def token(self, token):
    """Load an existing serialized token after verifying its signature.

    Expects ``<signature>!!!!<base64 json payload>``; raises
    AccessDeniedError on a bad signature or an expired payload.
    """
    raw = if_unicode_to_bytes(token)
    signature, b64_payload = raw.split(b'!!!!')
    try:
        self._rsakey.verify(signature, b64_payload)
    except ValueError as err:
        raise AccessDeniedError('Invalid Auth Token. %s' % err)
    credentials = js.loads(base64.b64decode(b64_payload))
    if now() > utc(credentials['expire']):
        raise AccessDeniedError('Auth Token Expired')
    self._credentials = credentials
def token(self):
    """Return the serialized, signed credentials token.

    Format: ``<rsa signature>!!!!<base64 json credentials>``.

    Raises:
        AccessDeniedError: no credentials loaded or credentials expired.
        ValueError: serialized token exceeds the 1280 character limit.

    Fix: the oversize message claimed a 10KB limit while the check
    enforces 1280 characters; the message now matches the check.
    """
    if not self.authenticated:
        raise AccessDeniedError("Credentials token missing")
    utc_expire = utc(self._credentials['expire'])
    if now() > utc_expire:
        raise AccessDeniedError('Auth Token Expired')
    bytes_token = if_unicode_to_bytes(
        js.dumps(self._credentials, indent=None))
    b64_token = base64.b64encode(bytes_token)
    token_sig = if_unicode_to_bytes(self._rsakey.sign(b64_token))
    token = if_bytes_to_unicode(token_sig + b'!!!!' + b64_token)
    if len(token) > 1280:
        raise ValueError("Auth Token exceeded 1280 characters" +
                         " - Revise Assignments for credentials")
    return token
def request(client, method, url, params=None, data=None, headers=None,
            stream=False, **kwargs):
    """Perform an HTTP request through the client session, with caching.

    Propagates the current request context (auth token, domain, tenant)
    as headers when available, serializes dict/list bodies to JSON, and
    for plain GETs consults / populates the context cache honoring
    Cache-Control (max-age, no-cache revalidation via ETag).

    Args:
        client: API client whose ``_s`` requests session is used.
        method: HTTP method (case-insensitive).
        url: fully qualified request URL.
        params: optional query parameters (copied, never mutated).
        data: body; objects with a ``json`` property and dict/list are
            serialized automatically.
        headers: optional extra headers (copied, never mutated).
        stream: stream the response body; disables caching.
        **kwargs: additional headers, one per keyword.

    Returns:
        Response wrapper (possibly served from cache for GETs).

    Raises:
        HTTPError and the HTTPClient* family on failures.
    """
    with Timer() as elapsed:
        method = method.upper()
        # Mutable-default fix: copies keep caller dicts untouched either way.
        headers = {} if headers is None else headers.copy()
        params = {} if params is None else params.copy()
        try:
            _cache_engine = Cache()
        except NoContextError:
            _cache_engine = None
        try:
            if g.current_request.user_token:
                headers['X-Auth-Token'] = g.current_request.user_token
            if g.current_request.context_domain:
                headers['X-Domain'] = g.current_request.context_domain
            if g.current_request.context_tenant_id:
                headers['X-Tenant-Id'] = g.current_request.context_tenant_id
        except NoContextError:
            pass
        for kwarg in kwargs:
            # BUGFIX: previously assigned the entire kwargs dict as the
            # header value (headers[kwarg] = kwargs) instead of the
            # keyword's own value.
            headers[kwarg] = kwargs[kwarg]
        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)
            data = if_unicode_to_bytes(data)
        if isinstance(data, bytes):
            headers['Content-Length'] = str(len(data))
        cached = None
        if (_cache_engine and stream is False
                and method == 'GET' and data is None):
            if isinstance(params, dict):
                cache_params = list(orderdict(params).values())
            if isinstance(headers, dict):
                cache_headers = list(orderdict(headers).values())
            cache_key = (method, url, cache_params, cache_headers)
            cache_key = str(md5sum(pickle.dumps(cache_key)))
            cached = _cache_engine.load(cache_key)
            if cached is not None:
                cache_control = parse_cache_control_header(
                    cached.headers.get('Cache-Control'))
                max_age = cache_control.max_age
                date = cached.headers.get('Date')
                etag = cached.headers.get('Etag')
                date = utc(date)
                current = now()
                diff = (current - date).total_seconds()
                if cache_control.no_cache:
                    # If no-cache revalidate.
                    headers['If-None-Match'] = etag
                elif max_age and diff < int(max_age):
                    # If not expired, use cache.
                    _debug(method, url, params, data, headers,
                           cached.headers, cached.content,
                           cached.status_code, elapsed(), 'Memory')
                    return cached
                else:
                    # If expired, revalidate..
                    headers['If-None-Match'] = etag
        try:
            response = Response(
                client._s.request(method.upper(), url, params=params,
                                  data=data, headers=headers,
                                  stream=stream))
            if (_cache_engine and cached is not None
                    and response.status_code == 304):
                _debug(method, url, params, data, headers, cached.headers,
                       cached.content, cached.status_code, elapsed(),
                       'Validated (304)')
                return cached
            if response.status_code >= 400:
                try:
                    title = None
                    description = None
                    if 'error' in response.json:
                        error = response.json['error']
                        try:
                            title = error.get('title')
                            description = error.get('description')
                        except AttributeError:
                            pass
                    raise HTTPError(response.status_code, description,
                                    title)
                except HTTPClientContentDecodingError:
                    raise HTTPError(response.status_code)
            if _cache_engine and stream is False and method == 'GET':
                if response.status_code == 200:
                    cache_control = parse_cache_control_header(
                        response.headers.get('Cache-Control'))
                    if (not cache_control.no_store
                            and cache_control.max_age
                            and response.headers.get('Etag')
                            and response.headers.get('Date')
                            and data is None):
                        _cache_engine.store(cache_key, response, 604800)
        # BUGFIX: handlers reordered most-specific first. In requests,
        # ProxyError/SSLError/ConnectTimeout subclass ConnectionError and
        # ConnectTimeout/ReadTimeout subclass Timeout, so with the old
        # ordering those handlers were unreachable.
        except requests.exceptions.InvalidHeader as e:
            raise HTTPClientInvalidHeader(e)
        except requests.exceptions.InvalidURL as e:
            raise HTTPClientInvalidURL(e)
        except requests.exceptions.InvalidSchema as e:
            raise HTTPClientInvalidSchema(e)
        except requests.exceptions.MissingSchema as e:
            raise HTTPClientMissingSchema(e)
        except requests.exceptions.ProxyError as e:
            raise HTTPClientProxyError(e)
        except requests.exceptions.SSLError as e:
            raise HTTPClientSSLError(e)
        except requests.exceptions.ConnectTimeout as e:
            raise HTTPClientConnectTimeoutError(e)
        except requests.exceptions.ReadTimeout as e:
            raise HTTPClientReadTimeoutError(e)
        except requests.exceptions.Timeout as e:
            raise HTTPClientTimeoutError(e)
        except requests.exceptions.ConnectionError as e:
            raise HTTPClientConnectionError(e)
        except requests.exceptions.HTTPError as e:
            raise HTTPError(e.response.status_code, e)
        _debug(method, url, params, data, headers, response.headers,
               response.content, response.status_code, elapsed())
        return response
def usage(crsr, user):
    """Evaluate a subscriber's data allowance (package and topups).

    Args:
        crsr: open database cursor.
        user: subscriber row for the session being checked.

    Returns:
        0 -- all good, subscriber may continue.
        1 -- deactivate subscriber (expired/depleted).
    """
    user_id = user['id']
    utc_datetime = datetime.utcnow()
    # Hard package time-span expiry overrides everything else.
    if user['package_span'] and user['package_span'] > 0:
        if (user['package_expire']
                and utc(utc_datetime) > utc(user['package_expire'])):
            log.warning('Package expired (%s)' % user['username'])
            return 1
    if user:
        # IF DATA PLAN NOT UNCAPPED
        if user['plan'] == 'data':
            volume_used = user['volume_used']
            volume_used_bytes = user['volume_used_bytes']
            ######################
            # CHECK PACKAGE DATA #
            ######################
            package_volume_bytes = user['volume_gb'] * 1024 * 1024 * 1024
            if utc(user['volume_expire']) < utc(utc_datetime):
                if user['volume_repeat']:
                    # Auto-renewing window: treat as within allowance.
                    return 0
                else:
                    log.warning('Package data expired (%s)'
                                % user['username'])
            if (not volume_used
                    and volume_used_bytes > package_volume_bytes):
                log.warning('Package data depleted (%s)' % user['username'])
            elif (not volume_used
                    and volume_used_bytes <= package_volume_bytes):
                return 0
            ####################
            # CHECK TOPUP DATA #
            ####################
            crsr.execute(
                'SELECT * FROM calabiyau_topup' +
                ' WHERE user_id = %s' +
                ' ORDER BY creation_time asc' +
                ' FOR UPDATE',
                (user_id, ))
            topups = crsr.fetchall()
            for topup in topups:
                if topup['volume_gb']:
                    topup_volume_bytes = (topup['volume_gb'] *
                                          1024 * 1024 * 1024)
                else:
                    topup_volume_bytes = 0
                if utc(topup['volume_expire']) < utc(utc_datetime):
                    if topup['volume_repeat']:
                        log.warning('Topup renew (%s, %s, %s Gb, %s)' % (
                            user['username'],
                            topup['id'],
                            topup['volume_gb'],
                            topup['creation_time'], ))
                        # BUGFIX: was ``db.commit()`` -- ``db`` is not a
                        # parameter of this function (only the cursor is),
                        # so that call failed at runtime. Commit through
                        # the cursor as the rest of the codebase does.
                        crsr.commit()
                        return 0
                    else:
                        log.warning('Topup expired (%s, %s, %s Gb, %s)' % (
                            user['username'],
                            topup['id'],
                            topup['volume_gb'],
                            topup['creation_time'], ))
                else:
                    if volume_used_bytes < topup_volume_bytes:
                        return 0
                    else:
                        log.warning('Topup depleted (%s, %s, %s Gb, %s)' % (
                            user['username'],
                            topup['id'],
                            topup['volume_gb'],
                            topup['creation_time'], ))
            # No usable package or topup allowance remains.
            return 1
        else:
            # Non-data (uncapped) plans never deplete.
            return 0
def github(req, resp):
    """Background worker: mirror GitHub project metadata and build docs.

    Loops forever (5 minute sleep per pass): pulls teams, repos,
    branches/tags and events for the 'TachyonicProject' organisation,
    pickles them under the app path, and (re)builds Sphinx docs for any
    ref with new commits. Errors are reported via handle_error();
    Ctrl-C exits the loop.
    """
    root_path = g.app.path
    mkdir(joinpath(root_path, 'github'))
    mkdir(joinpath(root_path, 'docs'))
    try:
        projects = load(root_path + '/projects.pickle')
    except FileNotFoundError:
        # First run -- no cached project state yet.
        projects = {}
    username = g.app.config.get('github', 'username')
    password = g.app.config.get('github', 'password')
    tachyonic = GitHub(auth=(username, password))
    while True:
        try:
            ##########################
            # TEAMS AND MEMBERSHIPS  #
            ##########################
            teams = {}
            github_teams = tachyonic.teams('TachyonicProject')
            for github_team in github_teams:
                team = github_team['name']
                if team == "Author":
                    continue
                teams[team] = {}
                github_members = tachyonic.team_members(github_team['id'])
                for github_member in github_members:
                    login = github_member['login']
                    teams[team][login] = {}
                    teams[team][login]['github'] = github_member['html_url']
                    teams[team][login]['avatar'] = github_member['avatar_url']
            # NOTE(review): perms=664 is a decimal int, not octal 0o664 --
            # presumably a bug unless save() interprets it; confirm.
            save(teams, root_path + '/team.pickle', perms=664)
            save(tachyonic.projects('TachyonicProject'),
                 root_path + '/planning.pickle', perms=664)
            found = []
            log.info("Getting Repos")
            repos = tachyonic.repos('TachyonicProject')
            for repo in repos:
                name = repo['name']
                found.append(name)
                description = repo['description']
                if name not in projects:
                    projects[name] = {}
                log.info("Scanning Repo " + name)
                updated_at = utc(repo['updated_at'])
                created_at = utc(repo['created_at'])
                pushed_at = utc(repo['pushed_at'])
                # Refresh branch/tag metadata only when the repo changed
                # since the previous pass.
                if (('updated_at' not in projects[name]) or
                        ('updated_at' in projects[name] and
                         updated_at != projects[name]['updated_at']) or
                        ('pushed_at' not in projects[name]) or
                        ('pushed_at' in projects[name] and
                         pushed_at != projects[name]['pushed_at'])):
                    projects[name]['created_at'] = created_at
                    projects[name]['description'] = description
                    projects[name]['clone_url'] = repo['clone_url']
                    log.info("Getting Branches for %s" % name)
                    branches = tachyonic.branches('TachyonicProject', name)
                    branches = [branch['name'] for branch in branches]
                    projects[name]['branches'] = branches
                    log.info("Getting Tags for %s" % name)
                    tags = tachyonic.tags('TachyonicProject', name)
                    tags = [tag['name'] for tag in tags]
                    projects[name]['tags'] = tags
                    projects[name]['refs'] = version_order(branches + tags)
                    projects[name]['doc_refs'] = {}
                else:
                    log.info("Project %s Already up-to-date (%s)" % (
                        name, updated_at, ))
                projects[name]['updated_at'] = updated_at
                projects[name]['pushed_at'] = pushed_at
                if 'updated_doc' not in projects[name]:
                    projects[name]['updated_doc'] = {}
                #############################
                # BUILD DOCS PER BRANCH/TAG #
                #############################
                for ref in projects[name]['refs']:
                    current_datetime = now()
                    if ref in projects[name]['updated_doc']:
                        # Skip the build when no commits landed since the
                        # last successful doc build for this ref.
                        commits = tachyonic.commits(
                            'TachyonicProject', name, sha=ref,
                            since=format_iso8601(
                                projects[name]['updated_doc'][ref]))
                        if len(commits) == 0:
                            log.info("Documentation" +
                                     " '%s/%s'" % (name, ref, ) +
                                     " Already up-to-date (%s)"
                                     % updated_at)
                            continue
                    venv_dir = "%s/github/%s_%s" % (root_path, name, ref, )
                    doc_dir = "%s/docs/%s_%s" % (root_path, name, ref, )
                    src_path = venv_dir + '/' + name
                    log.info("Creating Virtual Environment '%s'" % venv_dir)
                    # Fresh isolated env per ref; wiped each build.
                    create_env(str(venv_dir), wipe=True,
                               site_packages=False)
                    clone(projects[name]['clone_url'], src_path)
                    if (exists(src_path + '/docs/source/conf.py') and
                            exists(src_path + '/docs/Makefile')):
                        log.info("Bulding '%s/%s'" % (name, ref, ))
                        projects[name]['doc_refs'][ref] = True
                        info = build_doc(root_path, venv_dir, src_path,
                                         ref, doc_dir, name)
                        updated(name, ref, info)
                    else:
                        projects[name]['doc_refs'][ref] = False
                        log.warning("No Sphinx docs found '%s/%s'" % (
                            name, ref, ))
                    projects[name]['updated_doc'][ref] = current_datetime
                save(projects, root_path + '/projects.pickle', perms=664)
            ##########################
            # RECENT MERGE EVENTS    #
            ##########################
            events = []
            events_ordered = []
            git_events = tachyonic.events('TachyonicProject')
            # Drop projects that vanished upstream; collect merge events
            # for the rest.
            # NOTE(review): the inner event loop does not filter events by
            # the project 'pj', so the same events appear to be appended
            # once per surviving project -- confirm intent.
            for pj in projects.copy():
                if pj not in found:
                    del projects[pj]
                else:
                    for event in git_events:
                        # 'type' shadows the builtin; unchanged here.
                        type = event['type']
                        payload = event['payload']
                        if type == 'PullRequestEvent':
                            pr = payload['pull_request']
                            merged = pr['merged']
                            base = pr['base']
                            ref = base['ref']
                            if merged is True:
                                merged_at = utc(pr['merged_at'])
                                events.append((merged_at, "Code Updated",
                                               "Repo " + pj + "/" + ref
                                               + ""))
            # Newest first, keep the latest ten.
            for item in sorted(events, key=operator.itemgetter(0)):
                events_ordered.append(item)
            events_ordered = list(reversed(events_ordered))
            save(events_ordered[0:10], root_path + '/events.pickle',
                 perms=664)
            save(projects, root_path + '/projects.pickle', perms=664)
            log.info('Infinite loop sleeping 5 Minutes')
            sleep(300)
        except KeyboardInterrupt:
            print("Control-C closed / Killed")
            break
        except ExecuteError as e:
            handle_error(e.title, e.description)
        except Exception as e:
            trace = str(traceback.format_exc())
            error = '%s: %s' % (object_name(e), e)
            handle_error(error, trace)
def do_acct(db, fr, dt, user, status):
    """Upsert session accounting and return the usage delta.

    Locks the session row (FOR UPDATE), ignores stale/duplicate packets,
    upserts the session with the absolute octet counters reported by the
    NAS, and adds the delta since the previous update to today's
    per-user accounting row.

    Args:
        db: writable database connection.
        fr: parsed RADIUS attributes (validated upstream).
        dt: timezone-aware message timestamp.
        user: subscriber row.
        status: lower-cased Acct-Status-Type.

    Returns:
        (input_octets, output_octets) delta since the previous update;
        (0, 0) when the message is out of date or the session stopped.
    """
    user_id = user['id']
    nas_session_id = fr['Acct-Session-Id']
    unique_session_id = fr['Acct-Unique-Session-Id']
    # Absolute 64-bit totals as reported by the NAS for this session.
    input_octets = int(fr.get('Acct-Input-Octets64', 0))
    output_octets = int(fr.get('Acct-Output-Octets64', 0))
    with db.cursor() as crsr:
        #######################################
        # GET USAGE IN & OUT FOR USER SESSION #
        #######################################
        # NOTE(review): 'acctstarttime' is selected twice -- harmless but
        # presumably one was meant to be another column; confirm.
        crsr.execute(
            "SELECT" +
            " id," +
            " acctstarttime," +
            " acctinputoctets," +
            " acctoutputoctets," +
            " acctuniqueid," +
            " acctstarttime," +
            " acctupdated," +
            " accttype" +
            " FROM calabiyau_session" +
            ' WHERE acctuniqueid = %s' +
            ' LIMIT 1' +
            ' FOR UPDATE',
            (unique_session_id, ))
        session = crsr.fetchone()
        # Discard out-of-order or duplicate packets: anything not newer
        # than the stored update, or arriving after the session stopped.
        if session and (utc(session['acctupdated']) >= dt or
                        session['accttype'] == 'stop'):
            crsr.commit()
            return (0, 0, )
        #############################################
        # CHECK IF ACCOUNTING FOR TODAY FOR USER_ID #
        #############################################
        # NOTE(review): result is never fetched -- this SELECT appears to
        # exist only to take a FOR UPDATE lock on today's accounting row;
        # confirm.
        crsr.execute(
            "SELECT" +
            " id" +
            " FROM calabiyau_accounting" +
            " WHERE user_id = %s" +
            " AND date(today) = date(now())" +
            " FOR UPDATE",
            (user_id, ))
        if (status == 'interim-update' or
                status == 'start' or
                status == 'stop'):
            ######################################################
            # CREATE/UPDATE SESSION WITH INPUT AND OUTPUT OCTETS #
            ######################################################
            crsr.execute(
                "INSERT INTO calabiyau_session" +
                " (id," +
                " user_id," +
                " acctsessionid," +
                " acctuniqueid," +
                " nasipaddress," +
                " nasportid," +
                " nasport," +
                " nasporttype," +
                " calledstationid," +
                " callingstationid," +
                " servicetype," +
                " framedprotocol," +
                " framedipaddress," +
                " acctinputoctets," +
                " acctoutputoctets," +
                " acctstarttime," +
                " acctupdated," +
                " processed," +
                " accttype)" +
                " VALUES" +
                " (uuid(), %s, %s, %s, %s, %s, %s, %s, %s," +
                " %s, %s, %s, %s, %s, %s, %s, %s, now(), %s)" +
                " ON DUPLICATE KEY UPDATE" +
                " acctsessionid = %s," +
                " nasipaddress = %s," +
                " nasportid = %s," +
                " nasport = %s," +
                " nasporttype = %s," +
                " calledstationid = %s," +
                " callingstationid = %s," +
                " servicetype = %s," +
                " framedprotocol = %s," +
                " framedipaddress = %s," +
                " acctinputoctets = %s," +
                " acctoutputoctets = %s," +
                " acctupdated = %s," +
                " processed = now()," +
                " accttype = %s",
                (
                    user_id,
                    nas_session_id,
                    unique_session_id,
                    fr['NAS-IP-Address'],
                    fr.get('NAS-Port-ID'),
                    fr.get('NAS-Port'),
                    fr.get('NAS-Port-Type'),
                    fr.get('Called-Station-Id'),
                    fr.get('Calling-Station-Id'),
                    fr.get('Service-Type'),
                    fr.get('Framed-Protocol'),
                    fr.get('Framed-IP-Address'),
                    input_octets,
                    output_octets,
                    dt,
                    dt,
                    status,
                    nas_session_id,
                    fr['NAS-IP-Address'],
                    fr.get('NAS-Port-ID'),
                    fr.get('NAS-Port'),
                    fr.get('NAS-Port-Type'),
                    fr.get('Called-Station-Id'),
                    fr.get('Calling-Station-Id'),
                    fr.get('Service-Type'),
                    fr.get('Framed-Protocol'),
                    fr.get('Framed-IP-Address'),
                    input_octets,
                    output_octets,
                    dt,
                    status, ))
            ####################################
            # RECORD USAGE IN ACCOUNTING TABLE #
            ####################################
            if not session:
                # IF NEW SESSION
                prev_acctinputoctets = 0
                prev_acctoutputoctets = 0
            else:
                # IF EXISTING SESSION
                # USE PREVIOUS VALUES TO DETERMINE NEW VALUES FOR TODAY
                prev_acctinputoctets = session['acctinputoctets']
                prev_acctoutputoctets = session['acctoutputoctets']
            # Convert absolute counters into a delta. When a counter
            # regressed (e.g. NAS counter reset) the raw totals are used
            # as-is -- presumably an intentional fallback; confirm.
            if (input_octets >= prev_acctinputoctets and
                    output_octets >= prev_acctoutputoctets):
                input_octets = (input_octets - prev_acctinputoctets)
                output_octets = (output_octets - prev_acctoutputoctets)
            # INSERT/UPDATE ACCOUNTING RECORD
            crsr.execute(
                'INSERT INTO calabiyau_accounting' +
                ' (id, user_id, today, acctinputoctets,' +
                ' acctoutputoctets)' +
                ' VALUES' +
                ' (uuid(), %s, curdate(), %s, %s)' +
                ' ON DUPLICATE KEY UPDATE' +
                ' acctinputoctets = acctinputoctets + %s,' +
                ' acctoutputoctets = acctoutputoctets + %s',
                (
                    user_id,
                    input_octets,
                    output_octets,
                    input_octets,
                    output_octets, ))
        crsr.commit()
        return (input_octets, output_octets, )
def usage(db, fr, user, input_octets=0, output_octets=0, status="start"):
    """Charge session usage against the subscriber's package/topups.

    Locks the subscriber and session rows, charges the combined octet
    delta against the package allowance first and then against topups
    (oldest first), renewing or deleting expired allowances, and flips
    the session context via applyctx() when the subscriber transitions
    between the 'good' (ctx 0) and 'deactivate' (ctx 1) states.

    # Return Values
    # 0 All good.
    # 1 Deactivate Subscriber
    """
    unique_session_id = fr['Acct-Unique-Session-Id']
    user_id = user['id']
    nas_secret = user['nas_secret']
    # Combined input/output usage for session
    combined = input_octets + output_octets
    utc_datetime = now()
    with db.cursor() as crsr:
        ####################
        # GET USER SESSION #
        ####################
        crsr.execute(
            "SELECT" +
            " id," +
            " ctx" +
            " FROM calabiyau_session" +
            ' WHERE acctuniqueid = %s' +
            ' LIMIT 1' +
            ' FOR UPDATE',
            (unique_session_id, ))
        session = crsr.fetchone()
        # NOTE(review): assumes the session row exists (do_acct upserted
        # it earlier in the flow); a missing row would raise TypeError.
        session_ctx = session['ctx']
        # Hard package time-span expiry overrides everything else.
        if user['package_span'] and user['package_span'] > 0:
            # NOTE(review): utc(None) is evaluated when package_expire is
            # NULL -- confirm utc() tolerates None here.
            if (utc(user['package_expire']) and
                    utc_datetime > utc(user['package_expire'])):
                if session_ctx != 1:
                    applyctx(crsr, user, 1, fr, nas_secret, status)
                crsr.commit()
                return 1
        # Lock the subscriber row so concurrent accounting updates
        # serialize on the usage counters.
        crsr.execute(
            'SELECT * FROM calabiyau_subscriber' +
            ' WHERE id = %s' +
            ' FOR UPDATE',
            (user_id, ))
        locked_user = crsr.fetchone()
        if user and locked_user:
            # IF DATA PLAN NOT UNCAPPED
            if user['plan'] == 'data':
                ######################
                # CHECK PACKAGE DATA #
                ######################
                volume_used_bytes = (locked_user['volume_used_bytes'] +
                                     combined)
                pkg_volume_used = locked_user['volume_used']
                if user['volume_gb']:
                    package_volume_bytes = (user['volume_gb'] *
                                            1024 * 1024 * 1024)
                else:
                    package_volume_bytes = 0
                if utc(locked_user['volume_expire']) < utc_datetime:
                    if user['volume_repeat']:
                        # Auto-renew: reset counters and start a new window.
                        log.info('Package data reloaded (%s)'
                                 % user['username'])
                        new_expire = calc_next_expire(user['volume_metric'],
                                                      user['volume_span'],
                                                      utc_datetime)
                        crsr.execute(
                            "UPDATE calabiyau_subscriber" +
                            " SET volume_expire = %s," +
                            " volume_used_bytes = 0," +
                            " volume_used = 0," +
                            " ctx = 0" +
                            " WHERE id = %s",
                            (
                                new_expire,
                                user['id'], ))
                        pkg_volume_used = 0
                        if session_ctx != 0:
                            applyctx(crsr, user, 0, fr, nas_secret, status)
                        crsr.commit()
                        return 0
                    else:
                        # NOTE(review): 'new_expire' is only assigned in
                        # the repeat branch above, so this statement
                        # raises UnboundLocalError when reached -- latent
                        # bug; correct value for volume_expire here is
                        # unclear, needs an owner decision.
                        crsr.execute(
                            "UPDATE calabiyau_subscriber" +
                            " SET volume_expire = %s," +
                            " volume_used_bytes = 0," +
                            " volume_used = 1," +
                            " ctx = 1" +
                            " WHERE id = %s",
                            (
                                new_expire,
                                user['id'], ))
                        pkg_volume_used = 1
                        log.info('Package data expired (%s)'
                                 % user['username'])
                if (not pkg_volume_used and
                        volume_used_bytes > package_volume_bytes):
                    # Package depleted: mark used and fall through to
                    # topups.
                    crsr.execute(
                        "UPDATE calabiyau_subscriber" +
                        " SET volume_used_bytes = 0," +
                        " volume_used = 1," +
                        " ctx = 1" +
                        " WHERE id = %s",
                        (user_id, ))
                    log.info('Package data depleted (%s)'
                             % user['username'])
                elif (not pkg_volume_used and
                        volume_used_bytes <= package_volume_bytes):
                    # Still within package allowance: charge and accept.
                    crsr.execute(
                        "UPDATE calabiyau_subscriber" +
                        " SET volume_used_bytes = " +
                        " volume_used_bytes + %s," +
                        " ctx = 0" +
                        " WHERE id = %s",
                        (
                            combined,
                            user_id, ))
                    if session_ctx != 0:
                        applyctx(crsr, user, 0, fr, nas_secret, status)
                    crsr.commit()
                    return 0
                ####################
                # CHECK TOPUP DATA #
                ####################
                crsr.execute(
                    'SELECT * FROM calabiyau_topup' +
                    ' WHERE user_id = %s' +
                    ' ORDER BY creation_time asc' +
                    ' FOR UPDATE',
                    (user_id, ))
                topups = crsr.fetchall()
                for topup in topups:
                    if topup['volume_gb']:
                        topup_volume_bytes = (topup['volume_gb'] *
                                              1024 * 1024 * 1024)
                    else:
                        topup_volume_bytes = 0
                    if utc(topup['volume_expire']) < utc_datetime:
                        if topup['volume_repeat']:
                            # Auto-renewing topup: roll the window forward
                            # and reset the usage counter.
                            log.auth('Topup renew (%s, %s Gb, %s)' % (
                                user['username'],
                                topup['volume_gb'],
                                topup['creation_time'], ))
                            new_expire = calc_next_expire(
                                topup['volume_metric'],
                                topup['volume_span'],
                                utc_datetime)
                            crsr.execute(
                                "UPDATE calabiyau_topup" +
                                " SET volume_expire = %s" +
                                " WHERE id = %s",
                                (
                                    new_expire,
                                    topup['id'], ))
                            crsr.execute(
                                "UPDATE calabiyau_subscriber" +
                                " SET volume_used_bytes = 0," +
                                " ctx = 0" +
                                " WHERE id = %s",
                                (user_id, ))
                            if session_ctx != 0:
                                applyctx(crsr, user, 0, fr, nas_secret,
                                         status)
                            crsr.commit()
                            return 0
                        else:
                            # Expired non-renewing topup is consumed and
                            # removed; loop continues to the next topup.
                            log.auth('Topup expired (%s, %s Gb, %s)' % (
                                user['username'],
                                topup['volume_gb'],
                                topup['creation_time'], ))
                            crsr.execute(
                                "UPDATE calabiyau_subscriber" +
                                " SET volume_used_bytes = 0," +
                                " ctx = 0" +
                                " WHERE id = %s",
                                (user_id, ))
                            crsr.execute(
                                'DELETE FROM' +
                                ' calabiyau_topup' +
                                ' WHERE id = %s',
                                (topup['id'], ))
                    else:
                        if volume_used_bytes < topup_volume_bytes:
                            # Current topup still has room: charge usage.
                            crsr.execute(
                                "UPDATE calabiyau_subscriber" +
                                " SET volume_used_bytes = " +
                                " volume_used_bytes + %s," +
                                " ctx = 0" +
                                " WHERE id = %s",
                                (
                                    combined,
                                    user_id, ))
                            if session_ctx != 0:
                                applyctx(crsr, user, 0, fr, nas_secret,
                                         status)
                            crsr.commit()
                            return 0
                        else:
                            # Depleted topup is consumed and removed.
                            log.auth('Topup depleted (%s, %s Gb, %s)' % (
                                user['username'],
                                topup['volume_gb'],
                                topup['creation_time'], ))
                            crsr.execute(
                                "UPDATE calabiyau_subscriber" +
                                " SET volume_used_bytes = 0," +
                                " ctx = 0" +
                                " WHERE id = %s",
                                (user_id, ))
                            crsr.execute(
                                'DELETE FROM' +
                                ' calabiyau_topup' +
                                ' WHERE id = %s',
                                (topup['id'], ))
                # No allowance left: deactivate.
                if session_ctx != 1:
                    applyctx(crsr, user, 1, fr, nas_secret, status)
                crsr.commit()
                return 1
            else:
                # Non-data (uncapped) plans are always good.
                if session_ctx != 0:
                    applyctx(crsr, user, 0, fr, nas_secret, status)
                crsr.commit()
                return 0
        # Subscriber row vanished: deactivate.
        if session_ctx != 1:
            applyctx(crsr, user, 1, fr, nas_secret, status)
        crsr.commit()
        return 1
def request(client, method, url, params=None, data=None, headers=None,
            stream=False, endpoint=None, **kwargs):
    """Perform an HTTP request through the client session, with caching.

    Uses a prepared request (so Content-Length is sent instead of chunked
    Transfer-Encoding), serializes dict/list bodies to JSON, and for
    plain GETs consults / populates the context cache honoring
    Cache-Control (max-age, no-cache revalidation via ETag).

    Args:
        client: API client whose ``_s`` requests session is used.
        method: HTTP method (case-insensitive).
        url: fully qualified request URL.
        params: optional query parameters (copied, never mutated).
        data: body; objects with a ``json`` property and dict/list are
            serialized automatically.
        headers: optional extra headers (copied, never mutated).
        stream: stream the response body; disables caching.
        endpoint: logical endpoint name for error messages (defaults to
            the url, so it is never None past this point).
        **kwargs: extra headers; underscores in names become dashes.

    Returns:
        Response wrapper (possibly served from cache for GETs).

    Raises:
        TokenExpiredError, HTTPError and the HTTPClient* family.
    """
    if endpoint is None:
        endpoint = url
    with Timer() as elapsed:
        method = method.upper()
        # Mutable-default fix: copies keep caller dicts untouched either way.
        headers = {} if headers is None else headers.copy()
        params = {} if params is None else params.copy()
        try:
            _cache_engine = Cache()
        except NoContextError:
            _cache_engine = None
        for kwarg in kwargs:
            # NOTE(cfrademan):
            # Generally headers have '-' not '_'. Also kwargs
            # cannot contain '-'.
            if kwargs[kwarg] is not None:
                header = kwarg.replace('_', '-')
                headers[header] = str(kwargs[kwarg])
        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)
            data = if_unicode_to_bytes(data)
        if isinstance(data, bytes):
            headers['Content-Length'] = str(len(data))
        cached = None
        if (_cache_engine and stream is False
                and method == 'GET' and data is None):
            if isinstance(params, dict):
                cache_params = list(orderdict(params).values())
            if isinstance(headers, dict):
                cache_headers = list(orderdict(headers).values())
            cache_key = (method, url, cache_params, cache_headers)
            cache_key = str(md5sum(pickle.dumps(cache_key)))
            cached = _cache_engine.load(cache_key)
            if cached is not None:
                cache_control = parse_cache_control_header(
                    cached.headers.get('Cache-Control'))
                max_age = cache_control.max_age
                date = cached.headers.get('Date')
                etag = cached.headers.get('Etag')
                date = utc(date)
                current = now()
                diff = (current - date).total_seconds()
                if cache_control.no_cache:
                    # If no-cache revalidate.
                    headers['If-None-Match'] = etag
                elif max_age and diff < int(max_age):
                    # If not expired, use cache.
                    _debug(method, url, params, data, headers,
                           cached.headers, cached.content,
                           cached.status_code, elapsed(), 'Memory')
                    return cached
                else:
                    # If expired, revalidate..
                    headers['If-None-Match'] = etag
        try:
            # NOTE(cfrademan): Using prepared requests, because we need to
            # no Transfer Encoding chunked, and expect Content-Length...
            # Chunked encoding is not well supported uploading to WSGI app.
            prepped = client._s.prepare_request(
                requests.Request(method.upper(), url, params=params,
                                 data=data, headers=headers))
            if 'Content-Length' in prepped.headers:
                if 'Transfer-Encoding' in prepped.headers:
                    del prepped.headers['Transfer-Encoding']
            response = Response(client._s.send(prepped, stream=stream))
            if (_cache_engine and cached is not None
                    and response.status_code == 304):
                _debug(method, url, params, data, headers, cached.headers,
                       cached.content, cached.status_code, elapsed(),
                       'Validated (304)')
                return cached
            if response.status_code >= 400:
                if 'X-Expired-Token' in response.headers:
                    raise TokenExpiredError()
                try:
                    title = None
                    description = None
                    if ('json' in response.content_type.lower()
                            and 'error' in response.json):
                        error = response.json['error']
                        try:
                            title = error.get('title')
                            description = error.get('description')
                            # BUGFIX: appending to a missing title raised
                            # TypeError (None + str); only append when a
                            # title was supplied.
                            if title is not None and endpoint is not None:
                                title += " (%s)" % endpoint
                        except AttributeError:
                            if endpoint is not None:
                                description = " Endpoint: %s" % endpoint
                    else:
                        if endpoint is not None:
                            description = " Endpoint: %s" % endpoint
                    if stream is True:
                        _debug(method, url, params, data, headers,
                               response.headers, None,
                               response.status_code, elapsed())
                    else:
                        _debug(method, url, params, data, headers,
                               response.headers, response.content,
                               response.status_code, elapsed())
                    raise HTTPError(response.status_code, description,
                                    title)
                except HTTPClientContentDecodingError:
                    if endpoint is not None:
                        # BUGFIX: the endpoint was never interpolated
                        # ('Endpoint: %s' had no format argument).
                        description = 'Endpoint: %s' % endpoint
                        raise HTTPError(response.status_code,
                                        description=description) from None
                    else:
                        raise HTTPError(response.status_code) from None
            if _cache_engine and stream is False and method == 'GET':
                if response.status_code == 200:
                    cache_control = parse_cache_control_header(
                        response.headers.get('Cache-Control'))
                    if (not cache_control.no_store
                            and cache_control.max_age
                            and response.headers.get('Etag')
                            and response.headers.get('Date')
                            and data is None):
                        _cache_engine.store(cache_key, response, 604800)
        # BUGFIX: handlers reordered most-specific first. In requests,
        # ProxyError/SSLError/ConnectTimeout subclass ConnectionError and
        # ConnectTimeout/ReadTimeout subclass Timeout, so with the old
        # ordering those handlers were unreachable.
        except requests.exceptions.InvalidHeader as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidHeader(e)
        except requests.exceptions.InvalidURL as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidURL(e)
        except requests.exceptions.InvalidSchema as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidSchema(e)
        except requests.exceptions.MissingSchema as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientMissingSchema(e)
        except requests.exceptions.ProxyError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientProxyError("API proxy error to '%s' (%s)" % (
                url, endpoint, ))
        except requests.exceptions.SSLError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientSSLError("API SSL error to '%s' (%s)" % (
                url, endpoint, ))
        except requests.exceptions.ConnectTimeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientConnectTimeoutError(
                "API connect timeout to '%s' (%s)" % (
                    url, endpoint, ))
        except requests.exceptions.ReadTimeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientReadTimeoutError(
                "API read timeout to '%s' (%s)" % (
                    url, endpoint, ))
        except requests.exceptions.Timeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientTimeoutError(
                "API connection timeout to '%s' (%s)" % (
                    url, endpoint, ))
        except requests.exceptions.ConnectionError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientConnectionError(
                "API Connection error to '%s' (%s)" % (
                    url, endpoint, ))
        except requests.exceptions.HTTPError as e:
            e = append_to_error(e, endpoint)
            raise HTTPError(e.response.status_code, e)
        if stream is True:
            _debug(method, url, params, data, headers, response.headers,
                   None, response.status_code, elapsed())
        else:
            _debug(method, url, params, data, headers, response.headers,
                   response.content, response.status_code, elapsed())
        return response