def update(self, req, resp, id):
    user = obj(req, tradius_user, sql_id=id, hide=('password',))
    if req.json.get('password'):
        user['password'] = md5sum(req.json['password'])
    user.commit()
    return user
def cache(expire, func, *args, **kwargs):
    global _cache_engine
    _cache_engine = Cache()

    # mem_args is used to build the reference id for the cache.
    mem_args = [object_name(func), ]

    # NOTE(cfrademan): This is important, we don't want object addresses,
    # types etc inside of the cache reference. We cannot memoize when the
    # args/kwargs provided to the function contain objects other than
    # str, int, float, bytes.
    args = list(args) + list(orderdict(kwargs).items())
    for arg in args:
        if isinstance(arg, (str, int, float, bytes,)):
            mem_args.append(arg)
        else:
            raise ValueError("Cache 'callable' not possible with" +
                             " args/kwargs containing values with types" +
                             " other than 'str', 'int', 'float', 'bytes'")

    # Create the actual key / reference id.
    key = md5sum(pickle.dumps(mem_args))

    cached = _cache_engine.load(key)
    if cached is not None:
        return cached

    result = func(*args, **kwargs)
    _cache_engine.store(key, result, expire)
    return result
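# --- Usage sketch (illustrative only, not part of the original module) ---
# Shows how cache() above could memoize an expensive call for 300 seconds,
# assuming the Cache() engine is shared/persistent across calls.
# 'expensive_lookup' and its argument are hypothetical; the key is built
# from the callable's name plus the primitive argument values, and passing
# anything other than str/int/float/bytes raises ValueError.
def _cache_usage_example():
    def expensive_lookup(key):
        return key.upper()

    first = cache(300, expensive_lookup, 'example')   # computed and stored
    second = cache(300, expensive_lookup, 'example')  # served from cache
    return first, second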
def create(self, req, resp):
    user = obj(req, calabiyau_subscriber, hide=('password',))
    self.pkg_set(req, user)
    if req.json.get('password'):
        user['password'] = md5sum(req.json['password'])
    user.commit()
    return user
def update(self, req, resp, id):
    user = obj(req, tradius_user, sql_id=id, hide=('password',))
    if req.json.get('password'):
        user['password'] = md5sum(req.json['password'])
    if req.json.get('enabled'):
        if user['enabled'] is False:
            disconnect(user['virtual_id'], user['username'])
    user.commit()
    return user
def update(self, req, resp, id):
    user = obj(req, calabiyau_subscriber, sql_id=id, hide=('password',))
    if req.json.get('password'):
        user['password'] = md5sum(req.json['password'])
    self.pkg_set(req, user)
    if req.json.get('enabled'):
        if user['enabled'] is False:
            pass
            # disconnect_user(user['virtual_id'],
            #                 user['username'])
    user.commit()
    return user
def etagger(*args):
    to_hash = b"".join([if_unicode_to_bytes(str(arg) or b'')
                        for arg in args])
    if to_hash != b'':
        return md5sum(to_hash)
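# --- Usage sketch (illustrative only, not part of the original module) ---
# etagger() above concatenates the string form of its arguments and returns
# an MD5 hex digest, or None when the combined input is empty. The values
# below are hypothetical; typically a modified timestamp and a row
# identifier would be combined into an entity tag.
def _etagger_usage_example():
    etag = etagger('2024-01-01 00:00:00', 42, 'user')
    no_etag = etagger('')  # empty input yields None
    return etag, no_etag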
def request(client, method, url, params={},
            data=None, headers={}, stream=False,
            **kwargs):
    with Timer() as elapsed:
        method = method.upper()
        headers = headers.copy()
        params = params.copy()

        try:
            _cache_engine = Cache()
        except NoContextError:
            _cache_engine = None

        try:
            if g.current_request.user_token:
                headers['X-Auth-Token'] = g.current_request.user_token
            if g.current_request.context_domain:
                headers['X-Domain'] = g.current_request.context_domain
            if g.current_request.context_tenant_id:
                headers['X-Tenant-Id'] = g.current_request.context_tenant_id
        except NoContextError:
            pass

        # Pass additional keyword arguments through as request headers.
        for kwarg in kwargs:
            headers[kwarg] = kwargs[kwarg]

        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)
            data = if_unicode_to_bytes(data)

        if isinstance(data, bytes):
            headers['Content-Length'] = str(len(data))

        cached = None
        if (_cache_engine and stream is False and
                method == 'GET' and data is None):
            if isinstance(params, dict):
                cache_params = list(orderdict(params).values())
            if isinstance(headers, dict):
                cache_headers = list(orderdict(headers).values())
            cache_key = (method, url, cache_params, cache_headers)
            cache_key = str(md5sum(pickle.dumps(cache_key)))
            cached = _cache_engine.load(cache_key)
            if cached is not None:
                cache_control = parse_cache_control_header(
                    cached.headers.get('Cache-Control'))
                max_age = cache_control.max_age
                date = cached.headers.get('Date')
                etag = cached.headers.get('Etag')
                date = utc(date)
                current = now()
                diff = (current - date).total_seconds()
                if cache_control.no_cache:
                    # If no-cache, revalidate.
                    headers['If-None-Match'] = etag
                elif max_age and diff < int(max_age):
                    # If not expired, use cache.
                    _debug(method, url, params, data, headers,
                           cached.headers, cached.content,
                           cached.status_code, elapsed(), 'Memory')
                    return cached
                else:
                    # If expired, revalidate.
                    headers['If-None-Match'] = etag

        try:
            response = Response(
                client._s.request(method.upper(),
                                  url,
                                  params=params,
                                  data=data,
                                  headers=headers,
                                  stream=stream))

            if (_cache_engine and cached is not None and
                    response.status_code == 304):
                _debug(method, url, params, data, headers,
                       cached.headers, cached.content,
                       cached.status_code, elapsed(), 'Validated (304)')
                return cached

            if response.status_code >= 400:
                try:
                    title = None
                    description = None
                    if 'error' in response.json:
                        error = response.json['error']
                        try:
                            title = error.get('title')
                            description = error.get('description')
                        except AttributeError:
                            pass
                    raise HTTPError(response.status_code, description, title)
                except HTTPClientContentDecodingError:
                    raise HTTPError(response.status_code)

            if _cache_engine and stream is False and method == 'GET':
                if response.status_code == 200:
                    cache_control = parse_cache_control_header(
                        response.headers.get('Cache-Control'))
                    if (not cache_control.no_store and
                            cache_control.max_age and
                            response.headers.get('Etag') and
                            response.headers.get('Date') and
                            data is None):
                        _cache_engine.store(cache_key, response, 604800)
        except requests.exceptions.InvalidHeader as e:
            raise HTTPClientInvalidHeader(e)
        except requests.exceptions.InvalidURL as e:
            raise HTTPClientInvalidURL(e)
        except requests.exceptions.InvalidSchema as e:
            raise HTTPClientInvalidSchema(e)
        except requests.exceptions.MissingSchema as e:
            raise HTTPClientMissingSchema(e)
        # NOTE: More specific requests exceptions are caught before their
        # base classes (ConnectionError / Timeout), otherwise these
        # handlers would be unreachable.
        except requests.exceptions.ConnectTimeout as e:
            raise HTTPClientConnectTimeoutError(e)
        except requests.exceptions.ReadTimeout as e:
            raise HTTPClientReadTimeoutError(e)
        except requests.exceptions.ProxyError as e:
            raise HTTPClientProxyError(e)
        except requests.exceptions.SSLError as e:
            raise HTTPClientSSLError(e)
        except requests.exceptions.Timeout as e:
            raise HTTPClientTimeoutError(e)
        except requests.exceptions.ConnectionError as e:
            raise HTTPClientConnectionError(e)
        except requests.exceptions.HTTPError as e:
            raise HTTPError(e.response.status_code, e)

        _debug(method, url, params, data, headers, response.headers,
               response.content, response.status_code, elapsed())

        return response
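# --- Usage sketch (illustrative only, not part of the original module) ---
# Demonstrates the caching behaviour of request() above under assumed
# conditions: 'api_client' is a hypothetical object exposing a
# requests.Session as '_s', and the URL is made up. A GET without a body
# is eligible for the in-memory cache; a later identical GET is either
# served from memory (within max-age) or revalidated with If-None-Match
# against the stored ETag.
def _request_usage_example(api_client):
    first = request(api_client, 'GET', 'https://api.example.tld/v1/users')
    again = request(api_client, 'GET', 'https://api.example.tld/v1/users')
    return first, again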
def create(self, req, resp):
    user = obj(req, tradius_user, hide=('password',))
    if req.json.get('password'):
        user['password'] = md5sum(req.json['password'])
    user.commit()
    return user
def request(client, method, url, params={},
            data=None, headers={}, stream=False,
            endpoint=None, **kwargs):
    if endpoint is None:
        endpoint = url

    with Timer() as elapsed:
        method = method.upper()
        headers = headers.copy()
        params = params.copy()

        try:
            _cache_engine = Cache()
        except NoContextError:
            _cache_engine = None

        for kwarg in kwargs:
            # NOTE(cfrademan):
            # Generally headers have '-' not '_'. Also kwargs
            # cannot contain '-'.
            if kwargs[kwarg] is not None:
                header = kwarg.replace('_', '-')
                headers[header] = str(kwargs[kwarg])

        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)
            data = if_unicode_to_bytes(data)

        if isinstance(data, bytes):
            headers['Content-Length'] = str(len(data))

        cached = None
        if (_cache_engine and stream is False and
                method == 'GET' and data is None):
            if isinstance(params, dict):
                cache_params = list(orderdict(params).values())
            if isinstance(headers, dict):
                cache_headers = list(orderdict(headers).values())
            cache_key = (method, url, cache_params, cache_headers)
            cache_key = str(md5sum(pickle.dumps(cache_key)))
            cached = _cache_engine.load(cache_key)
            if cached is not None:
                cache_control = parse_cache_control_header(
                    cached.headers.get('Cache-Control'))
                max_age = cache_control.max_age
                date = cached.headers.get('Date')
                etag = cached.headers.get('Etag')
                date = utc(date)
                current = now()
                diff = (current - date).total_seconds()
                if cache_control.no_cache:
                    # If no-cache, revalidate.
                    headers['If-None-Match'] = etag
                elif max_age and diff < int(max_age):
                    # If not expired, use cache.
                    _debug(method, url, params, data, headers,
                           cached.headers, cached.content,
                           cached.status_code, elapsed(), 'Memory')
                    return cached
                else:
                    # If expired, revalidate.
                    headers['If-None-Match'] = etag

        try:
            # response = Response(client._s.request(method.upper(),
            #                                       url,
            #                                       params=params,
            #                                       data=data,
            #                                       headers=headers,
            #                                       stream=stream))
            # NOTE(cfrademan): Using prepared requests, because we need to
            # avoid 'Transfer-Encoding: chunked' and send Content-Length
            # instead. Chunked encoding is not well supported when
            # uploading to a WSGI app.
            prepped = client._s.prepare_request(
                requests.Request(method.upper(),
                                 url,
                                 params=params,
                                 data=data,
                                 headers=headers))

            if 'Content-Length' in prepped.headers:
                if 'Transfer-Encoding' in prepped.headers:
                    del prepped.headers['Transfer-Encoding']

            response = Response(client._s.send(prepped, stream=stream))

            if (_cache_engine and cached is not None and
                    response.status_code == 304):
                _debug(method, url, params, data, headers,
                       cached.headers, cached.content,
                       cached.status_code, elapsed(), 'Validated (304)')
                return cached

            if response.status_code >= 400:
                if 'X-Expired-Token' in response.headers:
                    raise TokenExpiredError()
                try:
                    title = None
                    description = None
                    if ('json' in response.content_type.lower() and
                            'error' in response.json):
                        error = response.json['error']
                        try:
                            title = error.get('title')
                            description = error.get('description')
                            if title is not None and endpoint is not None:
                                title += " (%s)" % endpoint
                        except AttributeError:
                            if endpoint is not None:
                                description = " Endpoint: %s" % endpoint
                    else:
                        if endpoint is not None:
                            description = " Endpoint: %s" % endpoint

                    if stream is True:
                        _debug(method, url, params, data, headers,
                               response.headers, None,
                               response.status_code, elapsed())
                    else:
                        _debug(method, url, params, data, headers,
                               response.headers, response.content,
                               response.status_code, elapsed())

                    raise HTTPError(response.status_code, description, title)
                except HTTPClientContentDecodingError:
                    if endpoint is not None:
                        description = 'Endpoint: %s' % endpoint
                        raise HTTPError(response.status_code,
                                        description=description) from None
                    else:
                        raise HTTPError(response.status_code) from None

            if _cache_engine and stream is False and method == 'GET':
                if response.status_code == 200:
                    cache_control = parse_cache_control_header(
                        response.headers.get('Cache-Control'))
                    if (not cache_control.no_store and
                            cache_control.max_age and
                            response.headers.get('Etag') and
                            response.headers.get('Date') and
                            data is None):
                        _cache_engine.store(cache_key, response, 604800)
        except requests.exceptions.InvalidHeader as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidHeader(e)
        except requests.exceptions.InvalidURL as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidURL(e)
        except requests.exceptions.InvalidSchema as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidSchema(e)
        except requests.exceptions.MissingSchema as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientMissingSchema(e)
        # NOTE: More specific requests exceptions are caught before their
        # base classes (ConnectionError / Timeout), otherwise these
        # handlers would be unreachable.
        except requests.exceptions.ConnectTimeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientConnectTimeoutError(
                "API connect timeout to '%s' (%s)" % (url, endpoint,))
        except requests.exceptions.ReadTimeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientReadTimeoutError(
                "API read timeout to '%s' (%s)" % (url, endpoint,))
        except requests.exceptions.ProxyError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientProxyError(
                "API proxy error to '%s' (%s)" % (url, endpoint,))
        except requests.exceptions.SSLError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientSSLError(
                "API SSL error to '%s' (%s)" % (url, endpoint,))
        except requests.exceptions.Timeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientTimeoutError(
                "API connection timeout to '%s' (%s)" % (url, endpoint,))
        except requests.exceptions.ConnectionError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientConnectionError(
                "API Connection error to '%s' (%s)" % (url, endpoint,))
        except requests.exceptions.HTTPError as e:
            e = append_to_error(e, endpoint)
            raise HTTPError(e.response.status_code, e)

        if stream is True:
            _debug(method, url, params, data, headers, response.headers,
                   None, response.status_code, elapsed())
        else:
            _debug(method, url, params, data, headers, response.headers,
                   response.content, response.status_code, elapsed())

        return response
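# --- Usage sketch (illustrative only, not part of the original module) ---
# Shows the endpoint-aware request() above with keyword headers. Extra
# kwargs become header names ('x_auth_token' becomes the 'x-auth-token'
# header; HTTP header names are case-insensitive), and 'endpoint' only
# labels error messages, it does not change the URL. 'api_client' is a
# hypothetical object exposing a requests.Session as '_s'.
def _endpoint_request_usage_example(api_client):
    return request(api_client,
                   'POST',
                   'https://api.example.tld/v1/users',
                   data={'username': 'alice'},
                   endpoint='identity',
                   x_auth_token='token-value')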