def on_request_head(self, req):
    """Serve a static file for the requested URL path.

    Strips the query string and fragment, resolves the remaining path
    under the configured document root, serves ``index.html`` for
    directories, and replies with the canned 404 when the target does
    not exist or escapes the root.
    """
    url = str(req.url)
    # Only the path selects the file; drop fragment first, then query.
    path = url.split('#', 1)[0].split('?', 1)[0]
    if path.startswith('/'):
        path = path[1:]

    root = os.path.abspath(self._root)
    target_path = os.path.abspath(os.path.join(root, path))
    # Security: reject request paths (e.g. containing '..') that resolve
    # outside the document root instead of serving arbitrary files.
    if target_path != root and not target_path.startswith(root + os.sep):
        return filtering.reply(_NOT_FOUND)

    if os.path.isdir(target_path):
        target_path = os.path.join(target_path, 'index.html')
    if not os.path.exists(target_path):
        return filtering.reply(_NOT_FOUND)

    resp = HttpResponse()
    resp.version = b'1.1'
    resp.status = '200 OK'
    resp.header('Server').values.append('pyrox/{}'.format(VERSION))
    # Binary mode: file content is streamed as raw bytes, never decoded
    # (text mode would corrupt images/archives and break on Python 3).
    fin = open(target_path, 'rb')
    return filtering.reply(resp, fin)
def start_response(status, headers):
    """WSGI-style start_response: build an HttpResponse and reject with it.

    :param status: HTTP status line, e.g. '200 OK'
    :param headers: iterable of (name, value) header pairs
    """
    resp = HttpResponse()
    resp.status = status
    # Plain loop instead of a list comprehension run only for its side
    # effects (the comprehension built and discarded a throwaway list).
    for name, value in headers:
        resp.header(name).values.append(value)
    return reject(resp)
def start_response(status, headers):
    """WSGI-style start_response hook: reject with the described response.

    :param status: HTTP status line, e.g. '200 OK'
    :param headers: iterable of (name, value) header pairs
    """
    resp = HttpResponse()
    resp.status = status
    # A side-effect-only list comprehension is an idiom violation; use a
    # regular for loop to append each header value.
    for hdr_name, hdr_value in headers:
        resp.header(hdr_name).values.append(hdr_value)
    return reject(resp)
class UpstreamHandler(ProxyHandler):
    """
    This proxy handler manages data coming from upstream of the proxy.
    This data usually comes from the origin service or it may come from
    another proxy.
    """

    def __init__(self, downstream, upstream, filter_pl):
        super(UpstreamHandler, self).__init__(filter_pl)
        # downstream: connection back to the requesting client.
        # upstream: connection to the origin service (or next proxy).
        self.downstream = downstream
        self.upstream = upstream

    def on_http_version(self, major, minor):
        # First parser event of a response: start a fresh message object.
        self.response = HttpResponse()
        self.response.version = '{}.{}'.format(major, minor)

    def on_status(self, status_code):
        self.response.status_code = str(status_code)

    def on_header_value(self, value):
        # current_header_field is expected to be set by an earlier
        # on_header_field callback (not visible in this chunk).
        self.response.header(self.current_header_field).values.append(value)

    def on_headers_complete(self):
        # Let the filter pipeline inspect the response head. A rejection
        # swaps in the filter's response and suppresses body streaming;
        # otherwise the head is committed downstream immediately.
        action = self.filter_pl.on_response(self.response)
        if action.is_rejecting():
            self.rejected = True
            self.response = action.response
        else:
            self.downstream.write(self.response.to_bytes())

    def on_body(self, bytes, length, is_chunked):
        # Rejections simply discard the body
        if self.rejected:
            return
        write(self.downstream, bytes, is_chunked)

    def on_message_complete(self, is_chunked, should_keep_alive):
        if self.rejected:
            # Rejections do not stream the body - they discard it, therefore
            # we have to commit the head here.
            if should_keep_alive == 0:
                self.downstream.write(
                    self.response.to_bytes(),
                    callback=self.upstream.close)
            else:
                self.downstream.write(self.response.to_bytes())
        elif is_chunked != 0:
            if should_keep_alive == 0:
                # Finish the last chunk, then tear down the upstream link.
                self.downstream.write(
                    b'0\r\n\r\n',
                    callback=self.upstream.close)
            else:
                self.downstream.write(b'0\r\n\r\n')
        elif should_keep_alive == 0:
            self.upstream.close()
class DownstreamProxyHandler(ProxyHandler):
    """
    This proxy handler manages data coming from downstream of the proxy.
    This data comes from the client initiating the request against the
    proxy.
    """

    def __init__(self, filter_chain, downstream, upstream, upstream_host):
        super(DownstreamProxyHandler, self).__init__(filter_chain, upstream)
        self.downstream = downstream
        self.upstream = upstream
        # Host header value substituted so the origin sees its own name.
        self.upstream_host = upstream_host

    def on_req_method(self, method):
        # First parser event of a request: start fresh message objects.
        self.request = HttpRequest()
        self.response = HttpResponse()
        self.request.method = method

    def on_req_path(self, url):
        self.request.url = url

    def on_http_version(self, major, minor):
        self.request.version = '{}.{}'.format(major, minor)

    def on_header_value(self, value):
        # Special case for host
        # NOTE(review): is_host() is handed the header *value* here; verify
        # it isn't meant to test current_header_field (the header name).
        if is_host(value):
            header = self.request.header(self.current_header_field)
            header.values.append(self.upstream_host)
        else:
            header = self.request.header(self.current_header_field)
            header.values.append(value)

    def on_headers_complete(self):
        # Give the filter chain a chance to reject before the request head
        # is forwarded upstream.
        action = self.filter_chain.on_request(self.request)
        if action.is_rejecting():
            self.rejected = True
            self.response = action.response
        else:
            self.upstream.write(self.request.to_bytes())

    def on_message_complete(self, is_chunked, should_keep_alive):
        if self.rejected:
            # Rejections do not stream the body - they discard it, therefore
            # we have to commit the head here.
            if should_keep_alive == 0:
                self.downstream.write(
                    self.response.to_bytes(),
                    callback=self.upstream.close)
            else:
                self.downstream.write(self.response.to_bytes())
        elif is_chunked != 0:
            # Finish the last chunk.
            self.upstream.write(b'0\r\n\r\n')
def on_message_complete(self, is_chunked, keep_alive):
    """Finish the current response and schedule the right follow-up action."""
    self._upstream.handle.disable_reading()
    if keep_alive:
        # Reset message state for the next exchange and keep the client
        # connection reading; otherwise the upstream link is closed.
        self._http_msg = HttpResponse()
        on_done = self._downstream.handle.resume_reading
    else:
        on_done = self._upstream.close

    if self._intercepted:
        # Serialize our message back to the client.
        self._downstream.write(self._http_msg.to_bytes(), on_done)
        return
    if is_chunked or self._chunked:
        # Terminate the chunked stream with a zero-length chunk.
        self._downstream.write(_CHUNK_CLOSE, on_done)
        return
    on_done()
def on_message_complete(self, is_chunked, keep_alive):
    """Parser callback: the upstream response has been fully received."""
    # Default follow-up: tear down the upstream connection.
    callback = self._upstream.close
    self._upstream.handle.disable_reading()

    if keep_alive:
        # NOTE(review): _http_msg is replaced *before* the intercepted
        # write below, so that branch serializes a fresh, empty response
        # on keep-alive connections — confirm this is intended.
        self._http_msg = HttpResponse()
        callback = self._downstream.handle.resume_reading

    if self._intercepted:
        # Serialize our message to them
        self._downstream.write(self._http_msg.to_bytes(), callback)
    elif is_chunked or self._chunked:
        # Finish the last chunk.
        self._downstream.write(_CHUNK_CLOSE, callback)
    else:
        callback()
def __init__(self):
    """Load configuration and wire up the Redis cache and Keystone client."""
    # Canned 401 sent back whenever token validation fails.
    rejection = HttpResponse()
    rejection.status = '401 Unauthorized'
    rejection.header('Content-Length').values.append('0')
    self.reject_response = rejection

    self.config = load_pyrox_config()

    redis_cfg = self.config.redis
    self.redis = redis.StrictRedis(
        host=redis_cfg.host,
        port=redis_cfg.port,
        db=redis_cfg.db)

    keystone_cfg = self.config.keystone
    self.admin_client = KeystoneClient(
        token=keystone_cfg.auth_token,
        timeout=keystone_cfg.timeout,
        endpoint=keystone_cfg.endpoint,
        insecure=keystone_cfg.insecure)
import os
import pyrox.filtering as filtering
from pyrox.http import HttpResponse
from pyrox.about import VERSION

# Server identification string advertised in the 'Server' header.
_VERSION_STR = 'pyrox/{}'.format(VERSION)

# Canned 404 response reused for every miss.
_NOT_FOUND = HttpResponse()
_NOT_FOUND.version = b'1.1'
_NOT_FOUND.status = '404 Not Found'
_NOT_FOUND.header('Server').values.append(_VERSION_STR)


class WebServer(filtering.HttpFilter):
    """Static file server implemented as a pyrox HTTP filter.

    Maps request URL paths onto files beneath a document root.
    """

    def __init__(self, root):
        # Document root that request paths are resolved against.
        self._root = root

    @filtering.handles_request_head
    def on_request_head(self, req):
        # NOTE(review): this method appears truncated in this chunk; only
        # the path extraction is visible here.
        url = str(req.url)
        frag_split = url.split('#', 2)
        query_split = frag_split[0].split('?', 2)
        path = query_split[0]
        if path.startswith('/'):
            path = path[1:]
to the request
        """
        return pass_event()

    def on_response(self, response_message):
        """
        on_response will accept an HttpResponse object and implement
        the logic that will define the FilterActions to be applied to
        the request
        """
        return pass_event()


"""
Default return object. This should be configurable.
"""
_DEFAULT_REJECT_RESP = HttpResponse()
_DEFAULT_REJECT_RESP.version = b'1.1'
# NOTE(review): an integer status_code is set here, while sibling versions
# of this module assign a status string — confirm which attribute the
# serializer expects.
_DEFAULT_REJECT_RESP.status_code = 400
_DEFAULT_REJECT_RESP.header('Content-Length').values.append('0')

"""
Default filter action singletons.
"""
_DEFAULT_PASS_ACTION = FilterAction(NEXT_FILTER)
_DEFAULT_CONSUME_ACTION = FilterAction(CONSUME)


def consume():
    """
    Consumes the event and does not allow any further downstream filters
    to see it. This effectively halts execution of the filter chain but
    leaves the
def on_req_method(self, method):
    """Begin a new exchange: allocate fresh request/response messages."""
    request = HttpRequest()
    self.request = request
    self.response = HttpResponse()
    request.method = method
ResponseParser, ParserDelegate)
import traceback

_LOG = get_logger(__name__)

"""
String representing a 0 length HTTP chunked encoding chunk.
"""
_CHUNK_CLOSE = b'0\r\n\r\n'

"""
Default return object on error. This should be configurable.
"""
# Canned 502 reply used when the upstream interaction fails.
_BAD_GATEWAY_RESP = HttpResponse()
_BAD_GATEWAY_RESP.version = b'1.1'
_BAD_GATEWAY_RESP.status = '502 Bad Gateway'
_BAD_GATEWAY_RESP.header('Server').values.append('pyrox/{}'.format(VERSION))
_BAD_GATEWAY_RESP.header('Content-Length').values.append('0')

"""
Default return object on no route or upstream not responding. This
should be configurable.
"""
# Canned 503 reply used when no route exists or upstream is unreachable.
_UPSTREAM_UNAVAILABLE = HttpResponse()
_UPSTREAM_UNAVAILABLE.version = b'1.1'
_UPSTREAM_UNAVAILABLE.status = '503 Service Unavailable'
_UPSTREAM_UNAVAILABLE.header('Server').values.append('pyrox/{}'.format(VERSION))
_UPSTREAM_UNAVAILABLE.header('Content-Length').values.append('0')
class UpstreamHandler(ProxyHandler):
    """
    This proxy handler manages data coming from upstream of the proxy.
    This data usually comes from the origin service or it may come from
    another proxy.
    """

    def __init__(self, downstream, upstream, filter_pl, request):
        # Seed the base class with an empty response the parser fills in.
        super(UpstreamHandler, self).__init__(filter_pl, HttpResponse())
        self._downstream = downstream
        self._upstream = upstream
        # Originating request, handed to response filters for context.
        self._request = request

    def on_status(self, status_code):
        self._http_msg.status = str(status_code)

    def on_headers_complete(self):
        action = self._filter_pl.on_response_head(self._http_msg, self._request)

        # If we are intercepting the response body do some negotiation
        if self._filter_pl.intercepts_resp_body():

            # If there's a content length, negotiate the transfer encoding
            # (filters may change the body size, so switch to chunked).
            if self._http_msg.get_header('content-length'):
                self._chunked = True
                self._http_msg.remove_header('content-length')
                self._http_msg.remove_header('transfer-encoding')

                self._http_msg.header('transfer-encoding').values.append('chunked')

        if action.is_rejecting():
            self._intercepted = True
            self._response_tuple = action.payload
        else:
            self._downstream.write(self._http_msg.to_bytes())

    def on_body(self, bytes, length, is_chunked):
        # Rejections simply discard the body
        if not self._intercepted:
            accumulator = AccumulationStream()
            data = bytes

            self._filter_pl.on_response_body(data, accumulator, self._request)

            # Filters may have rewritten the chunk into the accumulator.
            if accumulator.size() > 0:
                data = accumulator.bytes

            # Hold up on the upstream side until we're done sending this chunk
            self._upstream.handle.disable_reading()

            # When we write to the stream set the callback to resume
            # reading from upstream.
            _write_to_stream(
                self._downstream,
                data,
                is_chunked or self._chunked,
                self._upstream.handle.resume_reading)

    def on_message_complete(self, is_chunked, keep_alive):
        # Default follow-up: tear down the upstream connection.
        callback = self._upstream.close
        self._upstream.handle.disable_reading()

        if keep_alive:
            # NOTE(review): _http_msg is replaced *before* the intercepted
            # write below, so that branch serializes a fresh, empty
            # response on keep-alive connections — confirm intended.
            self._http_msg = HttpResponse()
            callback = self._downstream.handle.resume_reading

        if self._intercepted:
            # Serialize our message to them
            self._downstream.write(self._http_msg.to_bytes(), callback)
        elif is_chunked or self._chunked:
            # Finish the last chunk.
            self._downstream.write(_CHUNK_CLOSE, callback)
        else:
            callback()
request_func._handles_response_body = True
    return request_func


class HttpFilter(object):
    """
    HttpFilter is a marker class that may be utilized for dynamic
    gathering of filter logic.
    """
    pass


"""
Default return object. This should be configurable.
"""
# Canned 400 reply used when a filter rejects without its own response.
_DEFAULT_REJECT_RESP = HttpResponse()
_DEFAULT_REJECT_RESP.version = b'1.1'
_DEFAULT_REJECT_RESP.status = '400 Bad Request'
_DEFAULT_REJECT_RESP.header('Content-Length').values.append('0')

"""
Default filter action singletons.
"""
_DEFAULT_PASS_ACTION = FilterAction(NEXT_FILTER)
_DEFAULT_CONSUME_ACTION = FilterAction(CONSUME)


def consume():
    """
    Consumes the event and does not allow any further downstream filters
    to see it. This effectively halts execution of the filter chain but
    leaves the request to pass through the proxy.
def __init__(self, downstream, upstream, filter_pl, request):
    """Wire this handler between the client and origin connections.

    The base class is seeded with an empty HttpResponse that the parser
    callbacks populate as the upstream reply arrives.
    """
    super(UpstreamHandler, self).__init__(filter_pl, HttpResponse())
    self._request = request
    self._upstream = upstream
    self._downstream = downstream
def on_http_version(self, major, minor):
    """Record the peer's HTTP version on a freshly allocated response."""
    response = HttpResponse()
    response.version = '{}.{}'.format(major, minor)
    self.response = response
class UpstreamHandler(ProxyHandler):
    """
    This proxy handler manages data coming from upstream of the proxy.
    This data usually comes from the origin service or it may come from
    another proxy.
    """

    def __init__(self, downstream, upstream, filter_pl):
        # Seed the base class with an empty response the parser fills in.
        super(UpstreamHandler, self).__init__(filter_pl, HttpResponse())
        self._downstream = downstream
        self._upstream = upstream

    def on_status(self, status_code):
        self._http_msg.status = str(status_code)

    def on_headers_complete(self):
        action = self._filter_pl.on_response_head(self._http_msg)

        # If we are intercepting the response body do some negotiation
        if self._filter_pl.intercepts_resp_body():

            # If there's a content length, negotiate the transfer encoding
            # (filters may change the body size, so switch to chunked).
            if self._http_msg.get_header('content-length'):
                self._chunked = True
                self._http_msg.remove_header('content-length')
                self._http_msg.remove_header('transfer-encoding')

                self._http_msg.header('transfer-encoding').values.append('chunked')

        if action.is_rejecting():
            self._intercepted = True
            self._response_tuple = action.payload
        else:
            self._downstream.write(self._http_msg.to_bytes())

    def on_body(self, bytes, length, is_chunked):
        # Rejections simply discard the body
        if not self._intercepted:
            accumulator = AccumulationStream()
            data = bytes

            self._filter_pl.on_response_body(data, accumulator)

            # Filters may have rewritten the chunk into the accumulator.
            if accumulator.size() > 0:
                data = accumulator.bytes

            # Hold up on the upstream side until we're done sending this chunk
            self._upstream.handle.disable_reading()

            # When we write to the stream set the callback to resume
            # reading from upstream.
            _write_to_stream(
                self._downstream,
                data,
                is_chunked or self._chunked,
                self._upstream.handle.resume_reading)

    def on_message_complete(self, is_chunked, keep_alive):
        # Default follow-up: tear down the upstream connection.
        callback = self._upstream.close
        self._upstream.handle.disable_reading()

        if keep_alive:
            # NOTE(review): _http_msg is replaced *before* the intercepted
            # write below, so that branch serializes a fresh, empty
            # response on keep-alive connections — confirm intended.
            self._http_msg = HttpResponse()
            callback = self._downstream.handle.resume_reading

        if self._intercepted:
            # Serialize our message to them
            self._downstream.write(self._http_msg.to_bytes(), callback)
        elif is_chunked or self._chunked:
            # Finish the last chunk.
            self._downstream.write(_CHUNK_CLOSE, callback)
        else:
            callback()
ResponseParser, ParserDelegate)
import traceback

_LOG = get_logger(__name__)

"""
String representing a 0 length HTTP chunked encoding chunk.
"""
_CHUNK_CLOSE = b'0\r\n\r\n'

"""
Default return object on error. This should be configurable.
"""
# Canned 502 reply used when the upstream interaction fails.
_BAD_GATEWAY_RESP = HttpResponse()
_BAD_GATEWAY_RESP.version = b'1.1'
_BAD_GATEWAY_RESP.status = '502 Bad Gateway'
_BAD_GATEWAY_RESP.header('Server').values.append('pyrox/{}'.format(VERSION))
_BAD_GATEWAY_RESP.header('Content-Length').values.append('0')

"""
Default return object on no route or upstream not responding. This
should be configurable.
"""
# Canned 503 reply used when no route exists or upstream is unreachable.
_UPSTREAM_UNAVAILABLE = HttpResponse()
_UPSTREAM_UNAVAILABLE.version = b'1.1'
_UPSTREAM_UNAVAILABLE.status = '503 Service Unavailable'
_UPSTREAM_UNAVAILABLE.header('Server').values.append('pyrox/{}'.format(VERSION))
_UPSTREAM_UNAVAILABLE.header('Content-Length').values.append('0')
request_func._handles_response_body = True
    return request_func


class HttpFilter(object):
    """
    HttpFilter is a marker class that may be utilized for dynamic
    gathering of filter logic.
    """
    pass


"""
Default return object. This should be configurable.
"""
# Canned 400 reply used when a filter rejects without its own response.
_DEFAULT_REJECT_RESP = HttpResponse()
_DEFAULT_REJECT_RESP.version = b'1.1'
_DEFAULT_REJECT_RESP.status = '400 Bad Request'
_DEFAULT_REJECT_RESP.header('Content-Length').values.append('0')

"""
Default filter action singletons.
"""
_DEFAULT_PASS_ACTION = FilterAction(NEXT_FILTER)
_DEFAULT_CONSUME_ACTION = FilterAction(CONSUME)


def consume():
    """
    Consumes the event and does not allow any further downstream filters
    to see it. This effectively halts execution of the filter chain but
    leaves the
class KeystoneTokenValidationFilter(filtering.HttpFilter):
    """Validates X-Auth-Token headers against Keystone.

    Successful validations are cached in Redis (token -> tenant id) so
    repeat requests skip the Keystone round trip. Valid requests are
    routed to the configured upstream with the tenant id substituted
    into the URL; invalid ones are rejected with a canned 401.
    """

    def __init__(self):
        # Canned 401 reply for failed validations.
        self.reject_response = HttpResponse()
        self.reject_response.status = '401 Unauthorized'
        self.reject_response.header('Content-Length').values.append('0')
        self.config = load_pyrox_config()
        self.redis = redis.StrictRedis(
            host=self.config.redis.host,
            port=self.config.redis.port,
            db=self.config.redis.db)
        self.admin_client = KeystoneClient(
            token=self.config.keystone.auth_token,
            timeout=self.config.keystone.timeout,
            endpoint=self.config.keystone.endpoint,
            insecure=self.config.keystone.insecure)

    def _cache_set_token(self, token, tenant_id):
        # BUG FIX: this previously called StrictRedis.set(token, ttl,
        # tenant_id), which stores the TTL as the *value* and passes the
        # tenant id as the expiry argument. setex(name, time, value)
        # matches the intended (token, ttl, tenant_id) argument order.
        self.redis.setex(token, self.config.redis.ttl, tenant_id)

    def _cache_get_tenant_id(self, token):
        # Returns None when the token is not cached.
        return self.redis.get(token)

    def _cached_token_exists(self, token):
        return self.redis.get(token) is not None

    def _prepare_route(self, request, tenant_id):
        # Strip auth headers before forwarding and splice the tenant id
        # into the upstream URL template.
        request.remove_header(X_AUTH_TOKEN)
        request.remove_header(X_TENANT_NAME)
        return '{0}{1}'.format(
            self.config.route_to,
            request.url.replace(
                self.config.keystone.url_replacement, tenant_id))

    @filtering.handles_request_head
    def on_request_head(self, request_head):
        try:
            token_hdr = request_head.get_header(X_AUTH_TOKEN)
            tenant_name_hdr = request_head.get_header(X_TENANT_NAME)
            token = token_hdr.values[0]
            tenant_name = tenant_name_hdr.values[0]

            if token and tenant_name:
                # Cache hit: route straight away without touching Keystone.
                if self._cached_token_exists(token):
                    return filtering.route(self._prepare_route(
                        request_head, self._cache_get_tenant_id(token)))

                auth_result = self.admin_client.tokens.authenticate(
                    token=token, tenant_name=tenant_name)
                if auth_result:
                    tenant_id = auth_result.tenant.get('id', None)
                    self._cache_set_token(token, tenant_id)
                    return filtering.route(self._prepare_route(
                        request_head, tenant_id))
            # Fall through (missing/empty headers or falsy auth result):
            # no action is returned, preserving the original pass-through.
        except Unauthorized:
            # BUG FIX: the reject action was previously computed but not
            # returned, so unauthorized requests passed through unfiltered.
            return filtering.reject(response=self.reject_response)
        except Exception as ex:
            _LOG.exception(ex)
            return filtering.reject(response=self.reject_response)