def testHopByHop(self):
    """Check is_hop_by_hop() accepts every hop-by-hop header (in any
    letter case) and rejects a sample of end-to-end headers."""
    hop_by_hop_names = (
        "Connection Keep-Alive Proxy-Authenticate Proxy-Authorization "
        "TE Trailers Transfer-Encoding Upgrade"
    ).split()
    for name in hop_by_hop_names:
        # The check must be case-insensitive.
        for variant in (name, name.title(), name.upper(), name.lower()):
            self.assertTrue(util.is_hop_by_hop(variant))
    # Not comprehensive, just a few random header names
    end_to_end_names = "Accept Cache-Control Date Pragma Trailer Via Warning".split()
    for name in end_to_end_names:
        for variant in (name, name.title(), name.upper(), name.lower()):
            self.assertFalse(util.is_hop_by_hop(variant))
def testHopByHop(self):
    """Verify util.is_hop_by_hop() recognises every hop-by-hop header
    case-insensitively and rejects common end-to-end headers.

    NOTE(review): duplicate of the other testHopByHop in this file.
    """
    for hop in (
            "Connection Keep-Alive Proxy-Authenticate Proxy-Authorization "
            "TE Trailers Transfer-Encoding Upgrade").split():
        # Every letter-case variant must be recognised.
        for alt in hop, hop.title(), hop.upper(), hop.lower():
            self.assertTrue(util.is_hop_by_hop(alt))
    # Not comprehensive, just a few random header names
    for hop in ("Accept Cache-Control Date Pragma Trailer Via Warning"
                ).split():
        for alt in hop, hop.title(), hop.upper(), hop.lower():
            self.assertFalse(util.is_hop_by_hop(alt))
def start_response(self, status, headers, exc_info=None):
    """PEP 3333 'start_response' callable backed by an aiohttp StreamResponse.

    Returns the write() callable used to stream the response body.
    Raises the original exception from exc_info if headers already went out.
    """
    if exc_info:
        # Log the error.
        self._request.app.logger.error("Unexpected error", exc_info=exc_info)
        # Attempt to modify the response.
        try:
            if self._response and self._response.started:
                # Headers already sent: re-raise the original exception (PEP 3333).
                raise exc_info[1].with_traceback(exc_info[2])
            self._response = None
        finally:
            exc_info = None  # break the traceback reference cycle
    # Cannot start response twice.
    assert not self._response, "Cannot call start_response() twice"
    # Parse the status.
    assert isinstance(status, str), "Response status should be str"
    status_code, reason = status.split(None, 1)
    status_code = int(status_code)
    # Store the response.
    self._response = StreamResponse(
        status=status_code,
        reason=reason,
    )
    # Store the headers.
    for header_name, header_value in headers:
        # Applications must not set hop-by-hop headers (PEP 3333).
        assert not is_hop_by_hop(header_name), "Hop-by-hop headers are forbidden"
        self._response.headers.add(header_name, header_value)
    # Return the stream writer interface.
    return self.write
def start_response(self, status, response_headers, exc_info=None):
    """
    Method to be passed to WSGI application object to start the response.

    Stores status and headers for later emission and returns self.write.
    Raises WsgiAppError when called twice without exc_info, or when the
    application sets a hop-by-hop header (both forbidden by PEP 333).
    """
    if exc_info:
        try:
            # Re-raise the ORIGINAL exception with its type and traceback.
            # The previous code did `raise Exception(exc_info)`, which
            # discarded both the exception type and the traceback.
            raise exc_info[1].with_traceback(exc_info[2])
        finally:
            exc_info = None  # avoid a dangling circular reference (PEP 333)
    elif self.response_dict:
        # Will be caught by _error
        raise WsgiAppError('start_response called a second time without exc_info! See PEP 333.')
    # PEP 333 requires that an application NOT send any hop-by-hop headers.
    # Therefore, we check for any of them in the headers the application
    # returns. If so, an exception is raised to be caught by _error.
    for key, value in response_headers:
        if is_hop_by_hop(key):
            raise WsgiAppError('Hop by hop header specified')
    self.response_dict['headers'] = copy.copy(response_headers)
    self.response_dict['statuscode'] = status
    return self.write
def _do_request(self, method, *args, **kwargs):
    """Perform an HTTP request via *method* and strip hop-by-hop headers.

    Hop-by-hop headers (Connection, Keep-Alive, ...) only apply to a single
    transport link and must not be forwarded, so they are removed from the
    response before returning it.
    """
    r = method(verify=False, *args, **kwargs)
    # Iterate over a snapshot of the keys: deleting entries while iterating
    # the live keys() view raises RuntimeError on Python 3.
    for header in list(r.headers.keys()):
        if is_hop_by_hop(header):
            del r.headers[header]
    logger.debug('Safewalk response %s' % r.content)
    return r
def _download_remote_file(self, channel_name, storage_address, asset):
    """Stream a file from the asset owner's node and relay it to the caller.

    Authenticates the outgoing request against the owning node, then wraps
    the upstream stream in a CustomFileResponse, copying all end-to-end
    headers. Returns an error Response with the upstream status on failure.
    """
    node_id = asset['owner']
    auth = authenticate_outgoing_request(node_id)
    # stream=True so the file is relayed chunk by chunk, never fully buffered.
    r = get_remote_file(channel_name, storage_address, auth, stream=True,
                        headers={HTTP_HEADER_PROXY_ASSET: 'True'})
    if not r.ok:
        return Response(
            {
                'message': f'Cannot proxify asset from node {asset["owner"]}: {str(r.text)}'
            },
            status=r.status_code)
    response = CustomFileResponse(
        streaming_content=(chunk for chunk in r.iter_content(512 * 1024)),
        status=r.status_code)
    for header in r.headers:
        # We don't use hop_by_hop headers since they are incompatible
        # with WSGI
        if not is_hop_by_hop(header):
            response[header] = r.headers.get(header)
    return response
def Home():
    """Authenticated single-URL forwarder (bottle, Python 3 urllib).

    Fetches ?u=<url> with a spoofed browser User-Agent and relays the body
    and all non-hop-by-hop response headers back to the client.
    """
    resp = bottle.response
    qry = bottle.request.query
    # t (timeout) defaults to 30 seconds.
    url, k, timeout = qry.u, qry.k, int(qry.get('t', '30'))
    if k and k != ALLOW_KEYS:
        return 'Auth Key is invalid!'
    if url and k:
        # Re-encode spaces that query decoding may have introduced.
        url = urllib.parse.unquote(url).replace(' ', r'%20')
        try:
            req = urllib.request.Request(url)
            req.add_header('User-Agent', "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36")
            req.add_header('Accept', "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
            ret = urllib.request.urlopen(req, timeout=timeout)
            content = ret.read()
            # Hop-by-hop headers apply to one transport link only; drop them.
            headers = [(n, v) for n, v in ret.info().items() if not is_hop_by_hop(n)]
            cookieAdded = False
            for n, v in headers:
                # set_header() replaces; add_header() appends, needed for
                # every Set-Cookie after the first.
                if n == 'Set-Cookie' and cookieAdded:
                    resp.add_header(n, v)
                else:
                    resp.set_header(n, v)
                if n == 'Set-Cookie':
                    cookieAdded = True
            return content
        except socket.timeout:
            # Swallow timeouts; the route then implicitly returns None.
            pass
        except Exception as e:
            import traceback
            print(traceback.format_exc())
            print("ERR : %s : %s" % (type(e), str(e)))
            bottle.abort(400)
    else:
        return "<html><head><title>Forwarder Url</title></head><body>Forwarder(%s) : thisurl?k=AUTHKEY&t=timeout&u=url</body></html>" % __Version__
def Home():
    """Authenticated single-URL forwarder (bottle; Python 2 urllib/urllib2).

    Fetches ?u=<url> with a spoofed browser User-Agent and relays the body
    and all non-hop-by-hop response headers back to the client.
    """
    resp = bottle.response
    qry = bottle.request.query
    # t (timeout) defaults to 30 seconds.
    url, k, timeout = qry.u, qry.k, int(qry.get('t', '30'))
    if k and k != ALLOW_KEYS:
        return 'Auth Key is invalid!'
    if url and k:
        url = urllib.unquote(url)
        try:
            req = urllib2.Request(url)
            req.add_header('User-Agent', "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0)")
            req.add_header('Accept', "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
            ret = urllib2.urlopen(req, timeout=timeout)
            content = ret.read()
            # Hop-by-hop headers apply to one transport link only; drop them.
            headers = [(n, v) for n, v in ret.info().items() if not is_hop_by_hop(n)]
            cookieAdded = False
            for n, v in headers:
                # set_header() replaces; add_header() appends, needed for
                # every Set-Cookie after the first.
                if n == 'Set-Cookie' and cookieAdded:
                    resp.add_header(n, v)
                else:
                    resp.set_header(n, v)
                if n == 'Set-Cookie':
                    cookieAdded = True
            return content
        except socket.timeout:
            # Swallow timeouts; the route then implicitly returns None.
            pass
        except Exception as e:
            print("ERR : %s : %s" % (type(e), str(e)))
            bottle.abort(400)
    else:
        return "<html><head><title>Forwarder Url</title></head><body>Forwarder(%s) : thisurl?k=AUTHKEY&t=timeout&u=url</body></html>" % __Version__
def main():
    """Authenticated single-URL forwarder (flask + requests).

    Fetches ?u=<url> with a browser-like User-Agent and relays the body and
    all non-hop-by-hop response headers back to the client.
    """
    response = flask.Response()
    url, key, timeout = (request.args.get('u', ''),
                         request.args.get('k', ''),
                         int(request.args.get('t', '30')))
    if key and key not in ALLOW_KEYS:
        return 'Invalid key!'
    if url and key:
        try:
            header = {"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; \
rv:30.0) Gecko/20100101 Firefox/30.0",
                      "Accept": "text/html,application/xhtml+xml,\
application/xml;q=0.9,*/*;q=0.8"}
            r = requests.get(url, timeout=timeout, headers=header)
            # Hop-by-hop headers apply to one transport link only; drop them.
            header = {name: value for name, value in r.headers.items()
                      if not is_hop_by_hop(name)}
            cookie_added = False
            for header_name, header_value in header.items():
                if header_name == 'Set-Cookie' and cookie_added:
                    response.headers['Set-Cookie'] = header_value
                else:
                    # BUG FIX: relay each header under its own name; the
                    # previous code wrote every header to 'Set-Cookie'.
                    response.headers[header_name] = header_value
                if header_name == 'Set-Cookie':
                    cookie_added = True
            return r.text
        except socket.timeout:
            # Swallow timeouts; the route then implicitly returns None.
            pass
        except Exception as e:
            return repr(e)
    else:
        return "I need KEY and URL, check your format :("
def start_response(self, status, headers, exc_info=None):
    """'start_response()' callable as specified by PEP 3333

    Records status and headers for later transmission and returns the
    write() callable. If exc_info is given after headers were sent, the
    original exception is re-raised.
    """
    if exc_info:
        try:
            if self.headers_sent:
                # Re-raise original exception if headers sent
                raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
        finally:
            exc_info = None  # avoid dangling circular ref
    elif self.headers is not None:
        # Second call without exc_info is a protocol violation.
        raise AssertionError("Headers already set!")
    self.status = status
    self.headers = self.headers_class(headers)
    status = self._convert_string_type(status, "Status")
    # Validate the "NNN Reason" status line shape required by PEP 3333.
    assert len(status) >= 4, "Status must be at least 4 characters"
    assert status[:3].isdigit(), "Status message must begin w/3-digit code"
    assert status[3] == " ", "Status message must have a space after code"
    if __debug__:
        # Header sanity checks (skipped under python -O): names/values must
        # be native strings and must not be hop-by-hop headers.
        for name, val in headers:
            name = self._convert_string_type(name, "Header name")
            val = self._convert_string_type(val, "Header value")
            assert not is_hop_by_hop(name), "Hop-by-hop headers not allowed"
    return self.write
def start_response(self, status, headers, exc_info=None):
    """'start_response()' callable as specified by PEP 333

    NOTE(review): Python 2 only -- uses the three-expression ``raise`` form
    and StringType checks.
    """
    if exc_info:
        try:
            if self.headers_sent:
                # Re-raise original exception if headers sent
                raise exc_info[0], exc_info[1], exc_info[2]
        finally:
            exc_info = None  # avoid dangling circular ref
    elif self.headers is not None:
        # Second call without exc_info is a protocol violation.
        raise AssertionError("Headers already set!")
    # Validate the "NNN Reason" status line shape required by PEP 333.
    assert type_is(status, StringType), "Status must be a string"
    assert len(status) >= 4, "Status must be at least 4 characters"
    assert int(status[:3]), "Status message must begin w/3-digit code"
    assert status[3] == " ", "Status message must have a space after code"
    if __debug__:
        # Header sanity checks (skipped under python -O).
        for name, val in headers:
            assert type_is(name, StringType), "Header names must be strings"
            assert type_is(val, StringType), "Header values must be strings"
            assert not is_hop_by_hop(name), "Hop-by-hop headers not allowed"
    self.status = status
    self.headers = self.headers_class(headers)
    return self.write
def Home():
    """Forwarder variant (Python 2) that extracts the raw 'u=' parameter
    straight from the request URL and adds a permissive CORS header."""
    resp = bottle.response
    qry = bottle.request.query
    # Timeout is fixed at 30 seconds in this variant.
    k, timeout = qry.k, 30
    # Take everything after 'u=' verbatim so a query string embedded in the
    # target URL is not clipped by normal query parsing.
    indexofu = bottle.request.url.index('u=')
    url = bottle.request.url[indexofu + 2:]
    if k and k != ALLOW_KEYS:
        return 'Auth Key is invalid!'
    if url and k:
        url = urllib.unquote(url.encode('utf-8')).replace(' ', r'%20')
        try:
            req = urllib2.Request(url)
            req.add_header('User-Agent', "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0)")
            req.add_header('Accept', "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
            ret = urllib2.urlopen(req, timeout=timeout)
            content = ret.read()
            # Hop-by-hop headers apply to one transport link only; drop them.
            headers = [(n, v) for n, v in ret.info().items() if not is_hop_by_hop(n)]
            cookieAdded = False
            for n, v in headers:
                # set_header() replaces; add_header() appends, needed for
                # every Set-Cookie after the first.
                if n == 'Set-Cookie' and cookieAdded:
                    resp.add_header(n, v)
                else:
                    resp.set_header(n, v)
                if n == 'Set-Cookie':
                    cookieAdded = True
            resp.set_header("Access-Control-Allow-Origin", "*")
            return content
        except socket.timeout:
            # Swallow timeouts; the route then implicitly returns None.
            pass
        except Exception as e:
            print("ERR : %s : %s" % (type(e), str(e)))
            bottle.abort(400)
    else:
        return "<html><head><title>Forwarder Url</title></head><body>Forwarder(%s) : thisurl?k=AUTHKEY&t=timeout&u=url</body></html>" % __Version__
def start_response(self, status, headers, exc_info=None):
    """PEP 3333 'start_response' callable backed by an aiohttp StreamResponse.

    Variant using StreamResponse.prepared. Returns the write() callable
    used to stream the response body.
    """
    if exc_info:
        # Log the error.
        self._request.app.logger.error("Unexpected error", exc_info=exc_info)
        # Attempt to modify the response.
        try:
            if self._response and self._response.prepared:
                # Headers already sent: re-raise the original exception (PEP 3333).
                raise exc_info[1].with_traceback(exc_info[2])
            self._response = None
        finally:
            exc_info = None  # break the traceback reference cycle
    # Cannot start response twice.
    assert not self._response, "Cannot call start_response() twice"
    # Parse the status.
    assert isinstance(status, str), "Response status should be str"
    status_code, reason = status.split(None, 1)
    status_code = int(status_code)
    # Store the response.
    self._response = StreamResponse(
        status=status_code,
        reason=reason,
    )
    # Store the headers.
    for header_name, header_value in headers:
        # Applications must not set hop-by-hop headers (PEP 3333).
        assert not is_hop_by_hop(header_name), "Hop-by-hop headers are forbidden"
        self._response.headers.add(header_name, header_value)
    # Return the stream writer interface.
    return self.write
def __assembleResponseHeaders(self, headers):
    """Build a list of (name, value) response headers, dropping hop-by-hop
    headers and title-casing the remaining names.

    Uses dict.items() instead of the Python 2-only iteritems() so the
    method works on both Python 2 and Python 3.
    """
    responseHeaders = []
    for key, value in headers.items():
        if not is_hop_by_hop(key):
            key = key.title()
            responseHeaders.append((key, value))
    return responseHeaders
def set_response_headers(response, response_headers):
    """Copy upstream headers onto *response* under title-cased names,
    skipping hop-by-hop headers and Set-Cookie."""
    for name, value in response_headers.items():
        lowered = name.lower()
        if lowered == 'set-cookie' or is_hop_by_hop(name):
            continue
        response[name.title()] = value
    logger.debug('Response headers: %s', getattr(response, '_headers'))
def _get_environ(self, request, body, content_length):
    """Build a PEP 3333 environ dict from an aiohttp request.

    body/content_length are supplied by the caller (the request body has
    already been read). Also exposes the asyncio loop, executor and the
    raw aiohttp request under non-standard environ keys.
    """
    # Resolve the path info.
    path_info = request.match_info["path_info"]
    script_name = request.rel_url.path[:len(request.rel_url.path) - len(path_info)]
    # Special case: If the app was mounted on the root, then the script name will
    # currently be set to "/", which is illegal in the WSGI spec. The script name
    # could also end with a slash if the WSGIHandler was mounted as a route
    # manually with a trailing slash before the path_info. In either case, we
    # correct this according to the WSGI spec by transferring the trailing slash
    # from script_name to the start of path_info.
    if script_name.endswith("/"):
        script_name = script_name[:-1]
        path_info = "/" + path_info
    # Parse the connection info.
    server_name, server_port = parse_sockname(
        request.transport.get_extra_info("sockname"))
    remote_addr, remote_port = parse_sockname(
        request.transport.get_extra_info("peername"))
    # Detect the URL scheme.
    url_scheme = self._url_scheme
    if url_scheme is None:
        url_scheme = "http" if request.transport.get_extra_info(
            "sslcontext") is None else "https"
    # Create the environ.
    environ = {
        "REQUEST_METHOD": request.method,
        "SCRIPT_NAME": script_name,
        "PATH_INFO": path_info,
        "QUERY_STRING": request.rel_url.query_string,
        "CONTENT_TYPE": request.headers.get("Content-Type", ""),
        "CONTENT_LENGTH": str(content_length),
        "SERVER_NAME": server_name,
        "SERVER_PORT": server_port,
        "REMOTE_ADDR": remote_addr,
        "REMOTE_HOST": remote_addr,
        "REMOTE_PORT": remote_port,
        "SERVER_PROTOCOL": "HTTP/{}.{}".format(*request.version),
        "wsgi.version": (1, 0),
        "wsgi.url_scheme": url_scheme,
        "wsgi.input": body,
        "wsgi.errors": self._stderr,
        "wsgi.multithread": True,
        "wsgi.multiprocess": False,
        "wsgi.run_once": False,
        "asyncio.loop": self._loop,
        "asyncio.executor": self._executor,
        "aiohttp.request": request,
    }
    # Add in additional HTTP headers.
    for header_name in request.headers:
        header_name = header_name.upper()
        # Hop-by-hop headers and the two specially-handled entity headers
        # are not exposed as HTTP_* keys.
        if not (is_hop_by_hop(header_name)) and header_name not in (
                "CONTENT-LENGTH", "CONTENT-TYPE"):
            # Join duplicate headers with commas, per the WSGI convention.
            header_value = ",".join(request.headers.getall(header_name))
            environ["HTTP_" + header_name.replace("-", "_")] = header_value
    # All done!
    return environ
def filter_request_headers(req, allow_host=False):
    """Return req's headers as a dict, dropping hop-by-hop headers and
    Content-Length (and Host too, unless allow_host is True)."""
    if allow_host:
        blocked = {'content-length'}
    else:
        blocked = {'content-length', 'host'}
    return {
        name: value
        for name, value in req.headers.items()
        if not is_hop_by_hop(name) and name.lower() not in blocked
    }
def __assembleRequestHeaders(self):
    """Build outgoing request headers from the WSGI environ.

    HTTP_FOO_BAR environ keys become Foo-Bar header names; hop-by-hop
    headers are dropped and Host is forced to the backend's host.
    Uses dict.items() instead of the Python 2-only iteritems() so the
    method works on both Python 2 and Python 3.
    """
    headers = {}
    for key, value in self.environ.items():
        if key.startswith('HTTP_'):
            # HTTP_X_FOO -> X-Foo
            key = key[5:].replace('_', '-').title()
            if not is_hop_by_hop(key):
                headers[key] = value
    headers['Host'] = self.__backend['host']
    return headers
def do_proxy(self, method):
    """Proxy a gtap request to twitter.com over HTTPS, rewriting the path
    and re-encoding Basic auth. Python 2 / Google App Engine code."""
    orig_url = self.request.url
    orig_body = self.request.body
    (scm, netloc, path, params, query, _) = urlparse.urlparse(orig_url)
    # Decode and re-encode the caller's Basic credentials for upstream.
    if 'Authorization' not in self.request.headers:
        headers = {}
    else:
        auth_header = self.request.headers['Authorization']
        auth_parts = auth_header.split(' ')
        user_pass_parts = base64.b64decode(auth_parts[1]).split(':')
        username = user_pass_parts[0]
        password = user_pass_parts[1]
        # encodestring appends a newline; strip it with [:-1].
        base64string = base64.encodestring('%s:%s' % (username, password))[:-1]
        headers = {'Authorization': "Basic %s" % base64string}
    # Strip a leading /gtap path segment if present.
    path_parts = path.split('/')
    if path_parts[1] == 'gtap':
        path_parts = path_parts[2:]
        path_parts.insert(0, '')
        path = '/'.join(path_parts).replace('//', '/')
    # /api/... and /search/... map to api.twitter.com / search.twitter.com.
    path_parts = path.split('/')
    if path_parts[1] == 'api' or path_parts[1] == 'search':
        sub_head = path_parts[1]
        path_parts = path_parts[2:]
        path_parts.insert(0, '')
        new_path = '/'.join(path_parts).replace('//', '/')
        new_netloc = sub_head + '.twitter.com'
    else:
        new_path = path
        new_netloc = 'twitter.com'
    if new_path == '/' or new_path == '':
        # Bare root: show the gtap landing page instead of proxying.
        global gtap_message
        gtap_message = gtap_message.replace('#app_url#', netloc)
        gtap_message = gtap_message.replace('#gtap_version#', gtap_vrsion)
        self.my_output('text/html', gtap_message)
    else:
        new_url = urlparse.urlunparse(('https', new_netloc, new_path.replace('//', '/'), params, query, ''))
        logging.debug(new_url)
        logging.debug(orig_body)
        data = urlfetch.fetch(new_url, payload=orig_body, method=method, headers=headers, allow_truncated=True)
        logging.debug(data.headers)
        try:
            self.response.set_status(data.status_code)
        except Exception:
            # Status not accepted by the framework: answer 503 instead.
            logging.debug(data.status_code)
            self.response.set_status(503)
        self.response.headers.add_header('GTAP-Version', gtap_vrsion)
        for res_name, res_value in data.headers.items():
            # Relay all headers except hop-by-hop ones and the pseudo
            # 'status' header urlfetch includes.
            if is_hop_by_hop(res_name) is False and res_name != 'status':
                self.response.headers.add_header(res_name, res_value)
        self.response.out.write(data.content)
def remap_response_headers(self, headers):
    """Return a copy of *headers* without Content-Length or any
    hop-by-hop header."""
    return {
        name: headers[name]
        for name in headers
        if name.lower() != 'content-length' and not is_hop_by_hop(name)
    }
def parse_requests_result(result):
    """Split a requests response into (status_code, headers, body),
    dropping hop-by-hop headers and stale compression headers."""
    headers = result.headers
    # Iterate over a snapshot: popping from the mapping while iterating its
    # live view raises RuntimeError (and iteritems() was Python 2 only).
    for key, val in list(headers.items()):
        if is_hop_by_hop(key):
            headers.pop(key)
        elif key.lower() == 'content-encoding' and 'zip' in val:
            # requests already decompressed the body; the header is stale.
            headers.pop(key)
    status_code = result.status_code
    output = result.content
    # Re-sort the headers to stay consistent with cached responses.
    headers = sort_headers(headers)
    return status_code, headers, output
def _get_environ(self, request):
    """Build a PEP 3333 environ dict from an aiohttp request.

    Generator-based coroutine (``yield from``) that reads the whole body
    into memory and URL-quotes the path components as WSGI expects.
    """
    # Resolve the path info.
    path_info = request.match_info["path_info"]
    script_name = request.path[:len(request.path) - len(path_info)]
    # Special case: If the app was mounted on the root, then the script name will
    # currently be set to "/", which is illegal in the WSGI spec. The script name
    # could also end with a slash if the WSGIHandler was mounted as a route
    # manually with a trailing slash before the path_info. In either case, we
    # correct this according to the WSGI spec by transferring the trailing slash
    # from script_name to the start of path_info.
    if script_name.endswith("/"):
        script_name = script_name[:-1]
        path_info = "/" + path_info
    # Read the body.
    body = (yield from request.read())
    # Parse the connection info.
    server_name, server_port = parse_sockname(request.transport.get_extra_info("sockname"))
    remote_addr, remote_port = parse_sockname(request.transport.get_extra_info("peername"))
    # Detect the URL scheme.
    url_scheme = self._url_scheme
    if url_scheme is None:
        url_scheme = "http" if request.transport.get_extra_info("sslcontext") is None else "https"
    # Create the environ.
    environ = {
        "REQUEST_METHOD": request.method,
        "SCRIPT_NAME": quote(script_name),  # WSGI spec expects URL-quoted path components.
        "PATH_INFO": quote(path_info),  # WSGI spec expects URL-quoted path components.
        "QUERY_STRING": request.query_string,
        "CONTENT_TYPE": request.headers.get("Content-Type", ""),
        "CONTENT_LENGTH": str(len(body)),
        "SERVER_NAME": server_name,
        "SERVER_PORT": server_port,
        "REMOTE_ADDR": remote_addr,
        "REMOTE_HOST": remote_addr,
        "REMOTE_PORT": remote_port,
        "SERVER_PROTOCOL": "HTTP/{}.{}".format(*request.version),
        "wsgi.version": (1, 0),
        "wsgi.url_scheme": url_scheme,
        "wsgi.input": io.BytesIO(body),
        "wsgi.errors": self._stderr,
        "wsgi.multithread": True,
        "wsgi.multiprocess": False,
        "wsgi.run_once": False,
    }
    # Add in additional HTTP headers.
    for header_name in request.headers:
        header_name = header_name.upper()
        # Hop-by-hop headers and the two specially-handled entity headers
        # are not exposed as HTTP_* keys.
        if not(is_hop_by_hop(header_name)) and not header_name in ("CONTENT-LENGTH", "CONTENT-TYPE"):
            # Join duplicate headers with commas, per the WSGI convention.
            header_value = ",".join(request.headers.getall(header_name))
            environ["HTTP_" + header_name.replace("-", "_")] = header_value
    # All done!
    return environ
def parse_requests_result(result):
    """Split a requests response into (status_code, headers, body),
    dropping hop-by-hop, stale compression, and Content-Length headers."""
    headers = result.headers
    # Iterate over a snapshot: popping from the mapping while iterating its
    # live view raises RuntimeError (and iteritems() was Python 2 only).
    for key, val in list(headers.items()):
        if is_hop_by_hop(key):
            headers.pop(key)
        elif key.lower() == 'content-encoding' and 'zip' in val:
            # requests already decompressed the body; the header is stale.
            headers.pop(key)
    status_code = result.status_code
    output = result.content
    if 'Content-Length' in headers:
        # Let the WSGI layer recompute the size of the decompressed body.
        headers.pop('Content-Length')
    return status_code, headers, output
def proxy_app(environ, start_response): """Formards the request to the real server """ # Rebuilding the request headers from environ req_headers = {} not_relayed_headers = ('HTTP_ACCEPT_ENCODING', 'HTTP_HOST', 'HTTP_PROXY_CONNECTION') for name, value in ((name, value) for name, value in environ.iteritems() if name.startswith('HTTP_') and name not in not_relayed_headers): # HTTP_XX_XX -> xx-xx name = '-'.join(w.lower() for w in name[5:].split('_')) req_headers[name] = value # Some headers are not prefixed with HTTP for name in ('CONTENT_TYPE', 'CONTENT_LENGTH'): value = environ.get(name, None) if value: name = '-'.join(w.lower() for w in name.split('_')) req_headers[name] = value # Add or change headers custom_headers.pre_hook(req_headers) # Proxying to the destination server scheme = environ.get('wsgi.url_scheme', 'http') connection_class = scheme2connection.get(scheme, httplib.HTTPConnection) conn = connection_class(environ['HTTP_HOST']) req_meth = environ['REQUEST_METHOD'] conn.request(req_meth, '{0[PATH_INFO]}?{0[QUERY_STRING]}'.format(environ), headers=req_headers) if req_meth == 'POST': # We need to relay the body too input_ = environ['wsgi.input'] length = int(environ.get('CONTENT_LENGTH', '0')) payload = input_.read(length) # Oops, could be a biiiig file conn.send(payload) # Transform / relay the response response = conn.getresponse() txt_status = httplib.responses.get(response.status, "Unknown status {0}".format(response.status)) status = '{0} {1}'.format(response.status, txt_status) # Remove so-called "hop by hop" headers resp_headers = [(n, v) for n, v in response.getheaders() if not is_hop_by_hop(n)] # Notify response headers if required custom_headers.post_hook(resp_headers) # Replying to browser start_response(status, resp_headers) return iterstreamer(response)
def start_response(status, headers, exc_info=None):
    """Minimal PEP 3333 'start_response' callable that records the response
    into the enclosing scope instead of writing to a transport.

    Returns response_body.append so the caller can use it as write().
    """
    nonlocal response_status, response_reason, response_headers, response_body
    # Split "NNN Reason" into its numeric code and reason phrase.
    status_code, reason = status.split(None, 1)
    status_code = int(status_code)
    # Check the headers.
    for header_name, header_value in headers:
        # Applications must not set hop-by-hop headers (PEP 3333).
        assert not is_hop_by_hop(header_name), "hop-by-hop headers are forbidden"
    # Start the response.
    response_status = status_code
    response_reason = reason
    response_headers = headers
    # Reset any previously-collected body chunks in place.
    del response_body[:]
    return response_body.append
def authenticate(cls, service_url, authentication_access_token, username, password):
    """Authenticate *username*/*password* against the Safewalk service.

    Returns a SafewalkClient on success; raises AuthenticationException
    with the service's reply message otherwise.
    """
    payload = {'username': username, 'password': password}
    url = service_url + '/api/v1/auth/authenticate/'
    headers = {'AUTHORIZATION': 'Bearer {}'.format(authentication_access_token)}
    r = requests.post(url, payload, verify=settings.VERIFY_SSL, headers=headers)
    # Iterate over a snapshot of the keys: deleting entries while iterating
    # the live keys() view raises RuntimeError on Python 3.
    for header in list(r.headers.keys()):
        if is_hop_by_hop(header):
            # hop-by-hop headers are not supported downstream; drop them.
            del r.headers[header]
    if r.status_code == 200:
        return SafewalkClient(service_url, r.json()['access-token'], username=r.json()['username'])
    else:
        message = r.json().get(
            'reply-message',
            'Safewalk authentication service is not properly configured. Invalid safewalk oauth2 access token'
            if r.status_code == 401 else r.content)
        username = r.json().get('username')
        raise AuthenticationException(message, username)
def _get_environ(self, request):
    """Build a PEP 3333 environ dict from an aiohttp request.

    Generator-based coroutine variant where SCRIPT_NAME is a fixed,
    preconfigured prefix (self._script_name) stripped from the path.
    """
    # Resolve the path info.
    path_info = request.path
    if path_info.startswith(self._script_name):
        path_info = path_info[len(self._script_name):]
    # Read the body.
    body = (yield from request.read())
    # Parse the connection info.
    server_name, server_port = parse_sockname(
        request.transport.get_extra_info("sockname"))
    remote_addr, remote_port = parse_sockname(
        request.transport.get_extra_info("peername"))
    # Detect the URL scheme.
    url_scheme = self._url_scheme
    if url_scheme is None:
        url_scheme = "http" if request.transport.get_extra_info(
            "sslcontext") is None else "https"
    # Create the environ.
    environ = {
        "REQUEST_METHOD": request.method,
        "SCRIPT_NAME": self._script_name,
        "PATH_INFO": path_info,
        "QUERY_STRING": request.query_string,
        "CONTENT_TYPE": request.headers.get("Content-Type", ""),
        "CONTENT_LENGTH": str(len(body)),
        "SERVER_NAME": server_name,
        "SERVER_PORT": server_port,
        "REMOTE_ADDR": remote_addr,
        "REMOTE_HOST": remote_addr,
        "REMOTE_PORT": remote_port,
        "SERVER_PROTOCOL": "HTTP/{}.{}".format(*request.version),
        "wsgi.version": (1, 0),
        "wsgi.url_scheme": url_scheme,
        "wsgi.input": io.BytesIO(body),
        "wsgi.errors": self._stderr,
        "wsgi.multithread": True,
        "wsgi.multiprocess": False,
        "wsgi.run_once": False,
    }
    # Add in additional HTTP headers.
    for header_name in request.headers:
        header_name = header_name.upper()
        # Hop-by-hop headers and the two specially-handled entity headers
        # are not exposed as HTTP_* keys.
        if not (is_hop_by_hop(header_name)) and not header_name in (
                "CONTENT-LENGTH", "CONTENT-TYPE"):
            # Join duplicate headers with commas, per the WSGI convention.
            header_value = ",".join(request.headers.getall(header_name))
            environ["HTTP_" + header_name.replace("-", "_")] = header_value
    # All done!
    return environ
def proxy(): url = request.GET.get("url") headers = ["{0}: {1}".format(k,v) for k,v in request.headers.iteritems() if k.lower() not in STRIP_HEADERS] try: headtuple, content = curl_http(url, headers) import ipdb;ipdb.set_trace() headret = [(k,v) for k,v in headtuple if not is_hop_by_hop(k)] html = content.replace("<body>", "<body><!-- test -->") log.debug(html) raise HTTPResponse(output = content, header = headret) except pycurl.error, e: log.debug("curl error: " + str(e)) markurl(url) redirect(url)
def get_response_dict(environ, **kwargs):
    '''
    Get a response dict from a wsgi environ.

    The following handlers in kwargs are supported:

    url_handler(scheme, host, path)
        For modifying the remote url, returns a string as the new url.

    request_handler(request_env_dict)
        For modifying the current request dict(headers and body), returns a
        new dict object with the same structure as input.

    get_response_dict function returns a dict object with keys
    status_code, headers, original_headers and body.
    '''
    req_env_dict = environ2dict(environ, **kwargs)
    # Let the caller rewrite the outgoing request if a handler was given.
    request_handler = kwargs.get('request_handler')
    if not request_handler:
        req_dict = req_env_dict
    else:
        req_dict = request_handler(req_env_dict)
    method = req_dict.pop('method')
    url = req_dict.pop('url')
    # Remaining keys are passed through as requests.request kwargs.
    resp = requests.request(method, url, **req_dict)
    resp_dict = {
        "status_code": resp.status_code,
        "headers": {},
        "original_headers": {k: v for k, v in resp.headers.items()}
    }
    # Filter out headers not needed
    filtered_headers = ['content-length']
    for k, v in resp.headers.items():
        # Hop-by-hop headers are connection-local and must not be relayed.
        if is_hop_by_hop(k) or k.lower() in filtered_headers:
            continue
        resp_dict['headers'][k.lower()] = v
    # NOTE(review): the set() default means a missing content-encoding never
    # matches 'gzip' -- presumably intentional; confirm.
    if 'gzip' in resp.headers.get('content-encoding', set()):
        # Re-compress the body (requests already decoded it) so it matches
        # the advertised content-encoding.
        fileobj = six.BytesIO()
        gzipper = gzip.GzipFile(fileobj=fileobj, mode='w')
        gzipper.write(resp.content)
        gzipper.flush()
        resp_dict['body'] = fileobj.getvalue()
    else:
        resp_dict['body'] = resp.content
    return resp_dict
def proxy_request():
    """
    Called from a bottle request context.
    Proxies the call transparently to the graphite server.
    """
    url = config.graphite_url + request.path + '?' + request.query_string
    if request.method == 'POST':
        upstream = requests.post(url, data=request.body.read(),
                                 headers=request.headers, stream=True)
    else:
        upstream = requests.get(url, headers=request.headers, stream=True)
    # Relay every end-to-end header; hop-by-hop headers are connection-local
    # and must not be forwarded.
    for name, value in upstream.headers.items():
        if is_hop_by_hop(name):
            continue
        response.set_header(name, value)
    response.status = upstream.status_code
    return upstream.raw.read()
def test_response_headers_are_not_in_hop_by_hop_headers(self):
    """The proxy view must strip hop-by-hop headers from upstream
    responses before relaying them (WSGI forbids them)."""
    path = "/"
    request = self.factory.get(path)
    # connection / proxy-authorization are hop-by-hop and must be stripped;
    # content-type is end-to-end and is allowed through.
    headers = {
        'connection': '0',
        'proxy-authorization': 'allow',
        'content-type': 'text/html',
    }
    urlopen_mock = get_urlopen_mock(headers=headers)
    with patch(URLOPEN, urlopen_mock):
        response = CustomProxyView.as_view()(request, path)
    response_headers = response._headers
    for header in response_headers:
        self.assertFalse(is_hop_by_hop(header))
def download_file(self, request, django_field, ledger_field=None):
    """Serve an asset file: from local storage when this node owns it,
    otherwise proxy it (streaming) from the owner's node.

    django_field -- model attribute holding the local file
    ledger_field -- ledger key holding the remote storage address
                    (defaults to django_field)
    Returns a CustomFileResponse, or an error Response on ledger or
    authorization failure.
    """
    lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
    pk = self.kwargs[lookup_url_kwarg]
    try:
        asset = get_object_from_ledger(pk, self.ledger_query_call)
    except LedgerError as e:
        return Response({'message': str(e.msg)}, status=e.status)
    if not self._has_access(request.user, asset):
        return Response({'message': 'Unauthorized'},
                        status=status.HTTP_403_FORBIDDEN)
    if get_owner() == asset['owner']:
        # Local asset: serve straight from disk as an attachment.
        obj = self.get_object()
        data = getattr(obj, django_field)
        response = CustomFileResponse(
            open(data.path, 'rb'),
            as_attachment=True,
            filename=os.path.basename(data.path)
        )
    else:
        # Remote asset: stream it from the owning node.
        node_id = asset['owner']
        auth = authenticate_outgoing_request(node_id)
        if not ledger_field:
            ledger_field = django_field
        r = get_remote_file(asset[ledger_field]['storageAddress'], auth, stream=True)
        if not r.ok:
            return Response({
                'message': f'Cannot proxify asset from node {asset["owner"]}: {str(r.text)}'
            }, status=r.status_code)
        response = CustomFileResponse(
            streaming_content=(chunk for chunk in r.iter_content(512 * 1024)),
            status=r.status_code)
        for header in r.headers:
            # We don't use hop_by_hop headers since they are incompatible
            # with WSGI
            if not is_hop_by_hop(header):
                response[header] = r.headers.get(header)
    return response
def _get_environ(self, request):
    """Build a PEP 3333 environ dict from an aiohttp request.

    Generator-based coroutine variant: SCRIPT_NAME is derived from
    match_info without the root-slash correction or URL quoting applied
    by other variants in this file.
    """
    # Resolve the path info.
    path_info = request.match_info["path_info"]
    script_name = request.path[:len(request.path) - len(path_info)]
    # Read the body.
    body = (yield from request.read())
    # Parse the connection info.
    server_name, server_port = parse_sockname(request.transport.get_extra_info("sockname"))
    remote_addr, remote_port = parse_sockname(request.transport.get_extra_info("peername"))
    # Detect the URL scheme.
    url_scheme = self._url_scheme
    if url_scheme is None:
        url_scheme = "http" if request.transport.get_extra_info("sslcontext") is None else "https"
    # Create the environ.
    environ = {
        "REQUEST_METHOD": request.method,
        "SCRIPT_NAME": script_name,
        "PATH_INFO": path_info,
        "QUERY_STRING": request.query_string,
        "CONTENT_TYPE": request.headers.get("Content-Type", ""),
        "CONTENT_LENGTH": str(len(body)),
        "SERVER_NAME": server_name,
        "SERVER_PORT": server_port,
        "REMOTE_ADDR": remote_addr,
        "REMOTE_HOST": remote_addr,
        "REMOTE_PORT": remote_port,
        "SERVER_PROTOCOL": "HTTP/{}.{}".format(*request.version),
        "wsgi.version": (1, 0),
        "wsgi.url_scheme": url_scheme,
        "wsgi.input": io.BytesIO(body),
        "wsgi.errors": self._stderr,
        "wsgi.multithread": True,
        "wsgi.multiprocess": False,
        "wsgi.run_once": False,
    }
    # Add in additional HTTP headers.
    for header_name in request.headers:
        header_name = header_name.upper()
        # Hop-by-hop headers and the two specially-handled entity headers
        # are not exposed as HTTP_* keys.
        if not(is_hop_by_hop(header_name)) and not header_name in ("CONTENT-LENGTH", "CONTENT-TYPE"):
            # Join duplicate headers with commas, per the WSGI convention.
            header_value = ",".join(request.headers.getall(header_name))
            environ["HTTP_" + header_name.replace("-", "_")] = header_value
    # All done!
    return environ
def set_response_headers(response, response_headers):
    """Copy upstream headers onto *response*, skipping hop-by-hop headers
    and Set-Cookie, via whichever header interface the Django version
    provides."""
    # check for Django 3.2 headers interface
    # https://code.djangoproject.com/ticket/31789
    # Resolve the target mapping once, before the loop.
    modern_interface = hasattr(response, 'headers')
    target = response.headers if modern_interface else response
    for name, value in response_headers.items():
        if is_hop_by_hop(name) or name.lower() == 'set-cookie':
            continue
        target[name] = value
    if modern_interface:
        logger.debug('Response headers: %s', response.headers)
    else:
        logger.debug('Response headers: %s', getattr(response, '_headers'))
def proxy_request():
    """
    Called from a bottle request context.
    Proxies the call transparently to the graphite server.
    """
    url = config.graphite_url + request.path + '?' + request.query_string
    if request.method == 'POST':
        r = requests.post(url, data=request.body.read(), headers=request.headers, stream=True)
    else:
        r = requests.get(url, headers=request.headers, stream=True)
    # Relay headers intact
    for k, v in r.headers.items():
        # Hop-by-hop headers are connection-local and must not be relayed.
        if not is_hop_by_hop(k):
            response.set_header(k, v)
    response.status = r.status_code
    return r.raw.read()
def to_rest_response(self, response):
    """Convert a requests response into a DRF Response, preserving body,
    status, end-to-end headers, and Set-Cookie values.

    JSON bodies are decoded; anything else is passed through as bytes.
    """
    content_type = response.headers.get('Content-Type', '').lower()
    if content_type.startswith('application/json'):
        data = response.json()
    else:
        data = response.content
    # Relay only end-to-end headers; hop-by-hop ones are connection-local.
    headers = {
        key: response.headers[key]
        for key in response.headers
        if not is_hop_by_hop(key)
    }
    # Remove the merged Set-Cookie header; cookies are rebuilt below from
    # the raw urllib3 header list so multiple cookies are not joined.
    # NOTE(review): cookie_header itself is never used afterwards.
    cookie_header = headers.pop(self.SET_COOKIE_NAME, None)
    raw_cookies = response.raw.headers.getlist(self.SET_COOKIE_NAME)
    response = Response(data=data, status=response.status_code, headers=headers)
    for raw_cookie in raw_cookies:
        # Strip the part matched by self.regex before storing the cookie.
        raw_cookie = self.regex.sub('', raw_cookie)
        response.cookies[raw_cookie] = StringMorsel(raw_cookie)
    return response
def __call__(
    self,
    environ: dict,
    start_response: t.Callable[[str, t.List[t.Tuple[str, str]]], None],
) -> t.List[bytes]:
    """WSGI entry point: relay the request upstream and return the body.

    CONNECT requests are acknowledged immediately; other methods go
    through self.request()/self.response(). Hop-by-hop headers are
    stripped from the upstream response before relaying.
    """
    req = Request(environ)
    if req.method == "CONNECT":
        # Tunnel handshake: acknowledge without contacting upstream.
        start_response(f'200 OK', [])
        return [b'Connection Established']
    response = self.request(req)
    # Snapshot keys before popping; hop-by-hop headers must not be relayed.
    for k in list(response.headers.keys()):
        if is_hop_by_hop(k):
            response.headers.pop(k)
    if response.status_code == 200:
        # Successful responses may be transformed by the response hook.
        content = self.response(response)
    else:
        content = response.content
    start_response(f'{response.status_code} {response.reason}',
                   list(response.headers.items()))
    return [content]
def proxy_request(self, environ, start_response):
    """Forward a WSGI request to the CouchDB connection and relay the
    response, minus hop-by-hop headers.

    Returns the response body wrapped in a single-element list, per WSGI.
    """
    method = environ['REQUEST_METHOD']
    path = self.hack(environ['PATH_INFO'], remove_qs=False)
    if method.upper() in ["POST", "PUT"]:
        # Only methods with a body read wsgi.input.
        body = environ['wsgi.input'].read(int(environ['CONTENT_LENGTH']))
    else:
        body = None
    # HTTP_FOO -> FOO for the upstream request headers.
    headers = dict([(h[5:], environ[h]) for h in environ if h.startswith("HTTP_")])
    self.couch.request(method, path, body, headers)
    resp = self.couch.getresponse()
    status = "%s %s" % (resp.status, resp.reason)
    # Use a list comprehension instead of filter(): on Python 3 filter()
    # returns a lazy iterator, which is not a valid WSGI header list.
    headers = [h for h in resp.getheaders() if not is_hop_by_hop(h[0])]
    body = resp.read()
    start_response(status, headers)
    return [body]
def Home():
    """Simple authenticated URL forwarder (bottle view).

    Query parameters: ``u`` = target URL (percent-encoded), ``k`` = auth
    key (must be in ALLOW_KEYS), ``t`` = timeout in seconds (default 30).
    Fetches the target with urllib2 and relays the body plus all
    non-hop-by-hop response headers through the bottle response.
    """
    resp = bottle.response
    qry = bottle.request.query
    # 't' falls back to 30; int() raises on a non-numeric value (uncaught).
    url, k, timeout = qry.u, qry.k, int(qry.get('t', '30'))
    if k and k not in ALLOW_KEYS:
        return 'Auth Key is invalid!'
    if url and k:
        url = urllib.unquote(url)
        try:
            req = urllib2.Request(url)
            # Impersonate a desktop browser so the target serves normal HTML.
            req.add_header(
                'User-Agent',
                "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0)"
            )
            req.add_header(
                'Accept',
                "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
            )
            ret = urllib2.urlopen(req, timeout=timeout)
            content = ret.read()
            # Hop-by-hop headers must not be relayed by an intermediary.
            headers = [(n, v) for n, v in ret.info().items()
                       if not is_hop_by_hop(n)]
            # set_header() replaces any prior value, add_header() appends;
            # use add_header for every Set-Cookie after the first so that
            # multiple cookies survive.
            cookieadded = False
            for n, v in headers:
                if n == 'Set-Cookie' and cookieadded:
                    resp.add_header(n, v)
                else:
                    resp.set_header(n, v)
                if n == 'Set-Cookie':
                    cookieadded = True
            return content
        except socket.timeout:
            # Upstream timeout: fall through to the 400 abort below.
            pass
        except Exception as e:
            print("ERR:%s:%s" % (type(e), str(e)))
        bottle.abort(400)
    else:
        return "<html><head><title>Forwarder Url</title></head><body>Forwarder : thisurl?u=url&k=AUTHKEY&t=timeout</body></html>"
def upload_from_url(request, url, cookies):
    """Download an image from ``url`` on the user's behalf and store it.

    Validates the URL (scheme whitelist, no loopback to our own host),
    forwards a sanitized subset of the client's request headers plus the
    supplied cookie string, enforces a 3 MiB size limit, and passes the
    downloaded bytes on to handle_image().

    :param request: Django request (host check and header copy)
    :param url: remote resource to fetch
    :param cookies: raw Cookie header value to send upstream
    :return: a JsonResponse* object describing success or the failure mode
    """
    scheme, netloc, _, _, _, _ = urlparse(url.lower())
    if not scheme or not netloc:
        return JsonResponseBadRequest("Invalid URL")
    if scheme not in ("http", "https"):
        return JsonResponseBadRequest("Unsupported URL scheme")
    if netloc == request.get_host().lower():
        return JsonResponseBadRequest("Loopback is not allowed")
    if len(cookies) > 10240:
        return JsonResponseBadRequest("Cookie is too big")
    # Rebuild client headers from the WSGI environ (HTTP_FOO_BAR -> FOO-BAR),
    # dropping all X-* headers outright.
    headers = {
        key[5:].replace("_", "-"): value
        for key, value in request.META.items()
        if key.startswith("HTTP_") and not key.startswith("HTTP_X_")
    }
    # Hop-by-hop headers and these end-to-end ones are never forwarded.
    excluded_headers = [
        "ACCEPT", "ACCEPT-ENCODING", "COOKIE", "REFERER", "HOST"
    ]
    for key in list(headers.keys()):
        if is_hop_by_hop(key) or key in excluded_headers:
            del headers[key]
    headers["Cookie"] = cookies
    try:
        resp = requests.get(
            url,
            headers=headers,
            timeout=10,
        )
        resp.raise_for_status()
        content = resp.content
        # Content-Length is optional in a response; previously a missing
        # header raised an uncaught KeyError (surfacing as HTTP 500).
        # Fall back to the actual downloaded size instead.
        declared = resp.headers.get("content-length")
        size = int(declared) if declared is not None else len(content)
        if size > 3 * 1024 * 1024:
            return JsonResponseBadRequest("The remote file is too big")
    except (socket.timeout, requests.Timeout) as e:
        # 504 Gateway Timeout
        return JsonResponseError(str(e), status=504)
    except requests.HTTPError as e:
        return JsonResponseError(
            str(e),
            code=e.response.status_code,
            status=502,
        )
    except (socket.error, requests.RequestException) as e:
        # 502 Bad Gateway
        return JsonResponseError(str(e), status=502)
    if len(content) != size:
        # Upstream closed the connection before the declared length arrived.
        return JsonResponseError(
            "Connection terminated while downloading the file", status=502)
    file = File(BytesIO(content), name="")
    file.size = size
    return handle_image(file)
def doProxy(self, method):
    """Proxy a request to twitter.com / api.twitter.com / search.twitter.com.

    Routes by the leading path segment, optionally converting HTTP Basic
    credentials into OAuth (the basic user:pass pair carries the OAuth
    specifier and key name), then copies status, body and safe headers
    onto self.response.
    """
    origUrl = self.request.url
    origBody = self.request.body
    (scm, netloc, path, params, query, _) = urlparse(origUrl)
    if path == '/':
        self.myOutput('text/html', 'here is the proxy of \"twitter.com\" by GTAP %s !' % (gtapVersion))
    else:
        #logging.debug(self.request.headers)
        auth_header = None
        if 'Authorization' in self.request.headers :
            auth_header = self.request.headers['Authorization']
        elif 'X-Authorization' in self.request.headers :
            auth_header = self.request.headers['X-Authorization']
        headers = {}
        use_oauth = False
        if auth_header != None :
            #auth_header = self.request.headers['Authorization']
            # Basic auth smuggles the OAuth identity: base64("name:key").
            auth_parts = auth_header.split(' ')
            if auth_parts[0].lower() == 'basic':
                user_pass_parts = base64.b64decode(auth_parts[1]).split(':')
                oauth_name = user_pass_parts[0]
                oauth_key = user_pass_parts[1]
                use_oauth = True
        # Drop the first path segment (the proxy prefix) and re-normalise.
        path_parts = path.split('/')
        path_parts = path_parts[2:]
        path_parts.insert(0,'')
        #logging.debug(path_parts)
        path = '/'.join(path_parts).replace('//','/')
        # Route to the right twitter host based on the leading segment.
        if path_parts[1] == 'search':
            netloc = 'search.twitter.com'
            newpath = path[7:]
        elif path_parts[1] == 'api' and path_parts[2] == '1':
            netloc = 'api.twitter.com'
            newpath = path[4:]
        elif path_parts[1] == 'api':
            netloc = 'twitter.com'
            newpath = path[4:]
        elif path_parts[1] == '1':
            netloc = 'api.twitter.com'
            newpath = path
        else:
            netloc = 'twitter.com'
            newpath = path
        if newpath == '/' or newpath == '':
            self.myOutput('text/html', 'here is the proxy of \"'+ netloc + '\" by GTAP %s !' % (gtapVersion))
        else:
            if use_oauth:
                # Re-sign with OAuth; query/body params are re-sent as
                # OAuth request parameters.
                newUrl = urlunparse(("https", netloc, newpath, params, '', ''))
                client = OAuthClient('twitter', self)
                client.key_name = oauth_key
                client.specifier = oauth_name
                #logging.info(('method',method))
                if method.upper() == 'POST':
                    #logging.info(dict([(k,v) for k,v in parse_qsl(origBody)]))
                    data = client.post_raw(newUrl,method,**dict([(k,v) for k,v in parse_qsl(origBody)]))
                else:
                    data = client.get_raw(newUrl,method,**dict([(k,v) for k,v in parse_qsl(query)]))
            else:
                # No credentials: plain pass-through fetch.
                newUrl = urlunparse(("https", netloc, newpath, params, query, ''))
                data = urlfetch.fetch(newUrl, payload=origBody, method=method, headers=headers)
            # Copy response headers, skipping hop-by-hop plus status/via, and
            # restoring the canonical casing of twitter's X-* headers.
            skipped_headers = ['status', 'via']
            for k in data.headers:
                if is_hop_by_hop(k):
                    continue
                if k.lower() in skipped_headers:
                    continue
                k = k \
                    .replace('x-transaction', 'X-Transaction') \
                    .replace('x-ratelimit-limit', 'X-RateLimit-Limit') \
                    .replace('x-ratelimit-remaining', 'X-RateLimit-Remaining') \
                    .replace('x-runtime', 'X-Runtime') \
                    .replace('x-ratelimit-class', 'X-RateLimit-Class') \
                    .replace('x-revision', 'X-Revision') \
                    .replace('x-ratelimit-reset', 'X-RateLimit-Reset')
                # NOTE(review): del on a possibly-absent key — presumably the
                # response header container tolerates this; confirm.
                del self.response.headers[k]
                self.response.headers[k] = data.headers[k];
            self.response.set_status(data.status_code)
            #logging.debug(headers)
            #logging.debug(data.status_code)
            #logging.debug(data.headers)
            #logging.debug(self.response.headers)
            #logging.debug(data.content)
            self.response.out.write(data.content)
def get_mocked_response(self, suffix, req_spec, resp_spec):
    """Build the mock response dict for a matched request.

    Dispatches on ``resp_spec['type']``:
      - 'content': inline body from the spec (non-string bodies are
        JSON-encoded and tagged with an application/json content type)
      - 'file':    body read from a file on disk, content type guessed
      - 'remote':  request proxied to a remote URL and the reply relayed
      - any key in handlers.HANDLERS: delegated to the handler object

    :return: the response dict wrapped by wrap_response()
    :raises ValueError: on an unknown request or response type
    """
    resp_type = resp_spec['type']
    ret = {'headers': defaultdict(set)}
    if resp_type == 'content':
        body = resp_spec.get('body') or ''
        if isinstance(body, six.string_types):
            ret['body'] = body
        else:
            # Non-string bodies are serialized as JSON.
            ret['body'] = json.dumps(body)
            ret['headers']['content-type'] = 'application/json'
        # BUG FIX: a stray trailing assignment used to overwrite ret['body']
        # with the raw spec value here, silently discarding the JSON
        # serialization and content-type logic above.
    elif resp_type == 'file':
        ret['body'] = open(resp_spec['body']).read()
        mime_type, __ = mimetypes.guess_type(resp_spec['body'])
        if mime_type:
            ret['headers']['content-type'] = mime_type
    elif resp_type == 'remote':
        # Resolve the upstream URL from the matched portion of the path.
        if req_spec['type'] == 'prefix':
            pos = suffix.find(req_spec['path'])
            url = util.concat_path(resp_spec['url'],
                                   suffix[pos + len(req_spec['path']):])
        elif req_spec['type'] == 'exact':
            url = resp_spec['url']
        else:
            raise ValueError("Unknown request type %s." % req_spec['type'])
        http_params = bottle.request.query_string
        http_body = bottle.request.body.read()
        http_method = resp_spec.get(
            'method') or bottle.request.method or 'GET'
        http_timeout = req_spec.get('timeout') or DEFAULT_TIMEOUT
        http_headers = {k.lower(): v for k, v in
                        util.filter_request_headers(
                            bottle.request,
                            allow_host=self.allow_host).items()}
        http_headers.update(req_spec.get('headers') or {})
        sess = session_pool.get_session()
        resp_obj = sess.request(http_method, url, params=http_params,
                                data=http_body, headers=http_headers,
                                timeout=http_timeout, allow_redirects=False)
        ret['body'] = resp_obj.content
        # Relay upstream headers minus hop-by-hop ones; content-length is
        # recomputed downstream so drop it too.
        for k, v in resp_obj.headers.items():
            if (not is_hop_by_hop(k)) and not k.lower() in ['content-length']:
                ret['headers'][k.lower()] = v
        if ret['headers'].get('location'):
            ret['headers']['location'] = util.replace_location_host(
                ret['headers']['location'], url,
                bottle.request.urlparts.netloc)
        # Split folded Set-Cookie values back into individual cookies.
        set_cookie = ret['headers'].pop('set-cookie', None)
        if set_cookie:
            ret['headers']['set-cookie'] = \
                set([i.strip() for i in re.split(r",(?![^=]+;)", set_cookie)
                     if i.strip()])
        ret['status'] = resp_obj.status_code
    elif resp_type in handlers.HANDLERS:
        handler_object = handlers.get_handler_object(
            req_spec, resp_spec, root_prefix=self.prefix)
        pos = suffix.find(req_spec['path'])
        ret = handler_object.dispatch(suffix[pos + len(req_spec['path']):])
    else:
        raise ValueError("Unknown mock response type '%s'." % resp_type)
    return wrap_response(ret, resp_spec)
additional_params = dict([(k,v) for k,v in parse_qsl(orig_body)]) use_method = urlfetch.GET if method=='GET' else urlfetch.POST try : data = client.make_request(url=new_url, token=user_access_token, secret=user_access_secret, method=use_method, protected=protected, additional_params = additional_params) except Exception,error_message: logging.debug( error_message ) error_output(self, content=error_message) else : #logging.debug(data.headers) self.response.headers.add_header('GTAP-Version', gtap_version) for res_name, res_value in data.headers.items(): if is_hop_by_hop(res_name) is False and res_name!='status': self.response.headers.add_header(res_name, res_value) self.response.out.write(data.content) def post(self): self.do_proxy('POST') def get(self): self.do_proxy('GET') class OauthPage(webapp.RequestHandler): def get(self, mode=""): callback_url = "%s/oauth/verify" % self.request.host_url client = oauth.TwitterClient(CONSUMER_KEY, CONSUMER_SECRET, callback_url)
def upload_from_url(request, url, cookies):
    """Fetch an image from a remote URL for the user and hand it to storage.

    Rejects bad schemes and loopback targets, sanitizes the forwarded
    headers (hop-by-hop and selected end-to-end headers removed, X-*
    dropped), applies a 3 MiB limit, and forwards the bytes to
    handle_image().

    :return: a JsonResponse* object for either success or failure
    """
    scheme, netloc, _, _, _, _ = urlparse(url.lower())
    if not scheme or not netloc:
        return JsonResponseBadRequest("Invalid URL")
    if scheme not in ("http", "https"):
        return JsonResponseBadRequest("Unsupported URL scheme")
    if netloc == request.get_host().lower():
        return JsonResponseBadRequest("Loopback is not allowed")
    if len(cookies) > 10240:
        return JsonResponseBadRequest("Cookie is too big")
    # HTTP_FOO_BAR environ keys become FOO-BAR header names; X-* are skipped.
    headers = {
        key[5:].replace("_", "-"): value
        for key, value in request.META.items()
        if key.startswith("HTTP_") and not key.startswith("HTTP_X_")
    }
    excluded_headers = [
        "ACCEPT", "ACCEPT-ENCODING", "COOKIE", "REFERER", "HOST"
    ]
    for key in list(headers.keys()):
        if is_hop_by_hop(key) or key in excluded_headers:
            del headers[key]
    headers["Cookie"] = cookies
    try:
        resp = requests.get(
            url,
            headers=headers,
            timeout=10,
        )
        resp.raise_for_status()
        content = resp.content
        # FIX: a response without Content-Length used to raise an uncaught
        # KeyError (HTTP 500); use the downloaded size as the fallback.
        declared = resp.headers.get("content-length")
        size = int(declared) if declared is not None else len(content)
        if size > 3 * 1024 * 1024:
            return JsonResponseBadRequest("The remote file is too big")
    except (socket.timeout, requests.Timeout) as e:
        # 504 Gateway Timeout
        return JsonResponseError(str(e), status=504)
    except requests.HTTPError as e:
        return JsonResponseError(
            str(e),
            code=e.response.status_code,
            status=502,
        )
    except (socket.error, requests.RequestException) as e:
        # 502 Bad Gateway
        return JsonResponseError(str(e), status=502)
    if len(content) != size:
        # Truncated transfer: fewer bytes arrived than were declared.
        return JsonResponseError("Connection terminated while downloading the file", status=502)
    file = File(BytesIO(content), name="")
    file.size = size
    return handle_image(file)
def pki_request(request, resource_url=None): """ App's main view :param request: Django request object :type request: django.http.HttpRequest :param resource_url: Remainder of parsed path, e.g. '/pki/<resource_url>' :rtype: HttpResponse """ # Limit to allowed host calls, e.g. when coming from local Py packages req_host = request.get_host() if not req_host: return HttpResponse("Host missing for service request.", status=403, content_type="text/plain") else: req_host = req_host.split(':')[0] # remove any port logger.debug("request host: {0}".format(req_host)) site_url = urlsplit(settings.SITEURL) exch_url = urlsplit(settings.SITE_LOCAL_URL) allowed_hosts = [ 'localhost', '127.0.0.1', '[::1]', 'testserver', site_url.hostname, exch_url.hostname ] logger.debug("allowed_hosts: {0}".format(allowed_hosts)) if not validate_host(req_host, allowed_hosts): return HttpResponse("Host requesting service is not allowed.", status=403, content_type="text/plain") if not resource_url: return HttpResponse('Resource URL missing for PKI request', status=400, content_type='text/plain') logger.info( "PKI view starting with resource url: {0}".format(resource_url)) # Manually copy over headers, skipping unwanted ones logger.info("PKI view request.COOKIES: {0}".format(request.COOKIES)) logger.info("PKI view request.META: {0}".format(request.META)) # IMPORTANT: Don't pass any cookies or OAuth2 headers to remote resource headers = {} if request.method in ("POST", "PUT") and "CONTENT_TYPE" in request.META: headers["Content-Type"] = request.META["CONTENT_TYPE"] # Pass through HTTP_ACCEPT* headers accepts = ['Accept', 'Accept-Encoding', 'Accept-Language'] # Why no support for Accept-Charset, i.e. no HTTP_ACCEPT_CHARSET? http_accepts = [ 'HTTP_ACCEPT', 'HTTP_ACCEPT_ENCODING', 'HTTP_ACCEPT_LANGUAGE' ] for accept, http_accept in zip(accepts, http_accepts): if http_accept in request.META: headers[accept] = request.META[http_accept] # TODO: Passthru HTTP_REFERER? # Strip our bearer token header! 
auth_header = request.META.get('HTTP_AUTHORIZATION', None) # TODO: Passing a bearer token for a remote service should be considered; # add a comparison of our get_bearer_token(), delete only on match if auth_header and 'bearer' in auth_header.lower(): del request.META['HTTP_AUTHORIZATION'] # Strip our bearer token token from query params! # TODO: Migrate to request.GET QueryDict parsing? # Unsure if keep_blank_values and doseq are supported query_str = request.META['QUERY_STRING'] query = None if query_str != '': # Keep keys with empty values and maintain order params = parse_qsl(query_str.strip(), keep_blank_values=True) clean_params = [(k, v) for k, v in params if k.lower() != 'access_token'] query = urlencode(clean_params, doseq=True) # Turn the remainder of path back into original URL r_url = unquote(resource_url) # NOTE: Since no origin scheme is recorded (could be in rewritten pki # proxy path), assume https url = 'https://' + r_url + (('?' + query) if query else '') logger.info("PKI view starting remote connection to url: {0}".format(url)) # Do remote request logger.info("PKI view 'requests' request headers:\n{0}".format(headers)) req_res = https_client.request( method=request.method, url=url, headers=headers, data=request.body, ) """:type: requests.Response""" if not req_res: return HttpResponse('Remote service did not return content.', status=400, content_type='text/plain') # TODO: Capture errors and signal to web UI for reporting to user. 
# Don't let errors just raise exceptions logger.info("PKI view 'requests' response status code: {0}".format( req_res.status_code)) logger.info("PKI view 'requests' response headers:\n{0}".format( req_res.headers)) if query and ('f=pjson' in query or 'f=json' in query): # Sometimes arcrest servers don't return proper content type content_type = 'application/json' elif 'Content-Type' in req_res.headers: content_type = req_res.headers['Content-Type'] else: content_type = 'text/plain' req_transfer_encodings = ['gzip', 'deflate'] # If we get a redirect, beyond # allowed in config, add a useful message. if req_res.status_code in (301, 302, 303, 307): response = HttpResponse( 'This proxy does not support more than the configured redirects. ' 'The server in "{0}" asked for this recent redirect: "{1}"'.format( url, req_res.headers.get('Location')), status=req_res.status_code, content_type=content_type) response['Location'] = req_res.headers['Location'] else: # logger.debug("pki requests response content (first 2000 chars):\n{0}" # .format(req_res.content[:2000])) txt_content = 'Not textual content' txt_types = ['text', 'json', 'xml'] if any([t in content_type.lower() for t in txt_types]): txt_content = req_res.content # Format JSON as needed for client log output if ('json' in content_type.lower() and logger.getEffectiveLevel() <= logging.INFO and callable(logging_timer_expired) and not logging_timer_expired()): txt_content = json.dumps(json.loads(txt_content), indent=2) logger.info( "PKI view 'requests' response content:\n{0}".format(txt_content)) if req_res.headers.get('content-encoding') in req_transfer_encodings: # Change content length to reflect requests auto-decompression req_res.headers['content-length'] = len(req_res.content) response = HttpResponse( content=req_res.content, status=req_res.status_code, reason=req_res.reason, content_type=content_type, ) # TODO: Should we be sniffing encoding/charset and passing back? 
# Passthru headers from remote service, but don't overwrite defined headers def skip_content_encoding(hdr, val): # requests automatically decodes gzip and deflate transfer encodings; # ensure header is not passed through to client # http://docs.python-requests.org/en/master/user/quickstart/ # #binary-response-content if hdr.lower() == 'content-encoding' and val in req_transfer_encodings: return True return False skip_headers = ['content-type'] # add any as lowercase for h, v in req_res.headers.items(): if h.lower() not in skip_headers \ and not skip_content_encoding(h, v) \ and not wsgiref_util.is_hop_by_hop(h)\ and h not in response: response[h] = v logger.info("PKI view Django response headers:\n{0}".format( response.serialize_headers())) return response
data = client.make_request( url=new_url, token=user_access_token, secret=user_access_secret, method=use_method, protected=protected, additional_params=additional_params, ) except Exception, error_message: logging.debug(error_message) error_output(self, content=error_message) else: # logging.debug(data.headers) self.response.headers.add_header("GTAP-Version", gtap_version) for res_name, res_value in data.headers.items(): if is_hop_by_hop(res_name) is False and res_name != "status": self.response.headers.add_header(res_name, res_value) self.response.out.write(data.content) logging.debug(data.headers) logging.debug("data.content:" + data.content) def post(self): self.do_proxy("POST") def get(self): self.do_proxy("GET") class OauthPage(webapp.RequestHandler): def get(self, mode=""): callback_url = "%s/oauth/verify" % self.request.host_url
else: raise e except socket.timeout: return error_50x(environ, start_response, '504', "Gateway Timeout") except Exception, e: return error_50x(environ, start_response, '503', "Service Unavailable") else: # Do the actual proxying data_sent = 0 req_start = time.time() content_length = fp.info().getheader("Content-Length", 0) try: response_headers = [(k, v) for k, v in fp.info().items() if not is_hop_by_hop(k)] write = start_response("200 OK", response_headers) while True: chunk = fp.read(_config_chunkSize) if not chunk: break write(chunk) data_sent += len(chunk) gatewayLog.debug("Finished proxied request of %s, elapsed: %.02fs, transfer: %s bytes.", fileName, time.time() - req_start, data_sent) except Exception, e: gatewayLog.exception("Error transfering proxied content... %s, sent %s of %s, elapsed %.02f", fileName, data_sent, content_length, time.time() - req_start)
try: data = client.make_request(url=new_url, token=user_access_token, secret=user_access_secret, method=use_method, protected=protected, additional_params=additional_params) except Exception, error_message: logging.debug(error_message) error_output(self, content=error_message) else: #logging.debug(data.headers) self.response.headers.add_header('GTAP-Version', gtap_version) for res_name, res_value in data.headers.items(): if is_hop_by_hop(res_name) is False and res_name != 'status': self.response.headers.add_header(res_name, res_value) self.response.out.write(data.content) def post(self): self.do_proxy('POST') def get(self): self.do_proxy('GET') class OauthPage(webapp.RequestHandler): def get(self, mode=""): consumer_key, consumer_secret = oauth.get_consumer_info() callback_url = "%s/oauth/verify" % self.request.host_url client = oauth.TwitterClient(consumer_key, consumer_secret,