def _oauth10a_signature(consumer_token, method, url, parameters={}, token=None):
    """Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request.

    See http://oauth.net/core/1.0a/#signing_process
    """
    parts = urlparse.urlparse(url)
    scheme, netloc, path = parts[:3]
    normalized_url = scheme.lower() + "://" + netloc.lower() + path

    base_elems = []
    base_elems.append(method.upper())
    base_elems.append(normalized_url)
    base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
                               for k, v in sorted(parameters.items())))

    base_string = "&".join(_oauth_escape(e) for e in base_elems)
    key_elems = [escape.utf8(urllib_parse.quote(consumer_token["secret"], safe='~'))]
    key_elems.append(escape.utf8(
        urllib_parse.quote(token["secret"], safe='~') if token else ""))
    key = b"&".join(key_elems)

    hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
    return binascii.b2a_base64(hash.digest())[:-1]
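# A minimal, self-contained sketch (Python 3 stdlib only; the secrets below are
# made up for illustration, not part of the code above) of the final HMAC-SHA1
# step: the signing key is the percent-encoded consumer secret and token secret
# joined with "&", and the signature is the base64 digest without a trailing
# newline.
import base64
import hashlib
import hmac
from urllib.parse import quote


def hmac_sha1_signature(base_string, consumer_secret, token_secret=""):
    key = ("%s&%s" % (quote(consumer_secret, safe="~"),
                      quote(token_secret, safe="~"))).encode("utf-8")
    digest = hmac.new(key, base_string.encode("utf-8"), hashlib.sha1).digest()
    return base64.b64encode(digest)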
def get_authenticated_user(self, callback, http_client=None):
    """Gets the OAuth authorized user and access token.

    This method should be called from the handler for your
    OAuth callback URL to complete the registration process.  We run the
    callback with the authenticated user dictionary.  This dictionary
    will contain an ``access_key`` which can be used to make authorized
    requests to this service on behalf of the user.  The dictionary will
    also contain other fields such as ``name``, depending on the service
    used.
    """
    future = callback
    request_key = escape.utf8(self.get_argument("oauth_token"))
    oauth_verifier = self.get_argument("oauth_verifier", None)
    request_cookie = self.get_cookie("_oauth_request_token")
    if not request_cookie:
        future.set_exception(
            AuthError("Missing OAuth request token cookie"))
        return
    self.clear_cookie("_oauth_request_token")
    cookie_key, cookie_secret = [
        base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")]
    if cookie_key != request_key:
        future.set_exception(
            AuthError("Request token does not match cookie"))
        return
    token = dict(key=cookie_key, secret=cookie_secret)
    if oauth_verifier:
        token["verifier"] = oauth_verifier
    if http_client is None:
        http_client = self.get_auth_http_client()
    http_client.fetch(self._oauth_access_token_url(token),
                      functools.partial(self._on_access_token, callback))
def render_xml(self, value):
    assert isinstance(value, dict) and len(value) == 1
    self.set_header("Content-Type", "application/xml; charset=UTF-8")
    name = value.keys()[0]
    parts = []
    parts.append('<' + escape.utf8(name) +
                 ' xmlns="http://doc.s3.amazonaws.com/2006-03-01">')
    self._render_parts(value.values()[0], parts)
    parts.append('</' + escape.utf8(name) + '>')
    self.finish('<?xml version="1.0" encoding="UTF-8"?>\n' +
                ''.join(parts))
def test_unicode_literal_expression(self):
    # Unicode literals should be usable in templates.  Note that this
    # test simulates unicode characters appearing directly in the
    # template file (with utf8 encoding), i.e. \u escapes would not
    # be used in the template file itself.
    if str is unicode_type:
        # python 3 needs a different version of this test since
        # 2to3 doesn't run on template internals
        template = Template(utf8(u'{{ "\u00e9" }}'))
    else:
        template = Template(utf8(u'{{ u"\u00e9" }}'))
    self.assertEqual(template.generate(), utf8(u"\u00e9"))
def test_utf8_logging(self):
    with ignore_bytes_warning():
        self.logger.error(u"\u00e9".encode("utf8"))
    if issubclass(bytes, basestring_type):
        # on python 2, utf8 byte strings (and by extension ascii byte
        # strings) are passed through as-is.
        self.assertEqual(self.get_output(), utf8(u"\u00e9"))
    else:
        # on python 3, byte strings always get repr'd even if
        # they're ascii-only, so this degenerates into another
        # copy of test_bytes_logging.
        self.assertEqual(self.get_output(), utf8(repr(utf8(u"\u00e9"))))
def test_url_escape_quote_plus(self):
    unescaped = '+ #%'
    plus_escaped = '%2B+%23%25'
    escaped = '%2B%20%23%25'
    self.assertEqual(url_escape(unescaped), plus_escaped)
    self.assertEqual(url_escape(unescaped, plus=False), escaped)
    self.assertEqual(url_unescape(plus_escaped), unescaped)
    self.assertEqual(url_unescape(escaped, plus=False), unescaped)
    self.assertEqual(url_unescape(plus_escaped, encoding=None),
                     utf8(unescaped))
    self.assertEqual(url_unescape(escaped, encoding=None, plus=False),
                     utf8(unescaped))
def test_url_unescape_unicode(self):
    tests = [
        ('%C3%A9', u'\u00e9', 'utf8'),
        ('%C3%A9', u'\u00c3\u00a9', 'latin1'),
        ('%C3%A9', utf8(u'\u00e9'), None),
    ]
    for escaped, unescaped, encoding in tests:
        # input strings to url_unescape should only contain ascii
        # characters, but make sure the function accepts both byte
        # and unicode strings.
        self.assertEqual(url_unescape(to_unicode(escaped), encoding), unescaped)
        self.assertEqual(url_unescape(utf8(escaped), encoding), unescaped)
def test_xhtml_escape(self):
    tests = [
        ("<foo>", "&lt;foo&gt;"),
        (u"<foo>", u"&lt;foo&gt;"),
        (b"<foo>", b"&lt;foo&gt;"),

        ("<>&\"'", "&lt;&gt;&amp;&quot;&#39;"),
        ("&amp;", "&amp;amp;"),

        (u"<\u00e9>", u"&lt;\u00e9&gt;"),
        (b"<\xc3\xa9>", b"&lt;\xc3\xa9&gt;"),
    ]
    for unescaped, escaped in tests:
        self.assertEqual(utf8(xhtml_escape(unescaped)), utf8(escaped))
        self.assertEqual(utf8(unescaped), utf8(xhtml_unescape(escaped)))
def test_all_methods(self):
    for method in ['GET', 'DELETE', 'OPTIONS']:
        response = self.fetch('/all_methods', method=method)
        self.assertEqual(response.body, utf8(method))
    for method in ['POST', 'PUT', 'PATCH']:
        response = self.fetch('/all_methods', method=method, body=b'')
        self.assertEqual(response.body, utf8(method))
    response = self.fetch('/all_methods', method='HEAD')
    self.assertEqual(response.body, b'')
    response = self.fetch('/all_methods', method='OTHER',
                          allow_nonstandard_methods=True)
    self.assertEqual(response.body, b'OTHER')
def resolve(self, host, port, family=0):
    # getHostByName doesn't accept IP addresses, so if the input
    # looks like an IP address just return it immediately.
    if twisted.internet.abstract.isIPAddress(host):
        resolved = host
        resolved_family = socket.AF_INET
    elif twisted.internet.abstract.isIPv6Address(host):
        resolved = host
        resolved_family = socket.AF_INET6
    else:
        deferred = self.resolver.getHostByName(utf8(host))
        resolved = yield gen.Task(deferred.addBoth)
        if isinstance(resolved, failure.Failure):
            try:
                resolved.raiseException()
            except twisted.names.error.DomainError as e:
                raise IOError(e)
        elif twisted.internet.abstract.isIPAddress(resolved):
            resolved_family = socket.AF_INET
        elif twisted.internet.abstract.isIPv6Address(resolved):
            resolved_family = socket.AF_INET6
        else:
            resolved_family = socket.AF_UNSPEC
    if family != socket.AF_UNSPEC and family != resolved_family:
        raise Exception('Requested socket family %d but got %d' %
                        (family, resolved_family))
    result = [
        (resolved_family, (resolved, port)),
    ]
    raise gen.Return(result)
def parse_body_arguments(content_type, body, arguments, files, headers=None):
    """Parses a form request body.

    Supports ``application/x-www-form-urlencoded`` and
    ``multipart/form-data``.  The ``content_type`` parameter should be
    a string and ``body`` should be a byte string.  The ``arguments``
    and ``files`` parameters are dictionaries that will be updated
    with the parsed contents.
    """
    if headers and 'Content-Encoding' in headers:
        gen_log.warning("Unsupported Content-Encoding: %s",
                        headers['Content-Encoding'])
        return
    if content_type.startswith("application/x-www-form-urlencoded"):
        try:
            uri_arguments = parse_qs_bytes(native_str(body), keep_blank_values=True)
        except Exception as e:
            gen_log.warning('Invalid x-www-form-urlencoded body: %s', e)
            uri_arguments = {}
        for name, values in uri_arguments.items():
            if values:
                arguments.setdefault(name, []).extend(values)
    elif content_type.startswith("multipart/form-data"):
        try:
            fields = content_type.split(";")
            for field in fields:
                k, sep, v = field.strip().partition("=")
                if k == "boundary" and v:
                    parse_multipart_form_data(utf8(v), body, arguments, files)
                    break
            else:
                raise ValueError("multipart boundary not found")
        except Exception as e:
            gen_log.warning("Invalid multipart/form-data: %s", e)
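# A hedged usage sketch of the parser above, assuming it is importable from
# tornado.httputil as in this version of the code. The dicts are supplied by the
# caller and updated in place; values arrive as byte strings.
from tornado.httputil import parse_body_arguments

args, files = {}, {}
parse_body_arguments("application/x-www-form-urlencoded",
                     b"a=1&b=2&b=3", args, files)
# args now holds byte-string values, roughly {'a': [b'1'], 'b': [b'2', b'3']};
# files stays empty for this content type.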
def finish(self):
    response_body = utf8(json_encode(self.chunk_lengths))
    self.connection.write_headers(
        ResponseStartLine('HTTP/1.1', 200, 'OK'),
        HTTPHeaders({'Content-Length': str(len(response_body))}))
    self.connection.write(response_body)
    self.connection.finish()
def test_body_encoding(self):
    unicode_body = u"\xe9"
    byte_body = binascii.a2b_hex(b"e9")

    # unicode string in body gets converted to utf8
    response = self.fetch("/echopost", method="POST", body=unicode_body,
                          headers={"Content-Type": "application/blah"})
    self.assertEqual(response.headers["Content-Length"], "2")
    self.assertEqual(response.body, utf8(unicode_body))

    # byte strings pass through directly
    response = self.fetch("/echopost", method="POST",
                          body=byte_body,
                          headers={"Content-Type": "application/blah"})
    self.assertEqual(response.headers["Content-Length"], "1")
    self.assertEqual(response.body, byte_body)

    # Mixing unicode in headers and byte string bodies shouldn't
    # break anything
    response = self.fetch("/echopost", method="POST", body=byte_body,
                          headers={"Content-Type": "application/blah"},
                          user_agent=u"foo")
    self.assertEqual(response.headers["Content-Length"], "1")
    self.assertEqual(response.body, byte_body)
def _render_parts(self, value, parts=[]):
    if isinstance(value, (unicode, bytes)):
        parts.append(escape.xhtml_escape(value))
    elif isinstance(value, int) or isinstance(value, long):
        parts.append(str(value))
    elif isinstance(value, datetime.datetime):
        parts.append(value.strftime("%Y-%m-%dT%H:%M:%S.000Z"))
    elif isinstance(value, dict):
        for name, subvalue in value.iteritems():
            if not isinstance(subvalue, list):
                subvalue = [subvalue]
            for subsubvalue in subvalue:
                parts.append('<' + escape.utf8(name) + '>')
                self._render_parts(subsubvalue, parts)
                parts.append('</' + escape.utf8(name) + '>')
    else:
        raise Exception("Unknown S3 value type %r" % value)
def test_json_decode(self):
    # json_decode accepts both bytes and unicode, but strings it returns
    # are always unicode.
    self.assertEqual(json_decode(b'"foo"'), u"foo")
    self.assertEqual(json_decode(u'"foo"'), u"foo")

    # Non-ascii bytes are interpreted as utf8
    self.assertEqual(json_decode(utf8(u'"\u00e9"')), u"\u00e9")
def compute_accept_value(key):
    """Computes the value for the Sec-WebSocket-Accept header,
    given the value for Sec-WebSocket-Key.
    """
    sha1 = hashlib.sha1()
    sha1.update(utf8(key))
    sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")  # Magic value
    return native_str(base64.b64encode(sha1.digest()))
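# An illustrative, stdlib-only recomputation of the Sec-WebSocket-Accept value the
# helper above produces, checked against the example handshake in RFC 6455
# section 1.3: SHA-1 over the client key concatenated with the fixed GUID, then base64.
import base64
import hashlib

client_key = "dGhlIHNhbXBsZSBub25jZQ=="
accept = base64.b64encode(hashlib.sha1(
    (client_key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode("ascii")).digest())
assert accept == b"s3pPLMBiTxaQ9kYGzzhZRbK+xOo="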
def handle_read(self, data):
    logging.info("handle_read")
    data = to_unicode(data)
    if data == data.upper():
        self.stream.write(b"error\talready capitalized\n")
    else:
        # data already has \n
        self.stream.write(utf8("ok\t%s" % data.upper()))
    self.stream.close()
def test_json_encode(self):
    # json deals with strings, not bytes.  On python 2 byte strings will
    # convert automatically if they are utf8; on python 3 byte strings
    # are not allowed.
    self.assertEqual(json_decode(json_encode(u"\u00e9")), u"\u00e9")
    if bytes is str:
        self.assertEqual(json_decode(json_encode(utf8(u"\u00e9"))), u"\u00e9")
        self.assertRaises(UnicodeDecodeError, json_encode, b"\xe9")
def _on_request_token(self, authorize_url, callback_uri, callback, response):
    if response.error:
        raise Exception("Could not get request token: %s" % response.error)
    request_token = _oauth_parse_response(response.body)
    data = (base64.b64encode(escape.utf8(request_token["key"])) + b"|" +
            base64.b64encode(escape.utf8(request_token["secret"])))
    self.set_cookie("_oauth_request_token", data)
    args = dict(oauth_token=request_token["key"])
    if callback_uri == "oob":
        self.finish(authorize_url + "?" + urllib_parse.urlencode(args))
        callback()
        return
    elif callback_uri:
        args["oauth_callback"] = urlparse.urljoin(
            self.request.full_url(), callback_uri)
    self.redirect(authorize_url + "?" + urllib_parse.urlencode(args))
    callback()
def generate(self, writer):
    value = self.value

    # Compress whitespace if requested, with a crude heuristic to avoid
    # altering preformatted whitespace.
    if "<pre>" not in value:
        value = filter_whitespace(self.whitespace, value)

    if value:
        writer.write_line('_tt_append(%r)' % escape.utf8(value), self.line)
def test_try(self):
    template = Template(utf8("""{% try %}
try{% set y = 1/x %}
{% except %}-except
{% else %}-else
{% finally %}-finally
{% end %}"""))
    self.assertEqual(template.generate(x=1), b"\ntry\n-else\n-finally\n")
    self.assertEqual(template.generate(x=0), b"\ntry-except\n-finally\n")
def post_gzip(self, body):
    bytesio = BytesIO()
    gzip_file = gzip.GzipFile(mode='w', fileobj=bytesio)
    gzip_file.write(utf8(body))
    gzip_file.close()
    compressed_body = bytesio.getvalue()
    return self.fetch('/', method='POST', body=compressed_body,
                      headers={'Content-Encoding': 'gzip'})
def raw_fetch(self, headers, body, newline=b"\r\n"):
    with closing(IOStream(socket.socket())) as stream:
        stream.connect(('127.0.0.1', self.get_http_port()), self.stop)
        self.wait()
        stream.write(
            newline.join(headers +
                         [utf8("Content-Length: %d" % len(body))]) +
            newline + newline + body)
        read_stream_body(stream, self.stop)
        headers, body = self.wait()
        return body
def test_break_in_apply(self):
    # This test verifies current behavior, although of course it would
    # be nice if apply didn't cause seemingly unrelated breakage
    try:
        Template(utf8(
            "{% for i in [] %}{% apply foo %}{% break %}{% end %}{% end %}"))
        raise Exception("Did not get expected exception")
    except ParseError:
        pass
def capitalize(self, request_data, callback):
    logging.info('capitalize')
    stream = IOStream(socket.socket(), io_loop=self.io_loop)
    logging.info('connecting')
    yield gen.Task(stream.connect, ('127.0.0.1', self.port))
    stream.write(utf8(request_data + '\n'))
    logging.info('reading')
    data = yield gen.Task(stream.read_until, b'\n')
    logging.info('returning')
    stream.close()
    callback(self.process_response(data))
def f():
    # This is simpler than the non-coroutine version, but it cheats
    # by reading the body in one blob instead of streaming it with
    # a Protocol.
    client = Agent(self.reactor)
    response = yield client.request(b'GET', utf8(url))
    with warnings.catch_warnings():
        # readBody has a buggy DeprecationWarning in Twisted 15.0:
        # https://twistedmatrix.com/trac/changeset/43379
        warnings.simplefilter('ignore', category=DeprecationWarning)
        body[0] = yield readBody(response)
    self.stop_loop()
def get(self):
    realm = 'test'
    opaque = 'asdf'
    # Real implementations would use a random nonce.
    nonce = "1234"
    username = '******'
    password = '******'

    auth_header = self.request.headers.get('Authorization', None)
    if auth_header is not None:
        auth_mode, params = auth_header.split(' ', 1)
        assert auth_mode == 'Digest'
        param_dict = {}
        for pair in params.split(','):
            k, v = pair.strip().split('=', 1)
            if v[0] == '"' and v[-1] == '"':
                v = v[1:-1]
            param_dict[k] = v
        assert param_dict['realm'] == realm
        assert param_dict['opaque'] == opaque
        assert param_dict['nonce'] == nonce
        assert param_dict['username'] == username
        assert param_dict['uri'] == self.request.path
        h1 = md5(utf8('%s:%s:%s' % (username, realm, password))).hexdigest()
        h2 = md5(utf8('%s:%s' % (self.request.method,
                                 self.request.path))).hexdigest()
        digest = md5(utf8('%s:%s:%s' % (h1, nonce, h2))).hexdigest()
        if digest == param_dict['response']:
            self.write('ok')
        else:
            self.write('fail')
    else:
        self.set_status(401)
        self.set_header('WWW-Authenticate',
                        'Digest realm="%s", nonce="%s", opaque="%s"' %
                        (realm, nonce, opaque))
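# A small stdlib-only sketch of the RFC 2617 digest the handler above checks
# (the argument values are whatever the client supplied; nothing here is Tornado
# API): HA1 = MD5(user:realm:pass), HA2 = MD5(method:uri),
# response = MD5(HA1:nonce:HA2).
from hashlib import md5


def digest_response(username, realm, password, method, uri, nonce):
    ha1 = md5(("%s:%s:%s" % (username, realm, password)).encode("utf-8")).hexdigest()
    ha2 = md5(("%s:%s" % (method, uri)).encode("utf-8")).hexdigest()
    return md5(("%s:%s:%s" % (ha1, nonce, ha2)).encode("utf-8")).hexdigest()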
def _format_chunk(self, chunk):
    if self._expected_content_remaining is not None:
        self._expected_content_remaining -= len(chunk)
        if self._expected_content_remaining < 0:
            # Close the stream now to stop further framing errors.
            self.stream.close()
            raise httputil.HTTPOutputError(
                "Tried to write more data than Content-Length")
    if self._chunking_output and chunk:
        # Don't write out empty chunks because that means END-OF-STREAM
        # with chunked encoding
        return utf8("%x" % len(chunk)) + b"\r\n" + chunk + b"\r\n"
    else:
        return chunk
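# A tiny sketch (not Tornado's helper) of the chunked transfer-encoding framing the
# method above emits for non-empty chunks: hex length, CRLF, payload, CRLF. A
# zero-length chunk is reserved for end-of-stream, which is why empty chunks are
# passed through unframed above.
def frame_chunk(chunk):
    return b"%x\r\n" % len(chunk) + chunk + b"\r\n"


assert frame_chunk(b"hello") == b"5\r\nhello\r\n"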
def environ(request):
    """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.
    """
    hostport = request.host.split(":")
    if len(hostport) == 2:
        host = hostport[0]
        port = int(hostport[1])
    else:
        host = request.host
        port = 443 if request.protocol == "https" else 80
    environ = {
        "REQUEST_METHOD": request.method,
        "SCRIPT_NAME": "",
        "PATH_INFO": to_wsgi_str(escape.url_unescape(
            request.path, encoding=None, plus=False)),
        "QUERY_STRING": request.query,
        "REMOTE_ADDR": request.remote_ip,
        "SERVER_NAME": host,
        "SERVER_PORT": str(port),
        "SERVER_PROTOCOL": request.version,
        "wsgi.version": (1, 0),
        "wsgi.url_scheme": request.protocol,
        "wsgi.input": BytesIO(escape.utf8(request.body)),
        "wsgi.errors": sys.stderr,
        "wsgi.multithread": False,
        "wsgi.multiprocess": True,
        "wsgi.run_once": False,
    }
    if "Content-Type" in request.headers:
        environ["CONTENT_TYPE"] = request.headers.pop("Content-Type")
    if "Content-Length" in request.headers:
        environ["CONTENT_LENGTH"] = request.headers.pop("Content-Length")
    for key, value in request.headers.items():
        environ["HTTP_" + key.replace("-", "_").upper()] = value
    return environ
def handle_request(request):
    self.http1 = request.version.startswith("HTTP/1.")
    if not self.http1:
        # This test will be skipped if we're using HTTP/2,
        # so just close it out cleanly using the modern interface.
        request.connection.write_headers(
            ResponseStartLine('', 200, 'OK'), HTTPHeaders())
        request.connection.finish()
        return
    message = b"Hello world"
    request.write(utf8("HTTP/1.1 200 OK\r\n"
                       "Content-Length: %d\r\n\r\n" % len(message)))
    request.write(message)
    request.finish()