def test_content_disposition():
    """Content-Disposition (RFC 6266): types, params, and extended values."""
    parser = rfc6266.content_disposition
    euro_rates = u'€ rates'.encode('utf-8')

    # The disposition type and plain parameter names are case-insensitive.
    assert parse(parser, b'Attachment; filename=example.html') == \
        Parametrized(u'attachment',
                     MultiDict([(u'filename', u'example.html')]))
    assert parse(parser, b'INLINE; FILENAME= "an example.html"') == \
        Parametrized(u'inline',
                     MultiDict([(u'filename', u'an example.html')]))

    # Extended parameter: charset, optional language, percent-decoded bytes.
    assert parse(parser,
                 b"attachment; filename*= UTF-8''%e2%82%ac%20rates") == \
        Parametrized(u'attachment',
                     MultiDict([(u'filename*',
                                 ExtValue(u'UTF-8', None, euro_rates))]))

    # Plain and extended filenames may appear together in one header.
    combined = (b'attachment; filename="EURO rates"; '
                b"filename*=utf-8''%e2%82%ac%20rates")
    assert parse(parser, combined) == \
        Parametrized(u'attachment',
                     MultiDict([(u'filename', u'EURO rates'),
                                (u'filename*',
                                 ExtValue(u'utf-8', None, euro_rates))]))

    # A starred parameter must use the ext-value syntax.
    no_parse(parser, b'attachment; filename*=example.html')
def test_alt_svc():
    """The 'funny_alt_svc' fixture yields two alternative services."""
    (exchange,) = load_from_file('funny_alt_svc')
    parsed = exchange.responses[0].headers.alt_svc.value
    first = Parametrized((b'http/1.1', u'foo:443'),
                         MultiDict([(altsvc.ma, 3600),
                                    (altsvc.persist, True)]))
    second = Parametrized((b'h2', u':8000'),
                          MultiDict([(u'foo', u'bar')]))
    assert parsed == [first, second]
def test_transfer_coding():
    """transfer-coding / t-codings grammar (RFC 7230)."""
    parser = rfc7230.transfer_coding()
    assert parse(parser, b'chunked') == Parametrized(tc.chunked, MultiDict())
    assert parse(parser, b'foo') == Parametrized(u'foo', MultiDict())
    # Quoted parameter values are unescaped on parsing.
    assert parse(parser, b'foo ; bar = baz ; qux = "\\"xyzzy\\""') == \
        Parametrized(u'foo',
                     MultiDict([(u'bar', u'baz'), (u'qux', u'"xyzzy"')]))
    for bad in [b'', b'foo;???', b'foo;"bar"="baz"']:
        no_parse(parser, bad)

    parser = rfc7230.t_codings
    assert parse(parser, b'gzip;q=0.345') == \
        Parametrized(Parametrized(tc.gzip, MultiDict()), 0.345)
    # The rank name is case-insensitive ("Q=") and 1.0 collapses to int 1.
    assert parse(parser, b'gzip; Q=1.0') == \
        Parametrized(Parametrized(tc.gzip, MultiDict()), 1)
    assert parse(parser, b'trailers') == u'trailers'
    # Ranks above 1 are not valid.
    no_parse(parser, b'gzip;q=2.0')
def test_www_authenticate():
    # Parse the 'funny_www_authenticate' fixture (a single exchange) and
    # check every challenge decoded from the WWW-Authenticate header.
    [exch1] = load_from_file('funny_www_authenticate')
    assert exch1.responses[0].headers.www_authenticate.value == [
        Parametrized(u'Basic', MultiDict([(u'realm', u'my "magical" realm')])),
        # A scheme may carry no parameters at all.
        Parametrized(u'Foo', MultiDict()),
        # token68 form: the scheme is followed by one bare token, not params.
        Parametrized(u'Bar', u'jgfCGSU8u=='),
        Parametrized(u'Baz', MultiDict()),
        # NOTE(review): one challenge in the fixture presumably fails to
        # parse and comes out as Unavailable — confirm against the fixture.
        Unavailable,
        Parametrized(u'Scheme1', MultiDict([(u'foo', u'bar'),
                                            (u'baz', u'qux')])),
        Parametrized(u'Scheme2', MultiDict()),
        Parametrized(u'Newauth', MultiDict([(u'realm', u'apps'),
                                            (u'type', u'1'),
                                            (u'title', u'Login to "apps"')])),
        Parametrized(auth.basic, MultiDict([(u'realm', u'simple')])),
    ]
def _process_params(complain, params):
    """Collect link-value parameters, complaining about duplicates.

    Per this module's reading of the spec, duplicate ``rel``, ``title``,
    and ``title*`` are dropped ("occurrences after the first must be
    ignored"), while duplicate ``media`` and ``type`` are flagged but kept.
    """
    collected = []
    seen = set()
    for name, value in params:
        duplicate = name in seen
        if duplicate and name in (u'rel', u'title', u'title*'):
            complain(1225, name=name)
            continue          # ignore everything after the first occurrence
        if duplicate and name in (u'media', u'type'):
            complain(1225, name=name)
        seen.add(name)
        collected.append((name, value))
        if name == u'rev':
            complain(1226)    # "rev" is deprecated
    return MultiDict(collected)
def _process_params(complain, params):
    """Validate and normalize the parameters of one Link header value."""
    once_only = (u'rel', u'media', u'title', u'title*', u'type')
    # Sub-grammars for parameters whose values have their own syntax.
    grammars = {
        u'anchor': URI_Reference,
        u'rel': rel,
        u'rev': rev,
        u'hreflang': hreflang,
        u'type': type_,
        u'title*': ext_value,
    }
    processed = []
    seen = set()
    for name, value in params:
        if name in once_only:
            if name in seen:
                # "occurrences after the first MUST be ignored by parsers"
                complain(1225, name=name)
                continue
            seen.add(name)
        if value is not None:
            parsed_as, value = value
            if name == u'title' and parsed_as is token:
                complain(1307)
            if name == u'hreflang' and parsed_as is quoted_string:
                complain(1308)
            grammar = grammars.get(name)
            if grammar is not None:
                value = parse(value, grammar, complain, 1158,
                              name=name, value=value)
        processed.append((name, value))
        if name == u'rev':
            complain(1226)
    if u'rel' not in seen:
        complain(1309)
    return MultiDict(processed)
def _prepend_q(q, xs):
    """Build a params MultiDict with the (case-insensitive) "q" key first."""
    items = [(CaseInsensitive(u'q'), q)]
    items.extend(xs)
    return MultiDict(items)
# qvalue: "0" optionally followed by up to three decimal digits, or "1"
# optionally followed by up to three zeros; parsed into a float.
qvalue = (float << '0' + maybe_str('.' + string_times(0, 3, DIGIT)) |
          float << '1' + maybe_str('.' + string_times(0, 3, '0'))) > pivot

# weight: the ";q=" quality parameter on a content-negotiation element.
weight = skip(OWS * ';' * OWS * 'q=') * qvalue > pivot

# accept-ext: any further ";name[=value]" extension after the weight.
accept_ext = (skip(OWS * ';' * OWS) * token *
              maybe(skip('=') * (token | quoted_string))) > pivot


def _prepend_q(q, xs):
    # Recombine the parsed weight and extension params into one MultiDict,
    # putting the (case-insensitive) "q" key first.
    return MultiDict([(CaseInsensitive(u'q'), q)] + xs)

accept_params = _prepend_q << weight * many(accept_ext) > pivot

# Accept: media ranges (with "q" excluded from their own parameters so the
# weight is not swallowed) plus optional accept-params; may be empty.
Accept = comma_list(Parametrized << (
    media_range(no_q=True) * maybe(accept_params, MultiDict()))) > pivot

charset = Charset << token > pivot

# Accept-Charset needs at least one element; "*" matches any charset.
Accept_Charset = comma_list1(Parametrized << (
    (charset | Charset << literal('*')) * maybe(weight))) > pivot

# "identity" and "*" are specific to Accept-Encoding, hence handled here
# on top of the generic content-coding grammar.
codings = (content_coding |
           ContentCoding << literal('identity') |
           literal('*')) > pivot
Accept_Encoding = comma_list(Parametrized << codings * maybe(weight)) > pivot

Accept_Language = comma_list1(Parametrized <<
                              language_range * maybe(weight)) > pivot

# Retry-After is either an HTTP-date or a whole number of seconds.
delay_seconds = int << string1(DIGIT) > pivot
Retry_After = HTTP_date | delay_seconds > pivot
# request-target alternatives for the request line (RFC 7230).
request_target = (origin_form | absolute_form | authority_form |
                  asterisk_form) > pivot

# reason-phrase may be empty and may contain obsolete (obs-text) octets.
reason_phrase = string(HTAB | SP | VCHAR | obs_text) > pivot

field_name = FieldName << token > pivot


def transfer_parameter(no_q=False):
    # One "name=value" transfer parameter. With no_q=True the name "q" is
    # excluded so that a rank (";q=...") is not consumed as a parameter.
    return ((token__excluding(['q']) if no_q else token) *
            skip(BWS * '=' * BWS) *
            (token | quoted_string)) > named(
        u'transfer-parameter', RFC(7230), is_pivot=True)

# Codings that RFC 7230 defines explicitly.
_built_in_codings = ['chunked', 'compress', 'deflate', 'gzip']

# Wrap a coding constant into a Parametrized with no parameters.
_empty_params = lambda c: Parametrized(c, MultiDict())


def transfer_extension(exclude=None, no_q=False):
    # A coding name (minus any 'exclude'd ones) plus its parameters.
    return Parametrized << (
        (TransferCoding << token__excluding(exclude or [])) *
        (MultiDict << many(skip(OWS * ';' * OWS) * transfer_parameter(no_q)))
    ) > named(u'transfer-extension', RFC(7230), is_pivot=True)


def transfer_coding(no_trailers=False, no_q=False):
    # Built-in codings are matched separately from generic extensions.
    # With no_trailers=True, "trailers" is excluded as well — presumably
    # because it is special-cased by the TE header; confirm at call sites.
    exclude = _built_in_codings
    if no_trailers:
        exclude = exclude + ['trailers']
    r = transfer_extension(exclude, no_q)
    for name in _built_in_codings:
    # NOTE(review): SOURCE is truncated here — the body of this loop (and
    # the remainder of transfer_coding) lies outside the visible chunk.
def test_link():
    # Link (RFC 8288): each value is a URI-Reference with parameters.
    p = rfc8288.Link
    assert parse(
        p,
        b'<http://example.com/TheBook/chapter2>; rel="previous"; '
        b'title="previous chapter"'
    ) == [
        Parametrized(
            u'http://example.com/TheBook/chapter2',
            MultiDict([
                # "rel" parses into a list of relation types.
                (u'rel', [u'previous']),
                (u'title', u'previous chapter'),
            ])
        )
    ]
    assert parse(p, b'</>; rel="http://example.net/foo"') == \
        [Parametrized(u'/',
                      MultiDict([(u'rel', [u'http://example.net/foo'])]))]
    assert parse(
        p,
        b'</TheBook/chapter2>; '
        b'rel="previous"; title*=UTF-8\'de\'letztes%20Kapitel, '
        b'</TheBook/chapter4>; '
        b'rel="next"; title*=UTF-8\'de\'n%c3%a4chstes%20Kapitel'
    ) == [
        Parametrized(
            u'/TheBook/chapter2',
            MultiDict([
                (u'rel', [u'previous']),
                (u'title*', ExtValue(u'UTF-8', u'de',
                                     u'letztes Kapitel'.encode('utf-8'))),
            ])
        ),
        Parametrized(
            u'/TheBook/chapter4',
            MultiDict([
                (u'rel', [u'next']),
                # NOTE(review): keys presumably compare case-insensitively,
                # so u'Title*' matches the parsed u'title*' — confirm.
                (u'Title*', ExtValue(u'UTF-8', u'de',
                                     u'nächstes Kapitel'.encode('utf-8'))),
            ])
        ),
    ]
    assert parse(
        p,
        b'<http://example.org/>; '
        b'rel="start http://example.net/relation/other"'
    ) == [
        Parametrized(
            u'http://example.org/',
            MultiDict([
                (u'REL', [u'START', u'http://example.net/relation/other']),
            ])
        ),
    ]
    # Duplicate "rel"/"type": occurrences after the first are ignored.
    assert parse(
        p,
        b'</>; rel=foo; type="text/plain"; rel=bar; type="text/html"'
    ) == [
        Parametrized(
            u'/',
            MultiDict([
                (u'rel', [u'foo']),
                (u'type', media.text_plain),
            ])
        ),
    ]
    assert parse(
        p,
        b'</foo/bar?baz=qux#xyzzy> ; media = "whatever man okay?" ; '
        b'hreflang=en-US'
    ) == [
        Parametrized(
            u'/foo/bar?baz=qux#xyzzy',
            MultiDict([
                (u'media', u'whatever man okay?'),
                (u'hreflang', LanguageTag(u'en-US')),
            ])
        ),
    ]
    # Targets may be relative or even empty.
    assert parse(p, b'<foo>, <bar>, <>') == \
        [
            Parametrized(u'foo', MultiDict()),
            Parametrized(u'bar', MultiDict()),
            Parametrized(u'', MultiDict()),
        ]
    # An unknown starred parameter is kept as a raw string (note the
    # lowercased key), not decoded as an ext-value.
    assert parse(
        p,
        b"<urn:foo:bar:baz>; MyParam* = ISO-8859-1'en'whatever"
    ) == [
        Parametrized(
            u'urn:foo:bar:baz',
            MultiDict([
                (u'myparam*', u"ISO-8859-1'en'whatever"),
            ])
        ),
    ]
    # Parameters may have no value at all.
    assert parse(p, b'<#me>; coolest; man; ever!') == \
        [
            Parametrized(
                u'#me',
                MultiDict([
                    (u'coolest', None),
                    (u'man', None),
                    (u'ever!', None),
                ])
            ),
        ]
    # NOTE(review): the next case presumably fails because type=text/plain
    # is unquoted (vs. the passing quoted form above) — confirm.
    no_parse(p, b'</>; rel=foo; type=text/plain; rel=bar; type=text/html')
    no_parse(p, b'</>; anchor=/index.html')
    # Non-ASCII bytes in the target are rejected.
    no_parse(p, u'<http://пример.рф/>; rel=next'.encode('utf-8'))
    no_parse(p, b"</>; title * = UTF-8''Hello")
    # Trailing semicolons without a parameter are rejected.
    no_parse(p, b'</index.html>;')
    no_parse(p, b'</index.html>; rel=next;')
def test_accept():
    # Accept parses into a list of Parametrized(media-range, accept-params).
    p = rfc7231.Accept
    assert parse(
        p,
        b'text/html;charset="utf-8";Q=1;profile="mobile", '
        b'text/plain;Q=0.2, text/*;Q=0.02, */*;Q=0.01'
    ) == [
        Parametrized(
            Parametrized(media.text_html, MultiDict([(u'charset', u'utf-8')])),
            # "Q" is case-insensitive; q=1 parses to the int 1.
            MultiDict([(u'q', 1), (u'profile', u'mobile')])
        ),
        Parametrized(
            Parametrized(media.text_plain, MultiDict()),
            MultiDict([(u'q', 0.2)])
        ),
        Parametrized(
            Parametrized(u'text/*', MultiDict()),
            MultiDict([(u'q', 0.02)])
        ),
        Parametrized(
            Parametrized(u'*/*', MultiDict()),
            MultiDict([(u'q', 0.01)])
        ),
    ]
    assert parse(p, b'*/*') == \
        [Parametrized(Parametrized(u'*/*', MultiDict()), MultiDict())]
    assert parse(p, b'application/json') == \
        [Parametrized(Parametrized(media.application_json, MultiDict()),
                      MultiDict())]
    assert parse(p, b'audio/*; q=0.2, audio/basic') == \
        [
            Parametrized(Parametrized(u'audio/*', MultiDict()),
                         MultiDict([(u'q', 0.2)])),
            Parametrized(Parametrized(media.audio_basic, MultiDict()),
                         MultiDict()),
        ]
    assert parse(
        p,
        b'text/plain; q=0.5, text/html, text/x-dvi; q=0.8, text/x-c'
    ) == [
        Parametrized(Parametrized(media.text_plain, MultiDict()),
                     MultiDict([(u'q', 0.5)])),
        Parametrized(Parametrized(media.text_html, MultiDict()),
                     MultiDict()),
        Parametrized(Parametrized(u'text/x-dvi', MultiDict()),
                     MultiDict([(u'q', 0.8)])),
        Parametrized(Parametrized(u'text/x-c', MultiDict()),
                     MultiDict()),
    ]
    # Empty list elements (stray commas) are tolerated and skipped.
    assert parse(
        p,
        b', ,text/*, text/plain,,, text/plain;format=flowed, */*'
    ) == [
        Parametrized(Parametrized(u'text/*', MultiDict()), MultiDict()),
        Parametrized(Parametrized(media.text_plain, MultiDict()),
                     MultiDict()),
        Parametrized(
            Parametrized(media.text_plain,
                         MultiDict([(u'format', u'flowed')])),
            MultiDict()
        ),
        Parametrized(Parametrized(u'*/*', MultiDict()), MultiDict()),
    ]
    # Accept may be completely empty.
    assert parse(p, b'') == []
    assert parse(p, b',') == []
    # Malformed q-values and parameter syntax must not parse: non-numeric
    # ranks, more than three decimals, missing separators, quoted ranks.
    no_parse(p, b'text/html;q=foo-bar')
    no_parse(p, b'text/html;q=0.12345')
    no_parse(p, b'text/html;q=1.23456')
    no_parse(p, b'text/html;foo=bar;q=1.23456')
    no_parse(p, b'text/html=0.123')
    no_parse(p, b'text/html,q=0.123')
    no_parse(p, b'text/html q=0.123')
    no_parse(p, b'text/html;text/plain')
    no_parse(p, b'text/html;;q=0.123')
    no_parse(p, b'text/html;q="0.123"')
def test_media_type():
    """Media types and parameter names normalize case-insensitively."""
    parser = rfc7231.media_type
    parsed = parse(parser, b'Text/HTML; Charset="utf-8"')
    assert parsed == Parametrized(media.text_html,
                                  MultiDict([(u'charset', u'utf-8')]))
    # Unregistered types are kept as plain strings.
    parsed = parse(parser, b'application/vnd.github.v3+json')
    assert parsed == Parametrized(u'application/vnd.github.v3+json',
                                  MultiDict())
@can_complain
def _check_realm(complain, k, v):
    # NOTE(review): flags a realm that was written as a bare token —
    # presumably because RFC 7235 requires quoted-string syntax for realm;
    # confirm notice 1196.
    (symbol, v) = v
    if k == u'realm' and symbol is not quoted_string:
        complain(1196)
    return (k, v)

# auth-param: case-insensitive name "=" token or quoted-string; each value
# is marked with the grammar it matched so _check_realm can inspect it.
auth_param = _check_realm << ((CaseInsensitive << token) *
                              skip(BWS * '=' * BWS) *
                              (mark(token) | mark(quoted_string))) > pivot

# challenge: scheme, then optionally either a token68 blob or a
# comma-separated list of auth-params (defaulting to no params).
challenge = Parametrized << (
    auth_scheme * maybe(skip(string1(SP)) *
                        (token68 | MultiDict << comma_list(auth_param)),
                        default=MultiDict())) > auto

WWW_Authenticate = comma_list1(challenge) > pivot
Proxy_Authenticate = comma_list1(challenge) > pivot

# credentials share the same shape as a challenge.
credentials = Parametrized << (
    auth_scheme * maybe(skip(string1(SP)) *
                        (token68 | MultiDict << comma_list(auth_param)),
                        default=MultiDict())) > auto

Authorization = credentials > pivot
Proxy_Authorization = credentials > pivot

# Attach RFC 7235 citations to all grammar names defined in this module.
fill_names(globals(), RFC(7235))