def test_content_disposition():
    """Check parsing of the Content-Disposition header (RFC 6266)."""
    parser = rfc6266.content_disposition
    # The disposition type is case-insensitive; values may be quoted.
    result = parse(parser, b'Attachment; filename=example.html')
    assert result == Parametrized(u'attachment',
                                  MultiDict([(u'filename', u'example.html')]))
    result = parse(parser, b'INLINE; FILENAME= "an example.html"')
    assert result == Parametrized(u'inline',
                                  MultiDict([(u'filename', u'an example.html')]))
    # RFC 5987 extended parameter: charset + optional language + pct-encoded bytes.
    result = parse(parser, b"attachment; filename*= UTF-8''%e2%82%ac%20rates")
    assert result == Parametrized(
        u'attachment',
        MultiDict([(u'filename*',
                    ExtValue(u'UTF-8', None, u'€ rates'.encode('utf-8')))]))
    # Both the plain and the extended parameter may appear together.
    result = parse(parser,
                   b'attachment; filename="EURO rates"; '
                   b"filename*=utf-8''%e2%82%ac%20rates")
    assert result == Parametrized(
        u'attachment',
        MultiDict([(u'filename', u'EURO rates'),
                   (u'filename*',
                    ExtValue(u'utf-8', None, u'€ rates'.encode('utf-8')))]))
    # A bare token is not a valid extended ("star") parameter value.
    no_parse(parser, b'attachment; filename*=example.html')
def test_alt_svc():
    """Check parsed values of a tricky Alt-Svc header."""
    [exch1] = load_from_file('funny_alt_svc')
    alt_svc = exch1.responses[0].headers.alt_svc
    # Each alternative is a (protocol-id, authority) pair plus its parameters.
    expected = [
        Parametrized((b'http/1.1', u'foo:443'),
                     MultiDict([(altsvc.ma, 3600), (altsvc.persist, True)])),
        Parametrized((b'h2', u':8000'), MultiDict([(u'foo', u'bar')])),
    ]
    assert alt_svc.value == expected
def test_hsts():
    """Check parsing of a Strict-Transport-Security header with oddities."""
    [exch1] = load_from_file('funny_hsts.https')
    sts = exch1.responses[0].headers.strict_transport_security
    expected = [
        Parametrized(hsts.max_age, 15768000),
        Parametrized(hsts.includesubdomains, None),
        # The second max-age directive has a bad argument.
        Parametrized(hsts.max_age, Unavailable),
        Parametrized(u'fooBar', None),
    ]
    assert sts.value == expected
    # Attribute access reflects the first occurrence of each directive.
    assert sts.max_age == 15768000
    assert sts.includesubdomains == True
def test_accept_encoding():
    """Check parsing of Accept-Encoding (RFC 7231 Section 5.3.4)."""
    parser = rfc7231.Accept_Encoding
    assert parse(parser, b'compress, gzip') == [
        Parametrized(cc.compress, None),
        Parametrized(cc.gzip, None),
    ]
    # An empty field value is allowed and yields an empty list.
    assert parse(parser, b'') == []
    assert parse(parser, b'*') == [Parametrized(u'*', None)]
    assert parse(parser, b'compress;q=0.5, gzip;q=1.0') == [
        Parametrized(cc.compress, 0.5),
        Parametrized(cc.gzip, 1),
    ]
    assert parse(parser, b'gzip;q=1.0, identity; q=0.5, *;q=0') == [
        Parametrized(cc.gzip, 1),
        Parametrized(u'identity', 0.5),
        Parametrized(u'*', 0),
    ]
    # Codings are comma-separated; "q" may only appear as a weight parameter.
    no_parse(parser, b'gzip; identity')
    no_parse(parser, b'gzip, q=1.0')
def test_www_authenticate():
    """Check parsing of a WWW-Authenticate header with many challenges."""
    [exch1] = load_from_file('funny_www_authenticate')
    challenges = exch1.responses[0].headers.www_authenticate.value
    assert challenges == [
        Parametrized(u'Basic', MultiDict([(u'realm', u'my "magical" realm')])),
        Parametrized(u'Foo', MultiDict()),
        # token68 form: the argument is a bare string, not a param dict.
        Parametrized(u'Bar', u'jgfCGSU8u=='),
        Parametrized(u'Baz', MultiDict()),
        # One challenge is malformed beyond recovery.
        Unavailable,
        Parametrized(u'Scheme1',
                     MultiDict([(u'foo', u'bar'), (u'baz', u'qux')])),
        Parametrized(u'Scheme2', MultiDict()),
        Parametrized(u'Newauth',
                     MultiDict([(u'realm', u'apps'), (u'type', u'1'),
                                (u'title', u'Login to "apps"')])),
        Parametrized(auth.basic, MultiDict([(u'realm', u'simple')])),
    ]
def _process_pair(self, entry, pair):
    """Validate one name/argument pair and wrap it in ``Parametrized``.

    Complains when the argument is missing but required (1156),
    present but forbidden (1157), or fails to parse against its
    known syntax (1158).
    """
    (name, value) = pair
    if value is None:
        # No argument given — complain if this name requires one.
        if self.knowledge.argument_required(name):
            self.message.complain(1156, entry=entry, name=name)
            value = Unavailable(u'')
        return Parametrized(name, value)
    syntax = self.knowledge.syntax_for(name)
    if self.knowledge.no_argument(name):
        # An argument was given where none is allowed.
        self.message.complain(1157, entry=entry, name=name)
        return Parametrized(name, Unavailable(value))
    if syntax is not None:
        value = parse(value, syntax, self.message.complain, 1158,
                      place=entry, name=name, value=value)
    return Parametrized(name, value)
def test_accept_language():
    """Check parsing of Accept-Language (RFC 7231 Section 5.3.5)."""
    parser = rfc7231.Accept_Language
    expected = [
        Parametrized(u'da', None),
        # Language tags come out in conventional case.
        Parametrized(u'en-GB', 0.8),
        Parametrized(u'en', 0.7),
    ]
    assert parse(parser, b'da, en-gb;q=0.8, en;q=0.7') == expected
    assert parse(parser, b'en, *; q=0') == [Parametrized(u'en', None),
                                            Parametrized(u'*', 0)]
    assert parse(parser, b'da') == [Parametrized(u'da', None)]
    # Underscores, non-alphabetic primary subtags, and spaces
    # around "=" are all invalid.
    no_parse(parser, b'en_GB')
    no_parse(parser, b'x1, x2')
    no_parse(parser, b'en; q = 0.7')
def test_transfer_coding():
    """Check parsing of transfer codings and TE's t-codings (RFC 7230)."""
    coding = rfc7230.transfer_coding()
    assert parse(coding, b'chunked') == Parametrized(tc.chunked, MultiDict())
    assert parse(coding, b'foo') == Parametrized(u'foo', MultiDict())
    # Parameter values may be quoted strings with escapes.
    assert parse(coding, b'foo ; bar = baz ; qux = "\\"xyzzy\\""') == \
        Parametrized(u'foo',
                     MultiDict([(u'bar', u'baz'), (u'qux', u'"xyzzy"')]))
    no_parse(coding, b'')
    no_parse(coding, b'foo;???')
    # Parameter names must be tokens, never quoted strings.
    no_parse(coding, b'foo;"bar"="baz"')
    codings = rfc7230.t_codings
    assert parse(codings, b'gzip;q=0.345') == \
        Parametrized(Parametrized(tc.gzip, MultiDict()), 0.345)
    # The "q" weight name is case-insensitive.
    assert parse(codings, b'gzip; Q=1.0') == \
        Parametrized(Parametrized(tc.gzip, MultiDict()), 1)
    assert parse(codings, b'trailers') == u'trailers'
    # A qvalue greater than 1 is invalid.
    no_parse(codings, b'gzip;q=2.0')
def _process_directive(self, entry, directive_with_argument):
    """Validate one directive/argument pair and wrap it in ``Parametrized``.

    Complains when the argument is missing but required (1156),
    present but forbidden (1157), or fails to parse (1158).
    """
    (directive, raw) = directive_with_argument
    parser = self.knowledge_module.parser_for(directive)
    if raw is None:
        if self.knowledge_module.argument_required(directive):
            # Required argument is missing.
            self.message.complain(1156, entry=entry, directive=directive)
            return Parametrized(directive, Unavailable)
        return Parametrized(directive, None)
    if self.knowledge_module.no_argument(directive):
        # Argument present where none is allowed — drop it.
        self.message.complain(1157, entry=entry, directive=directive)
        return Parametrized(directive, None)
    if parser is not None:
        raw = simple_parse(raw, parser, self.message.complain, 1158,
                           place=entry, directive=directive, value=raw)
    return Parametrized(directive, raw)
def test_cache_control():
    """Check parsing of Cache-Control and Pragma on a tricky request."""
    [exch1] = load_from_file('funny_cache_control')
    headers = exch1.request.headers
    cache_control = headers.cache_control
    assert cache_control.value == [
        Parametrized(cache.max_age, 3600),
        Parametrized(cache.max_stale, 60),
        # One directive is malformed beyond recovery.
        Unavailable,
        Parametrized(u'qux', u'xyzzy 123'),
        Parametrized(cache.no_transform, None),
        Parametrized(u'abcde', None),
        # min-fresh is present but its argument does not parse.
        Parametrized(cache.min_fresh, Unavailable),
        Parametrized(cache.no_store, None),
    ]
    assert headers.pragma.value == [
        u'no-cache',
        (u'foo', None),
        (u'bar', u'baz'),
        (u'qux', u'xyzzy'),
        Unavailable,
    ]
    # Directives are reachable via containment, attribute, and item access.
    assert cache.max_age in cache_control
    assert cache_control.max_age == 3600
    assert cache.max_stale in cache_control
    assert cache_control.max_stale == 60
    assert cache_control[u'qux'] == u'xyzzy 123'
    assert cache.no_transform in cache_control
    assert cache_control.no_transform == True
    assert cache_control[u'abcde'] == True
    assert cache_control.no_cache is None
    assert cache.min_fresh in cache_control
    assert cache_control.min_fresh is Unavailable
    assert cache.no_store in cache_control
    assert cache_control.no_store is True
    assert cache.only_if_cached not in cache_control
def test_accept():
    """Check parsing of Accept (RFC 7231 Section 5.3.2)."""
    parser = rfc7231.Accept

    def media_range(type_, params=None):
        # A media range is a media type plus its (non-weight) parameters.
        return Parametrized(
            type_, MultiDict() if params is None else MultiDict(params))

    assert parse(
        parser,
        b'text/html;charset="utf-8";Q=1;profile="mobile", '
        b'text/plain;Q=0.2, text/*;Q=0.02, */*;Q=0.01'
    ) == [
        Parametrized(media_range(media.text_html, [(u'charset', u'utf-8')]),
                     MultiDict([(u'q', 1), (u'profile', u'mobile')])),
        Parametrized(media_range(media.text_plain), MultiDict([(u'q', 0.2)])),
        Parametrized(media_range(u'text/*'), MultiDict([(u'q', 0.02)])),
        Parametrized(media_range(u'*/*'), MultiDict([(u'q', 0.01)])),
    ]
    assert parse(parser, b'*/*') == [
        Parametrized(media_range(u'*/*'), MultiDict()),
    ]
    assert parse(parser, b'application/json') == [
        Parametrized(media_range(media.application_json), MultiDict()),
    ]
    assert parse(parser, b'audio/*; q=0.2, audio/basic') == [
        Parametrized(media_range(u'audio/*'), MultiDict([(u'q', 0.2)])),
        Parametrized(media_range(media.audio_basic), MultiDict()),
    ]
    assert parse(
        parser,
        b'text/plain; q=0.5, text/html, text/x-dvi; q=0.8, text/x-c'
    ) == [
        Parametrized(media_range(media.text_plain), MultiDict([(u'q', 0.5)])),
        Parametrized(media_range(media.text_html), MultiDict()),
        Parametrized(media_range(u'text/x-dvi'), MultiDict([(u'q', 0.8)])),
        Parametrized(media_range(u'text/x-c'), MultiDict()),
    ]
    # Empty list elements are tolerated and skipped.
    assert parse(
        parser,
        b', ,text/*, text/plain,,, text/plain;format=flowed, */*'
    ) == [
        Parametrized(media_range(u'text/*'), MultiDict()),
        Parametrized(media_range(media.text_plain), MultiDict()),
        Parametrized(media_range(media.text_plain, [(u'format', u'flowed')]),
                     MultiDict()),
        Parametrized(media_range(u'*/*'), MultiDict()),
    ]
    assert parse(parser, b'') == []
    assert parse(parser, b',') == []
    # Malformed weights and separators must be rejected.
    no_parse(parser, b'text/html;q=foo-bar')
    no_parse(parser, b'text/html;q=0.12345')
    no_parse(parser, b'text/html;q=1.23456')
    no_parse(parser, b'text/html;foo=bar;q=1.23456')
    no_parse(parser, b'text/html=0.123')
    no_parse(parser, b'text/html,q=0.123')
    no_parse(parser, b'text/html q=0.123')
    no_parse(parser, b'text/html;text/plain')
    no_parse(parser, b'text/html;;q=0.123')
    no_parse(parser, b'text/html;q="0.123"')
# Grammar definitions for HTTP/1.1 message syntax (RFC 7230),
# built from parser combinators defined elsewhere in the project.

# request-target: one of the four forms of RFC 7230 Section 5.3.
request_target = (origin_form | absolute_form | authority_form |
                  asterisk_form) > pivot

# reason-phrase (Section 3.1.2) and field-name (Section 3.2).
reason_phrase = string(HTAB | SP | VCHAR | obs_text) > pivot
field_name = FieldName << token > pivot

def transfer_parameter(no_q=False):
    # transfer-parameter = token BWS "=" BWS ( token / quoted-string )
    # With ``no_q``, the name "q" is excluded so that a following
    # weight (as in the TE header) is not swallowed as a parameter.
    return ((token__excluding(['q']) if no_q else token) *
            skip(BWS * '=' * BWS) *
            (token | quoted_string)) > named(
        u'transfer-parameter', RFC(7230), is_pivot=True)

# Coding names defined directly by RFC 7230.
_built_in_codings = ['chunked', 'compress', 'deflate', 'gzip']

# Wrap a bare coding name as a Parametrized with no parameters.
_empty_params = lambda c: Parametrized(c, MultiDict())

def transfer_extension(exclude=None, no_q=False):
    # transfer-extension = token *( OWS ";" OWS transfer-parameter )
    return Parametrized << (
        (TransferCoding << token__excluding(exclude or [])) *
        (MultiDict << many(skip(OWS * ';' * OWS) * transfer_parameter(no_q)))
    ) > named(u'transfer-extension', RFC(7230), is_pivot=True)

def transfer_coding(no_trailers=False, no_q=False):
    # Build a parser for transfer-coding: transfer-extension with the
    # built-in names excluded from its token, plus the built-ins themselves.
    exclude = _built_in_codings
    if no_trailers:
        exclude = exclude + ['trailers']
    r = transfer_extension(exclude, no_q)
    # NOTE(review): this definition is truncated in the visible chunk;
    # the loop body presumably combines each built-in coding into ``r``
    # — confirm against the full source.
    for name in _built_in_codings:
def _process_parsed(self, entry, ds):
    """Run ``_process_directive`` over each (directive, params) element."""
    processed = []
    for (directive, params) in ds:
        processed.append(
            Parametrized(self._process_directive(entry, directive), params))
    return processed
def test_media_type():
    """Check parsing of media types (RFC 7231 Section 3.1.1.1)."""
    parser = rfc7231.media_type
    # Type, subtype, and parameter names are case-insensitive.
    assert parse(parser, b'Text/HTML; Charset="utf-8"') == \
        Parametrized(media.text_html, MultiDict([(u'charset', u'utf-8')]))
    assert parse(parser, b'application/vnd.github.v3+json') == \
        Parametrized(u'application/vnd.github.v3+json', MultiDict())
def test_prefer():
    """Check parsing of a tricky Prefer header (RFC 7240)."""
    [exch1] = load_from_file('funny_prefer')
    prefer_hdr = exch1.request.headers.prefer
    assert prefer_hdr.value == [
        Parametrized(Parametrized(pref.handling, u'lenient'), [
            Parametrized(u'param1', u"this is a parameter to 'handling'!"),
            Parametrized(u'param2', None),
        ]),
        # One preference is malformed beyond recovery.
        Unavailable,
        Parametrized(Parametrized(pref.wait, 600), []),
        Parametrized(Parametrized(u'my-pref', None), [
            None,
            None,
            Parametrized(u'foo', None),
            None,
            None,
            Parametrized(u'bar', None),
        ]),
        Parametrized(Parametrized(pref.respond_async, None), []),
        # "wait" repeated with a different value.
        Parametrized(Parametrized(pref.wait, 0), []),
        Parametrized(Parametrized(pref.return_, Unavailable), []),
    ]
    # Attribute access reflects the first occurrence of each preference.
    assert prefer_hdr.wait == 600
    assert prefer_hdr.respond_async
    assert prefer_hdr.return_ is Unavailable
    assert prefer_hdr[u'quux'] is None
def test_accept_charset():
    """Check parsing of Accept-Charset (RFC 7231 Section 5.3.3)."""
    parser = rfc7231.Accept_Charset
    expected = [Parametrized(u'iso-8859-5', None),
                Parametrized(u'unicode-1-1', 0.8)]
    assert parse(parser, b'iso-8859-5, unicode-1-1 ; q=0.8') == expected
def test_link():
    """Check parsing of the Link header (RFC 8288)."""
    parser = rfc8288.Link
    assert parse(
        parser,
        b'<http://example.com/TheBook/chapter2>; rel="previous"; '
        b'title="previous chapter"'
    ) == [
        Parametrized(
            u'http://example.com/TheBook/chapter2',
            MultiDict([
                (u'rel', [u'previous']),
                (u'title', u'previous chapter'),
            ])
        ),
    ]
    assert parse(parser, b'</>; rel="http://example.net/foo"') == \
        [Parametrized(u'/', MultiDict([(u'rel', [u'http://example.net/foo'])]))]
    # RFC 8187 extended parameters carry charset, language, and raw bytes.
    assert parse(
        parser,
        b'</TheBook/chapter2>; '
        b'rel="previous"; title*=UTF-8\'de\'letztes%20Kapitel, '
        b'</TheBook/chapter4>; '
        b'rel="next"; title*=UTF-8\'de\'n%c3%a4chstes%20Kapitel'
    ) == [
        Parametrized(
            u'/TheBook/chapter2',
            MultiDict([
                (u'rel', [u'previous']),
                (u'title*',
                 ExtValue(u'UTF-8', u'de', u'letztes Kapitel'.encode('utf-8'))),
            ])
        ),
        Parametrized(
            u'/TheBook/chapter4',
            MultiDict([
                (u'rel', [u'next']),
                # Keys compare case-insensitively.
                (u'Title*',
                 ExtValue(u'UTF-8', u'de', u'nächstes Kapitel'.encode('utf-8'))),
            ])
        ),
    ]
    # A quoted "rel" value may hold several whitespace-separated relations.
    assert parse(
        parser,
        b'<http://example.org/>; '
        b'rel="start http://example.net/relation/other"'
    ) == [
        Parametrized(
            u'http://example.org/',
            MultiDict([
                (u'REL', [u'START', u'http://example.net/relation/other']),
            ])
        ),
    ]
    # Repeated parameters: only the first occurrence of each counts.
    assert parse(
        parser,
        b'</>; rel=foo; type="text/plain"; rel=bar; type="text/html"'
    ) == [
        Parametrized(
            u'/',
            MultiDict([
                (u'rel', [u'foo']),
                (u'type', media.text_plain),
            ])
        ),
    ]
    assert parse(
        parser,
        b'</foo/bar?baz=qux#xyzzy> ; media = "whatever man okay?" ; '
        b'hreflang=en-US'
    ) == [
        Parametrized(
            u'/foo/bar?baz=qux#xyzzy',
            MultiDict([
                (u'media', u'whatever man okay?'),
                (u'hreflang', LanguageTag(u'en-US')),
            ])
        ),
    ]
    assert parse(parser, b'<foo>, <bar>, <>') == [
        Parametrized(u'foo', MultiDict()),
        Parametrized(u'bar', MultiDict()),
        Parametrized(u'', MultiDict()),
    ]
    assert parse(
        parser,
        b"<urn:foo:bar:baz>; MyParam* = ISO-8859-1'en'whatever"
    ) == [
        Parametrized(
            u'urn:foo:bar:baz',
            MultiDict([
                (u'myparam*', u"ISO-8859-1'en'whatever"),
            ])
        ),
    ]
    # Parameters without values are allowed.
    assert parse(parser, b'<#me>; coolest; man; ever!') == [
        Parametrized(
            u'#me',
            MultiDict([
                (u'coolest', None),
                (u'man', None),
                (u'ever!', None),
            ])
        ),
    ]
    # "type" must be quoted when it contains "/"; "anchor" needs quoting;
    # non-ASCII URIs, spaced "*=", and trailing semicolons are invalid.
    no_parse(parser, b'</>; rel=foo; type=text/plain; rel=bar; type=text/html')
    no_parse(parser, b'</>; anchor=/index.html')
    no_parse(parser, u'<http://пример.рф/>; rel=next'.encode('utf-8'))
    no_parse(parser, b"</>; title * = UTF-8''Hello")
    no_parse(parser, b'</index.html>;')
    no_parse(parser, b'</index.html>; rel=next;')
def test_common_structures():
    """Check equality semantics of CaseInsensitive and Parametrized."""
    # CaseInsensitive compares equal regardless of case, both to its
    # own kind and to plain strings.
    assert CaseInsensitive(u'foo') == CaseInsensitive(u'Foo')
    assert CaseInsensitive(u'foo') != CaseInsensitive(u'bar')
    assert CaseInsensitive(u'foo') == u'Foo'
    assert CaseInsensitive(u'foo') != u'bar'
    # A Parametrized compares equal to its bare item, ignoring params...
    assert (Parametrized(CaseInsensitive(u'foo'), []) ==
            CaseInsensitive(u'Foo'))
    assert Parametrized(CaseInsensitive(u'foo'), [(u'bar', u'qux')]) == u'Foo'
    assert Parametrized(CaseInsensitive(u'foo'), [(u'bar', u'qux')]) != u'bar'
    # ...but to another Parametrized only when item AND params both match.
    assert (Parametrized(CaseInsensitive(u'foo'), [(u'bar', u'qux')]) ==
            Parametrized(CaseInsensitive(u'Foo'), [(u'bar', u'qux')]))
    assert (Parametrized(CaseInsensitive(u'foo'), [(u'bar', u'qux')]) !=
            Parametrized(CaseInsensitive(u'foo'), [(u'bar', u'xyzzy')]))
    assert (Parametrized(u'foo', [(u'bar', u'qux')]) !=
            Parametrized(u'foo', [(u'bar', u'xyzzy')]))
    assert (Parametrized(CaseInsensitive(u'foo'), [(u'bar', u'qux')]) !=
            Parametrized(CaseInsensitive(u'bar'), [(u'bar', u'qux')]))
def test_prefer():
    """Check parsing and reporting of a tricky Prefer header (RFC 7240)."""
    [exch1] = load_from_file('funny_prefer')
    prefer_hdr = exch1.request.headers.prefer
    assert prefer_hdr.value == [
        Parametrized(Parametrized(prefer.handling, u'lenient'), [
            Parametrized(u'param1', u"this is a parameter to 'handling'!"),
            Parametrized(u'param2', None),
        ]),
        # Unparseable preferences carry their raw bytes in Unavailable.
        Unavailable(b'BWS-is-not-parsed = because-see-errata'),
        Parametrized(Parametrized(prefer.wait, 600), []),
        Parametrized(Parametrized(u'my-pref', None), [
            None,
            None,
            Parametrized(u'foo', None),
            None,
            None,
            Parametrized(u'bar', None),
        ]),
        Parametrized(Parametrized(prefer.respond_async, None), []),
        # "wait" repeated with a different value.
        Parametrized(Parametrized(prefer.wait, 0), []),
        Parametrized(
            Parametrized(prefer.return_, Unavailable(b'something-else')), []),
    ]
    # Attribute access reflects the first occurrence of each preference.
    assert prefer_hdr.wait == 600
    assert prefer_hdr.respond_async
    assert isinstance(prefer_hdr.return_, Unavailable)
    assert prefer_hdr[u'quux'] is None
    assert isinstance(
        exch1.responses[0].headers.preference_applied.respond_async,
        Unavailable)
    check_exchange(exch1)
    # The report must show the actual bad value, not "Unavailable".
    buf = io.BytesIO()
    text_report([exch1], buf)
    assert b'Preference-Applied: respond-async=true was not requested' \
        in buf.getvalue()
def _process_parsed(self, entry, parsed):
    """Run ``_process_pair`` over each (pair, params) element."""
    processed = []
    for (pair, params) in parsed:
        processed.append(
            Parametrized(self._process_pair(entry, pair), params))
    return processed
def _normalize_empty_value(x):
    """Normalize an empty preference/parameter value to ``None``.

    RFC 7240 Section 2: "Empty or zero-length values on both
    the preference token and within parameters are equivalent
    to no value being specified at all."
    """
    if isinstance(x, tuple):
        (name, value) = x
    else:
        name, value = x, None
    return Parametrized(name, value if value != u'' else None)