def choose_best_handler(self):
    # The best handler for the formatter instance depends on the
    # request; in particular it relies on what the client has
    # indicated it can accept.

    # Get the Accept header.
    accept_header = ctx.request.headers.get("Accept")

    if accept_header == "*/*" or accept_header is None:
        # The client will accept anything, so use the default handler
        # if one is set, otherwise the first registered handler.
        if hasattr(self, 'default'):
            return self.default, self.handlers[self.default]
        try:
            return next(iter(self.handlers.items()))
        except StopIteration:
            raise ValueError("No handlers have been registered "
                             "for this formatter.")

    # Parse the Accept header and pick the best match among the
    # registered handlers.
    accept = parse_accept_header(accept_header, MIMEAccept)
    mime_type = accept.best_match(self.handlers)
    if mime_type is not None:
        return mime_type, self.handlers[mime_type]

    # The output format that the requesting client asked for
    # isn't supported.
    raise NotAcceptable
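A minimal, self-contained sketch of the negotiation call the handler above relies on, assuming Werkzeug's parse_accept_header and MIMEAccept; the handlers mapping here is invented for illustration:

from werkzeug.http import parse_accept_header
from werkzeug.datastructures import MIMEAccept

# Hypothetical registry of media type -> handler name.
handlers = {"application/json": "json_handler", "text/csv": "csv_handler"}

accept = parse_accept_header("text/html;q=0.9, application/json", MIMEAccept)
# best_match only returns a type both sides support, or None otherwise.
assert accept.best_match(handlers) == "application/json"
assert parse_accept_header("image/png", MIMEAccept).best_match(handlers) is None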
def test_favour_quality_over_same_specificity():
    server_mimetypes = ["application/json", "application/marcxml+xml"]

    client_mimetypes = parse_accept_header(
        "application/json;q=0.5, application/marcxml+xml", MIMEAccept)
    assert "application/marcxml+xml" == ContentNegotiator.match_by_accept(
        server_mimetypes, client_mimetypes)

    client_mimetypes = parse_accept_header(
        "application/marcxml+xml;q=0.4, application/json;q=0.6", MIMEAccept)
    assert "application/json" == ContentNegotiator.match_by_accept(
        server_mimetypes, client_mimetypes)
def test_choose_provided_and_accepted_mimetype():
    # Should choose mimetype that is accepted by client and served by server
    server_mimetypes = ["application/json", "application/marcxml+xml"]

    client_mimetypes = parse_accept_header("text/plain,application/json,*/*",
                                           MIMEAccept)
    assert "application/json" == ContentNegotiator.match_by_accept(
        server_mimetypes, client_mimetypes)

    client_mimetypes = parse_accept_header(
        "text/plain,application/marcxml+xml,*/*", MIMEAccept)
    assert "application/marcxml+xml" == ContentNegotiator.match_by_accept(
        server_mimetypes, client_mimetypes)
async def route(self, request: Request) -> Optional[Response]:
    headers: MultiMapping[str] = await request.headers()
    if parse_accept_header(headers.get("Accept") or "*/*", MIMEAccept).best_match(
        self._types
    ):
        return await self._mg.act(request)
    return None
def get_user_locale(self):
    self.langs = self.application.service.langs
    lang_codes = self.langs.keys()

    if len(self.contest.allowed_localizations) > 0:
        lang_codes = filter_language_codes(
            lang_codes, self.contest.allowed_localizations)

    # TODO We fallback on "en" if no language matches: we could
    # return 406 Not Acceptable instead.

    # Select the one the user likes most.
    http_langs = [lang_code.replace("_", "-") for lang_code in lang_codes]
    self.browser_lang = parse_accept_header(
        self.request.headers.get("Accept-Language", ""),
        LanguageAccept).best_match(http_langs, "en")
    self.cookie_lang = self.get_cookie("language", None)

    if self.cookie_lang in http_langs:
        lang_code = self.cookie_lang
    else:
        lang_code = self.browser_lang

    self.set_header("Content-Language", lang_code)
    return self.langs[lang_code.replace("-", "_")]
def test_language_accept(self):
    a = http.parse_accept_header("de-AT,de;q=0.8,en;q=0.5",
                                 datastructures.LanguageAccept)
    self.assert_equal(a.best, "de-AT")
    self.assert_true("de_AT" in a)
    self.assert_true("en" in a)
    self.assert_equal(a["de-at"], 1)
    self.assert_equal(a["en"], 0.5)
def test_accept(self):
    a = http.parse_accept_header('en-us,ru;q=0.5')
    self.assert_equal(a.values(), ['en-us', 'ru'])
    self.assert_equal(a.best, 'en-us')
    self.assert_equal(a.find('ru'), 1)
    self.assert_raises(ValueError, a.index, 'de')
    self.assert_equal(a.to_header(), 'en-us,ru;q=0.5')
def setup_locale(self):
    lang_codes = list(iterkeys(self.available_translations))

    browser_langs = parse_accept_header(
        self.request.headers.get("Accept-Language", ""),
        LanguageAccept).values()
    automatic_lang = choose_language_code(browser_langs, lang_codes)
    if automatic_lang is None:
        automatic_lang = 'et'  # lang_codes[0]
    self.automatic_translation = \
        self.available_translations[automatic_lang]

    cookie_lang = self.get_cookie("language", None)
    if cookie_lang is not None:
        chosen_lang = \
            choose_language_code([cookie_lang, automatic_lang], lang_codes)
        if chosen_lang == cookie_lang:
            self.cookie_translation = \
                self.available_translations[cookie_lang]
    else:
        chosen_lang = automatic_lang
    self.translation = self.available_translations[chosen_lang]

    self._ = self.translation.gettext
    self.n_ = self.translation.ngettext

    self.set_header("Content-Language", chosen_lang)
def get_user_locale(self):
    self.langs = self.application.service.langs
    lang_codes = self.langs.keys()

    if self.contest.allowed_localizations:
        lang_codes = filter_language_codes(
            lang_codes, self.contest.allowed_localizations)

    # Select the one the user likes most.
    basic_lang = 'en'
    if self.contest.allowed_localizations:
        basic_lang = lang_codes[0].replace("_", "-")
    http_langs = [lang_code.replace("_", "-") for lang_code in lang_codes]
    self.browser_lang = parse_accept_header(
        self.request.headers.get("Accept-Language", ""),
        LanguageAccept).best_match(http_langs, basic_lang)
    self.cookie_lang = self.get_cookie("language", None)

    if self.cookie_lang in http_langs:
        lang_code = self.cookie_lang
    else:
        lang_code = self.browser_lang

    self.set_header("Content-Language", lang_code)
    return self.langs[lang_code.replace("-", "_")]
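The locale handlers above hinge on LanguageAccept.best_match falling back to a default when nothing the browser asked for is available. A small sketch, assuming Werkzeug; the header value and language lists are made up:

from werkzeug.http import parse_accept_header
from werkzeug.datastructures import LanguageAccept

accept = parse_accept_header("it, en;q=0.7", LanguageAccept)
# "it" is not served, "en" is, so the lower-quality "en" wins.
assert accept.best_match(["en", "de"]) == "en"
# Nothing matches at all, so the supplied default is returned.
assert accept.best_match(["de", "fr"], default="en") == "en"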
def setup_locale(self):
    lang_codes = list(iterkeys(self.available_translations))

    browser_langs = parse_accept_header(
        self.request.headers.get("Accept-Language", ""),
        LanguageAccept).values()
    automatic_lang = choose_language_code(browser_langs, lang_codes)
    if automatic_lang is None:
        automatic_lang = lang_codes[0]
    self.automatic_translation = \
        self.available_translations[automatic_lang]

    cookie_lang = self.get_cookie("language", None)
    if cookie_lang is not None:
        chosen_lang = \
            choose_language_code([cookie_lang, automatic_lang], lang_codes)
        if chosen_lang == cookie_lang:
            self.cookie_translation = \
                self.available_translations[cookie_lang]
    else:
        chosen_lang = automatic_lang
    self.translation = self.available_translations[chosen_lang]

    self._ = self.translation.gettext
    self.n_ = self.translation.ngettext

    self.set_header("Content-Language", chosen_lang)
def get_lang(self, environ):
    """
    Detects user's preferred language (either via the 'getlang' plugin
    or from HTTP_ACCEPT_LANGUAGE env value)

    arguments:
    environ -- WSGI environment variable

    returns:
    underscore-separated ISO 639 language code and ISO 3166 country code
    """
    cookies = KonTextCookie(environ.get('HTTP_COOKIE', ''))

    if plugins.runtime.GETLANG.exists:
        lgs_string = plugins.runtime.GETLANG.instance.fetch_current_language(
            cookies)
    else:
        lang_cookie = cookies.get('kontext_ui_lang')
        if not lang_cookie:
            lgs_string = parse_accept_header(
                environ.get('HTTP_ACCEPT_LANGUAGE')).best
        else:
            lgs_string = lang_cookie.value
    if lgs_string is None:
        lgs_string = 'en_US'
    if len(lgs_string) == 2:  # in case we obtain just an ISO 639 language code
        lgs_string = self._installed_langs.get(lgs_string)
    else:
        lgs_string = lgs_string.replace('-', '_')
    if lgs_string is None:
        lgs_string = 'en_US'
    return lgs_string
def browse():
    uri = request.args.get('uri', None)
    if uri is None:
        return document()
    else:
        if 'Accept' in request.headers:
            mimetype = parse_accept_header(request.headers['Accept']).best
        else:
            log.debug("No accept header, using 'text/html'")
            mimetype = 'text/html'

        try:
            if mimetype in ['text/html', 'application/xhtml_xml', '*/*']:
                results = visit(uri, format='html', external=True)
                local_results = localize_results(results)
                return render_template('resource.html',
                                       local_resource='http://bla',
                                       resource=uri,
                                       results=local_results,
                                       local=LOCAL_STORE,
                                       preflabel=PREFLABEL_SERVICE)
            elif mimetype in ['application/json']:
                response = make_response(visit(uri, format='jsonld',
                                               external=True), 200)
                response.headers['Content-Type'] = 'application/json'
                return response
            elif mimetype in ['application/rdf+xml', 'application/xml']:
                response = make_response(visit(uri, format='rdfxml',
                                               external=True), 200)
                response.headers['Content-Type'] = 'application/rdf+xml'
                return response
            elif mimetype in ['application/x-turtle', 'text/turtle']:
                response = make_response(visit(uri, format='turtle',
                                               external=True), 200)
                response.headers['Content-Type'] = 'text/turtle'
                return response
        except Exception as e:
            log.error(e)
            log.error(traceback.format_exc())
            return traceback.format_exc()
def test_language_accept(self):
    a = http.parse_accept_header("de-AT,de;q=0.8,en;q=0.5",
                                 datastructures.LanguageAccept)
    assert a.best == "de-AT"
    assert "de_AT" in a
    assert "en" in a
    assert a["de-at"] == 1
    assert a["en"] == 0.5
def get_lang(environ):
    """
    Detects user's preferred language (either via the 'getlang' plugin
    or from HTTP_ACCEPT_LANGUAGE env value)

    arguments:
    environ -- WSGI environment variable

    returns:
    underscore-separated ISO 639 language code and ISO 3166 country code
    """
    installed = dict([
        (x.split('_')[0], x)
        for x in os.listdir('%s/../locale' % os.path.dirname(__file__))
    ])

    if plugins.has_plugin('getlang'):
        lgs_string = plugins.get('getlang').fetch_current_language(
            KonTextCookie(environ.get('HTTP_COOKIE', '')))
    else:
        lgs_string = parse_accept_header(
            environ.get('HTTP_ACCEPT_LANGUAGE')).best
    if len(lgs_string) == 2:  # in case we obtain just an ISO 639 language code
        lgs_string = installed.get(lgs_string)
    else:
        lgs_string = lgs_string.replace('-', '_')
    if lgs_string is None:
        lgs_string = 'en_US'
    return lgs_string
def test_accept(self):
    a = http.parse_accept_header('en-us,ru;q=0.5')
    assert list(itervalues(a)) == ['en-us', 'ru']
    assert a.best == 'en-us'
    assert a.find('ru') == 1
    pytest.raises(ValueError, a.index, 'de')
    assert a.to_header() == 'en-us,ru;q=0.5'
def test_charset_accept(self):
    a = http.parse_accept_header('ISO-8859-1,utf-8;q=0.7,*;q=0.7',
                                 datastructures.CharsetAccept)
    assert a['iso-8859-1'] == a['iso8859-1']
    assert a['iso-8859-1'] == 1
    assert a['UTF8'] == 0.7
    assert a['ebcdic'] == 0.7
def test_choose_default_if_no_match_and_wildcard_accepted():
    # choose default if no match and client accepts wildcard
    server_mimetypes = ["application/json", "application/marcxml+xml"]

    client_mimetypes = parse_accept_header("text/plain,*/*", MIMEAccept)
    assert "application/json" == ContentNegotiator.match_by_accept(
        server_mimetypes, client_mimetypes, default="application/json")
def test_accept(self):
    a = http.parse_accept_header("en-us,ru;q=0.5")
    assert list(itervalues(a)) == ["en-us", "ru"]
    assert a.best == "en-us"
    assert a.find("ru") == 1
    pytest.raises(ValueError, a.index, "de")
    assert a.to_header() == "en-us,ru;q=0.5"
def test_charset_accept(self):
    a = http.parse_accept_header('ISO-8859-1,utf-8;q=0.7,*;q=0.7',
                                 datastructures.CharsetAccept)
    self.assert_equal(a['iso-8859-1'], a['iso8859-1'])
    self.assert_equal(a['iso-8859-1'], 1)
    self.assert_equal(a['UTF8'], 0.7)
    self.assert_equal(a['ebcdic'], 0.7)
def get_lang(self, environ):
    """
    Detects user's preferred language (either via the 'getlang' plugin
    or from HTTP_ACCEPT_LANGUAGE env value)

    arguments:
    environ -- WSGI environment variable

    returns:
    underscore-separated ISO 639 language code and ISO 3166 country code
    """
    cookies = KonTextCookie(environ.get('HTTP_COOKIE', ''))

    if plugins.runtime.GETLANG.exists:
        lgs_string = plugins.runtime.GETLANG.instance.fetch_current_language(cookies)
    else:
        lang_cookie = cookies.get('kontext_ui_lang')
        if not lang_cookie:
            lgs_string = parse_accept_header(environ.get('HTTP_ACCEPT_LANGUAGE')).best
        else:
            lgs_string = lang_cookie.value
    if len(lgs_string) == 2:  # in case we obtain just an ISO 639 language code
        lgs_string = self._installed_langs.get(lgs_string)
    else:
        lgs_string = lgs_string.replace('-', '_')
    if lgs_string is None:
        lgs_string = 'en_US'
    return lgs_string
def make_response(self, request, data, *args, **kwargs):
    """Looks up the representation transformer for the requested media
    type, invoking the transformer to create a response object. This
    defaults to default_mediatype if no transformer is found for the
    requested mediatype. If default_mediatype is None, a 406 Not
    Acceptable response will be sent as per RFC 2616 section 14.1

    :param data: Python object containing response data to be transformed
    """
    default_mediatype = kwargs.pop(
        "fallback_mediatype", None) or self.default_mediatype
    mediatype = parse_accept_header(
        request.headers.get('accept', None)).best_match(
            self.representations, default=default_mediatype)

    if not mediatype:
        raise NotAcceptable("Not Acceptable")
    if mediatype in self.representations:
        resp = self.representations[mediatype](request.app, data, *args,
                                               **kwargs)
        resp.headers["Content-type"] = mediatype
        return resp
    elif mediatype == "text/plain":
        resp = text(str(data), *args, **kwargs)
        return resp
    else:
        raise ServerError(None)
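A hypothetical illustration of the lookup above, assuming Werkzeug's parse_accept_header: best_match with default= returns the fallback mediatype when nothing matches, and a falsy result is what triggers the 406 path. The representations mapping is invented:

from werkzeug.http import parse_accept_header

representations = {"application/json": None, "application/xml": None}

accept = parse_accept_header("text/csv")
# No overlap with the registered representations, so the default wins.
assert accept.best_match(representations, default="application/json") == "application/json"
# With no default at all, None comes back and the caller raises 406.
assert accept.best_match(representations, default=None) is None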
def test_accept(self):
    a = http.parse_accept_header('en-us,ru;q=0.5')
    self.assert_equal(list(itervalues(a)), ['en-us', 'ru'])
    self.assert_equal(a.best, 'en-us')
    self.assert_equal(a.find('ru'), 1)
    self.assert_raises(ValueError, a.index, 'de')
    self.assert_equal(a.to_header(), 'en-us,ru;q=0.5')
def test_best_match_works(self):
    # was a bug in 0.6
    rv = http.parse_accept_header('foo=,application/xml,application/xhtml+xml,'
                                  'text/html;q=0.9,text/plain;q=0.8,'
                                  'image/png,*/*;q=0.5',
                                  datastructures.MIMEAccept).best_match(['foo/bar'])
    assert rv == 'foo/bar'
def test_accept(self):
    a = http.parse_accept_header("en-us,ru;q=0.5")
    self.assert_equal(list(itervalues(a)), ["en-us", "ru"])
    self.assert_equal(a.best, "en-us")
    self.assert_equal(a.find("ru"), 1)
    self.assert_raises(ValueError, a.index, "de")
    self.assert_equal(a.to_header(), "en-us,ru;q=0.5")
def test_charset_accept(self):
    a = http.parse_accept_header("ISO-8859-1,utf-8;q=0.7,*;q=0.7",
                                 datastructures.CharsetAccept)
    assert a["iso-8859-1"] == a["iso8859-1"]
    assert a["iso-8859-1"] == 1
    assert a["UTF8"] == 0.7
    assert a["ebcdic"] == 0.7
def test_accept(self):
    a = http.parse_accept_header("en-us,ru;q=0.5")
    assert list(a.values()) == ["en-us", "ru"]
    assert a.best == "en-us"
    assert a.find("ru") == 1
    pytest.raises(ValueError, a.index, "de")
    assert a.to_header() == "en-us,ru;q=0.5"
def browse():
    uri = request.args.get('uri', None)
    if uri is None:
        return document()
    else:
        if 'Accept' in request.headers:
            mimetype = parse_accept_header(request.headers['Accept']).best
        else:
            log.debug("No accept header, using 'text/html'")
            mimetype = 'text/html'

        try:
            if mimetype in ['text/html', 'application/xhtml_xml', '*/*']:
                results = visit(uri, format='html', external=True)
                local_results = localize_results(results)
                return render_template('resource.html',
                                       local_resource='http://bla',
                                       resource=uri,
                                       results=local_results,
                                       local=LOCAL_STORE)
            elif mimetype in ['application/json']:
                response = make_response(visit(uri, format='jsonld',
                                               external=True), 200)
                response.headers['Content-Type'] = 'application/json'
                return response
            elif mimetype in ['application/rdf+xml', 'application/xml']:
                response = make_response(visit(uri, format='rdfxml',
                                               external=True), 200)
                response.headers['Content-Type'] = 'application/rdf+xml'
                return response
            elif mimetype in ['application/x-turtle', 'text/turtle']:
                response = make_response(visit(uri, format='turtle',
                                               external=True), 200)
                response.headers['Content-Type'] = 'text/turtle'
                return response
        except Exception as e:
            log.error(e)
            log.error(traceback.format_exc())
            return traceback.format_exc()
def set_mimetype(self, environ):
    client_accepts = dict(parse_accept_header(environ['HTTP_ACCEPT']))
    if 'application/json' in client_accepts:
        self.mime_type = 'application/json'
    else:
        # fallback if the client has not told us that it is calling the API
        self.mime_type = 'text/html'
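set_mimetype (and the two encode helpers further down) lean on the fact that the parsed Accept object iterates as (value, quality) pairs, so dict() turns it into a value-to-quality mapping. A small sketch, assuming Werkzeug and a made-up header value:

from werkzeug.http import parse_accept_header

accepts = dict(parse_accept_header("application/json, text/html;q=0.8"))
# Each entry becomes a media type keyed to its quality factor.
assert accepts == {"application/json": 1, "text/html": 0.8}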
def test_best_match_works(self):
    # was a bug in 0.6
    rv = http.parse_accept_header('foo=,application/xml,application/xhtml+xml,'
                                  'text/html;q=0.9,text/plain;q=0.8,'
                                  'image/png,*/*;q=0.5',
                                  datastructures.MIMEAccept).best_match(['foo/bar'])
    self.assert_equal(rv, 'foo/bar')
def test_best_match_works(self):
    # was a bug in 0.6
    rv = http.parse_accept_header(
        "foo=,application/xml,application/xhtml+xml,"
        "text/html;q=0.9,text/plain;q=0.8,"
        "image/png,*/*;q=0.5",
        datastructures.MIMEAccept,
    ).best_match(["foo/bar"])
    self.assert_equal(rv, "foo/bar")
def test_charset_accept(self):
    a = http.parse_accept_header(
        "ISO-8859-1,utf-8;q=0.7,*;q=0.7", datastructures.CharsetAccept
    )
    assert a["iso-8859-1"] == a["iso8859-1"]
    assert a["iso-8859-1"] == 1
    assert a["UTF8"] == 0.7
    assert a["ebcdic"] == 0.7
def test_accept_mime_specificity(self):
    a = http.parse_accept_header(
        "text/*, text/html, text/html;level=1, */*", datastructures.MIMEAccept)
    assert a.best_match(["text/html; version=1", "text/html"]) == "text/html"
    assert a.best_match(["text/html", "text/html; level=1"]) == "text/html; level=1"
def test_language_accept(self):
    a = http.parse_accept_header('de-AT,de;q=0.8,en;q=0.5',
                                 datastructures.LanguageAccept)
    assert a.best == 'de-AT'
    assert 'de_AT' in a
    assert 'en' in a
    assert a['de-at'] == 1
    assert a['en'] == 0.5
def __getitem__(self, accept: Optional[str]):
    """Find the best serializer for the given Accept header."""
    if not accept:
        return self.default
    media_type = parse_accept_header(accept).best_match(self.keys())
    if not media_type:
        return self.default
    return super().__getitem__(media_type)
def test_language_accept(self):
    a = http.parse_accept_header('de-AT,de;q=0.8,en;q=0.5',
                                 datastructures.LanguageAccept)
    self.assert_equal(a.best, 'de-AT')
    self.assert_true('de_AT' in a)
    self.assert_true('en' in a)
    self.assert_equal(a['de-at'], 1)
    self.assert_equal(a['en'], 0.5)
def test_language_accept(self):
    a = http.parse_accept_header('de-AT,de;q=0.8,en;q=0.5',
                                 datastructures.LanguageAccept)
    self.assert_equal(a.best, 'de-AT')
    self.assert_('de_AT' in a)
    self.assert_('en' in a)
    self.assert_equal(a['de-at'], 1)
    self.assert_equal(a['en'], 0.5)
def commits(branch_or_ref):
    """
    Lists all commits of a given git branch.

    Returns:
        HTTP Response 200: a list of commits
        HTTP Response 403: unknown branch or ref
        HTTP Response 406: Unsupported Mimetype requested
    """
    quit = current_app.config['quit']

    if not branch_or_ref:
        branch_or_ref = quit.getDefaultBranch()

    try:
        current_app.logger.debug(branch_or_ref)
        if not quit.repository.is_empty:
            results = quit.repository.revisions(branch_or_ref,
                                                order=pygit2.GIT_SORT_TIME)
        else:
            results = []

        if 'Accept' in request.headers:
            mimetype = parse_accept_header(request.headers['Accept']).best
        else:
            mimetype = '*/*'

        if mimetype in ['text/html', 'application/xhtml_xml', '*/*']:
            data = generate_graph_data(CommitGraph.gets(results))
            response = make_response(render_template(
                'commits.html', results=results, data=data,
                current_ref=branch_or_ref, isLoggedIn=isLoggedIn,
                githubEnabled=githubEnabled))
            response.headers['Content-Type'] = 'text/html'
            return response
        elif mimetype in ['application/json', 'application/sparql-results+json']:
            res = []
            for revision in results:
                res.append({
                    "id": revision.id,
                    "author_name": revision.author.name,
                    "author_email": revision.author.email,
                    "author_time": str(git_timestamp(revision.author.time,
                                                     revision.author.offset)),
                    "committer_name": revision.committer.name,
                    "committer_email": revision.committer.email,
                    "committer_time": str(git_timestamp(revision.committer.time,
                                                        revision.committer.offset)),
                    "committer_offset": revision.committer.offset,
                    "message": revision.message,
                    "parrents": [parent.id for parent in revision.parents],
                })
            response = make_response(json.dumps(res), 200)
            response.headers['Content-Type'] = 'application/json'
            return response
        else:
            return "<pre>Unsupported Mimetype: {}</pre>".format(mimetype), 406
    except Exception as e:
        current_app.logger.error(e)
        current_app.logger.error(traceback.format_exc())
        return "<pre>" + traceback.format_exc() + "</pre>", 403
def test_favour_specificity_over_quality():
    # favour more specific but lower quality mimetype over
    # less specific (e.g. wildcard) but higher quality
    server_mimetypes = ["application/json", "application/marcxml+xml"]

    client_mimetypes = parse_accept_header(
        "text/plain, application/json;q=0.5, */*", MIMEAccept)
    assert "application/json" == ContentNegotiator.match_by_accept(
        server_mimetypes, client_mimetypes)
def test_choose_none_if_no_match_and_wildcard_not_accepted():
    server_mimetypes = ["application/json", "application/marcxml+xml"]

    client_mimetypes = parse_accept_header("text/plain", MIMEAccept)
    mime_type = ContentNegotiator.match_by_accept(
        server_mimetypes, client_mimetypes, default="application/json")
    assert mime_type is None
def accept_encodings(self):
    """
    List of encodings this client accepts.  In HTTP terms, encodings
    are compression encodings such as gzip.  For charsets have a look
    at `accept_charset`.
    """
    if 'HTTP_ACCEPT_ENCODING' not in self.environ:
        return Accept(None)
    return parse_accept_header(self.environ['HTTP_ACCEPT_ENCODING'])
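The encoding variant above uses the generic Accept class, where quality lookup works by indexing. A minimal sketch, assuming Werkzeug and a made-up Accept-Encoding value:

from werkzeug.http import parse_accept_header

enc = parse_accept_header("gzip, identity;q=0.5, *;q=0")
assert enc.best == "gzip"          # highest quality factor
assert enc["identity"] == 0.5      # explicit quality
assert enc["br"] == 0              # only covered by the *;q=0 entry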
def encode(self, val):
    # Keep only the accepted values (quality > 0) and join them with a
    # NUL separator.
    vals = http.parse_accept_header(val)
    _v = ''
    for val in vals:
        if val[1] > 0.0:
            if len(_v) > 0:
                _v += '\x00'
            _v += val[0]
    return _v
def enc_accept(val, type='req'):
    # Keep only the accepted values (quality > 0) and join them with a
    # NUL separator.
    vals = http.parse_accept_header(val)
    _v = ''
    for val in vals:
        if val[1] > 0.0:
            if len(_v) > 0:
                _v += '\x00'
            _v += val[0]
    return _v
def _convert_adapter(self, adapter, environ=None):
    mimetypes = parse_accept_header(
        environ.get('HTTP_ACCEPT'), MIMEAccept
    ) if environ else None
    return MapAdapter(adapter.map, adapter.server_name, adapter.script_name,
                      adapter.subdomain, adapter.url_scheme, adapter.path_info,
                      adapter.default_method, adapter.query_args, mimetypes)
def test_favour_query_mimetype_over_header_mimetype():
    server_mimetypes = ["application/json", "application/marcxml+xml"]

    client_mimetypes = parse_accept_header("application/json", MIMEAccept)
    formats_map = {
        "json": "application/json",
        "marcxml": "application/marcxml+xml",
    }
    fmt = "marcxml"
    assert "application/marcxml+xml" == ContentNegotiator.match(
        server_mimetypes, client_mimetypes, formats_map, fmt)

    client_mimetypes = parse_accept_header("application/marcxml+xml",
                                           MIMEAccept)
    fmt = "json"
    assert "application/json" == ContentNegotiator.match(
        server_mimetypes, client_mimetypes, formats_map, fmt)
def test_best_match_works(self):
    # was a bug in 0.6
    rv = http.parse_accept_header(
        "foo=,application/xml,application/xhtml+xml,"
        "text/html;q=0.9,text/plain;q=0.8,"
        "image/png,*/*;q=0.5",
        datastructures.MIMEAccept,
    ).best_match(["foo/bar"])
    assert rv == "foo/bar"
def test_accept_matches(self):
    a = http.parse_accept_header(
        "text/xml,application/xml,application/xhtml+xml,"
        "text/html;q=0.9,text/plain;q=0.8,"
        "image/png",
        datastructures.MIMEAccept,
    )
    self.assert_equal(a.best_match(["text/html", "application/xhtml+xml"]),
                      "application/xhtml+xml")
    self.assert_equal(a.best_match(["text/html"]), "text/html")
    self.assert_true(a.best_match(["foo/bar"]) is None)
    self.assert_equal(a.best_match(["foo/bar", "bar/foo"], default="foo/bar"),
                      "foo/bar")
    self.assert_equal(a.best_match(["application/xml", "text/xml"]),
                      "application/xml")
def test_accept_matches(self):
    a = http.parse_accept_header('text/xml,application/xml,application/xhtml+xml,'
                                 'text/html;q=0.9,text/plain;q=0.8,'
                                 'image/png', datastructures.MIMEAccept)
    assert a.best_match(['text/html', 'application/xhtml+xml']) == \
        'application/xhtml+xml'
    assert a.best_match(['text/html']) == 'text/html'
    assert a.best_match(['foo/bar']) is None
    assert a.best_match(['foo/bar', 'bar/foo'], default='foo/bar') == 'foo/bar'
    assert a.best_match(['application/xml', 'text/xml']) == 'application/xml'
def test_mime_accept(self):
    a = http.parse_accept_header('text/xml,application/xml,'
                                 'application/xhtml+xml,'
                                 'text/html;q=0.9,text/plain;q=0.8,'
                                 'image/png,*/*;q=0.5',
                                 datastructures.MIMEAccept)
    self.assert_raises(ValueError, lambda: a['missing'])
    self.assert_equal(a['image/png'], 1)
    self.assert_equal(a['text/plain'], 0.8)
    self.assert_equal(a['foo/bar'], 0.5)
    self.assert_equal(a[a.find('foo/bar')], ('*/*', 0.5))
def test_mime_accept(self):
    a = http.parse_accept_header('text/xml,application/xml,'
                                 'application/xhtml+xml,'
                                 'application/foo;quiet=no; bar=baz;q=0.6,'
                                 'text/html;q=0.9,text/plain;q=0.8,'
                                 'image/png,*/*;q=0.5',
                                 datastructures.MIMEAccept)
    pytest.raises(ValueError, lambda: a['missing'])
    assert a['image/png'] == 1
    assert a['text/plain'] == 0.8
    assert a['foo/bar'] == 0.5
    assert a['application/foo;quiet=no; bar=baz'] == 0.6
    assert a[a.find('foo/bar')] == ('*/*', 0.5)
def test_mime_accept(self):
    a = http.parse_accept_header(
        "text/xml,application/xml,"
        "application/xhtml+xml,"
        "text/html;q=0.9,text/plain;q=0.8,"
        "image/png,*/*;q=0.5",
        datastructures.MIMEAccept,
    )
    self.assert_raises(ValueError, lambda: a["missing"])
    self.assert_equal(a["image/png"], 1)
    self.assert_equal(a["text/plain"], 0.8)
    self.assert_equal(a["foo/bar"], 0.5)
    self.assert_equal(a[a.find("foo/bar")], ("*/*", 0.5))
def negotiate(graph, html_template, request):
    '''
    Negotiate the response to return

    @param graph: the RDF graph containing the data to render
    @param html_template: the template to use for HTML responses
    @param request: the incoming request; its Accept header and URL
        suffix drive the negotiation
    '''
    # Serve HTML by default
    mimetype = 'text/html'

    logger.debug('{}'.format(len(graph)))

    # Use the accept header if it was provided
    if 'Accept' in request.headers:
        mimetype = parse_accept_header(request.headers['Accept']).best
        logger.debug("Asked for {} in content neg".format(mimetype))

    # If a known suffix was asked use that instead of the accept header
    ext = os.path.splitext(request.base_url)[1]
    if ext in SUFFIX_TO_MIME:
        mimetype = SUFFIX_TO_MIME[ext]
        logger.debug("Asked for {} using {}".format(mimetype, ext))

    logger.debug("Will serve {}".format(mimetype))

    # Serve HTML
    if mimetype in ['text/html', 'application/xhtml_xml', '*/*']:
        # Get data usable by the template engine
        data = graph_to_python(request, graph)

        # Render the requested template
        return render_template(html_template, data=data)

    # Serve Turtle
    elif mimetype in ['text/turtle', 'application/x-turtle']:
        logger.debug(graph.serialize(format='turtle'))
        response = make_response(graph.serialize(format='turtle'))
        response.headers['Content-Type'] = mimetype
        return response

    # Serve N-triples
    elif mimetype in ['application/n-triples']:
        response = make_response(graph.serialize(format='nt'))
        response.headers['Content-Type'] = mimetype
        return response

    # Serve RDF+XML :-(
    elif mimetype in ['application/rdf+xml']:
        response = make_response(graph.serialize(format='pretty-xml'))
        response.headers['Content-Type'] = mimetype
        return response