def test_list_with_localization_tags(redirect_doc, doc_hierarchy, client, locale):
    """Documents whose current revision carries a localization tag are
    listed per requested locale; the redirect document is never shown."""
    top_doc = doc_hierarchy.top
    bottom_doc = doc_hierarchy.bottom
    de_doc = top_doc.translated_to('de')
    # Tag every candidate, including the redirect, which must still be
    # excluded from the listing.
    for doc in (top_doc, bottom_doc, de_doc, redirect_doc):
        doc.current_revision.localization_tags.set('inprogress')
    expected_by_locale = {
        'en-US': (top_doc, bottom_doc),
        'de': (de_doc,),
    }
    exp_docs = expected_by_locale.get(locale, ())  # 'fr' has no matches
    url = reverse('wiki.list_with_localization_tags', locale=locale)
    response = client.get(url)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert 'text/html' in response['Content-Type']
    page = pq(response.content)
    assert len(page.find('.document-list li')) == len(exp_docs)
    selector = 'ul.document-list li a[href="/{}/docs/{}"]'
    for doc in exp_docs:
        assert len(page.find(selector.format(doc.locale, doc.slug))) == 1
def test_tag_list(root_doc, trans_doc, client, locale_case, tag_case, tag):
    """
    Verify the tagged documents list view.

    Tags should be case insensitive (https://bugzil.la/976071).
    """
    tag_query = getattr(tag, tag_case)()
    root_doc.tags.set('foo', 'bar')
    trans_doc.tags.set('foo', 'bar')
    exp_doc = root_doc if (locale_case == 'root') else trans_doc
    url = reverse('wiki.tag', locale=exp_doc.locale, kwargs={'tag': tag_query})
    response = client.get(url)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    page = pq(response.content)
    selector = 'ul.document-list li a[href="/{}/docs/{}"]'
    assert len(page('#document-list ul.document-list li')) == 1
    assert len(page.find(selector.format(exp_doc.locale, exp_doc.slug))) == 1
    # Changing the tags to something other than what we're
    # searching for should take the results to zero.
    for doc in (root_doc, trans_doc):
        doc.tags.set('foobar')
    response = client.get(url)
    assert response.status_code == 200
    page = pq(response.content)
    assert len(page('#document-list ul.document-list li')) == 0
    body = response.content.decode('utf-8')
    assert root_doc.slug not in body
    assert trans_doc.slug not in body
def test_search_plugin(db, client, locale):
    """The OpenSearch plugin description is served for each locale."""
    resp = client.get(reverse('search.plugin', locale=locale))
    assert resp.status_code == 200
    assert_shared_cache_header(resp)
    assert resp['Content-Type'] == 'application/opensearchdescription+xml'
    template_names = [t.name for t in resp.templates]
    assert 'search/plugin.html' in template_names
    # The plugin points searches at the locale-specific endpoint.
    assert '/{}/search'.format(locale) in resp.content.decode('utf-8')
def test_api_put_authkey_tracking(client, authkey):
    """ Revisions modified by PUT API should track the auth key used """
    url = '/en-US/docs/foobar$api'

    def put_doc(payload):
        # Encode the payload and PUT it with the auth key header.
        content_type, encoded = get_content('json', payload)
        return client.put(url, data=encoded, content_type=content_type,
                          HTTP_AUTHORIZATION=authkey.header)

    def latest_key_action():
        # Most recent history entry recorded against the auth key.
        return authkey.key.history.order_by('-pk').all()[0].action

    data = dict(
        title="Foobar, The Document",
        content='<p>Hello, I am foobar.</p>',
    )
    response = put_doc(data)
    assert response.status_code == 201  # document created
    assert_shared_cache_header(response)
    assert latest_key_action() == 'created'

    data['title'] = 'Foobar, The New Document'
    response = put_doc(data)
    assert response.status_code == 205  # document updated
    assert_shared_cache_header(response)
    assert latest_key_action() == 'updated'
def test_recent_revisions_diff_includes_tags(create_revision, client):
    """The revision feed includes document tags and editorial flags."""
    new_revision = create_revision.document.revisions.create(
        title=create_revision.title,
        content=create_revision.content,
        creator=create_revision.creator,
        tags='"NewTag"')
    new_revision.review_tags.add('editorial')
    feed_url = reverse('wiki.feeds.recent_revisions', kwargs={'format': 'rss'})
    response = client.get(feed_url)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    items = pq(response.content).find('item')
    assert len(items) == 2
    created_desc, edited_desc = [pq(item).find('description').text()
                                 for item in items]
    # The first item is the creation; only the second is an edit.
    assert 'Edited' not in created_desc
    assert 'Edited' in edited_desc
    assert '<h3>Tag changes:</h3>' in edited_desc
    assert ('<span class="diff_add" style="background-color: #afa; '
            'text-decoration: none;">"NewTag"</span>') in edited_desc
    assert '<h3>Review changes:</h3>' in edited_desc
    assert ('<span class="diff_add" style="background-color: #afa; '
            'text-decoration: none;">editorial</span>') in edited_desc
def test_recent_documents_optional_items(create_revision, client):
    """The recent documents JSON feed includes some items if set."""
    feed_url = reverse('wiki.feeds.recent_documents', kwargs={'format': 'json'})

    def fetch_feed():
        resp = client.get(feed_url)
        assert resp.status_code == 200
        return resp, json.loads(resp.content)

    resp, data = fetch_feed()
    assert_shared_cache_header(resp)
    assert len(data) == 1
    assert data[0]['author_avatar'].startswith(
        'https://secure.gravatar.com/avatar/')
    assert 'summary' not in data[0]

    # Clearing the creator's email removes the gravatar-based avatar.
    create_revision.creator.email = ''
    create_revision.creator.save()
    _, data = fetch_feed()
    assert 'author_avatar' not in data[0]

    # A saved summary appears in the feed entry.
    create_revision.summary = 'The summary'
    create_revision.save()
    _, data = fetch_feed()
    assert data[0]['summary'] == 'The summary'
def test_recent_revisions_all_locales(trans_edit_revision, client, settings):
    """The ?all_locales parameter returns mixed locales (bug 869301)."""
    host = 'example.com'
    settings.ALLOWED_HOSTS.append(host)
    feed_url = reverse('wiki.feeds.recent_revisions', kwargs={'format': 'rss'})
    response = client.get(feed_url, {'all_locales': ''}, HTTP_HOST=host,
                          HTTP_X_FORWARDED_PROTO='https')
    assert response.status_code == 200
    assert_shared_cache_header(response)
    items = pq(response.content).find('item')
    assert len(items) == 4
    links = [pq(item).find('link').text() for item in items]
    # Every link is served from the requested host domain.
    assert [urlparse(link).netloc for link in links] == [host] * 4
    # Links alternate between en-US documents and their translations.
    trans_doc = trans_edit_revision.document
    root_doc = trans_doc.parent
    expected_paths = [
        root_doc.get_absolute_url(),
        trans_doc.get_absolute_url(),
    ] * 2
    assert [urlparse(link).path for link in links] == expected_paths
def test_feed(self):
    """The recent attachments JSON feed lists the current revision."""
    test_user = self.user_model.objects.get(username='******')
    attachment = Attachment(title='Test attachment for get_previous')
    attachment.save()
    # Backdate the revision slightly so it is unambiguously "recent past".
    revision = AttachmentRevision(
        attachment=attachment,
        mime_type='text/plain',
        title=attachment.title,
        description='',
        comment='Initial revision.',
        created=datetime.datetime.now() - datetime.timedelta(seconds=30),
        creator=test_user,
        is_approved=True)
    revision.file.save('get_previous_test_file.txt',
                       ContentFile('I am a test file for get_previous'))
    revision.save()
    revision.make_current()
    feed_url = reverse('attachments.feeds.recent_files',
                       kwargs={'format': 'json'})
    response = self.client.get(feed_url)
    assert_shared_cache_header(response)
    data = json.loads(response.content)
    assert len(data) == 1
    entry = data[0]
    assert entry['title'] == revision.title
    assert entry['link'] == revision.attachment.get_file_url()
    assert entry['author_name'] == test_user.username
def test_revisions_all_params_as_anon_user_is_forbidden(root_doc, client):
    """Anonymous users are forbidden to request all revisions."""
    url = reverse('wiki.document_revisions', args=(root_doc.slug,))
    response = client.get(urlparams(url, limit='all'))
    assert response.status_code == 403
    assert_shared_cache_header(response)
def test_recent_documents_atom_feed(root_doc, client):
    """The recent documents feed can be formatted as an Atom feed."""
    feed_url = reverse('wiki.feeds.recent_documents', kwargs={'format': 'atom'})
    response = client.get(feed_url)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert response['Content-Type'] == 'application/atom+xml; charset=utf-8'
def test_disallowed_methods(db, client, http_method, endpoint):
    """HTTP methods other than GET & HEAD are not allowed."""
    # Tag-based endpoints need a tag in the URL kwargs.
    tagged_endpoints = ('tag', 'list_review_tag', 'list_with_localization_tag')
    kwargs = dict(tag='tag') if endpoint in tagged_endpoints else None
    url = reverse('wiki.{}'.format(endpoint), kwargs=kwargs)
    response = getattr(client, http_method)(url)
    assert response.status_code == 405
    assert_shared_cache_header(response)
def test_l10n_updates_no_updates(trans_doc, client):
    """When translations are up-to-date, l10n-updates feed is empty."""
    feed_url = reverse('wiki.feeds.l10n_updates', locale=trans_doc.locale,
                       kwargs={'format': 'json'})
    response = client.get(feed_url)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    # No entries, translation is up to date
    assert len(json.loads(response.content)) == 0
def test_disallowed_methods(client, http_method, endpoint):
    """HTTP methods other than GET & HEAD are not allowed."""
    # All endpoints except 'json' take a document path.
    kwargs = None if endpoint == 'json' else dict(document_path='Web/CSS')
    url = reverse('wiki.{}'.format(endpoint), kwargs=kwargs)
    response = getattr(client, http_method)(url)
    assert response.status_code == 405
    assert_shared_cache_header(response)
def test_tags(root_doc, client):
    """Test list of all tags."""
    root_doc.tags.set('foobar', 'blast')
    response = client.get(reverse('wiki.list_tags'))
    assert response.status_code == 200
    assert_shared_cache_header(response)
    # Both tags set above appear in the rendered list.
    for tag in (b'foobar', b'blast'):
        assert tag in response.content
    assert 'wiki/list/tags.html' in [t.name for t in response.templates]
def test_l10n_updates_parent_updated(trans_doc, edit_revision, client):
    """Out-of-date translations appear in the l10n-updates feed."""
    feed_url = reverse('wiki.feeds.l10n_updates', locale=trans_doc.locale,
                       kwargs={'format': 'json'})
    response = client.get(feed_url)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    entries = json.loads(response.content)
    assert len(entries) == 1
    assert trans_doc.get_absolute_url() in entries[0]['link']
def test_robots_not_allowed(client):
    """By default, robots.txt shows that robots are not allowed."""
    response = client.get(reverse('robots_txt'))
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert response['Content-Type'] == 'text/plain'
    body = response.content
    # Everything is disallowed wholesale, so no sitemap and no
    # per-path rules are emitted.
    assert b'Sitemap: ' not in body
    assert b'Disallow: /\n' in body
    assert b'Disallow: /admin/\n' not in body
def test_robots_allowed_main_attachment_host(client, settings):
    """On the main attachment host, allow robots without restrictions."""
    host = 'samples.mdn.moz.works'
    settings.ALLOW_ROBOTS_DOMAINS = [host]
    settings.ALLOWED_HOSTS.append(host)
    response = client.get(reverse('robots_txt'), HTTP_HOST=host)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert response['Content-Type'] == 'text/plain'
    # An empty robots.txt imposes no restrictions at all.
    assert response.content == b''
def test_list_no_redirects(redirect_doc, doc_hierarchy, client):
    """The all-documents listing excludes redirect documents."""
    response = client.get(reverse('wiki.all_documents'))
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert 'text/html' in response['Content-Type']
    # There should be 4 documents in the 'en-US' locale from
    # doc_hierarchy, plus the root_doc (which is pulled-in by
    # the redirect_doc), but the redirect_doc should not be one of them.
    assert len(pq(response.content).find('.document-list li')) == 5
    assert redirect_doc.slug.encode('utf-8') not in response.content
def test_recent_documents_feed_filter_by_locale(locale, trans_edit_revision,
                                                client):
    """The recent documents feed can be filtered by locale."""
    feed_url = reverse('wiki.feeds.recent_documents', locale=locale,
                       kwargs={'format': 'json'})
    response = client.get(feed_url)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    entries = json.loads(response.content)
    assert len(entries) == 1
    # The single entry links into the requested locale.
    assert urlparse(entries[0]['link']).path.startswith('/' + locale + '/')
def test_robots_allowed_main_website(client, settings):
    """On the main website, allow robots with restrictions."""
    host = 'main.mdn.moz.works'
    settings.ALLOW_ROBOTS_WEB_DOMAINS = [host]
    # The test host must be in ALLOWED_HOSTS or Django rejects the
    # request with a 400 before the view runs (the sibling copies of
    # this test do this too).
    settings.ALLOWED_HOSTS.append(host)
    response = client.get(reverse('robots_txt'), HTTP_HOST=host)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert response['Content-Type'] == 'text/plain'
    content = response.content
    # response.content is bytes, so the membership checks must use bytes
    # literals: a str operand raises TypeError on Python 3.
    assert b'Sitemap: ' in content
    assert b'Disallow: /\n' not in content
    assert b'Disallow: /admin/\n' in content
def test_disallowed_methods(db, client, http_method, endpoint):
    """HTTP methods other than GET & HEAD are not allowed."""
    url = reverse('dashboards.{}'.format(endpoint))
    response = getattr(client, http_method)(url)
    assert response.status_code == 405
    # The spam and revisions dashboards are never cached; everything
    # else uses the shared public cache policy.
    if endpoint in ('spam', 'revisions'):
        assert_no_cache_header(response)
    else:
        assert_shared_cache_header(response)
        if endpoint in ('user_lookup', 'topic_lookup'):
            # Lookup endpoints vary on the AJAX request header.
            assert 'Vary' in response
            assert 'X-Requested-With' in response['Vary']
def test_fallback_to_translation(root_doc, trans_doc, client, params_case):
    """
    If a slug isn't found in the requested locale but is in the default
    locale and if there is a translation of that default-locale document to
    the requested locale, the translation should be served.
    """
    params = '?x=y&x=z' if (params_case == 'with-params') else ''
    url = reverse('wiki.document', args=[root_doc.slug], locale='fr')
    response = client.get(url + params)
    assert response.status_code == 302
    assert_shared_cache_header(response)
    # Query parameters survive the redirect to the translation.
    expected_target = trans_doc.get_absolute_url() + params
    assert response['Location'].endswith(expected_target)
def test_fallback_to_translation(root_doc, trans_doc, client, params_case):
    """
    If a slug isn't found in the requested locale but is in the default
    locale and if there is a translation of that default-locale document to
    the requested locale, the translation should be served.
    """
    params = "?x=y&x=z" if (params_case == "with-params") else ""
    url = reverse("wiki.document", args=[root_doc.slug], locale="fr")
    response = client.get(url + params, HTTP_HOST=settings.WIKI_HOST)
    assert response.status_code == 302
    assert_shared_cache_header(response)
    # Query parameters survive the redirect to the translation.
    expected_target = trans_doc.get_absolute_url() + params
    assert response["Location"].endswith(expected_target)
def test_disallowed_methods(client, http_method, endpoint):
    """HTTP methods other than GET & HEAD are not allowed."""
    headers = {}
    # All endpoints except 'json' take a document path.
    kwargs = None if endpoint == "json" else dict(document_path="Web/CSS")
    if endpoint == "toc":
        # The table-of-contents endpoint only answers on the wiki host.
        headers["HTTP_HOST"] = settings.WIKI_HOST
    url = reverse("wiki.{}".format(endpoint), kwargs=kwargs)
    response = getattr(client, http_method)(url, **headers)
    assert response.status_code == 405
    assert_shared_cache_header(response)
def test_disallowed_methods(db, client, http_method, endpoint):
    """HTTP methods other than GET & HEAD are not allowed."""
    url = reverse("dashboards.{}".format(endpoint))
    response = getattr(client, http_method)(url, HTTP_HOST=settings.WIKI_HOST)
    assert response.status_code == 405
    if endpoint not in ("spam", "revisions"):
        assert_shared_cache_header(response)
        if endpoint in ("user_lookup", "topic_lookup"):
            # Lookup endpoints vary on the AJAX request header.
            assert "Vary" in response
            assert "X-Requested-With" in response["Vary"]
    else:
        # The spam and revisions dashboards are never cached.
        assert_no_cache_header(response)
def test_legacy_redirect(client, file_attachment):
    """A MindTouch-era file URL permanently redirects to the attachment."""
    file_info = file_attachment['file']
    mindtouch_url = reverse('attachments.mindtouch_file_redirect',
                            args=(),
                            kwargs={'file_id': file_info['id'],
                                    'filename': file_info['name']})
    response = client.get(mindtouch_url)
    assert response.status_code == 301
    assert_shared_cache_header(response)
    assert response['Location'] == file_attachment['attachment'].get_file_url()
    assert not response.has_header('Vary')
def test_recent_revisions_feed_filter_by_locale(locale, trans_edit_revision,
                                                client):
    """The recent revisions feed can be filtered by locale."""
    feed_url = reverse(
        "wiki.feeds.recent_revisions", locale=locale, kwargs={"format": "json"}
    )
    response = client.get(feed_url)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    entries = json.loads(response.content)
    assert len(entries) == 2
    # Every entry links into the requested locale.
    prefix = "/" + locale + "/"
    assert all(urlparse(entry["link"]).path.startswith(prefix)
               for entry in entries)
def test_api_put_unsupported_content_type(client, authkey):
    """
    A PUT to the wiki.document_api endpoint with an unsupported content
    type should return a 400.
    """
    response = client.put('/en-US/docs/foobar$api',
                          data='stuff',
                          content_type='nonsense',
                          HTTP_AUTHORIZATION=authkey.header,
                          HTTP_HOST=settings.WIKI_HOST)
    assert response.status_code == 400
    assert_shared_cache_header(response)
def test_compare_revisions(edit_revision, client, raw):
    """Comparing two valid revisions of the same document works."""
    doc = edit_revision.document
    first_revision = doc.revisions.first()
    params = {"from": first_revision.id, "to": edit_revision.id}
    if raw:
        params["raw"] = "1"
    compare_url = reverse("wiki.compare_revisions", args=[doc.slug])
    response = client.get(urlparams(compare_url, **params),
                          HTTP_HOST=settings.WIKI_HOST)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    # Revision diffs are excluded from search-engine indexing.
    assert response["X-Robots-Tag"] == "noindex"
def test_robots_allowed_main_website(client, settings):
    """On the main website, allow robots with restrictions."""
    host = "main.mdn.moz.works"
    settings.ALLOW_ROBOTS_WEB_DOMAINS = [host]
    settings.ALLOWED_HOSTS.append(host)
    response = client.get(reverse("robots_txt"), HTTP_HOST=host)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert response["Content-Type"] == "text/plain"
    body = response.content
    # A sitemap plus per-path rules, but no blanket disallow.
    assert b"Sitemap: " in body
    assert b"Disallow: /\n" not in body
    assert b"Disallow: /admin/\n" in body
def test_disallowed_methods(client, db, http_method, endpoint):
    """
    HTTP methods other than GET & HEAD are not allowed.

    TODO: Remove db fixture when bug 1462475 (disable zone URL root) is
    fixed.
    """
    # All endpoints except 'json' take a document path.
    kwargs = None if endpoint == 'json' else dict(document_path='Web/CSS')
    url = reverse('wiki.{}'.format(endpoint), kwargs=kwargs)
    response = getattr(client, http_method)(url)
    assert response.status_code == 405
    assert_shared_cache_header(response)
def test_robots_allowed_main_website(client, settings):
    """On the main website, allow robots with restrictions."""
    host = 'main.mdn.moz.works'
    settings.ALLOW_ROBOTS_WEB_DOMAINS = [host]
    settings.ALLOWED_HOSTS.append(host)
    response = client.get(reverse('robots_txt'), HTTP_HOST=host)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert response['Content-Type'] == 'text/plain'
    body = response.content
    # A sitemap and admin-only restriction, but no blanket disallow.
    assert b'Sitemap: ' in body
    assert b'Disallow: /admin/\n' in body
    assert b'Disallow: /\n' not in body
def test_recent_documents_feed_filter_by_locale(locale, trans_edit_revision,
                                                client):
    """The recent documents feed can be filtered by locale."""
    feed_url = reverse('wiki.feeds.recent_documents', locale=locale,
                       kwargs={'format': 'json'})
    response = client.get(feed_url)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    data = json.loads(response.content)
    assert len(data) == 1
    link_path = urlparse(data[0]['link']).path
    assert link_path.startswith('/' + locale + '/')
def test_api_put_unsupported_content_type(client, authkey):
    """
    A PUT to the wiki.document_api endpoint with an unsupported content
    type should return a 400.
    """
    response = client.put('/en-US/docs/foobar$api',
                          data='stuff',
                          content_type='nonsense',
                          HTTP_AUTHORIZATION=authkey.header)
    assert response.status_code == 400
    assert_shared_cache_header(response)
def test_list_review(edit_revision, client):
    """The documents needing review feed shows documents needing any review."""
    feed_url = reverse('wiki.feeds.list_review', kwargs={'format': 'json'})

    def fetch_entries():
        resp = client.get(feed_url)
        assert resp.status_code == 200
        return resp, json.loads(resp.content)

    resp, entries = fetch_entries()
    assert_shared_cache_header(resp)
    assert len(entries) == 0
    # Flagging the revision for editorial review adds it to the feed.
    edit_revision.review_tags.add('editorial')
    _, entries = fetch_entries()
    assert len(entries) == 1
def test_compare_revisions(edit_revision, client, raw):
    """Comparing two valid revisions of the same document works."""
    doc = edit_revision.document
    first_revision = doc.revisions.first()
    params = {'from': first_revision.id, 'to': edit_revision.id}
    if raw:
        params['raw'] = '1'
    compare_url = reverse('wiki.compare_revisions', args=[doc.slug])
    response = client.get(urlparams(compare_url, **params))
    assert response.status_code == 200
    assert_shared_cache_header(response)
    # Revision diffs are excluded from search-engine indexing.
    assert response['X-Robots-Tag'] == 'noindex'
def test_legacy_redirect(client, file_attachment):
    """A legacy MindTouch file URL issues a permanent redirect."""
    attachment_file = file_attachment['file']
    url = reverse('attachments.mindtouch_file_redirect',
                  args=(),
                  kwargs={'file_id': attachment_file['id'],
                          'filename': attachment_file['name']})
    response = client.get(url)
    assert response.status_code == 301
    assert_shared_cache_header(response)
    assert response['Location'] == file_attachment['attachment'].get_file_url()
    assert not response.has_header('Vary')
def test_compare_revisions(edit_revision, client, raw):
    """Comparing two valid revisions of the same document works."""
    doc = edit_revision.document
    first_revision = doc.revisions.first()
    params = {'from': first_revision.id, 'to': edit_revision.id}
    if raw:
        params['raw'] = '1'
    url = urlparams(reverse('wiki.compare_revisions', args=[doc.slug]),
                    **params)
    response = client.get(url, HTTP_HOST=settings.WIKI_HOST)
    assert response.status_code == 200
    # Revision diffs are excluded from search-engine indexing.
    assert response['X-Robots-Tag'] == 'noindex'
    assert_shared_cache_header(response)
def test_recent_revisions_limit_0(edit_revision, client):
    """
    For the revisions feed, a limit of 0 gets no results.

    TODO: the limit should probably be MAX_FEED_ITEMS instead, and applied
    before the start and finish positions are picked.
    """
    feed_url = reverse('wiki.feeds.recent_revisions', kwargs={'format': 'rss'})
    response = client.get(feed_url, {'limit': 0})
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert len(pq(response.content).find('item')) == 0
def test_legacy_redirect(client, file_attachment):
    """A MindTouch file URL permanently redirects to the current file."""
    info = file_attachment["file"]
    url = reverse(
        "attachments.mindtouch_file_redirect",
        args=(),
        kwargs={"file_id": info["id"], "filename": info["name"]},
    )
    response = client.get(url)
    assert response.status_code == 301
    assert_shared_cache_header(response)
    assert response["Location"] == file_attachment["attachment"].get_file_url()
    assert not response.has_header("Vary")
def test_robots_all_allowed_wiki_website(client, settings):
    """On the wiki website, allow robots with NO restrictions."""
    host = "main.mdn.moz.works"
    wiki_host = "wiki." + host
    settings.WIKI_HOST = wiki_host
    settings.ALLOWED_HOSTS = [host, wiki_host]
    settings.ALLOW_ROBOTS_WEB_DOMAINS = [host, wiki_host]
    response = client.get(reverse("robots_txt"), HTTP_HOST=wiki_host)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert response["Content-Type"] == "text/plain"
    body = response.content
    assert b"Sitemap: " in body
    # A bare "Disallow:" permits everything; no path is blocked.
    assert b"Disallow:\n" in body
    assert b"Disallow: /" not in body
def test_recent_revisions_limit_0(edit_revision, client):
    """
    For the revisions feed, a limit of 0 gets no results.

    TODO: the limit should probably be MAX_FEED_ITEMS instead, and applied
    before the start and finish positions are picked.
    """
    feed_url = reverse('wiki.feeds.recent_revisions', kwargs={'format': 'rss'})
    resp = client.get(feed_url, {'limit': 0})
    assert resp.status_code == 200
    assert_shared_cache_header(resp)
    feed_items = pq(resp.content).find('item')
    assert len(feed_items) == 0
def test_autosuggest(client, redirect_doc, doc_hierarchy, locale_case, term):
    """Verify the autosuggest endpoint across the locale filter cases."""
    params = {}
    expected_status_code = 200
    if term:
        params["term"] = term
    else:
        # A missing search term is a bad request.
        expected_status_code = 400

    if locale_case == "non-english-locale":
        params["locale"] = "it"
        expected_titles = {"Superiore Documento"}
    elif locale_case == "current-locale":
        params["current_locale"] = "true"
        # The root document is pulled-in by the redirect_doc fixture.
        expected_titles = {
            "Root Document",
            "Top Document",
            "Middle-Top Document",
            "Middle-Bottom Document",
            "Bottom Document",
        }
    elif locale_case == "exclude-current-locale":
        params["exclude_current_locale"] = "true"
        expected_titles = {"Haut Document", "Superiore Documento"}
    else:  # All locales
        # The root document is pulled-in by the redirect_doc fixture.
        expected_titles = {
            "Root Document",
            "Top Document",
            "Haut Document",
            "Superiore Documento",
            "Middle-Top Document",
            "Middle-Bottom Document",
            "Bottom Document",
        }

    url = reverse("wiki.autosuggest_documents")
    if params:
        url += "?{}".format(urlencode(params))
    with override_switch("application_ACAO", True):
        response = client.get(url)
    assert response.status_code == expected_status_code
    assert_shared_cache_header(response)
    assert "Access-Control-Allow-Origin" in response
    assert response["Access-Control-Allow-Origin"] == "*"
    if expected_status_code == 200:
        assert response["Content-Type"] == "application/json"
        titles = {item["title"] for item in json.loads(response.content)}
        assert titles == expected_titles
def test_list_review_tag(edit_revision, client):
    """The documents needing editorial review feed works."""
    feed_url = reverse(
        "wiki.feeds.list_review_tag",
        kwargs={"format": "json", "tag": "editorial"},
    )
    response = client.get(feed_url)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert len(json.loads(response.content)) == 0
    # Tagging the revision for editorial review adds it to the feed.
    edit_revision.review_tags.add("editorial")
    response = client.get(feed_url)
    assert response.status_code == 200
    assert len(json.loads(response.content)) == 1
def test_recent_revisions_feed_omits_docs_without_rev(edit_revision, client):
    """Documents without a current revision are omitted from the feed."""
    feed_url = reverse("wiki.feeds.recent_documents", args=(),
                       kwargs={"format": "rss"})

    def fetch_item_count():
        resp = client.get(feed_url)
        assert resp.status_code == 200
        return resp, len(pq(resp.content).find("item"))

    resp, count = fetch_item_count()
    assert_shared_cache_header(resp)
    assert count == 1
    # A document created with no revision must not change the feed.
    Document.objects.create(locale="en-US", slug="NoCurrentRev",
                            title="No Current Rev")
    _, count = fetch_item_count()
    assert count == 1
def test_list_top_level(redirect_doc, root_doc, doc_hierarchy, client, locale):
    """The top-level listing shows only top-level docs for the locale."""
    if locale == "en-US":
        exp_docs = (root_doc, doc_hierarchy.top)
    else:
        exp_docs = (doc_hierarchy.top.translated_to(locale),)
    url = reverse("wiki.top_level", locale=locale)
    response = client.get(url, HTTP_HOST=settings.WIKI_HOST)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert "text/html" in response["Content-Type"]
    page = pq(response.content)
    assert len(page.find(".document-list li")) == len(exp_docs)
    selector = 'ul.document-list li a[href="/{}/docs/{}"]'
    for doc in exp_docs:
        assert len(page.find(selector.format(doc.locale, doc.slug))) == 1
def test_list_top_level(redirect_doc, root_doc, doc_hierarchy, client, locale):
    """The top-level listing shows only top-level docs for the locale."""
    exp_docs = ((root_doc, doc_hierarchy.top) if locale == 'en-US'
                else (doc_hierarchy.top.translated_to(locale),))
    response = client.get(reverse('wiki.top_level', locale=locale))
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert 'text/html' in response['Content-Type']
    page = pq(response.content)
    assert len(page.find('.document-list li')) == len(exp_docs)
    selector = 'ul.document-list li a[href="/{}/docs/{}"]'
    for doc in exp_docs:
        assert len(page.find(selector.format(doc.locale, doc.slug))) == 1
def test_compare_translation(trans_revision, client, raw):
    """A localized revision can be compared to an English source revision."""
    fr_doc = trans_revision.document
    en_revision = trans_revision.based_on
    en_doc = en_revision.document
    # Sanity check: source and translation are distinct documents.
    assert en_doc != fr_doc
    params = {'from': en_revision.id, 'to': trans_revision.id}
    if raw:
        params['raw'] = '1'
    compare_url = reverse('wiki.compare_revisions', args=[fr_doc.slug],
                          locale=fr_doc.locale)
    response = client.get(urlparams(compare_url, **params))
    assert response.status_code == 200
    assert_shared_cache_header(response)
    # Revision diffs are excluded from search-engine indexing.
    assert response['X-Robots-Tag'] == 'noindex'
def test_list_top_level(redirect_doc, root_doc, doc_hierarchy, client, locale):
    """Only top-level documents of the requested locale are listed."""
    if locale == 'en-US':
        expected = (root_doc, doc_hierarchy.top)
    else:
        expected = (doc_hierarchy.top.translated_to(locale),)
    response = client.get(reverse('wiki.top_level', locale=locale))
    assert response.status_code == 200
    assert_shared_cache_header(response)
    assert 'text/html' in response['Content-Type']
    dom = pq(response.content)
    assert len(dom.find('.document-list li')) == len(expected)
    selector = 'ul.document-list li a[href="/{}/docs/{}"]'
    for doc in expected:
        assert len(dom.find(selector.format(doc.locale, doc.slug))) == 1
def test_compare_translation(trans_revision, client, raw):
    """A localized revision can be compared to an English source revision."""
    fr_doc = trans_revision.document
    en_revision = trans_revision.based_on
    en_doc = en_revision.document
    # The source and its translation must be different documents.
    assert en_doc != fr_doc
    params = {'from': en_revision.id, 'to': trans_revision.id}
    if raw:
        params['raw'] = '1'
    url = urlparams(reverse('wiki.compare_revisions', args=[fr_doc.slug],
                            locale=fr_doc.locale),
                    **params)
    response = client.get(url)
    assert response.status_code == 200
    # Diff pages are kept out of search indexes.
    assert response['X-Robots-Tag'] == 'noindex'
    assert_shared_cache_header(response)
def test_json(doc_hierarchy_with_zones, client, params_case):
    """Test the wiki.json endpoint."""
    Switch.objects.create(name='application_ACAO', active=True)
    top_doc = doc_hierarchy_with_zones.top
    bottom_doc = doc_hierarchy_with_zones.bottom
    expected_tags = sorted(['foo', 'bar', 'baz'])
    expected_review_tags = sorted(['tech', 'editorial'])
    for doc in (top_doc, bottom_doc):
        doc.tags.set(*expected_tags)
        doc.current_revision.review_tags.set(*expected_review_tags)

    expected_slug = None
    expected_status_code = 200
    if params_case == 'nothing':
        params = None
        expected_status_code = 400
    elif params_case == 'title-only':
        expected_slug = top_doc.slug
        params = dict(title=top_doc.title)
    elif params_case == 'slug-only':
        expected_slug = bottom_doc.slug
        params = dict(slug=bottom_doc.slug)
    elif params_case == 'title-and-slug':
        # The title wins when both title and slug are supplied.
        expected_slug = top_doc.slug
        params = dict(title=top_doc.title, slug=bottom_doc.slug)
    else:  # missing title
        expected_status_code = 404
        params = dict(title='nonexistent document title')

    url = reverse('wiki.json', locale='en-US')
    response = client.get(url, params)
    assert response.status_code == expected_status_code
    if response.status_code == 404:
        assert_no_cache_header(response)
    else:
        assert_shared_cache_header(response)
        assert response['Access-Control-Allow-Origin'] == '*'
    if response.status_code == 200:
        data = json.loads(response.content)
        assert data['slug'] == expected_slug
        assert sorted(data['tags']) == expected_tags
        assert sorted(data['review_tags']) == expected_review_tags
def test_recent_documents_optional_items(create_revision, client, settings):
    """The recent documents JSON feed includes some items if set."""
    feed_url = reverse("wiki.feeds.recent_documents", kwargs={"format": "json"})
    response = client.get(feed_url)
    assert response.status_code == 200
    assert_shared_cache_header(response)
    entries = json.loads(response.content)
    assert len(entries) == 1
    assert entries[0]["author_avatar"] == settings.DEFAULT_AVATAR
    assert "summary" not in entries[0]
    # Once the revision has a summary, the feed entry includes it.
    create_revision.summary = "The summary"
    create_revision.save()
    response = client.get(feed_url)
    assert response.status_code == 200
    entries = json.loads(response.content)
    assert entries[0]["summary"] == "The summary"
def test_json(doc_hierarchy, client, params_case):
    """Test the wiki.json endpoint."""
    top_doc = doc_hierarchy.top
    bottom_doc = doc_hierarchy.bottom
    expected_tags = sorted(["foo", "bar", "baz"])
    expected_review_tags = sorted(["tech", "editorial"])
    for doc in (top_doc, bottom_doc):
        doc.tags.set(*expected_tags)
        doc.current_revision.review_tags.set(*expected_review_tags)

    expected_slug = None
    expected_status_code = 200
    if params_case == "nothing":
        params = None
        expected_status_code = 400
    elif params_case == "title-only":
        expected_slug = top_doc.slug
        params = dict(title=top_doc.title)
    elif params_case == "slug-only":
        expected_slug = bottom_doc.slug
        params = dict(slug=bottom_doc.slug)
    elif params_case == "title-and-slug":
        # The title wins when both title and slug are supplied.
        expected_slug = top_doc.slug
        params = dict(title=top_doc.title, slug=bottom_doc.slug)
    else:  # missing title
        expected_status_code = 404
        params = dict(title="nonexistent document title")

    url = reverse("wiki.json")
    with override_switch("application_ACAO", True):
        response = client.get(url, params)
    assert response.status_code == expected_status_code
    if response.status_code == 404:
        assert_no_cache_header(response)
    else:
        assert_shared_cache_header(response)
        assert response["Access-Control-Allow-Origin"] == "*"
    if response.status_code == 200:
        data = json.loads(response.content)
        assert data["slug"] == expected_slug
        assert sorted(data["tags"]) == expected_tags
        assert sorted(data["review_tags"]) == expected_review_tags
def test_json(doc_hierarchy, client, params_case):
    """Test the wiki.json endpoint.

    NOTE(review): this duplicates an identical ``test_json`` defined earlier
    in the file (differing only in quote style); Python keeps only this later
    definition, so the earlier copy is dead — confirm and delete one.
    """
    top_doc = doc_hierarchy.top
    bottom_doc = doc_hierarchy.bottom
    expected_tags = sorted(['foo', 'bar', 'baz'])
    expected_review_tags = sorted(['tech', 'editorial'])
    for doc in (top_doc, bottom_doc):
        doc.tags.set(*expected_tags)
        doc.current_revision.review_tags.set(*expected_review_tags)

    # Map each parametrized case to (expected slug, query params, status).
    expected_slug, params, expected_status_code = None, None, 200
    if params_case == 'nothing':
        expected_status_code = 400
    elif params_case == 'title-only':
        expected_slug, params = top_doc.slug, dict(title=top_doc.title)
    elif params_case == 'slug-only':
        expected_slug, params = bottom_doc.slug, dict(slug=bottom_doc.slug)
    elif params_case == 'title-and-slug':
        # Title takes precedence when both parameters are supplied.
        expected_slug = top_doc.slug
        params = dict(title=top_doc.title, slug=bottom_doc.slug)
    else:  # missing title
        expected_status_code = 404
        params = dict(title='nonexistent document title')

    url = reverse('wiki.json')
    with override_switch('application_ACAO', True):
        response = client.get(url, params)

    assert response.status_code == expected_status_code
    if response.status_code == 404:
        assert_no_cache_header(response)
    else:
        assert_shared_cache_header(response)
        assert response['Access-Control-Allow-Origin'] == '*'
    if response.status_code == 200:
        data = json.loads(response.content)
        assert data['slug'] == expected_slug
        assert sorted(data['tags']) == expected_tags
        assert sorted(data['review_tags']) == expected_review_tags
def test_compare_translation(trans_revision, client, raw):
    """A localized revision can be compared to an English source revision."""
    fr_doc = trans_revision.document
    en_revision = trans_revision.based_on
    # The source revision belongs to a different (English) document.
    assert en_revision.document != fr_doc

    params = {"from": en_revision.id, "to": trans_revision.id}
    if raw:
        params["raw"] = "1"
    compare_url = reverse(
        "wiki.compare_revisions", args=[fr_doc.slug], locale=fr_doc.locale)
    response = client.get(urlparams(compare_url, **params),
                          HTTP_HOST=settings.WIKI_HOST)

    assert response.status_code == 200
    assert response["X-Robots-Tag"] == "noindex"
    assert_shared_cache_header(response)
def test_macros(mock_usage, client, db):
    """The normal macro page is a three-column table."""
    # One active macro reported by the (mocked) usage helper.
    mock_usage.return_value = {
        'A11yRoleQuicklinks': {
            'github_subpath': 'A11yRoleQuicklinks.ejs',
            'count': 100,
            'en_count': 50,
        }
    }

    response = client.get(reverse('dashboards.macros'))
    assert response.status_code == 200
    assert 'Cookie' in response['Vary']
    assert_shared_cache_header(response)

    body = response.content.decode('utf8')
    assert "Found 1 active macro." in body
    page = pq(response.content)
    assert len(page("table.macros-table")) == 1
    assert len(page("th.stat-header")) == 2
def test_recent_revisions_feed_omits_docs_without_rev(edit_revision, client):
    """Documents without a current revision are omitted from the feed."""
    # Despite the test name, this exercises the recent *documents* RSS feed.
    feed_url = reverse('wiki.feeds.recent_documents', args=(),
                       kwargs={'format': 'rss'})

    initial = client.get(feed_url)
    assert initial.status_code == 200
    assert_shared_cache_header(initial)
    assert len(pq(initial.content).find('item')) == 1

    # A document that never got a revision must not add a feed item.
    Document.objects.create(locale='en-US', slug='NoCurrentRev',
                            title='No Current Rev')
    after = client.get(feed_url)
    assert after.status_code == 200
    assert len(pq(after.content).find('item')) == 1
def test_recent_documents_handles_ambiguous_time(root_doc, client):
    """The recent_documents feed handles times during DST transition."""
    # This wall-clock time occurs twice during the US DST fall-back,
    # so make_aware() cannot resolve it unambiguously.
    ambiguous = datetime(2017, 11, 5, 1, 8, 42)
    with pytest.raises(AmbiguousTimeError):
        make_aware(ambiguous)

    root_doc.current_revision = Revision.objects.create(
        document=root_doc,
        creator=root_doc.current_revision.creator,
        content='<p>Happy Daylight Savings Time!</p>',
        title=root_doc.title,
        created=ambiguous)

    feed_url = reverse('wiki.feeds.recent_documents',
                       kwargs={'format': 'json'})
    resp = client.get(feed_url)
    assert resp.status_code == 200
    assert_shared_cache_header(resp)
    assert len(json.loads(resp.content)) == 1
def test_list_without_parent(redirect_doc, root_doc, doc_hierarchy, client,
                             locale):
    """The without-parent listing shows only untranslated (parentless) docs."""
    if locale == 'en-US':
        exp_docs = (root_doc, doc_hierarchy.top, doc_hierarchy.middle_top,
                    doc_hierarchy.middle_bottom, doc_hierarchy.bottom)
    else:
        # Every translation has a parent, so non-English lists are empty.
        exp_docs = ()

    resp = client.get(reverse('wiki.without_parent', locale=locale),
                      HTTP_HOST=settings.WIKI_HOST)
    assert resp.status_code == 200
    assert_shared_cache_header(resp)
    assert 'text/html' in resp['Content-Type']

    dom = pq(resp.content)
    assert len(dom.find('.document-list li')) == len(exp_docs)
    link = 'ul.document-list li a[href="/{}/docs/{}"]'
    for doc in exp_docs:
        assert len(dom.find(link.format(doc.locale, doc.slug))) == 1