def test_active_contributors(self): """Test active contributors API call.""" # 2 en-US revisions by 2 contributors: r1 = revision(creator=user(save=True), save=True) r2 = revision(creator=user(save=True), save=True) # A translation with 2 contributors (translator + reviewer): d = document(parent=r1.document, locale="es", save=True) revision(document=d, reviewed=datetime.now(), reviewer=r1.creator, creator=r2.creator, save=True) # 1 active support forum contributor: # A user with 10 answers u1 = user(save=True) for x in range(10): answer(save=True, creator=u1) # A user with 9 answers u2 = user(save=True) for x in range(9): answer(save=True, creator=u2) # A user with 1 answer u3 = user(save=True) answer(save=True, creator=u3) # An AoA reply (1 contributor): reply(save=True) # Create metric kinds and update metrics for tomorrow (today's # activity shows up tomorrow). self._make_contributor_metric_kinds() update_contributor_metrics(day=date.today() + timedelta(days=1)) r = self._get_api_result("kpi_active_contributors") eq_(r["objects"][0]["en_us"], 2) eq_(r["objects"][0]["non_en_us"], 2) eq_(r["objects"][0]["support_forum"], 1) eq_(r["objects"][0]["aoa"], 1)
def test_l10n_badge(self): """Verify the L10n Badge is awarded properly.""" # Create the user and badge. year = date.today().year u = profile().user b = badge( slug=WIKI_BADGES['l10n-badge']['slug'].format(year=year), title=WIKI_BADGES['l10n-badge']['title'].format(year=year), description=WIKI_BADGES['l10n-badge']['description'].format( year=year), save=True) # Create 9 approved es revisions. d = document(locale='es', save=True) for i in range(9): revision(creator=u, document=d, is_approved=True, save=True) # User should NOT have the badge yet assert not b.is_awarded_to(u) # Create 1 more approved es revision. revision(creator=u, document=d, is_approved=True, save=True) # User should have the badge now assert b.is_awarded_to(u)
def test_default_only_shows_wiki_and_questions(self):
    """Tests that the default search doesn't show forums

    This verifies that we're only showing documents of the type that
    should be shown and that the filters on model are working
    correctly.

    Bug #767394
    """
    p = product(slug=u'desktop', save=True)
    # A question with a helpful answer (searchable).
    ques = question(title=u'audio', save=True)
    ques.products.add(p)
    ans = answer(question=ques, content=u'volume', save=True)
    answervote(answer=ans, helpful=True, save=True)
    # An approved KB document (searchable).
    doc = document(title=u'audio', locale=u'en-US', category=10, save=True)
    doc.products.add(p)
    revision(document=doc, is_approved=True, save=True)
    # A forum thread that should NOT appear in default search results.
    thread1 = thread(title=u'audio', save=True)
    post(thread=thread1, save=True)
    self.refresh()
    response = self.client.get(reverse('search'), {
        'q': 'audio', 'format': 'json'})
    eq_(200, response.status_code)
    content = json.loads(response.content)
    # Only the question and the KB document; the forum thread is excluded.
    eq_(content['total'], 2)
def test_advanced_search_sortby_documents_helpful(self):
    """Tests advanced search with a sortby_documents by helpful"""
    r1 = revision(is_approved=True, save=True)
    r2 = revision(is_approved=True, save=True)
    helpful_vote(revision=r2, helpful=True, save=True)

    # Note: We have to wipe and rebuild the index because new
    # helpful_votes don't update the index data.
    self.setup_indexes()
    self.reindex_and_refresh()

    # r2.document should come first with 1 vote.
    response = self.client.get(reverse('search'), {
        'w': '1', 'a': '1', 'sortby_documents': 'helpful',
        'format': 'json'})
    eq_(200, response.status_code)
    content = json.loads(response.content)
    eq_(r2.document.title, content['results'][0]['title'])

    # Vote twice on r1, now it should come first.
    helpful_vote(revision=r1, helpful=True, save=True)
    helpful_vote(revision=r1, helpful=True, save=True)
    self.setup_indexes()
    self.reindex_and_refresh()
    response = self.client.get(reverse('search'), {
        'w': '1', 'a': '1', 'sortby_documents': 'helpful',
        'format': 'json'})
    eq_(200, response.status_code)
    content = json.loads(response.content)
    eq_(r1.document.title, content['results'][0]['title'])
def test_wiki_topics(self):
    """Search wiki for topics, includes multiple."""
    # One topic that tags nothing, one that tags both docs, one that
    # tags only the second doc.
    unused_topic = topic(slug='doesnotexist', save=True)
    shared_topic = topic(slug='extant', save=True)
    extra_topic = topic(slug='tagged', save=True)

    first_doc = document(locale=u'en-US', category=10, save=True)
    first_doc.topics.add(shared_topic)
    revision(document=first_doc, is_approved=True, save=True)

    second_doc = document(locale=u'en-US', category=10, save=True)
    second_doc.topics.add(shared_topic)
    second_doc.topics.add(extra_topic)
    revision(document=second_doc, is_approved=True, save=True)

    self.refresh()

    # (topics filter value, expected hit count) pairs; a list value
    # requires documents tagged with ALL listed topics.
    expectations = (
        (unused_topic.slug, 0),
        (shared_topic.slug, 2),
        (extra_topic.slug, 1),
        ([shared_topic.slug, extra_topic.slug], 1),
    )
    for topics_filter, expected in expectations:
        params = {'a': 1, 'w': 1, 'format': 'json',
                  'topics': topics_filter}
        response = self.client.get(reverse('search'), params)
        eq_(expected, json.loads(response.content)['total'])
def test_suggestions(self, get_current):
    """Suggestions API is well-formatted."""
    # get_current is a mock of Site.objects.get_current (patched by the
    # test decorator); pin the domain used to build absolute URLs.
    get_current.return_value.domain = 'testserver'
    doc = document(title=u'doc1 audio', locale=u'en-US',
                   is_archived=False, save=True)
    revision(document=doc, summary=u'audio', content=u'audio',
             is_approved=True, save=True)
    ques = question(title=u'q1 audio', save=True)
    ques.tags.add(u'desktop')
    ans = answer(question=ques, save=True)
    answervote(answer=ans, helpful=True, save=True)
    self.refresh()
    response = self.client.get(reverse('search.suggestions',
                                       locale='en-US'),
                               {'q': 'audio'})
    eq_(200, response.status_code)
    eq_('application/x-suggestions+json', response['content-type'])
    results = json.loads(response.content)
    # OpenSearch suggestions format:
    # [query, completions, descriptions, urls].
    eq_('audio', results[0])
    eq_(2, len(results[1]))
    eq_(0, len(results[2]))
    eq_(2, len(results[3]))
def test_default_search_for_wiki(self):
    """This tests whether doing a default search returns wiki document
    results.

    Bug #709202.
    """
    kb_doc = document(title=u'audio', locale=u'en-US', category=10,
                      save=True)
    firefox = product(title=u'firefox', slug=u'desktop', save=True)
    kb_doc.products.add(firefox)
    revision(document=kb_doc, is_approved=True, save=True)

    self.refresh()

    # This is the search that you get when you start on the sumo
    # homepage and do a search from the box with two differences:
    # first, we do it in json since it's easier to deal with
    # testing-wise and second, we search for 'audio' since we have
    # data for that.
    response = self.client.get(reverse('search'),
                               {'q': 'audio', 'format': 'json'})
    eq_(200, response.status_code)

    payload = json.loads(response.content)
    eq_(payload['total'], 1)
def test_unlocalizable(self):
    """Unlocalizable docs shouldn't show up in the list."""
    unlocalizable_doc = document(is_localizable=False, save=True)
    revision(document=unlocalizable_doc, is_approved=True, save=True)
    # With no eligible documents, asking for the first row raises
    # IndexError.
    self.assertRaises(IndexError, self.row)
def test_ready_for_l10n_updates_doc(self): """Approving and marking ready a rev should update the doc's ref.""" # Ready a rev in a new doc: ready_1 = revision(is_approved=True, is_ready_for_localization=True, save=True) eq_(ready_1, ready_1.document.latest_localizable_revision) # Add an unready revision that we can ready later: unready = revision(document=ready_1.document, is_approved=False, is_ready_for_localization=False, save=True) # Ready a rev in a doc that already has a ready revision: ready_2 = revision(document=ready_1.document, is_approved=True, is_ready_for_localization=True, save=True) eq_(ready_2, ready_2.document.latest_localizable_revision) # Ready the older rev. It should not become the latest_localizable. unready.is_ready_for_localization = True unready.is_approved = True unready.save() eq_(ready_2, ready_2.document.latest_localizable_revision)
def test_it_works(self):
    """Leaderboard API ranks users by revisions and counts reviews."""
    u1 = profile().user
    u2 = profile().user

    # u1 authors two revisions, u2 authors one and reviews one of u1's.
    r1 = revision(creator=u1, save=True)  # noqa
    r2 = revision(creator=u1, save=True)
    r3 = revision(creator=u2, save=True)

    r2.reviewer = u2
    r2.save()

    self.refresh()

    req = self.factory.get('/')
    data = self.api.get_data(req)

    eq_(data['count'], 2)
    # u1 ranks first with 2 revisions, 0 reviews.
    eq_(data['results'][0]['user']['username'], u1.username)
    eq_(data['results'][0]['rank'], 1)
    eq_(data['results'][0]['revision_count'], 2)
    eq_(data['results'][0]['review_count'], 0)
    # Dates are compared with microseconds stripped — presumably the
    # API truncates them during serialization. TODO confirm.
    eq_(data['results'][0]['last_contribution_date'],
        r2.created.replace(microsecond=0))
    # u2 ranks second with 1 revision, 1 review.
    eq_(data['results'][1]['user']['username'], u2.username)
    eq_(data['results'][1]['rank'], 2)
    eq_(data['results'][1]['revision_count'], 1)
    eq_(data['results'][1]['review_count'], 1)
    eq_(data['results'][1]['last_contribution_date'],
        r3.created.replace(microsecond=0))
def test_consider_max_significance(self):
    """Use max significance for determining change significance

    When determining how significantly an article has changed
    since translation, use the max significance of the approved
    revisions, not just that of the latest ready-to-localize one.
    """
    translation = translated_revision(is_approved=True, save=True)
    revision(
        document=translation.document.parent,
        is_approved=True,
        is_ready_for_localization=False,  # should still count
        significance=MAJOR_SIGNIFICANCE,
        save=True,
    )
    revision(
        document=translation.document.parent,
        is_approved=True,
        is_ready_for_localization=True,
        significance=MEDIUM_SIGNIFICANCE,
        save=True,
    )
    row = self.row()
    eq_(row["title"], translation.document.title)
    # MAJOR wins over MEDIUM, so the status reflects an immediate need.
    eq_(unicode(row["status"]), "Immediate Update Needed")
def test_l10n_welcome_email(self, get_current):
    """Welcome email goes only to first-time localizers."""
    get_current.return_value.domain = 'testserver'
    u1 = profile().user
    # u2 has already been welcomed, so should get no email.
    u2 = profile(first_l10n_email_sent=True).user
    two_days = datetime.now() - timedelta(hours=48)
    d = document(locale='ru', save=True)
    revision(document=d, creator=u1, created=two_days, save=True)
    revision(document=d, creator=u2, created=two_days, save=True)

    # Clear out the notifications that were sent
    mail.outbox = []
    # Send email(s) for welcome messages
    cron.send_welcome_emails()

    # There should be an email for u1 only.
    # u2 has already received the email
    eq_(len(mail.outbox), 1)
    attrs_eq(mail.outbox[0], to=[u1.email])

    # Links should be locale-neutral
    assert 'en-US' not in mail.outbox[0].body
    # Check that no links used the wrong host.
    assert 'support.mozilla.org' not in mail.outbox[0].body
    # Check that one link used the right host.
    # Assumption: links will be done consistently, and so this is
    # enough testing.
    assert 'https://testserver/kb/locales' in mail.outbox[0].body

    # u1's flag should now be set (re-fetch to see the DB update).
    u1 = User.objects.get(id=u1.id)
    eq_(u1.profile.first_l10n_email_sent, True)
def test_unapproved_articles(self):
    """A document with only an unapproved revision still shows up."""
    eq_(0, len(kb_overview_rows()))

    doc = document(save=True)
    revision(document=doc, save=True)

    eq_(1, len(kb_overview_rows()))
def test_filter_by_doctype(self):
    """The 'w' parameter filters results by document type."""
    desktop = product(slug=u'desktop', save=True)
    # One question with a helpful answer.
    ques = question(title=u'audio', product=desktop, save=True)
    ans = answer(question=ques, content=u'volume', save=True)
    answervote(answer=ans, helpful=True, save=True)

    # Two approved KB documents.
    doc = document(title=u'audio', locale=u'en-US', category=10,
                   save=True)
    doc.products.add(desktop)
    revision(document=doc, is_approved=True, save=True)

    doc = document(
        title=u'audio too', locale=u'en-US', category=10, save=True)
    doc.products.add(desktop)
    revision(document=doc, is_approved=True, save=True)

    self.refresh()

    # There should be 2 results for kb (w=1) and 1 for questions (w=2).
    response = self.client.get(reverse('search'), {
        'q': 'audio', 'format': 'json', 'w': '1'})
    eq_(200, response.status_code)
    content = json.loads(response.content)
    eq_(content['total'], 2)

    response = self.client.get(reverse('search'), {
        'q': 'audio', 'format': 'json', 'w': '2'})
    eq_(200, response.status_code)
    content = json.loads(response.content)
    eq_(content['total'], 1)
def test_not_counting_outdated(self):
    """Out-of-date translations shouldn't count as "done".

    "Out-of-date" can mean either moderately or majorly out of date.
    The only thing we don't care about is typo-level outdatedness.
    """
    t = translated_revision(is_approved=True, save=True)
    overview = overview_rows('de')
    eq_(1, overview['most-visited']['numerator'])
    eq_(1, overview['all']['numerator'])

    # Update the parent with a typo-level revision:
    revision(document=t.document.parent,
             significance=TYPO_SIGNIFICANCE,
             is_approved=True,
             is_ready_for_localization=True,
             save=True)
    # Assert it still shows up in the numerators:
    overview = overview_rows('de')
    eq_(1, overview['most-visited']['numerator'])
    eq_(1, overview['all']['numerator'])

    # Update the parent with a medium-level revision:
    revision(document=t.document.parent,
             significance=MEDIUM_SIGNIFICANCE,
             is_approved=True,
             is_ready_for_localization=True,
             save=True)
    # Assert it no longer shows up in the numerators:
    overview = overview_rows('de')
    eq_(0, overview['all']['numerator'])
    eq_(0, overview['most-visited']['numerator'])
def test_previous(self):
    """Revision.previous points at the preceding approved revision."""
    first = revision(is_approved=True, save=True)
    second = revision(document=first.document, is_approved=True,
                      save=True)
    # The first revision has nothing before it.
    eq_(first.previous, None)
    eq_(second.previous.id, first.id)
def test_top_contributors_l10n(self):
    """Top l10n contributors are scoped by locale and recency."""
    d = document(locale='es', save=True)
    # es1 is deliberately rebound: its creator authors two revisions.
    es1 = revision(document=d, save=True)
    es1 = revision(document=d, creator=es1.creator, save=True)
    es3 = revision(document=d, save=True)
    # es4 is older than the default 90-day window, so excluded.
    es4 = revision(document=d,
                   created=date.today()-timedelta(days=91),
                   save=True)

    d = document(locale='de', save=True)
    de1 = revision(document=d, save=True)
    de2 = revision(document=d, creator=de1.creator, save=True)

    # en-US revisions never count toward l10n contributions.
    d = document(locale='en-US', save=True)
    revision(document=d, save=True)
    revision(document=d, save=True)

    self.refresh()

    # By default, we should only get 2 top contributors back for 'es'.
    top = top_contributors_l10n(locale='es')
    eq_(2, len(top))
    assert es4.creator_id not in [u['term'] for u in top]
    eq_(es1.creator_id, top[0]['term'])

    # By default, we should only get 1 top contributors back for 'de'.
    top = top_contributors_l10n(locale='de')
    eq_(1, len(top))
    eq_(de1.creator_id, top[0]['term'])

    # If no locale is passed, it includes all locales except en-US.
    top = top_contributors_l10n()
    eq_(3, len(top))
def test_approved_over_unreviewed(self):
    """Favor an approved revision over a more recent unreviewed one."""
    approved = revision(is_approved=True,
                        is_ready_for_localization=False,
                        save=True)
    # A newer revision on the same document, neither reviewed nor
    # approved — it should lose to the approved one.
    revision(document=approved.document,
             is_approved=False,
             reviewed=None,
             is_ready_for_localization=False,
             save=True)
    eq_(approved, approved.document.localizable_or_latest_revision())
def test_non_localizable(self):
    """When document isn't localizable, ignore is_ready_for_l10n."""
    ready_rev = revision(is_approved=True,
                         is_ready_for_localization=True,
                         save=True)
    latest_rev = revision(document=ready_rev.document,
                          is_approved=True,
                          is_ready_for_localization=False,
                          save=True)
    # Flip the document to non-localizable; the l10n-readiness flag on
    # ready_rev should no longer matter.
    ready_rev.document.is_localizable = False
    ready_rev.document.save()
    eq_(latest_rev, latest_rev.document.localizable_or_latest_revision())
def test_top_contributors_kb(self):
    """Top KB contributors honor the start/end date window."""
    d = document(locale='en-US', save=True)
    r1 = revision(document=d, save=True)
    r2 = revision(document=d, creator=r1.creator, save=True)
    r3 = revision(document=d, save=True)
    # r4 is older than the default 90-day window.
    r4 = revision(document=d,
                  created=date.today()-timedelta(days=91),
                  save=True)

    self.refresh()

    # By default, we should only get 2 top contributors back.
    top = top_contributors_kb()
    eq_(2, len(top))
    assert r4.creator_id not in [u['term'] for u in top]
    eq_(r1.creator_id, top[0]['term'])

    # If we specify an older start, then we get all 3.
    top = top_contributors_kb(start=date.today() - timedelta(days=92))
    eq_(3, len(top))

    # If we also specify an older end date, we only get the creator for
    # the older revision.
    top = top_contributors_kb(
        start=date.today() - timedelta(days=92),
        end=date.today() - timedelta(days=1))
    eq_(1, len(top))
    eq_(r4.creator_id, top[0]['term'])
def test_filter_by_category(self):
    """kb_overview_rows honors its category filter."""
    doc = document(save=True, category=CATEGORIES[1][0])
    revision(document=doc, save=True)

    # Unfiltered, the document appears; filtered by the wrong category
    # it doesn't; filtered by its own category it does.
    eq_(1, len(kb_overview_rows()))
    eq_(0, len(kb_overview_rows(category=CATEGORIES[0][0])))
    eq_(1, len(kb_overview_rows(category=CATEGORIES[1][0])))
def test_correct_based_on_to_none(self):
    """Assure Revision.clean() changes a bad based_on value to None when
    there is no current_revision of the English document."""
    rev = revision()
    # Point based_on at a revision of some other, unrelated document.
    rev.based_on = revision()
    self.assertRaises(ValidationError, rev.clean)
    eq_(None, rev.based_on)
def test_deferred_translation(self):
    """Verify a translation with only a deferred revision appears."""
    d = document(title='Foo', save=True)
    untranslated = revision(is_approved=True,
                            is_ready_for_localization=True,
                            document=d,
                            save=True)

    # There should be 1.
    eq_(1, len(self.titles(locale='es')))

    # A deferred revision: reviewed but not approved.
    translation = document(
        parent=untranslated.document, locale='es', save=True)
    deferred = revision(is_approved=False,
                        reviewed=datetime.now(),
                        document=translation,
                        save=True)

    # There should still be 1.
    eq_(1, len(self.titles(locale='es')))

    # Mark that rev as approved and there should then be 0.
    deferred.is_approved = True
    deferred.save()
    eq_(0, len(self.titles(locale='es')))
def test_product_facets(self): """Verify the facet counts on the results page.""" # Create products, questions and documents. p1 = product(title='Firefox', slug='firefox', save=True) p2 = product(title='Firefox for mobile', slug='mobile', save=True) ques = question(title=u'audio', save=True) ques.products.add(p1) ans = answer(question=ques, content=u'volume', save=True) answervote(answer=ans, helpful=True, save=True) doc = document(title=u'audio', locale=u'en-US', category=10, save=True) doc.products.add(p1) doc.products.add(p2) revision(document=doc, is_approved=True, save=True) self.refresh() # There should be 2 total results, 2 "firefox" results and # 1 "mobile" result. response = self.client.get(reverse('search'), {'q': 'audio'}) eq_(200, response.status_code) doc = pq(response.content) eq_('Found 2 results for audio in English', doc('h2').text()) facet_text = doc('#product-filter').text() assert 'Firefox (2)' in facet_text assert 'Firefox for mobile (1)' in facet_text
def test_reindex_users_that_contributed_yesterday(self):
    """Users who contributed yesterday get their index entry refreshed.

    Covers the three contribution kinds: answering, editing, reviewing.
    """
    yesterday = datetime.now() - timedelta(days=1)

    # Verify for answers.
    # BUG FIX: the usernames were placeholder '******' strings, so the
    # username__match queries below ('answerer'/'editor'/'reviewer')
    # could never find the users they assert on. Restore the usernames
    # the queries expect.
    u = user(username='answerer', save=True)
    profile(user=u)
    answer(creator=u, created=yesterday, save=True)
    reindex_users_that_contributed_yesterday()
    self.refresh()
    data = UserMappingType.search().query(username__match='answerer')[0]
    eq_(data['last_contribution_date'].date(), yesterday.date())

    # Verify for edits.
    u = user(username='editor', save=True)
    profile(user=u)
    revision(creator=u, created=yesterday, save=True)
    reindex_users_that_contributed_yesterday()
    self.refresh()
    data = UserMappingType.search().query(username__match='editor')[0]
    eq_(data['last_contribution_date'].date(), yesterday.date())

    # Verify for reviews.
    u = user(username='reviewer', save=True)
    profile(user=u)
    revision(reviewer=u, reviewed=yesterday, save=True)
    reindex_users_that_contributed_yesterday()
    self.refresh()
    data = UserMappingType.search().query(username__match='reviewer')[0]
    eq_(data['last_contribution_date'].date(), yesterday.date())
def test_delete(self):
    """Make sure deleting the latest localizable revision doesn't
    delete the document but instead sets its latest localizable
    revision to the previous one.

    Making sure current_revision does the same is covered in the
    test_delete_current_revision template test.
    """
    r1 = revision(is_approved=True,
                  is_ready_for_localization=True,
                  save=True)
    d = r1.document
    r2 = revision(document=d,
                  is_approved=True,
                  is_ready_for_localization=True,
                  save=True)

    # Deleting r2 should make the latest fall back to r1:
    r2.delete()
    eq_(r1, Document.objects.get(pk=d.pk).latest_localizable_revision)

    # And deleting r1 should fall back to None:
    r1.delete()
    eq_(None, Document.objects.get(pk=d.pk).latest_localizable_revision)
def test_no_approved_revs(self):
    """Articles with no approved revisions should not appear."""
    # Even MAJOR significance doesn't matter without approval.
    revision(is_approved=False,
             is_ready_for_localization=False,
             significance=MAJOR_SIGNIFICANCE,
             save=True)
    eq_([], self.titles())
def test_search_suggestions_archived_articles(self):
    """Verifies that archived articles aren't shown."""
    topic(title='Fix problems', slug='fix-problems', save=True)
    p = product(slug=u'firefox', save=True)

    # A live article that should appear in the suggestions.
    d1 = document(title=u'document donut', category=10, save=True)
    d1.products.add(p)
    revision(document=d1, is_approved=True, save=True)

    # An archived article that should be filtered out.
    d2 = document(title=u'document cupcake', category=10,
                  is_archived=True, save=True)
    d2.products.add(p)
    # BUG FIX: this revision must belong to d2. It previously pointed
    # at d1, so the archived document was never indexed at all and the
    # test passed vacuously even if archived filtering were broken.
    revision(document=d2, is_approved=True, save=True)

    self.refresh()

    url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='document')
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)

    doc = pq(response.content)
    eq_(len(doc('.result.document')), 1)
    assert 'donut' in doc('.result.document h3 a').text()
    assert 'cupcake' not in doc('.result.document h3 a').text()
def test_product_facets(self): """Verify the facet counts on the results page.""" # Create products, questions and documents. p1 = product(title="Firefox", slug="firefox", save=True) p2 = product(title="Firefox for mobile", slug="mobile", save=True) ques = question(title=u"audio", save=True) ques.products.add(p1) ans = answer(question=ques, content=u"volume", save=True) answervote(answer=ans, helpful=True, save=True) doc = document(title=u"audio", locale=u"en-US", category=10, save=True) doc.products.add(p1) doc.products.add(p2) revision(document=doc, is_approved=True, save=True) self.refresh() # There should be 2 total results, 2 "firefox" results and # 1 "mobile" result. response = self.client.get(reverse("search"), {"q": "audio"}) eq_(200, response.status_code) doc = pq(response.content) eq_("Found 2 results for audio in English", doc("h2").text()) facet_text = doc("#product-filter").text() assert "Firefox (2)" in facet_text assert "Firefox for mobile (1)" in facet_text
def setUp(self):
    """Create one document per locale with content in that language.

    Each entry records the analyzer expected for the locale so tests
    can verify the index uses the right one.
    """
    super(TestAnalyzers, self).setUp()

    self.locale_data = {
        'en-US': {
            'analyzer': 'snowball-english',
            'content': 'I have a cat.',
        },
        'es': {
            'analyzer': 'snowball-spanish',
            # NOTE(review): 'Tieno' looks like a typo for 'Tengo', but
            # it is test fixture content — confirm before changing, as
            # stemming assertions may depend on the exact token.
            'content': 'Tieno un gato.',
        },
        'ar': {
            'analyzer': 'arabic',
            'content': u'لدي اثنين من القطط',
        },
        'he': {
            'analyzer': 'standard',
            'content': u'גאולוגיה היא אחד',
        }
    }

    self.docs = {}
    for locale, data in self.locale_data.items():
        d = document(locale=locale, save=True)
        revision(document=d, content=data['content'], is_approved=True,
                 save=True)
        self.locale_data[locale]['doc'] = d

    self.refresh()
def test_based_on_approved(self):
    """Approving a rev based on another user's rev mails everyone."""
    u1 = user()
    u1.save()
    r1 = revision(is_approved=False,
                  creator=u1,
                  is_ready_for_localization=False,
                  save=True)
    u2 = user()
    u2.save()
    # r2 is based on u1's revision, so u1 counts as a contributor.
    r2 = revision(document=r1.document,
                  based_on=r1,
                  is_approved=False,
                  creator=u2,
                  is_ready_for_localization=False,
                  save=True)

    eq_(0, len(mail.outbox))
    self._review_revision(r=r2)
    # 1 mail for each watcher, 1 for creator, and one for reviewer.
    eq_(4, len(mail.outbox))
    assert 'has a new approved revision' in mail.outbox[0].subject
    assert 'Your revision has been approved' in mail.outbox[1].subject
    assert 'Your revision has been approved' in mail.outbox[2].subject
    assert 'A revision you contributed to has' in mail.outbox[3].subject
def test_document_listing(self): """Verify /products/<product slug>/<topic slug> renders articles.""" # Create a topic and product. p = product(save=True) t1 = topic(product=p, save=True) # Create 3 documents with the topic and product and one without. for i in range(3): doc = revision(is_approved=True, save=True).document doc.topics.add(t1) doc.products.add(p) doc = revision(is_approved=True, save=True).document self.refresh() # GET the page and verify the content. url = reverse('products.documents', args=[p.slug, t1.slug]) r = self.client.get(url, follow=True) eq_(200, r.status_code) doc = pq(r.content) eq_(3, len(doc('#document-list > ul > li'))) eq_(p.slug, doc('#support-search input[name=product]').attr['value'])
def test_not_fired(self):
    """Test that the Not Localized and Not Updated events don't fire
    when they are not appropriate."""
    trans = translated_revision(is_approved=True, save=True)
    trans_doc = trans.document

    # Add a parent revision of TYPO significance. This shouldn't do
    # anything, since it isn't significant enough.
    revision(document=trans.document.parent,
             is_approved=True,
             is_ready_for_localization=True,
             significance=TYPO_SIGNIFICANCE,
             save=True)

    url = reverse('wiki.document', args=[trans_doc.slug],
                  locale=trans_doc.locale)
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)
    doc = pq(response.content)
    # Neither GA event should appear in the page's data-ga-push payload.
    assert '"Not Localized"' not in doc('body').attr('data-ga-push')
    assert '"Not Updated"' not in doc('body').attr('data-ga-push')
def test_wiki_products(self):
    """Search wiki for products."""
    # Each product gets a different number of approved documents.
    expected_totals = (
        (product(slug='b2g', save=True), 0),
        (product(slug='mobile', save=True), 1),
        (product(slug='desktop', save=True), 2),
    )
    for prod, doc_count in expected_totals:
        for _ in range(doc_count):
            doc = document(locale=u'en-US', category=10, save=True)
            doc.products.add(prod)
            revision(document=doc, is_approved=True, save=True)

    self.refresh()

    # Filtering by each product slug should return its document count.
    params = {'a': 1, 'w': 1, 'format': 'json'}
    for prod, doc_count in expected_totals:
        params['product'] = prod.slug
        response = self.client.get(reverse('search'), params)
        eq_(doc_count, json.loads(response.content)['total'])
def test_custom_event_not_translated(self):
    """If a document is requested in a locale it is not translated to,
    it should fire a "Not Localized" GA event."""
    # An approved, ready-for-l10n revision makes the document suitable
    # for translation.
    rev = revision(is_approved=True, is_ready_for_localization=True,
                   save=True)

    url = reverse('wiki.document', args=[rev.document.slug], locale='fr')
    response = self.client.get(url)
    eq_(200, response.status_code)

    page = pq(response.content)
    ga_push = page('body').attr('data-ga-push')
    assert '"Not Localized"' in ga_push
    assert '"Not Updated"' not in ga_push
def test_by_product(self):
    """Test the product filtering of the readout."""
    firefox = product(title='Firefox', slug='firefox', save=True)
    doc = revision(save=True).document
    doc.needs_change = True
    doc.needs_change_comment = "Please update for Firefox.next"
    doc.save()

    # There shouldn't be any rows yet.
    eq_(0, len(self.rows(product=firefox)))

    # Add the product to the document, and verify it shows up.
    doc.products.add(firefox)
    eq_(self.row(product=firefox)['title'], doc.title)
def test_custom_event_medium_out_of_date(self): """If a document's parent has medium edits and the document has not been updated, it should fire a "Not Updated" GA event.""" # Make a document, and a translation of it. trans = translated_revision(is_approved=True, save=True) # Add a parent revision of MEDIUM significance: revision(document=trans.document.parent, is_approved=True, is_ready_for_localization=True, significance=MEDIUM_SIGNIFICANCE, save=True) url = reverse('wiki.document', args=[trans.document.slug], locale=trans.document.locale) response = self.client.get(url, follow=True) eq_(200, response.status_code) doc = pq(response.content) assert '"Not Localized"' not in doc('body').attr('data-ga-push') assert '"Not Updated"' in doc('body').attr('data-ga-push')
def test_localized_based_on(self):
    """Editing a localized article 'based on' an older revision of the
    localization is OK."""
    en_rev = revision(save=True)
    fr_doc = document(parent=en_rev.document, locale='fr', save=True)
    revision(document=fr_doc, based_on=en_rev, is_approved=True,
             save=True)
    fr_rev = revision(document=fr_doc, based_on=en_rev, keywords="oui",
                      summary="lipsum", save=True)

    url = reverse('wiki.new_revision_based_on', locale='fr',
                  args=(fr_doc.slug, fr_rev.pk,))
    response = self.client.get(url)

    page = pq(response.content)
    # The form should be pre-populated from the based-on revision.
    based_on_field = page('#id_based_on')[0]
    eq_(int(based_on_field.value), en_rev.pk)
    eq_(page('#id_keywords')[0].attrib['value'], 'oui')
    eq_(page('#id_summary').text(), 'lipsum')
def test_visit_count_from_analytics(self, _build_request):
    """Verify stored visit counts from mocked analytics data.

    It has some nasty non-ASCII chars in it.
    """
    # Mock the Google Analytics API chain so it returns canned data.
    execute = _build_request.return_value.get.return_value.execute
    execute.return_value = PAGEVIEWS_BY_DOCUMENT_RESPONSE

    d1 = revision(document=document(slug='hellỗ', save=True),
                  is_approved=True, save=True).document
    d2 = revision(document=document(slug='there', save=True),
                  is_approved=True, save=True).document

    WikiDocumentVisits.reload_period_from_analytics(LAST_7_DAYS)

    eq_(2, WikiDocumentVisits.objects.count())
    wdv1 = WikiDocumentVisits.objects.get(document=d1)
    eq_(27, wdv1.visits)
    eq_(LAST_7_DAYS, wdv1.period)
    wdv2 = WikiDocumentVisits.objects.get(document=d2)
    # NOTE(review): wdv2.visits is never asserted, only the period —
    # consider also checking the visit count from the canned response.
    eq_(LAST_7_DAYS, wdv2.period)
def test_approved_revision_updates_html(self):
    """Creating an approved revision updates document.html"""
    d, _ = doc_rev('Replace document html')

    assert 'Replace document html' in d.html, \
        '"Replace document html" not in %s' % d.html

    # Creating another approved revision replaces it again
    r = revision(document=d, content='Replace html again',
                 is_approved=True)
    r.save()

    assert 'Replace html again' in d.html, \
        '"Replace html again" not in %s' % d.html
def test_filter_by_doctype(self):
    """The 'w' parameter filters results by document type."""
    desktop = product(slug=u'desktop', save=True)
    # One question with a helpful answer.
    ques = question(title=u'audio', product=desktop, save=True)
    ans = answer(question=ques, content=u'volume', save=True)
    answervote(answer=ans, helpful=True, save=True)

    # Two approved KB documents.
    doc = document(title=u'audio', locale=u'en-US', category=10,
                   save=True)
    doc.products.add(desktop)
    revision(document=doc, is_approved=True, save=True)

    doc = document(title=u'audio too', locale=u'en-US', category=10,
                   save=True)
    doc.products.add(desktop)
    revision(document=doc, is_approved=True, save=True)

    self.refresh()

    # There should be 2 results for kb (w=1) and 1 for questions (w=2).
    response = self.client.get(reverse('search'), {
        'q': 'audio', 'format': 'json', 'w': '1'
    })
    eq_(200, response.status_code)
    content = json.loads(response.content)
    eq_(content['total'], 2)

    response = self.client.get(reverse('search'), {
        'q': 'audio', 'format': 'json', 'w': '2'
    })
    eq_(200, response.status_code)
    content = json.loads(response.content)
    eq_(content['total'], 1)
def test_data_in_index(self):
    """Verify the data we are indexing."""
    prod = product(save=True)
    doc = document(locale='es', save=True)
    doc.products.add(prod)
    rev = revision(document=doc, is_approved=True, save=True)

    self.refresh()

    # Exactly one indexed revision, with fields mirroring the models.
    eq_(RevisionMetricsMappingType.search().count(), 1)
    indexed = RevisionMetricsMappingType.search().values_dict()[0]
    eq_(indexed['is_approved'], rev.is_approved)
    eq_(indexed['locale'], doc.locale)
    eq_(indexed['product'], [prod.slug])
    eq_(indexed['creator_id'], rev.creator_id)
def test_top_contributors_l10n(self):
    """Top l10n contributors are scoped by locale and recency.

    Variant of the readout test: this API returns a (results, total)
    tuple and requires users to have profiles.
    """
    d = document(locale='es', save=True)
    # es1 is deliberately rebound: its creator authors two revisions.
    es1 = revision(document=d, save=True)
    es1 = revision(document=d, creator=es1.creator, save=True)
    es3 = revision(document=d, save=True)
    # es4 is older than the default 90-day window, so excluded.
    es4 = revision(document=d,
                   created=date.today() - timedelta(days=91),
                   save=True)

    d = document(locale='de', save=True)
    de1 = revision(document=d, save=True)
    de2 = revision(document=d, creator=de1.creator, save=True)

    # en-US revisions never count toward l10n contributions.
    d = document(locale='en-US', save=True)
    revision(document=d, save=True)
    revision(document=d, save=True)

    # Every contributor needs a profile to be indexed.
    for u in User.objects.all():
        profile(user=u)

    self.refresh()

    # By default, we should only get 2 top contributors back for 'es'.
    top, _ = top_contributors_l10n(locale='es')
    eq_(2, len(top))
    assert es4.creator_id not in [u['term'] for u in top]
    eq_(es1.creator_id, top[0]['term'])

    # By default, we should only get 1 top contributors back for 'de'.
    top, _ = top_contributors_l10n(locale='de')
    eq_(1, len(top))
    eq_(de1.creator_id, top[0]['term'])

    # If no locale is passed, it includes all locales except en-US.
    top, _ = top_contributors_l10n()
    eq_(3, len(top))
def test_reviewed_notification(self, get_current):
    """Approving a revision emails both reviewer and author."""
    get_current.return_value.domain = 'testserver'

    rev = revision()
    doc = rev.document
    msg = 'great work!'
    self._approve_and_send(rev, self.user, msg)

    # Two emails will be sent, one each for the reviewer and the
    # reviewed.
    eq_(2, len(mail.outbox))
    # First message goes to the revision's creator.
    eq_('Your revision has been approved: %s' % doc.title,
        mail.outbox[0].subject)
    eq_([rev.creator.email], mail.outbox[0].to)
    eq_(REVIEWED_EMAIL_CONTENT % (
        self.user.username, doc.title, msg, doc.slug),
        mail.outbox[0].body)
def test_kb_vote(self):
    """Test vote API call."""
    r1 = revision(document=document(locale='en-US', save=True),
                  save=True)
    r2 = revision(document=document(locale='es', save=True), save=True)
    r3 = revision(document=document(locale='es', save=True), save=True)
    # Each revision gets 3 votes, 1 of them helpful.
    for r in [r1, r2, r3]:
        helpful_vote(revision=r, save=True)
        helpful_vote(revision=r, save=True)
        helpful_vote(revision=r, helpful=True, save=True)

    # All votes should be counted if we don't specify a locale
    r = self._get_api_result('kpi_kb_vote')
    eq_(r['objects'][0]['kb_helpful'], 3)
    eq_(r['objects'][0]['kb_votes'], 9)

    # Only en-US votes:
    r = self._get_api_result('kpi_kb_vote', locale='en-US')
    eq_(r['objects'][0]['kb_helpful'], 1)
    eq_(r['objects'][0]['kb_votes'], 3)

    # Only es votes:
    r = self._get_api_result('kpi_kb_vote', locale='es')
    eq_(r['objects'][0]['kb_helpful'], 2)
    eq_(r['objects'][0]['kb_votes'], 6)
def test_hot_topics(self):
    """Verifies the hot topics section."""
    # A product with the hot-topics topic attached.
    prod = product(save=True)
    hot = topic(slug=HOT_TOPIC_SLUG, product=prod, save=True)

    # Seven documents tagged hot...
    for _ in range(7):
        hot_doc = revision(is_approved=True, save=True).document
        hot_doc.products.add(prod)
        hot_doc.topics.add(hot)

    # ...and one that isn't.
    plain_doc = revision(is_approved=True, save=True).document
    plain_doc.products.add(prod)

    self.refresh()

    # GET the product landing page and verify the content.
    url = reverse('products.product', args=[prod.slug])
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)
    page = pq(response.content)
    eq_(7, len(page('#hot-topics li')))
def test_active_contributors(self):
    """Test active contributors API call."""
    # 2 en-US revisions by 2 contributors:
    r1 = revision(creator=user(save=True), save=True)
    r2 = revision(creator=user(save=True), save=True)

    # A translation with 2 contributors (translator + reviewer):
    trans = document(parent=r1.document, locale='es', save=True)
    revision(document=trans, reviewed=datetime.now(),
             reviewer=r1.creator, creator=r2.creator, save=True)

    # 1 active support forum contributor:
    # a user with 10 answers...
    u1 = user(save=True)
    for _ in range(10):
        answer(save=True, creator=u1)
    # ...a user with 9 answers...
    u2 = user(save=True)
    for _ in range(9):
        answer(save=True, creator=u2)
    # ...and a user with 1 answer.
    u3 = user(save=True)
    answer(save=True, creator=u3)

    # An AoA reply (1 contributor):
    reply(save=True)

    # Create metric kinds and update metrics for tomorrow (today's
    # activity shows up tomorrow).
    self._make_contributor_metric_kinds()
    update_contributor_metrics(day=date.today() + timedelta(days=1))

    result = self._get_api_result('kpi_active_contributors')
    eq_(result['objects'][0]['en_us'], 2)
    eq_(result['objects'][0]['non_en_us'], 2)
    eq_(result['objects'][0]['support_forum'], 1)
    eq_(result['objects'][0]['aoa'], 1)
def test_l10n_badge(self):
    """Verify the L10n Badge is awarded properly."""
    # Create the user and this year's badge.
    year = date.today().year
    contributor = profile().user
    l10n_badge = badge(
        slug=WIKI_BADGES['l10n-badge']['slug'].format(year=year),
        title=WIKI_BADGES['l10n-badge']['title'].format(year=year),
        description=WIKI_BADGES['l10n-badge']['description'].format(
            year=year),
        save=True)

    # Nine approved es revisions: not enough for the badge yet.
    doc = document(locale='es', save=True)
    for _ in range(9):
        revision(creator=contributor, document=doc, is_approved=True,
                 save=True)
    assert not l10n_badge.is_awarded_to(contributor)

    # The tenth approved es revision earns the badge.
    revision(creator=contributor, document=doc, is_approved=True,
             save=True)
    assert l10n_badge.is_awarded_to(contributor)
def test_majorly_outdated_with_unapproved_parents(self):
    """Migrations might introduce translated revisions without based_on
    set. Tolerate these.

    If based_on of a translation's current_revision is None, the
    translation should be considered out of date iff any
    major-significance, approved revision to the English article
    exists.
    """
    # Create a parent doc with only an unapproved revision...
    parent_rev = revision()
    parent_rev.save()
    # ...and a translation with a revision based on nothing.
    trans = document(parent=parent_rev.document, locale='de')
    trans.save()
    trans_rev = revision(document=trans, is_approved=True)
    trans_rev.save()

    assert trans_rev.based_on is None, \
        ('based_on defaulted to something non-None, which this test '
         "wasn't expecting.")

    # No major-significance approved English revision exists yet, so
    # the translation must not be flagged.
    assert not trans.is_majorly_outdated(), \
        ('A translation was considered majorly out of date even though '
         'the English document has never had an approved revision of '
         'major significance.')

    # Now add one; the translation should be flagged despite the
    # missing based_on.
    major_parent_rev = revision(document=parent_rev.document,
                                significance=MAJOR_SIGNIFICANCE,
                                is_approved=True,
                                is_ready_for_localization=True)
    major_parent_rev.save()

    assert trans.is_majorly_outdated(), \
        ('A translation was not considered majorly outdated when its '
         "current revision's based_on value was None.")
def setUp(self):
    """Create an approved document per locale with locale-typical content.

    self.locale_data maps locale -> expected analyzer name, the content
    indexed for that locale, and (after setup) the created document.
    """
    super(TestAnalyzers, self).setUp()

    self.locale_data = {
        'en-US': {
            'analyzer': 'snowball-english',
            'content': 'I have a cat.',
        },
        'es': {
            'analyzer': 'snowball-spanish',
            'content': 'Tieno un gato.',
        },
        'ar': {
            'analyzer': 'arabic',
            'content': u'لدي اثنين من القطط',
        },
        'my': {
            'analyzer': 'custom-burmese',
            'content': u'အနုပညာ',
        },
        'he': {
            'analyzer': 'standard',
            'content': u'גאולוגיה היא אחד',
        }
    }

    self.docs = {}
    for locale, data in self.locale_data.items():
        doc = document(locale=locale, save=True)
        revision(document=doc, content=data['content'], is_approved=True,
                 save=True)
        self.locale_data[locale]['doc'] = doc

    self.refresh()
def _mark_as_ready_revision(self):
    """Make an approved revision and mark it ready for localization
    through the wiki.mark_ready_for_l10n_revision view.

    Asserts the view returns a 200.
    """
    # NOTE(review): the old docstring said "approve or reject it through
    # the view", but this helper never approves/rejects — it POSTs to
    # the mark-ready-for-l10n endpoint.
    r = revision(is_approved=True,
                 is_ready_for_localization=False,
                 significance=MEDIUM_SIGNIFICANCE,
                 save=True)

    # POST the readiness form for this revision:
    data = {'comment': 'something'}
    response = post(self.client, 'wiki.mark_ready_for_l10n_revision',
                    data, args=[r.document.slug, r.id])
    eq_(200, response.status_code)
def test_by_product(self):
    """Test the product filtering of the readout."""
    firefox = product(title='Firefox', slug='firefox', save=True)
    doc = document(title='Foo', save=True)
    revision(is_approved=True, is_ready_for_localization=True,
             document=doc, save=True)

    # The document has no product yet, so there shouldn't be any rows.
    eq_(0, len(self.rows(product=firefox)))

    # Add the product to the document, and verify it shows up.
    doc.products.add(firefox)
    eq_(self.row(product=firefox)['title'], doc.title)
def test_document_listing_order(self):
    """Verify documents are sorted by display_order and number of
    helpful votes."""
    # Create topic, product and three approved documents.
    prod = product(save=True)
    top = topic(product=prod, save=True)
    docs = []
    for _ in range(3):
        doc = revision(is_approved=True, save=True).document
        doc.topics.add(top)
        doc.products.add(prod)
        docs.append(doc)

    # Give the second document a lower display order; it should be
    # listed first now.
    docs[1].display_order = 0
    docs[1].save()
    self.refresh()

    url = reverse('products.documents', args=[prod.slug, top.slug])
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)
    page = pq(response.content)
    eq_(page('#document-list > ul > li:first-child > a').text(),
        docs[1].title)

    # Add a helpful vote to the third document. It should be second now.
    helpful_vote(revision=docs[2].current_revision, helpful=True,
                 save=True)
    docs[2].save()  # Votes don't trigger a reindex.
    self.refresh()
    cache.clear()  # documents_for() is cached

    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)
    page = pq(response.content)
    eq_(page('#document-list > ul > li:nth-child(2) > a').text(),
        docs[2].title)

    # Add 2 helpful votes to the first document. It should be second
    # now.
    first_rev = docs[0].current_revision
    helpful_vote(revision=first_rev, helpful=True, save=True)
    helpful_vote(revision=first_rev, helpful=True, save=True)
    docs[0].save()  # Votes don't trigger a reindex.
    self.refresh()
    cache.clear()  # documents_for() is cached

    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)
    page = pq(response.content)
    eq_(page('#document-list > ul > li:nth-child(2) > a').text(),
        docs[0].title)
def test_counting_unready_docs(self):
    """Docs without a ready-for-l10n rev shouldn't count in total."""
    # A doc whose only rev is approved but not ready for localization:
    rev = revision(document=document(title='smoo',
                                     is_localizable=True,
                                     save=True),
                   is_ready_for_localization=False,
                   is_approved=True,
                   save=True)

    # It shouldn't show up in the total:
    eq_(0, overview_rows('de')['all']['denominator'])

    # Once the rev is ready for localization, it should count:
    rev.is_ready_for_localization = True
    rev.save()
    eq_(1, overview_rows('de')['all']['denominator'])
def test_escaping(self, get_current):
    """Review mail is plain text: no HTML entities, raw chars intact."""
    get_current.return_value.domain = 'testserver'

    rev = revision()
    doc = rev.document
    doc.title = '"All about quotes"'
    msg = 'foo & "bar"'
    self._approve_and_send(rev, self.user, msg)

    # Two emails will be sent, one each for the reviewer and the
    # reviewed.
    eq_(2, len(mail.outbox))
    eq_('Your revision has been approved: %s' % doc.title,
        mail.outbox[0].subject)
    # BUGFIX: the original asserted '"' not in body, which contradicts
    # the next assertion ('"All about quotes"' in body). The intent is
    # that the HTML entity form must NOT leak into the plain-text mail
    # while the literal characters DO appear.
    assert '&quot;' not in mail.outbox[0].body
    assert '"All about quotes"' in mail.outbox[0].body
    assert 'foo & "bar"' in mail.outbox[0].body
def test_vote(self):
    """Test vote API call."""
    # A revision with 2 unhelpful votes and 1 helpful vote:
    rev = revision(save=True)
    helpful_vote(revision=rev, save=True)
    helpful_vote(revision=rev, save=True)
    helpful_vote(revision=rev, helpful=True, save=True)

    # An answer with 1 unhelpful vote and 2 helpful votes:
    ans = answer(save=True)
    answervote(answer=ans, save=True)
    answervote(answer=ans, helpful=True, save=True)
    answervote(answer=ans, helpful=True, save=True)

    result = self._get_api_result('kpi_vote')
    eq_(result['objects'][0]['kb_helpful'], 1)
    eq_(result['objects'][0]['kb_votes'], 3)
    eq_(result['objects'][0]['ans_helpful'], 2)
    eq_(result['objects'][0]['ans_votes'], 3)
def test_counting_unready_templates(self):
    """Templates without a ready-for-l10n rev don't count"""
    # A template with an approved but not-ready-for-l10n rev:
    rev = revision(document=document(title='Template:smoo',
                                     is_localizable=True,
                                     is_template=True,
                                     save=True),
                   is_ready_for_localization=False,
                   is_approved=True,
                   save=True)

    # It shouldn't show up in the total:
    eq_(0, l10n_overview_rows('de')['templates']['denominator'])

    # Once ready for localization, it should count:
    rev.is_ready_for_localization = True
    rev.save()
    eq_(1, l10n_overview_rows('de')['templates']['denominator'])
def test_french(self):
    """A 'fr' document is returned for 'fr' lookups, with or without an
    English document of the same title."""
    # Create English parent document
    en_doc = document()
    en_doc.save()
    en_rev = revision(document=en_doc, is_approved=True)
    en_rev.save()

    # Create the French document
    fr_doc = document(parent=en_doc, title='A doc', locale='fr')
    fr_doc.save()
    eq_(fr_doc, get_object_fallback(Document, 'A doc', 'fr', '!'))

    # Also works when an English doc with the same title exists
    en_titled = document(title='A doc')
    en_titled.save()
    eq_(fr_doc, get_object_fallback(Document, 'A doc', 'fr', '!'))
def test_counting_unready_navigation(self):
    """Navigation articles without ready-for-l10n rev don't count"""
    # A navigation doc (category 50) with an approved but
    # not-ready-for-l10n rev:
    rev = revision(document=document(title='smoo',
                                     category=50,
                                     is_localizable=True,
                                     is_template=False,
                                     save=True),
                   is_ready_for_localization=False,
                   is_approved=True,
                   save=True)

    # It shouldn't show up in the total:
    eq_(0, overview_rows('de')['navigation']['denominator'])

    # Once ready for localization, it should count:
    rev.is_ready_for_localization = True
    rev.save()
    eq_(1, overview_rows('de')['navigation']['denominator'])
def test_untranslated_detail(self):
    """Assert the whole-page Untranslated Articles view works.

    We don't need to test every whole-page view: just one, to make sure
    the localization_detail template and the view work. All the
    readouts' querying and formatting methods, including the various
    template parameters for each individual readout, are exercised by
    rendering the main, multi-readout page.
    """
    # Put something in the DB so something shows up:
    untranslated = revision(is_approved=True,
                            is_ready_for_localization=True)
    untranslated.save()

    response = self.client.get(
        reverse('dashboards.localization_detail',
                args=['untranslated'], locale='de'))
    self.assertContains(response, untranslated.document.title)