def test_deferred_translation(self):
    """Verify a translation with only a deferred revision appears."""
    d = document(title='Foo', save=True)
    untranslated = revision(is_approved=True, is_ready_for_localization=True,
                            document=d, save=True)

    # There should be 1.
    # NOTE(review): self.titles() presumably returns the untranslated-doc
    # listing for the locale -- confirm against the test base class.
    eq_(1, len(self.titles(locale='es')))

    translation = document(
        parent=untranslated.document, locale='es', save=True)
    # A deferred revision: reviewed but not approved.
    deferred = revision(is_approved=False, reviewed=datetime.now(),
                        document=translation, save=True)

    # There should still be 1.
    eq_(1, len(self.titles(locale='es')))

    # Mark that rev as approved and there should then be 0.
    deferred.is_approved = True
    deferred.save()
    eq_(0, len(self.titles(locale='es')))
def test_filter_by_doctype(self):
    """Verify the 'w' (where) parameter filters kb vs. questions results."""
    # One question with a helpful-voted answer (matches 'audio').
    desktop = product(slug=u"desktop", save=True)
    ques = question(title=u"audio", product=desktop, save=True)
    ans = answer(question=ques, content=u"volume", save=True)
    answervote(answer=ans, helpful=True, save=True)

    # Two approved kb documents that match 'audio'.
    doc = document(title=u"audio", locale=u"en-US", category=10, save=True)
    doc.products.add(desktop)
    revision(document=doc, is_approved=True, save=True)
    doc = document(title=u"audio too", locale=u"en-US", category=10,
                   save=True)
    doc.products.add(desktop)
    revision(document=doc, is_approved=True, save=True)

    self.refresh()

    # There should be 2 results for kb (w=1) and 1 for questions (w=2).
    response = self.client.get(reverse("search"),
                               {"q": "audio", "format": "json", "w": "1"})
    eq_(200, response.status_code)
    content = json.loads(response.content)
    eq_(content["total"], 2)

    response = self.client.get(reverse("search"),
                               {"q": "audio", "format": "json", "w": "2"})
    eq_(200, response.status_code)
    content = json.loads(response.content)
    eq_(content["total"], 1)
def test_category_inheritance(self):
    """A document's categories must always be those of its parent."""
    some_category = CATEGORIES[1][0]
    other_category = CATEGORIES[2][0]

    # Notice if somebody ever changes the default on the category field,
    # which would invalidate our test:
    assert some_category != document().category

    parent = document(category=some_category)
    parent.save()
    child = document(parent=parent, locale="de")
    child.save()

    # Make sure child sees stuff set on parent:
    eq_(some_category, child.category)

    # Child's category should revert to parent's on save:
    child.category = other_category
    child.save()
    eq_(some_category, child.category)

    # Changing the parent category should change the child's:
    parent.category = other_category
    parent.save()
    eq_(other_category,
        parent.translations.get(locale=child.locale).category)
def test_search_suggestions_archived_articles(self):
    """Verifies that archived articles aren't shown."""
    topic(title='Fix problems', slug='fix-problems', save=True)
    p = product(slug=u'firefox', save=True)

    # A live document that should appear in suggestions.
    d1 = document(title=u'document donut', category=10, save=True)
    d1.products.add(p)
    revision(document=d1, is_approved=True, save=True)

    # An archived document that must be excluded.
    d2 = document(title=u'document cupcake', category=10, is_archived=True,
                  save=True)
    d2.products.add(p)
    # BUGFIX: this revision must belong to d2 (it previously said d1), or
    # the archived document is never indexed and the test passes vacuously.
    revision(document=d2, is_approved=True, save=True)

    self.refresh()

    url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='document')
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)

    doc = pq(response.content)
    eq_(len(doc('.result.document')), 1)
    assert 'donut' in doc('.result.document h3 a').text()
    assert 'cupcake' not in doc('.result.document h3 a').text()
def test_filter_by_doctype(self):
    """Verify the 'w' (where) parameter filters kb vs. questions results.

    NOTE(review): this appears to duplicate another test_filter_by_doctype
    elsewhere in the file (different quoting style only) -- confirm whether
    they live in different test classes or one should be removed.
    """
    # One question with a helpful-voted answer (matches 'audio').
    desktop = product(slug=u'desktop', save=True)
    ques = question(title=u'audio', product=desktop, save=True)
    ans = answer(question=ques, content=u'volume', save=True)
    answervote(answer=ans, helpful=True, save=True)

    # Two approved kb documents that match 'audio'.
    doc = document(title=u'audio', locale=u'en-US', category=10, save=True)
    doc.products.add(desktop)
    revision(document=doc, is_approved=True, save=True)
    doc = document(
        title=u'audio too', locale=u'en-US', category=10, save=True)
    doc.products.add(desktop)
    revision(document=doc, is_approved=True, save=True)

    self.refresh()

    # There should be 2 results for kb (w=1) and 1 for questions (w=2).
    response = self.client.get(reverse('search'), {
        'q': 'audio', 'format': 'json', 'w': '1'})
    eq_(200, response.status_code)
    content = json.loads(response.content)
    eq_(content['total'], 2)

    response = self.client.get(reverse('search'), {
        'q': 'audio', 'format': 'json', 'w': '2'})
    eq_(200, response.status_code)
    content = json.loads(response.content)
    eq_(content['total'], 1)
def test_old_revisions(self):
    """Bug 862436. Updating old revisions could cause bad WLH data."""
    d1 = document(title='D1', save=True)
    revision(document=d1, content='', is_approved=True, save=True)
    d2 = document(title='D2', save=True)
    revision(document=d2, content='', is_approved=True, save=True)

    # Make D3, then make a revision that links to D1, then a
    # revision that links to D2. Only the link to D2 should count.
    d3 = document(title='D3', save=True)
    r3_old = revision(document=d3, content='[[D1]]', is_approved=True,
                      save=True)
    # r3_new is the current revision; kept unused on purpose -- creating it
    # is the point, since it supersedes r3_old.
    r3_new = revision(document=d3, content='[[D2]]', is_approved=True,
                      save=True)

    # This could cause stale data (parsing the superseded revision must not
    # regenerate link data for it).
    r3_old.content_parsed

    # D1 is not linked to in any current revisions.
    eq_(len(d1.links_to()), 0)
    eq_(len(d1.links_from()), 0)
    eq_(len(d2.links_to()), 1)
    eq_(len(d2.links_from()), 0)
    eq_(len(d3.links_to()), 0)
    eq_(len(d3.links_from()), 1)
def test_translation_state(self):
    """Walk a translation through every status_class state."""
    eng_doc = document(category=CANNED_RESPONSES_CATEGORY, save=True)
    eng_rev = revision(is_approved=True, is_ready_for_localization=True,
                       document=eng_doc, save=True)

    eq_('untranslated', self.row()['status_class'])

    # Now translate it, but don't approve
    de_doc = document(category=CANNED_RESPONSES_CATEGORY, parent=eng_doc,
                      locale='de', save=True)
    de_rev = revision(is_approved=False, document=de_doc, based_on=eng_rev,
                      save=True)

    eq_('review', self.row()['status_class'])

    # Approve it, so now everything is ok.
    de_rev.is_approved = True
    de_rev.save()

    eq_('ok', self.row()['status_class'])

    # Now update the parent, so it becomes minorly out of date
    revision(is_approved=True, is_ready_for_localization=True,
             document=eng_doc, significance=MEDIUM_SIGNIFICANCE, save=True)

    eq_('update', self.row()['status_class'])

    # Now update the parent, so it becomes majorly out of date
    revision(is_approved=True, is_ready_for_localization=True,
             document=eng_doc, significance=MAJOR_SIGNIFICANCE, save=True)

    eq_('out-of-date', self.row()['status_class'])
def test_filter_by_product(self):
    """Filtering the contributors API by product slug narrows results."""
    u1 = profile().user
    u2 = profile().user

    p1 = product(save=True)
    p2 = product(save=True)

    # u1 revised one doc in p1 and one in p2; u2 revised one doc in p2.
    d1 = document(save=True)
    d1.products.add(p1)
    revision(document=d1, creator=u1, save=True)

    d2 = document(save=True)
    d2.products.add(p2)
    revision(document=d2, creator=u1, save=True)

    d3 = document(save=True)
    d3.products.add(p2)
    revision(document=d3, creator=u2, save=True)

    self.refresh()

    # Only u1 contributed to p1.
    req = self.factory.get('/', {'product': p1.slug})
    data = self.api.get_data(req)
    eq_(data['count'], 1)
    eq_(data['results'][0]['user']['username'], u1.username)
    eq_(data['results'][0]['revision_count'], 1)
def setUp(self):
    """Create revisions inside and outside the 10-day activity window."""
    super(ActiveContributorsTestCase, self).setUp()

    start_date = date.today() - timedelta(days=10)
    self.start_date = start_date
    before_start = start_date - timedelta(days=1)

    # Create some revisions to test with.

    # 3 'en-US' contributors:
    d = document(locale='en-US', save=True)
    u = user(save=True)
    self.user = u
    revision(document=d, is_approved=True, reviewer=u, save=True)
    revision(document=d, creator=u, save=True)

    self.product = product(save=True)
    r = revision(created=start_date, save=True)
    r.document.products.add(self.product)

    # Add one that shouldn't count:
    self.en_us_old = revision(document=d, created=before_start, save=True)

    # 4 'es' contributors:
    d = document(locale='es', save=True)
    revision(document=d, is_approved=True, reviewer=u, save=True)
    revision(document=d, creator=u, reviewer=user(save=True), save=True)
    revision(document=d, created=start_date, save=True)
    revision(document=d, save=True)

    # Add one that shouldn't count:
    self.es_old = revision(document=d, created=before_start, save=True)
def test_top_contributors_l10n(self):
    """top_contributors_l10n counts per-locale, excluding old and en-US."""
    # es: one user with 2 revisions, one with 1, and one too old to count.
    d = document(locale='es', save=True)
    es1 = revision(document=d, save=True)
    es1 = revision(document=d, creator=es1.creator, save=True)
    es3 = revision(document=d, save=True)
    es4 = revision(document=d, created=date.today() - timedelta(days=91),
                   save=True)

    # de: one user with 2 revisions.
    d = document(locale='de', save=True)
    de1 = revision(document=d, save=True)
    de2 = revision(document=d, creator=de1.creator, save=True)

    # en-US revisions must never count as l10n activity.
    d = document(locale='en-US', save=True)
    revision(document=d, save=True)
    revision(document=d, save=True)

    self.refresh()

    # By default, we should only get 2 top contributors back for 'es'.
    top = top_contributors_l10n(locale='es')
    eq_(2, len(top))
    assert es4.creator_id not in [u['term'] for u in top]
    eq_(es1.creator_id, top[0]['term'])

    # By default, we should only get 1 top contributors back for 'de'.
    top = top_contributors_l10n(locale='de')
    eq_(1, len(top))
    eq_(de1.creator_id, top[0]['term'])

    # If no locale is passed, it includes all locales except en-US.
    top = top_contributors_l10n()
    eq_(3, len(top))
def test_wiki_topics(self):
    """Search wiki for topics, includes multiple."""
    t1 = topic(slug='doesnotexist', save=True)
    t2 = topic(slug='extant', save=True)
    t3 = topic(slug='tagged', save=True)

    # One doc tagged t2 only; one tagged both t2 and t3.
    doc = document(locale=u'en-US', category=10, save=True)
    doc.topics.add(t2)
    revision(document=doc, is_approved=True, save=True)

    doc = document(locale=u'en-US', category=10, save=True)
    doc.topics.add(t2)
    doc.topics.add(t3)
    revision(document=doc, is_approved=True, save=True)

    self.refresh()

    # (topics filter value, expected total); a list of topics must match
    # documents carrying ALL of them.
    topic_vals = (
        (t1.slug, 0),
        (t2.slug, 2),
        (t3.slug, 1),
        ([t2.slug, t3.slug], 1),
    )

    qs = {'a': 1, 'w': 1, 'format': 'json'}
    for topics, number in topic_vals:
        qs.update({'topics': topics})
        response = self.client.get(reverse('search'), qs)
        eq_(number, json.loads(response.content)['total'])
def test_top_contributors(self):
    """Verify the top contributors appear."""
    # 1 kb (en-US) contributor, 2 l10n (es) revisions, 3 answers,
    # 4 Army of Awesome replies -- matching the list-length asserts below.
    d = document(locale='en-US', save=True)
    revision(document=d, save=True)
    d = document(locale='es', save=True)
    revision(document=d, save=True)
    revision(document=d, save=True)
    answer(save=True)
    answer(save=True)
    answer(save=True)
    reply(user=user(save=True), save=True)
    reply(user=user(save=True), save=True)
    reply(user=user(save=True), save=True)
    reply(user=user(save=True), save=True)

    # Every user needs a profile for the page to render them.
    for u in User.objects.all():
        profile(user=u)

    self.refresh()

    response = self.client.get(urlparams(reverse('community.home')))
    eq_(response.status_code, 200)
    doc = pq(response.content)
    eq_(1, len(doc('ul.kb > li')))
    eq_(2, len(doc('ul.l10n > li')))
    eq_(3, len(doc('ul.questions > li')))
    eq_(4, len(doc('ul.army-of-awesome > li')))
def test_synonyms_work_in_search_view(self):
    """Adding a synonym changes forward results but not reverse ones."""
    d1 = document(title='frob', save=True)
    d2 = document(title='glork', save=True)
    revision(document=d1, is_approved=True, save=True)
    revision(document=d2, is_approved=True, save=True)

    self.refresh()

    # First search without synonyms
    response = self.client.get(reverse('search'), {'q': 'frob'})
    doc = pq(response.content)
    header = doc.find('#search-results h2').text().strip()
    eq_(header, 'Found 1 result for frob for All Products')

    # Now add a synonym.
    synonym(from_words='frob', to_words='frob, glork', save=True)
    update_synonyms_task()
    self.refresh()

    # Forward search
    response = self.client.get(reverse('search'), {'q': 'frob'})
    doc = pq(response.content)
    header = doc.find('#search-results h2').text().strip()
    eq_(header, 'Found 2 results for frob for All Products')

    # Reverse search (synonyms are one-directional: glork does not expand).
    response = self.client.get(reverse('search'), {'q': 'glork'})
    doc = pq(response.content)
    header = doc.find('#search-results h2').text().strip()
    eq_(header, 'Found 1 result for glork for All Products')
def _create_en_and_de_docs(self):
    """Create an English parent doc plus an approved German translation.

    Returns:
        (en_doc, de_doc) tuple of saved Document instances.
    """
    en = settings.WIKI_DEFAULT_LANGUAGE
    en_doc = document(locale=en, slug='english-slug')
    en_doc.save()
    de_doc = document(locale='de', parent=en_doc)
    de_doc.save()
    de_rev = revision(document=de_doc, is_approved=True)
    de_rev.save()
    return en_doc, de_doc
def test_only_localizable_allowed_children(self): """You can't have children for a non-localizable document.""" # Make English rev: en_doc = document(is_localizable=False) en_doc.save() # Make Deutsch translation: de_doc = document(parent=en_doc, locale="de") self.assertRaises(ValidationError, de_doc.save)
def test_update_l10n_coverage_metrics(self):
    """Test the cron job that updates l10n coverage metrics."""
    p = product(save=True)

    # Create en-US documents.
    for i in range(20):
        r = revision(is_approved=True, is_ready_for_localization=True,
                     save=True)
        r.document.products.add(p)

    r1 = Revision.objects.all()[0]
    r2 = Revision.objects.all()[1]

    # Translate one to es.
    d = document(parent=r1.document, locale="es", save=True)
    revision(document=d, based_on=r1, is_approved=True, save=True)

    # Translate two to de.
    d = document(parent=r1.document, locale="de", save=True)
    revision(document=d, based_on=r1, is_approved=True, save=True)
    d = document(parent=r2.document, locale="de", save=True)
    revision(document=d, based_on=r2, is_approved=True, save=True)

    # Translate all to ru.
    for r in Revision.objects.filter(document__locale="en-US"):
        d = document(parent=r.document, locale="ru", save=True)
        revision(document=d, based_on=r, is_approved=True, save=True)

    # Call the cronjob
    update_l10n_coverage_metrics()

    # Verify es metrics: 1 of 20 docs translated = 5% coverage, recorded
    # for (product, None) x (top-20, all) = 4 WikiMetric rows per locale.
    eq_(4, WikiMetric.objects.filter(locale="es").count())
    eq_(5.0, WikiMetric.objects.get(locale="es", product=p,
                                    code=L10N_TOP20_CODE).value)
    eq_(5.0, WikiMetric.objects.get(locale="es", product=p,
                                    code=L10N_ALL_CODE).value)
    eq_(5.0, WikiMetric.objects.get(locale="es", product=None,
                                    code=L10N_TOP20_CODE).value)
    eq_(5.0, WikiMetric.objects.get(locale="es", product=None,
                                    code=L10N_ALL_CODE).value)

    # Verify de metrics: 2 of 20 = 10%.
    eq_(4, WikiMetric.objects.filter(locale="de").count())
    eq_(10.0, WikiMetric.objects.get(locale="de", product=p,
                                     code=L10N_TOP20_CODE).value)
    eq_(10.0, WikiMetric.objects.get(locale="de", product=p,
                                     code=L10N_ALL_CODE).value)
    eq_(10.0, WikiMetric.objects.get(locale="de", product=None,
                                     code=L10N_TOP20_CODE).value)
    eq_(10.0, WikiMetric.objects.get(locale="de", product=None,
                                     code=L10N_ALL_CODE).value)

    # Verify ru metrics: 20 of 20 = 100%.
    eq_(4, WikiMetric.objects.filter(locale="ru").count())
    eq_(100.0, WikiMetric.objects.get(locale="ru", product=p,
                                      code=L10N_TOP20_CODE).value)
    eq_(100.0, WikiMetric.objects.get(locale="ru", product=p,
                                      code=L10N_ALL_CODE).value)
    eq_(100.0, WikiMetric.objects.get(locale="ru", product=None,
                                      code=L10N_TOP20_CODE).value)
    eq_(100.0, WikiMetric.objects.get(locale="ru", product=None,
                                      code=L10N_ALL_CODE).value)

    # Verify it metrics: no translations = 0%.
    eq_(4, WikiMetric.objects.filter(locale="it").count())
    eq_(0.0, WikiMetric.objects.get(locale="it", product=p,
                                    code=L10N_TOP20_CODE).value)
    eq_(0.0, WikiMetric.objects.get(locale="it", product=p,
                                    code=L10N_ALL_CODE).value)
    eq_(0.0, WikiMetric.objects.get(locale="it", product=None,
                                    code=L10N_TOP20_CODE).value)
    eq_(0.0, WikiMetric.objects.get(locale="it", product=None,
                                    code=L10N_ALL_CODE).value)
def test_archived(self):
    """Ensure archived articles show only when requested."""
    doc = document(title=u'impalas', locale=u'en-US',
                   is_archived=True, save=True)
    revision(document=doc, summary=u'impalas', is_approved=True, save=True)

    self.refresh()

    # include_archived gets the above document
    qs = {'q': 'impalas', 'a': 1, 'w': 1, 'format': 'json',
          'include_archived': 'on'}
    response = self.client.get(reverse('search'), qs)
    results = json.loads(response.content)['results']
    eq_(1, len(results))

    # no include_archived gets you nothing since the only
    # document in the index is archived
    qs = {'q': 'impalas', 'a': 0, 'w': 1, 'format': 'json'}
    response = self.client.get(reverse('search'), qs)
    results = json.loads(response.content)['results']
    eq_(0, len(results))
def test_wiki_topics(self):
    """Make sure that adding topics to a Document causes it to
    refresh the index.

    """
    t = topic(slug=u'hiphop', save=True)
    eq_(DocumentMappingType.search().filter(topic=t.slug).count(), 0)
    doc = document(save=True)
    revision(document=doc, is_approved=True, save=True)
    self.refresh()
    eq_(DocumentMappingType.search().filter(topic=t.slug).count(), 0)
    doc.topics.add(t)
    self.refresh()
    eq_(DocumentMappingType.search().filter(topic=t.slug).count(), 1)
    doc.topics.clear()
    self.refresh()

    # Make sure the document itself is still there and that we didn't
    # accidentally delete it through screwed up signal handling:
    eq_(DocumentMappingType.search().filter().count(), 1)
    eq_(DocumentMappingType.search().filter(topic=t.slug).count(), 0)
def test_search_products(self):
    """Advanced search filtered by product finds the tagged document."""
    p = product(title=u'Product One', slug='product', save=True)
    doc1 = document(title=u'cookies', locale='en-US', category=10,
                    save=True)
    revision(document=doc1, is_approved=True, save=True)
    doc1.products.add(p)
    doc1.save()

    self.refresh()

    response = self.client.get(reverse('search.advanced'), {
        'a': '1', 'product': 'product', 'q': 'cookies', 'w': '1'
    })

    assert "We couldn't find any results for" not in response.content
    eq_(200, response.status_code)
    assert 'Product One' in response.content
def test_wiki_products(self):
    """Make sure that adding products to a Document causes it to
    refresh the index.

    """
    p = product(slug=u'desktop', save=True)
    eq_(DocumentMappingType.search().filter(product=p.slug).count(), 0)
    doc = document(save=True)
    revision(document=doc, is_approved=True, save=True)
    self.refresh()
    eq_(DocumentMappingType.search().filter(product=p.slug).count(), 0)
    doc.products.add(p)
    self.refresh()
    eq_(DocumentMappingType.search().filter(product=p.slug).count(), 1)
    doc.products.remove(p)
    self.refresh()

    # Make sure the document itself is still there and that we didn't
    # accidentally delete it through screwed up signal handling:
    eq_(DocumentMappingType.search().filter().count(), 1)
    eq_(DocumentMappingType.search().filter(product=p.slug).count(), 0)
def test_wiki_redirects(self): """Make sure we don't index redirects""" # First create a revision that doesn't have a redirect and # make sure it's in the index. doc = document(title=u'wool hats') doc.save() revision(document=doc, is_approved=True, save=True) self.refresh() eq_( DocumentMappingType.search().query( document_title__match='wool').count(), 1) # Now create a revision that is a redirect and make sure the # document is removed from the index. revision(document=doc, content=REDIRECT_CONTENT, is_approved=True, save=True) self.refresh() eq_( DocumentMappingType.search().query( document_title__match='wool').count(), 0)
def test_pageviews_by_document(self, _build_request):
    """Test googleanalytics.pageviews_by_document().

    The GA request chain is mocked; the canned response maps slugs
    doc-1..doc-5 to the pageview counts asserted below.
    """
    execute = _build_request.return_value.get.return_value.execute
    execute.return_value = PAGEVIEWS_BY_DOCUMENT_RESPONSE

    # Add some documents that match the response data.
    documents = []
    for i in range(1, 6):
        documents.append(revision(
            document=document(slug='doc-%s' % i, save=True),
            is_approved=True,
            save=True).document)

    # FIX: dropped leading-zero literals (01 is an octal literal in
    # Python 2 and a SyntaxError in Python 3 -- PEP 3127).
    pageviews = googleanalytics.pageviews_by_document(
        date(2013, 1, 16), date(2013, 1, 16))

    eq_(5, len(pageviews))
    eq_(1, pageviews[documents[0].pk])
    eq_(2, pageviews[documents[1].pk])
    eq_(10, pageviews[documents[2].pk])
    eq_(39, pageviews[documents[3].pk])
    eq_(46, pageviews[documents[4].pk])
def test_active_contributors(self): """Test active contributors API call.""" # 2 en-US revisions by 2 contributors: r1 = revision(creator=user(save=True), save=True) r2 = revision(creator=user(save=True), save=True) # A translation with 2 contributors (translator + reviewer): d = document(parent=r1.document, locale='es', save=True) revision(document=d, reviewed=datetime.now(), reviewer=r1.creator, creator=r2.creator, save=True) # 1 active support forum contributor: # A user with 10 answers u1 = user(save=True) for x in range(10): answer(save=True, creator=u1) # A user with 9 answers u2 = user(save=True) for x in range(9): answer(save=True, creator=u2) # A user with 1 answer u3 = user(save=True) answer(save=True, creator=u3) # An AoA reply (1 contributor): reply(save=True) # Create metric kinds and update metrics for tomorrow (today's # activity shows up tomorrow). self._make_contributor_metric_kinds() update_contributor_metrics(day=date.today() + timedelta(days=1)) r = self._get_api_result('api.kpi.contributors') eq_(r['objects'][0]['en_us'], 2) eq_(r['objects'][0]['non_en_us'], 2) eq_(r['objects'][0]['support_forum'], 1) eq_(r['objects'][0]['aoa'], 1)
def test_search_suggestions_questions(self):
    """Verifies the view doesn't kick up an HTTP 500"""
    p = product(slug=u'firefox', save=True)
    l = QuestionLocale.objects.get(locale=settings.LANGUAGE_CODE)
    p.questions_locales.add(l)
    topic(title='Fix problems', slug='fix-problems', product=p, save=True)
    q = question(product=p, title=u'CupcakesQuestion cupcakes', save=True)

    d = document(title=u'CupcakesKB cupcakes', category=10, save=True)
    d.products.add(p)

    revision(document=d, is_approved=True, save=True)

    self.refresh()

    url = urlparams(reverse('questions.aaq_step4',
                            args=['desktop', 'fix-problems']),
                    search='cupcakes')

    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)

    assert 'CupcakesQuestion' in response.content
    assert 'CupcakesKB' in response.content

    # Verify that archived articles and questions aren't shown...
    # Archive both and they shouldn't appear anymore.
    q.is_archived = True
    q.save()
    d.is_archived = True
    d.save()

    self.refresh()

    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)

    assert 'CupcakesQuestion' not in response.content
    assert 'CupcakesKB' not in response.content
def test_locale_discussions_ignores_sticky(self):
    """Sticky flag is ignored in locale discussions view"""
    u = user(save=True)
    d = document(save=True)
    t = thread(title='Sticky Thread', is_sticky=True, document=d,
               save=True)
    t.new_post(creator=u, content='foo')
    t2 = thread(title='A thread with a very very long', is_sticky=False,
                document=d, save=True)
    t2.new_post(creator=u, content='bar')
    # Sleep so the last post's timestamp is strictly later; ordering is by
    # recent activity, so t2 should sort first despite t being sticky.
    time.sleep(1)
    t2.new_post(creator=u, content='last')

    self.client.login(username=u.username, password='******')
    response = post(self.client, 'wiki.locale_discussions')

    eq_(200, response.status_code)
    doc = pq(response.content)
    title = doc('ol.threads li div.title a:first').text()
    assert title.startswith('A thread with a very very long')
def test_wiki_section(self): """Verify the wiki doc appears on the landing page.""" # If "Mozilla News" article doesn't exist, home page # should still work and omit the section. response = self.client.get(urlparams(reverse('community.home'))) eq_(response.status_code, 200) doc = pq(response.content) eq_(len(doc('#doc-content')), 0) # Create the "Mozilla News" article and verify it on home page. d = document( title='Community Hub News', slug='community-hub-news', save=True) rev = revision( document=d, content='splendid', is_approved=True, save=True) d.current_revision = rev d.save() response = self.client.get(urlparams(reverse('community.home'))) eq_(response.status_code, 200) doc = pq(response.content) community_news = doc('#doc-content') eq_(len(community_news), 1) assert 'splendid' in community_news.text()
def _create_bundle(self, prod, locale=settings.WIKI_DEFAULT_LANGUAGE):
    """Create a product/topic with 5 approved docs and build its kb bundle.

    For non-default locales, each doc also gets a freshly-created
    default-locale parent document.
    """
    p = product(title=prod, save=True)
    t = topic(title='topic1', product=p, save=True)

    # PEP 8 (E731): use a def in both branches instead of assigning a
    # lambda in one of them; behavior is unchanged.
    if locale == settings.WIKI_DEFAULT_LANGUAGE:
        def parent(i):
            return None
    else:
        def parent(i):
            # Build the en-US parent the translation points at.
            d = document(title='test {0} {1}'.format(locale, i),
                         locale=settings.WIKI_DEFAULT_LANGUAGE,
                         save=True)

            d.products.add(p)
            d.topics.add(t)
            d.save()

            revision(summary='test article {0}'.format(i),
                     document=d,
                     is_approved=True,
                     save=True)
            return d

    for i in xrange(5):
        d = document(title='test {0} {1}'.format(locale, i),
                     locale=locale, save=True)
        revision(summary='test article {0}'.format(i), document=d,
                 is_approved=True, save=True)
        d.products.add(p)
        d.topics.add(t)
        d.parent = parent(i)
        d.save()

    build_kb_bundles((prod, ))
def test_localized_based_on(self):
    """Editing a localized article 'based on' an older revision of the
    localization is OK."""
    en_r = revision(save=True)
    fr_d = document(parent=en_r.document, locale='fr', save=True)
    revision(document=fr_d, based_on=en_r, is_approved=True, save=True)
    fr_r = revision(document=fr_d, based_on=en_r, keywords="oui",
                    summary="lipsum", save=True)
    url = reverse('wiki.new_revision_based_on',
                  locale='fr', args=(fr_d.slug, fr_r.pk,))
    response = self.client.get(url)
    doc = pq(response.content)
    # Renamed local (was `input`): avoid shadowing the builtin.
    based_on_input = doc('#id_based_on')[0]
    # The form's based_on should point at the English source revision.
    eq_(int(based_on_input.value), en_r.pk)
    eq_(doc('#id_keywords')[0].attrib['value'], 'oui')
    eq_(doc('#id_summary').text(), 'lipsum')
def test_watch_thread_then_reply(self, get_current):
    """The event fires and sends emails when watching a thread."""
    get_current.return_value.domain = 'testserver'

    u = user(username='******', save=True)
    u_b = user(username='******', save=True)
    d = document(title='an article title', save=True)
    _t = thread(title='Sticky Thread', document=d, is_sticky=True,
                save=True)
    # u_b watches the thread; u posts the reply that triggers the email.
    t = self._toggle_watch_thread_as(u_b.username, _t, turn_on=True)
    self.client.login(username=u.username, password='******')
    post(self.client, 'wiki.discuss.reply', {'content': 'a post'},
         args=[t.document.slug, t.id])

    p = Post.objects.all().order_by('-id')[0]
    attrs_eq(mail.outbox[0], to=[u_b.email],
             subject='Re: an article title - Sticky Thread')
    starts_with(mail.outbox[0].body, REPLY_EMAIL % (d.slug, t.id, p.id))

    self._toggle_watch_thread_as(u_b.username, _t, turn_on=False)
def test_autowatch_new_thread(self, get_current):
    """Creating a new thread should email responses"""
    get_current.return_value.domain = 'testserver'

    d = document(save=True)
    u = user(save=True)
    self.client.login(username=u.username, password='******')

    # With the autowatch setting off, creating a thread must not subscribe.
    s = Setting.objects.create(user=u, name='kbforums_watch_new_thread',
                               value='False')
    data = {'title': 'a title', 'content': 'a post'}
    post(self.client, 'wiki.discuss.new_thread', data, args=[d.slug])
    t1 = thread(document=d, save=True)
    assert not NewPostEvent.is_notifying(
        u, t1), ('NewPostEvent should not be notifying.')

    # Flip the setting on; a new thread should now auto-subscribe.
    s.value = 'True'
    s.save()
    post(self.client, 'wiki.discuss.new_thread', data, args=[d.slug])
    t2 = Thread.objects.all().order_by('-id')[0]
    assert NewPostEvent.is_notifying(
        u, t2), ('NewPostEvent should be notifying')
def test_watch_other_locale_then_new_thread(self, get_current):
    """Watching a different locale and creating a thread does not
    notify."""
    get_current.return_value.domain = 'testserver'

    d = document(locale='en-US', save=True)
    u = user(username='******', save=True)
    self.client.login(username=u.username, password='******')
    # Watch 'ja', but the thread below is created on an en-US document.
    post(self.client, 'wiki.discuss.watch_locale', {'watch': 'yes'},
         locale='ja')

    u2 = user(save=True)
    self.client.login(username=u2.username, password='******')
    post(self.client, 'wiki.discuss.new_thread', {
        'title': 'a title', 'content': 'a post'
    }, args=[d.slug])

    # Email was not sent.
    eq_(0, len(mail.outbox))
def test_watch_forum_then_new_thread(self, get_current):
    """Watching a forum and creating a new thread should send email."""
    get_current.return_value.domain = 'testserver'

    u = user(save=True)
    d = document(title='an article title', save=True)
    f = self._toggle_watch_kbforum_as(u.username, d, turn_on=True)
    u2 = user(username='******', save=True)
    self.client.login(username=u2.username, password='******')
    post(self.client, 'wiki.discuss.new_thread', {
        'title': 'a title', 'content': 'a post'
    }, args=[f.slug])

    t = Thread.objects.all().order_by('-id')[0]
    attrs_eq(mail.outbox[0], to=[u.email],
             subject=u'an article title - a title')
    starts_with(mail.outbox[0].body, NEW_THREAD_EMAIL % (d.slug, t.id))

    self._toggle_watch_kbforum_as(u.username, d, turn_on=False)
def test_search_suggestions_questions(self):
    """Verifies the view doesn't kick up an HTTP 500"""
    p = product(slug=u'firefox', save=True)
    topic(title='Fix problems', slug='fix-problems', product=p, save=True)
    q = question(title=u'CupcakesQuestion cupcakes', save=True)
    q.products.add(p)

    d = document(title=u'CupcakesKB cupcakes', category=10, save=True)
    d.products.add(p)
    revision(document=d, is_approved=True, save=True)

    self.refresh()

    url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='cupcakes')

    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)

    # Both the matching question and the kb article appear as suggestions.
    assert 'CupcakesQuestion' in response.content
    assert 'CupcakesKB' in response.content
def _test_remembering_setter(self, field):
    """Shared checks for 'remembering' setters: changing `field` exposes
    the prior value as `old_<field>` until the document is saved."""
    old_field = 'old_' + field
    d = document()
    d.save()
    old = getattr(d, field)

    # Changing the field makes old_field spring into life:
    setattr(d, field, 'Foo')
    eq_(old, getattr(d, old_field))

    # Changing it back makes old_field disappear:
    setattr(d, field, old)
    assert not hasattr(d, old_field)

    # Change it again once:
    setattr(d, field, 'Foo')

    # And twice:
    setattr(d, field, 'Bar')

    # And old_field should remain as it was, since it hasn't been saved
    # between the two changes:
    eq_(old, getattr(d, old_field))
def test_wiki_products(self):
    """Search wiki for products."""
    # (product, number of approved docs tagged with it)
    prod_vals = (
        (product(slug='b2g', save=True), 0),
        (product(slug='mobile', save=True), 1),
        (product(slug='desktop', save=True), 2),
    )

    for prod, total in prod_vals:
        for i in range(total):
            doc = document(locale=u'en-US', category=10, save=True)
            doc.products.add(prod)
            revision(document=doc, is_approved=True, save=True)

    self.refresh()

    qs = {'a': 1, 'w': 1, 'format': 'json'}

    for prod, total in prod_vals:
        qs.update({'product': prod.slug})
        response = self.client.get(reverse('search'), qs)
        eq_(total, json.loads(response.content)['total'])
def test_mozilla_news(self): """Verifies the Mozilla News section.""" # If "Mozilla News" article doesn't exist, home page # should still work and omit the section. r = self.client.get(reverse('home'), follow=True) eq_(200, r.status_code) doc = pq(r.content) eq_(len(doc('#mozilla-news')), 0) # Create the "Mozilla News" article and verify it on home page. d = document(title='Mozilla News', slug='mozilla-news', save=True) rev = revision(document=d, content='splendid', is_approved=True, save=True) d.current_revision = rev d.save() r = self.client.get(reverse('home'), follow=True) eq_(200, r.status_code) doc = pq(r.content) moz_news = doc('#mozilla-news') eq_(len(moz_news), 1) assert 'splendid' in moz_news.text()
def setUp(self):
    """Index one approved document per locale, each with content that
    exercises that locale's configured ES analyzer."""
    super(TestAnalyzers, self).setUp()

    # locale -> expected analyzer name + sample content.
    # NOTE(review): 'Tieno' looks like a typo for Spanish 'Tengo'; it's
    # analyzer fodder only, so it is left as-is to avoid changing what
    # downstream assertions may tokenize.
    self.locale_data = {
        'en-US': {
            'analyzer': 'snowball-english',
            'content': 'I have a cat.',
        },
        'es': {
            'analyzer': 'snowball-spanish',
            'content': 'Tieno un gato.',
        },
        'ar': {
            'analyzer': 'arabic',
            'content': u'لدي اثنين من القطط',
        },
        'my': {
            'analyzer': 'custom-burmese',
            'content': u'အနုပညာ',
        },
        'he': {
            'analyzer': 'standard',
            'content': u'גאולוגיה היא אחד',
        }
    }

    self.docs = {}
    for locale, data in self.locale_data.items():
        d = document(locale=locale, save=True)
        revision(document=d, content=data['content'], is_approved=True,
                 save=True)
        self.locale_data[locale]['doc'] = d

    self.refresh()
def test_watch_all_then_new_post(self, get_current):
    """Watching document + thread + locale and reply to thread."""
    get_current.return_value.domain = 'testserver'

    u = user(save=True)
    _d = document(title='an article title', save=True)
    d = self._toggle_watch_kbforum_as(u.username, _d, turn_on=True)
    t = thread(title='Sticky Thread', document=d, save=True)
    self._toggle_watch_thread_as(u.username, t, turn_on=True)
    self.client.login(username=u.username, password='******')
    post(self.client, 'wiki.discuss.watch_locale', {'watch': 'yes'})

    # Reply as jsocol to document d.
    u2 = user(username='******', save=True)
    self.client.login(username=u2.username, password='******')
    post(self.client, 'wiki.discuss.reply', {'content': 'a post'},
         args=[d.slug, t.id])

    # Only ONE email was sent. As expected.
    eq_(1, len(mail.outbox))
    p = Post.objects.all().order_by('-id')[0]
    attrs_eq(mail.outbox[0], to=[u.email],
             subject='Re: an article title - Sticky Thread')
    starts_with(mail.outbox[0].body, REPLY_EMAIL % (d.slug, t.id, p.id))
def test_majorly_outdated_with_unapproved_parents(self): """Migrations might introduce translated revisions without based_on set. Tolerate these. If based_on of a translation's current_revision is None, the translation should be considered out of date iff any major-significance, approved revision to the English article exists. """ # Create a parent doc with only an unapproved revision... parent_rev = revision() parent_rev.save() # ...and a translation with a revision based on nothing. trans = document(parent=parent_rev.document, locale='de') trans.save() trans_rev = revision(document=trans, is_approved=True) trans_rev.save() assert trans_rev.based_on is None, \ ('based_on defaulted to something non-None, which this test ' "wasn't expecting.") assert not trans.is_majorly_outdated(), \ ('A translation was considered majorly out of date even though ' 'the English document has never had an approved revision of ' 'major significance.') major_parent_rev = revision(document=parent_rev.document, significance=MAJOR_SIGNIFICANCE, is_approved=True, is_ready_for_localization=True) major_parent_rev.save() assert trans.is_majorly_outdated(), \ ('A translation was not considered majorly outdated when its ' "current revision's based_on value was None.")
def test_watch_both_then_new_post(self, get_current):
    """Watching both and replying to a thread should send ONE email."""
    get_current.return_value.domain = 'testserver'

    u = user(save=True)
    d = document(title='an article title', save=True)
    f = self._toggle_watch_kbforum_as(u.username, d, turn_on=True)
    t = thread(title='Sticky Thread', document=d, save=True)
    self._toggle_watch_thread_as(u.username, t, turn_on=True)
    u2 = user(username='******', save=True)
    self.client.login(username=u2.username, password='******')
    post(self.client, 'wiki.discuss.reply', {'content': 'a post'},
         args=[f.slug, t.id])

    # Watching both the forum and the thread must not double-notify.
    eq_(1, len(mail.outbox))
    p = Post.objects.all().order_by('-id')[0]
    attrs_eq(mail.outbox[0], to=[u.email],
             subject='Re: an article title - Sticky Thread')
    starts_with(mail.outbox[0].body, REPLY_EMAIL % (d.slug, t.id, p.id))

    self._toggle_watch_kbforum_as(u.username, d, turn_on=False)
    self._toggle_watch_thread_as(u.username, t, turn_on=False)
def test_l10n_badge(self): """Verify the L10n Badge is awarded properly.""" # Create the user and badge. year = date.today().year u = profile().user b = badge(slug=WIKI_BADGES['l10n-badge']['slug'].format(year=year), title=WIKI_BADGES['l10n-badge']['title'].format(year=year), description=WIKI_BADGES['l10n-badge']['description'].format( year=year), save=True) # Create 9 approved es revisions. d = document(locale='es', save=True) for i in range(9): revision(creator=u, document=d, is_approved=True, save=True) # User should NOT have the badge yet assert not b.is_awarded_to(u) # Create 1 more approved es revision. revision(creator=u, document=d, is_approved=True, save=True) # User should have the badge now assert b.is_awarded_to(u)
def test_watch_locale_then_new_thread(self, get_current):
    """Watching locale and create a thread."""
    get_current.return_value.domain = 'testserver'

    d = document(title='an article title', locale='en-US', save=True)
    u = user(username='******', save=True)
    self.client.login(username=u.username, password='******')
    # Watch the (default) locale the document lives in.
    post(self.client, 'wiki.discuss.watch_locale', {'watch': 'yes'})

    u2 = user(username='******', save=True)
    self.client.login(username=u2.username, password='******')
    post(self.client, 'wiki.discuss.new_thread', {
        'title': 'a title', 'content': 'a post'
    }, args=[d.slug])

    # Email was sent as expected.
    t = Thread.objects.all().order_by('-id')[0]
    attrs_eq(mail.outbox[0], to=[u.email],
             subject=u'an article title - a title')
    starts_with(mail.outbox[0].body, NEW_THREAD_EMAIL % (d.slug, t.id))
def test_lock_helpers_translation(self):
    """Run the shared lock-helper checks against a translated document."""
    parent_doc = document(save=True)
    translation = document(parent=parent_doc, locale='de', save=True)
    self._test_lock_helpers(translation)
def test_unapproved_template(self):
    """Including a template with no approved revision renders an error."""
    document(title='Template:new').save()

    p = WikiParser()
    doc = pq(p.parse('[[T:new]]'))
    eq_('The template "new" does not exist or has no approved revision.',
        doc.text())
def test_lock_helpers_doc(self):
    """Run the shared lock-helper checks against a plain document."""
    self._test_lock_helpers(document(save=True))
def setUp(self):
    """Create one approved article for the JSON view tests to fetch."""
    super(JsonViewTests, self).setUp()

    d = document(title='an article title', slug='article-title', save=True)
    revision(document=d, is_approved=True, save=True)