def test_locale_filter(self):
    """Results should be filterable by the ``locale`` query parameter."""
    today = date.today()
    # Three Spanish metrics over consecutive days, plus one French one.
    for offset in range(3):
        WikiMetricFactory(locale='es', date=today - timedelta(days=offset))
    WikiMetricFactory(locale='fr')

    # Each locale filter should return only its own metrics.
    for loc, expected in (('es', 3), ('fr', 1)):
        url = urlparams(reverse('api.wikimetric_list'), format='json',
                        locale=loc)
        response = self.client.get(url)
        eq_(200, response.status_code)
        eq_(expected, len(json.loads(response.content)['results']))
def test_product_filter(self):
    """Results should be filterable by the ``product`` query parameter."""
    today = date.today()
    first = ProductFactory()
    second = ProductFactory()
    # Three metrics per product on consecutive days...
    for offset in range(3):
        for product in (first, second):
            WikiMetricFactory(date=today - timedelta(days=offset),
                              product=product)
    # ...plus a fourth metric for the second product only.
    WikiMetricFactory(date=today - timedelta(days=4), product=second)

    # Filtering by each product slug returns only that product's metrics.
    for product, expected in ((first, 3), (second, 4)):
        url = urlparams(reverse('api.wikimetric_list'), format='json',
                        product=product.slug)
        response = self.client.get(url)
        eq_(200, response.status_code)
        eq_(expected, len(json.loads(response.content)['results']))
def test_code_filter(self):
    """Results should be filterable by the ``code`` query parameter."""
    today = date.today()
    # Three metrics for the first code choice, one for the second.
    for offset in range(3):
        WikiMetricFactory(code=METRIC_CODE_CHOICES[0][0],
                          date=today - timedelta(days=offset))
    WikiMetricFactory(code=METRIC_CODE_CHOICES[1][0])

    # Each code filter should return only the matching metrics.
    for choice, expected in ((METRIC_CODE_CHOICES[0][0], 3),
                             (METRIC_CODE_CHOICES[1][0], 1)):
        url = urlparams(reverse('api.wikimetric_list'), format='json',
                        code=choice)
        response = self.client.get(url)
        eq_(200, response.status_code)
        eq_(expected, len(json.loads(response.content)['results']))
def test_no_update_tagging(self):
    """Tagging operations should not bump the question's updated time."""
    add_url = urlparams(reverse('questions.add_tag', args=[self.q.id]))
    self._request_and_no_update(add_url, req_type='POST',
                                data={'tag-name': 'foo'})

    remove_url = urlparams(reverse('questions.remove_tag', args=[self.q.id]))
    self._request_and_no_update(remove_url, req_type='POST',
                                data={'remove-tag-foo': 1})
def test_post_absolute_url(self):
    """A post's absolute URL should include its page and anchor."""
    thread = ThreadFactory(posts=[])
    # Yesterday's posts exactly fill the first page.
    first = PostFactory(thread=thread, created=YESTERDAY)
    PostFactory.create_batch(POSTS_PER_PAGE - 1, created=YESTERDAY,
                             thread=thread)
    # Today's post overflows onto page two.
    second = PostFactory(thread=thread)

    base = reverse("forums.posts",
                   kwargs={"forum_slug": first.thread.forum.slug,
                           "thread_id": first.thread.id})
    eq_(urlparams(base, hash="post-%s" % first.id), first.get_absolute_url())

    base = reverse("forums.posts",
                   kwargs={"forum_slug": second.thread.forum.slug,
                           "thread_id": second.thread.id})
    eq_(urlparams(base, hash="post-%s" % second.id, page=2),
        second.get_absolute_url())
def test_default(self):
    """Without filters, the API should return everything that exists."""
    today = date.today()
    num_codes = len(METRIC_CODE_CHOICES)
    # Ten metrics cycling through the available codes.
    for i in range(10):
        WikiMetricFactory(code=METRIC_CODE_CHOICES[i % num_codes][0],
                          date=today - timedelta(days=i),
                          value=i)

    response = self.client.get(
        urlparams(reverse('api.wikimetric_list'), format='json'))
    eq_(200, response.status_code)
    results = json.loads(response.content)['results']

    # Every created metric comes back, newest first.
    eq_(10, len(results))
    for i, result in enumerate(results):
        eq_(i, result['value'])
        eq_(METRIC_CODE_CHOICES[i % num_codes][0], result['code'])
        eq_(str(today - timedelta(days=i)), result['date'])
def test_known_signature(self):
    """A known crash signature should resolve to its KB document URL."""
    sig = SignatureFactory()
    response = self.client.get(
        urlparams(reverse('postcrash.api'), s=sig.signature))
    eq_(200, response.status_code)
    eq_('https://example.com/kb/%s' % sig.document.slug, response.content)
    eq_('text/plain', response['content-type'])
def test_ratelimit(self):
    """Make sure posting new questions is ratelimited"""
    data = {
        'title': 'A test question',
        'content': 'I have this question that I hope...',
        'sites_affected': 'http://example.com',
        'ff_version': '3.6.6',
        'os': 'Intel Mac OS X 10.6',
        'plugins': '* Shockwave Flash 10.1 r53',
        'useragent': 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X '
                     '10.6; en-US; rv:1.9.2.6) Gecko/20100625 '
                     'Firefox/3.6.6',
    }
    product = ProductFactory(slug='firefox')
    question_locale = QuestionLocale.objects.get(
        locale=settings.LANGUAGE_CODE)
    product.questions_locales.add(question_locale)
    TopicFactory(slug='fix-problems', product=product)
    url = urlparams(
        reverse('questions.aaq_step5', args=['desktop', 'fix-problems']),
        search='A test question')

    user = UserFactory()
    self.client.login(username=user.username, password='******')

    # Burn through the allowed posts; the next one must be rejected.
    for _ in range(5):
        self.client.post(url, data, follow=True)
    response = self.client.post(url, data, follow=True)
    eq_(403, response.status_code)
def _get_api_result(self, name, **kwargs):
    """Hit the named API endpoint and return its decoded JSON payload."""
    url = urlparams(reverse(name), format='json', **kwargs)
    response = self.client.get(url)
    eq_(200, response.status_code)
    return json.loads(response.content)
def get_absolute_url(self):
    """Return the post's URL: its thread's URL plus page and anchor."""
    # An explicit ?page= is only needed past the first page.
    query = {'page': self.page} if self.page > 1 else {}
    thread_url = self.thread.get_absolute_url()
    return urlparams(thread_url, hash='post-%s' % self.id, **query)
def login(request, template):
    """Try to log the user in."""
    if request.method == 'GET' and not request.MOBILE:
        # Desktop GETs are permanently redirected to the auth view,
        # carrying the original query string along.
        url = reverse('users.auth') + '?' + request.GET.urlencode()
        return HttpResponsePermanentRedirect(url)
    next_url = get_next_url(request) or reverse('home')
    # POSTing inactive='1' allows not-yet-activated accounts to log in.
    only_active = request.POST.get('inactive', '0') != '1'
    form = handle_login(request, only_active=only_active)
    if request.user.is_authenticated():
        # Add a parameter so we know the user just logged in.
        # fpa = "first page authed" or something.
        next_url = urlparams(next_url, fpa=1)
        res = HttpResponseRedirect(next_url)
        # Session-existence cookie lifetime mirrors the session cookie
        # settings: browser-session-only or SESSION_COOKIE_AGE seconds.
        max_age = (None if settings.SESSION_EXPIRE_AT_BROWSER_CLOSE
                   else settings.SESSION_COOKIE_AGE)
        res.set_cookie(settings.SESSION_EXISTS_COOKIE, '1', secure=False,
                       max_age=max_age)
        return res
    if request.MOBILE:
        # Mobile renders the login template directly instead of going
        # through the combined auth view.
        return render(request, template, {
            'form': form,
            'next_url': next_url})
    return user_auth(request, login_form=form)
def _get_wiki_link(title, locale):
    """Checks the page exists, and returns its URL or the URL to create it.

    Return value is a dict: {'found': boolean, 'url': string}.
    found is False if the document does not exist.
    """
    # Prevent circular import. sumo is conceptually a utils apps and
    # shouldn't have import-time (or really, any, but that's not going
    # to happen) dependencies on client apps.
    from kitsune.wiki.models import Document

    d = get_object_fallback(Document, locale=locale, title=title,
                            is_template=False)
    if d:
        # If the article redirects, follow the chain to its destination.
        # Track visited pks so a circular redirect chain can't loop
        # forever (and avoid calling redirect_document() twice per hop).
        seen = {d.pk}
        target = d.redirect_document()
        while target and target.pk not in seen:
            seen.add(target.pk)
            d = target
            target = d.redirect_document()

        # The locale in the link urls should always match the current
        # document's locale even if the document/slug being linked to
        # is in the default locale.
        url = reverse('wiki.document', locale=locale, args=[d.slug])
        return {'found': True, 'url': url, 'text': d.title}

    # To avoid circular imports, wiki.models imports wiki_to_html
    from kitsune.sumo.templatetags.jinja_helpers import urlparams
    return {'found': False,
            'text': title,
            'url': urlparams(reverse('wiki.new_document', locale=locale),
                             title=title)}
def get_last_post_url(self):
    """Return the URL of this thread's most recent post, on its page."""
    query = {'last': self.last_post_id}
    # Only pages past the first need an explicit ?page= parameter.
    if self.last_page > 1:
        query['page'] = self.last_page
    posts_url = reverse('forums.posts', args=[self.forum.slug, self.id])
    return urlparams(posts_url, hash='post-%s' % self.last_post_id, **query)
def test_question_feed_with_product_and_topic(self):
    """Test that questions feeds with products and topics work."""
    product = ProductFactory()
    topic = TopicFactory(product=product)
    url = urlparams(reverse('questions.list', args=[product.slug]),
                    topic=topic.slug)
    res = self.client.get(url)
    eq_(200, res.status_code)

    # Exactly one atom feed link, carrying both filters.
    feed_links = pq(res.content)('link[type="application/atom+xml"]')
    eq_(1, len(feed_links))
    feed = feed_links[0]
    eq_('Recently updated questions', feed.attrib['title'])
    eq_(urlparams('/en-US/questions/feed', product=product.slug,
                  topic=topic.slug),
        feed.attrib['href'])
    # The advertised feed URL must itself resolve.
    eq_(200, self.client.get(feed.attrib['href']).status_code)
def test_gallery_image_search(self):
    """Test for ajax endpoint with search parameter."""
    img = ImageFactory()

    # A query matching nothing returns an empty media list.
    url = urlparams(reverse('gallery.async'), type='image', q='foobar')
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)
    matches = pq(response.content)('#media-list li img')
    eq_(0, len(matches))

    # Searching by the image's own title finds exactly that image.
    url = urlparams(reverse('gallery.async'), type='image', q=img.title)
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)
    matches = pq(response.content)('#media-list li img')
    eq_(1, len(matches))
    eq_(img.thumbnail_url_if_set(), matches[0].attrib['src'])
def test_post_absolute_url(self):
    """A discussion post's URL should anchor to the post in its thread."""
    thread = ThreadFactory()
    post = thread.new_post(creator=thread.creator, content="foo")

    posts_url = reverse(
        "wiki.discuss.posts",
        locale=post.thread.document.locale,
        args=[post.thread.document.slug, post.thread.id],
    )
    expected = urlparams(posts_url, hash="post-%s" % post.id)
    eq_(expected, post.get_absolute_url())
def get_absolute_url(self):
    """Return the answer's URL within its question's detail page."""
    # Past page one we need an explicit ?page= parameter.
    query = {'page': self.page} if self.page > 1 else {}
    question_url = reverse('questions.details',
                           kwargs={'question_id': self.question_id})
    return urlparams(question_url, hash='answer-%s' % self.id, **query)
def sub_test(locale, *titles):
    """Fetch the question list for *locale* and verify the given titles."""
    url = urlparams(reverse('questions.list', args=['all'], locale=locale))
    response = self.client.get(url, follow=True)
    doc = pq(response.content)

    eq_msg(len(doc('section[id^=question]')), len(titles),
           'Wrong number of results for {0}'.format(locale))
    listed = doc('.questions section .content h2 a').text()
    for substr in titles:
        assert substr in listed
def get_absolute_url(self):
    """Return the post's URL on its page of the discussion thread."""
    # Only pages after the first need an explicit ?page= parameter.
    query = {'page': self.page} if self.page > 1 else {}
    posts_url = reverse('wiki.discuss.posts',
                        locale=self.thread.document.locale,
                        kwargs={'document_slug': self.thread.document.slug,
                                'thread_id': self.thread.id})
    return urlparams(posts_url, hash='post-%s' % self.id, **query)
def test_join_contributors(self):
    """Joining the contributors group redirects and adds the user."""
    next_url = reverse('groups.list')
    url = urlparams(reverse('groups.join_contributors', locale='en-US'),
                    next=next_url)

    # GET is not allowed.
    response = self.client.get(url)
    eq_(405, response.status_code)

    # POST joins the group and redirects to the next URL.
    response = self.client.post(url)
    eq_(302, response.status_code)
    eq_('http://testserver%s' % next_url, response['location'])
    assert self.user.groups.filter(name='Contributors').exists()
def test_recent_threads(self):
    """Verify the Community Discussions section."""
    ThreadFactory(forum__slug="contributors", title="we are SUMO!!!!!!")
    self.refresh()

    response = self.client.get(urlparams(reverse("community.home")))
    eq_(response.status_code, 200)
    page = pq(response.content)
    eq_(1, len(page("#recent-threads")))
    assert "we are SUMO!" in page("#recent-threads td").html()
def test_wiki_section(self):
    """Verify the wiki doc appears on the landing page."""
    # Without the news article the page still renders, minus the section.
    response = self.client.get(urlparams(reverse('community.home')))
    eq_(response.status_code, 200)
    eq_(len(pq(response.content)('#doc-content')), 0)

    # Once the article has an approved revision it appears on the page.
    document = DocumentFactory(title='Community Hub News',
                               slug='community-hub-news')
    document.current_revision = ApprovedRevisionFactory(
        document=document, content='splendid')
    document.save()

    response = self.client.get(urlparams(reverse('community.home')))
    eq_(response.status_code, 200)
    community_news = pq(response.content)('#doc-content')
    eq_(len(community_news), 1)
    assert 'splendid' in community_news.text()
def new_thread(request, forum_slug):
    """Start a new thread."""
    forum = get_object_or_404(Forum, slug=forum_slug)
    user = request.user
    if not forum.allows_posting_by(user):
        # Viewers who just can't post get a 403; everyone else gets a 404
        # so the forum's existence isn't revealed.
        if forum.allows_viewing_by(user):
            raise PermissionDenied
        else:
            raise Http404
    if request.method == "GET":
        # Show an empty thread form.
        form = NewThreadForm()
        return render(request, "forums/new_thread.html", {
            "form": form,
            "forum": forum
        })
    form = NewThreadForm(request.POST)
    post_preview = None
    if form.is_valid():
        if "preview" in request.POST:
            # Preview: build unsaved Thread/Post objects purely for
            # rendering; nothing is written to the database.
            thread = Thread(creator=request.user,
                            title=form.cleaned_data["title"])
            post_preview = Post(thread=thread, author=request.user,
                                content=form.cleaned_data["content"])
            post_preview.author_post_count = \
                post_preview.author.post_set.count()
        elif not is_ratelimited(request, "forum-post", "5/d"):
            # Real submission (and under the 5-per-day rate limit):
            # create the thread and its first post, then notify watchers.
            thread = forum.thread_set.create(
                creator=request.user, title=form.cleaned_data["title"])
            thread.save()
            post = thread.new_post(author=request.user,
                                   content=form.cleaned_data["content"])
            post.save()
            NewThreadEvent(post).fire(exclude=post.author)
            # Add notification automatically if needed.
            if Setting.get_for_user(request.user,
                                    "forums_watch_new_thread"):
                NewPostEvent.notify(request.user, thread)
            url = reverse("forums.posts", args=[forum_slug, thread.id])
            return HttpResponseRedirect(urlparams(url, last=post.id))
    # Invalid form, preview, or rate-limited: re-render the form page.
    return render(
        request,
        "forums/new_thread.html",
        {
            "form": form,
            "forum": forum,
            "post_preview": post_preview
        },
    )
def sub_test(locale, *titles):
    """Run the AAQ step-4 search for *locale* and verify result titles."""
    url = urlparams(reverse('questions.aaq_step4',
                            args=['desktop', 'fix-problems'],
                            locale=locale),
                    search='question')
    response = self.client.get(url, follow=True)
    doc = pq(response.content)

    eq_msg(len(doc('.result.question')), len(titles),
           'Wrong number of results for {0}'.format(locale))
    headings = doc('.result.question h3 a').text()
    for substr in titles:
        assert substr in headings
def test_recent_threads(self):
    """Verify the Community Discussions section."""
    ThreadFactory(forum__slug='contributors', title='we are SUMO!!!!!!')
    self.refresh()

    response = self.client.get(urlparams(reverse('community.home')))
    eq_(response.status_code, 200)
    page = pq(response.content)
    eq_(1, len(page('#recent-threads')))
    assert 'we are SUMO!' in page('#recent-threads li').html()
def sub_test(locale, *titles):
    """Check the question list for *locale* contains exactly *titles*."""
    url = urlparams(reverse("questions.list", args=["all"], locale=locale))
    response = self.client.get(url, follow=True)
    doc = pq(response.content)

    eq_msg(len(doc("article[id^=question]")), len(titles),
           "Wrong number of results for {0}".format(locale))
    headings = doc(".forum--question-item-heading a").text()
    for substr in titles:
        assert substr in headings
def private_message(user):
    """Return a link to private message the user."""
    if not user.is_active:
        # Empty element so a CSS :empty pseudo-selector can match it.
        return Markup("<div></div>")
    url = urlparams(reverse("messages.new"), to=user.username)
    msg = _("Private message")
    markup = '<p class="pm"><a class="sumo-button primary-button button-lg" href="{url}">{msg}</a></p>'  # noqa
    return Markup(markup.format(url=url, msg=msg))
def _new_question(self, post_it=False):
    """Post a new question and return the response."""
    product = ProductFactory(slug='mobile')
    question_locale = QuestionLocale.objects.get(
        locale=settings.LANGUAGE_CODE)
    product.questions_locales.add(question_locale)
    topic = TopicFactory(slug='fix-problems', product=product)
    url = urlparams(
        reverse('questions.aaq_step5', args=[product.slug, topic.slug]),
        search='A test question')
    if post_it:
        return self.client.post(url, self.data, follow=True)
    return self.client.get(url, follow=True)
def sub_test(locale, *titles):
    """Search AAQ step 4 in *locale* and confirm the expected titles."""
    step4_url = reverse('questions.aaq_step4',
                        args=['desktop', 'fix-problems'],
                        locale=locale)
    response = self.client.get(urlparams(step4_url, search='question'),
                               follow=True)
    doc = pq(response.content)

    eq_msg(len(doc('.result.question')), len(titles),
           'Wrong number of results for {0}'.format(locale))
    for substr in titles:
        assert substr in doc('.result.question h3 a').text()
def test_wiki_section(self):
    """Verify the wiki doc appears on the landing page."""
    # Before the article exists, the section is simply omitted.
    response = self.client.get(urlparams(reverse('community.home')))
    eq_(response.status_code, 200)
    eq_(len(pq(response.content)('#doc-content')), 0)

    # Create the article with an approved revision; it should now render.
    news_doc = DocumentFactory(title='Community Hub News',
                               slug='community-hub-news')
    news_doc.current_revision = ApprovedRevisionFactory(
        document=news_doc, content='splendid')
    news_doc.save()

    response = self.client.get(urlparams(reverse('community.home')))
    eq_(response.status_code, 200)
    section = pq(response.content)('#doc-content')
    eq_(len(section), 1)
    assert 'splendid' in section.text()
def test_recent_threads(self):
    """Verify the Community Discussions section."""
    ThreadFactory(forum__slug='contributors', title='we are SUMO!!!!!!')
    self.refresh()

    response = self.client.get(urlparams(reverse('community.home')))
    eq_(response.status_code, 200)
    home = pq(response.content)
    eq_(1, len(home('#recent-threads')))
    assert 'we are SUMO!' in home('#recent-threads td').html()
def test_results(self):
    """Searching should match against users' profile names."""
    UserFactory(username='******', profile__name='Foo Bar')
    UserFactory(username='******', profile__name='Bar Bam')
    self.refresh()

    # "bam" appears in only one profile name.
    response = self.client.get(
        urlparams(reverse('community.search'), q='bam'))
    eq_(response.status_code, 200)
    eq_(len(pq(response.content)('.results-user')), 1)

    # "bar" appears in both profile names.
    response = self.client.get(
        urlparams(reverse('community.search'), q='bar'))
    eq_(response.status_code, 200)
    eq_(len(pq(response.content)('.results-user')), 2)
def _new_question(self, post_it=False):
    """Post a new question and return the response."""
    mobile = ProductFactory(slug='mobile')
    mobile.questions_locales.add(
        QuestionLocale.objects.get(locale=settings.LANGUAGE_CODE))
    fix_problems = TopicFactory(slug='fix-problems', product=mobile)
    url = urlparams(reverse('questions.aaq_step5',
                            args=[mobile.slug, fix_problems.slug]),
                    search='A test question')
    if post_it:
        return self.client.post(url, self.data, follow=True)
    return self.client.get(url, follow=True)
def test_results(self):
    """Searching should match against users' profile names."""
    UserFactory(username='******', profile__name='Foo Bar')
    UserFactory(username='******', profile__name='Bar Bam')
    self.refresh()

    # Each query should return the expected number of matching users.
    for term, expected in (('bam', 1), ('bar', 2)):
        response = self.client.get(
            urlparams(reverse('community.search'), q=term))
        eq_(response.status_code, 200)
        eq_(len(pq(response.content)('.results-user')), expected)
def link(self, query):
    """Build the question-list URL for the given filter query."""
    # Collect product/topic slugs from whichever filters are present.
    slugs = {key: query[key].slug
             for key in ('product', 'topic') if key in query}
    url = reverse('questions.list',
                  args=[slugs.get('product', 'all')],
                  locale=query.get('locale'))
    return urlparams(url, **slugs)
def get_absolute_url(self):
    """Return the post's URL on the correct page of its thread."""
    # Only pages after the first need an explicit ?page= parameter.
    query = {'page': self.page} if self.page > 1 else {}
    document = self.thread.document
    posts_url = reverse('wiki.discuss.posts',
                        locale=document.locale,
                        kwargs={'document_slug': document.slug,
                                'thread_id': self.thread.id})
    return urlparams(posts_url, hash='post-%s' % self.id, **query)
def test_results(self):
    """Searching should match against users' profile names."""
    UserFactory(username="******", profile__name="Foo Bar")
    UserFactory(username="******", profile__name="Bar Bam")
    self.refresh()

    # "bam" is in one name; "bar" is in both.
    for term, expected in (("bam", 1), ("bar", 2)):
        response = self.client.get(
            urlparams(reverse("community.search"), q=term))
        eq_(response.status_code, 200)
        eq_(len(pq(response.content)(".results-user")), expected)
def process_request(self, request):
    """Locale middleware: normalize the URL's language prefix.

    May return a redirect (locale switch or prefix fix) or fall through,
    leaving request.path_info stripped of its locale prefix and the
    translation machinery activated for the resolved locale.
    """
    try:
        urlname = resolve(request.path_info).url_name
    except Resolver404:
        urlname = None
    if settings.OIDC_ENABLE and urlname in settings.OIDC_EXEMPT_URLS:
        # OIDC-exempt URLs always run under the default language.
        translation.activate(settings.LANGUAGE_CODE)
        return
    prefixer = Prefixer(request)
    set_url_prefixer(prefixer)
    full_path = prefixer.fix(prefixer.shortened_path)
    if request.GET.get('lang', '') in settings.SUMO_LANGUAGES:
        # Blank out the locale so that we can set a new one. Remove lang
        # from the query params so we don't have an infinite loop.
        prefixer.locale = ''
        new_path = prefixer.fix(prefixer.shortened_path)
        query = dict((smart_str(k), v) for k, v in request.GET.iteritems()
                     if k != 'lang')
        # 'lang' is only used on the language selection page. If this is
        # present it is safe to set language preference for the current
        # user.
        if request.user.is_anonymous():
            cookie = settings.LANGUAGE_COOKIE_NAME
            request.session[cookie] = request.GET['lang']
        return HttpResponseRedirect(urlparams(new_path, **query))
    if full_path != request.path:
        # The canonical locale-prefixed path differs from what was
        # requested: redirect, preserving the original query string.
        query_string = request.META.get('QUERY_STRING', '')
        full_path = urllib.quote(full_path.encode('utf-8'))
        if query_string:
            full_path = '%s?%s' % (full_path, query_string)
        response = HttpResponseRedirect(full_path)
        # Vary on Accept-Language if we changed the locale
        old_locale = prefixer.locale
        new_locale, _ = split_path(full_path)
        if old_locale != new_locale:
            response['Vary'] = 'Accept-Language'
        return response
    # Fall-through: hand the locale-stripped path to the URL resolver
    # and activate translations for the detected locale.
    request.path_info = '/' + prefixer.shortened_path
    request.LANGUAGE_CODE = prefixer.locale
    translation.activate(prefixer.locale)
def extract_document(cls, obj_id, obj=None):
    """Extracts indexable attributes from an Answer."""
    # Columns on the Answer row itself, plus columns reached through the
    # related Question via values()-style double-underscore lookups.
    fields = ['id', 'created', 'creator_id', 'question_id']
    composed_fields = [
        'question__locale',
        'question__solution_id',
        'question__creator_id',
        'question__product_id']
    all_fields = fields + composed_fields
    if obj is None:
        # No instance provided: fetch only the needed columns in one query.
        model = cls.get_model()
        obj_dict = model.objects.values(*all_fields).get(pk=obj_id)
    else:
        # Instance provided: mirror the values() dict shape by hand.
        obj_dict = dict([(field, getattr(obj, field)) for field in fields])
        obj_dict['question__locale'] = obj.question.locale
        obj_dict['question__solution_id'] = obj.question.solution_id
        obj_dict['question__creator_id'] = obj.question.creator_id
        obj_dict['question__product_id'] = obj.question.product_id
    d = {}
    d['id'] = obj_dict['id']
    d['model'] = cls.get_mapping_type_name()
    # We do this because get_absolute_url is an instance method
    # and we don't want to create an instance because it's a DB
    # hit and expensive. So we do it by hand. get_absolute_url
    # doesn't change much, so this is probably ok.
    url = reverse('questions.details',
                  kwargs={'question_id': obj_dict['question_id']})
    d['url'] = urlparams(url, hash='answer-%s' % obj_dict['id'])
    d['indexed_on'] = int(time.time())
    d['created'] = obj_dict['created']
    d['locale'] = obj_dict['question__locale']
    # An answer is the solution iff the question points back at it.
    d['is_solution'] = (
        obj_dict['id'] == obj_dict['question__solution_id'])
    d['creator_id'] = obj_dict['creator_id']
    d['by_asker'] = (
        obj_dict['creator_id'] == obj_dict['question__creator_id'])
    products = Product.objects.filter(id=obj_dict['question__product_id'])
    d['product'] = [p.slug for p in products]
    # Helpful/unhelpful tallies come from the votes on this answer.
    related_votes = AnswerVote.objects.filter(answer_id=obj_dict['id'])
    d['helpful_count'] = related_votes.filter(helpful=True).count()
    d['unhelpful_count'] = related_votes.filter(helpful=False).count()
    return d
def extract_document(cls, obj_id, obj=None):
    """Extracts indexable attributes from an Answer."""
    # Direct Answer columns plus related-Question columns, expressed as
    # double-underscore lookups so values() can fetch them in one query.
    fields = ['id', 'created', 'creator_id', 'question_id']
    composed_fields = [
        'question__locale',
        'question__solution_id',
        'question__creator_id',
        'question__product_id']
    all_fields = fields + composed_fields
    if obj is None:
        # Fetch only the columns we need rather than the full model row.
        model = cls.get_model()
        obj_dict = model.objects.values(*all_fields).get(pk=obj_id)
    else:
        # An instance was supplied: build the same dict shape by hand.
        obj_dict = dict([(field, getattr(obj, field)) for field in fields])
        obj_dict['question__locale'] = obj.question.locale
        obj_dict['question__solution_id'] = obj.question.solution_id
        obj_dict['question__creator_id'] = obj.question.creator_id
        obj_dict['question__product_id'] = obj.question.product_id
    d = {}
    d['id'] = obj_dict['id']
    d['model'] = cls.get_mapping_type_name()
    # We do this because get_absolute_url is an instance method
    # and we don't want to create an instance because it's a DB
    # hit and expensive. So we do it by hand. get_absolute_url
    # doesn't change much, so this is probably ok.
    url = reverse('questions.details',
                  kwargs={'question_id': obj_dict['question_id']})
    d['url'] = urlparams(url, hash='answer-%s' % obj_dict['id'])
    d['indexed_on'] = int(time.time())
    d['created'] = obj_dict['created']
    d['locale'] = obj_dict['question__locale']
    # The answer is the solution iff the question references its id.
    d['is_solution'] = (
        obj_dict['id'] == obj_dict['question__solution_id'])
    d['creator_id'] = obj_dict['creator_id']
    d['by_asker'] = (
        obj_dict['creator_id'] == obj_dict['question__creator_id'])
    products = Product.objects.filter(id=obj_dict['question__product_id'])
    d['product'] = [p.slug for p in products]
    # Vote tallies for the answer, split by helpfulness.
    related_votes = AnswerVote.objects.filter(answer_id=obj_dict['id'])
    d['helpful_count'] = related_votes.filter(helpful=True).count()
    d['unhelpful_count'] = related_votes.filter(helpful=False).count()
    return d
def test_no_xss(self):
    """Script tags in the locale parameter must come back escaped."""
    bad_string = 'locale=en-US8fa4a</script><script>alert(1)</script>'
    good_string = 'locale=en-US8fa4a<\/script><script>alert(1)<\/script>'

    url = urlparams(
        reverse('community.top_contributors_new', args=['l10n']),
        locale=bad_string)
    res = self.client.get(url)
    eq_(res.status_code, 200)

    target = pq(res.content)('script[name="contributor-data"]')
    assert bad_string not in target.html()
    assert good_string in target.html()
def test_no_xss(self):
    """Raw script tags in ?locale= must be escaped in the page output."""
    bad_string = 'locale=en-US8fa4a</script><script>alert(1)</script>'
    good_string = 'locale=en-US8fa4a<\/script><script>alert(1)<\/script>'

    base = reverse('community.top_contributors_new', args=['l10n'])
    res = self.client.get(urlparams(base, locale=bad_string))
    eq_(res.status_code, 200)

    data_script = pq(res.content)('script[name="contributor-data"]')
    assert bad_string not in data_script.html()
    assert good_string in data_script.html()
def test_top_army_of_awesome(self):
    """Both repliers should be listed as top AoA contributors."""
    replies = [ReplyFactory(), ReplyFactory()]
    self.refresh()

    response = self.client.get(urlparams(
        reverse('community.top_contributors', args=['army-of-awesome'])))
    eq_(200, response.status_code)

    eq_(2, len(pq(response.content)('li.results-user')))
    for reply in replies:
        assert str(reply.user.username) in response.content
def test_top_questions(self):
    """Both answerers should appear among the top question contributors."""
    answers = [AnswerFactory(), AnswerFactory()]
    self.refresh()

    response = self.client.get(urlparams(
        reverse('community.top_contributors', args=['questions'])))
    eq_(200, response.status_code)

    eq_(2, len(pq(response.content)('li.results-user')))
    for answer in answers:
        assert str(answer.creator.username) in response.content
def link(self, query):
    """Build the question-list URL from the query's product/topic/locale."""
    # Pull slugs for whichever of the two filters are present.
    slugs = {name: query[name].slug
             for name in ("product", "topic") if name in query}
    url = reverse("questions.list",
                  args=[slugs.get("product", "all")],
                  locale=query.get("locale"))
    return urlparams(url, **slugs)
def test_top_l10n(self):
    """Both localizers should be listed as top l10n contributors."""
    spanish_doc = DocumentFactory(locale="es")
    revisions = [RevisionFactory(document=spanish_doc),
                 RevisionFactory(document=spanish_doc)]
    self.refresh()

    response = self.client.get(
        urlparams(reverse("community.top_contributors", args=["l10n"])))
    eq_(200, response.status_code)

    eq_(2, len(pq(response.content)("li.results-user")))
    for revision in revisions:
        assert str(revision.creator.username) in response.content
def test_top_kb(self):
    """Both KB editors should be listed as top KB contributors."""
    kb_doc = DocumentFactory(locale='en-US')
    revisions = [RevisionFactory(document=kb_doc),
                 RevisionFactory(document=kb_doc)]
    self.refresh()

    response = self.client.get(
        urlparams(reverse('community.top_contributors', args=['kb'])))
    eq_(200, response.status_code)

    eq_(2, len(pq(response.content)('li.results-user')))
    for revision in revisions:
        assert str(revision.creator.username) in response.content
def test_top_kb(self):
    """Editors of an en-US doc should show up as top KB contributors."""
    doc = DocumentFactory(locale='en-US')
    rev_a = RevisionFactory(document=doc)
    rev_b = RevisionFactory(document=doc)
    self.refresh()

    url = urlparams(reverse('community.top_contributors', args=['kb']))
    response = self.client.get(url)
    eq_(200, response.status_code)

    listed = pq(response.content)('li.results-user')
    eq_(2, len(listed))
    assert str(rev_a.creator.username) in response.content
    assert str(rev_b.creator.username) in response.content
def link(self, query):
    """Return the question-list URL matching this facet query."""
    slugs = {}
    for facet in ('product', 'topic'):
        if facet in query:
            slugs[facet] = query[facet].slug
    url = reverse('questions.list',
                  args=[slugs.get('product', 'all')],
                  locale=query.get('locale'))
    return urlparams(url, **slugs)
def test_top_questions(self):
    """Answer authors should appear in the top questions contributors."""
    first = AnswerFactory()
    second = AnswerFactory()
    self.refresh()

    url = urlparams(
        reverse('community.top_contributors', args=['questions']))
    response = self.client.get(url)
    eq_(200, response.status_code)

    eq_(2, len(pq(response.content)('li.results-user')))
    assert str(first.creator.username) in response.content
    assert str(second.creator.username) in response.content
def get_absolute_url(self, locale, product=None):
    """Return the dashboard detail URL for this readout.

    Raises KeyError if the readout's slug is not registered in either
    the l10n or the contributor readout tables.
    """
    if self.slug in L10N_READOUTS:
        url = reverse('dashboards.localization_detail',
                      kwargs={'readout_slug': self.slug}, locale=locale)
    elif self.slug in CONTRIBUTOR_READOUTS:
        url = reverse('dashboards.contributors_detail',
                      kwargs={'readout_slug': self.slug}, locale=locale)
    else:
        raise KeyError('This Readout was not found: %s' % self.slug)
    # Append the product filter only when one was supplied.
    return urlparams(url, product=product.slug) if product else url
def test_question_feed_with_locale(self):
    """Test that questions feeds with products and topics work."""
    res = self.client.get(
        reverse('questions.list', args=['all'], locale='pt-BR'))
    eq_(200, res.status_code)

    feed_links = pq(res.content)('link[type="application/atom+xml"]')
    eq_(1, len(feed_links))
    feed = feed_links[0]
    eq_('Recently updated questions', feed.attrib['title'])
    eq_(urlparams('/pt-BR/questions/feed?product=all'),
        feed.attrib['href'])
    # The advertised feed URL must itself be fetchable.
    eq_(200, self.client.get(feed.attrib['href']).status_code)
def get_absolute_url(self):
    """Return the post's URL on the proper page of its discussion."""
    # Only pages after the first carry an explicit ?page= parameter.
    query = {"page": self.page} if self.page > 1 else {}
    document = self.thread.document
    posts_url = reverse(
        "wiki.discuss.posts",
        locale=document.locale,
        kwargs={
            "document_slug": document.slug,
            "thread_id": self.thread.id,
        },
    )
    return urlparams(posts_url, hash="post-%s" % self.id, **query)
def test_question_feed_with_locale(self):
    """Test that questions feeds with products and topics work."""
    res = self.client.get(
        reverse("questions.list", args=["all"], locale="pt-BR"))
    eq_(200, res.status_code)

    feed_links = pq(res.content)('link[type="application/atom+xml"]')
    eq_(1, len(feed_links))
    feed = feed_links[0]
    eq_("Recently updated questions", feed.attrib["title"])
    eq_(urlparams("/pt-BR/questions/feed?product=all"),
        feed.attrib["href"])
    # The advertised feed URL must itself resolve.
    eq_(200, self.client.get(feed.attrib["href"]).status_code)
def test_tagged_feed_link(self):
    """Make sure the tagged feed is discoverable on the questions page."""
    TagFactory(name="green", slug="green")
    url = urlparams(reverse("questions.list", args=["all"]), tagged="green")
    response = self.client.get(url)
    eq_(200, response.status_code)

    feed_links = pq(response.content)('link[type="application/atom+xml"]')
    eq_(2, len(feed_links))
    general, tagged = feed_links
    eq_("Recently updated questions", general.attrib["title"])
    eq_("/en-US/questions/feed?product=all", general.attrib["href"])
    eq_("Recently updated questions tagged green", tagged.attrib["title"])
    eq_("/en-US/questions/tagged/green/feed", tagged.attrib["href"])
def test_tagged_feed_link(self):
    """Make sure the tagged feed is discoverable on the questions page."""
    TagFactory(name='green', slug='green')
    response = self.client.get(
        urlparams(reverse('questions.list', args=['all']), tagged='green'))
    eq_(200, response.status_code)

    feed_links = pq(response.content)('link[type="application/atom+xml"]')
    eq_(2, len(feed_links))
    general, tagged = feed_links
    eq_('Recently updated questions', general.attrib['title'])
    eq_('/en-US/questions/feed?product=all', general.attrib['href'])
    eq_('Recently updated questions tagged green', tagged.attrib['title'])
    eq_('/en-US/questions/tagged/green/feed', tagged.attrib['href'])
def login(request):
    """Wrapper around user_auth that logs users in with Firefox Accounts."""
    if request.method == "GET":
        # GETs go to the auth view, keeping the original query string.
        return HttpResponsePermanentRedirect(
            reverse("users.auth") + "?" + request.GET.urlencode())

    if request.user.is_authenticated:
        # Already signed in: send the user to their own profile page.
        profile_url = urlparams(
            reverse("users.profile", args=[request.user.username]),
            fpa=1,
        )
        return HttpResponseRedirect(profile_url)

    return user_auth(request)
def sub_test(locale, *titles):
    """Search AAQ step 4 in *locale* and confirm the expected titles."""
    step4_url = reverse(
        "questions.aaq_step4",
        args=["desktop", "fix-problems"],
        locale=locale,
    )
    response = self.client.get(urlparams(step4_url, search="question"),
                               follow=True)
    doc = pq(response.content)

    eq_msg(
        len(doc(".result.question")),
        len(titles),
        "Wrong number of results for {0}".format(locale),
    )
    headings = doc(".result.question h3 a").text()
    for substr in titles:
        assert substr in headings
def test_top_contributors(self):
    """Verify the top contributors appear."""
    # FIXME: Change this to batch creation
    # One KB editor, two localizers, three question answerers.
    RevisionFactory(document__locale="en-US")
    spanish_doc = DocumentFactory(locale="es")
    RevisionFactory(document=spanish_doc)
    RevisionFactory(document=spanish_doc)
    AnswerFactory()
    AnswerFactory()
    AnswerFactory()
    self.refresh()

    response = self.client.get(urlparams(reverse("community.home")))
    eq_(response.status_code, 200)
    page = pq(response.content)
    eq_(1, len(page("ul.kb > li")))
    eq_(2, len(page("ul.l10n > li")))
    eq_(3, len(page("ul.questions > li")))
def test_login_next_parameter(self):
    """Test with a valid ?next=url parameter."""
    next_url = '/kb/new'

    # The next parameter should surface as a hidden form field.
    response = self.client.get(
        urlparams(reverse('users.login'), next=next_url), follow=True)
    eq_(200, response.status_code)
    doc = pq(response.content)
    eq_(next_url, doc('#login input[name="next"]')[0].attrib['value'])

    # The POST should redirect to that URL with fpa=1 appended.
    response = self.client.post(reverse('users.login'), {
        'username': self.u.username,
        'password': '******',
        'next': next_url,
    })
    eq_(302, response.status_code)
    eq_(next_url + '?fpa=1', response['location'])