def test_locale_filter(self):
    """Test filtering results by locale."""
    today = date.today()

    # Create 3 wikimetrics for es:
    for i in range(3):
        wikimetric(
            locale='es', date=today - timedelta(days=i), save=True)

    # Create 1 for fr:
    wikimetric(locale='fr', save=True)

    # Call and verify the API for locale=es.
    response = self.client.get(
        urlparams(reverse('api.wikimetric_list'), format='json',
                  locale='es'))
    eq_(200, response.status_code)
    results = json.loads(response.content)['results']
    eq_(3, len(results))

    # Call and verify the API for locale=fr.
    response = self.client.get(
        urlparams(reverse('api.wikimetric_list'), format='json',
                  locale='fr'))
    eq_(200, response.status_code)
    results = json.loads(response.content)['results']
    eq_(1, len(results))

def test_post_absolute_url(self):
    t = ThreadFactory(posts=[])
    # Fill out the first page with posts from yesterday.
    p1 = PostFactory(thread=t, created=YESTERDAY)
    PostFactory.create_batch(POSTS_PER_PAGE - 1, created=YESTERDAY, thread=t)
    # Second page post from today.
    p2 = PostFactory(thread=t)

    url = reverse('forums.posts',
                  kwargs={'forum_slug': p1.thread.forum.slug,
                          'thread_id': p1.thread.id})
    eq_(urlparams(url, hash='post-%s' % p1.id), p1.get_absolute_url())

    url = reverse('forums.posts',
                  kwargs={'forum_slug': p2.thread.forum.slug,
                          'thread_id': p2.thread.id})
    exp_ = urlparams(url, hash='post-%s' % p2.id, page=2)
    eq_(exp_, p2.get_absolute_url())

def test_product_filter(self):
    """Test filtering results by product."""
    today = date.today()

    # Create products and associated wiki metrics.
    p1 = ProductFactory()
    p2 = ProductFactory()

    # Create 3 for each product:
    for i in range(3):
        for p in [p1, p2]:
            WikiMetricFactory(date=today - timedelta(days=i), product=p)
    # Create one more for p2.
    WikiMetricFactory(date=today - timedelta(days=4), product=p2)

    # Call and verify the API for product=p1.
    response = self.client.get(
        urlparams(reverse('api.wikimetric_list'), format='json',
                  product=p1.slug))
    eq_(200, response.status_code)
    results = json.loads(response.content)['results']
    eq_(3, len(results))

    # Call and verify the API for product=p2.
    response = self.client.get(
        urlparams(reverse('api.wikimetric_list'), format='json',
                  product=p2.slug))
    eq_(200, response.status_code)
    results = json.loads(response.content)['results']
    eq_(4, len(results))

def test_wiki_section(self):
    """Verify the wiki doc appears on the landing page."""
    # If the "Community Hub News" article doesn't exist, the home page
    # should still work and omit the section.
    response = self.client.get(urlparams(reverse('community.home')))
    eq_(response.status_code, 200)
    doc = pq(response.content)
    eq_(len(doc('#doc-content')), 0)

    # Create the "Community Hub News" article and verify it on the home page.
    d = document(title='Community Hub News', slug='community-hub-news',
                 save=True)
    rev = revision(document=d, content='splendid', is_approved=True,
                   save=True)
    d.current_revision = rev
    d.save()
    response = self.client.get(urlparams(reverse('community.home')))
    eq_(response.status_code, 200)
    doc = pq(response.content)
    community_news = doc('#doc-content')
    eq_(len(community_news), 1)
    assert 'splendid' in community_news.text()

def test_code_filter(self):
    """Test filtering results by code."""
    today = date.today()

    # Create 3 wikimetrics for active_contributors:
    for i in range(3):
        wikimetric(
            code=METRIC_CODE_CHOICES[0][0],
            date=today - timedelta(days=i),
            save=True)

    # Create 1 for percent_localized_all:
    wikimetric(code=METRIC_CODE_CHOICES[1][0], save=True)

    # Call and verify the API for code=METRIC_CODE_CHOICES[0].
    response = self.client.get(
        urlparams(reverse('api.wikimetric_list'), format='json',
                  code=METRIC_CODE_CHOICES[0][0]))
    eq_(200, response.status_code)
    results = json.loads(response.content)['results']
    eq_(3, len(results))

    # Call and verify the API for code=METRIC_CODE_CHOICES[1].
    response = self.client.get(
        urlparams(reverse('api.wikimetric_list'), format='json',
                  code=METRIC_CODE_CHOICES[1][0]))
    eq_(200, response.status_code)
    results = json.loads(response.content)['results']
    eq_(1, len(results))

def _mails(self, users_and_watches):
    """Send one kind of mail to the asker and another to other watchers."""
    # Cache answer.question, similar to caching solution.question below.
    self.answer.question = self.instance
    asker_id = self.answer.question.creator.id

    c = {'answer': self.answer.content,
         'answer_html': self.answer.content_parsed,
         'answerer': self.answer.creator.username,
         'question_title': self.instance.title,
         'host': Site.objects.get_current().domain}

    @email_utils.safe_translation
    def _make_mail(locale, user, context):
        is_asker = asker_id == user.id

        if is_asker:
            subject = _(
                u'%s posted an answer to your question "%s"' %
                (self.answer.creator.username, self.instance.title))
            text_template = 'questions/email/new_answer_to_asker.ltxt'
            html_template = 'questions/email/new_answer_to_asker.html'
        else:
            subject = _(u'%s commented on a Firefox question '
                        "you're watching" % self.answer.creator.username)
            text_template = 'questions/email/new_answer.ltxt'
            html_template = 'questions/email/new_answer.html'

        mail = email_utils.make_mail(
            subject=subject,
            text_template=text_template,
            html_template=html_template,
            context_vars=context,
            from_email=settings.TIDINGS_FROM_ADDRESS,
            to_email=user.email)

        return mail

    for u, w in users_and_watches:
        auth_str = get_auth_str(self.answer.question.creator)

        answer_url = self.answer.get_absolute_url()
        helpful_url = self.answer.get_helpful_answer_url()
        solution_url = self.answer.get_solution_url(watch=w[0])

        c['answer_url'] = urlparams(answer_url, auth=auth_str)
        c['helpful_url'] = urlparams(helpful_url, auth=auth_str)
        c['solution_url'] = urlparams(solution_url, auth=auth_str)
        c['username'] = u.username
        c['watch'] = w[0]  # TODO: Expose all watches.

        # u here can be a Django User model or a Tidings EmailUser
        # model. In the case of the latter, there is no associated
        # profile, so we set the locale to en-US.
        if hasattr(u, 'profile'):
            locale = u.profile.locale
        else:
            locale = 'en-US'

        yield _make_mail(locale, u, c)

def _mails(self, users_and_watches):
    """Send one kind of mail to the asker and another to other watchers."""
    # Cache answer.question, similar to caching solution.question below.
    self.answer.question = self.instance
    asker_id = self.answer.question.creator.id

    c = {'answer': self.answer.content,
         'answer_html': self.answer.content_parsed,
         'answerer': self.answer.creator,
         'created': self.answer.created,
         'question_title': self.instance.title,
         'host': Site.objects.get_current().domain}

    @email_utils.safe_translation
    def _make_mail(locale, user, context):
        is_asker = asker_id == user.id

        if is_asker:
            subject = _(u'%s posted an answer to your question "%s"' %
                        (self.answer.creator.username, self.instance.title))
            text_template = 'questions/email/new_answer_to_asker.ltxt'
            html_template = 'questions/email/new_answer_to_asker.html'
        else:
            subject = _(u'%s commented on a Firefox question '
                        "you're watching" % self.answer.creator.username)
            text_template = 'questions/email/new_answer.ltxt'
            html_template = 'questions/email/new_answer.html'

        mail = email_utils.make_mail(
            subject=subject,
            text_template=text_template,
            html_template=html_template,
            context_vars=context,
            from_email=settings.TIDINGS_FROM_ADDRESS,
            to_email=user.email)

        return mail

    for u, w in users_and_watches:
        auth_str = get_auth_str(self.answer.question.creator)

        answer_url = self.answer.get_absolute_url()
        helpful_url = self.answer.get_helpful_answer_url()
        solution_url = self.answer.get_solution_url(watch=w[0])

        c['answer_url'] = urlparams(answer_url, auth=auth_str)
        c['helpful_url'] = urlparams(helpful_url, auth=auth_str)
        c['solution_url'] = urlparams(solution_url, auth=auth_str)
        c['username'] = u.username
        c['watch'] = w[0]  # TODO: Expose all watches.

        # u here can be a Django User model or a Tidings EmailUser
        # model. In the case of the latter, there is no associated
        # profile, so we set the locale to en-US.
        if hasattr(u, 'profile'):
            locale = u.profile.locale
        else:
            locale = 'en-US'

        yield _make_mail(locale, u, c)

def test_no_update_tagging(self):
    url = urlparams(reverse('questions.add_tag', args=[self.q.id]))
    self._request_and_no_update(url, req_type='POST',
                                data={'tag-name': 'foo'})

    url = urlparams(reverse('questions.remove_tag', args=[self.q.id]))
    self._request_and_no_update(url, req_type='POST',
                                data={'remove-tag-foo': 1})

def test_question_feed_with_locale(self):
    """Test that question feeds work with a non-default locale."""
    url = urlparams(reverse('questions.questions', locale='pt-BR'))
    res = self.client.get(url)
    doc = pq(res.content)

    feed_links = doc('link[type="application/atom+xml"]')
    feed = feed_links[0]
    eq_(1, len(feed_links))
    eq_('Recently updated questions', feed.attrib['title'])
    eq_(urlparams('/pt-BR/questions/feed'), feed.attrib['href'])

def test_question_feed_with_product_and_topic(self):
    """Test that questions feeds with products and topics work."""
    p = product(save=True)
    t = topic(product=p, save=True)
    url = urlparams(reverse('questions.list', args=[p.slug]),
                    topic=t.slug)
    res = self.client.get(url)
    doc = pq(res.content)

    feed_links = doc('link[type="application/atom+xml"]')
    feed = feed_links[0]
    eq_(1, len(feed_links))
    eq_('Recently updated questions', feed.attrib['title'])
    eq_(urlparams('/en-US/questions/feed', product=p.slug, topic=t.slug),
        feed.attrib['href'])

def test_known_signature(self):
    sig = SignatureFactory()
    url = urlparams(reverse('postcrash.api'), s=sig.signature)
    response = self.client.get(url)
    eq_(200, response.status_code)
    eq_('https://example.com/kb/%s' % sig.document.slug, response.content)
    eq_('text/plain', response['content-type'])

def _get_api_result(self, name, **kwargs):
    """Helper to make API calls, parse the json and return the result."""
    url = reverse(name)
    url = urlparams(url, format='json', **kwargs)
    response = self.client.get(url)
    eq_(200, response.status_code)
    return json.loads(response.content)

def test_default(self):
    """Test the default API call (no filtering)."""
    today = date.today()

    # Create 10 wikimetrics.
    for i in range(10):
        wikimetric(
            code=METRIC_CODE_CHOICES[i % len(METRIC_CODE_CHOICES)][0],
            date=today - timedelta(days=i),
            value=i,
            save=True)

    # Call the API.
    response = self.client.get(
        urlparams(reverse('api.wikimetric_list'), format='json'))
    eq_(200, response.status_code)
    results = json.loads(response.content)['results']

    # Verify the results are what we created.
    eq_(10, len(results))
    for i in range(10):
        result = results[i]
        eq_(i, result['value'])
        eq_(METRIC_CODE_CHOICES[i % len(METRIC_CODE_CHOICES)][0],
            result['code'])
        eq_(str(today - timedelta(days=i)), result['date'])

def test_top_contributors(self):
    """Verify the top contributors appear."""
    d = document(locale='en-US', save=True)
    revision(document=d, save=True)
    d = document(locale='es', save=True)
    revision(document=d, save=True)
    revision(document=d, save=True)
    answer(save=True)
    answer(save=True)
    answer(save=True)
    reply(user=user(save=True), save=True)
    reply(user=user(save=True), save=True)
    reply(user=user(save=True), save=True)
    reply(user=user(save=True), save=True)

    for u in User.objects.all():
        profile(user=u)

    self.refresh()

    response = self.client.get(urlparams(reverse('community.home')))
    eq_(response.status_code, 200)
    doc = pq(response.content)
    eq_(1, len(doc('ul.kb > li')))
    eq_(2, len(doc('ul.l10n > li')))
    eq_(3, len(doc('ul.questions > li')))
    eq_(4, len(doc('ul.army-of-awesome > li')))

def get_absolute_url(self):
    """Return the URL to this answer, on the answers page it appears on."""
    query = {}
    if self.page > 1:
        query = {"page": self.page}

    url = reverse("questions.answers",
                  kwargs={"question_id": self.question_id})
    return urlparams(url, hash="answer-%s" % self.id, **query)

def _get_wiki_link(title, locale):
    """Checks the page exists, and returns its URL or the URL to create it.

    Return value is a dict: {'found': boolean, 'url': string}.
    found is False if the document does not exist.
    """
    # Prevent circular import. sumo is conceptually a utils app and
    # shouldn't have import-time (or really, any, but that's not going
    # to happen) dependencies on client apps.
    from kitsune.wiki.models import Document

    d = get_object_fallback(Document, locale=locale, title=title,
                            is_template=False)
    if d:
        # If the article redirects, use its destination article.
        while d.redirect_document():
            d = d.redirect_document()

        # The locale in the link urls should always match the current
        # document's locale even if the document/slug being linked to
        # is in the default locale.
        url = reverse('wiki.document', locale=locale, args=[d.slug])
        return {'found': True, 'url': url, 'text': d.title}

    # To avoid circular imports, wiki.models imports wiki_to_html
    from kitsune.sumo.helpers import urlparams
    return {'found': False, 'text': title,
            'url': urlparams(reverse('wiki.new_document', locale=locale),
                             title=title)}

def test_ratelimit(self):
    """Make sure posting new questions is ratelimited"""
    data = {"title": "A test question",
            "content": "I have this question that I hope...",
            "sites_affected": "http://example.com",
            "ff_version": "3.6.6",
            "os": "Intel Mac OS X 10.6",
            "plugins": "* Shockwave Flash 10.1 r53",
            "useragent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X "
                         "10.6; en-US; rv:1.9.2.6) Gecko/20100625 "
                         "Firefox/3.6.6"}
    p = product(slug="firefox", save=True)
    t = topic(slug="fix-problems", product=p, save=True)
    url = urlparams(
        reverse("questions.aaq_step5", args=["desktop", "fix-problems"]),
        search="A test question")

    u = user(save=True)
    self.client.login(username=u.username, password="******")
    for i in range(0, 5):
        self.client.post(url, data, follow=True)

    response = self.client.post(url, data, follow=True)
    eq_(403, response.status_code)

def get_last_post_url(self):
    """Return the URL to the thread's last post, including its page."""
    query = {'last': self.last_post_id}
    page = self.last_page
    if page > 1:
        query['page'] = page
    url = reverse('forums.posts', args=[self.forum.slug, self.id])
    return urlparams(url, hash='post-%s' % self.last_post_id, **query)

def get_absolute_url(self):
    """Return the URL to this post within its thread."""
    query = {}
    if self.page > 1:
        query = {'page': self.page}
    url_ = self.thread.get_absolute_url()
    return urlparams(url_, hash='post-%s' % self.id, **query)

def test_no_results(self):
    UserFactory(username='******', profile__name='Foo Bar')
    self.refresh()
    response = self.client.get(
        urlparams(reverse('community.search'), q='baz'))
    eq_(response.status_code, 200)
    assert 'No users were found' in response.content

def test_ratelimit(self):
    """Make sure posting new questions is ratelimited"""
    data = {'title': 'A test question',
            'content': 'I have this question that I hope...',
            'sites_affected': 'http://example.com',
            'ff_version': '3.6.6',
            'os': 'Intel Mac OS X 10.6',
            'plugins': '* Shockwave Flash 10.1 r53',
            'useragent': 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X '
                         '10.6; en-US; rv:1.9.2.6) Gecko/20100625 '
                         'Firefox/3.6.6'}
    p = product(slug='firefox', save=True)
    l = QuestionLocale.objects.get(locale=settings.LANGUAGE_CODE)
    p.questions_locales.add(l)
    topic(slug='fix-problems', product=p, save=True)
    url = urlparams(
        reverse('questions.aaq_step5', args=['desktop', 'fix-problems']),
        search='A test question')

    u = user(save=True)
    self.client.login(username=u.username, password='******')
    for i in range(0, 5):
        self.client.post(url, data, follow=True)

    response = self.client.post(url, data, follow=True)
    eq_(403, response.status_code)

def _get_api_result(self, resource_name, **kwargs):
    """Helper to make API calls, parse the json and return the result."""
    url = reverse("api_dispatch_list",
                  kwargs={"resource_name": resource_name, "api_name": "v1"})
    url = urlparams(url, format="json", **kwargs)
    response = self.client.get(url)
    eq_(200, response.status_code)
    return json.loads(response.content)

def login(request, template):
    """Try to log the user in."""
    if request.method == 'GET' and not request.MOBILE:
        url = reverse('users.auth') + '?' + request.GET.urlencode()
        return HttpResponsePermanentRedirect(url)

    next_url = get_next_url(request) or reverse('home')
    form = handle_login(request)

    if request.user.is_authenticated():
        # Add a parameter so we know the user just logged in.
        # fpa = "first page authed" or something.
        next_url = urlparams(next_url, fpa=1)
        res = HttpResponseRedirect(next_url)
        max_age = (None if settings.SESSION_EXPIRE_AT_BROWSER_CLOSE
                   else settings.SESSION_COOKIE_AGE)
        res.set_cookie(settings.SESSION_EXISTS_COOKIE,
                       '1',
                       secure=False,
                       max_age=max_age)
        return res

    if request.MOBILE:
        return render(request, template, {
            'form': form,
            'next_url': next_url})

    return user_auth(request, login_form=form)

def test_search_suggestions_archived_articles(self):
    """Verifies that archived articles aren't shown."""
    topic(title='Fix problems', slug='fix-problems', save=True)
    p = product(slug=u'firefox', save=True)

    d1 = document(title=u'document donut', category=10, save=True)
    d1.products.add(p)
    revision(document=d1, is_approved=True, save=True)

    d2 = document(title=u'document cupcake', category=10, is_archived=True,
                  save=True)
    d2.products.add(p)
    revision(document=d2, is_approved=True, save=True)

    self.refresh()

    url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='document')
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)

    doc = pq(response.content)
    eq_(len(doc('.result.document')), 1)
    assert 'donut' in doc('.result.document h3 a').text()
    assert 'cupcake' not in doc('.result.document h3 a').text()

def get_solution_url(self, watch):
    """Return the URL for marking this answer as the solution."""
    url = reverse('questions.solve',
                  kwargs={'question_id': self.question_id,
                          'answer_id': self.id})
    return urlparams(url, watch=watch.secret)

def test_search_suggestions_archived_articles(self):
    """Verifies that archived articles aren't shown."""
    p = product(slug=u'firefox', save=True)
    topic(title='Fix problems', slug='fix-problems', product=p, save=True)

    d1 = document(title=u'document donut', category=10, save=True)
    d1.products.add(p)
    revision(document=d1, is_approved=True, save=True)

    d2 = document(title=u'document cupcake', category=10, is_archived=True,
                  save=True)
    d2.products.add(p)
    revision(document=d2, is_approved=True, save=True)

    self.refresh()

    url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='document')
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)

    doc = pq(response.content)
    eq_(len(doc('.result.document')), 1)
    assert 'donut' in doc('.result.document h3 a').text()
    assert 'cupcake' not in doc('.result.document h3 a').text()

def sub_test(locale, *titles):
    url = urlparams(reverse("questions.questions", locale=locale))
    response = self.client.get(url, follow=True)
    doc = pq(response.content)
    eq_msg(len(doc("section[id^=question]")), len(titles),
           "Wrong number of results for {0}".format(locale))
    for substr in titles:
        assert substr in doc(".questions section .content h2 a").text()

def test_search_suggestion_question_age(self):
    """Verifies the view doesn't return old questions."""
    p = product(slug=u'firefox', save=True)
    topic(title='Fix problems', slug='fix-problems', product=p, save=True)

    q1 = question(title='Fresh Cupcakes', save=True)
    q1.products.add(p)

    max_age = settings.SEARCH_DEFAULT_MAX_QUESTION_AGE
    too_old = datetime.now() - timedelta(seconds=max_age * 2)

    q2 = question(title='Stale Cupcakes', created=too_old, updated=too_old,
                  save=True)
    q2.products.add(p)

    self.refresh()

    url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='cupcakes')
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)

    self.assertContains(response, q1.title)
    self.assertNotContains(response, q2.title)

def test_search_suggestion_question_age(self):
    """Verifies the view doesn't return old questions."""
    topic(title='Fix problems', slug='fix-problems', save=True)
    p = product(slug=u'firefox', save=True)

    q1 = question(title='Fresh Cupcakes', save=True)
    q1.products.add(p)

    max_age = settings.SEARCH_DEFAULT_MAX_QUESTION_AGE
    too_old = datetime.now() - timedelta(seconds=max_age * 2)

    q2 = question(title='Stale Cupcakes', created=too_old, updated=too_old,
                  save=True)
    q2.products.add(p)

    self.refresh()

    url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='cupcakes')
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)

    self.assertContains(response, q1.title)
    self.assertNotContains(response, q2.title)

def _get_wiki_link(title, locale):
    """Checks the page exists, and returns its URL or the URL to create it.

    Return value is a dict: {'found': boolean, 'url': string}.
    found is False if the document does not exist.
    """
    # Prevent circular import. sumo is conceptually a utils app and
    # shouldn't have import-time (or really, any, but that's not going
    # to happen) dependencies on client apps.
    from kitsune.wiki.models import Document

    d = get_object_fallback(Document, locale=locale, title=title,
                            is_template=False)
    if d:
        # If the article redirects, use its destination article.
        while d.redirect_document():
            d = d.redirect_document()

        # The locale in the link urls should always match the current
        # document's locale even if the document/slug being linked to
        # is in the default locale.
        url = reverse("wiki.document", locale=locale, args=[d.slug])
        return {"found": True, "url": url, "text": d.title}

    # To avoid circular imports, wiki.models imports wiki_to_html
    from kitsune.sumo.helpers import urlparams
    return {"found": False, "text": title,
            "url": urlparams(reverse("wiki.new_document", locale=locale),
                             title=title)}

def sub_test(locale, *titles):
    url = urlparams(reverse('questions.questions', locale=locale))
    response = self.client.get(url, follow=True)
    doc = pq(response.content)
    eq_msg(len(doc('section[id^=question]')), len(titles),
           'Wrong number of results for {0}'.format(locale))
    for substr in titles:
        assert substr in doc('.questions section .content h2 a').text()

def get_absolute_url(self):
    """Return the URL to this answer, on the question page it appears on."""
    query = {}
    if self.page > 1:
        query = {'page': self.page}
    url = reverse('questions.details',
                  kwargs={'question_id': self.question_id})
    return urlparams(url, hash='answer-%s' % self.id, **query)

def test_gallery_image_search(self):
    """Test for ajax endpoint with search parameter."""
    img = image()
    url = urlparams(reverse('gallery.async'), type='image', q='foobar')
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)
    doc = pq(response.content)
    imgs = doc('#media-list li img')
    eq_(0, len(imgs))

    url = urlparams(reverse('gallery.async'), type='image', q=img.title)
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)
    doc = pq(response.content)
    imgs = doc('#media-list li img')
    eq_(1, len(imgs))
    eq_(img.thumbnail_url_if_set(), imgs[0].attrib['src'])

def get_absolute_url(self):
    """Return the URL to this answer, on the answers page it appears on."""
    query = {}
    if self.page > 1:
        query = {'page': self.page}
    url = reverse('questions.answers',
                  kwargs={'question_id': self.question_id})
    return urlparams(url, hash='answer-%s' % self.id, **query)

def get_absolute_url(self):
    """Return the URL to this answer, on the question page it appears on."""
    query = {}
    if self.page > 1:
        query = {'page': self.page}
    url = reverse('questions.details',
                  kwargs={'question_id': self.question_id})
    return urlparams(url, hash='answer-{0!s}'.format(self.id), **query)

def test_post_absolute_url(self):
    t = thread(save=True)
    p = t.new_post(creator=t.creator, content='foo')
    url_ = reverse('wiki.discuss.posts',
                   locale=p.thread.document.locale,
                   args=[p.thread.document.slug, p.thread.id])
    exp_ = urlparams(url_, hash='post-{0!s}'.format(p.id))
    eq_(exp_, p.get_absolute_url())

def _new_question(self, post_it=False):
    """Post a new question and return the response."""
    p = product(slug="mobile", save=True)
    t = topic(slug="fix-problems", product=p, save=True)
    url = urlparams(
        reverse("questions.aaq_step5", args=[p.slug, t.slug]),
        search="A test question")
    if post_it:
        return self.client.post(url, self.data, follow=True)
    return self.client.get(url, follow=True)

def test_post_absolute_url(self):
    t = ThreadFactory()
    p = t.new_post(creator=t.creator, content='foo')
    url_ = reverse('wiki.discuss.posts',
                   locale=p.thread.document.locale,
                   args=[p.thread.document.slug, p.thread.id])
    exp_ = urlparams(url_, hash='post-%s' % p.id)
    eq_(exp_, p.get_absolute_url())

def test_no_update_edit(self):
    url = urlparams(reverse('questions.edit_question', args=[self.q.id]))
    self._request_and_no_update(url, req_type='POST', data={
        'title': 'A new title.',
        'content': 'Some new content.'})

def sub_test(locale, *titles):
    url = urlparams(reverse(
        'questions.list', args=['all'], locale=locale))
    response = self.client.get(url, follow=True)
    doc = pq(response.content)
    eq_msg(len(doc('section[id^=question]')), len(titles),
           'Wrong number of results for {0}'.format(locale))
    for substr in titles:
        assert substr in doc('.questions section .content h2 a').text()

def _get_api_result(self, resource_name, **kwargs):
    """Helper to make API calls, parse the json and return the result."""
    url = reverse('api_dispatch_list',
                  kwargs={'resource_name': resource_name, 'api_name': 'v1'})
    url = urlparams(url, format='json', **kwargs)
    response = self.client.get(url)
    eq_(200, response.status_code)
    return json.loads(response.content)

def _new_question(self, post_it=False):
    """Post a new question and return the response."""
    p = product(slug='mobile', save=True)
    t = topic(slug='fix-problems', product=p, save=True)
    url = urlparams(
        reverse('questions.aaq_step5', args=[p.slug, t.slug]),
        search='A test question')
    if post_it:
        return self.client.post(url, self.data, follow=True)
    return self.client.get(url, follow=True)

def _new_question(self, post_it=False):
    """Post a new question and return the response."""
    topic(title='Fix problems', slug='fix-problems', save=True)
    url = urlparams(
        reverse('questions.aaq_step5', args=['desktop', 'fix-problems']),
        search='A test question')
    if post_it:
        return self.client.post(url, self.data, follow=True)
    return self.client.get(url, follow=True)

def new_thread(request, forum_slug):
    """Start a new thread."""
    forum = get_object_or_404(Forum, slug=forum_slug)
    user = request.user
    if not forum.allows_posting_by(user):
        if forum.allows_viewing_by(user):
            raise PermissionDenied
        else:
            raise Http404

    if request.method == 'GET':
        form = NewThreadForm()
        return render(request, 'forums/new_thread.html',
                      {'form': form, 'forum': forum})

    form = NewThreadForm(request.POST)
    post_preview = None
    if form.is_valid():
        if 'preview' in request.POST:
            thread = Thread(creator=request.user,
                            title=form.cleaned_data['title'])
            post_preview = Post(thread=thread, author=request.user,
                                content=form.cleaned_data['content'])
            post_preview.author_post_count = \
                post_preview.author.post_set.count()
        elif (_skip_post_ratelimit(request) or
              not is_ratelimited(request, increment=True, rate='5/d',
                                 ip=False, keys=user_or_ip('forum-post'))):
            thread = forum.thread_set.create(creator=request.user,
                                             title=form.cleaned_data['title'])
            thread.save()
            statsd.incr('forums.thread')
            post = thread.new_post(author=request.user,
                                   content=form.cleaned_data['content'])
            post.save()

            NewThreadEvent(post).fire(exclude=post.author)

            # Add notification automatically if needed.
            if Setting.get_for_user(request.user, 'forums_watch_new_thread'):
                NewPostEvent.notify(request.user, thread)

            url = reverse('forums.posts', args=[forum_slug, thread.id])
            return HttpResponseRedirect(urlparams(url, last=post.id))

    return render(request, 'forums/new_thread.html',
                  {'form': form, 'forum': forum,
                   'post_preview': post_preview})