def test_post_absolute_url(self):
    """Posts anchor to post-<id> on the thread page; page=2 past page one."""
    for pk, page_extra in ((1, {}), (24, {"page": 2})):
        post = Post.objects.get(pk=pk)
        base = reverse("forums.posts",
                       kwargs={"forum_slug": post.thread.forum.slug,
                               "thread_id": post.thread.id})
        expected = urlparams(base, hash="post-%s" % post.id, **page_extra)
        eq_(expected, post.get_absolute_url())
def test_no_update_tagging(self):
    """Tagging endpoints must not bump the question's updated timestamp."""
    add_url = urlparams(reverse('questions.add_tag', args=[self.q.id]))
    self._request_and_no_update(add_url, req_type='POST',
                                data={'tag-name': 'foo'})
    remove_url = urlparams(reverse('questions.remove_tag', args=[self.q.id]))
    self._request_and_no_update(remove_url, req_type='POST',
                                data={'remove-tag-foo': 1})
def test_no_update_tagging(self):
    """Adding or removing a tag must not update the question timestamp."""
    cases = (('questions.add_tag', {'tag-name': 'foo'}),
             ('questions.remove_tag', {'remove-tag-foo': 1}))
    for view_name, payload in cases:
        tag_url = urlparams(reverse(view_name, args=[self.q.id]))
        self._request_and_no_update(tag_url, req_type='POST', data=payload)
def test_post_absolute_url(self):
    """get_absolute_url anchors to the post, adding page=2 past page one."""
    for pk, page_kwargs in ((1, {}), (24, {'page': 2})):
        post = Post.objects.get(pk=pk)
        thread_url = reverse('wiki.discuss.posts',
                             args=[post.thread.document.slug, post.thread.id])
        expected = urlparams(thread_url, hash='post-%s' % post.id,
                             **page_kwargs)
        eq_(expected, post.get_absolute_url())
def test_compare_revisions_missing_query_param(self):
    """Try to compare two revisions, with a missing query string param."""
    # Either endpoint of the comparison missing should 404.
    for present_param in ("from", "to"):
        url = urlparams(
            reverse("wiki.compare_revisions", args=[self.document.slug]),
            **{present_param: self.revision1.id})
        response = self.client.get(url)
        eq_(404, response.status_code)
def document(request, document_slug): """View a wiki document.""" # If a slug isn't available in the requested locale, fall back to en-US: try: doc = Document.objects.get(locale=request.locale, slug=document_slug) except Document.DoesNotExist: # Look in default language: doc = get_object_or_404(Document, locale=settings.WIKI_DEFAULT_LANGUAGE, slug=document_slug) # If there's a translation to the requested locale, take it: translation = doc.translated_to(request.locale) if translation and translation.current_revision: doc = translation url = doc.get_absolute_url() url = urlparams(url, query_dict=request.GET) return HttpResponseRedirect(url) # Obey explicit redirect pages: # Don't redirect on redirect=no (like Wikipedia), so we can link from a # redirected-to-page back to a "Redirected from..." link, so you can edit # the redirect. redirect_url = (None if request.GET.get('redirect') == 'no' else doc.redirect_url()) if redirect_url: url = urlparams(redirect_url, query_dict=request.GET, redirectslug=doc.slug, redirectlocale=doc.locale) return HttpResponseRedirect(url) # Get "redirected from" doc if we were redirected: redirect_slug = request.GET.get('redirectslug') redirect_locale = request.GET.get('redirectlocale') redirected_from = None if redirect_slug and redirect_locale: try: redirected_from = Document.objects.get(locale=redirect_locale, slug=redirect_slug) except Document.DoesNotExist: pass related = doc.related_documents.order_by('-related_to__in_common')[0:5] # Get the contributors. (To avoid this query, we could render the # the contributors right into the Document's html field.) contributors = doc.revisions.filter( is_approved=True).values_list('creator__username', flat=True) data = {'document': doc, 'redirected_from': redirected_from, 'related': related, 'contributors': contributors.distinct()} data.update(SHOWFOR_DATA) return jingo.render(request, 'wiki/document.html', data)
def test_post_absolute_url(self):
    """Posts link to their thread page with a post-<id> anchor."""
    # First post: lives on page 1, so no page param is expected.
    first = Post.objects.get(pk=1)
    base = reverse('forums.posts',
                   kwargs={'forum_slug': first.thread.forum.slug,
                           'thread_id': first.thread.id})
    eq_(urlparams(base, hash='post-%s' % first.id),
        first.get_absolute_url())
    # Post 24 lands on page 2, so page=2 must appear in the URL.
    deep = Post.objects.get(pk=24)
    base = reverse('forums.posts',
                   kwargs={'forum_slug': deep.thread.forum.slug,
                           'thread_id': deep.thread.id})
    eq_(urlparams(base, hash='post-%s' % deep.id, page=2),
        deep.get_absolute_url())
def test_login_next_parameter_in_forms(self):
    """Test with a valid ?next=url parameter."""
    # header and footer forms
    expected_next = "/en-US/demos/submit"
    page = self.client.get(urlparams(reverse("demos_submit")))
    eq_(200, page.status_code)
    header_input = pq(page.content)('#main-header input[name="next"]')[0]
    eq_(expected_next, header_input.attrib["value"])
    # user login page - someone logged-out clicks edit
    expected_next = "/en-US/docs/Testing$edit"
    page = self.client.get(urlparams(reverse("users.login"),
                                     next=expected_next))
    eq_(200, page.status_code)
    login_input = pq(page.content)('input[name="next"]')[0]
    eq_(expected_next, login_input.attrib["value"])
def test_search_suggestion_question_age(self):
    """Verifies the view doesn't return old questions."""
    topic(title='Fix problems', slug='fix-problems', save=True)
    firefox = product(slug=u'firefox', save=True)
    fresh = question(title='Fresh Cupcakes', save=True)
    fresh.products.add(firefox)
    # Build a question twice as old as the search age cutoff.
    too_old = datetime.now() - timedelta(
        seconds=settings.SEARCH_DEFAULT_MAX_QUESTION_AGE * 2)
    stale = question(title='Stale Cupcakes', created=too_old,
                     updated=too_old, save=True)
    stale.products.add(firefox)
    self.refresh()
    suggest_url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='cupcakes')
    response = self.client.get(suggest_url, follow=True)
    eq_(200, response.status_code)
    self.assertContains(response, fresh.title)
    self.assertNotContains(response, stale.title)
def test_search_suggestions_archived_articles(self):
    """Verifies that archived articles aren't shown."""
    topic(title='Fix problems', slug='fix-problems', save=True)
    p = product(slug=u'firefox', save=True)
    d1 = document(title=u'document donut', category=10, save=True)
    d1.products.add(p)
    revision(document=d1, is_approved=True, save=True)
    d2 = document(title=u'document cupcake', category=10, is_archived=True,
                  save=True)
    d2.products.add(p)
    # Fix: this revision must belong to d2 (it previously re-approved d1),
    # so the archived article has an approved revision and the test really
    # proves archiving — not a missing revision — is what hides it.
    revision(document=d2, is_approved=True, save=True)
    self.refresh()
    url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='document')
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)
    doc = pq(response.content)
    # Only the unarchived "donut" article should be suggested.
    eq_(len(doc('.result.document')), 1)
    assert 'donut' in doc('.result.document h3 a').text()
    assert 'cupcake' not in doc('.result.document h3 a').text()
def test_no_reply_filter(self):
    """The no-replies filter is active and lists the unanswered question."""
    response = self.client.get(
        urlparams(reverse('questions.questions'), filter='no-replies'))
    parsed = pq(response.content)
    eq_('active', parsed('div#filter ul li')[-1].attrib['class'])
    eq_('question-2', parsed('ol.questions li')[0].attrib['id'])
def test_ratelimit(self):
    """Make sure posting new questions is ratelimited"""
    data = {'title': 'A test question',
            'content': 'I have this question that I hope...',
            'sites_affected': 'http://example.com',
            'ff_version': '3.6.6',
            'os': 'Intel Mac OS X 10.6',
            'plugins': '* Shockwave Flash 10.1 r53',
            'useragent': 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X '
                         '10.6; en-US; rv:1.9.2.6) Gecko/20100625 '
                         'Firefox/3.6.6'}
    topic(title='Fix problems', slug='fix-problems', save=True)
    url = urlparams(
        reverse('questions.aaq_step5', args=['desktop', 'fix-problems']),
        search='A test question')
    u = user(save=True)
    self.client.login(username=u.username, password='******')
    # Exhaust the rate limit (idiom fix: range(5) instead of range(0, 5);
    # the loop variable is unused, so name it _).
    for _ in range(5):
        self.client.post(url, data, follow=True)
    # The post over the limit must be rejected.
    response = self.client.post(url, data, follow=True)
    eq_(403, response.status_code)
def sub_test(locale, *titles):
    # Fetch the localized question listing and check both the number of
    # rendered questions and that each expected title appears.
    listing_url = urlparams(reverse("questions.questions", locale=locale))
    response = self.client.get(listing_url, follow=True)
    page = pq(response.content)
    eq_msg(len(page("section[id^=question]")), len(titles),
           "Wrong number of results for {0}".format(locale))
    for expected_title in titles:
        assert expected_title in page(
            ".questions section .content h2 a").text()
def _new_question(self, post_it=False):
    """Post a new question and return the response."""
    topic(title="Fix problems", slug="fix-problems", save=True)
    step_url = urlparams(
        reverse("questions.aaq_step5", args=["desktop", "fix-problems"]),
        search="A test question")
    if post_it:
        return self.client.post(step_url, self.data, follow=True)
    return self.client.get(step_url, follow=True)
def test_compare_revisions_invalid_from_int(self):
    """Provide invalid 'from' int for revision ids."""
    bad_params = {"from": "invalid", "to": ""}
    compare_url = urlparams(
        reverse("wiki.compare_revisions", args=[self.document.slug]),
        **bad_params)
    eq_(404, self.client.get(compare_url).status_code)
def test_bad_parameters(self):
    """Ensure badly-formed revision parameters do not cause errors"""
    # Float-looking garbage must not crash the int conversion; expect 404.
    compare_url = urlparams(
        reverse("wiki.compare_revisions", args=[self.document.slug]),
        **{"from": "1e309", "to": u"1e309"})
    response = self.client.get(compare_url)
    eq_(404, response.status_code)
def get_solution_url(self, watch):
    """Return the solve URL for this answer, signed with the watch secret."""
    solve_url = reverse('questions.solve',
                        kwargs={'question_id': self.question_id,
                                'answer_id': self.id})
    return urlparams(solve_url, watch=watch.secret)
def add_tag_async(request, question_id):
    """Add a (case-insensitive) tag to question asyncronously. Return empty.

    If the question already has the tag, do nothing.
    """
    try:
        question, canonical_name = _add_tag(request, question_id)
    except Tag.DoesNotExist:
        # Tag isn't in the approved vocabulary; report it as a JSON error.
        return HttpResponse(json.dumps({'error': unicode(UNAPPROVED_TAG)}),
                            mimetype='application/json',
                            status=400)
    if canonical_name:
        # A tag was resolved: drop the cached tag list and hand back the
        # canonical name plus a URL listing questions with this tag.
        question.clear_cached_tags()
        tag = Tag.objects.get(name=canonical_name)
        tag_url = urlparams(reverse('questions.questions'), tagged=tag.slug)
        data = {'canonicalName': canonical_name,
                'tagUrl': tag_url}
        return HttpResponse(json.dumps(data), mimetype='application/json')
    # No tag name was supplied in the request.
    return HttpResponse(json.dumps({'error': unicode(NO_TAG)}),
                        mimetype='application/json',
                        status=400)
def process_request(self, request):
    """Map between a DocumentZone's custom URL root and the wiki path.

    Requests for a zone's original /docs/<slug> path are redirected to the
    zone's configured url_root; requests arriving at the url_root are
    rewritten in place so the normal wiki view handles them.
    """
    zones = (DocumentZone.objects.filter(url_root__isnull=False)
                                 .exclude(url_root=''))
    for zone in zones:
        root = '/%s' % zone.url_root
        orig_path = '/docs/%s' % zone.document.slug
        if request.path_info.startswith(orig_path):
            # Is this a request for the "original" wiki path? Redirect to
            # new URL root, if so.
            new_path = request.path_info.replace(orig_path, root, 1)
            new_path = '/%s%s' % (request.locale, new_path)
            query = request.GET.copy()
            # 'lang' is already encoded in the locale prefix; drop it.
            if 'lang' in query:
                query.pop('lang')
            new_path = urlparams(new_path, query_dict=query)
            return HttpResponseRedirect(new_path)
        elif request.path_info.startswith(root):
            # Is this a request for the relocated wiki path? If so, rewrite
            # the path as a request for the proper wiki view.
            request.path_info = request.path_info.replace(
                root, '/docs/%s' % zone.document.slug, 1)
            break
def test_register_through_aaq(self, get_current):
    """Registering through AAQ form sends confirmation email."""
    get_current.return_value.domain = 'testserver'
    self.client.logout()
    url = urlparams(reverse('questions.new_question'),
                    product='desktop', category='d1',
                    search='A test question', showform=1)
    # Register before asking question
    data = {'type': 'register',
            'username': '******',
            'password': '******', 'password2': 'testpass',
            'email': '*****@*****.**'}
    data.update(**self.data)
    self.client.post(url, data, follow=True)
    # Confirmation email is sent
    eq_(1, len(mail.outbox))
    assert mail.outbox[0].subject == 'Please confirm your email address'
    # Finally post question
    self.client.post(url, self.data, follow=True)
    # Verify question is in db now
    question = Question.objects.filter(title='A test question')
    eq_(1, question.count())
    eq_('testaaq', question[0].creator.username)
    # And no confirmation email was sent (already sent on registration)
    # Note: there was already an email sent above
    eq_(1, len(mail.outbox))
def test_send_message_to_prefilled(self, flag_is_active):
    """The compose form's 'to' field is prefilled from ?to=<username>."""
    flag_is_active.return_value = True
    compose_url = urlparams(reverse('messages.new'), to=self.user2.username)
    response = self.client.get(compose_url, follow=True)
    eq_(200, response.status_code)
    to_field = pq(response.content)('#id_to')[0]
    eq_(self.user2.username, to_field.attrib['value'])
def test_topic_select_product(self, flag_is_active):
    """Verify that /topics/<slug>?selectproduct=1 renders products."""
    flag_is_active.return_value = True
    t = topic(save=True)
    prods = [product(save=True) for _ in range(3)]
    # One approved document tagged with the topic and the first two
    # products; the third product stays unused.
    article = revision(is_approved=True, save=True).document
    article.topics.add(t)
    article.products.add(prods[0])
    article.products.add(prods[1])
    self.refresh()
    # GET the topic page and verify only the two used products render.
    page_url = urlparams(reverse('topics.topic', args=[t.slug]),
                         selectproduct=1)
    r = self.client.get(page_url, follow=True)
    eq_(200, r.status_code)
    eq_(2, len(pq(r.content)('#products-and-services li')))
def _get_wiki_link(title, locale): """Checks the page exists, and returns its URL or the URL to create it. Return value is a dict: {'found': boolean, 'url': string}. found is False if the document does not exist. """ # Prevent circular import. sumo is conceptually a utils apps and # shouldn't have import-time (or really, any, but that's not going # to happen) dependencies on client apps. from wiki.models import Document d = get_object_fallback(Document, locale=locale, title=title, is_template=False) if d: # If the article redirects use its destination article while d.redirect_document(): d = d.redirect_document() # The locale in the link urls should always match the current # document's locale even if the document/slug being linked to # is in the default locale. url = reverse('wiki.document', locale=locale, args=[d.slug]) return {'found': True, 'url': url, 'text': d.title} # To avoid circular imports, wiki.models imports wiki_to_html from sumo.helpers import urlparams return {'found': False, 'text': title, 'url': urlparams(reverse('wiki.new_document', locale=locale), title=title)}
def test_search_suggestions_archived_articles(self):
    """Verifies that archived articles aren't shown."""
    topic(title='Fix problems', slug='fix-problems', save=True)
    p = product(slug=u'firefox', save=True)
    d1 = document(title=u'document donut', category=10, save=True)
    d1.products.add(p)
    revision(document=d1, is_approved=True, save=True)
    d2 = document(title=u'document cupcake', category=10, is_archived=True,
                  save=True)
    d2.products.add(p)
    # Fixed copy-paste bug: the second revision previously targeted d1, so
    # the archived article never had an approved revision and the test
    # could pass for the wrong reason. Approving d2 ensures only the
    # archived flag keeps it out of the results.
    revision(document=d2, is_approved=True, save=True)
    self.refresh()
    url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='document')
    response = self.client.get(url, follow=True)
    eq_(200, response.status_code)
    doc = pq(response.content)
    eq_(len(doc('.result.document')), 1)
    assert 'donut' in doc('.result.document h3 a').text()
    assert 'cupcake' not in doc('.result.document h3 a').text()
def process_request(self, request):
    """Redirect or rewrite requests between zone URL roots and wiki paths.

    A hit on a zone's original /docs/<slug> path is redirected to the
    zone's url_root; a hit on the url_root is rewritten in place so the
    standard wiki view serves it.
    """
    zones = (DocumentZone.objects.filter(url_root__isnull=False).exclude(
        url_root=''))
    for zone in zones:
        root = '/%s' % zone.url_root
        orig_path = '/docs/%s' % zone.document.slug
        if request.path_info.startswith(orig_path):
            # Is this a request for the "original" wiki path? Redirect to
            # new URL root, if so.
            new_path = request.path_info.replace(orig_path, root, 1)
            new_path = '/%s%s' % (request.locale, new_path)
            query = request.GET.copy()
            # The locale prefix supersedes any ?lang= parameter.
            if 'lang' in query:
                query.pop('lang')
            new_path = urlparams(new_path, query_dict=query)
            return HttpResponseRedirect(new_path)
        elif request.path_info.startswith(root):
            # Is this a request for the relocated wiki path? If so, rewrite
            # the path as a request for the proper wiki view.
            request.path_info = request.path_info.replace(
                root, '/docs/%s' % zone.document.slug, 1)
            break
def get_absolute_url(self):
    """URL of this post: its thread's page (plus pagination) with an anchor."""
    extra = {'page': self.page} if self.page > 1 else {}
    thread_url = self.thread.get_absolute_url()
    return urlparams(thread_url, hash='post-%s' % self.id, **extra)
def test_search_suggestion_question_age(self):
    """Verifies the view doesn't return old questions."""
    topic(title='Fix problems', slug='fix-problems', save=True)
    prod = product(slug=u'firefox', save=True)
    recent = question(title='Fresh Cupcakes', save=True)
    recent.products.add(prod)
    max_age = settings.SEARCH_DEFAULT_MAX_QUESTION_AGE
    # Twice the cutoff age guarantees the question is filtered out.
    too_old = datetime.now() - timedelta(seconds=max_age * 2)
    ancient = question(title='Stale Cupcakes', created=too_old,
                       updated=too_old, save=True)
    ancient.products.add(prod)
    self.refresh()
    aaq_url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='cupcakes')
    response = self.client.get(aaq_url, follow=True)
    eq_(200, response.status_code)
    self.assertContains(response, recent.title)
    self.assertNotContains(response, ancient.title)
def _get_wiki_link(title, locale): """Checks the page exists, and returns its URL or the URL to create it. Return value is a dict: {'found': boolean, 'url': string}. found is False if the document does not exist. """ # Prevent circular import. sumo is conceptually a utils apps and # shouldn't have import-time (or really, any, but that's not going # to happen) dependencies on client apps. from wiki.models import Document d = get_object_fallback(Document, locale=locale, title=title, is_template=False) if d: # If the article redirects use its destination article while d.redirect_document(): d = d.redirect_document() # The locale in the link urls should always match the current # document's locale even if the document/slug being linked to # is in the default locale. url = reverse('wiki.document', locale=locale, args=[d.slug]) return {'found': True, 'url': url, 'text': d.title} # To avoid circular imports, wiki.models imports wiki_to_html from sumo.helpers import urlparams return { 'found': False, 'text': title, 'url': urlparams(reverse('wiki.new_document', locale=locale), title=title) }
def get_last_post_url(self):
    """URL pointing at the most recent post in this thread."""
    params = {'last': self.last_post_id}
    last = self.last_page
    if last > 1:
        params['page'] = last
    thread_url = reverse('forums.posts', args=[self.forum.slug, self.id])
    return urlparams(thread_url, hash='post-%s' % self.last_post_id,
                     **params)
def test_register_through_aaq(self, get_current):
    """Registering through AAQ form sends confirmation email."""
    get_current.return_value.domain = 'testserver'
    self.client.logout()
    title = 'A test question'
    url = urlparams(reverse('questions.new_question'),
                    product='desktop', category='d1',
                    search=title, showform=1)
    # Register before asking question
    data = {'register': 'Register',
            'username': '******', 'password': '******',
            'password2': 'testpass', 'email': '*****@*****.**'}
    data.update(**self.data)
    self.client.post(url, data, follow=True)
    # Confirmation email is sent
    eq_(1, len(mail.outbox))
    eq_(mail.outbox[0].subject,
        'Please confirm your Firefox Help question')
    assert mail.outbox[0].body.find('(%s)' % title) > 0
    # Finally post question
    self.client.post(url, self.data, follow=True)
    # Verify question is in db now
    question = Question.objects.filter(title=title)
    eq_(1, question.count())
    eq_('testaaq', question[0].creator.username)
    # And no confirmation email was sent (already sent on registration)
    # Note: there was already an email sent above
    eq_(1, len(mail.outbox))
def login(request, template):
    """Try to log the user in."""
    if request.method == 'GET' and not request.MOBILE:
        # Desktop GETs are permanently redirected to the combined auth
        # view, keeping the original query string intact.
        url = reverse('users.auth') + '?' + request.GET.urlencode()
        return HttpResponsePermanentRedirect(url)
    next_url = get_next_url(request) or reverse('home')
    form = handle_login(request)
    if request.user.is_authenticated():
        # Add a parameter so we know the user just logged in.
        # fpa = "first page authed" or something.
        next_url = urlparams(next_url, fpa=1)
        res = HttpResponseRedirect(next_url)
        max_age = (None if settings.SESSION_EXPIRE_AT_BROWSER_CLOSE
                   else settings.SESSION_COOKIE_AGE)
        # Non-HttpOnly marker cookie so client-side code can tell a
        # session exists without reading the session cookie itself.
        res.set_cookie(settings.SESSION_EXISTS_COOKIE,
                       '1',
                       secure=False,
                       max_age=max_age)
        return res
    if request.MOBILE:
        return jingo.render(request, template, {
            'form': form,
        })
    # Desktop POST with a failed login falls through to the auth page.
    return user_auth(request, login_form=form)
def login(request, template):
    """Try to log the user in."""
    if request.method == 'GET' and not request.MOBILE:
        # Permanently redirect desktop GETs to the combined auth view,
        # preserving the query string.
        url = reverse('users.auth') + '?' + request.GET.urlencode()
        return HttpResponsePermanentRedirect(url)
    next_url = get_next_url(request) or reverse('home')
    form = handle_login(request)
    if request.user.is_authenticated():
        # Add a parameter so we know the user just logged in.
        # fpa = "first page authed" or something.
        next_url = urlparams(next_url, fpa=1)
        res = HttpResponseRedirect(next_url)
        max_age = (None if settings.SESSION_EXPIRE_AT_BROWSER_CLOSE
                   else settings.SESSION_COOKIE_AGE)
        # Marker cookie (not HttpOnly) so the frontend can detect a live
        # session without touching the real session cookie.
        res.set_cookie(settings.SESSION_EXISTS_COOKIE,
                       '1',
                       secure=False,
                       max_age=max_age)
        return res
    if request.MOBILE:
        return render(request, template, {'form': form})
    # Failed desktop login: render the auth page with the bound form.
    return user_auth(request, login_form=form)
def process_request(self, request):
    """Normalize the locale prefix on incoming URLs.

    Redirects when an explicit ?lang= override or a non-canonical prefix
    is present; otherwise strips the prefix into request.locale and
    activates that locale for translation.
    """
    prefixer = Prefixer(request)
    set_url_prefixer(prefixer)
    full_path = prefixer.fix(prefixer.shortened_path)
    if 'lang' in request.GET:
        # Blank out the locale so that we can set a new one. Remove lang
        # from the query params so we don't have an infinite loop.
        prefixer.locale = ''
        new_path = prefixer.fix(prefixer.shortened_path)
        query = dict((smart_str(k), v) for
                     k, v in request.GET.iteritems() if k != 'lang')
        return HttpResponsePermanentRedirect(urlparams(new_path, **query))
    if full_path != request.path:
        # Canonicalize the path (e.g. add the locale prefix), keeping any
        # query string.
        query_string = request.META.get('QUERY_STRING', '')
        full_path = urllib.quote(full_path.encode('utf-8'))
        if query_string:
            full_path = '%s?%s' % (full_path, query_string)
        response = HttpResponsePermanentRedirect(full_path)
        # Vary on Accept-Language if we changed the locale
        old_locale = prefixer.locale
        new_locale, _ = split_path(full_path)
        if old_locale != new_locale:
            response['Vary'] = 'Accept-Language'
        return response
    # Hand the de-prefixed path to the URL resolver and record the locale.
    request.path_info = '/' + prefixer.shortened_path
    request.locale = prefixer.locale
    tower.activate(prefixer.locale)
def test_login_next_parameter_in_forms(self):
    '''Test with a valid ?next=url parameter.'''
    # header and footer forms
    expected = '/en-US/demos/submit'
    page = self.client.get(urlparams(reverse('demos_submit')))
    eq_(200, page.status_code)
    eq_(expected,
        pq(page.content)('#main-header input[name="next"]')[0]
        .attrib['value'])
    # user login page - someone logged-out clicks edit
    expected = '/en-US/docs/Testing$edit'
    page = self.client.get(urlparams(reverse('users.login'), next=expected))
    eq_(200, page.status_code)
    eq_(expected,
        pq(page.content)('input[name="next"]')[0].attrib['value'])
def test_document_listing(self, flag_is_active):
    """Verify /products/<product slug>/<topic slug> renders articles."""
    flag_is_active.return_value = True
    # Two topics and a product to tag documents with.
    t1 = topic(save=True)
    t2 = topic(save=True)
    prod = product(save=True)
    # Three documents carry t1 + the product; only the second also
    # carries t2. A fourth document carries neither.
    for i in range(3):
        article = revision(is_approved=True, save=True).document
        article.topics.add(t1)
        article.products.add(prod)
        if i == 1:
            article.topics.add(t2)
    revision(is_approved=True, save=True)
    self.refresh()
    # All three t1 documents are listed.
    listing_url = reverse('products.documents', args=[prod.slug, t1.slug])
    r = self.client.get(listing_url, follow=True)
    eq_(200, r.status_code)
    eq_(3, len(pq(r.content)('#document-list > ul > li')))
    # Refining by t2 narrows the list to the single overlapping document.
    refined_url = urlparams(
        reverse('products.documents', args=[prod.slug, t1.slug]),
        refine=t2.slug)
    r = self.client.get(refined_url, follow=True)
    eq_(200, r.status_code)
    eq_(1, len(pq(r.content)('#document-list > ul > li')))
def get_absolute_url(self):
    """URL of this answer within its question's paginated answer list."""
    extra = {"page": self.page} if self.page > 1 else {}
    question_url = reverse("questions.answers",
                           kwargs={"question_id": self.question_id})
    return urlparams(question_url, hash="answer-%s" % self.id, **extra)
def _get_wiki_link(title, locale):
    """Checks the page exists, and returns its URL or the URL to create it.

    Return value is a dict: {'found': boolean, 'url': string}.
    found is False if the document does not exist.
    """
    # Prevent circular import. sumo is conceptually a utils apps and shouldn't
    # have import-time (or really, any, but that's not going to happen)
    # dependencies on client apps.
    from wiki.models import Document
    existing = get_object_fallback(Document, locale=locale, title=title,
                                   is_template=False)
    if existing:
        return {'found': True,
                'url': existing.get_absolute_url(),
                'text': existing.title}
    # To avoid circular imports, wiki.models imports wiki_to_html
    from sumo.helpers import urlparams
    creation_url = urlparams(reverse('wiki.new_document', locale=locale),
                             title=title)
    return {'found': False, 'text': title, 'url': creation_url}
def test_search_suggestions(self):
    """Verifies the view doesn't kick up an HTTP 500"""
    topic(title='Fix problems', slug='fix-problems', save=True)
    cupcake_q = question(title=u'CupcakesQuestion cupcakes', save=True)
    cupcake_q.tags.add(u'desktop')
    cupcake_q.save()
    kb_doc = document(title=u'CupcakesKB cupcakes', category=10, save=True)
    kb_doc.products.add(product(slug=u'firefox', save=True))
    kb_doc.save()
    revision(document=kb_doc, is_approved=True, save=True)
    self.refresh()
    step_url = urlparams(
        reverse('questions.aaq_step4', args=['desktop', 'fix-problems']),
        search='cupcakes')
    response = self.client.get(step_url, follow=True)
    eq_(200, response.status_code)
    # Both the matching question and the KB article are suggested.
    assert 'CupcakesQuestion' in response.content
    assert 'CupcakesKB' in response.content
def test_login_next_parameter_in_forms(self):
    '''Test with a valid ?next=url parameter.'''
    # header and footer forms
    wanted = '/en-US/demos/submit'
    resp = self.client.get(urlparams(reverse('demos_submit')))
    eq_(200, resp.status_code)
    masthead_input = pq(resp.content)('#masthead input[name="next"]')[0]
    eq_(wanted, masthead_input.attrib['value'])
    # user login page - someone logged-out clicks edit
    wanted = '/en-US/docs/Testing$edit'
    resp = self.client.get(urlparams(reverse('users.login'), next=wanted))
    eq_(200, resp.status_code)
    login_input = pq(resp.content)('input[name="next"]')[0]
    eq_(wanted, login_input.attrib['value'])
def _new_question(self, post_it=False):
    """Post a new question and return the response."""
    step_url = urlparams(
        reverse('questions.aaq_step5', args=['desktop', 'd1']),
        search='A test question')
    if post_it:
        return self.client.post(step_url, self.data, follow=True)
    return self.client.get(step_url, follow=True)
def get_absolute_url(self):
    """Anchor link to this answer on its question's page."""
    if self.page > 1:
        extra = {'page': self.page}
    else:
        extra = {}
    question_url = self.question.get_absolute_url()
    return urlparams(question_url, hash='answer-%s' % self.id, **extra)
def test_intermediate(self):
    """
    Test that the intermediate DocumentAttachment gets created correctly
    when adding an Attachment with a document_id.
    """
    doc = document(locale='en', slug='attachment-test-intermediate')
    doc.save()
    rev = revision(document=doc, is_approved=True)
    rev.save()
    file_for_upload = make_test_file(
        content='A file for testing intermediate attachment model.')
    post_data = {
        'title': 'Intermediate test file',
        'description': 'Intermediate test file',
        'comment': 'Initial upload',
        'file': file_for_upload,
    }
    add_url = urlparams(reverse('attachments.new_attachment'),
                        document_id=doc.id)
    resp = self.client.post(add_url, data=post_data)
    # A successful upload redirects.
    eq_(302, resp.status_code)
    eq_(1, doc.files.count())
    # Exactly one intermediate row links the attachment to the document.
    intermediates = DocumentAttachment.objects.filter(document__pk=doc.id)
    eq_(1, intermediates.count())
    intermediate = intermediates[0]
    # The uploading user and original filename are recorded on it.
    eq_('admin', intermediate.attached_by.username)
    eq_(file_for_upload.name.split('/')[-1], intermediate.name)
def request(cls, url, start, end, realm='Webtrends Basic Authentication'): """Make an authed request to the webtrends API. Make one attempt to fetch and reload the data. If something fails, it's the caller's responsibility to retry. """ # If start and/or end are date or datetime, convert to string. if isinstance(start, (date, datetime)): start = start.strftime('%Ym%md%d') if isinstance(end, (date, datetime)): end = end.strftime('%Ym%md%d') auth_handler = HTTPBasicAuthHandler() auth_handler.add_password(realm=realm, uri=url, user=settings.WEBTRENDS_USER, passwd=settings.WEBTRENDS_PASSWORD) opener = build_opener(auth_handler) url = urlparams(url, start_period=start, end_period=end) try: # TODO: A wrong username or password results in a recursion depth # error. return opener.open(url).read() except IOError, e: raise StatsIOError(*e.args)
def get_absolute_url(self):
    """URL of this answer: the question's answer list plus an anchor."""
    extra = {}
    if self.page > 1:
        extra['page'] = self.page
    base = reverse('questions.answers',
                   kwargs={'question_id': self.question_id})
    return urlparams(base, hash='answer-%s' % self.id, **extra)
def test_post_absolute_url(self):
    """A new post's absolute URL anchors to it on the thread page."""
    t = thread(save=True)
    new_post = t.new_post(creator=t.creator, content='foo')
    thread_doc = new_post.thread.document
    base_url = reverse('wiki.discuss.posts',
                       locale=thread_doc.locale,
                       args=[thread_doc.slug, new_post.thread.id])
    eq_(urlparams(base_url, hash='post-%s' % new_post.id),
        new_post.get_absolute_url())
def _post_new_question(self):
    """Post a new question and return the response."""
    ask_url = urlparams(reverse('questions.new_question'),
                        product='desktop', category='d1',
                        search='A test question', showform=1)
    return self.client.post(ask_url, self.data, follow=True)
def test_gallery_image_search(self):
    """Test for ajax endpoint with search parameter."""
    img = image()
    # A query matching nothing returns no images.
    response = self.client.get(
        urlparams(reverse('gallery.async'), type='image', q='foobar'),
        follow=True)
    eq_(200, response.status_code)
    matches = pq(response.content)('#media-list li img')
    eq_(0, len(matches))
    # Searching by the image's own title finds exactly that image.
    response = self.client.get(
        urlparams(reverse('gallery.async'), type='image', q=img.title),
        follow=True)
    eq_(200, response.status_code)
    matches = pq(response.content)('#media-list li img')
    eq_(1, len(matches))
    eq_(img.thumbnail_url_if_set(), matches[0].attrib['src'])
def test_no_update_edit(self):
    """Editing a question must not bump its updated timestamp."""
    edit_url = urlparams(reverse('questions.edit_question',
                                 args=[self.q.id]))
    payload = {'title': 'A new title.', 'content': 'Some new content.'}
    self._request_and_no_update(edit_url, req_type='POST', data=payload)
def _my_contributions_test_helper(self, username, expected_qty):
    """Log in as `username` and assert the my-contributions filter count."""
    filtered_url = urlparams(reverse('questions.questions'),
                             filter='my-contributions')
    self.client.login(username=username, password="******")
    parsed = pq(self.client.get(filtered_url).content)
    eq_('active', parsed('div#filter ul li')[6].attrib['class'])
    eq_(expected_qty, len(parsed('ol.questions li')))
def sub_test(locale, *titles):
    # GET the question list in `locale`, then verify both the result
    # count and that every expected title is linked.
    listing = self.client.get(
        urlparams(reverse('questions.questions', locale=locale)),
        follow=True)
    parsed = pq(listing.content)
    eq_msg(len(parsed('section[id^=question]')), len(titles),
           'Wrong number of results for {0}'.format(locale))
    links_text = parsed('.questions section .content h2 a').text()
    for substr in titles:
        assert substr in links_text
def test_persona_signup_copy(self):
    """
    After a new user signs up with Persona, their username, an
    indication that Persona was used to log in, and a logout link
    appear in the auth tools section of the page.
    """
    persona_signup_email = '*****@*****.**'
    persona_signup_username = '******'
    # Create a server-side session and hand its key to the test client so
    # the Persona flow has a session to attach the verified email to.
    engine = import_module(settings.SESSION_ENGINE)
    store = engine.SessionStore()
    store.save()
    self.client.cookies[settings.SESSION_COOKIE_NAME] = store.session_key
    with mock.patch('requests.post') as requests_mock:
        # Fake a successful Persona verification response.
        requests_mock.return_value.json.return_value = {
            'status': 'okay',
            'email': persona_signup_email,
        }
        r = self.client.post(reverse('persona_login'), follow=True)
        # Complete the signup form for the brand-new account.
        data = {'username': persona_signup_username,
                'email': persona_signup_email}
        r = self.client.post(
            reverse('socialaccount_signup',
                    locale=settings.WIKI_DEFAULT_LANGUAGE),
            data=data, follow=True)
        profile_url = reverse(
            'users.profile',
            kwargs={'username': persona_signup_username},
            locale=settings.WIKI_DEFAULT_LANGUAGE)
        signout_url = urlparams(
            reverse('account_logout',
                    locale=settings.WIKI_DEFAULT_LANGUAGE),
            next=reverse('home', locale=settings.WIKI_DEFAULT_LANGUAGE))
        parsed = pq(r.content)
        # Check the auth tools section of the rendered page.
        login_info = parsed.find('.header-login .user-state')
        ok_(len(login_info.children()))
        signed_in_message = login_info.children()[0]
        ok_('title' in signed_in_message.attrib)
        eq_('Signed in with Persona',
            signed_in_message.attrib['title'])
        auth_links = login_info.children()[1].getchildren()
        ok_(len(auth_links))
        profile_link = auth_links[0].getchildren()[0]
        ok_('href' in profile_link.attrib)
        eq_(profile_url, profile_link.attrib['href'])
        signout_link = auth_links[1].getchildren()[0]
        ok_('href' in signout_link.attrib)
        eq_(signout_url.replace('%2F', '/'),  # urlparams() encodes slashes
            signout_link.attrib['href'])
def _new_question(self, post_it=False):
    """Post a new question and return the response."""
    topic(title='Fix problems', slug='fix-problems', save=True)
    aaq_url = urlparams(
        reverse('questions.aaq_step5', args=['desktop', 'fix-problems']),
        search='A test question')
    if post_it:
        return self.client.post(aaq_url, self.data, follow=True)
    return self.client.get(aaq_url, follow=True)