def test_from_url(self):
    """Verify question returned from valid URL."""
    q = QuestionFactory()

    eq_(q, Question.from_url('/en-US/questions/%s' % q.id))
    eq_(q, Question.from_url('/es/questions/%s' % q.id))
    eq_(q, Question.from_url('/questions/%s' % q.id))

def test_from_url(self):
    """Verify question returned from valid URL."""
    q = question(save=True)

    eq_(q, Question.from_url('/en-US/questions/%s' % q.id))
    eq_(q, Question.from_url('/es/questions/%s' % q.id))
    eq_(q, Question.from_url('/questions/%s' % q.id))

def test_from_url(self):
    """Verify question returned from valid URL."""
    q = QuestionFactory()

    eq_(q, Question.from_url("/en-US/questions/%s" % q.id))
    eq_(q, Question.from_url("/es/questions/%s" % q.id))
    eq_(q, Question.from_url("/questions/%s" % q.id))

def test_from_invalid_url(self):
    """Verify no question is returned from an invalid URL."""
    q = question(save=True)

    eq_(None, Question.from_url('/en-US/questions/{0!s}/edit'.format(q.id)))
    eq_(None, Question.from_url('/en-US/kb/{0!s}'.format(q.id)))
    eq_(None, Question.from_url('/random/url'))
    eq_(None, Question.from_url('/en-US/questions/dashboard/metrics'))

def test_notification_created(self):
    """Creating a new question auto-watches it for answers."""
    u = User.objects.get(pk=118533)
    q = Question(creator=u, title='foo', content='bar')
    q.save()

    assert QuestionReplyEvent.is_notifying(u, q)

def test_from_invalid_url(self):
    """Verify no question is returned from an invalid URL."""
    q = question(save=True)

    eq_(None, Question.from_url('/en-US/questions/%s/edit' % q.id))
    eq_(None, Question.from_url('/en-US/kb/%s' % q.id))
    eq_(None, Question.from_url('/random/url'))
    eq_(None, Question.from_url('/en-US/questions/dashboard/metrics'))

def test_no_inactive_users(self):
    """Ensure that inactive users' questions don't appear in the feed."""
    u = user(is_active=False, save=True)
    q = Question(title='Test Question', content='Lorem Ipsum Dolor',
                 creator_id=u.id)
    q.save()
    assert q.id not in [x.id for x in QuestionsFeed().items({})]

def test_from_invalid_url(self):
    """Verify no question is returned from an invalid URL."""
    q = question(save=True)

    eq_(None, Question.from_url('/en-US/questions/%s/edit' % q.id))
    eq_(None, Question.from_url('/en-US/kb/%s' % q.id))
    eq_(None, Question.from_url('/random/url'))
    eq_(None, Question.from_url('/en-US/questions/stats'))

def test_from_invalid_url(self):
    """Verify no question is returned from an invalid URL."""
    q = QuestionFactory()

    eq_(None, Question.from_url('/en-US/questions/%s/edit' % q.id))
    eq_(None, Question.from_url('/en-US/kb/%s' % q.id))
    eq_(None, Question.from_url('/random/url'))
    eq_(None, Question.from_url('/en-US/questions/dashboard/metrics'))

def test_from_invalid_url(self):
    """Verify no question is returned from an invalid URL."""
    q = QuestionFactory()

    eq_(None, Question.from_url("/en-US/questions/%s/edit" % q.id))
    eq_(None, Question.from_url("/en-US/kb/%s" % q.id))
    eq_(None, Question.from_url("/random/url"))
    eq_(None, Question.from_url("/en-US/questions/dashboard/metrics"))

def test_no_inactive_users(self):
    """Ensure that inactive users' questions don't appear in the feed."""
    u = User.objects.get(pk=118533)
    u.is_active = False
    u.save()
    q = Question(title='Test Question', content='Lorem Ipsum Dolor',
                 creator_id=118533)
    q.save()
    assert q.id not in [x.id for x in QuestionsFeed().items()]

def test_recent_counts(self):
    """Verify recent_asked_count and recent_unanswered_count."""
    # Create questions at a spread of ages: now, 12, 23 and 25 hours ago.
    now = datetime.now()
    QuestionFactory(created=now)
    QuestionFactory(created=now - timedelta(hours=12), is_locked=True)
    q = QuestionFactory(created=now - timedelta(hours=23))
    AnswerFactory(question=q)
    # 25 hours instead of 24 to avoid random test fails.
    QuestionFactory(created=now - timedelta(hours=25))

    # Only 3 are recent (last 24 hours); of those, 1 is locked and 1 has an
    # answer, leaving 1 unanswered.
    eq_(3, Question.recent_asked_count())
    eq_(1, Question.recent_unanswered_count())

def test_recent_counts(self):
    """Verify recent_asked_count and recent_unanswered_count."""
    # Create questions at a spread of ages: now, 12, 23 and 25 hours ago.
    now = datetime.now()
    question(created=now, save=True)
    question(created=now - timedelta(hours=12), save=True, is_locked=True)
    q = question(created=now - timedelta(hours=23), save=True)
    answer(question=q, save=True)
    # 25 hours instead of 24 to avoid random test fails.
    question(created=now - timedelta(hours=25), save=True)

    # Only 3 are recent (last 24 hours); of those, 1 is locked and 1 has an
    # answer, leaving 1 unanswered.
    eq_(3, Question.recent_asked_count())
    eq_(1, Question.recent_unanswered_count())

def question(save=False, **kwargs):
    defaults = dict(title=str(datetime.now()),
                    content='',
                    created=datetime.now(),
                    num_answers=0,
                    is_locked=0)
    defaults.update(kwargs)
    if 'creator' not in kwargs and 'creator_id' not in kwargs:
        defaults['creator'] = profile().user
    # Pull metadata out so it isn't passed to the model constructor, which
    # would reject it as an unknown keyword argument.
    metadata = defaults.pop('metadata', None)
    q = Question(**defaults)
    if save:
        q.save()
    if metadata is not None:
        if not save:
            raise ValueError('save must be True if metadata provided.')
        q.add_metadata(**metadata)
    return q

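# A minimal usage sketch of the factory above; this helper is not part of the
# original module. It assumes the test model makers (profile, question) are
# importable from the same tests package, and the metadata key shown here is
# hypothetical.
def _example_question_factory_usage():
    q1 = question(save=True)                 # saved, generated defaults
    q2 = question(title='Crash on startup')  # unsaved; creator auto-filled
    # metadata requires save=True, otherwise ValueError is raised.
    q3 = question(save=True, metadata={'useragent': 'example-agent'})
    return q1, q2, q3
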
def question(**kwargs):
    defaults = dict(title=str(datetime.now()),
                    content='',
                    created=datetime.now(),
                    num_answers=0,
                    is_locked=0)
    defaults.update(kwargs)
    if 'creator' not in kwargs and 'creator_id' not in kwargs:
        defaults['creator'] = profile().user
    return Question(**defaults)

def test_recent_counts_with_filter(self):
    """Verify that recent_asked_count and recent_unanswered_count
    respect filters passed."""
    now = datetime.now()

    QuestionFactory(created=now, locale="en-US")
    q = QuestionFactory(created=now, locale="en-US")
    AnswerFactory(question=q)

    QuestionFactory(created=now, locale="pt-BR")
    QuestionFactory(created=now, locale="pt-BR")
    q = QuestionFactory(created=now, locale="pt-BR")
    AnswerFactory(question=q)

    # 5 asked recently, 3 are unanswered
    eq_(5, Question.recent_asked_count())
    eq_(3, Question.recent_unanswered_count())

    # check english (2 asked, 1 unanswered)
    locale_filter = Q(locale="en-US")
    eq_(2, Question.recent_asked_count(locale_filter))
    eq_(1, Question.recent_unanswered_count(locale_filter))

    # check pt-BR (3 asked, 2 unanswered)
    locale_filter = Q(locale="pt-BR")
    eq_(3, Question.recent_asked_count(locale_filter))
    eq_(2, Question.recent_unanswered_count(locale_filter))

def test_recent_counts_with_filter(self):
    """Verify that recent_asked_count and recent_unanswered_count
    respect filters passed."""
    now = datetime.now()

    question(created=now, locale='en-US', save=True)
    q = question(created=now, locale='en-US', save=True)
    answer(question=q, save=True)

    question(created=now, locale='pt-BR', save=True)
    question(created=now, locale='pt-BR', save=True)
    q = question(created=now, locale='pt-BR', save=True)
    answer(question=q, save=True)

    # 5 asked recently, 3 are unanswered
    eq_(5, Question.recent_asked_count())
    eq_(3, Question.recent_unanswered_count())

    # check english (2 asked, 1 unanswered)
    locale_filter = Q(locale='en-US')
    eq_(2, Question.recent_asked_count(locale_filter))
    eq_(1, Question.recent_unanswered_count(locale_filter))

    # check pt-BR (3 asked, 2 unanswered)
    locale_filter = Q(locale='pt-BR')
    eq_(3, Question.recent_asked_count(locale_filter))
    eq_(2, Question.recent_unanswered_count(locale_filter))

def related_questions(self):
    """Return questions that are 'morelikethis' this document."""
    # Only documents in default IA categories have related.
    if (
        self.redirect_url()
        or not self.current_revision
        or self.category not in settings.IA_DEFAULT_CATEGORIES
        or self.locale not in settings.AAQ_LANGUAGES
    ):
        return []

    # First try to get the results from the cache
    key = "wiki_document:related_questions:%s" % self.id
    questions = cache.get(key)
    if questions is not None:
        statsd.incr("wiki.related_questions.cache.hit")
        log.debug("Getting MLT questions for {doc} from cache.".format(doc=repr(self)))
        return questions

    try:
        statsd.incr("wiki.related_questions.cache.miss")
        max_age = settings.SEARCH_DEFAULT_MAX_QUESTION_AGE
        start_date = int(time.time()) - max_age

        s = Question.get_mapping_type().search()
        questions = (
            s.values_dict("id", "question_title", "url")
            .filter(
                question_locale=self.locale,
                product__in=[p.slug for p in self.get_products()],
                question_has_helpful=True,
                created__gte=start_date,
            )
            .query(
                __mlt={
                    "fields": ["question_title", "question_content"],
                    "like_text": self.title,
                    "min_term_freq": 1,
                    "min_doc_freq": 1,
                }
            )[:3]
        )
        questions = list(questions)
        cache.add(key, questions)
    except ES_EXCEPTIONS:
        statsd.incr("wiki.related_questions.esexception")
        log.exception("ES MLT related_questions")
        questions = []

    return questions

def pageviews_by_question(start_date, end_date):
    """Return the number of pageviews by question in a given date range.

    Returns a dict with pageviews for each question:
        {<question_id>: <pageviews>, 1: 42, 7: 1337, ...}
    """
    counts = {}
    request = _build_request()
    start_index = 1
    max_results = 10000

    while True:  # To deal with pagination

        @retry_503
        def _make_request():
            return request.get(
                ids='ga:' + profile_id,
                start_date=str(start_date),
                end_date=str(end_date),
                metrics='ga:pageviews',
                dimensions='ga:pagePath',
                filters='ga:pagePathLevel2==/questions/',
                max_results=max_results,
                start_index=start_index).execute()

        results = _make_request()

        for result in results['rows']:
            path = result[0]
            pageviews = int(result[1])
            question_id = Question.from_url(path, id_only=True)
            if not question_id:
                continue

            # The same question can appear multiple times due to url params
            # and locale.
            counts[question_id] = counts.get(question_id, 0) + pageviews

        # Move to next page of results.
        start_index += max_results
        if start_index > results['totalResults']:
            break

    return counts

def related_questions(self):
    """Return questions that are 'morelikethis' this document."""
    # Only documents in default IA categories have related.
    if (self.redirect_url() or not self.current_revision or
            self.category not in settings.IA_DEFAULT_CATEGORIES):
        return []

    # First try to get the results from the cache
    key = 'wiki_document:related_questions:%s' % self.id
    questions = cache.get(key)
    if questions is not None:
        statsd.incr('wiki.related_questions.cache.hit')
        log.debug('Getting MLT questions for {doc} from cache.'
                  .format(doc=repr(self)))
        return questions

    try:
        statsd.incr('wiki.related_questions.cache.miss')
        max_age = settings.SEARCH_DEFAULT_MAX_QUESTION_AGE
        start_date = int(time.time()) - max_age

        s = Question.get_mapping_type().search()
        questions = s.values_dict('id', 'question_title', 'url').filter(
            question_locale=self.locale,
            product__in=[p.slug for p in self.get_products()],
            question_has_helpful=True,
            created__gte=start_date
        ).query(
            __mlt={
                'fields': ['question_title', 'question_content'],
                'like_text': self.title,
                'min_term_freq': 1,
                'min_doc_freq': 1,
            }
        )[:3]
        questions = list(questions)
        cache.add(key, questions)
    except ES_EXCEPTIONS as exc:
        statsd.incr('wiki.related_questions.esexception')
        log.error('ES MLT {err} related_questions for {doc}'.format(
            doc=repr(self), err=str(exc)))
        questions = []

    return questions

def related_questions(self):
    """Return questions that are 'morelikethis' this document."""
    # Only documents in default IA categories have related.
    if (self.redirect_url() or not self.current_revision or
            self.category not in settings.IA_DEFAULT_CATEGORIES or
            self.locale not in settings.AAQ_LANGUAGES):
        return []

    # First try to get the results from the cache
    key = 'wiki_document:related_questions:%s' % self.id
    questions = cache.get(key)
    if questions is not None:
        statsd.incr('wiki.related_questions.cache.hit')
        log.debug('Getting MLT questions for {doc} from cache.'
                  .format(doc=repr(self)))
        return questions

    try:
        statsd.incr('wiki.related_questions.cache.miss')
        max_age = settings.SEARCH_DEFAULT_MAX_QUESTION_AGE
        start_date = int(time.time()) - max_age

        s = Question.get_mapping_type().search()
        questions = s.values_dict('id', 'question_title', 'url').filter(
            question_locale=self.locale,
            product__in=[p.slug for p in self.get_products()],
            question_has_helpful=True,
            created__gte=start_date
        ).query(
            __mlt={
                'fields': ['question_title', 'question_content'],
                'like_text': self.title,
                'min_term_freq': 1,
                'min_doc_freq': 1,
            }
        )[:3]
        questions = list(questions)
        cache.add(key, questions)
    except ES_EXCEPTIONS:
        statsd.incr('wiki.related_questions.esexception')
        log.exception('ES MLT related_questions')
        questions = []

    return questions

def test_from_url_id_only(self):
    """Verify the question id is returned from a valid URL."""
    # When requesting the id, the existence of the question isn't checked.
    eq_(123, Question.from_url('/en-US/questions/123', id_only=True))
    eq_(234, Question.from_url('/es/questions/234', id_only=True))
    eq_(345, Question.from_url('/questions/345', id_only=True))

def pageviews_by_question(start_date, end_date, verbose=False):
    """Return the number of pageviews by question in a given date range.

    Returns a dict with pageviews for each question:
        {<question_id>: <pageviews>, 1: 42, 7: 1337, ...}
    """
    counts = {}
    request = _build_request()
    max_results = 10000
    end_date_step = end_date

    while True:  # To reduce the size of result set request 3 months at a time
        start_date_step = end_date_step - timedelta(90)

        if start_date_step < start_date:
            start_date_step = start_date

        if verbose:
            print 'Fetching data for %s to %s:' % (start_date_step,
                                                   end_date_step)

        start_index = 1

        while True:  # To deal with pagination

            @retry_503
            def _make_request():
                return request.get(
                    ids='ga:' + profile_id,
                    start_date=str(start_date_step),
                    end_date=str(end_date_step),
                    metrics='ga:pageviews',
                    dimensions='ga:pagePath',
                    filters='ga:pagePathLevel2==/questions/',
                    max_results=max_results,
                    start_index=start_index).execute()

            results = _make_request()

            if verbose:
                d = (max_results - 1
                     if start_index + max_results - 1 < results['totalResults']
                     else results['totalResults'] - start_index)
                print '- Got %s of %s results.' % (start_index + d,
                                                   results['totalResults'])

            for result in results['rows']:
                path = result[0]
                pageviews = int(result[1])
                question_id = Question.from_url(path, id_only=True)
                if not question_id:
                    continue

                # The same question can appear multiple times due to url
                # params and locale.
                counts[question_id] = counts.get(question_id, 0) + pageviews

            # Move to next page of results.
            start_index += max_results
            if start_index > results['totalResults']:
                break

        end_date_step = start_date_step - timedelta(1)

        if start_date_step == start_date or end_date_step < start_date:
            break

    return counts

def question_list(request, product_slug):
    """View the list of questions."""
    if settings.DISABLE_QUESTIONS_LIST_GLOBAL:
        messages.add_message(request, messages.WARNING,
                             "You cannot list questions at this time.")
        return HttpResponseRedirect("/")

    filter_ = request.GET.get("filter")
    owner = request.GET.get("owner",
                            request.session.get("questions_owner", "all"))
    show = request.GET.get("show")
    # Show defaults to NEEDS ATTENTION
    if show not in FILTER_GROUPS:
        show = "needs-attention"
    tagged = request.GET.get("tagged")
    tags = None
    topic_slug = request.GET.get("topic")

    order = request.GET.get("order", "updated")
    if order not in ORDER_BY:
        order = "updated"
    sort = request.GET.get("sort", "desc")

    product_slugs = product_slug.split(",")
    products = []

    if len(product_slugs) > 1 or product_slugs[0] != "all":
        for slug in product_slugs:
            products.append(get_object_or_404(Product, slug=slug))
        multiple = len(products) > 1
    else:
        # We want all products (no product filtering at all).
        if settings.DISABLE_QUESTIONS_LIST_ALL:
            messages.add_message(request, messages.WARNING,
                                 "You cannot list all questions at this time.")
            return HttpResponseRedirect("/")
        products = None
        multiple = True

    if topic_slug and not multiple:
        # We don't support topics when there is more than one product.
        # There is no way to know what product the topic applies to.
        try:
            topic = Topic.objects.get(slug=topic_slug, product=products[0])
        except Topic.DoesNotExist:
            topic = None
    else:
        topic = None

    question_qs = Question.objects

    if filter_ not in FILTER_GROUPS[show]:
        filter_ = None

    if filter_ == "new":
        question_qs = question_qs.new()
    elif filter_ == "unhelpful-answers":
        question_qs = question_qs.unhelpful_answers()
    elif filter_ == "needsinfo":
        question_qs = question_qs.needs_info()
    elif filter_ == "solution-provided":
        question_qs = question_qs.solution_provided()
    elif filter_ == "solved":
        question_qs = question_qs.solved()
    elif filter_ == "locked":
        question_qs = question_qs.locked()
    elif filter_ == "recently-unanswered":
        question_qs = question_qs.recently_unanswered()
    else:
        if show == "needs-attention":
            question_qs = question_qs.needs_attention()
        if show == "responded":
            question_qs = question_qs.responded()
        if show == "done":
            question_qs = question_qs.done()

    question_qs = question_qs.select_related(
        "creator", "last_answer", "last_answer__creator")
    question_qs = question_qs.prefetch_related("topic", "topic__product")

    question_qs = question_qs.filter(creator__is_active=1)

    if not request.user.has_perm("flagit.can_moderate"):
        question_qs = question_qs.filter(is_spam=False)

    if owner == "mine" and request.user.is_authenticated:
        criteria = Q(answers__creator=request.user) | Q(creator=request.user)
        question_qs = question_qs.filter(criteria).distinct()
    else:
        owner = None

    feed_urls = (
        (
            urlparams(reverse("questions.feed"),
                      product=product_slug, topic=topic_slug),
            QuestionsFeed().title(),
        ),
    )

    if tagged:
        tag_slugs = tagged.split(",")
        tags = Tag.objects.filter(slug__in=tag_slugs)
        if tags:
            for t in tags:
                question_qs = question_qs.filter(tags__name__in=[t.name])
            if len(tags) == 1:
                feed_urls += (
                    (
                        reverse("questions.tagged_feed", args=[tags[0].slug]),
                        TaggedQuestionsFeed().title(tags[0]),
                    ),
                )
        else:
            question_qs = Question.objects.none()

    # Exclude questions over 90 days old without an answer.
    oldest_date = date.today() - timedelta(days=90)
    question_qs = question_qs.exclude(created__lt=oldest_date, num_answers=0)

    # Filter by products.
    if products:
        # This filter will match if any of the products on a question have
        # the correct id.
        question_qs = question_qs.filter(product__in=products).distinct()

    # Filter by topic.
    if topic:
        # This filter will match if any of the topics on a question have the
        # correct id.
        question_qs = question_qs.filter(topic__id=topic.id)

    # Filter by locale for AAQ locales, and by locale + default for others.
    if request.LANGUAGE_CODE in QuestionLocale.objects.locales_list():
        locale_query = Q(locale=request.LANGUAGE_CODE)
    else:
        locale_query = Q(locale=request.LANGUAGE_CODE)
        locale_query |= Q(locale=settings.WIKI_DEFAULT_LANGUAGE)

    question_qs = question_qs.filter(locale_query)

    # Set the order.
    # Set a default value if a user requested a non-existing order parameter.
    order_by = ORDER_BY.get(order, ["updated"])[0]
    question_qs = question_qs.order_by(
        order_by if sort == "asc" else "-%s" % order_by)

    try:
        questions_page = simple_paginate(
            request, question_qs, per_page=config.QUESTIONS_PER_PAGE)
    except (PageNotAnInteger, EmptyPage):
        # If we aren't on page 1, redirect there.
        # TODO: Is 404 more appropriate?
        if request.GET.get("page", "1") != "1":
            url = build_paged_url(request)
            return HttpResponseRedirect(urlparams(url, page=1))

    # Recent answered stats
    extra_filters = locale_query
    if products:
        extra_filters &= Q(product__in=products)

    recent_asked_count = Question.recent_asked_count(extra_filters)
    recent_unanswered_count = Question.recent_unanswered_count(extra_filters)
    if recent_asked_count:
        recent_answered_percent = int(
            (float(recent_asked_count - recent_unanswered_count) /
             recent_asked_count) * 100)
    else:
        recent_answered_percent = 0

    # List of products to fill the selector.
    product_list = Product.objects.filter(visible=True)

    # List of topics to fill the selector. Only shows if there is exactly
    # one product selected.
    if products and not multiple:
        topic_list = Topic.objects.filter(
            visible=True, product=products[0])[:10]
    else:
        topic_list = []

    # Store current filters in the session
    if request.user.is_authenticated:
        request.session["questions_owner"] = owner

    data = {
        "questions": questions_page,
        "feeds": feed_urls,
        "filter": filter_,
        "owner": owner,
        "show": show,
        "filters": FILTER_GROUPS[show],
        "order": order,
        "orders": ORDER_BY,
        "sort": sort,
        "tags": tags,
        "tagged": tagged,
        "recent_asked_count": recent_asked_count,
        "recent_unanswered_count": recent_unanswered_count,
        "recent_answered_percent": recent_answered_percent,
        "product_list": product_list,
        "products": products,
        "product_slug": product_slug,
        "multiple_products": multiple,
        "all_products": product_slug == "all",
        "topic_list": topic_list,
        "topic": topic,
    }

    return render(request, "questions/question_list.html", data)

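# Illustrative only: the view above reads FILTER_GROUPS and ORDER_BY from the
# questions app's configuration; the real constants live elsewhere. The
# hypothetical shapes below are merely consistent with how the view uses them
# ("show" keys mapping to allowed filter names, and order keys mapping to
# sequences whose first element is the model field to sort by).
FILTER_GROUPS_EXAMPLE = {
    "needs-attention": ["new", "needsinfo", "recently-unanswered"],
    "responded": ["needsinfo", "solution-provided"],
    "done": ["solved", "locked"],
}
ORDER_BY_EXAMPLE = {
    "updated": ["updated"],
    "created": ["created"],
    "replies": ["num_answers"],
}
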
def pageviews_by_question(start_date, end_date, verbose=False):
    """Return the number of pageviews by question in a given date range.

    Returns a dict with pageviews for each question:
        {<question_id>: <pageviews>, 1: 42, 7: 1337, ...}
    """
    counts = {}
    request = _build_request()
    max_results = 10000
    end_date_step = end_date

    while True:  # To reduce the size of result set request 3 months at a time
        start_date_step = end_date_step - timedelta(90)

        if start_date_step < start_date:
            start_date_step = start_date

        if verbose:
            print 'Fetching data for {0!s} to {1!s}:'.format(start_date_step,
                                                             end_date_step)

        start_index = 1

        while True:  # To deal with pagination

            @retry_503
            def _make_request():
                return request.get(
                    ids='ga:' + profile_id,
                    start_date=str(start_date_step),
                    end_date=str(end_date_step),
                    metrics='ga:pageviews',
                    dimensions='ga:pagePath',
                    filters='ga:pagePathLevel2==/questions/',
                    max_results=max_results,
                    start_index=start_index).execute()

            results = _make_request()

            if verbose:
                d = (max_results - 1
                     if start_index + max_results - 1 < results['totalResults']
                     else results['totalResults'] - start_index)
                print '- Got {0!s} of {1!s} results.'.format(
                    start_index + d, results['totalResults'])

            for result in results['rows']:
                path = result[0]
                pageviews = int(result[1])
                question_id = Question.from_url(path, id_only=True)
                if not question_id:
                    continue

                # The same question can appear multiple times due to url
                # params and locale.
                counts[question_id] = counts.get(question_id, 0) + pageviews

            # Move to next page of results.
            start_index += max_results
            if start_index > results['totalResults']:
                break

        end_date_step = start_date_step - timedelta(1)

        if start_date_step == start_date or end_date_step < start_date:
            break

    return counts

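# Usage sketch for the pageviews helpers above; this function is not part of
# the original module. It assumes the module-level Google Analytics client
# (_build_request) and profile_id are configured, as the functions above do,
# and the dates are arbitrary example inputs.
def _example_pageviews_report():
    from datetime import date
    # Sum pageviews per question id over one quarter.
    counts = pageviews_by_question(date(2015, 1, 1), date(2015, 3, 31),
                                   verbose=True)
    # counts maps question id -> pageviews, e.g. {42: 1337, 7031: 12}; ids
    # come from Question.from_url(path, id_only=True).
    return counts
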
def test_from_url_id_only(self):
    """Verify the question id is returned from a valid URL."""
    # When requesting the id, the existence of the question isn't checked.
    eq_(123, Question.from_url("/en-US/questions/123", id_only=True))
    eq_(234, Question.from_url("/es/questions/234", id_only=True))
    eq_(345, Question.from_url("/questions/345", id_only=True))