Example #1
    def test_load_all_read_queryset(self):
        # stow
        old_site = haystack.site
        test_site = SearchSite()
        # Register a default SearchIndex first (without the SoftDeleteManager as the read_queryset)
        test_site.register(AFifthMockModel)
        haystack.site = test_site

        sqs = SearchQuerySet(query=MockSearchQuery(backend=ReadQuerySetMockSearchBackend()))
        results = sqs.load_all().all()
        results._fill_cache(0, 2)
        # The deleted result isn't returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 1)

        test_site.unregister(AFifthMockModel)
        # Register a SearchIndex with a read_queryset that returns deleted items
        test_site.register(AFifthMockModel, ReadQuerySetTestSearchIndex)

        sqs = SearchQuerySet(query=MockSearchQuery(backend=ReadQuerySetMockSearchBackend()))
        results = sqs.load_all().all()
        results._fill_cache(0, 2)
        # Both the deleted and not deleted items are returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)


        # restore
        haystack.site = old_site
Example #2
    def test_load_all(self):
        # Models with character primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = CharPKMockSearchBackend('charpk')
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # Models with uuid primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = UUIDMockSearchBackend('uuid')
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # If nothing is handled, you get nothing.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        ui.build(indexes=[])
        connections['default']._index = ui

        sqs = self.msqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        connections['default']._index = old_ui
Example #3
    def test_load_all_read_queryset(self):
        # Stow.
        old_ui = connections["default"]._index
        ui = UnifiedIndex()
        gafmmsi = GhettoAFifthMockModelSearchIndex()
        ui.build(indexes=[gafmmsi])
        connections["default"]._index = ui
        gafmmsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend("default")
        results._fill_cache(0, 2)

        # The deleted result isn't returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 1)

        # Register a SearchIndex with a read_queryset that returns deleted items
        rqstsi = TextReadQuerySetTestSearchIndex()
        ui.build(indexes=[rqstsi])
        rqstsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend("default")
        results._fill_cache(0, 2)

        # Both the deleted and not deleted items are returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # Restore.
        connections["default"]._index = old_ui
Example #4
    def get_queryset(self):
        if self.request.method in SAFE_METHODS:
            # Listing/searching.

            validate_request_on_app_id(self.request)
            if "q" in self.request.query_params:
                search_q = self.request.query_params["q"]
            else:
                # No query is provided. Full list is for authenticated requests only.
                if not self.request.user.is_authenticated():
                    raise NotAuthenticated("")
                return Picture.objects.order_by("item_id")

            search_q = search_q.strip()
            search_q = search_q.lower()
            search_q = re.sub(r"\s+", " ", search_q)

            search_q = search_q.replace("-", " ")
            search_q = search_q.replace(".", "")
            search_q = search_q.replace("'", "")
            search_q = search_q.strip()

            if not search_q:
                return Picture.objects.none()

            sqs = SearchQuerySet().models(Picture)
            sqs = sqs.filter(content=AutoQuery(search_q))

            pictures = [sr.object for sr in sqs.load_all()]

            if not pictures:
                # Auto-correction.
                sugg = sqs.spelling_suggestion()
                if (sugg is not None) and ("suggestion" in sugg) and sugg["suggestion"]:
                    sqs = SearchQuerySet().models(Picture)
                    sqs = sqs.filter(content=sugg["suggestion"][0])
                    pictures = [sr.object for sr in sqs.load_all()]

            # Exclude hidden.
            pictures = [picture for picture in pictures if not picture.is_hidden]

            # Randomize leading good ones.
            first_non_good_picture_pos = None
            for i, picture in enumerate(pictures):
                if not picture.is_good:
                    first_non_good_picture_pos = i
                    break
            if first_non_good_picture_pos is not None:
                leading_good_pictures = pictures[:first_non_good_picture_pos]
                random.shuffle(leading_good_pictures)
                pictures = leading_good_pictures + pictures[first_non_good_picture_pos:]

            if BOOST_SEARCH_RESULTS_ON_NUM_USES:
                pictures = boost_search_results_on_num_uses(pictures)

            return pictures

        else:
            return Picture.objects.all()
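The boost_search_results_on_num_uses helper used above is not shown here; a plausible minimal sketch (the num_uses attribute and the stable re-sort are assumptions, not the project's actual implementation) could be:

def boost_search_results_on_num_uses(pictures):
    # Hypothetical sketch: stable-sort so frequently used pictures float up
    # while the relevance order from the search backend is otherwise preserved.
    return sorted(pictures, key=lambda p: getattr(p, 'num_uses', 0), reverse=True)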
Example #5
 def no_query_found(self):
     if self.searchqueryset:
         return self.searchqueryset
     sqs = SearchQuerySet()
     if self.load_all:
         sqs = sqs.load_all()
     return sqs
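no_query_found is a hook on Haystack's SearchForm that is called when the form has no usable query; a minimal sketch of the surrounding form subclass (the class name is an assumption) looks like:

from haystack.forms import SearchForm
from haystack.query import SearchQuerySet


class ShowAllSearchForm(SearchForm):
    # Hypothetical subclass: return everything instead of nothing for an empty
    # query, honouring the form's load_all flag just like the snippet above.
    def no_query_found(self):
        if self.searchqueryset:
            return self.searchqueryset
        sqs = SearchQuerySet()
        if self.load_all:
            sqs = sqs.load_all()
        return sqs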
Example #6
    def items(self, obj):
        words = obj["keywords"]
        urls = obj["sources"]

        corpora = SearchQuerySet().models(Corpus)
        cfilter = {"words": SQ(), "urls": SQ()}
        for word in words:
            word = corpora.query.clean(word)
            cfilter["words"] = cfilter["words"] | SQ(content=word)

        for url in urls:
            url = corpora.query.clean(url)
            cfilter["urls"] = cfilter["urls"] | SQ(toplevel=url)

        if urls:
            cfilter = cfilter["words"] & cfilter["urls"]
        else:
            cfilter = cfilter["words"]

        corpora = corpora.filter(cfilter)

        if not len(corpora):
            raise Http404

        return (corpus.object for corpus in corpora.load_all())
Example #7
File: ajax.py Project: gage/proto
def related_contact(request, selector='all'):
    """ contact autocomplete
        indexes: first_name, last_name, email, phone, site_user
        type: 'all' (all), 'site' (site use only), 'no_site' (exclude site user)
    """
    q = request.GET.get('q', None)
    contact_id = request.GET.get('contact_id', None)
    limit = 5

    # Exclude already slaved contacts
    exclude_idl = list(Contact.objects.filter(master_contact=contact_id).values_list('id', flat=True))
    exclude_idl.append(contact_id)

    """ query handler """
    if q is None or q == '':
        return HttpJsonResponse()
    results = None
    has_query = False
    if '@' not in q:
        query_list = process_query(q)
        for q_o in query_list:
            if q_o:
                if not has_query:
                    has_query = True
                    results = SearchQuerySet().filter(SQ(first_name__startswith=q_o) | SQ(last_name__startswith=q_o) | \
                                 SQ(email__startswith=q_o) | SQ(phone__startswith=q_o) | SQ(site_user__startswith=q_o))
                else:
                    results = results.filter(SQ(first_name__startswith=q_o) | SQ(last_name__startswith=q_o) | \
                                 SQ(email__startswith=q_o) | SQ(phone__startswith=q_o) | SQ(site_user__startswith=q_o))
    else:
        has_query = True
        results = SearchQuerySet().auto_query(q)

    if has_query:
        results = results.filter(user=request.user.pk).exclude(is_slave=True).models(Contact).load_all()
    else:
        results = []

    if has_query:
        if selector == 'site':
            results = results.filter(site_user__range=['*','*'])
        elif selector == "no_site":
            results = results.exclude(site_user__range=['*','*'])
        results = results.exclude(cid__in=exclude_idl)
        results = results.load_all()[:limit]
    else:
        results = []

    tmp_response = []
    html_content = ""
    for result in results:
        # Use html to render the contact
        if not result:
            continue
        html_content += render_to_string('inc_contact_related.html', {'contact': result.object}, context_instance=RequestContext(request))
    data = {'html':html_content}
    return HttpJsonResponse(data)
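process_query is not shown above; a plausible sketch (an assumption inferred from how its results feed the startswith filters) simply tokenises the free-text input:

import re


def process_query(q):
    # Hypothetical helper: split the raw input on whitespace so each token can
    # be turned into a startswith filter against the contact fields.
    return [token for token in re.split(r'\s+', q.strip()) if token]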
Example #8
class LiveSolrSearchQuerySetTestCase(TestCase):
    """Used to test actual implementation details of the SearchQuerySet."""

    fixtures = ["initial_data.json"]

    def setUp(self):
        super(LiveSolrSearchQuerySetTestCase, self).setUp()
        self.sqs = SearchQuerySet()

        # With the models registered, you get the proper bits.
        import haystack
        from haystack.sites import SearchSite

        # Stow.
        self.old_site = haystack.site
        test_site = SearchSite()
        test_site.register(MockModel)
        haystack.site = test_site

        # Force indexing of the content.
        for mock in MockModel.objects.all():
            mock.save()

    def tearDown(self):
        # Restore.
        import haystack

        haystack.site = self.old_site
        super(LiveSolrSearchQuerySetTestCase, self).tearDown()

    def test_load_all(self):
        sqs = self.sqs.load_all()
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs[0].object.foo, "bar")

    def test_load_all_queryset(self):
        sqs = self.sqs.load_all()
        self.assertEqual(len(sqs._load_all_querysets), 0)

        sqs = sqs.load_all_queryset(MockModel, MockModel.objects.filter(id__gt=1))
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        self.assertEqual([obj.object.id for obj in sqs], [2, 3])
Example #9
 def get_queryset(self):
     term = self.request.GET.get('q', '').strip()
     sqs = SearchQuerySet().models(Proposal).filter(
         active=True, community=self.community.id,
         status=Proposal.statuses.ACCEPTED,
         type=ProposalType.TASK).order_by(self._get_order())
     if term:
         sqs = sqs.filter(content=AutoQuery(term)) \
                  .filter_or(assignee__contains=term)
     return sqs.load_all()
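The _get_order helper is not part of the snippet; a plausible sketch (mirroring the inline ordering logic in Example #12, so the field names are assumptions) would sit on the same view class:

 def _get_order(self):
     # Hypothetical helper: map the 'ord' query parameter onto a Haystack
     # order_by expression, defaulting to newest-first.
     ord_param = self.request.GET.get('ord', 'date')
     return '-decided_at' if ord_param == 'date' else 'title'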
Example #10
File: views.py Project: PGower/PNS
 def get(self, request):
     # import pdb;pdb.set_trace()
     q = request.query_params.get('q')
     if q == '!!preload':
         results = Fullname.objects.all()
         results = FullnameSerializer(results, many=True)
         return Response({'results': results.data})
     else:
         query = SearchQuerySet().autocomplete(text=q)
         results = query.load_all()
         results = FullnameSerializer(results, many=True)
         return Response({'results': results.data})
Example #11
 def read(self, request, post_id=None, post_type=None, category=None):
     if post_id:
         return Post.objects.get(pk=post_id)
     else:
         search = SearchQuerySet()
         if post_type:
             search = search.filter(type=post_type)
         if category:
             search = search.filter(category=category)
         q = request.GET.get('q', None)
         if q:
             search = search.filter(content=q)
         return [r.object for r in search.load_all()]
Example #12
 def get_queryset(self):
     term = self.request.GET.get('q', '').strip()
     if not term:
         # try search by tag
         term = self.request.GET.get('t', '').strip()
     self.order_by = self.request.GET.get('ord', 'date') 
     ord_term = '-decided_at' if self.order_by == 'date' else 'title'
     sqs = SearchQuerySet().filter(
         active=True, community=self.community.id,
         status=Proposal.statuses.ACCEPTED,
         type=ProposalType.RULE).order_by(ord_term)
     if term:
         sqs = sqs.filter(content=AutoQuery(term))
     return sqs.load_all()
Example #13
 def test_load_all(self):
     # Models with character primary keys
     sqs = SearchQuerySet(query=MockSearchQuery(backend=CharPKMockSearchBackend()))
     results = sqs.load_all().all()
     self.assertEqual(len(results._result_cache), 0)
     results._fill_cache(0, 2)
     self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)
     
     # If nothing is registered, you get nothing.
     haystack.site.unregister(MockModel)
     haystack.site.unregister(CharPKMockModel)
     sqs = self.msqs.load_all()
     self.assert_(isinstance(sqs, SearchQuerySet))
     self.assertEqual(len(sqs), 0)
Example #14
def search(request):
    form = forms.Search()
    if 'search' in request.GET or 'user' in request.GET:
        form = forms.Search(request.GET)
        if form.is_valid():
            words = form.cleaned_data.get('search')
            user = form.cleaned_data.get('user')
            subforums = form.cleaned_data.get('subforums')
            only_threads = form.cleaned_data.get('only_threads')
            sort_by = form.cleaned_data.get('sort_by')
            num_page = request.GET.get('page')
            get = request.GET.copy()
            if num_page:
                del get['page']
            get = get.urlencode()
            if only_threads:
                query = SearchQuerySet().models(Thread)
            else:
                query = SearchQuerySet()
            if words:
                query = query.filter(content=words)
            if user:
                query = query.filter(author=user)
            if subforums:
                query = query.filter(subforum__in=subforums)
            order_dict = {'p': '-pub_date', 'rt': '-rating'}
            sort_by = order_dict.get(sort_by)
            if sort_by:
                query = query.order_by(sort_by)
            query = query.load_all()
            paginator = Paginator(query, forum_settings.POSTS_ON_PAGE)
            try:
                page = paginator.page(num_page)
            except PageNotAnInteger:
                page = paginator.page(1)
            except EmptyPage:
                page = paginator.page(paginator.num_pages)
            init_list = list(paginator.page_range)
            p_index = page.number - 1
            i = p_index - 2 if p_index > 2 else 0
            pages_list = init_list[i:p_index + 3]
            num_res = len(query)
            context = {
                'query': page, 'pages_list': pages_list, 'form': form,
                'last_page': paginator.num_pages, 'get': get,
                'num_page': page.number, 'num_res': num_res,
            }
            return render(request, 'forum/search_form.html', context)
    return render(request, 'forum/search_form.html', {'form': form})
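A quick worked example of the five-number pagination window computed above (values are illustrative): with 10 result pages and the user on page 6, p_index is 5, i is 3, and the window covers pages 4 through 8.

# Worked example of the pages_list window: 10 pages, user on page 6.
init_list = list(range(1, 11))        # paginator.page_range
p_index = 6 - 1                       # page.number - 1
i = p_index - 2 if p_index > 2 else 0
assert init_list[i:p_index + 3] == [4, 5, 6, 7, 8]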
Example #15
 def get_queryset(self):
     qs = SearchQuerySet().all()
     if self.request.user.is_authenticated():
         if not permissions.can_moderate(models.URLPath.root().article, self.request.user):
             qs = qs.filter(
                 SQ(owner_id=self.request.user.id) |
                 (
                     SQ(group_id__in=self.request.user.groups.values_list('id', flat=True))
                     & SQ(group_read=True)
                 ) |
                 SQ(other_read=True)
             )
     else:
         qs = qs.exclude(other_read=False)
     
     qs = qs.filter(content=AutoQuery(self.query))
     qs = qs.load_all()
     return qs
Example #16
 def render_into_region(self, obj, context, **kwargs):
     sqs = SearchQuerySet().using(obj.connection)
     if obj.request_objects:
         sqs = sqs.load_all()
     try:
         context.update({
             'more_like_this': sqs.more_like_this(
                 obj.content_object)[0:obj.max_num]
         })
     except NotHandled:
         logger.exception("Haystack hasn't been configured to handle this "
                          "object type.")
         context.update({'more_like_this': ()})
     except SearchBackendError:
         logger.exception("Recovering from search backend error by failing "
                          "silently.")
         context.update({'more_like_this': ()})
     return render_to_string('editregions/search/mlt.html',
                             context_instance=context)
Example #17
def get_documents(query_json):

    try:
        query = json.loads(query_json)
    except (ValueError, TypeError):
        query = dict()

    query = DocumentSearchForm(query)

    if not query.is_valid():
        return []

    qw = query.cleaned_data['qw']
    document_level = query.cleaned_data['document_level']
    tags = query.cleaned_data['tags']

    if qw is None and document_level is None and tags is None:
        # If no search criteria were specified, return an empty list.
        return []
    else:
        sqs = SearchQuerySet().models(StoredDocument)

        if qw:
            sqs = sqs.filter(content=Clean(qw)).highlight()

        if document_level:
            sqs = sqs.filter(document_level=Clean(document_level))

        if tags:
            for tag in tags:
                #Apply AND operation to labels
                sqs = sqs.filter(tags__contains=Clean(tag.tag))

    return [
        dict(
            id=sqr.object.id,
            name=sqr.object.name,
            serial_number=sqr.object.serial_number,
            url=sqr.object.get_absolute_url(),
            version=sqr.object.version,
            location=sqr.object.location,
            tags=list(sqr.object.tags.all().values_list('tag', flat=True)),
            highlighted_text=sqr.highlighted['text'][0] if sqr.highlighted else '',
        )
        for sqr in sqs.load_all()
    ]
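A usage sketch for get_documents (field values and tag names are illustrative assumptions) showing the JSON-encoded payload it expects:

import json

# Hypothetical call: search stored documents whose text matches "invoice"
# and that carry both of the given tags.
docs = get_documents(json.dumps({
    'qw': 'invoice',
    'document_level': None,
    'tags': ['finance', '2023'],
}))
for doc in docs:
    print(doc['name'], doc['url'])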
Example #18
 def get_queryset(self):
     qs = SearchQuerySet().all()
     if self.request.user.is_authenticated():
         # TODO: This has a leak! It should say:
         # group=self.request.group.id AND group_read=True
         if not permissions.can_moderate(models.URLPath.root().article, 
             self.request.user):
             qs = qs.filter_or(
                 owner=self.request.user.id, 
                 group=self.request.group.id,
                 other_read=True
             )
     else:
         qs = qs.exclude(other_read=False)
     
     qs = qs.filter(content=AutoQuery(self.query))
     qs = qs.exclude(other_read=False)
     qs = qs.load_all()
     return qs
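The TODO above is resolved in Example #15, which replaces the leaky filter_or with explicit SQ objects so the group id and group_read flag are ANDed together; the key change is:

qs = qs.filter(
    SQ(owner_id=self.request.user.id) |
    (
        SQ(group_id__in=self.request.user.groups.values_list('id', flat=True))
        & SQ(group_read=True)
    ) |
    SQ(other_read=True)
)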
Example #19
    def get_results(self, request):
        if SEARCH_VAR not in request.GET or (len(request.GET[SEARCH_VAR]) == 0 and len(request.GET.keys()) == 1):
            return super(SearchChangeList, self).get_results(request)

        filters = self.custom_get_filters(request)

        # Note that pagination is 0-based, not 1-based.
        sqs = SearchQuerySet(self.haystack_connection).models(self.model)
        if request.GET[SEARCH_VAR]:
            sqs = sqs.auto_query(request.GET[SEARCH_VAR])
        if filters:
            sqs = sqs.filter(**filters)

        sqs = sqs.load_all()

        # Set ordering.
        ordering = self.get_ordering(request, sqs)
        sqs = sqs.order_by(*ordering)

        paginator = Paginator(sqs, self.list_per_page)
        # Get the number of objects, with admin filters applied.
        result_count = paginator.count
        full_result_count = SearchQuerySet(self.haystack_connection).models(self.model).all().count()

        can_show_all = result_count <= list_max_show_all(self)
        multi_page = result_count > self.list_per_page

        # Get the list of objects to display on this page.
        try:
            result_list = paginator.page(self.page_num + 1).object_list
            # Grab just the Django models, since that's what everything else is
            # expecting.
            result_list = [result.object for result in result_list if result]
        except InvalidPage:
            result_list = ()

        self.result_count = result_count
        self.full_result_count = full_result_count
        self.result_list = result_list
        self.can_show_all = can_show_all
        self.multi_page = multi_page
        self.paginator = paginator
Example #20
def reindex_microsite_topic(topic):
    from haystack.query import SearchQuerySet

    objects = set()

    # get all objects from this topic and all objects with this topic's keywords
    query = "indexed_topics:%s" % topic.id
    for result in SearchQuerySet().narrow(query).load_all():
        objects.add(result.object)

    topic_keywords = topic.keywords.values_list("slug", flat=True)
    microsite_keywords = topic.microsite.keywords.values_list("slug", flat=True)
    if topic_keywords and microsite_keywords:
        query = SearchQuerySet()
        query = query.narrow("keywords(%s)" % " OR ".join(["%s" % kw for kw in topic_keywords]))
        query = query.narrow("keywords(%s)" % " OR ".join(["%s" % kw for kw in microsite_keywords]))
        for result in query.load_all():
            objects.add(result.object)

    for instance in objects:
        reindex(instance)
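The reindex helper called at the end is not defined in this snippet; a minimal sketch (an assumption, but built on Haystack's standard per-index API) would be:

from haystack import connections


def reindex(instance):
    # Hypothetical helper: look up the SearchIndex registered for this model
    # and push the single instance back into the search backend.
    unified_index = connections['default'].get_unified_index()
    index = unified_index.get_index(instance.__class__)
    index.update_object(instance)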
Example #21
    def render_into_region(self, obj, context, **kwargs):
        sqs = SearchQuerySet().using(obj.connection)
        # and now, in advanced usage, we allow for boosting words in the results
        actual_boosts = obj.get_boosts()
        if actual_boosts:
            for boost_word, boost_value in actual_boosts:
                sqs = sqs.boost(boost_word, boost_value)

        # we may want to efficiently load all model instances, if the template
        # requires them.
        if obj.request_objects:
            sqs = sqs.load_all()
        try:
            context.update({
                'search_results': sqs.auto_query(obj.query)[0:obj.max_num]
            })
        except SearchBackendError:
            logger.exception("Recovering from search backend error by failing "
                             "silently.")
            context.update({'search_results': ()})
        return render_to_string('editregions/search/query_results.html',
                                context_instance=context)
Example #22
    def search(self):
        # First, store the SearchQuerySet received from other processing.
        sqs = super(TakerSearchForm, self).search()
        if not hasattr(self, 'cleaned_data'):
            return sqs
        miles = int(self.cleaned_data['radius'])
        # first pass at filtering based on rough bounding box
        latitude = self.cleaned_data['latitude']
        if not latitude:
            latitude = 42.361208
        longitude = self.cleaned_data['longitude']
        if not longitude:
            longitude = -71.081157
        bbox = bounding_box(latitude, longitude, miles)
        sqs = sqs.filter(SQ(latitude__gte=bbox[0]) | SQ(latitude__gt=199))
        sqs = sqs.filter(SQ(longitude__lte=bbox[1]) | SQ(longitude__gt=199))
        sqs = sqs.filter(SQ(latitude__lte=bbox[2]) | SQ(latitude__gt=199))
        sqs = sqs.filter(SQ(longitude__gte=bbox[3]) | SQ(longitude__gt=199))
        # now filter the results for actual distance matches
        ids = []
        center = Point(latitude, longitude)
        for result in sqs.all():
            if result.latitude > 199 or result.longitude > 199:
                # include results without locations, they'll appear last
                ids.append(result.pk)
            if distance_between(center, result.latitude, result.longitude) <= miles:
                ids.append(result.pk)
        if len(ids) > 0:
            sqs = SearchQuerySet().filter(django_id__in=ids)
        else:
            sqs = SearchQuerySet().none()

        results = list(sqs.load_all())
        for taker in results:
            if taker.latitude < 199 and taker.longitude < 199:
                taker.distance = distance_between(center, taker.latitude, taker.longitude)
                print(taker.distance)
        results = sorted(results, key=lambda o: o.distance if hasattr(o, 'distance') else 1000000)
        return results
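bounding_box and distance_between are assumed helpers; a rough sketch of what they might do (a haversine great-circle distance and a degrees-per-mile box, not the project's actual code) is:

import math

EARTH_RADIUS_MILES = 3958.8


def distance_between(center, lat, lon):
    # Hypothetical helper: haversine distance in miles between a Point-like
    # center (center.x = latitude, center.y = longitude) and a lat/lon pair.
    lat1, lon1 = math.radians(center.x), math.radians(center.y)
    lat2, lon2 = math.radians(lat), math.radians(lon)
    a = (math.sin((lat2 - lat1) / 2) ** 2
         + math.cos(lat1) * math.cos(lat2) * math.sin((lon2 - lon1) / 2) ** 2)
    return 2 * EARTH_RADIUS_MILES * math.asin(math.sqrt(a))


def bounding_box(lat, lon, miles):
    # Hypothetical helper: (south, east, north, west) limits around the point,
    # matching the gte/lte comparisons used in the filters above.
    dlat = miles / 69.0
    dlon = miles / (69.0 * max(math.cos(math.radians(lat)), 0.01))
    return (lat - dlat, lon + dlon, lat + dlat, lon - dlon)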
Example #23
class SearchQuerySetTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(SearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.cpkmmsi = CharPKMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.cpkmmsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.msqs = SearchQuerySet()

        # Stow.
        reset_search_queries()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_unified_index
        super(SearchQuerySetTestCase, self).tearDown()

    def test_len(self):
        self.assertEqual(len(self.msqs), 23)

    def test_repr(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(
            repr(self.msqs).replace("u'", "'"),
            "[<SearchResult: core.mockmodel (pk='1')>, <SearchResult: core.mockmodel (pk='2')>, <SearchResult: core.mockmodel (pk='3')>, <SearchResult: core.mockmodel (pk='4')>, <SearchResult: core.mockmodel (pk='5')>, <SearchResult: core.mockmodel (pk='6')>, <SearchResult: core.mockmodel (pk='7')>, <SearchResult: core.mockmodel (pk='8')>, <SearchResult: core.mockmodel (pk='9')>, <SearchResult: core.mockmodel (pk='10')>, <SearchResult: core.mockmodel (pk='11')>, <SearchResult: core.mockmodel (pk='12')>, <SearchResult: core.mockmodel (pk='13')>, <SearchResult: core.mockmodel (pk='14')>, <SearchResult: core.mockmodel (pk='15')>, <SearchResult: core.mockmodel (pk='16')>, <SearchResult: core.mockmodel (pk='17')>, <SearchResult: core.mockmodel (pk='18')>, <SearchResult: core.mockmodel (pk='19')>, '...(remaining elements truncated)...']"
        )
        self.assertEqual(len(connections['default'].queries), 1)

    def test_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        msqs = self.msqs.all()
        results = [int(res.pk) for res in msqs]
        self.assertEqual(results, [res.pk for res in MOCK_SEARCH_RESULTS[:23]])
        self.assertEqual(len(connections['default'].queries), 3)

    def test_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual([int(res.pk) for res in results[1:11]],
                         [res.pk for res in MOCK_SEARCH_RESULTS[1:11]])
        self.assertEqual(len(connections['default'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual(int(results[22].pk), MOCK_SEARCH_RESULTS[22].pk)
        self.assertEqual(len(connections['default'].queries), 1)

    def test_manual_iter(self):
        results = self.msqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        check = [result.pk for result in results._manual_iter()]
        self.assertEqual(check, [
            u'1', u'2', u'3', u'4', u'5', u'6', u'7', u'8', u'9', u'10', u'11',
            u'12', u'13', u'14', u'15', u'16', u'17', u'18', u'19', u'20',
            u'21', u'22', u'23'
        ])

        self.assertEqual(len(connections['default'].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        # This will hang indefinitely if broken.
        old_ui = self.ui
        self.ui.build(indexes=[self.cpkmmsi])
        connections['default']._index = self.ui
        self.cpkmmsi.update()

        results = self.msqs.all()
        loaded = [result.pk for result in results._manual_iter()]
        self.assertEqual(loaded, [u'sometext', u'1234'])
        self.assertEqual(len(connections['default'].queries), 1)

        connections['default']._index = old_ui

    def test_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        sqs = SearchQuerySet().all()
        sqs.query.backend = MixedMockSearchBackend('default')
        results = sqs
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 0)
        self.assertEqual([
            int(result.pk)
            for result in results._result_cache if result is not None
        ], [])
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 9)
        self.assertEqual([
            int(result.pk)
            for result in results._result_cache if result is not None
        ], [1, 2, 3, 4, 5, 6, 7, 8, 10])
        self.assertEqual(len(connections['default'].queries), 2)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 17)
        self.assertEqual([
            int(result.pk)
            for result in results._result_cache if result is not None
        ], [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20])
        self.assertEqual(len(connections['default'].queries), 4)
        results._fill_cache(20, 30)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 20)
        self.assertEqual([
            int(result.pk)
            for result in results._result_cache if result is not None
        ], [
            1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20, 21, 22,
            23
        ])
        self.assertEqual(len(connections['default'].queries), 6)

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.msqs._cache_is_full(), False)
        results = self.msqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 3)

    def test_all(self):
        sqs = self.msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))

    def test_filter(self):
        sqs = self.msqs.filter(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_exclude(self):
        sqs = self.msqs.exclude(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_order_by(self):
        sqs = self.msqs.order_by('foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue('foo' in sqs.query.order_by)

    def test_models(self):
        # Stow.
        old_unified_index = connections['default']._index
        ui = UnifiedIndex()
        bmmsi = BasicMockModelSearchIndex()
        bammsi = BasicAnotherMockModelSearchIndex()
        ui.build(indexes=[bmmsi, bammsi])
        connections['default']._index = ui

        msqs = SearchQuerySet()

        sqs = msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 0)

        sqs = msqs.models(MockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

        sqs = msqs.models(MockModel, AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 2)

        # This will produce a warning.
        ui.build(indexes=[bmmsi])
        sqs = msqs.models(AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

    def test_result_class(self):
        sqs = self.msqs.all()
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

        # Custom class.
        class IttyBittyResult(object):
            pass

        sqs = self.msqs.result_class(IttyBittyResult)
        self.assertTrue(issubclass(sqs.query.result_class, IttyBittyResult))

        # Reset to default.
        sqs = self.msqs.result_class(None)
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

    def test_boost(self):
        sqs = self.msqs.boost('foo', 10)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.boost.keys()), 1)

    def test_highlight(self):
        sqs = self.msqs.highlight()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.highlight, True)

    def test_spelling(self):
        # Test the case where spelling support is disabled.
        sqs = self.msqs.filter(content='Indx')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.spelling_suggestion(), None)
        self.assertEqual(sqs.spelling_suggestion('indexy'), None)

    def test_raw_search(self):
        self.assertEqual(len(self.msqs.raw_search('foo')), 23)
        self.assertEqual(
            len(
                self.msqs.raw_search(
                    '(content__exact:hello AND content__exact:world)')), 23)

    def test_load_all(self):
        # Models with character primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = CharPKMockSearchBackend('charpk')
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # If nothing is handled, you get nothing.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        ui.build(indexes=[])
        connections['default']._index = ui

        sqs = self.msqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        connections['default']._index = old_ui

        # For full tests, see the solr_backend.

    def test_load_all_read_queryset(self):
        # Stow.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        gafmmsi = GhettoAFifthMockModelSearchIndex()
        ui.build(indexes=[gafmmsi])
        connections['default']._index = ui
        gafmmsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # The deleted result isn't returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 1)

        # Register a SearchIndex with a read_queryset that returns deleted items
        rqstsi = TextReadQuerySetTestSearchIndex()
        ui.build(indexes=[rqstsi])
        rqstsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # Both the deleted and not deleted items are returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # Restore.
        connections['default']._index = old_ui

    def test_auto_query(self):
        sqs = self.msqs.auto_query('test search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__contains=test search -stuff>')

        sqs = self.msqs.auto_query('test "my thing" search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__contains=test "my thing" search -stuff>')

        sqs = self.msqs.auto_query(
            'test "my thing" search \'moar quotes\' -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__contains=test "my thing" search \'moar quotes\' -stuff>'
        )

        sqs = self.msqs.auto_query(
            'test "my thing" search \'moar quotes\' "foo -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__contains=test "my thing" search \'moar quotes\' "foo -stuff>'
        )

        sqs = self.msqs.auto_query('test - stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         "<SQ: AND content__contains=test - stuff>")

        # Ensure bits in exact matches get escaped properly as well.
        sqs = self.msqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__contains="pants:rule">')

        # Now with a different fieldname
        sqs = self.msqs.auto_query('test search -stuff', fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         "<SQ: AND title__contains=test search -stuff>")

        sqs = self.msqs.auto_query('test "my thing" search -stuff',
                                   fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND title__contains=test "my thing" search -stuff>')

    def test_count(self):
        self.assertEqual(self.msqs.count(), 23)

    def test_facet_counts(self):
        self.assertEqual(self.msqs.facet_counts(), {})

    def test_best_match(self):
        self.assertTrue(isinstance(self.msqs.best_match(), SearchResult))

    def test_latest(self):
        self.assertTrue(isinstance(self.msqs.latest('pub_date'), SearchResult))

    def test_more_like_this(self):
        mock = MockModel()
        mock.id = 1

        self.assertEqual(len(self.msqs.more_like_this(mock)), 23)

    def test_facets(self):
        sqs = self.msqs.facet('foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.facets), 1)

        sqs2 = self.msqs.facet('foo').facet('bar')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.facets), 2)

    def test_date_facets(self):
        try:
            sqs = self.msqs.date_facet('foo',
                                       start_date=datetime.date(2008, 2, 25),
                                       end_date=datetime.date(2009, 2, 25),
                                       gap_by='smarblaph')
            self.fail()
        except FacetingError as e:
            self.assertEqual(
                str(e),
                "The gap_by ('smarblaph') must be one of the following: year, month, day, hour, minute, second."
            )

        sqs = self.msqs.date_facet('foo',
                                   start_date=datetime.date(2008, 2, 25),
                                   end_date=datetime.date(2009, 2, 25),
                                   gap_by='month')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.date_facets), 1)

        sqs2 = self.msqs.date_facet('foo',
                                    start_date=datetime.date(2008, 2, 25),
                                    end_date=datetime.date(2009, 2, 25),
                                    gap_by='month').date_facet(
                                        'bar',
                                        start_date=datetime.date(2007, 2, 25),
                                        end_date=datetime.date(2009, 2, 25),
                                        gap_by='year')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.date_facets), 2)

    def test_query_facets(self):
        sqs = self.msqs.query_facet('foo', '[bar TO *]')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_facets), 1)

        sqs2 = self.msqs.query_facet('foo', '[bar TO *]').query_facet(
            'bar', '[100 TO 499]')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.query_facets), 2)

        # Test multiple query facets on a single field
        sqs3 = self.msqs.query_facet('foo', '[bar TO *]').query_facet(
            'bar', '[100 TO 499]').query_facet('foo', '[1000 TO 1499]')
        self.assertTrue(isinstance(sqs3, SearchQuerySet))
        self.assertEqual(len(sqs3.query.query_facets), 3)

    def test_stats(self):
        sqs = self.msqs.stats_facet('foo', 'bar')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.stats), 1)

        sqs2 = self.msqs.stats_facet('foo', 'bar').stats_facet('foo', 'baz')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.stats), 1)

        sqs3 = self.msqs.stats_facet('foo', 'bar').stats_facet('moof', 'baz')
        self.assertTrue(isinstance(sqs3, SearchQuerySet))
        self.assertEqual(len(sqs3.query.stats), 2)

    def test_narrow(self):
        sqs = self.msqs.narrow('foo:moof')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.narrow_queries), 1)

    def test_clone(self):
        results = self.msqs.filter(foo='bar', foo__lt='10')

        clone = results._clone()
        self.assertTrue(isinstance(clone, SearchQuerySet))
        self.assertEqual(str(clone.query), str(results.query))
        self.assertEqual(clone._result_cache, [])
        self.assertEqual(clone._result_count, None)
        self.assertEqual(clone._cache_full, False)
        self.assertEqual(clone._using, results._using)

    def test_using(self):
        sqs = SearchQuerySet(using='default')
        self.assertNotEqual(sqs.query, None)
        self.assertEqual(sqs.query._using, 'default')

    def test_chaining(self):
        sqs = self.msqs.filter(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

        # A second instance should inherit none of the changes from above.
        sqs = self.msqs.filter(content='bar')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_none(self):
        sqs = self.msqs.none()
        self.assertTrue(isinstance(sqs, EmptySearchQuerySet))
        self.assertEqual(len(sqs), 0)

    def test___and__(self):
        sqs1 = self.msqs.filter(content='foo')
        sqs2 = self.msqs.filter(content='bar')
        sqs = sqs1 & sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)

    def test___or__(self):
        sqs1 = self.msqs.filter(content='foo')
        sqs2 = self.msqs.filter(content='bar')
        sqs = sqs1 | sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)

    def test_and_or(self):
        """
        Combining AND queries with OR should give
            AND(OR(a, b), OR(c, d))
        """
        sqs1 = self.msqs.filter(content='foo').filter(content='oof')
        sqs2 = self.msqs.filter(content='bar').filter(content='rab')
        sqs = sqs1 | sqs2

        self.assertEqual(sqs.query.query_filter.connector, 'OR')
        self.assertEqual(repr(sqs.query.query_filter.children[0]),
                         repr(sqs1.query.query_filter))
        self.assertEqual(repr(sqs.query.query_filter.children[1]),
                         repr(sqs2.query.query_filter))

    def test_or_and(self):
        """
        Combining OR queries with AND should give
            OR(AND(a, b), AND(c, d))
        """
        sqs1 = self.msqs.filter(content='foo').filter_or(content='oof')
        sqs2 = self.msqs.filter(content='bar').filter_or(content='rab')
        sqs = sqs1 & sqs2

        self.assertEqual(sqs.query.query_filter.connector, 'AND')
        self.assertEqual(repr(sqs.query.query_filter.children[0]),
                         repr(sqs1.query.query_filter))
        self.assertEqual(repr(sqs.query.query_filter.children[1]),
                         repr(sqs2.query.query_filter))
Example #24
    def list(self, request, *args, **kwargs):
        # If the incoming language is not specified, go with the default.
        self.lang_code = request.query_params.get('language', LANGUAGES[0])
        if self.lang_code not in LANGUAGES:
            raise ParseError(
                "Invalid language supplied. Supported languages: %s" %
                ','.join(LANGUAGES))

        specs = {
            'only_fields': self.request.query_params.get('only', None),
            'include_fields': self.request.query_params.get('include', None)
        }
        for key in specs.keys():
            if specs[key]:
                setattr(self, key, {})
                fields = [
                    x.strip().split('.') for x in specs[key].split(',') if x
                ]
                for f in fields:
                    try:
                        getattr(self, key).setdefault(f[0], []).append(f[1])
                    except IndexError:
                        logger.warning(
                            "Field '%s' given in unsupported non-dot-format: '%s'"
                            % (key, specs[key]))
            else:
                setattr(self, key, None)

        input_val = request.query_params.get('input', '').strip()
        q_val = request.query_params.get('q', '').strip()

        if not input_val and not q_val:
            raise ParseError(
                "Supply search terms with 'q=' or autocomplete entry with 'input='"
            )
        if input_val and q_val:
            raise ParseError("Supply either 'q' or 'input', not both")

        old_language = translation.get_language()[:2]
        translation.activate(self.lang_code)

        queryset = SearchQuerySet()

        if hasattr(request, 'accepted_media_type') and re.match(
                KML_REGEXP, request.accepted_media_type):
            queryset = queryset.models(Unit)
            self.only_fields['unit'].extend(['street_address', 'www'])

        if input_val:
            queryset = queryset.filter(
                SQ(autosuggest=input_val)
                | SQ(autosuggest_extra_searchwords=input_val)
                | SQ(autosuggest_exact__exact=input_val)
                | SQ(SQ(number=input_val) & SQ(autosuggest=input_val)))
        else:
            queryset = (queryset.filter(text=AutoQuery(q_val)).filter_or(
                extra_searchwords=q_val).filter_or(address=q_val))

        IS_NOT_UNIT_SQ = (SQ(django_ct='services.service')
                          | SQ(django_ct='services.servicenode')
                          | SQ(django_ct='munigeo.address'))

        if 'municipality' in request.query_params:
            val = request.query_params['municipality'].lower().strip()
            if len(val) > 0:
                municipalities = val.split(',')

                muni_sq = SQ(municipality=municipalities.pop().strip())
                for m in municipalities:
                    muni_sq |= SQ(municipality=m)

                queryset = queryset.filter(SQ(muni_sq | IS_NOT_UNIT_SQ))

        if 'city_as_department' in request.query_params:
            val = request.query_params['city_as_department'].lower().strip()

            if len(val) > 0:
                deps_uuid = val.split(',')

                # forming municipality search query
                deps = Department.objects.filter(
                    uuid__in=deps_uuid).select_related('municipality')
                munis = [d.municipality.name for d in deps]

                muni_sq = SQ(municipality=munis.pop())
                for m in munis:
                    muni_sq |= SQ(municipality=m)

                # forming root_deparment search query
                dep_sq = SQ(root_department=deps_uuid.pop().strip())
                for d in deps_uuid:
                    dep_sq |= SQ(root_department=d)

                # updating queryset
                queryset = queryset.filter(
                    SQ(muni_sq | dep_sq | IS_NOT_UNIT_SQ))

        service = request.query_params.get('service')
        if service:
            services = service.split(',')
            queryset = queryset.filter(django_ct='services.unit').filter(
                services__in=services)

        # Only units marked public should be returned. For other types,
        # public is always True.
        queryset = queryset.filter(public=True)

        models = set()
        types = request.query_params.get('type', '').split(',')
        for t in types:
            if t == 'service_node':
                models.add(ServiceNode)
            elif t == 'service':
                models.add(Service)
            elif t == 'unit':
                models.add(Unit)
            elif t == 'address':
                models.add(Address)
            elif t == 'administrative_division':
                models.add(AdministrativeDivision)
        if len(models) > 0:
            queryset = queryset.models(*list(models))
        else:
            # Hide the to-be-deprecated servicenode from default types
            queryset = queryset.models(Service, Unit, Address,
                                       AdministrativeDivision)

        only = getattr(self, 'only_fields') or {}
        include = getattr(self, 'include_fields') or {}
        Unit.search_objects.only_fields = only.get('unit')
        Unit.search_objects.include_fields = include.get('unit')

        self.object_list = queryset.load_all()

        # Switch between paginated or standard style responses
        page = self.paginate_queryset(self.object_list)
        serializer = self.get_serializer(page, many=True)
        resp = self.get_paginated_response(serializer.data)

        translation.activate(old_language)

        return resp
Example #25
def _query_results(query, person):
    """
    Actually build the query results for this person.

    Make sure any result.content_type values are reflected in RESULT_TYPE_DISPLAY for display to the user.
    """
    if len(query) < 2:
        return []

    query = query.replace(
        '@sfu.ca', '')  # hack to make email addresses searchable as userids
    query = Clean(query)

    # offerings person was a member of (coredata.CourseOffering)
    if person:
        members = Member.objects.filter(person=person).exclude(
            role='DROP').select_related('offering')
        offering_slugs = set(m.offering.slug for m in members)
        offering_results = SearchQuerySet().models(CourseOffering).filter(
            text__fuzzy=query)  # offerings that match the query
        offering_results = offering_results.filter(
            slug__in=offering_slugs)  # ... and this person was in
    else:
        members = []
        offering_results = []

    # pages this person can view (pages.Page)
    page_acl = set(['ALL'])
    for m in members:
        # builds a set of offering_slug+"_"+acl_value strings, which will match the permission_key field in the index
        member_acl = set("%s_%s" % (m.offering.slug, acl)
                         for acl in ACL_ROLES[m.role] if acl != 'ALL')
        page_acl |= member_acl

    page_results = SearchQuerySet().models(Page).filter(
        text__fuzzy=query)  # pages that match the query
    page_results = page_results.filter(
        permission_key__in=page_acl)  # ... and are visible to this user

    # discussion this person can view (discussion.DiscussionTopic)
    if person:
        discuss_results = SearchQuerySet().models(DiscussionTopic).filter(
            text__fuzzy=query)  # discussions that match the query
        discuss_results = discuss_results.filter(
            slug__in=offering_slugs)  # ... and this person was in
    else:
        discuss_results = []

    # students taught by instructor (coredata.Member)
    instr_members = Member.objects.filter(person=person, role__in=['INST','TA']).exclude(offering__component='CAN') \
        .select_related('offering')
    if person and instr_members:
        offering_slugs = set(m.offering.slug for m in instr_members)
        member_results = SearchQuerySet().models(Member).filter(
            text__fuzzy=query)  # members that match the query
        member_results = member_results.filter(
            offering_slug__in=offering_slugs
        )  # ... and this person was the instructor for
        member_results = member_results.load_all()
    else:
        member_results = []

    # combine and limit to best results
    results = itertools.chain(
        offering_results[:MAX_RESULTS],
        page_results[:MAX_RESULTS],
        member_results[:MAX_RESULTS],
        discuss_results[:MAX_RESULTS],
    )
    results = (r for r in results if r is not None)
    results = list(results)
    results.sort(key=lambda result: -result.score)
    results = results[:MAX_RESULTS]  # (list before this could be n*MAX_RESULTS long)

    return results
Example #26
    def list(self, request, *args, **kwargs):
        # If the incoming language is not specified, go with the default.
        self.lang_code = request.QUERY_PARAMS.get('language', LANGUAGES[0])
        if self.lang_code not in LANGUAGES:
            raise ParseError(
                "Invalid language supplied. Supported languages: %s" %
                ','.join(LANGUAGES))

        context = {}

        specs = {
            'only_fields': self.request.QUERY_PARAMS.get('only', None),
            'include_fields': self.request.QUERY_PARAMS.get('include', None)
        }
        for key in specs.keys():
            if specs[key]:
                setattr(self, key, {})
                fields = [
                    x.strip().split('.') for x in specs[key].split(',') if x
                ]
                for f in fields:
                    try:
                        getattr(self, key).setdefault(f[0], []).append(f[1])
                    except IndexError:
                        logger.warning(
                            "Field '%s' given in unsupported non-dot-format: '%s'"
                            % (key, specs[key]))
            else:
                setattr(self, key, None)

        input_val = request.QUERY_PARAMS.get('input', '').strip()
        q_val = request.QUERY_PARAMS.get('q', '').strip()
        if not input_val and not q_val:
            raise ParseError(
                "Supply search terms with 'q=' or autocomplete entry with 'input='"
            )
        if input_val and q_val:
            raise ParseError("Supply either 'q' or 'input', not both")

        old_language = translation.get_language()[:2]
        translation.activate(self.lang_code)

        queryset = SearchQuerySet()

        if hasattr(request, 'accepted_media_type') and re.match(
                KML_REGEXP, request.accepted_media_type):
            queryset = queryset.models(Unit)
            self.only_fields['unit'].extend(['street_address', 'www_url'])

        if input_val:
            queryset = (queryset.filter(autosuggest=input_val).filter_or(
                autosuggest_extra_searchwords=input_val).filter_or(
                    autosuggest_exact__exact=input_val))
        else:
            queryset = (queryset.filter(text=AutoQuery(q_val)).filter_or(
                extra_searchwords=q_val).filter_or(address=q_val))
        if 'municipality' in request.QUERY_PARAMS:
            val = request.QUERY_PARAMS['municipality'].lower().strip()
            if len(val) > 0:
                municipalities = val.split(',')
                muni_q_objects = [
                    SQ(municipality=m.strip()) for m in municipalities
                ]
                muni_q = muni_q_objects.pop()
                for q in muni_q_objects:
                    muni_q |= q
                queryset = queryset.filter(
                    SQ(muni_q | SQ(django_ct='services.service')
                       | SQ(django_ct='munigeo.address')))

        service = request.QUERY_PARAMS.get('service')
        if service:
            services = service.split(',')
            queryset = queryset.filter(django_ct='services.unit').filter(
                services__in=services)

        models = set()
        types = request.QUERY_PARAMS.get('type', '').split(',')
        for t in types:
            if t == 'service':
                models.add(Service)
            elif t == 'unit':
                models.add(Unit)
            elif t == 'address':
                models.add(Address)
        if len(models) > 0:
            queryset = queryset.models(*list(models))

        only = getattr(self, 'only_fields') or {}
        include = getattr(self, 'include_fields') or {}
        Unit.search_objects.only_fields = only.get('unit')
        Unit.search_objects.include_fields = include.get('unit')

        self.object_list = queryset.load_all()

        # Switch between paginated or standard style responses
        page = self.paginate_queryset(self.object_list)
        serializer = self.get_serializer(page, many=True)
        resp = self.get_paginated_response(serializer.data)

        translation.activate(old_language)

        return resp
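
A small sketch, under assumed field and parameter names, of the SQ OR-chaining that the municipality filter above performs: start from one SQ node, OR the remaining values into it, then hand the combined node to filter().

from haystack.query import SQ

def filter_by_municipalities(queryset, raw_value):
    # raw_value is e.g. "helsinki,espoo"; the 'municipality' field name is assumed.
    names = [v.strip() for v in raw_value.lower().split(',') if v.strip()]
    if not names:
        return queryset
    combined = SQ(municipality=names[0])
    for name in names[1:]:
        combined |= SQ(municipality=name)
    return queryset.filter(combined)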
Example #27
0
class SearchQuerySetTestCase(TestCase):
    def setUp(self):
        super(SearchQuerySetTestCase, self).setUp()
        self.bsqs = SearchQuerySet(query=DummySearchQuery(backend=DummySearchBackend()))
        self.msqs = SearchQuerySet(query=MockSearchQuery(backend=MockSearchBackend()))
        self.mmsqs = SearchQuerySet(query=MockSearchQuery(backend=MixedMockSearchBackend()))
        
        # Stow.
        self.old_site = haystack.site
        test_site = SearchSite()
        test_site.register(MockModel)
        haystack.site = test_site
    
    def tearDown(self):
        # Restore.
        haystack.site = self.old_site
        super(SearchQuerySetTestCase, self).tearDown()
    
    def test_len(self):
        # Dummy always returns 0.
        self.assertEqual(len(self.bsqs), 0)
        
        self.assertEqual(len(self.msqs), 100)
    
    def test_iter(self):
        # Dummy always returns [].
        self.assertEqual([result for result in self.bsqs.all()], [])
        
        msqs = self.msqs.all()
        results = [result for result in msqs]
        self.assertEqual(results, MOCK_SEARCH_RESULTS)
    
    def test_slice(self):
        self.assertEqual(self.msqs.all()[1:11], MOCK_SEARCH_RESULTS[1:11])
        self.assertEqual(self.msqs.all()[50], MOCK_SEARCH_RESULTS[50])
    
    def test_manual_iter(self):
        results = self.msqs.all()
        
        for offset, result in enumerate(results._manual_iter()):
            self.assertEqual(result, MOCK_SEARCH_RESULTS[offset])
        
        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not in the SearchSite) than the hit count indicates.
        # This will hang indefinitely if broken.
        results = self.mmsqs.all()
        self.assertEqual([result.pk for result in results._manual_iter()], [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 15, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29])
    
    def test_fill_cache(self):
        results = self.msqs.all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache()
        self.assertEqual(len(results._result_cache), 10)
        results._fill_cache()
        self.assertEqual(len(results._result_cache), 20)
        
        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not in the SearchSite) than the hit count indicates.
        results = self.mmsqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual([result.pk for result in results._result_cache], [])
        results._fill_cache()
        self.assertEqual(len(results._result_cache), 10)
        self.assertEqual([result.pk for result in results._result_cache], [0, 1, 2, 3, 4, 5, 6, 7, 8, 10])
        results._fill_cache()
        self.assertEqual(len(results._result_cache), 20)
        self.assertEqual([result.pk for result in results._result_cache], [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 15, 17, 18, 19, 20, 21, 22])
        results._fill_cache()
        self.assertEqual(len(results._result_cache), 27)
        self.assertEqual([result.pk for result in results._result_cache], [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 15, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29])
    
    def test_cache_is_full(self):
        # Dummy always has a count of 0 and an empty _result_cache, hence True.
        self.assertEqual(self.bsqs._cache_is_full(), True)
        
        self.assertEqual(self.msqs._cache_is_full(), False)
        results = self.msqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
    
    def test_all(self):
        sqs = self.bsqs.all()
        self.assert_(isinstance(sqs, SearchQuerySet))
    
    def test_filter(self):
        sqs = self.bsqs.filter(content='foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filters), 1)
    
    def test_exclude(self):
        sqs = self.bsqs.exclude(content='foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filters), 1)
    
    def test_order_by(self):
        sqs = self.bsqs.order_by('foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assert_('foo' in sqs.query.order_by)
    
    def test_models(self):
        mock_index_site = SearchSite()
        mock_index_site.register(MockModel)
        mock_index_site.register(AnotherMockModel)
        
        bsqs = SearchQuerySet(site=mock_index_site)
        
        sqs = bsqs.all()
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 0)
        
        sqs = bsqs.models(MockModel)
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)
        
        sqs = bsqs.models(MockModel, AnotherMockModel)
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 2)
    
    def test_boost(self):
        sqs = self.bsqs.boost('foo', 10)
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.boost.keys()), 1)
    
    def test_highlight(self):
        sqs = self.bsqs.highlight()
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.highlight, True)
    
    def test_spelling(self):
        # Test the case where spelling support is disabled.
        sqs = self.bsqs.filter(content='Indx')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.spelling_suggestion(), None)
    
    def test_raw_search(self):
        self.assertEqual(len(self.bsqs.raw_search('foo')), 0)
        self.assertEqual(len(self.bsqs.raw_search('content__exact hello AND content__exact world')), 1)
    
    def test_load_all(self):
        # If nothing is registered, you get nothing.
        haystack.site.unregister(MockModel)
        sqs = self.msqs.load_all()
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)
        
        # For full tests, see the solr_backend.
    
    def test_load_all_queryset(self):
        sqs = self.msqs.load_all()
        self.assertEqual(len(sqs._load_all_querysets), 0)
        
        sqs = sqs.load_all_queryset(MockModel, MockModel.objects.filter(id__gt=1))
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        
        # For full tests, see the solr_backend.
    
    def test_auto_query(self):
        sqs = self.bsqs.auto_query('test search -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual([repr(the_filter) for the_filter in sqs.query.query_filters], ['<QueryFilter: AND content__exact=test>', '<QueryFilter: AND content__exact=search>', '<QueryFilter: NOT content__exact=stuff>'])
        
        sqs = self.bsqs.auto_query('test "my thing" search -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual([repr(the_filter) for the_filter in sqs.query.query_filters], ['<QueryFilter: AND content__exact=my thing>', '<QueryFilter: AND content__exact=test>', '<QueryFilter: AND content__exact=search>', '<QueryFilter: NOT content__exact=stuff>'])
        
        sqs = self.bsqs.auto_query('test "my thing" search \'moar quotes\' -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual([repr(the_filter) for the_filter in sqs.query.query_filters], ['<QueryFilter: AND content__exact=my thing>', '<QueryFilter: AND content__exact=moar quotes>', '<QueryFilter: AND content__exact=test>', '<QueryFilter: AND content__exact=search>', '<QueryFilter: NOT content__exact=stuff>'])
        
        sqs = self.bsqs.auto_query('test "my thing" search \'moar quotes\' "foo -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual([repr(the_filter) for the_filter in sqs.query.query_filters], ['<QueryFilter: AND content__exact=my thing>', '<QueryFilter: AND content__exact=moar quotes>', '<QueryFilter: AND content__exact=test>', '<QueryFilter: AND content__exact=search>', '<QueryFilter: AND content__exact="foo>', '<QueryFilter: NOT content__exact=stuff>'])
        
        sqs = self.bsqs.auto_query('test - stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual([repr(the_filter) for the_filter in sqs.query.query_filters], ['<QueryFilter: AND content__exact=test>', '<QueryFilter: AND content__exact=->', '<QueryFilter: AND content__exact=stuff>'])
    
    def test_count(self):
        self.assertEqual(self.bsqs.count(), 0)
    
    def test_facet_counts(self):
        self.assertEqual(self.bsqs.facet_counts(), {})
    
    def test_best_match(self):
        self.assert_(isinstance(self.msqs.best_match(), SearchResult))
    
    def test_latest(self):
        self.assert_(isinstance(self.msqs.latest('pub_date'), SearchResult))
    
    def test_more_like_this(self):
        mock = MockModel()
        mock.id = 1
        
        self.assertEqual(len(self.msqs.more_like_this(mock)), 100)
    
    def test_facets(self):
        sqs = self.bsqs.facet('foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.facets), 1)
        
        sqs2 = self.bsqs.facet('foo').facet('bar')
        self.assert_(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.facets), 2)
    
    def test_date_facets(self):
        sqs = self.bsqs.date_facet('foo', start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='month')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.date_facets), 1)
        
        sqs2 = self.bsqs.date_facet('foo', start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='month').date_facet('bar', start_date=datetime.date(2007, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='year')
        self.assert_(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.date_facets), 2)
    
    def test_query_facets(self):
        sqs = self.bsqs.query_facet('foo', '[bar TO *]')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_facets), 1)
        
        sqs2 = self.bsqs.query_facet('foo', '[bar TO *]').query_facet('bar', '[100 TO 499]')
        self.assert_(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.query_facets), 2)
    
    def test_narrow(self):
        sqs = self.bsqs.narrow('foo:moof')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.narrow_queries), 1)
    
    def test_clone(self):
        results = self.msqs.filter(foo='bar', foo__lt='10')
        
        clone = results._clone()
        self.assert_(isinstance(clone, SearchQuerySet))
        self.assertEqual(clone.site, results.site)
        self.assertEqual(str(clone.query), str(results.query))
        self.assertEqual(clone._result_cache, [])
        self.assertEqual(clone._result_count, None)
        self.assertEqual(clone._cache_full, False)
    
    def test_chaining(self):
        sqs = self.bsqs.filter(content='foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filters), 1)
        
        # A second instance should inherit none of the changes from above.
        sqs = self.bsqs.filter(content='bar')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filters), 1)
    
    def test_none(self):
        sqs = self.bsqs.none()
        self.assert_(isinstance(sqs, EmptySearchQuerySet))
        self.assertEqual(len(sqs), 0)
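
A brief, hedged usage sketch of the chaining API the assertions above exercise; the field names ('pub_date', 'author') are illustrative, not taken from the test suite.

from haystack.query import SearchQuerySet

sqs = (SearchQuerySet()
       .filter(content='foo')     # narrow by document content
       .exclude(content='spam')   # each call returns a new, independent SearchQuerySet
       .order_by('-pub_date')     # assumed indexed date field
       .facet('author'))          # assumed facetable field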
Example #28
0
class LiveSolrSearchQuerySetTestCase(TestCase):
    """Used to test actual implementation details of the SearchQuerySet."""

    fixtures = ["solr_bulk_data.json"]

    def setUp(self):
        super(LiveSolrSearchQuerySetTestCase, self).setUp()

        # With the models registered, you get the proper bits.
        import haystack
        from haystack.sites import SearchSite

        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_site = haystack.site
        test_site = SearchSite()
        test_site.register(MockModel)
        haystack.site = test_site

        self.sqs = SearchQuerySet()

        # Force indexing of the content.
        for mock in MockModel.objects.all():
            mock.save()

    def tearDown(self):
        # Restore.
        import haystack

        haystack.site = self.old_site
        settings.DEBUG = self.old_debug
        super(LiveSolrSearchQuerySetTestCase, self).tearDown()

    def test_load_all(self):
        sqs = self.sqs.load_all()
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assert_(len(sqs) > 0)
        self.assertEqual(
            sqs[0].object.foo,
            u"Registering indexes in Haystack is very similar to registering models and ``ModelAdmin`` classes in the `Django admin site`_.  If you want to override the default indexing behavior for your model you can specify your own ``SearchIndex`` class.  This is useful for ensuring that future-dated or non-live content is not indexed and searchable. Our ``Note`` model has a ``pub_date`` field, so let's update our code to include our own ``SearchIndex`` to exclude indexing future-dated notes:",
        )

    def test_load_all_queryset(self):
        sqs = self.sqs.load_all()
        self.assertRaises(HaystackError, sqs.load_all_queryset, MockModel, MockModel.objects.filter(id__gt=1))

    def test_iter(self):
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        sqs = self.sqs.all()
        results = [int(result.pk) for result in sqs]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(backends.queries), 3)

    def test_slice(self):
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        results = self.sqs.all()
        self.assertEqual([int(result.pk) for result in results[1:11]], [2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
        self.assertEqual(len(backends.queries), 1)

        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        results = self.sqs.all()
        self.assertEqual(int(results[21].pk), 22)
        self.assertEqual(len(backends.queries), 1)

    def test_manual_iter(self):
        results = self.sqs.all()

        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        results = [int(result.pk) for result in results._manual_iter()]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(backends.queries), 3)

    def test_fill_cache(self):
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        results = self.sqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(backends.queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
        self.assertEqual(len(backends.queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual(len(backends.queries), 2)

    def test_cache_is_full(self):
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        self.assertEqual(self.sqs._cache_is_full(), False)
        results = self.sqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(backends.queries), 3)
Example #29
0
class LiveSolrSearchQuerySetTestCase(TestCase):
    """Used to test actual implementation details of the SearchQuerySet."""
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveSolrSearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SolrMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui

        self.sqs = SearchQuerySet()
        self.rsqs = RelatedSearchQuerySet()

        # Ugly, but not constantly reindexing saves us almost 50% runtime.
        global lssqstc_all_loaded

        if lssqstc_all_loaded is None:
            print('Reloading data...')
            lssqstc_all_loaded = True

            # Wipe it clean.
            clear_solr_index()

            # Force indexing of the content.
            self.smmi.update()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(LiveSolrSearchQuerySetTestCase, self).tearDown()

    def test_load_all(self):
        sqs = self.sqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue(len(sqs) > 0)
        self.assertEqual(sqs[0].object.foo, u"Registering indexes in Haystack is very similar to registering models and ``ModelAdmin`` classes in the `Django admin site`_.  If you want to override the default indexing behavior for your model you can specify your own ``SearchIndex`` class.  This is useful for ensuring that future-dated or non-live content is not indexed and searchable. Our ``Note`` model has a ``pub_date`` field, so let's update our code to include our own ``SearchIndex`` to exclude indexing future-dated notes:")

    def test_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.sqs.all()
        results = [int(result.pk) for result in sqs]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 3)

    def test_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual([int(result.pk) for result in results[1:11]], [2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
        self.assertEqual(len(connections['default'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual(int(results[21].pk), 22)
        self.assertEqual(len(connections['default'].queries), 1)

    def test_count(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.sqs.all()
        self.assertEqual(sqs.count(), 23)
        self.assertEqual(sqs.count(), 23)
        self.assertEqual(len(sqs), 23)
        self.assertEqual(sqs.count(), 23)
        # Should only execute one query to count the length of the result set.
        self.assertEqual(len(connections['default'].queries), 1)

    def test_manual_iter(self):
        results = self.sqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = [int(result.pk) for result in results._manual_iter()]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 3)

    def test_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.sqs._cache_is_full(), False)
        results = self.sqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 3)

    def test___and__(self):
        sqs1 = self.sqs.filter(content='foo')
        sqs2 = self.sqs.filter(content='bar')
        sqs = sqs1 & sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'(foo AND bar)')

        # Now for something more complex...
        sqs3 = self.sqs.exclude(title='moof').filter(SQ(content='foo') | SQ(content='baz'))
        sqs4 = self.sqs.filter(content='bar')
        sqs = sqs3 & sqs4

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 3)
        self.assertEqual(sqs.query.build_query(), u'(NOT (title:moof) AND (foo OR baz) AND bar)')

    def test___or__(self):
        sqs1 = self.sqs.filter(content='foo')
        sqs2 = self.sqs.filter(content='bar')
        sqs = sqs1 | sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'(foo OR bar)')

        # Now for something more complex...
        sqs3 = self.sqs.exclude(title='moof').filter(SQ(content='foo') | SQ(content='baz'))
        sqs4 = self.sqs.filter(content='bar').models(MockModel)
        sqs = sqs3 | sqs4

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'((NOT (title:moof) AND (foo OR baz)) OR bar)')

    def test_auto_query(self):
        # Ensure bits in exact matches get escaped properly as well.
        # This will break horrifically if escaping isn't working.
        sqs = self.sqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__exact=pants\\:rule>')
        self.assertEqual(sqs.query.build_query(), u'"pants\\:rule"')
        self.assertEqual(len(sqs), 0)

    # Regressions

    def test_regression_proper_start_offsets(self):
        sqs = self.sqs.filter(text='index')
        self.assertNotEqual(sqs.count(), 0)

        id_counts = {}

        for item in sqs:
            if item.id in id_counts:
                id_counts[item.id] += 1
            else:
                id_counts[item.id] = 1

        for key, value in id_counts.items():
            if value > 1:
                self.fail("Result with id '%s' seen more than once in the results." % key)

    def test_regression_raw_search_breaks_slicing(self):
        sqs = self.sqs.raw_search('text: index')
        page_1 = [result.pk for result in sqs[0:10]]
        page_2 = [result.pk for result in sqs[10:20]]

        for pk in page_2:
            if pk in page_1:
                self.fail("Result with id '%s' seen more than once in the results." % pk)

    # RelatedSearchQuerySet Tests

    def test_related_load_all(self):
        sqs = self.rsqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue(len(sqs) > 0)
        self.assertEqual(sqs[0].object.foo, u"Registering indexes in Haystack is very similar to registering models and ``ModelAdmin`` classes in the `Django admin site`_.  If you want to override the default indexing behavior for your model you can specify your own ``SearchIndex`` class.  This is useful for ensuring that future-dated or non-live content is not indexed and searchable. Our ``Note`` model has a ``pub_date`` field, so let's update our code to include our own ``SearchIndex`` to exclude indexing future-dated notes:")

    def test_related_load_all_queryset(self):
        sqs = self.rsqs.load_all()
        self.assertEqual(len(sqs._load_all_querysets), 0)

        sqs = sqs.load_all_queryset(MockModel, MockModel.objects.filter(id__gt=1))
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        self.assertEqual([obj.object.id for obj in sqs], range(2, 24))

        sqs = sqs.load_all_queryset(MockModel, MockModel.objects.filter(id__gt=10))
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        self.assertEqual([obj.object.id for obj in sqs], range(11, 24))
        self.assertEqual([obj.object.id for obj in sqs[10:20]], [21, 22, 23])

    def test_related_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.rsqs.all()
        results = [int(result.pk) for result in sqs]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual([int(result.pk) for result in results[1:11]], [2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
        self.assertEqual(len(connections['default'].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual(int(results[21].pk), 22)
        self.assertEqual(len(connections['default'].queries), 4)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual([int(result.pk) for result in results[20:30]], [21, 22, 23])
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_manual_iter(self):
        results = self.rsqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = [int(result.pk) for result in results._manual_iter()]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

    def test_related_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.rsqs._cache_is_full(), False)
        results = self.rsqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 5)

    def test_quotes_regression(self):
        sqs = self.sqs.auto_query("44°48'40''N 20°28'32''E")
        # Should not have empty terms.
        self.assertEqual(sqs.query.build_query(), u"(44\ufffd\ufffd48'40''N AND 20\ufffd\ufffd28'32''E)")
        # Should not cause Solr to 500.
        self.assertEqual(sqs.count(), 0)

        sqs = self.sqs.auto_query('blazing')
        self.assertEqual(sqs.query.build_query(), u'blazing')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('blazing saddles')
        self.assertEqual(sqs.query.build_query(), u'(blazing AND saddles)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles')
        self.assertEqual(sqs.query.build_query(), u'(\\"blazing AND saddles)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles"')
        self.assertEqual(sqs.query.build_query(), u'"blazing saddles"')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles"')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles" AND mel)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'saddles"')
        self.assertEqual(sqs.query.build_query(), u'("blazing \'saddles" AND mel)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"')
        self.assertEqual(sqs.query.build_query(), u'("blazing \'\'saddles" AND mel)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"\'')
        self.assertEqual(sqs.query.build_query(), u'("blazing \'\'saddles" AND mel AND \')')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"\'"')
        self.assertEqual(sqs.query.build_query(), u'("blazing \'\'saddles" AND mel AND \'\\")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles" mel')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles" AND mel)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles" mel brooks')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles" AND mel AND brooks)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles" brooks')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles" AND mel AND brooks)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles" "brooks')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles" AND mel AND \\"brooks)')
        self.assertEqual(sqs.count(), 0)

    def test_result_class(self):
        # Assert that we're defaulting to ``SearchResult``.
        sqs = self.sqs.all()
        self.assertTrue(isinstance(sqs[0], SearchResult))

        # Custom class.
        sqs = self.sqs.result_class(MockSearchResult).all()
        self.assertTrue(isinstance(sqs[0], MockSearchResult))

        # Reset to default.
        sqs = self.sqs.result_class(None).all()
        self.assertTrue(isinstance(sqs[0], SearchResult))
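
A short sketch of combining SearchQuerySets with & and |, mirroring test___and__ and test___or__ above; the field values are illustrative only.

from haystack.query import SQ, SearchQuerySet

foo = SearchQuerySet().filter(content='foo')
bar = SearchQuerySet().filter(content='bar')

both = foo & bar    # builds roughly '(foo AND bar)'
either = foo | bar  # builds roughly '(foo OR bar)'

# More complex trees can mix exclude() with explicit SQ nodes, as the tests do.
mixed = SearchQuerySet().exclude(title='moof').filter(SQ(content='foo') | SQ(content='baz'))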
Example #30
0
class SearchQuerySetTestCase(TestCase):
    fixtures = ["base_data.json", "bulk_data.json"]

    def setUp(self):
        super(SearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections["default"]._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.cpkmmsi = CharPKMockModelSearchIndex()
        self.uuidmmsi = SimpleMockUUIDModelIndex()
        self.ui.build(indexes=[self.bmmsi, self.cpkmmsi, self.uuidmmsi])
        connections["default"]._index = self.ui

        # Update the "index".
        backend = connections["default"].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.msqs = SearchQuerySet()

        # Stow.
        reset_search_queries()

    def tearDown(self):
        # Restore.
        connections["default"]._index = self.old_unified_index
        super(SearchQuerySetTestCase, self).tearDown()

    def test_len(self):
        self.assertEqual(len(self.msqs), 23)

    def test_repr(self):
        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)
        self.assertRegexpMatches(
            repr(self.msqs),
            r"^<SearchQuerySet: query=<test_haystack.mocks.MockSearchQuery object"
            r" at 0x[0-9A-Fa-f]+>, using=None>$",
        )

    def test_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)
        msqs = self.msqs.all()
        results = [int(res.pk) for res in iter(msqs)]
        self.assertEqual(results, [res.pk for res in MOCK_SEARCH_RESULTS[:23]])
        self.assertEqual(len(connections["default"].queries), 3)

    def test_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)
        results = self.msqs.all()
        self.assertEqual(
            [int(res.pk) for res in results[1:11]],
            [res.pk for res in MOCK_SEARCH_RESULTS[1:11]],
        )
        self.assertEqual(len(connections["default"].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)
        results = self.msqs.all()
        self.assertEqual(int(results[22].pk), MOCK_SEARCH_RESULTS[22].pk)
        self.assertEqual(len(connections["default"].queries), 1)

    def test_manual_iter(self):
        results = self.msqs.all()

        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)

        check = [result.pk for result in results._manual_iter()]
        self.assertEqual(
            check,
            [
                "1",
                "2",
                "3",
                "4",
                "5",
                "6",
                "7",
                "8",
                "9",
                "10",
                "11",
                "12",
                "13",
                "14",
                "15",
                "16",
                "17",
                "18",
                "19",
                "20",
                "21",
                "22",
                "23",
            ],
        )

        self.assertEqual(len(connections["default"].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        # This will hang indefinitely if broken.

        # CharPK testing
        old_ui = self.ui
        self.ui.build(indexes=[self.cpkmmsi])
        connections["default"]._index = self.ui
        self.cpkmmsi.update()

        results = self.msqs.all()
        loaded = [result.pk for result in results._manual_iter()]
        self.assertEqual(loaded, ["sometext", "1234"])
        self.assertEqual(len(connections["default"].queries), 1)

        # UUID testing
        self.ui.build(indexes=[self.uuidmmsi])
        connections["default"]._index = self.ui
        self.uuidmmsi.update()

        results = self.msqs.all()
        loaded = [result.pk for result in results._manual_iter()]
        self.assertEqual(
            loaded,
            [
                "53554c58-7051-4350-bcc9-dad75eb248a9",
                "77554c58-7051-4350-bcc9-dad75eb24888",
            ],
        )

        connections["default"]._index = old_ui

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)
        self.assertEqual(self.msqs._cache_is_full(), False)
        results = self.msqs.all()
        fire_the_iterator_and_fill_cache = list(results)
        self.assertEqual(23, len(fire_the_iterator_and_fill_cache))
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections["default"].queries), 4)

    def test_all(self):
        sqs = self.msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))

    def test_filter(self):
        sqs = self.msqs.filter(content="foo")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_exclude(self):
        sqs = self.msqs.exclude(content="foo")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_order_by(self):
        sqs = self.msqs.order_by("foo")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue("foo" in sqs.query.order_by)

    def test_models(self):
        # Stow.
        old_unified_index = connections["default"]._index
        ui = UnifiedIndex()
        bmmsi = BasicMockModelSearchIndex()
        bammsi = BasicAnotherMockModelSearchIndex()
        ui.build(indexes=[bmmsi, bammsi])
        connections["default"]._index = ui

        msqs = SearchQuerySet()

        sqs = msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 0)

        sqs = msqs.models(MockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

        sqs = msqs.models(MockModel, AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 2)

        # This will produce a warning.
        ui.build(indexes=[bmmsi])
        sqs = msqs.models(AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

    def test_result_class(self):
        sqs = self.msqs.all()
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

        # Custom class.
        class IttyBittyResult(object):
            pass

        sqs = self.msqs.result_class(IttyBittyResult)
        self.assertTrue(issubclass(sqs.query.result_class, IttyBittyResult))

        # Reset to default.
        sqs = self.msqs.result_class(None)
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

    def test_boost(self):
        sqs = self.msqs.boost("foo", 10)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.boost.keys()), 1)

    def test_highlight(self):
        sqs = self.msqs.highlight()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.highlight, True)

    def test_spelling_override(self):
        sqs = self.msqs.filter(content="not the spellchecking query")
        self.assertEqual(sqs.query.spelling_query, None)
        sqs = self.msqs.set_spelling_query("override")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.spelling_query, "override")

    def test_spelling_suggestions(self):
        # Test the case where spelling support is disabled.
        sqs = self.msqs.filter(content="Indx")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.spelling_suggestion(), None)
        self.assertEqual(sqs.spelling_suggestion("indexy"), None)

    def test_raw_search(self):
        self.assertEqual(len(self.msqs.raw_search("foo")), 23)
        self.assertEqual(
            len(
                self.msqs.raw_search(
                    "(content__exact:hello AND content__exact:world)")),
            23,
        )

    def test_load_all(self):
        # Models with character primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = CharPKMockSearchBackend("charpk")
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # Models with uuid primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = UUIDMockSearchBackend("uuid")
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # If nothing is handled, you get nothing.
        old_ui = connections["default"]._index
        ui = UnifiedIndex()
        ui.build(indexes=[])
        connections["default"]._index = ui

        sqs = self.msqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        connections["default"]._index = old_ui

        # For full tests, see the solr_backend.

    def test_load_all_read_queryset(self):
        # Stow.
        old_ui = connections["default"]._index
        ui = UnifiedIndex()
        gafmmsi = GhettoAFifthMockModelSearchIndex()
        ui.build(indexes=[gafmmsi])
        connections["default"]._index = ui
        gafmmsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend("default")
        results._fill_cache(0, 2)

        # The deleted result isn't returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 1)

        # Register a SearchIndex with a read_queryset that returns deleted items
        rqstsi = TextReadQuerySetTestSearchIndex()
        ui.build(indexes=[rqstsi])
        rqstsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend("default")
        results._fill_cache(0, 2)

        # Both the deleted and not deleted items are returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # Restore.
        connections["default"]._index = old_ui

    def test_auto_query(self):
        sqs = self.msqs.auto_query("test search -stuff")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            "<SQ: AND content__content=test search -stuff>",
        )

        sqs = self.msqs.auto_query('test "my thing" search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__content=test "my thing" search -stuff>',
        )

        sqs = self.msqs.auto_query(
            "test \"my thing\" search 'moar quotes' -stuff")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            "<SQ: AND content__content=test \"my thing\" search 'moar quotes' -stuff>",
        )

        sqs = self.msqs.auto_query(
            'test "my thing" search \'moar quotes\' "foo -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__content=test "my thing" search \'moar quotes\' "foo -stuff>',
        )

        sqs = self.msqs.auto_query("test - stuff")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         "<SQ: AND content__content=test - stuff>")

        # Ensure bits in exact matches get escaped properly as well.
        sqs = self.msqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__content="pants:rule">')

        # Now with a different fieldname
        sqs = self.msqs.auto_query("test search -stuff", fieldname="title")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         "<SQ: AND title__content=test search -stuff>")

        sqs = self.msqs.auto_query('test "my thing" search -stuff',
                                   fieldname="title")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND title__content=test "my thing" search -stuff>',
        )

    def test_count(self):
        self.assertEqual(self.msqs.count(), 23)

    def test_facet_counts(self):
        self.assertEqual(self.msqs.facet_counts(), {})

    def test_best_match(self):
        self.assertTrue(isinstance(self.msqs.best_match(), SearchResult))

    def test_latest(self):
        self.assertTrue(isinstance(self.msqs.latest("pub_date"), SearchResult))

    def test_more_like_this(self):
        mock = MockModel()
        mock.id = 1

        self.assertEqual(len(self.msqs.more_like_this(mock)), 23)

    def test_facets(self):
        sqs = self.msqs.facet("foo")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.facets), 1)

        sqs2 = self.msqs.facet("foo").facet("bar")
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.facets), 2)

    def test_date_facets(self):
        try:
            sqs = self.msqs.date_facet(
                "foo",
                start_date=datetime.date(2008, 2, 25),
                end_date=datetime.date(2009, 2, 25),
                gap_by="smarblaph",
            )
            self.fail()
        except FacetingError as e:
            self.assertEqual(
                str(e),
                "The gap_by ('smarblaph') must be one of the following: year, month, day, hour, minute, second.",
            )

        sqs = self.msqs.date_facet(
            "foo",
            start_date=datetime.date(2008, 2, 25),
            end_date=datetime.date(2009, 2, 25),
            gap_by="month",
        )
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.date_facets), 1)

        sqs2 = self.msqs.date_facet(
            "foo",
            start_date=datetime.date(2008, 2, 25),
            end_date=datetime.date(2009, 2, 25),
            gap_by="month",
        ).date_facet(
            "bar",
            start_date=datetime.date(2007, 2, 25),
            end_date=datetime.date(2009, 2, 25),
            gap_by="year",
        )
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.date_facets), 2)

    def test_query_facets(self):
        sqs = self.msqs.query_facet("foo", "[bar TO *]")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_facets), 1)

        sqs2 = self.msqs.query_facet("foo", "[bar TO *]").query_facet(
            "bar", "[100 TO 499]")
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.query_facets), 2)

        # Test multiple query facets on a single field
        sqs3 = (self.msqs.query_facet("foo", "[bar TO *]").query_facet(
            "bar", "[100 TO 499]").query_facet("foo", "[1000 TO 1499]"))
        self.assertTrue(isinstance(sqs3, SearchQuerySet))
        self.assertEqual(len(sqs3.query.query_facets), 3)

    def test_stats(self):
        sqs = self.msqs.stats_facet("foo", "bar")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.stats), 1)

        sqs2 = self.msqs.stats_facet("foo", "bar").stats_facet("foo", "baz")
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.stats), 1)

        sqs3 = self.msqs.stats_facet("foo", "bar").stats_facet("moof", "baz")
        self.assertTrue(isinstance(sqs3, SearchQuerySet))
        self.assertEqual(len(sqs3.query.stats), 2)

    def test_narrow(self):
        sqs = self.msqs.narrow("foo:moof")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.narrow_queries), 1)

    def test_clone(self):
        results = self.msqs.filter(foo="bar", foo__lt="10")

        clone = results._clone()
        self.assertTrue(isinstance(clone, SearchQuerySet))
        self.assertEqual(str(clone.query), str(results.query))
        self.assertEqual(clone._result_cache, [])
        self.assertEqual(clone._result_count, None)
        self.assertEqual(clone._cache_full, False)
        self.assertEqual(clone._using, results._using)

    def test_using(self):
        sqs = SearchQuerySet(using="default")
        self.assertNotEqual(sqs.query, None)
        self.assertEqual(sqs.query._using, "default")

    def test_chaining(self):
        sqs = self.msqs.filter(content="foo")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

        # A second instance should inherit none of the changes from above.
        sqs = self.msqs.filter(content="bar")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_none(self):
        sqs = self.msqs.none()
        self.assertTrue(isinstance(sqs, EmptySearchQuerySet))
        self.assertEqual(len(sqs), 0)

    def test___and__(self):
        sqs1 = self.msqs.filter(content="foo")
        sqs2 = self.msqs.filter(content="bar")
        sqs = sqs1 & sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)

    def test___or__(self):
        sqs1 = self.msqs.filter(content="foo")
        sqs2 = self.msqs.filter(content="bar")
        sqs = sqs1 | sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)

    def test_and_or(self):
        """
        Combining AND queries with OR should give
            AND(OR(a, b), OR(c, d))
        """
        sqs1 = self.msqs.filter(content="foo").filter(content="oof")
        sqs2 = self.msqs.filter(content="bar").filter(content="rab")
        sqs = sqs1 | sqs2

        self.assertEqual(sqs.query.query_filter.connector, "OR")
        self.assertEqual(repr(sqs.query.query_filter.children[0]),
                         repr(sqs1.query.query_filter))
        self.assertEqual(repr(sqs.query.query_filter.children[1]),
                         repr(sqs2.query.query_filter))

    def test_or_and(self):
        """
        Combining OR queries with AND should give
            OR(AND(a, b), AND(c, d))
        """
        sqs1 = self.msqs.filter(content="foo").filter_or(content="oof")
        sqs2 = self.msqs.filter(content="bar").filter_or(content="rab")
        sqs = sqs1 & sqs2

        self.assertEqual(sqs.query.query_filter.connector, "AND")
        self.assertEqual(repr(sqs.query.query_filter.children[0]),
                         repr(sqs1.query.query_filter))
        self.assertEqual(repr(sqs.query.query_filter.children[1]),
                         repr(sqs2.query.query_filter))
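
A minimal sketch of swapping in a custom result class, as test_result_class above does; the ScoredResult subclass is an assumption for illustration and not part of the test suite.

from haystack.models import SearchResult
from haystack.query import SearchQuerySet

class ScoredResult(SearchResult):
    # Illustrative only: expose a rounded relevance score on each hit.
    @property
    def rounded_score(self):
        return round(self.score, 2)

sqs = SearchQuerySet().result_class(ScoredResult).filter(content='foo')
sqs = sqs.result_class(None)  # passing None falls back to the stock SearchResult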
Example #31
0
    def search(self, order_by='newest'):
        self.clean()
        if not order_by:
            order_by = 'newest'

        sqs = SearchQuerySet()
        user = self.user
        query = self.cleaned_data['q']

        # check to see if there is impersonation
        if hasattr(user,'impersonated_user'):
            if isinstance(user.impersonated_user, User):
                user = user.impersonated_user

        is_an_admin = user.profile.is_superuser

        # making a special case for corp memb because it needs to utilize two settings
        # (anonymoussearchcorporatemembers and membersearchcorporatemembers)
        if CorpMemb and self.cleaned_data['models'] == ["%s.%s" % (CorpMemb._meta.app_label,
                                                                   CorpMemb._meta.model_name)]:
            filter_and, filter_or = CorpMemb.get_search_filter(user)
            q_obj = None
            if filter_and:
                q_obj = Q(**filter_and)
            if filter_or:
                q_obj_or = reduce(operator.or_, [Q(**{key: value}) for key, value in filter_or.items()])
                if q_obj:
                    q_obj = reduce(operator.and_, [q_obj, q_obj_or])
                else:
                    q_obj = q_obj_or
            if q_obj:
                sqs = sqs.filter(q_obj)

            if query:
                sqs = sqs.auto_query(sqs.query.clean(query))
        else:

            if query:
                sqs = sqs.auto_query(sqs.query.clean(query))
                if user:
                    if not is_an_admin:
                        if not user.is_anonymous():
                            # if b/w admin and anon
                            # (status+status_detail+(anon OR user)) OR (who_can_view__exact)
                            anon_query = Q(**{'allow_anonymous_view':True,})
                            user_query = Q(**{'allow_user_view':True,})
                            sec1_query = Q(**{
                                'status':1,
                                'status_detail':'active',
                            })
                            sec2_query = Q(**{
                                'owner__exact':user.username
                            })
                            query = reduce(operator.or_, [anon_query, user_query])
                            query = reduce(operator.and_, [sec1_query, query])
                            query = reduce(operator.or_, [query, sec2_query])

                            sqs = sqs.filter(query)
                        else:
                            # if anonymous
                            sqs = sqs.filter(status=True).filter(status_detail='active')
                            sqs = sqs.filter(allow_anonymous_view=True)
                else:
                    # if anonymous
                    sqs = sqs.filter(status=True).filter(status_detail='active')
                    sqs = sqs.filter(allow_anonymous_view=True)
            else:
                if user:
                    if is_an_admin:
                        sqs = sqs.all()
                    else:
                        if not user.is_anonymous():
                            # (status + status_detail + anon) OR (owner__exact)
                            sec1_query = Q(**{
                                'status':1,
                                'status_detail':'active',
                                'allow_anonymous_view':True,
                            })
                            sec2_query = Q(**{
                                'owner__exact':user.username
                            })
                            query = reduce(operator.or_, [sec1_query, sec2_query])
                            sqs = sqs.filter(query)
                        else:
                            # if anonymous
                            sqs = sqs.filter(status=True).filter(status_detail='active')
                            sqs = sqs.filter(allow_anonymous_view=True)
                else:
                    # if anonymous
                    sqs = sqs.filter(status=True).filter(status_detail='active')
                    sqs = sqs.filter(allow_anonymous_view=True)

            # For Solr, order_by can only be called once, so we have to do it here.
            if order_by == 'newest':
                # Changed order_by from create_dt to order because the order field
                # is a common field in search indexes and is used by other modules (news).
                sqs = sqs.order_by('-order')
            else:
                sqs = sqs.order_by('order')

        if self.load_all:
            sqs = sqs.load_all()

        return sqs
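
The non-admin branch above assembles its permission filter by reducing Q objects
with operator.or_ and operator.and_. Below is a sketch of the same filter shape
written directly with haystack's SQ; the field names are the ones used in the
example, while the helper name is only for illustration:

from haystack.query import SQ

def visible_to(sqs, username):
    # (status AND status_detail AND (anonymous-viewable OR user-viewable)) OR owner
    perm = (SQ(status=1) & SQ(status_detail='active')
            & (SQ(allow_anonymous_view=True) | SQ(allow_user_view=True)))
    return sqs.filter(perm | SQ(owner__exact=username))
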
Example #32
0
class SearchQuerySetTestCase(TestCase):
    fixtures = ["base_data.json", "bulk_data.json"]

    def setUp(self):
        super(SearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections["default"]._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.cpkmmsi = CharPKMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.cpkmmsi])
        connections["default"]._index = self.ui

        # Update the "index".
        backend = connections["default"].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.msqs = SearchQuerySet()

        # Stow.
        reset_search_queries()

    def tearDown(self):
        # Restore.
        connections["default"]._index = self.old_unified_index
        super(SearchQuerySetTestCase, self).tearDown()

    def test_len(self):
        self.assertEqual(len(self.msqs), 23)

    def test_repr(self):
        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)
        self.assertRegexpMatches(
            repr(self.msqs),
            r"^<SearchQuerySet: query=<test_haystack.mocks.MockSearchQuery object" r" at 0x[0-9A-Fa-f]+>, using=None>$",
        )

    def test_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)
        msqs = self.msqs.all()
        results = [int(res.pk) for res in iter(msqs)]
        self.assertEqual(results, [res.pk for res in MOCK_SEARCH_RESULTS[:23]])
        self.assertEqual(len(connections["default"].queries), 3)

    def test_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)
        results = self.msqs.all()
        self.assertEqual([int(res.pk) for res in results[1:11]], [res.pk for res in MOCK_SEARCH_RESULTS[1:11]])
        self.assertEqual(len(connections["default"].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)
        results = self.msqs.all()
        self.assertEqual(int(results[22].pk), MOCK_SEARCH_RESULTS[22].pk)
        self.assertEqual(len(connections["default"].queries), 1)

    def test_manual_iter(self):
        results = self.msqs.all()

        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)

        check = [result.pk for result in results._manual_iter()]
        self.assertEqual(
            check,
            ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12",
             "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23"],
        )

        self.assertEqual(len(connections["default"].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        # This will hang indefinitely if broken.
        old_ui = self.ui
        self.ui.build(indexes=[self.cpkmmsi])
        connections["default"]._index = self.ui
        self.cpkmmsi.update()

        results = self.msqs.all()
        loaded = [result.pk for result in results._manual_iter()]
        self.assertEqual(loaded, ["sometext", "1234"])
        self.assertEqual(len(connections["default"].queries), 1)

        connections["default"]._index = old_ui

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)
        self.assertEqual(self.msqs._cache_is_full(), False)
        results = self.msqs.all()
        fire_the_iterator_and_fill_cache = list(results)
        self.assertEqual(23, len(fire_the_iterator_and_fill_cache))
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections["default"].queries), 4)

    def test_all(self):
        sqs = self.msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))

    def test_filter(self):
        sqs = self.msqs.filter(content="foo")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_exclude(self):
        sqs = self.msqs.exclude(content="foo")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_order_by(self):
        sqs = self.msqs.order_by("foo")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue("foo" in sqs.query.order_by)

    def test_models(self):
        # Stow.
        old_unified_index = connections["default"]._index
        ui = UnifiedIndex()
        bmmsi = BasicMockModelSearchIndex()
        bammsi = BasicAnotherMockModelSearchIndex()
        ui.build(indexes=[bmmsi, bammsi])
        connections["default"]._index = ui

        msqs = SearchQuerySet()

        sqs = msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 0)

        sqs = msqs.models(MockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

        sqs = msqs.models(MockModel, AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 2)

        # This will produce a warning.
        ui.build(indexes=[bmmsi])
        sqs = msqs.models(AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

    def test_result_class(self):
        sqs = self.msqs.all()
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

        # Custom class.
        class IttyBittyResult(object):
            pass

        sqs = self.msqs.result_class(IttyBittyResult)
        self.assertTrue(issubclass(sqs.query.result_class, IttyBittyResult))

        # Reset to default.
        sqs = self.msqs.result_class(None)
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

    def test_boost(self):
        sqs = self.msqs.boost("foo", 10)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.boost.keys()), 1)

    def test_highlight(self):
        sqs = self.msqs.highlight()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.highlight, True)

    def test_spelling_override(self):
        sqs = self.msqs.filter(content="not the spellchecking query")
        self.assertEqual(sqs.query.spelling_query, None)
        sqs = self.msqs.set_spelling_query("override")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.spelling_query, "override")

    def test_spelling_suggestions(self):
        # Test the case where spelling support is disabled.
        sqs = self.msqs.filter(content="Indx")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.spelling_suggestion(), None)
        self.assertEqual(sqs.spelling_suggestion("indexy"), None)

    def test_raw_search(self):
        self.assertEqual(len(self.msqs.raw_search("foo")), 23)
        self.assertEqual(len(self.msqs.raw_search("(content__exact:hello AND content__exact:world)")), 23)

    def test_load_all(self):
        # Models with character primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = CharPKMockSearchBackend("charpk")
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # If nothing is handled, you get nothing.
        old_ui = connections["default"]._index
        ui = UnifiedIndex()
        ui.build(indexes=[])
        connections["default"]._index = ui

        sqs = self.msqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        connections["default"]._index = old_ui

        # For full tests, see the solr_backend.

    def test_load_all_read_queryset(self):
        # Stow.
        old_ui = connections["default"]._index
        ui = UnifiedIndex()
        gafmmsi = GhettoAFifthMockModelSearchIndex()
        ui.build(indexes=[gafmmsi])
        connections["default"]._index = ui
        gafmmsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend("default")
        results._fill_cache(0, 2)

        # The deleted result isn't returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 1)

        # Register a SearchIndex with a read_queryset that returns deleted items
        rqstsi = TextReadQuerySetTestSearchIndex()
        ui.build(indexes=[rqstsi])
        rqstsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend("default")
        results._fill_cache(0, 2)

        # Both the deleted and not deleted items are returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # Restore.
        connections["default"]._index = old_ui

    def test_auto_query(self):
        sqs = self.msqs.auto_query("test search -stuff")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), "<SQ: AND content__content=test search -stuff>")

        sqs = self.msqs.auto_query('test "my thing" search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__content=test "my thing" search -stuff>')

        sqs = self.msqs.auto_query("test \"my thing\" search 'moar quotes' -stuff")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter), "<SQ: AND content__content=test \"my thing\" search 'moar quotes' -stuff>"
        )

        sqs = self.msqs.auto_query('test "my thing" search \'moar quotes\' "foo -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__content=test "my thing" search \'moar quotes\' "foo -stuff>',
        )

        sqs = self.msqs.auto_query("test - stuff")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), "<SQ: AND content__content=test - stuff>")

        # Ensure bits in exact matches get escaped properly as well.
        sqs = self.msqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__content="pants:rule">')

        # Now with a different fieldname
        sqs = self.msqs.auto_query("test search -stuff", fieldname="title")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), "<SQ: AND title__content=test search -stuff>")

        sqs = self.msqs.auto_query('test "my thing" search -stuff', fieldname="title")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND title__content=test "my thing" search -stuff>')

    def test_count(self):
        self.assertEqual(self.msqs.count(), 23)

    def test_facet_counts(self):
        self.assertEqual(self.msqs.facet_counts(), {})

    def test_best_match(self):
        self.assertTrue(isinstance(self.msqs.best_match(), SearchResult))

    def test_latest(self):
        self.assertTrue(isinstance(self.msqs.latest("pub_date"), SearchResult))

    def test_more_like_this(self):
        mock = MockModel()
        mock.id = 1

        self.assertEqual(len(self.msqs.more_like_this(mock)), 23)

    def test_facets(self):
        sqs = self.msqs.facet("foo")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.facets), 1)

        sqs2 = self.msqs.facet("foo").facet("bar")
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.facets), 2)

    def test_date_facets(self):
        try:
            sqs = self.msqs.date_facet(
                "foo", start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by="smarblaph"
            )
            self.fail()
        except FacetingError as e:
            self.assertEqual(
                str(e), "The gap_by ('smarblaph') must be one of the following: year, month, day, hour, minute, second."
            )

        sqs = self.msqs.date_facet(
            "foo", start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by="month"
        )
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.date_facets), 1)

        sqs2 = self.msqs.date_facet(
            "foo", start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by="month"
        ).date_facet("bar", start_date=datetime.date(2007, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by="year")
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.date_facets), 2)

    def test_query_facets(self):
        sqs = self.msqs.query_facet("foo", "[bar TO *]")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_facets), 1)

        sqs2 = self.msqs.query_facet("foo", "[bar TO *]").query_facet("bar", "[100 TO 499]")
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.query_facets), 2)

        # Test multiple query facets on a single field
        sqs3 = (
            self.msqs.query_facet("foo", "[bar TO *]")
            .query_facet("bar", "[100 TO 499]")
            .query_facet("foo", "[1000 TO 1499]")
        )
        self.assertTrue(isinstance(sqs3, SearchQuerySet))
        self.assertEqual(len(sqs3.query.query_facets), 3)

    def test_stats(self):
        sqs = self.msqs.stats_facet("foo", "bar")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.stats), 1)

        sqs2 = self.msqs.stats_facet("foo", "bar").stats_facet("foo", "baz")
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.stats), 1)

        sqs3 = self.msqs.stats_facet("foo", "bar").stats_facet("moof", "baz")
        self.assertTrue(isinstance(sqs3, SearchQuerySet))
        self.assertEqual(len(sqs3.query.stats), 2)

    def test_narrow(self):
        sqs = self.msqs.narrow("foo:moof")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.narrow_queries), 1)

    def test_clone(self):
        results = self.msqs.filter(foo="bar", foo__lt="10")

        clone = results._clone()
        self.assertTrue(isinstance(clone, SearchQuerySet))
        self.assertEqual(str(clone.query), str(results.query))
        self.assertEqual(clone._result_cache, [])
        self.assertEqual(clone._result_count, None)
        self.assertEqual(clone._cache_full, False)
        self.assertEqual(clone._using, results._using)

    def test_using(self):
        sqs = SearchQuerySet(using="default")
        self.assertNotEqual(sqs.query, None)
        self.assertEqual(sqs.query._using, "default")

    def test_chaining(self):
        sqs = self.msqs.filter(content="foo")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

        # A second instance should inherit none of the changes from above.
        sqs = self.msqs.filter(content="bar")
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_none(self):
        sqs = self.msqs.none()
        self.assertTrue(isinstance(sqs, EmptySearchQuerySet))
        self.assertEqual(len(sqs), 0)

    def test___and__(self):
        sqs1 = self.msqs.filter(content="foo")
        sqs2 = self.msqs.filter(content="bar")
        sqs = sqs1 & sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)

    def test___or__(self):
        sqs1 = self.msqs.filter(content="foo")
        sqs2 = self.msqs.filter(content="bar")
        sqs = sqs1 | sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)

    def test_and_or(self):
        """
        Combining AND queries with OR should give
            OR(AND(a, b), AND(c, d))
        """
        sqs1 = self.msqs.filter(content="foo").filter(content="oof")
        sqs2 = self.msqs.filter(content="bar").filter(content="rab")
        sqs = sqs1 | sqs2

        self.assertEqual(sqs.query.query_filter.connector, "OR")
        self.assertEqual(repr(sqs.query.query_filter.children[0]), repr(sqs1.query.query_filter))
        self.assertEqual(repr(sqs.query.query_filter.children[1]), repr(sqs2.query.query_filter))

    def test_or_and(self):
        """
        Combining OR queries with AND should give
            AND(OR(a, b), OR(c, d))
        """
        sqs1 = self.msqs.filter(content="foo").filter_or(content="oof")
        sqs2 = self.msqs.filter(content="bar").filter_or(content="rab")
        sqs = sqs1 & sqs2

        self.assertEqual(sqs.query.query_filter.connector, "AND")
        self.assertEqual(repr(sqs.query.query_filter.children[0]), repr(sqs1.query.query_filter))
        self.assertEqual(repr(sqs.query.query_filter.children[1]), repr(sqs2.query.query_filter))
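
The two tests above describe how SQ nodes nest when querysets are combined. A
standalone sketch of the same trees, using only haystack.query.SQ and no
backend; the field/value pairs are placeholders:

from haystack.query import SQ

and_pair = SQ(content="foo") & SQ(content="oof")                 # AND(a, b)
or_of_ands = and_pair | (SQ(content="bar") & SQ(content="rab"))
assert or_of_ands.connector == "OR"                              # as in test_and_or

or_pair = SQ(content="foo") | SQ(content="oof")                  # OR(a, b)
and_of_ors = or_pair & (SQ(content="bar") | SQ(content="rab"))
assert and_of_ors.connector == "AND"                             # as in test_or_and
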
Example #33
0
    def post(self, request):
        r = request.data

        if type(r['query']) is str:
            # Simple search - searches for matches in all indexed fields
            query = r['query']

            # TODO:
            # Shouldn't have to filter for searchable=True here as they
            # shouldn't be indexed in the first place, but for some reason 
            # RealtimeSignalProcessor doesn't listen to the index_queryset
            # filtering in search_indexes.py.
            # Filtering added here as a temp fix.
            matches = SearchQuerySet().filter(content=AutoQuery(query)).filter(searchable=True)

            # Preload matching DB objects. load_all() returns a new
            # SearchQuerySet, so the result must be reassigned.
            matches = matches.load_all()

            # Generate summary list to return
            summary_list = []

            for match in matches.all():
                summary = match.object.summary
                summary_list.append(summary)

        elif type(r['query']) is dict:
            # Advanced search (not implemented)
            # TODO: port to haystack, expand
            # Current code gets only exact matches directly from the database
            query = r['query']

            logger.debug("ADVANCED SEARCH")
            logger.debug(query)

            # Filter search index-based things here
            matches = SearchQuerySet().filter(searchable=True,
                                              content=AutoQuery(query['text']), 
                                              fitter_name=query['fitter'])
                                              #options_dilute=query['options']['dilute'],
                                              #options_normalise=query['options']['normalise'])

            if 'method' in query['options'] and isinstance(query['options']['method'], list):
                matches = matches.filter(
                    options_method__in=query['options']['method']
                )

            if 'flavour' in query['options'] and isinstance(query['options']['flavour'], list):
                matches = matches.filter(
                    options_flavour__in=query['options']['flavour']
                )

            # Preload matching DB objects. load_all() returns a new
            # SearchQuerySet, so the result must be reassigned.
            matches = matches.load_all()

            # Generate summary list to return and filter by array values
            summary_list = []

            for match in matches.all():
                # Set to true below if match is filtered
                unmatch = False 

                fit = match.object.to_dict()

                # Filter each param by given parameter ranges
                for key, param in query['params'].items():
                    try:
                        param_min = float(param['value']['min'])
                        if fit['fit']['params'][key]['value'] < param_min:
                            unmatch = True 
                    except (ValueError, TypeError):
                        # No minimum bound
                        pass

                    try:
                        param_max = float(param['value']['max'])
                        if fit['fit']['params'][key]['value'] > param_max:
                            unmatch = True 
                    except (ValueError, TypeError):
                        # No maximum bound
                        pass

                if not unmatch:
                    summary = match.object.summary
                    summary_list.append(summary)

        else:
            # Bad query
            return Response({"detail": "Query must be a string or object."}, 
                             status=status.HTTP_400_BAD_REQUEST)

        response = {"matches": summary_list}

        return Response(response)
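
The view above leans on AutoQuery to interpret raw user input (quoted phrases,
a leading '-' for exclusion). A minimal standalone sketch of the same
filtering; the searchable flag and the summary attribute come from the
example's own models, and the query text is illustrative:

from haystack.inputs import AutoQuery
from haystack.query import SearchQuerySet

matches = SearchQuerySet().filter(searchable=True,
                                  content=AutoQuery('fit "my sample" -draft'))
matches = matches.load_all()   # reassign: load_all() returns a clone
summaries = [match.object.summary for match in matches if match.object]
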
Example #34
0
def refined_search(data):
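    """
    Narrow a search over Live JobPost entries using the refine_* parameters
    in ``data`` (a QueryDict), returning the SearchQuerySet along with the
    Skill, City, Industry, Qualification and State querysets matched by the
    selected filters.
    """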
    searched_skills = (
        searched_locations
    ) = searched_industry = searched_edu = state = Skill.objects.none()
    sqs = SearchQuerySet().models(JobPost).filter_and(status="Live")
    if "refine_skill" in data and data.getlist("refine_skill"):
        term = data.getlist("refine_skill")
        sqs = sqs.filter_and(
            SQ(title__in=term)
            | SQ(skills__in=term)
            | SQ(description__in=term)
            | SQ(designation__in=term)
            | SQ(edu_qualification__in=term))
        searched_skills = Skill.objects.filter(name__in=term)

    location = data.getlist(
        "refine_location") if "refine_location" in data else []
    searched_locations = City.objects.filter(name__in=location)
    if "Across India" in location:
        india = Country.objects.filter(name="India")
        sqs = sqs.filter_and(
            SQ(location__in=india.values_list("state__state__name",
                                              flat=True)))
    elif location:
        other_cities = searched_locations.values_list("parent_city__name",
                                                      flat=True)
        sqs = sqs.filter_and(
            SQ(location__in=location) | SQ(location__in=other_cities))

    if "refine_state" in data and data.getlist("refine_state"):
        state = State.objects.filter(name__in=data.getlist("refine_state"))
        sqs = sqs.filter_and(
            location__in=state.values_list("state__name", flat=True))

    if data.get("job_type"):
        if data["job_type"] == "Fresher":
            sqs = sqs.filter_and(min_year__lte=int(0))
        else:
            sqs = sqs.filter_and(job_type__in=[data["job_type"]])

    if "refine_industry" in data and data.getlist("refine_industry"):
        term = data.getlist("refine_industry")
        sqs = sqs.filter_and(industry__in=term)
        searched_industry = Industry.objects.filter(name__in=term)

    if "refine_education" in data and data.getlist("refine_education"):
        term = data.getlist("refine_education")
        sqs = sqs.filter_and(edu_qualification__in=term)
        searched_edu = Qualification.objects.filter(name__in=term)

    if "functional_area" in data and data.getlist("functional_area"):
        term = data.getlist("functional_area")
        sqs = sqs.filter_or(functional_area__in=term)

    if data.get("refine_experience_min") or data.get(
            "refine_experience_min") == 0:
        sqs = sqs.filter_and(min_year__lte=int(data["refine_experience_min"]))

    if data.get("refine_experience_max") or data.get(
            "refine_experience_max") == 0:
        sqs = sqs.filter_and(max_year__lte=int(data["refine_experience_max"]))

    # TODO: this line is taking 500ms, nikhila has to look into it.
    # job_list = JobPost.objects.filter(status='Live', pk__in=results).select_related(
    #     'company', 'user').prefetch_related('location', 'skills', 'industry')
    sqs = sqs.load_all().order_by("-published_on")
    return (
        sqs,
        searched_skills,
        searched_locations,
        searched_industry,
        searched_edu,
        state,
    )
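
A minimal sketch of calling refined_search(). It expects a QueryDict-like
object (the function reads parameters via getlist()/get()) and assumes the
job-board models and search index from the example are set up; the parameter
values here are hypothetical:

from django.http import QueryDict

data = QueryDict("refine_skill=python&refine_skill=django&job_type=Fresher")
sqs, skills, locations, industry, edu, state = refined_search(data)
for post in sqs[:10]:
    print(post.object)   # JobPost instances come preloaded via load_all()
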
Example #35
0
class SearchQuerySetTestCase(TestCase):
    def setUp(self):
        super(SearchQuerySetTestCase, self).setUp()
        self.bsqs = SearchQuerySet(query=DummySearchQuery(backend=DummySearchBackend()))
        self.msqs = SearchQuerySet(query=MockSearchQuery(backend=MockSearchBackend()))
        self.mmsqs = SearchQuerySet(query=MockSearchQuery(backend=MixedMockSearchBackend()))
        
        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_site = haystack.site
        test_site = SearchSite()
        test_site.register(MockModel)
        test_site.register(CharPKMockModel)
        haystack.site = test_site
        
        backends.reset_search_queries()
    
    def tearDown(self):
        # Restore.
        haystack.site = self.old_site
        settings.DEBUG = self.old_debug
        super(SearchQuerySetTestCase, self).tearDown()
    
    def test_len(self):
        # Dummy always returns 0.
        self.assertEqual(len(self.bsqs), 0)
        
        self.assertEqual(len(self.msqs), 100)
    
    def test_repr(self):
        self.assertEqual(repr(self.bsqs), '[]')
        
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        self.assertEqual(repr(self.msqs), "[<SearchResult: core.MockModel (pk=0)>, <SearchResult: core.MockModel (pk=1)>, <SearchResult: core.MockModel (pk=2)>, <SearchResult: core.MockModel (pk=3)>, <SearchResult: core.MockModel (pk=4)>, <SearchResult: core.MockModel (pk=5)>, <SearchResult: core.MockModel (pk=6)>, <SearchResult: core.MockModel (pk=7)>, <SearchResult: core.MockModel (pk=8)>, <SearchResult: core.MockModel (pk=9)>, <SearchResult: core.MockModel (pk=10)>, <SearchResult: core.MockModel (pk=11)>, <SearchResult: core.MockModel (pk=12)>, <SearchResult: core.MockModel (pk=13)>, <SearchResult: core.MockModel (pk=14)>, <SearchResult: core.MockModel (pk=15)>, <SearchResult: core.MockModel (pk=16)>, <SearchResult: core.MockModel (pk=17)>, <SearchResult: core.MockModel (pk=18)>, '...(remaining elements truncated)...']")
        self.assertEqual(len(backends.queries), 1)
    
    def test_iter(self):
        # Dummy always returns [].
        self.assertEqual([result for result in self.bsqs.all()], [])
        
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        msqs = self.msqs.all()
        results = [result for result in msqs]
        self.assertEqual(results, MOCK_SEARCH_RESULTS)
        self.assertEqual(len(backends.queries), 10)
    
    def test_slice(self):
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        results = self.msqs.all()
        self.assertEqual(results[1:11], MOCK_SEARCH_RESULTS[1:11])
        self.assertEqual(len(backends.queries), 1)
        
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        results = self.msqs.all()
        self.assertEqual(results[50], MOCK_SEARCH_RESULTS[50])
        self.assertEqual(len(backends.queries), 1)
    
    def test_manual_iter(self):
        results = self.msqs.all()
        
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        
        for offset, result in enumerate(results._manual_iter()):
            self.assertEqual(result, MOCK_SEARCH_RESULTS[offset])
        
        self.assertEqual(len(backends.queries), 10)
        
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not in the SearchSite) than the hit count indicates.
        # This will hang indefinitely if broken.
        results = self.mmsqs.all()
        loaded = [result.pk for result in results._manual_iter()]
        self.assertEqual(loaded, [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29])
        self.assertEqual(len(backends.queries), 8)
    
    def test_fill_cache(self):
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        results = self.msqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(backends.queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
        self.assertEqual(len(backends.queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual(len(backends.queries), 2)
        
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not in the SearchSite) than the hit count indicates.
        results = self.mmsqs.all()
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 0)
        self.assertEqual([result.pk for result in results._result_cache if result is not None], [])
        self.assertEqual(len(backends.queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 9)
        self.assertEqual([result.pk for result in results._result_cache if result is not None], [0, 1, 2, 3, 4, 5, 6, 7, 8])
        self.assertEqual(len(backends.queries), 2)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 17)
        self.assertEqual([result.pk for result in results._result_cache if result is not None], [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19])
        self.assertEqual(len(backends.queries), 4)
        results._fill_cache(20, 30)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 27)
        self.assertEqual([result.pk for result in results._result_cache if result is not None], [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29])
        self.assertEqual(len(backends.queries), 6)
    
    def test_cache_is_full(self):
        # Dummy always has a count of 0 and an empty _result_cache, hence True.
        self.assertEqual(self.bsqs._cache_is_full(), False)
        results = self.bsqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        self.assertEqual(self.msqs._cache_is_full(), False)
        results = self.msqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(backends.queries), 10)
    
    def test_all(self):
        sqs = self.bsqs.all()
        self.assert_(isinstance(sqs, SearchQuerySet))
    
    def test_filter(self):
        sqs = self.bsqs.filter(content='foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)
    
    def test_exclude(self):
        sqs = self.bsqs.exclude(content='foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)
    
    def test_order_by(self):
        sqs = self.bsqs.order_by('foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assert_('foo' in sqs.query.order_by)
    
    def test_models(self):
        mock_index_site = SearchSite()
        mock_index_site.register(MockModel)
        mock_index_site.register(AnotherMockModel)
        
        bsqs = SearchQuerySet(site=mock_index_site)
        
        sqs = bsqs.all()
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 0)
        
        sqs = bsqs.models(MockModel)
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)
        
        sqs = bsqs.models(MockModel, AnotherMockModel)
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 2)
        
        # This will produce a warning.
        mock_index_site.unregister(AnotherMockModel)
        sqs = bsqs.models(AnotherMockModel)
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)
    
    def test_result_class(self):
        sqs = self.bsqs.all()
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))
        
        # Custom class.
        class IttyBittyResult(object):
            pass
        
        sqs = self.bsqs.result_class(IttyBittyResult)
        self.assertTrue(issubclass(sqs.query.result_class, IttyBittyResult))
        
        # Reset to default.
        sqs = self.bsqs.result_class(None)
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))
    
    def test_boost(self):
        sqs = self.bsqs.boost('foo', 10)
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.boost.keys()), 1)
    
    def test_highlight(self):
        sqs = self.bsqs.highlight()
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.highlight, True)
    
    def test_spelling(self):
        # Test the case where spelling support is disabled.
        sqs = self.bsqs.filter(content='Indx')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.spelling_suggestion(), None)
        self.assertEqual(sqs.spelling_suggestion('indexy'), None)
    
    def test_raw_search(self):
        self.assertEqual(len(self.bsqs.raw_search('foo')), 0)
        self.assertEqual(len(self.bsqs.raw_search('(content__exact hello AND content__exact world)')), 1)
    
    def test_load_all(self):
        # Models with character primary keys
        sqs = SearchQuerySet(query=MockSearchQuery(backend=CharPKMockSearchBackend()))
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)
        
        # If nothing is registered, you get nothing.
        haystack.site.unregister(MockModel)
        haystack.site.unregister(CharPKMockModel)
        sqs = self.msqs.load_all()
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)
        
        # For full tests, see the solr_backend.
    
    def test_auto_query(self):
        sqs = self.bsqs.auto_query('test search -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND (content__exact=test AND content__exact=search AND NOT (content__exact=stuff))>')
        
        sqs = self.bsqs.auto_query('test "my thing" search -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND (content__exact=my thing AND content__exact=test AND content__exact=search AND NOT (content__exact=stuff))>')
        
        sqs = self.bsqs.auto_query('test "my thing" search \'moar quotes\' -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), "<SQ: AND (content__exact=my thing AND content__exact=test AND content__exact=search AND content__exact='moar AND content__exact=quotes' AND NOT (content__exact=stuff))>")
        
        sqs = self.bsqs.auto_query('test "my thing" search \'moar quotes\' "foo -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND (content__exact=my thing AND content__exact=test AND content__exact=search AND content__exact=\'moar AND content__exact=quotes\' AND content__exact="foo AND NOT (content__exact=stuff))>')
        
        sqs = self.bsqs.auto_query('test - stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND (content__exact=test AND content__exact=- AND content__exact=stuff)>')
        
        # Ensure bits in exact matches get escaped properly as well.
        sqs = self.bsqs.auto_query('"pants:rule"')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__exact=pants:rule>')
    
    def test_count(self):
        self.assertEqual(self.bsqs.count(), 0)
    
    def test_facet_counts(self):
        self.assertEqual(self.bsqs.facet_counts(), {})
    
    def test_best_match(self):
        self.assert_(isinstance(self.msqs.best_match(), SearchResult))
    
    def test_latest(self):
        self.assert_(isinstance(self.msqs.latest('pub_date'), SearchResult))
    
    def test_more_like_this(self):
        mock = MockModel()
        mock.id = 1
        
        self.assertEqual(len(self.msqs.more_like_this(mock)), 100)
    
    def test_facets(self):
        sqs = self.bsqs.facet('foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.facets), 1)
        
        sqs2 = self.bsqs.facet('foo').facet('bar')
        self.assert_(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.facets), 2)
    
    def test_date_facets(self):
        try:
            sqs = self.bsqs.date_facet('foo', start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='smarblaph')
            self.fail()
        except FacetingError as e:
            self.assertEqual(str(e), "The gap_by ('smarblaph') must be one of the following: year, month, day, hour, minute, second.")
        
        sqs = self.bsqs.date_facet('foo', start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='month')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.date_facets), 1)
        
        sqs2 = self.bsqs.date_facet('foo', start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='month').date_facet('bar', start_date=datetime.date(2007, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='year')
        self.assert_(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.date_facets), 2)
Example #36
0
class LiveSolrSearchQuerySetTestCase(TestCase):
    """Used to test actual implementation details of the SearchQuerySet."""
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveSolrSearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SolrMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui

        self.sqs = SearchQuerySet()
        self.rsqs = RelatedSearchQuerySet()

        # Ugly but not constantly reindexing saves us almost 50% runtime.
        global lssqstc_all_loaded

        if lssqstc_all_loaded is None:
            print 'Reloading data...'
            lssqstc_all_loaded = True

            # Wipe it clean.
            clear_solr_index()

            # Force indexing of the content.
            self.smmi.update()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(LiveSolrSearchQuerySetTestCase, self).tearDown()

    def test_load_all(self):
        sqs = self.sqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue(len(sqs) > 0)
        self.assertEqual(
            sqs[0].object.foo,
            u"Registering indexes in Haystack is very similar to registering models and ``ModelAdmin`` classes in the `Django admin site`_.  If you want to override the default indexing behavior for your model you can specify your own ``SearchIndex`` class.  This is useful for ensuring that future-dated or non-live content is not indexed and searchable. Our ``Note`` model has a ``pub_date`` field, so let's update our code to include our own ``SearchIndex`` to exclude indexing future-dated notes:"
        )

    def test_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.sqs.all()
        results = [int(result.pk) for result in sqs]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 3)

    def test_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual([int(result.pk) for result in results[1:11]],
                         [2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
        self.assertEqual(len(connections['default'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual(int(results[21].pk), 22)
        self.assertEqual(len(connections['default'].queries), 1)

    def test_count(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.sqs.all()
        self.assertEqual(sqs.count(), 23)
        self.assertEqual(sqs.count(), 23)
        self.assertEqual(len(sqs), 23)
        self.assertEqual(sqs.count(), 23)
        # Should only execute one query to count the length of the result set.
        self.assertEqual(len(connections['default'].queries), 1)

    def test_manual_iter(self):
        results = self.sqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = [int(result.pk) for result in results._manual_iter()]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 3)

    def test_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.sqs._cache_is_full(), False)
        results = self.sqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 3)

    def test___and__(self):
        sqs1 = self.sqs.filter(content='foo')
        sqs2 = self.sqs.filter(content='bar')
        sqs = sqs1 & sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'(foo AND bar)')

        # Now for something more complex...
        sqs3 = self.sqs.exclude(
            title='moof').filter(SQ(content='foo') | SQ(content='baz'))
        sqs4 = self.sqs.filter(content='bar')
        sqs = sqs3 & sqs4

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 3)
        self.assertEqual(sqs.query.build_query(),
                         u'(NOT (title:moof) AND (foo OR baz) AND bar)')

    def test___or__(self):
        sqs1 = self.sqs.filter(content='foo')
        sqs2 = self.sqs.filter(content='bar')
        sqs = sqs1 | sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'(foo OR bar)')

        # Now for something more complex...
        sqs3 = self.sqs.exclude(
            title='moof').filter(SQ(content='foo') | SQ(content='baz'))
        sqs4 = self.sqs.filter(content='bar').models(MockModel)
        sqs = sqs3 | sqs4

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(),
                         u'((NOT (title:moof) AND (foo OR baz)) OR bar)')

    def test_auto_query(self):
        # Ensure bits in exact matches get escaped properly as well.
        # This will break horrifically if escaping isn't working.
        sqs = self.sqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__contains="pants:rule">')
        self.assertEqual(sqs.query.build_query(), u'"pants\\:rule"')
        self.assertEqual(len(sqs), 0)

    # Regressions

    def test_regression_proper_start_offsets(self):
        sqs = self.sqs.filter(text='index')
        self.assertNotEqual(sqs.count(), 0)

        id_counts = {}

        for item in sqs:
            if item.id in id_counts:
                id_counts[item.id] += 1
            else:
                id_counts[item.id] = 1

        for key, value in id_counts.items():
            if value > 1:
                self.fail(
                    "Result with id '%s' seen more than once in the results." %
                    key)

    def test_regression_raw_search_breaks_slicing(self):
        sqs = self.sqs.raw_search('text: index')
        page_1 = [result.pk for result in sqs[0:10]]
        page_2 = [result.pk for result in sqs[10:20]]

        for pk in page_2:
            if pk in page_1:
                self.fail(
                    "Result with id '%s' seen more than once in the results." %
                    pk)

    # RelatedSearchQuerySet Tests

    def test_related_load_all(self):
        sqs = self.rsqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue(len(sqs) > 0)
        self.assertEqual(
            sqs[0].object.foo,
            u"Registering indexes in Haystack is very similar to registering models and ``ModelAdmin`` classes in the `Django admin site`_.  If you want to override the default indexing behavior for your model you can specify your own ``SearchIndex`` class.  This is useful for ensuring that future-dated or non-live content is not indexed and searchable. Our ``Note`` model has a ``pub_date`` field, so let's update our code to include our own ``SearchIndex`` to exclude indexing future-dated notes:"
        )

    def test_related_load_all_queryset(self):
        sqs = self.rsqs.load_all()
        self.assertEqual(len(sqs._load_all_querysets), 0)

        sqs = sqs.load_all_queryset(MockModel,
                                    MockModel.objects.filter(id__gt=1))
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        self.assertEqual([obj.object.id for obj in sqs], range(2, 24))

        sqs = sqs.load_all_queryset(MockModel,
                                    MockModel.objects.filter(id__gt=10))
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        self.assertEqual([obj.object.id for obj in sqs], range(11, 24))
        self.assertEqual([obj.object.id for obj in sqs[10:20]], [21, 22, 23])

    def test_related_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.rsqs.all()
        results = [int(result.pk) for result in sqs]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual([int(result.pk) for result in results[1:11]],
                         [2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
        self.assertEqual(len(connections['default'].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual(int(results[21].pk), 22)
        self.assertEqual(len(connections['default'].queries), 4)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual([int(result.pk) for result in results[20:30]],
                         [21, 22, 23])
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_manual_iter(self):
        results = self.rsqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = [int(result.pk) for result in results._manual_iter()]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

    def test_related_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.rsqs._cache_is_full(), False)
        results = self.rsqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 5)

    def test_quotes_regression(self):
        sqs = self.sqs.auto_query(u"44°48'40''N 20°28'32''E")
        # Should not have empty terms.
        self.assertEqual(sqs.query.build_query(),
                         u"44\xb048'40''N 20\xb028'32''E")
        # Should not cause Solr to 500.
        self.assertEqual(sqs.count(), 0)

        sqs = self.sqs.auto_query('blazing')
        self.assertEqual(sqs.query.build_query(), u'blazing')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('blazing saddles')
        self.assertEqual(sqs.query.build_query(), u'blazing saddles')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles')
        self.assertEqual(sqs.query.build_query(), u'\\"blazing saddles')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles"')
        self.assertEqual(sqs.query.build_query(), u'"blazing saddles"')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles"')
        self.assertEqual(sqs.query.build_query(), u'mel "blazing saddles"')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'saddles"')
        self.assertEqual(sqs.query.build_query(), u'mel "blazing \'saddles"')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"')
        self.assertEqual(sqs.query.build_query(), u'mel "blazing \'\'saddles"')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"\'')
        self.assertEqual(sqs.query.build_query(),
                         u'mel "blazing \'\'saddles" \'')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"\'"')
        self.assertEqual(sqs.query.build_query(),
                         u'mel "blazing \'\'saddles" \'\\"')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles" mel')
        self.assertEqual(sqs.query.build_query(), u'"blazing saddles" mel')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles" mel brooks')
        self.assertEqual(sqs.query.build_query(),
                         u'"blazing saddles" mel brooks')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles" brooks')
        self.assertEqual(sqs.query.build_query(),
                         u'mel "blazing saddles" brooks')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles" "brooks')
        self.assertEqual(sqs.query.build_query(),
                         u'mel "blazing saddles" \\"brooks')
        self.assertEqual(sqs.count(), 0)

    def test_result_class(self):
        # Assert that we're defaulting to ``SearchResult``.
        sqs = self.sqs.all()
        self.assertTrue(isinstance(sqs[0], SearchResult))

        # Custom class.
        sqs = self.sqs.result_class(MockSearchResult).all()
        self.assertTrue(isinstance(sqs[0], MockSearchResult))

        # Reset to default.
        sqs = self.sqs.result_class(None).all()
        self.assertTrue(isinstance(sqs[0], SearchResult))
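
The RelatedSearchQuerySet tests above also exercise load_all_queryset(), which
restricts the database queryset used when hydrating results for one model. A
short sketch, assuming MockModel is importable from the example's test app:

from haystack.query import RelatedSearchQuerySet

rsqs = RelatedSearchQuerySet().all().load_all()
# Only MockModel rows with id > 10 will be used when filling in result.object.
rsqs = rsqs.load_all_queryset(MockModel, MockModel.objects.filter(id__gt=10))
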
Example #37
0
class SearchQuerySetTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(SearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.cpkmmsi = CharPKMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.cpkmmsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.msqs = SearchQuerySet()

        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True

        reset_search_queries()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_unified_index
        settings.DEBUG = self.old_debug
        super(SearchQuerySetTestCase, self).tearDown()

    def test_len(self):
        self.assertEqual(len(self.msqs), 23)

    def test_repr(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(repr(self.msqs), "[<SearchResult: core.mockmodel (pk=u'1')>, <SearchResult: core.mockmodel (pk=u'2')>, <SearchResult: core.mockmodel (pk=u'3')>, <SearchResult: core.mockmodel (pk=u'4')>, <SearchResult: core.mockmodel (pk=u'5')>, <SearchResult: core.mockmodel (pk=u'6')>, <SearchResult: core.mockmodel (pk=u'7')>, <SearchResult: core.mockmodel (pk=u'8')>, <SearchResult: core.mockmodel (pk=u'9')>, <SearchResult: core.mockmodel (pk=u'10')>, <SearchResult: core.mockmodel (pk=u'11')>, <SearchResult: core.mockmodel (pk=u'12')>, <SearchResult: core.mockmodel (pk=u'13')>, <SearchResult: core.mockmodel (pk=u'14')>, <SearchResult: core.mockmodel (pk=u'15')>, <SearchResult: core.mockmodel (pk=u'16')>, <SearchResult: core.mockmodel (pk=u'17')>, <SearchResult: core.mockmodel (pk=u'18')>, <SearchResult: core.mockmodel (pk=u'19')>, '...(remaining elements truncated)...']")
        self.assertEqual(len(connections['default'].queries), 1)

    def test_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        msqs = self.msqs.all()
        results = [int(res.pk) for res in msqs]
        self.assertEqual(results, [res.pk for res in MOCK_SEARCH_RESULTS[:23]])
        self.assertEqual(len(connections['default'].queries), 3)

    def test_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual([int(res.pk) for res in results[1:11]], [res.pk for res in MOCK_SEARCH_RESULTS[1:11]])
        self.assertEqual(len(connections['default'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual(int(results[22].pk), MOCK_SEARCH_RESULTS[22].pk)
        self.assertEqual(len(connections['default'].queries), 1)

    def test_manual_iter(self):
        results = self.msqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        check = [result.pk for result in results._manual_iter()]
        self.assertEqual(check, [u'1', u'2', u'3', u'4', u'5', u'6', u'7', u'8', u'9', u'10', u'11', u'12', u'13', u'14', u'15', u'16', u'17', u'18', u'19', u'20', u'21', u'22', u'23'])

        self.assertEqual(len(connections['default'].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        # This will hang indefinitely if broken.
        old_ui = self.ui
        self.ui.build(indexes=[self.cpkmmsi])
        connections['default']._index = self.ui
        self.cpkmmsi.update()

        results = self.msqs.all()
        loaded = [result.pk for result in results._manual_iter()]
        self.assertEqual(loaded, [u'sometext', u'1234'])
        self.assertEqual(len(connections['default'].queries), 1)

        connections['default']._index = old_ui

    def test_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        sqs = SearchQuerySet().all()
        sqs.query.backend = MixedMockSearchBackend('default')
        results = sqs
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 0)
        self.assertEqual([int(result.pk) for result in results._result_cache if result is not None], [])
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 9)
        self.assertEqual([int(result.pk) for result in results._result_cache if result is not None], [1, 2, 3, 4, 5, 6, 7, 8, 10])
        self.assertEqual(len(connections['default'].queries), 2)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 17)
        self.assertEqual([int(result.pk) for result in results._result_cache if result is not None], [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20])
        self.assertEqual(len(connections['default'].queries), 4)
        results._fill_cache(20, 30)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual([int(result.pk) for result in results._result_cache if result is not None], [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20, 21, 22, 23])
        self.assertEqual(len(connections['default'].queries), 6)

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.msqs._cache_is_full(), False)
        results = self.msqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 3)

    def test_all(self):
        sqs = self.msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))

    def test_filter(self):
        sqs = self.msqs.filter(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_exclude(self):
        sqs = self.msqs.exclude(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_order_by(self):
        sqs = self.msqs.order_by('foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue('foo' in sqs.query.order_by)

    def test_models(self):
        # Stow.
        old_unified_index = connections['default']._index
        ui = UnifiedIndex()
        bmmsi = BasicMockModelSearchIndex()
        bammsi = BasicAnotherMockModelSearchIndex()
        ui.build(indexes=[bmmsi, bammsi])
        connections['default']._index = ui

        msqs = SearchQuerySet()

        sqs = msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 0)

        sqs = msqs.models(MockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

        sqs = msqs.models(MockModel, AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 2)

        # This will produce a warning.
        ui.build(indexes=[bmmsi])
        sqs = msqs.models(AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

    def test_result_class(self):
        sqs = self.msqs.all()
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

        # Custom class.
        class IttyBittyResult(object):
            pass

        sqs = self.msqs.result_class(IttyBittyResult)
        self.assertTrue(issubclass(sqs.query.result_class, IttyBittyResult))

        # Reset to default.
        sqs = self.msqs.result_class(None)
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

    def test_boost(self):
        sqs = self.msqs.boost('foo', 10)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.boost.keys()), 1)

    def test_highlight(self):
        sqs = self.msqs.highlight()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.highlight, True)

    def test_spelling(self):
        # Test the case where spelling support is disabled.
        sqs = self.msqs.filter(content='Indx')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.spelling_suggestion(), None)
        self.assertEqual(sqs.spelling_suggestion('indexy'), None)

    def test_raw_search(self):
        self.assertEqual(len(self.msqs.raw_search('foo')), 23)
        self.assertEqual(len(self.msqs.raw_search('(content__exact:hello AND content__exact:world)')), 23)

    def test_load_all(self):
        # Models with character primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = CharPKMockSearchBackend('charpk')
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # If nothing is handled, you get nothing.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        ui.build(indexes=[])
        connections['default']._index = ui

        sqs = self.msqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        connections['default']._index = old_ui

        # For full tests, see the solr_backend.

    def test_load_all_read_queryset(self):
        # Stow.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        gafmmsi = GhettoAFifthMockModelSearchIndex()
        ui.build(indexes=[gafmmsi])
        connections['default']._index = ui
        gafmmsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # The deleted result isn't returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 1)

        # Register a SearchIndex with a read_queryset that returns deleted items
        rqstsi = TextReadQuerySetTestSearchIndex()
        ui.build(indexes=[rqstsi])
        rqstsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # Both the deleted and not deleted items are returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # Restore.
        connections['default']._index = old_ui

    def test_auto_query(self):
        sqs = self.msqs.auto_query('test search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains=test search -stuff>')

        sqs = self.msqs.auto_query('test "my thing" search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains=test "my thing" search -stuff>')

        sqs = self.msqs.auto_query('test "my thing" search \'moar quotes\' -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains=test "my thing" search \'moar quotes\' -stuff>')

        sqs = self.msqs.auto_query('test "my thing" search \'moar quotes\' "foo -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains=test "my thing" search \'moar quotes\' "foo -stuff>')

        sqs = self.msqs.auto_query('test - stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), "<SQ: AND content__contains=test - stuff>")

        # Ensure bits in exact matches get escaped properly as well.
        sqs = self.msqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains="pants:rule">')

        # Now with a different fieldname
        sqs = self.msqs.auto_query('test search -stuff', fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), "<SQ: AND title__contains=test search -stuff>")

        sqs = self.msqs.auto_query('test "my thing" search -stuff', fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND title__contains=test "my thing" search -stuff>')

    def test_count(self):
        self.assertEqual(self.msqs.count(), 23)

    def test_facet_counts(self):
        self.assertEqual(self.msqs.facet_counts(), {})

    def test_best_match(self):
        self.assertTrue(isinstance(self.msqs.best_match(), SearchResult))

    def test_latest(self):
        self.assertTrue(isinstance(self.msqs.latest('pub_date'), SearchResult))

    def test_more_like_this(self):
        mock = MockModel()
        mock.id = 1

        self.assertEqual(len(self.msqs.more_like_this(mock)), 23)

    def test_facets(self):
        sqs = self.msqs.facet('foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.facets), 1)

        sqs2 = self.msqs.facet('foo').facet('bar')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.facets), 2)

    def test_date_facets(self):
        try:
            sqs = self.msqs.date_facet('foo', start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='smarblaph')
            self.fail()
        except FacetingError as e:
            self.assertEqual(str(e), "The gap_by ('smarblaph') must be one of the following: year, month, day, hour, minute, second.")

        sqs = self.msqs.date_facet('foo', start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='month')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.date_facets), 1)

        sqs2 = self.msqs.date_facet('foo', start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='month').date_facet('bar', start_date=datetime.date(2007, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='year')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.date_facets), 2)
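The stow/build/restore dance around connections['default']._index in setUp(), tearDown() and several of the tests above can be factored into a small helper; a minimal sketch, assuming Haystack's connections registry and UnifiedIndex as used in this example (the helper itself is hypothetical):

import contextlib

from haystack import connections
from haystack.utils.loading import UnifiedIndex

@contextlib.contextmanager
def temporary_unified_index(indexes, using='default'):
    # Swap a freshly built UnifiedIndex in for the duration of the block,
    # then restore the original, mirroring the stow/restore pattern above.
    old_ui = connections[using]._index
    ui = UnifiedIndex()
    ui.build(indexes=indexes)
    connections[using]._index = ui
    try:
        yield ui
    finally:
        connections[using]._index = old_ui

A test could then wrap the body of, say, test_load_all_read_queryset in "with temporary_unified_index([gafmmsi]):" instead of saving and restoring the index by hand.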
Example #38
0
class SearchQuerySetTestCase(TestCase):
    def setUp(self):
        super(SearchQuerySetTestCase, self).setUp()
        self.bsqs = SearchQuerySet(query=DummySearchQuery(
            backend=DummySearchBackend()))
        self.msqs = SearchQuerySet(query=MockSearchQuery(
            backend=MockSearchBackend()))
        self.mmsqs = SearchQuerySet(query=MockSearchQuery(
            backend=MixedMockSearchBackend()))

        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_site = haystack.site
        test_site = SearchSite()
        test_site.register(MockModel)
        test_site.register(CharPKMockModel)
        haystack.site = test_site

        backends.reset_search_queries()

    def tearDown(self):
        # Restore.
        haystack.site = self.old_site
        settings.DEBUG = self.old_debug
        super(SearchQuerySetTestCase, self).tearDown()

    def test_len(self):
        # Dummy always returns 0.
        self.assertEqual(len(self.bsqs), 0)

        self.assertEqual(len(self.msqs), 100)

    def test_repr(self):
        self.assertEqual(repr(self.bsqs), '[]')

        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        self.assertEqual(
            repr(self.msqs),
            "[<SearchResult: core.MockModel (pk=0)>, <SearchResult: core.MockModel (pk=1)>, <SearchResult: core.MockModel (pk=2)>, <SearchResult: core.MockModel (pk=3)>, <SearchResult: core.MockModel (pk=4)>, <SearchResult: core.MockModel (pk=5)>, <SearchResult: core.MockModel (pk=6)>, <SearchResult: core.MockModel (pk=7)>, <SearchResult: core.MockModel (pk=8)>, <SearchResult: core.MockModel (pk=9)>, <SearchResult: core.MockModel (pk=10)>, <SearchResult: core.MockModel (pk=11)>, <SearchResult: core.MockModel (pk=12)>, <SearchResult: core.MockModel (pk=13)>, <SearchResult: core.MockModel (pk=14)>, <SearchResult: core.MockModel (pk=15)>, <SearchResult: core.MockModel (pk=16)>, <SearchResult: core.MockModel (pk=17)>, <SearchResult: core.MockModel (pk=18)>, '...(remaining elements truncated)...']"
        )
        self.assertEqual(len(backends.queries), 1)

    def test_iter(self):
        # Dummy always returns [].
        self.assertEqual([result for result in self.bsqs.all()], [])

        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        msqs = self.msqs.all()
        results = [result for result in msqs]
        self.assertEqual(results, MOCK_SEARCH_RESULTS)
        self.assertEqual(len(backends.queries), 10)

    def test_slice(self):
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        results = self.msqs.all()
        self.assertEqual(results[1:11], MOCK_SEARCH_RESULTS[1:11])
        self.assertEqual(len(backends.queries), 1)

        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        results = self.msqs.all()
        self.assertEqual(results[50], MOCK_SEARCH_RESULTS[50])
        self.assertEqual(len(backends.queries), 1)

    def test_manual_iter(self):
        results = self.msqs.all()

        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)

        for offset, result in enumerate(results._manual_iter()):
            self.assertEqual(result, MOCK_SEARCH_RESULTS[offset])

        self.assertEqual(len(backends.queries), 10)

        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not in the SearchSite) than the hit count indicates.
        # This will hang indefinitely if broken.
        results = self.mmsqs.all()
        loaded = [result.pk for result in results._manual_iter()]
        self.assertEqual(loaded, [
            0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 16, 17, 18,
            19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29
        ])
        self.assertEqual(len(backends.queries), 8)

    def test_fill_cache(self):
        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        results = self.msqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(backends.queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 10)
        self.assertEqual(len(backends.queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 20)
        self.assertEqual(len(backends.queries), 2)

        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not in the SearchSite) than the hit count indicates.
        results = self.mmsqs.all()
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 0)
        self.assertEqual([
            result.pk for result in results._result_cache if result is not None
        ], [])
        self.assertEqual(len(backends.queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 9)
        self.assertEqual([
            result.pk for result in results._result_cache if result is not None
        ], [0, 1, 2, 3, 4, 5, 6, 7, 8])
        self.assertEqual(len(backends.queries), 2)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 17)
        self.assertEqual([
            result.pk for result in results._result_cache if result is not None
        ], [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19])
        self.assertEqual(len(backends.queries), 4)
        results._fill_cache(20, 30)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 27)
        self.assertEqual([
            result.pk for result in results._result_cache if result is not None
        ], [
            0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20, 21,
            22, 23, 24, 25, 26, 27, 28, 29
        ])
        self.assertEqual(len(backends.queries), 6)

    def test_cache_is_full(self):
        # Dummy always has a count of 0 and an empty _result_cache; once the query has run, the cache counts as full.
        self.assertEqual(self.bsqs._cache_is_full(), False)
        results = self.bsqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)

        backends.reset_search_queries()
        self.assertEqual(len(backends.queries), 0)
        self.assertEqual(self.msqs._cache_is_full(), False)
        results = self.msqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(backends.queries), 10)

    def test_all(self):
        sqs = self.bsqs.all()
        self.assert_(isinstance(sqs, SearchQuerySet))

    def test_filter(self):
        sqs = self.bsqs.filter(content='foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_exclude(self):
        sqs = self.bsqs.exclude(content='foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_order_by(self):
        sqs = self.bsqs.order_by('foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assert_('foo' in sqs.query.order_by)

    def test_models(self):
        mock_index_site = SearchSite()
        mock_index_site.register(MockModel)
        mock_index_site.register(AnotherMockModel)

        bsqs = SearchQuerySet(site=mock_index_site)

        sqs = bsqs.all()
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 0)

        sqs = bsqs.models(MockModel)
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

        sqs = bsqs.models(MockModel, AnotherMockModel)
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 2)

        # This will produce a warning.
        mock_index_site.unregister(AnotherMockModel)
        sqs = bsqs.models(AnotherMockModel)
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

    def test_result_class(self):
        sqs = self.bsqs.all()
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

        # Custom class.
        class IttyBittyResult(object):
            pass

        sqs = self.bsqs.result_class(IttyBittyResult)
        self.assertTrue(issubclass(sqs.query.result_class, IttyBittyResult))

        # Reset to default.
        sqs = self.bsqs.result_class(None)
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

    def test_boost(self):
        sqs = self.bsqs.boost('foo', 10)
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.boost.keys()), 1)

    def test_highlight(self):
        sqs = self.bsqs.highlight()
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.highlight, True)

    def test_spelling(self):
        # Test the case where spelling support is disabled.
        sqs = self.bsqs.filter(content='Indx')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.spelling_suggestion(), None)
        self.assertEqual(sqs.spelling_suggestion('indexy'), None)

    def test_raw_search(self):
        self.assertEqual(len(self.bsqs.raw_search('foo')), 0)
        self.assertEqual(
            len(
                self.bsqs.raw_search(
                    '(content__exact hello AND content__exact world)')), 1)

    def test_load_all(self):
        # Models with character primary keys
        sqs = SearchQuerySet(query=MockSearchQuery(
            backend=CharPKMockSearchBackend()))
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # If nothing is registered, you get nothing.
        haystack.site.unregister(MockModel)
        haystack.site.unregister(CharPKMockModel)
        sqs = self.msqs.load_all()
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        # For full tests, see the solr_backend.

    def test_load_all_read_queryset(self):
        # stow
        old_site = haystack.site
        test_site = SearchSite()
        # Register a default SearchIndex first (without the SoftDeleteManager as the read_queryset)
        test_site.register(AFifthMockModel)
        haystack.site = test_site

        sqs = SearchQuerySet(query=MockSearchQuery(
            backend=ReadQuerySetMockSearchBackend()))
        results = sqs.load_all().all()
        results._fill_cache(0, 2)
        # The deleted result isn't returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 1)

        test_site.unregister(AFifthMockModel)
        # Register a SearchIndex with a read_queryset that returns deleted items
        test_site.register(AFifthMockModel, ReadQuerySetTestSearchIndex)

        sqs = SearchQuerySet(query=MockSearchQuery(
            backend=ReadQuerySetMockSearchBackend()))
        results = sqs.load_all().all()
        results._fill_cache(0, 2)
        # Both the deleted and not deleted items are returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # restore
        haystack.site = old_site

    def test_auto_query(self):
        sqs = self.bsqs.auto_query('test search -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND (content__exact=test AND content__exact=search AND NOT (content__exact=stuff))>'
        )

        sqs = self.bsqs.auto_query('test "my thing" search -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND (content__exact=my thing AND content__exact=test AND content__exact=search AND NOT (content__exact=stuff))>'
        )

        sqs = self.bsqs.auto_query(
            'test "my thing" search \'moar quotes\' -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            "<SQ: AND (content__exact=my thing AND content__exact=test AND content__exact=search AND content__exact='moar AND content__exact=quotes' AND NOT (content__exact=stuff))>"
        )

        sqs = self.bsqs.auto_query(
            'test "my thing" search \'moar quotes\' "foo -stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND (content__exact=my thing AND content__exact=test AND content__exact=search AND content__exact=\'moar AND content__exact=quotes\' AND content__exact="foo AND NOT (content__exact=stuff))>'
        )

        sqs = self.bsqs.auto_query('test - stuff')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND (content__exact=test AND content__exact=- AND content__exact=stuff)>'
        )

        # Ensure bits in exact matches get escaped properly as well.
        sqs = self.bsqs.auto_query('"pants:rule"')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__exact=pants:rule>')

    def test_count(self):
        self.assertEqual(self.bsqs.count(), 0)

    def test_facet_counts(self):
        self.assertEqual(self.bsqs.facet_counts(), {})

    def test_best_match(self):
        self.assert_(isinstance(self.msqs.best_match(), SearchResult))

    def test_latest(self):
        self.assert_(isinstance(self.msqs.latest('pub_date'), SearchResult))

    def test_more_like_this(self):
        mock = MockModel()
        mock.id = 1

        self.assertEqual(len(self.msqs.more_like_this(mock)), 100)

    def test_facets(self):
        sqs = self.bsqs.facet('foo')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.facets), 1)

        sqs2 = self.bsqs.facet('foo').facet('bar')
        self.assert_(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.facets), 2)

    def test_date_facets(self):
        try:
            sqs = self.bsqs.date_facet('foo',
                                       start_date=datetime.date(2008, 2, 25),
                                       end_date=datetime.date(2009, 2, 25),
                                       gap_by='smarblaph')
            self.fail()
        except FacetingError as e:
            self.assertEqual(
                str(e),
                "The gap_by ('smarblaph') must be one of the following: year, month, day, hour, minute, second."
            )

        sqs = self.bsqs.date_facet('foo',
                                   start_date=datetime.date(2008, 2, 25),
                                   end_date=datetime.date(2009, 2, 25),
                                   gap_by='month')
        self.assert_(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.date_facets), 1)

        sqs2 = self.bsqs.date_facet('foo',
                                    start_date=datetime.date(2008, 2, 25),
                                    end_date=datetime.date(2009, 2, 25),
                                    gap_by='month').date_facet(
                                        'bar',
                                        start_date=datetime.date(2007, 2, 25),
                                        end_date=datetime.date(2009, 2, 25),
                                        gap_by='year')
        self.assert_(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.date_facets), 2)
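The ReadQuerySetTestSearchIndex registered in test_load_all_read_queryset above differs from a plain index only in read_queryset(); a minimal sketch of such an index for the pre-UnifiedIndex API used in this example (the index name, manager name and field mapping are placeholders, not the classes from the test suite):

from haystack import indexes

from core.models import AFifthMockModel  # import path assumed for the test model


class SoftDeleteAwareIndex(indexes.SearchIndex):
    # load_all() hydrates results through read_queryset(), so handing back an
    # unfiltered manager makes soft-deleted rows resolvable again.
    text = indexes.CharField(document=True, model_attr='author')

    def read_queryset(self):
        # 'objects_complete' is a hypothetical manager that does not hide
        # soft-deleted rows; a default soft-delete manager would filter them out.
        return AFifthMockModel.objects_complete.all()

It would be registered the way the test does: test_site.register(AFifthMockModel, SoftDeleteAwareIndex).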
Example #39
0
    def search(self, order_by='newest'):
        self.clean()
        sqs = SearchQuerySet()
        if self.is_valid():
            query = self.cleaned_data['q']
        else:
            query = ''
        
        if not query:
            return sqs.none()
        
        if not order_by:
            order_by = 'newest'

        sqs = sqs.filter(status=True)
        user = self.user

        is_an_admin = user.profile.is_superuser

        # Special-case corporate memberships because they need to honor two settings
        # (anonymoussearchcorporatemembers and membersearchcorporatemembers).
        if CorpMemb and self.cleaned_data['models'] == ["%s.%s" % (CorpMemb._meta.app_label,
                                                                   CorpMemb._meta.model_name)]:
            filter_and, filter_or = CorpMemb.get_search_filter(user)
            q_obj = None
            if filter_and:
                q_obj = Q(**filter_and)
            if filter_or:
                q_obj_or = reduce(operator.or_, [Q(**{key: value}) for key, value in filter_or.items()])
                if q_obj:
                    q_obj = reduce(operator.and_, [q_obj, q_obj_or])
                else:
                    q_obj = q_obj_or
            if q_obj:
                sqs = sqs.filter(q_obj)

            if query:
                sqs = sqs.auto_query(sqs.query.clean(query))
        else:

            if query:
                sqs = sqs.auto_query(sqs.query.clean(query))
                if user:
                    if not is_an_admin:
                        if not user.is_anonymous:
                            # if neither admin nor anonymous

                            # (status+status_detail+(anon OR user)) OR (who_can_view__exact)
                            anon_query = Q(**{'allow_anonymous_view':True,})
                            user_query = Q(**{'allow_user_view':True,})
                            sec1_query = Q(**{
                                'status_detail':'active',
                            })
                            sec2_query = Q(**{
                                'owner__exact':user.username
                            })
                            query = reduce(operator.or_, [anon_query, user_query])
                            query = reduce(operator.and_, [sec1_query, query])
                            query = reduce(operator.or_, [query, sec2_query])
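                            # Net effect: (status_detail='active' AND (allow_anonymous_view OR allow_user_view)) OR owned by the user.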

                            sqs = sqs.filter(query)
                        else:
                            # if anonymous
                            sqs = sqs.filter(status_detail='active')
                            sqs = sqs.filter(allow_anonymous_view=True)
                else:
                    # if anonymous
                    sqs = sqs.filter(status_detail='active')
                    sqs = sqs.filter(allow_anonymous_view=True)
            else:
                if user:
                    if is_an_admin:
                        sqs = sqs.all()
                    else:
                        if not user.is_anonymous:
                            # (status+status_detail+anon OR who_can_view__exact)
                            sec1_query = Q(**{
                                'status_detail':'active',
                                'allow_anonymous_view':True,
                            })
                            sec2_query = Q(**{
                                'owner__exact':user.username
                            })
                            query = reduce(operator.or_, [sec1_query, sec2_query])
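                            # Net effect: (status_detail='active' AND allow_anonymous_view) OR owned by the user.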
                            sqs = sqs.filter(query)
                        else:
                            # if anonymous
                            sqs = sqs.filter(status_detail='active')
                            sqs = sqs.filter(allow_anonymous_view=True)
                else:
                    # if anonymous
                    sqs = sqs.filter(status_detail='active')
                    sqs = sqs.filter(allow_anonymous_view=True)

            # For Solr, order_by() can only be called once, so we have to do it here.
            if order_by == 'newest':
                # Changed order_by from create_dt to order because the order field
                # is common across search indexes and is used by other modules (e.g. news).
                sqs = sqs.order_by('-order')
            else:
                sqs = sqs.order_by('order')

        if self.load_all:
            sqs = sqs.load_all()

        return sqs
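For context, the search() method above belongs to a Haystack-style search form; a rough usage sketch of how it might be wired into a view (the form class name and constructor arguments are assumptions based on the attributes the method reads: self.user, self.load_all, self.cleaned_data):

from django.shortcuts import render

def search_view(request):
    # PerObjectSearchForm stands in for the (unshown) form class that defines
    # the search() method above; user= and load_all= are assumed keyword
    # arguments matching the attributes that method relies on.
    form = PerObjectSearchForm(request.GET, user=request.user, load_all=True)
    results = form.search(order_by=request.GET.get('order_by', 'newest'))
    return render(request, 'search/results.html', {'form': form, 'results': results})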
class LiveElasticsearchSearchQuerySetTestCase(TestCase):
    """Used to test actual implementation details of the SearchQuerySet."""
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveElasticsearchSearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = ElasticsearchMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui

        self.sqs = SearchQuerySet()
        self.rsqs = RelatedSearchQuerySet()

        # Ugly, but skipping the constant reindexing saves us almost 50% of the runtime.
        global lssqstc_all_loaded

        if lssqstc_all_loaded is None:
            print('Reloading data...')
            lssqstc_all_loaded = True

            # Wipe it clean.
            clear_elasticsearch_index()

            # Force indexing of the content.
            self.smmi.update()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(LiveElasticsearchSearchQuerySetTestCase, self).tearDown()

    def test_load_all(self):
        sqs = self.sqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue(len(sqs) > 0)
        self.assertEqual(sqs[0].object.foo, u'In addition, you may specify other fields to be populated along with the document. In this case, we also index the user who authored the document as well as the date the document was published. The variable you assign the SearchField to should directly map to the field your search backend is expecting. You instantiate most search fields with a parameter that points to the attribute of the object to populate that field with.')

    def test_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.sqs.all()
        results = sorted([int(result.pk) for result in sqs])
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 3)

    def test_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual([int(result.pk) for result in results[1:11]], [7, 12, 17, 1, 6, 11, 16, 23, 5, 10])
        self.assertEqual(len(connections['default'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual(int(results[21].pk), 18)
        self.assertEqual(len(connections['default'].queries), 1)

    def test_count(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.sqs.all()
        self.assertEqual(sqs.count(), 23)
        self.assertEqual(sqs.count(), 23)
        self.assertEqual(len(sqs), 23)
        self.assertEqual(sqs.count(), 23)
        # Should only execute one query to count the length of the result set.
        self.assertEqual(len(connections['default'].queries), 1)

    def test_manual_iter(self):
        results = self.sqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = [int(result.pk) for result in results._manual_iter()]
        self.assertEqual(results, [2, 7, 12, 17, 1, 6, 11, 16, 23, 5, 10, 15, 22, 4, 9, 14, 19, 21, 3, 8, 13, 18, 20])
        self.assertEqual(len(connections['default'].queries), 3)

    def test_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.sqs._cache_is_full(), False)
        results = self.sqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 3)

    def test___and__(self):
        sqs1 = self.sqs.filter(content='foo')
        sqs2 = self.sqs.filter(content='bar')
        sqs = sqs1 & sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'((foo) AND (bar))')

        # Now for something more complex...
        sqs3 = self.sqs.exclude(title='moof').filter(SQ(content='foo') | SQ(content='baz'))
        sqs4 = self.sqs.filter(content='bar')
        sqs = sqs3 & sqs4

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 3)
        self.assertEqual(sqs.query.build_query(), u'(NOT (title:(moof)) AND ((foo) OR (baz)) AND (bar))')

    def test___or__(self):
        sqs1 = self.sqs.filter(content='foo')
        sqs2 = self.sqs.filter(content='bar')
        sqs = sqs1 | sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'((foo) OR (bar))')

        # Now for something more complex...
        sqs3 = self.sqs.exclude(title='moof').filter(SQ(content='foo') | SQ(content='baz'))
        sqs4 = self.sqs.filter(content='bar').models(MockModel)
        sqs = sqs3 | sqs4

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'((NOT (title:(moof)) AND ((foo) OR (baz))) OR (bar))')

    def test_auto_query(self):
        # Ensure bits in exact matches get escaped properly as well.
        # This will break horrifically if escaping isn't working.
        sqs = self.sqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains="pants:rule">')
        self.assertEqual(sqs.query.build_query(), u'("pants\\:rule")')
        self.assertEqual(len(sqs), 0)

    # Regressions

    def test_regression_proper_start_offsets(self):
        sqs = self.sqs.filter(text='index')
        self.assertNotEqual(sqs.count(), 0)

        id_counts = {}

        for item in sqs:
            if item.id in id_counts:
                id_counts[item.id] += 1
            else:
                id_counts[item.id] = 1

        for key, value in id_counts.items():
            if value > 1:
                self.fail("Result with id '%s' seen more than once in the results." % key)

    def test_regression_raw_search_breaks_slicing(self):
        sqs = self.sqs.raw_search('text:index')
        page_1 = [result.pk for result in sqs[0:10]]
        page_2 = [result.pk for result in sqs[10:20]]

        for pk in page_2:
            if pk in page_1:
                self.fail("Result with id '%s' seen more than once in the results." % pk)

    # RelatedSearchQuerySet Tests

    def test_related_load_all(self):
        sqs = self.rsqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue(len(sqs) > 0)
        self.assertEqual(sqs[0].object.foo, u'In addition, you may specify other fields to be populated along with the document. In this case, we also index the user who authored the document as well as the date the document was published. The variable you assign the SearchField to should directly map to the field your search backend is expecting. You instantiate most search fields with a parameter that points to the attribute of the object to populate that field with.')

    def test_related_load_all_queryset(self):
        sqs = self.rsqs.load_all()
        self.assertEqual(len(sqs._load_all_querysets), 0)

        sqs = sqs.load_all_queryset(MockModel, MockModel.objects.filter(id__gt=1))
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        self.assertEqual(sorted([obj.object.id for obj in sqs]), range(2, 24))

        sqs = sqs.load_all_queryset(MockModel, MockModel.objects.filter(id__gt=10))
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        self.assertEqual([obj.object.id for obj in sqs], [12, 17, 11, 16, 23, 15, 22, 14, 19, 21, 13, 18, 20])
        self.assertEqual([obj.object.id for obj in sqs[10:20]], [13, 18, 20])

    def test_related_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.rsqs.all()
        results = [int(result.pk) for result in sqs]
        self.assertEqual(results, [2, 7, 12, 17, 1, 6, 11, 16, 23, 5, 10, 15, 22, 4, 9, 14, 19, 21, 3, 8, 13, 18, 20])
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual([int(result.pk) for result in results[1:11]], [7, 12, 17, 1, 6, 11, 16, 23, 5, 10])
        self.assertEqual(len(connections['default'].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual(int(results[21].pk), 18)
        self.assertEqual(len(connections['default'].queries), 4)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual([int(result.pk) for result in results[20:30]], [13, 18, 20])
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_manual_iter(self):
        results = self.rsqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = sorted([int(result.pk) for result in results._manual_iter()])
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

    def test_related_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.rsqs._cache_is_full(), False)
        results = self.rsqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 5)

    def test_quotes_regression(self):
        sqs = self.sqs.auto_query(u"44°48'40''N 20°28'32''E")
        # Should not have empty terms.
        self.assertEqual(sqs.query.build_query(), u"(44\xb048'40''N 20\xb028'32''E)")
        # Should not cause Elasticsearch to 500.
        self.assertEqual(sqs.count(), 0)

        sqs = self.sqs.auto_query('blazing')
        self.assertEqual(sqs.query.build_query(), u'(blazing)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('blazing saddles')
        self.assertEqual(sqs.query.build_query(), u'(blazing saddles)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles')
        self.assertEqual(sqs.query.build_query(), u'(\\"blazing saddles)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles"')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles"')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'saddles"')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing \'saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing \'\'saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"\'')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing \'\'saddles" \')')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"\'"')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing \'\'saddles" \'\\")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles" mel')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles" mel)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles" mel brooks')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles" mel brooks)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles" brooks')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing saddles" brooks)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles" "brooks')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing saddles" \\"brooks)')
        self.assertEqual(sqs.count(), 0)

    def test_query_generation(self):
        sqs = self.sqs.filter(SQ(content=AutoQuery("hello world")) | SQ(title=AutoQuery("hello world")))
        self.assertEqual(sqs.query.build_query(), u"((hello world) OR title:(hello world))")

    def test_result_class(self):
        # Assert that we're defaulting to ``SearchResult``.
        sqs = self.sqs.all()
        self.assertTrue(isinstance(sqs[0], SearchResult))

        # Custom class.
        sqs = self.sqs.result_class(MockSearchResult).all()
        self.assertTrue(isinstance(sqs[0], MockSearchResult))

        # Reset to default.
        sqs = self.sqs.result_class(None).all()
        self.assertTrue(isinstance(sqs[0], SearchResult))