def setUp(self):
        super(AlteredInternalNamesTestCase, self).setUp()

        self.old_ui = connections['default'].get_unified_index()
        ui = UnifiedIndex()
        ui.build(indexes=[MockModelSearchIndex()])
        connections['default']._index = ui
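
    # The matching tearDown is not shown in this snippet; a hedged sketch of
    # the restore step, following the stow/restore convention used by the
    # other examples in this listing:
    def tearDown(self):
        connections['default']._index = self.old_ui
        super(AlteredInternalNamesTestCase, self).tearDown()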
Example #2
    def test_load_all_read_queryset(self):
        # Stow.
        old_ui = connections["default"]._index
        ui = UnifiedIndex()
        gafmmsi = GhettoAFifthMockModelSearchIndex()
        ui.build(indexes=[gafmmsi])
        connections["default"]._index = ui
        gafmmsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend("default")
        results._fill_cache(0, 2)

        # The deleted result isn't returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 1)

        # Register a SearchIndex with a read_queryset that returns deleted items
        rqstsi = TextReadQuerySetTestSearchIndex()
        ui.build(indexes=[rqstsi])
        rqstsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend("default")
        results._fill_cache(0, 2)

        # Both the deleted and not deleted items are returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # Restore.
        connections["default"]._index = old_ui
Example #3
    def test_all_cases(self, mock_send_request, mock_log):
        self.sample_objs = []

        for i in xrange(1, 4):
            mock = MockModel()
            mock.id = i
            mock.author = 'daniel%s' % i
            mock.pub_date = datetime.date(2009, 2, 25) - datetime.timedelta(days=i)
            self.sample_objs.append(mock)

        # Set up the rest of the bits.
        ui = UnifiedIndex()
        smmi = SolrMockSearchIndex()
        ui.build(indexes=[smmi])
        connections['default']._index = ui
        sb = connections['default'].get_backend()

        # Prior to the addition of the try/except bits, these would all fail miserably.
        sb.update(smmi, self.sample_objs)
        self.assertEqual(mock_log.call_count, 1)

        sb.remove(self.sample_objs[0])
        self.assertEqual(mock_log.call_count, 2)

        sb.search('search')
        self.assertEqual(mock_log.call_count, 3)

        sb.more_like_this(self.sample_objs[0])
        self.assertEqual(mock_log.call_count, 4)

        sb.clear([MockModel])
        self.assertEqual(mock_log.call_count, 5)

        sb.clear()
        self.assertEqual(mock_log.call_count, 6)
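
# test_all_cases() receives mock_send_request and mock_log, which implies the
# original test is wrapped in stacked mock.patch decorators. A hedged sketch of
# how such decorators are typically applied (the patch targets and side effect
# are assumptions, not the original decorators); the bottom-most decorator's
# mock is passed to the test first.
from unittest import mock

import pysolr
from django.test import TestCase


class FailingSolrBackendSketchTestCase(TestCase):
    @mock.patch("logging.Logger.error")
    @mock.patch("pysolr.Solr._send_request",
                side_effect=pysolr.SolrError("simulated connection failure"))
    def test_all_cases(self, mock_send_request, mock_log):
        # Body as in the example above: every backend call should be swallowed
        # and logged rather than raising.
        ...
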
    def test_build_schema(self):
        old_ui = connections['default'].get_unified_index()

        (content_field_name, mapping) = self.sb.build_schema(old_ui.all_searchfields())
        self.assertEqual(content_field_name, 'text')
        self.assertEqual(len(mapping), 4)
        self.assertEqual(mapping, {
            'text': {'index': 'analyzed', 'term_vector': 'with_positions_offsets', 'type': 'string', 'analyzer': 'snowball', 'boost': 1.0, 'store': 'yes'},
            'pub_date': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'date'},
            'name': {'index': 'analyzed', 'term_vector': 'with_positions_offsets', 'type': 'string', 'analyzer': 'snowball', 'boost': 1.0, 'store': 'yes'},
            'name_exact': {'index': 'not_analyzed', 'term_vector': 'with_positions_offsets', 'boost': 1.0, 'store': 'yes', 'type': 'string'}
        })

        ui = UnifiedIndex()
        ui.build(indexes=[ElasticsearchComplexFacetsMockSearchIndex()])
        (content_field_name, mapping) = self.sb.build_schema(ui.all_searchfields())
        self.assertEqual(content_field_name, 'text')
        self.assertEqual(len(mapping), 15)
        self.assertEqual(mapping, {
            'name': {'index': 'analyzed', 'term_vector': 'with_positions_offsets', 'type': 'string', 'analyzer': 'snowball', 'boost': 1.0, 'store': 'yes'},
            'is_active_exact': {'index': 'not_analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'boolean'},
            'created': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'date'},
            'post_count': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'long'},
            'created_exact': {'index': 'not_analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'date'},
            'sites_exact': {'index': 'not_analyzed', 'term_vector': 'with_positions_offsets', 'boost': 1.0, 'store': 'yes', 'type': 'string'},
            'is_active': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'boolean'},
            'sites': {'index': 'analyzed', 'term_vector': 'with_positions_offsets', 'type': 'string', 'analyzer': 'snowball', 'boost': 1.0, 'store': 'yes'},
            'post_count_i': {'index': 'not_analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'long'},
            'average_rating': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'float'},
            'text': {'index': 'analyzed', 'term_vector': 'with_positions_offsets', 'type': 'string', 'analyzer': 'snowball', 'boost': 1.0, 'store': 'yes'},
            'pub_date_exact': {'index': 'not_analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'date'},
            'name_exact': {'index': 'not_analyzed', 'term_vector': 'with_positions_offsets', 'boost': 1.0, 'store': 'yes', 'type': 'string'},
            'pub_date': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'date'},
            'average_rating_exact': {'index': 'not_analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'float'}
        })
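
# Each `<field>_exact` entry with 'index': 'not_analyzed' in the mappings above
# comes from declaring the field with faceted=True, which makes Haystack add a
# shadow exact-match field next to the analyzed one. An illustrative sketch,
# not the actual ElasticsearchComplexFacetsMockSearchIndex definition:
from haystack import indexes


class FacetedFieldsSketchIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    name = indexes.CharField(model_attr='author', faceted=True)            # -> name, name_exact
    pub_date = indexes.DateTimeField(model_attr='pub_date', faceted=True)  # -> pub_date, pub_date_exact

    def get_model(self):
        return MockModel
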
Example #5
class HaystackBackendTestCase(object):
    """
    Abstract test case that implements a hack to ensure ``connections``
    uses the right index.

    Subclasses must override ``get_index()`` to return the ``SearchIndex``
    under test.
    """
    def get_index(self):
        raise NotImplementedError

    def get_objects(self):
        raise NotImplementedError

    def setUp(self):
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.index = self.get_index()
        self.ui.build(indexes=[self.index])
        self.backend = connections['default'].get_backend()
        connections['default']._index = self.ui

    def tearDown(self):
        self.backend.clear()
        connections['default']._index = self.old_ui
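
# A hedged sketch of how a concrete test case would plug into this mixin:
# override get_index() (and get_objects() where needed) and also inherit from
# TestCase so setUp()/tearDown() actually run. Names are illustrative.
class MockModelBackendSketchTestCase(HaystackBackendTestCase, TestCase):
    def get_index(self):
        return BasicMockModelSearchIndex()

    def get_objects(self):
        return MockModel.objects.all()
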
class LiveSimpleSearchQuerySetTestCase(TestCase):
    fixtures = ['bulk_data.json']
    
    def setUp(self):
        super(LiveSimpleSearchQuerySetTestCase, self).setUp()
        
        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SimpleMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui
        
        self.sample_objs = MockModel.objects.all()
        self.sqs = SearchQuerySet()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(LiveSimpleSearchQuerySetTestCase, self).tearDown()
    
    def test_general_queries(self):
        # For now, just make sure these don't throw an exception.
        # They won't work until the simple backend is improved.
        self.assertTrue(len(self.sqs.auto_query('daniel')) > 0)
        self.assertTrue(len(self.sqs.filter(text='index')) > 0)
        self.assertTrue(len(self.sqs.exclude(name='daniel')) > 0)
        self.assertTrue(len(self.sqs.order_by('-pub_date')) > 0)
Example #7
class ResultsPerPageTestCase(TestCase):
    urls = 'test_haystack.results_per_page_urls'

    def setUp(self):
        super(ResultsPerPageTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.bammsi = BasicAnotherMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.bammsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

    def tearDown(self):
        connections['default']._index = self.old_unified_index
        super(ResultsPerPageTestCase, self).tearDown()

    def test_custom_results_per_page(self):
        response = self.client.get('/search/', {'q': 'haystack'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context[-1]['page'].object_list), 1)
        self.assertEqual(response.context[-1]['paginator'].per_page, 1)

        response = self.client.get('/search2/', {'q': 'hello world'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context[-1]['page'].object_list), 2)
        self.assertEqual(response.context[-1]['paginator'].per_page, 2)
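
# A hedged guess at what test_haystack.results_per_page_urls wires up: two
# search URLs whose views differ only in results_per_page. The use of path()
# and the exact factory arguments are assumptions.
from django.urls import path
from haystack.views import SearchView, search_view_factory

urlpatterns = [
    path("search/", search_view_factory(view_class=SearchView, results_per_page=1)),
    path("search2/", search_view_factory(view_class=SearchView, results_per_page=2)),
]
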
    def setUp(self):
        self.sample_objs = []

        for i in xrange(1, 4):
            mock = MockModel()
            mock.id = i
            mock.author = 'daniel%s' % i
            mock.pub_date = datetime.date(2009, 2, 25) - datetime.timedelta(days=i)
            self.sample_objs.append(mock)

        # Stow.
        # Point the backend at a URL that doesn't exist so we can watch the
        # sparks fly.
        self.old_es_url = settings.HAYSTACK_CONNECTIONS['default']['URL']
        settings.HAYSTACK_CONNECTIONS['default']['URL'] = "%s/foo/" % self.old_es_url
        self.cap = CaptureHandler()
        logging.getLogger('haystack').addHandler(self.cap)
        import haystack
        logging.getLogger('haystack').removeHandler(haystack.stream)

        # Set up the rest of the bits.
        self.old_ui = connections['default'].get_unified_index()
        ui = UnifiedIndex()
        self.smmi = ElasticsearchMockSearchIndex()
        ui.build(indexes=[self.smmi])
        connections['default']._index = ui
        self.sb = connections['default'].get_backend()
Example #9
class FacetedSearchFormTestCase(TestCase):
    def setUp(self):
        super(FacetedSearchFormTestCase, self).setUp()
        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.bammsi = BasicAnotherMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.bammsi])
        connections['default']._index = self.ui
        
        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())
        
        self.sqs = SearchQuerySet()
    
    def tearDown(self):
        connections['default']._index = self.old_unified_index
        super(FacetedSearchFormTestCase, self).tearDown()
    
    def test_init_with_selected_facets(self):
        sf = FacetedSearchForm({}, searchqueryset=self.sqs)
        self.assertEqual(sf.errors, {})
        self.assertEqual(sf.is_valid(), True)
        self.assertEqual(sf.selected_facets, [])
        
        sf = FacetedSearchForm({}, selected_facets=[], searchqueryset=self.sqs)
        self.assertEqual(sf.errors, {})
        self.assertEqual(sf.is_valid(), True)
        self.assertEqual(sf.selected_facets, [])
        
        sf = FacetedSearchForm({}, selected_facets=['author:daniel'], searchqueryset=self.sqs)
        self.assertEqual(sf.errors, {})
        self.assertEqual(sf.is_valid(), True)
        self.assertEqual(sf.selected_facets, ['author:daniel'])
        
        sf = FacetedSearchForm({}, selected_facets=['author:daniel', 'author:chris'], searchqueryset=self.sqs)
        self.assertEqual(sf.errors, {})
        self.assertEqual(sf.is_valid(), True)
        self.assertEqual(sf.selected_facets, ['author:daniel', 'author:chris'])
    
    def test_search(self):
        sf = FacetedSearchForm({'q': 'test'}, selected_facets=[], searchqueryset=self.sqs)
        sqs = sf.search()
        self.assertEqual(sqs.query.narrow_queries, set())
        
        # Test the "skip no-colon" bits.
        sf = FacetedSearchForm({'q': 'test'}, selected_facets=['authordaniel'], searchqueryset=self.sqs)
        sqs = sf.search()
        self.assertEqual(sqs.query.narrow_queries, set())
        
        sf = FacetedSearchForm({'q': 'test'}, selected_facets=['author:daniel'], searchqueryset=self.sqs)
        sqs = sf.search()
        self.assertEqual(sqs.query.narrow_queries, set([u'author:"daniel"']))
        
        sf = FacetedSearchForm({'q': 'test'}, selected_facets=['author:daniel', 'author:chris'], searchqueryset=self.sqs)
        sqs = sf.search()
        self.assertEqual(sqs.query.narrow_queries, set([u'author:"daniel"', u'author:"chris"']))
Example #10
    def test_load_all(self):
        # Models with character primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = CharPKMockSearchBackend('charpk')
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # Models with uuid primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = UUIDMockSearchBackend('uuid')
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # If nothing is handled, you get nothing.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        ui.build(indexes=[])
        connections['default']._index = ui

        sqs = self.msqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        connections['default']._index = old_ui
Example #11
class SearchFormTestCase(TestCase):
    def setUp(self):
        super(SearchFormTestCase, self).setUp()
        
        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.bammsi = BasicAnotherMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.bammsi])
        connections['default']._index = self.ui
        
        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())
        
        self.sqs = SearchQuerySet()
    
    def tearDown(self):
        connections['default']._index = self.old_unified_index
        super(SearchFormTestCase, self).tearDown()
    
    def test_unbound(self):
        sf = SearchForm({}, searchqueryset=self.sqs)
        
        self.assertEqual(sf.errors, {})
        self.assertEqual(sf.is_valid(), True)
        
        # This shouldn't blow up.
        sqs = sf.search()
        self.assertTrue(isinstance(sqs, EmptySearchQuerySet))
Example #12
    def test_models(self):
        # Stow.
        old_unified_index = connections["default"]._index
        ui = UnifiedIndex()
        bmmsi = BasicMockModelSearchIndex()
        bammsi = BasicAnotherMockModelSearchIndex()
        ui.build(indexes=[bmmsi, bammsi])
        connections["default"]._index = ui

        msqs = SearchQuerySet()

        sqs = msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 0)

        sqs = msqs.models(MockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

        sqs = msqs.models(MockModel, AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 2)

        # This will produce a warning.
        ui.build(indexes=[bmmsi])
        sqs = msqs.models(AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)
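
# The "This will produce a warning" comment refers to narrowing to a model
# whose index is no longer registered. A hedged sketch of asserting that,
# assuming the warning goes through the standard warnings module and that the
# message mentions registration:
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    msqs.models(AnotherMockModel)
    assert any("not registered" in str(w.message) for w in caught)  # assumed wording
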
Example #13
class ResultsPerPageTestCase(TestCase):
    fixtures = ["base_data"]

    def setUp(self):
        super(ResultsPerPageTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections["default"]._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.bammsi = BasicAnotherMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.bammsi])
        connections["default"]._index = self.ui

        # Update the "index".
        backend = connections["default"].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

    def tearDown(self):
        connections["default"]._index = self.old_unified_index
        super(ResultsPerPageTestCase, self).tearDown()

    def test_custom_results_per_page(self):
        response = self.client.get("/search/", {"q": "haystack"})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context[-1]["page"].object_list), 1)
        self.assertEqual(response.context[-1]["paginator"].per_page, 1)

        response = self.client.get("/search2/", {"q": "hello world"})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context[-1]["page"].object_list), 2)
        self.assertEqual(response.context[-1]["paginator"].per_page, 2)
Example #14
class SearchModelAdminTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(SearchModelAdminTestCase, self).setUp()

        # With the models set up, you get the proper bits.
        # Stow.
        self.old_ui = connections['solr'].get_unified_index()
        self.ui = UnifiedIndex()
        smmsi = SolrMockModelSearchIndex()
        self.ui.build(indexes=[smmsi])
        connections['solr']._index = self.ui

        # Wipe it clean.
        clear_solr_index()

        # Force indexing of the content.
        smmsi.update(using='solr')

        superuser = User.objects.create_superuser(
            username='******',
            password='******',
            email='*****@*****.**',
        )

    def tearDown(self):
        # Restore.
        connections['solr']._index = self.old_ui
        super(SearchModelAdminTestCase, self).tearDown()

    def test_usage(self):
        reset_search_queries()
        self.assertEqual(len(connections['solr'].queries), 0)

        self.assertEqual(self.client.login(username='******', password='******'), True)

        # First, non-search behavior.
        resp = self.client.get('/admin/core/mockmodel/')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(connections['solr'].queries), 0)
        self.assertEqual(resp.context['cl'].full_result_count, 23)

        # Then search behavior.
        resp = self.client.get('/admin/core/mockmodel/', data={'q': 'Haystack'})
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(connections['solr'].queries), 3)
        self.assertEqual(resp.context['cl'].full_result_count, 23)
        # Ensure they aren't search results.
        self.assertEqual(isinstance(resp.context['cl'].result_list[0], MockModel), True)

        result_pks = [i.pk for i in resp.context['cl'].result_list]
        self.assertIn(5, result_pks)

        # Make sure only changelist is affected.
        resp = self.client.get('/admin/core/mockmodel/1/')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(connections['solr'].queries), 3)
        self.assertEqual(resp.context['original'].id, 1)
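
# The admin test above assumes MockModel is registered with Haystack's
# SearchModelAdmin, so the changelist search box is answered by the search
# backend instead of the ORM. An illustrative sketch of such a registration;
# the haystack_connection value is an assumption matching the 'solr' alias
# used in this test case.
from django.contrib import admin
from haystack.admin import SearchModelAdmin


class MockModelSketchAdmin(SearchModelAdmin):
    haystack_connection = 'solr'  # assumption
    search_fields = ('text',)


admin.site.register(MockModel, MockModelSketchAdmin)
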
class LiveWhooshMoreLikeThisTestCase(WhooshTestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveWhooshMoreLikeThisTestCase, self).setUp()

        # Stow.
        self.old_ui = connections['whoosh'].get_unified_index()
        self.ui = UnifiedIndex()
        self.wmmi = WhooshMockSearchIndex()
        self.wamsi = WhooshAnotherMockSearchIndex()
        self.ui.build(indexes=[self.wmmi, self.wamsi])
        self.sb = connections['whoosh'].get_backend()
        connections['whoosh']._index = self.ui

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
        self.sb.delete_index()

        self.wmmi.update()
        self.wamsi.update()

        self.sqs = SearchQuerySet('whoosh')

    def tearDown(self):
        connections['whoosh']._index = self.old_ui
        super(LiveWhooshMoreLikeThisTestCase, self).tearDown()

    # We expect failure here because, despite not changing the code, Whoosh
    # 2.5.1 returns incorrect counts/results. Huzzah.
    @unittest.expectedFailure
    def test_more_like_this(self):
        mlt = self.sqs.more_like_this(MockModel.objects.get(pk=22))
        self.assertEqual(mlt.count(), 22)
        self.assertEqual(sorted([result.pk for result in mlt]), sorted([u'9', u'8', u'7', u'6', u'5', u'4', u'3', u'2', u'1', u'21', u'20', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12', u'11', u'10', u'23']))
        self.assertEqual(len([result.pk for result in mlt]), 22)

        alt_mlt = self.sqs.filter(name='daniel3').more_like_this(MockModel.objects.get(pk=13))
        self.assertEqual(alt_mlt.count(), 8)
        self.assertEqual(sorted([result.pk for result in alt_mlt]), sorted([u'4', u'3', u'22', u'19', u'17', u'16', u'10', u'23']))
        self.assertEqual(len([result.pk for result in alt_mlt]), 8)

        alt_mlt_with_models = self.sqs.models(MockModel).more_like_this(MockModel.objects.get(pk=11))
        self.assertEqual(alt_mlt_with_models.count(), 22)
        self.assertEqual(sorted([result.pk for result in alt_mlt_with_models]), sorted([u'9', u'8', u'7', u'6', u'5', u'4', u'3', u'2', u'1', u'22', u'21', u'20', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12', u'10', u'23']))
        self.assertEqual(len([result.pk for result in alt_mlt_with_models]), 22)

        if hasattr(MockModel.objects, 'defer'):
            # Make sure MLT works with deferred bits.
            mi = MockModel.objects.defer('foo').get(pk=21)
            self.assertEqual(mi._deferred, True)
            deferred = self.sqs.models(MockModel).more_like_this(mi)
            self.assertEqual(deferred.count(), 0)
            self.assertEqual([result.pk for result in deferred], [])
            self.assertEqual(len([result.pk for result in deferred]), 0)

        # Ensure that swapping the ``result_class`` works.
        self.assertTrue(isinstance(self.sqs.result_class(MockSearchResult).more_like_this(MockModel.objects.get(pk=21))[0], MockSearchResult))
class LiveWhooshAutocompleteTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveWhooshAutocompleteTestCase, self).setUp()

        # Stow.
        temp_path = os.path.join('tmp', 'test_whoosh_query')
        self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
        settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path

        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.wacsi = WhooshAutocompleteMockModelSearchIndex()
        self.ui.build(indexes=[self.wacsi])
        self.sb = connections['default'].get_backend()
        connections['default']._index = self.ui

        # Stow.
        import haystack
        self.old_debug = settings.DEBUG
        settings.DEBUG = True

        self.sb.setup()
        self.sqs = SearchQuerySet()

        # Wipe it clean.
        self.sqs.query.backend.clear()

        for mock in MockModel.objects.all():
            self.wacsi.update_object(mock)

    def tearDown(self):
        if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
            shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])

        settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(LiveWhooshAutocompleteTestCase, self).tearDown()

    def test_autocomplete(self):
        autocomplete = self.sqs.autocomplete(text_auto='mod')
        self.assertEqual(autocomplete.count(), 5)
        self.assertEqual([result.pk for result in autocomplete], [u'1', u'12', u'6', u'7', u'14'])
        self.assertTrue('mod' in autocomplete[0].text.lower())
        self.assertTrue('mod' in autocomplete[1].text.lower())
        self.assertTrue('mod' in autocomplete[2].text.lower())
        self.assertTrue('mod' in autocomplete[3].text.lower())
        self.assertTrue('mod' in autocomplete[4].text.lower())
        self.assertEqual(len([result.pk for result in autocomplete]), 5)

    def test_edgengram_regression(self):
        autocomplete = self.sqs.autocomplete(text_auto='ngm')
        self.assertEqual(autocomplete.count(), 0)

    def test_extra_whitespace(self):
        autocomplete = self.sqs.autocomplete(text_auto='mod ')
        self.assertEqual(autocomplete.count(), 5)
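
# The 'mod' and 'mod ' prefixes match because text_auto is an edge n-gram
# field. An illustrative sketch of the autocomplete index used above; the
# model_attr is an assumption, not the exact
# WhooshAutocompleteMockModelSearchIndex definition.
from haystack import indexes


class AutocompleteSketchIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    text_auto = indexes.EdgeNgramField(model_attr='foo')

    def get_model(self):
        return MockModel
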
Example #17
class WhooshBoostBackendTestCase(TestCase):
    def setUp(self):
        super(WhooshBoostBackendTestCase, self).setUp()
        
        # Stow.
        temp_path = os.path.join('tmp', 'test_whoosh_query')
        self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
        settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path
        
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.wmmi = WhooshBoostMockSearchIndex()
        self.ui.build(indexes=[self.wmmi])
        self.sb = connections['default'].get_backend()
        connections['default']._index = self.ui
        
        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
        self.sb.delete_index()
        self.sample_objs = []
        
        for i in xrange(1, 5):
            mock = AFourthMockModel()
            mock.id = i
            
            if i % 2:
                mock.author = 'daniel'
                mock.editor = 'david'
            else:
                mock.author = 'david'
                mock.editor = 'daniel'
            
            mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
            self.sample_objs.append(mock)
    
    def tearDown(self):
        if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
            shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])
        
        settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
        connections['default']._index = self.ui
        super(WhooshBoostBackendTestCase, self).tearDown()
    
    def test_boost(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.raw_whoosh = self.raw_whoosh.refresh()
        searcher = self.raw_whoosh.searcher()
        self.assertEqual(len(searcher.search(self.parser.parse(u'*'), limit=1000)), 4)
        
        results = SearchQuerySet().filter(SQ(author='daniel') | SQ(editor='daniel'))
        
        self.assertEqual([result.id for result in results], [
            'core.afourthmockmodel.1',
            'core.afourthmockmodel.3',
            'core.afourthmockmodel.2',
            'core.afourthmockmodel.4'
        ])
        self.assertEqual(results[0].boost, 1.1)
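
# results[0].boost == 1.1 implies a document-level boost. Haystack's documented
# way to set one is to override prepare() and add a 'boost' key; whether
# WhooshBoostMockSearchIndex does exactly this is an assumption.
from haystack import indexes


class BoostSketchIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    author = indexes.CharField(model_attr='author')
    editor = indexes.CharField(model_attr='editor')

    def get_model(self):
        return AFourthMockModel

    def prepare(self, obj):
        data = super(BoostSketchIndex, self).prepare(obj)
        data['boost'] = 1.1
        return data
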
class LiveWhooshRamStorageTestCase(TestCase):
    def setUp(self):
        super(LiveWhooshRamStorageTestCase, self).setUp()

        # Stow.
        self.old_whoosh_storage = settings.HAYSTACK_CONNECTIONS['whoosh'].get('STORAGE', 'file')
        settings.HAYSTACK_CONNECTIONS['whoosh']['STORAGE'] = 'ram'

        self.old_ui = connections['whoosh'].get_unified_index()
        self.ui = UnifiedIndex()
        self.wrtsi = WhooshRoundTripSearchIndex()
        self.ui.build(indexes=[self.wrtsi])
        self.sb = connections['whoosh'].get_backend()
        connections['whoosh']._index = self.ui

        # Stow.
        import haystack

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)

        self.sqs = SearchQuerySet('whoosh')

        # Wipe it clean.
        self.sqs.query.backend.clear()

        # Fake indexing.
        mock = MockModel()
        mock.id = 1
        self.sb.update(self.wrtsi, [mock])

    def tearDown(self):
        self.sqs.query.backend.clear()

        settings.HAYSTACK_CONNECTIONS['whoosh']['STORAGE'] = self.old_whoosh_storage
        connections['whoosh']._index = self.old_ui
        super(LiveWhooshRamStorageTestCase, self).tearDown()

    def test_ram_storage(self):
        results = self.sqs.filter(id='core.mockmodel.1')

        # Sanity check.
        self.assertEqual(results.count(), 1)

        # Check the individual fields.
        result = results[0]
        self.assertEqual(result.id, 'core.mockmodel.1')
        self.assertEqual(result.text, 'This is some example text.')
        self.assertEqual(result.name, 'Mister Pants')
        self.assertEqual(result.is_active, True)
        self.assertEqual(result.post_count, 25)
        self.assertEqual(result.average_rating, 3.6)
        self.assertEqual(result.pub_date, datetime(2009, 11, 21, 0, 0))
        self.assertEqual(result.created, datetime(2009, 11, 21, 21, 31, 00))
        self.assertEqual(result.tags, ['staff', 'outdoor', 'activist', 'scientist'])
        self.assertEqual(result.sites, [u'3', u'5', u'1'])
        self.assertEqual(result.empty_list, [])
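
# A hedged sketch of the connection settings this test depends on; the PATH
# value is illustrative and stops mattering once setUp() switches STORAGE to
# 'ram'.
HAYSTACK_CONNECTIONS = {
    'whoosh': {
        'ENGINE': 'haystack.backends.whoosh_backend.WhooshEngine',
        'PATH': '/tmp/test_whoosh_index',  # assumption
        'STORAGE': 'file',  # setUp() above flips this to 'ram'
    },
}
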
Example #19
class ModelSearchFormTestCase(TestCase):
    def setUp(self):
        super(ModelSearchFormTestCase, self).setUp()
        # Stow.
        self.old_unified_index = connections["default"]._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.bammsi = BasicAnotherMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.bammsi])
        connections["default"]._index = self.ui

        # Update the "index".
        backend = connections["default"].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.sqs = SearchQuerySet()

    def tearDown(self):
        connections["default"]._index = self.old_unified_index
        super(ModelSearchFormTestCase, self).tearDown()

    def test_models_regression_1(self):
        # Regression for issue #1.
        msf = ModelSearchForm(
            {"query": "test", "models": ["core.mockmodel", "core.anothermockmodel"]},
            searchqueryset=self.sqs,
        )

        self.assertEqual(
            msf.fields["models"].choices,
            [
                ("core.anothermockmodel", "Another mock models"),
                ("core.mockmodel", "Mock models"),
            ],
        )
        self.assertEqual(msf.errors, {})
        self.assertEqual(msf.is_valid(), True)

        sqs_with_models = msf.search()
        self.assertEqual(len(sqs_with_models.query.models), 2)

    def test_model_choices(self):
        self.assertEqual(len(model_choices()), 2)
        self.assertEqual(
            [option[1] for option in model_choices()],
            ["Another mock models", "Mock models"],
        )

    def test_model_choices_unicode(self):
        stowed_verbose_name_plural = MockModel._meta.verbose_name_plural
        MockModel._meta.verbose_name_plural = "☃"
        self.assertEqual(len(model_choices()), 2)
        self.assertEqual(
            [option[1] for option in model_choices()], ["Another mock models", "☃"]
        )
        MockModel._meta.verbose_name_plural = stowed_verbose_name_plural
class ManagementCommandTestCase(TestCase):
    fixtures = ['bulk_data.json']
    
    def setUp(self):
        super(ManagementCommandTestCase, self).setUp()
        self.solr = pysolr.Solr(settings.HAYSTACK_CONNECTIONS['default']['URL'])
        
        # Stow.
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SolrMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui
    
    def tearDown(self):
        connections['default']._index = self.old_ui
        super(ManagementCommandTestCase, self).tearDown()
    
    def test_basic_commands(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)
        
        call_command('update_index', verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)
        
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)
        
        call_command('rebuild_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)
    
    def test_remove(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)
        
        call_command('update_index', verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)
        
        # Remove a model instance.
        MockModel.objects.get(pk=1).delete()
        self.assertEqual(self.solr.search('*:*').hits, 23)
        
        # Plain ``update_index`` doesn't fix it.
        call_command('update_index', verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)
        
        # With the remove flag, it's gone.
        call_command('update_index', remove=True, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 22)
    
    def test_multiprocessing(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)
        
        # Watch the output, make sure there are multiple pids.
        call_command('update_index', verbosity=2, workers=2, batchsize=5)
        self.assertEqual(self.solr.search('*:*').hits, 23)
class LiveElasticsearchSearchQueryTestCase(TestCase):
    fixtures = ['initial_data.json']

    def setUp(self):
        super(LiveElasticsearchSearchQueryTestCase, self).setUp()

        # Wipe it clean.
        clear_elasticsearch_index()

        # Stow.
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = ElasticsearchMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui
        self.sb = connections['default'].get_backend()
        self.sq = connections['default'].get_query()

        # Force indexing of the content.
        self.smmi.update()

    def tearDown(self):
        connections['default']._index = self.old_ui
        super(LiveElasticsearchSearchQueryTestCase, self).tearDown()

    def test_log_query(self):
        from django.conf import settings
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Stow.
        old_debug = settings.DEBUG
        settings.DEBUG = False

        len(self.sq.get_results())
        self.assertEqual(len(connections['default'].queries), 0)

        settings.DEBUG = True
        # Redefine it to clear out the cached results.
        self.sq = connections['default'].query()
        self.sq.add_filter(SQ(name='bar'))
        len(self.sq.get_results())
        self.assertEqual(len(connections['default'].queries), 1)
        self.assertEqual(connections['default'].queries[0]['query_string'], 'name:(bar)')

        # And again, for good measure.
        self.sq = connections['default'].query()
        self.sq.add_filter(SQ(name='bar'))
        self.sq.add_filter(SQ(text='moof'))
        len(self.sq.get_results())
        self.assertEqual(len(connections['default'].queries), 2)
        self.assertEqual(connections['default'].queries[0]['query_string'], 'name:(bar)')
        self.assertEqual(connections['default'].queries[1]['query_string'], u'(name:(bar) AND text:(moof))')

        # Restore.
        settings.DEBUG = old_debug
Example #22
class SearchModelAdminTestCase(TestCase):
    fixtures = ["bulk_data.json"]

    def setUp(self):
        super(SearchModelAdminTestCase, self).setUp()

        # With the models set up, you get the proper bits.
        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections["default"].get_unified_index()
        self.ui = UnifiedIndex()
        smmsi = SolrMockModelSearchIndex()
        self.ui.build(indexes=[smmsi])
        connections["default"]._index = self.ui

        # Wipe it clean.
        clear_solr_index()

        # Force indexing of the content.
        smmsi.update()

        superuser = User.objects.create_superuser(username="******", password="******", email="*****@*****.**")

    def tearDown(self):
        # Restore.
        connections["default"]._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(SearchModelAdminTestCase, self).tearDown()

    def test_usage(self):
        reset_search_queries()
        self.assertEqual(len(connections["default"].queries), 0)

        self.assertEqual(self.client.login(username="******", password="******"), True)

        # First, non-search behavior.
        resp = self.client.get("/admin/core/mockmodel/")
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(connections["default"].queries), 0)
        self.assertEqual(resp.context["cl"].full_result_count, 23)

        # Then search behavior.
        resp = self.client.get("/admin/core/mockmodel/", data={"q": "Haystack"})
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(connections["default"].queries), 3)
        self.assertEqual(resp.context["cl"].full_result_count, 23)
        # Ensure they aren't search results.
        self.assertEqual(isinstance(resp.context["cl"].result_list[0], MockModel), True)
        self.assertEqual(resp.context["cl"].result_list[0].id, 5)

        # Make sure only changelist is affected.
        resp = self.client.get("/admin/core/mockmodel/1/")
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(connections["default"].queries), 3)
        self.assertEqual(resp.context["original"].id, 1)
class LiveElasticsearchMoreLikeThisTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveElasticsearchMoreLikeThisTestCase, self).setUp()

        # Wipe it clean.
        clear_elasticsearch_index()

        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = ElasticsearchMockModelSearchIndex()
        self.sammi = ElasticsearchAnotherMockModelSearchIndex()
        self.ui.build(indexes=[self.smmi, self.sammi])
        connections['default']._index = self.ui

        self.sqs = SearchQuerySet()

        self.smmi.update()
        self.sammi.update()


    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        super(LiveElasticsearchMoreLikeThisTestCase, self).tearDown()

    @unittest.expectedFailure
    def test_more_like_this(self):
        mlt = self.sqs.more_like_this(MockModel.objects.get(pk=1))
        self.assertEqual(mlt.count(), 4)
        self.assertEqual([result.pk for result in mlt], [u'2', u'6', u'16', u'23'])
        self.assertEqual(len([result.pk for result in mlt]), 4)

        alt_mlt = self.sqs.filter(name='daniel3').more_like_this(MockModel.objects.get(pk=2))
        self.assertEqual(alt_mlt.count(), 6)
        self.assertEqual([result.pk for result in alt_mlt], [u'2', u'6', u'16', u'23', u'1', u'11'])
        self.assertEqual(len([result.pk for result in alt_mlt]), 6)

        alt_mlt_with_models = self.sqs.models(MockModel).more_like_this(MockModel.objects.get(pk=1))
        self.assertEqual(alt_mlt_with_models.count(), 4)
        self.assertEqual([result.pk for result in alt_mlt_with_models], [u'2', u'6', u'16', u'23'])
        self.assertEqual(len([result.pk for result in alt_mlt_with_models]), 4)

        if hasattr(MockModel.objects, 'defer'):
            # Make sure MLT works with deferred bits.
            mi = MockModel.objects.defer('foo').get(pk=1)
            self.assertEqual(mi._deferred, True)
            deferred = self.sqs.models(MockModel).more_like_this(mi)
            self.assertEqual(deferred.count(), 0)
            self.assertEqual([result.pk for result in deferred], [])
            self.assertEqual(len([result.pk for result in deferred]), 0)

        # Ensure that swapping the ``result_class`` works.
        self.assertTrue(isinstance(self.sqs.result_class(MockSearchResult).more_like_this(MockModel.objects.get(pk=1))[0], MockSearchResult))
Example #24
class FacetedSearchViewTestCase(TestCase):
    def setUp(self):
        super(FacetedSearchViewTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections["default"]._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.bammsi = BasicAnotherMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.bammsi])
        connections["default"]._index = self.ui

        # Update the "index".
        backend = connections["default"].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

    def tearDown(self):
        connections["default"]._index = self.old_unified_index
        super(FacetedSearchViewTestCase, self).tearDown()

    def test_search_no_query(self):
        response = self.client.get(reverse("haystack_faceted_search"))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context["facets"], {})

    def test_empty_results(self):
        fsv = FacetedSearchView()
        fsv.request = HttpRequest()
        fsv.request.GET = QueryDict("")
        fsv.form = fsv.build_form()
        self.assertTrue(isinstance(fsv.get_results(), EmptySearchQuerySet))

    def test_default_form(self):
        fsv = FacetedSearchView()
        fsv.request = HttpRequest()
        fsv.request.GET = QueryDict("")
        fsv.form = fsv.build_form()
        self.assertTrue(isinstance(fsv.form, FacetedSearchForm))

    def test_list_selected_facets(self):
        fsv = FacetedSearchView()
        fsv.request = HttpRequest()
        fsv.request.GET = QueryDict("")
        fsv.form = fsv.build_form()
        self.assertEqual(fsv.form.selected_facets, [])

        fsv = FacetedSearchView()
        fsv.request = HttpRequest()
        fsv.request.GET = QueryDict(
            "selected_facets=author:daniel&selected_facets=author:chris"
        )
        fsv.form = fsv.build_form()
        self.assertEqual(fsv.form.selected_facets, ["author:daniel", "author:chris"])
    def setUp(self):
        super(LiveXapianSearchQueryTestCase, self).setUp()

        self.old_ui = connections['default'].get_unified_index()
        ui = UnifiedIndex()
        index = LiveXapianMockSearchIndex()
        ui.build(indexes=[index])
        backend = connections['default'].get_backend()
        connections['default']._index = ui
        backend.update(index, MockModel.objects.all())

        self.sq = connections['default'].get_query()
    def setUp(self):
        check_solr()
        super(AlteredInternalNamesTestCase, self).setUp()

        self.old_ui = connections['solr'].get_unified_index()
        ui = UnifiedIndex()
        ui.build(indexes=[MockModelSearchIndex()])
        connections['solr']._index = ui

        constants.ID = 'my_id'
        constants.DJANGO_CT = 'my_django_ct'
        constants.DJANGO_ID = 'my_django_id'
class LiveSolrAutocompleteTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveSolrAutocompleteTestCase, self).setUp()

        # Wipe it clean.
        clear_solr_index()

        # Stow.
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SolrAutocompleteMockModelSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui

        self.sqs = SearchQuerySet()

        self.smmi.update()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        super(LiveSolrAutocompleteTestCase, self).tearDown()

    def test_autocomplete(self):
        autocomplete = self.sqs.autocomplete(text_auto='mod')
        self.assertEqual(autocomplete.count(), 5)
        self.assertEqual([result.pk for result in autocomplete], ['1', '12', '6', '7', '14'])
        self.assertTrue('mod' in autocomplete[0].text.lower())
        self.assertTrue('mod' in autocomplete[1].text.lower())
        self.assertTrue('mod' in autocomplete[2].text.lower())
        self.assertTrue('mod' in autocomplete[3].text.lower())
        self.assertTrue('mod' in autocomplete[4].text.lower())
        self.assertEqual(len([result.pk for result in autocomplete]), 5)

        # Test multiple words.
        autocomplete_2 = self.sqs.autocomplete(text_auto='your mod')
        self.assertEqual(autocomplete_2.count(), 3)
        self.assertEqual([result.pk for result in autocomplete_2], ['1', '14', '6'])
        self.assertTrue('your' in autocomplete_2[0].text.lower())
        self.assertTrue('mod' in autocomplete_2[0].text.lower())
        self.assertTrue('your' in autocomplete_2[1].text.lower())
        self.assertTrue('mod' in autocomplete_2[1].text.lower())
        self.assertTrue('your' in autocomplete_2[2].text.lower())
        self.assertTrue('mod' in autocomplete_2[2].text.lower())
        self.assertEqual(len([result.pk for result in autocomplete_2]), 3)

        # Test multiple fields.
        autocomplete_3 = self.sqs.autocomplete(text_auto='Django', name_auto='dan')
        self.assertEqual(autocomplete_3.count(), 4)
        self.assertEqual([result.pk for result in autocomplete_3], ['12', '1', '14', '22'])
        self.assertEqual(len([result.pk for result in autocomplete_3]), 4)
    def test_verify_type(self):
        old_ui = connections['default'].get_unified_index()
        ui = UnifiedIndex()
        smtmmi = SolrMaintainTypeMockSearchIndex()
        ui.build(indexes=[smtmmi])
        connections['default']._index = ui
        sb = connections['default'].get_backend()
        sb.update(smtmmi, self.sample_objs)

        self.assertEqual(sb.search('*:*')['hits'], 3)
        self.assertEqual([result.month for result in sb.search('*:*')['results']], [u'02', u'02', u'02'])
        connections['default']._index = old_ui
Example #29
class LiveSimpleSearchQuerySetTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveSimpleSearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SimpleMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui

        self.sample_objs = MockModel.objects.all()
        self.sqs = SearchQuerySet()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(LiveSimpleSearchQuerySetTestCase, self).tearDown()

    def test_general_queries(self):
        # For now, just make sure these don't throw an exception.
        # They won't work until the simple backend is improved.
        self.assertTrue(len(self.sqs.auto_query('daniel')) > 0)
        self.assertTrue(len(self.sqs.filter(text='index')) > 0)
        self.assertTrue(len(self.sqs.exclude(name='daniel')) > 0)
        self.assertTrue(len(self.sqs.order_by('-pub_date')) > 0)

    def test_general_queries_unicode(self):
        self.assertEqual(len(self.sqs.auto_query(u'Привет')), 0)

    def test_more_like_this(self):
        # MLT shouldn't be horribly broken. This used to throw an exception.
        mm1 = MockModel.objects.get(pk=1)
        self.assertEqual(len(self.sqs.filter(text=1).more_like_this(mm1)), 0)

    def test_values_queries(self):
        sqs = self.sqs.auto_query('daniel')
        self.assertTrue(len(sqs) > 0)

        flat_scores = sqs.values_list("score", flat=True)
        self.assertEqual(flat_scores[0], 0)

        scores = sqs.values_list("id", "score")
        self.assertEqual(scores[0], [1, 0])

        scores_dict = sqs.values("id", "score")
        self.assertEqual(scores_dict[0], {"id": 1, "score": 0})
    def test_verify_type(self):
        old_ui = connections['whoosh'].get_unified_index()
        ui = UnifiedIndex()
        wmtmmi = WhooshMaintainTypeMockSearchIndex()
        ui.build(indexes=[wmtmmi])
        connections['whoosh']._index = ui
        sb = connections['whoosh'].get_backend()
        sb.setup()
        sb.update(wmtmmi, self.sample_objs)

        self.assertEqual(sb.search(u'*')['hits'], 23)
        self.assertEqual([result.month for result in sb.search(u'*')['results']], [u'06', u'07', u'06', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07'])
        connections['whoosh']._index = old_ui
Example #31
class LiveWhooshMoreLikeThisTestCase(WhooshTestCase):
    fixtures = ["bulk_data.json"]

    def setUp(self):
        super().setUp()

        # Stow.
        self.old_ui = connections["whoosh"].get_unified_index()
        self.ui = UnifiedIndex()
        self.wmmi = WhooshMockSearchIndex()
        self.wamsi = WhooshAnotherMockSearchIndex()
        self.ui.build(indexes=[self.wmmi, self.wamsi])
        self.sb = connections["whoosh"].get_backend()
        connections["whoosh"]._index = self.ui

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
        self.sb.delete_index()

        self.wmmi.update()
        self.wamsi.update()

        self.sqs = SearchQuerySet("whoosh")

    def tearDown(self):
        connections["whoosh"]._index = self.old_ui
        super().tearDown()

    # We expect failure here because, despite not changing the code, Whoosh
    # 2.5.1 returns incorrect counts/results. Huzzah.
    @unittest.expectedFailure
    def test_more_like_this(self):
        mlt = self.sqs.more_like_this(MockModel.objects.get(pk=22))
        self.assertEqual(mlt.count(), 22)
        self.assertEqual(
            sorted([result.pk for result in mlt]),
            sorted(
                [
                    "9",
                    "8",
                    "7",
                    "6",
                    "5",
                    "4",
                    "3",
                    "2",
                    "1",
                    "21",
                    "20",
                    "19",
                    "18",
                    "17",
                    "16",
                    "15",
                    "14",
                    "13",
                    "12",
                    "11",
                    "10",
                    "23",
                ]
            ),
        )
        self.assertEqual(len([result.pk for result in mlt]), 22)

        alt_mlt = self.sqs.filter(name="daniel3").more_like_this(
            MockModel.objects.get(pk=13)
        )
        self.assertEqual(alt_mlt.count(), 8)
        self.assertEqual(
            sorted([result.pk for result in alt_mlt]),
            sorted(["4", "3", "22", "19", "17", "16", "10", "23"]),
        )
        self.assertEqual(len([result.pk for result in alt_mlt]), 8)

        alt_mlt_with_models = self.sqs.models(MockModel).more_like_this(
            MockModel.objects.get(pk=11)
        )
        self.assertEqual(alt_mlt_with_models.count(), 22)
        self.assertEqual(
            sorted([result.pk for result in alt_mlt_with_models]),
            sorted(
                [
                    "9",
                    "8",
                    "7",
                    "6",
                    "5",
                    "4",
                    "3",
                    "2",
                    "1",
                    "22",
                    "21",
                    "20",
                    "19",
                    "18",
                    "17",
                    "16",
                    "15",
                    "14",
                    "13",
                    "12",
                    "10",
                    "23",
                ]
            ),
        )
        self.assertEqual(len([result.pk for result in alt_mlt_with_models]), 22)

        if hasattr(MockModel.objects, "defer"):
            # Make sure MLT works with deferred bits.
            mi = MockModel.objects.defer("foo").get(pk=22)
            deferred = self.sqs.models(MockModel).more_like_this(mi)
            self.assertEqual(deferred.count(), 22)
            self.assertEqual(
                sorted([result.pk for result in deferred]),
                sorted(
                    [
                        "9",
                        "8",
                        "7",
                        "6",
                        "5",
                        "4",
                        "3",
                        "2",
                        "1",
                        "21",
                        "20",
                        "19",
                        "18",
                        "17",
                        "16",
                        "15",
                        "14",
                        "13",
                        "12",
                        "11",
                        "10",
                        "23",
                    ]
                ),
            )
            self.assertEqual(len([result.pk for result in deferred]), 22)

        # Ensure that swapping the ``result_class`` works.
        self.assertTrue(
            isinstance(
                self.sqs.result_class(MockSearchResult).more_like_this(
                    MockModel.objects.get(pk=21)
                )[0],
                MockSearchResult,
            )
        )
class LiveWhooshRoundTripTestCase(TestCase):
    def setUp(self):
        super(LiveWhooshRoundTripTestCase, self).setUp()

        # Stow.
        temp_path = os.path.join('tmp', 'test_whoosh_query')
        self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
        settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path

        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.wrtsi = WhooshRoundTripSearchIndex()
        self.ui.build(indexes=[self.wrtsi])
        self.sb = connections['default'].get_backend()
        connections['default']._index = self.ui

        self.old_debug = settings.DEBUG
        settings.DEBUG = True

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name,
                                  schema=self.sb.schema)
        self.sb.delete_index()

        self.sqs = SearchQuerySet()

        # Wipe it clean.
        self.sqs.query.backend.clear()

        # Fake indexing.
        mock = MockModel()
        mock.id = 1
        self.sb.update(self.wrtsi, [mock])

    def tearDown(self):
        if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
            shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])

        settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
        settings.DEBUG = self.old_debug
        super(LiveWhooshRoundTripTestCase, self).tearDown()

    def test_round_trip(self):
        results = self.sqs.filter(id='core.mockmodel.1')

        # Sanity check.
        self.assertEqual(results.count(), 1)

        # Check the individual fields.
        result = results[0]
        self.assertEqual(result.id, 'core.mockmodel.1')
        self.assertEqual(result.text, 'This is some example text.')
        self.assertEqual(result.name, 'Mister Pants')
        self.assertEqual(result.is_active, True)
        self.assertEqual(result.post_count, 25)
        self.assertEqual(result.average_rating, 3.6)
        self.assertEqual(result.price, u'24.99')
        self.assertEqual(result.pub_date, datetime(2009, 11, 21, 0, 0))
        self.assertEqual(result.created, datetime(2009, 11, 21, 21, 31, 00))
        self.assertEqual(result.tags,
                         ['staff', 'outdoor', 'activist', 'scientist'])
        self.assertEqual(result.sites, [u'3', u'5', u'1'])
        self.assertEqual(result.empty_list, [])

        # Check boolean filtering...
        results = self.sqs.filter(id='core.mockmodel.1', is_active=True)
        self.assertEqual(results.count(), 1)
Example #33
class LiveSolrSearchQueryTestCase(TestCase):
    fixtures = ['initial_data.json']

    def setUp(self):
        super(LiveSolrSearchQueryTestCase, self).setUp()

        # Wipe it clean.
        clear_solr_index()

        # Stow.
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SolrMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui
        self.sb = connections['default'].get_backend()
        self.sq = connections['default'].get_query()

        # Force indexing of the content.
        self.smmi.update()

    def tearDown(self):
        connections['default']._index = self.old_ui
        super(LiveSolrSearchQueryTestCase, self).tearDown()

    def test_get_spelling(self):
        self.sq.add_filter(SQ(content='Indexy'))
        self.assertEqual(self.sq.get_spelling_suggestion(), u'(index)')
        self.assertEqual(self.sq.get_spelling_suggestion('indexy'), u'(index)')

    def test_log_query(self):
        from django.conf import settings
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Stow.
        old_debug = settings.DEBUG
        settings.DEBUG = False

        len(self.sq.get_results())
        self.assertEqual(len(connections['default'].queries), 0)

        settings.DEBUG = True
        # Redefine it to clear out the cached results.
        self.sq = connections['default'].query()
        self.sq.add_filter(SQ(name='bar'))
        len(self.sq.get_results())
        self.assertEqual(len(connections['default'].queries), 1)
        self.assertEqual(connections['default'].queries[0]['query_string'],
                         'name:(bar)')

        # And again, for good measure.
        self.sq = connections['default'].query()
        self.sq.add_filter(SQ(name='bar'))
        self.sq.add_filter(SQ(text='moof'))
        len(self.sq.get_results())
        self.assertEqual(len(connections['default'].queries), 2)
        self.assertEqual(connections['default'].queries[0]['query_string'],
                         'name:(bar)')
        self.assertEqual(connections['default'].queries[1]['query_string'],
                         u'(name:(bar) AND text:(moof))')

        # Restore.
        settings.DEBUG = old_debug
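
# For reference: Haystack only records queries on ``connections[alias].queries``
# when ``settings.DEBUG`` is True, which is exactly what the test above relies
# on. A minimal way to inspect that log outside of a test (names as used in
# these examples):
#
#     from haystack import connections
#
#     for entry in connections['default'].queries:
#         print(entry['query_string'])
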
class FacetedSearchFormTestCase(TestCase):
    def setUp(self):
        super(FacetedSearchFormTestCase, self).setUp()
        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.bammsi = BasicAnotherMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.bammsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.sqs = SearchQuerySet()

    def tearDown(self):
        connections['default']._index = self.old_unified_index
        super(FacetedSearchFormTestCase, self).tearDown()

    def test_init_with_selected_facets(self):
        sf = FacetedSearchForm({}, searchqueryset=self.sqs)
        self.assertEqual(sf.errors, {})
        self.assertEqual(sf.is_valid(), True)
        self.assertEqual(sf.selected_facets, [])

        sf = FacetedSearchForm({}, selected_facets=[], searchqueryset=self.sqs)
        self.assertEqual(sf.errors, {})
        self.assertEqual(sf.is_valid(), True)
        self.assertEqual(sf.selected_facets, [])

        sf = FacetedSearchForm({},
                               selected_facets=['author:daniel'],
                               searchqueryset=self.sqs)
        self.assertEqual(sf.errors, {})
        self.assertEqual(sf.is_valid(), True)
        self.assertEqual(sf.selected_facets, ['author:daniel'])

        sf = FacetedSearchForm(
            {},
            selected_facets=['author:daniel', 'author:chris'],
            searchqueryset=self.sqs)
        self.assertEqual(sf.errors, {})
        self.assertEqual(sf.is_valid(), True)
        self.assertEqual(sf.selected_facets, ['author:daniel', 'author:chris'])

    def test_search(self):
        sf = FacetedSearchForm({'q': 'test'},
                               selected_facets=[],
                               searchqueryset=self.sqs)
        sqs = sf.search()
        self.assertEqual(sqs.query.narrow_queries, set())

        # Test the "skip no-colon" bits.
        sf = FacetedSearchForm({'q': 'test'},
                               selected_facets=['authordaniel'],
                               searchqueryset=self.sqs)
        sqs = sf.search()
        self.assertEqual(sqs.query.narrow_queries, set())

        sf = FacetedSearchForm({'q': 'test'},
                               selected_facets=['author:daniel'],
                               searchqueryset=self.sqs)
        sqs = sf.search()
        self.assertEqual(sqs.query.narrow_queries, set([u'author:"daniel"']))

        sf = FacetedSearchForm(
            {'q': 'test'},
            selected_facets=['author:daniel', 'author:chris'],
            searchqueryset=self.sqs)
        sqs = sf.search()
        self.assertEqual(sqs.query.narrow_queries,
                         set([u'author:"daniel"', u'author:"chris"']))
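
# In a real view the facets usually arrive on the querystring, e.g.
# ``?q=test&selected_facets=author:daniel&selected_facets=author:chris``.
# An illustrative (not Haystack-verbatim) way to wire that up:
#
#     form = FacetedSearchForm(
#         request.GET,
#         selected_facets=request.GET.getlist('selected_facets'),
#         searchqueryset=SearchQuerySet().facet('author'))
#     results = form.search()
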
Example #35
class SearchViewTestCase(TestCase):
    fixtures = ['base_data']

    def setUp(self):
        super(SearchViewTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.bammsi = BasicAnotherMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.bammsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

    def tearDown(self):
        connections['default']._index = self.old_unified_index
        super(SearchViewTestCase, self).tearDown()

    def test_search_no_query(self):
        response = self.client.get(reverse('haystack_search'))
        self.assertEqual(response.status_code, 200)

    def test_search_query(self):
        response = self.client.get(reverse('haystack_search'), {'q': 'haystack'})
        self.assertEqual(response.status_code, 200)
        self.assertIn('page', response.context)
        self.assertNotIn('page_obj', response.context)
        self.assertEqual(len(response.context[-1]['page'].object_list), 3)
        self.assertEqual(response.context[-1]['page'].object_list[0].content_type(), u'core.mockmodel')
        self.assertEqual(response.context[-1]['page'].object_list[0].pk, '1')

    def test_invalid_page(self):
        response = self.client.get(reverse('haystack_search'), {'q': 'haystack', 'page': '165233'})
        self.assertEqual(response.status_code, 404)

    def test_empty_results(self):
        sv = SearchView()
        sv.request = HttpRequest()
        sv.form = sv.build_form()
        self.assertTrue(isinstance(sv.get_results(), EmptySearchQuerySet))

    def test_initial_data(self):
        sv = SearchView(form_class=InitialedSearchForm)
        sv.request = HttpRequest()
        form = sv.build_form()
        self.assertTrue(isinstance(form, InitialedSearchForm))
        self.assertEqual(form.fields['q'].initial, 'Search for...')
        para = form.as_p()
        self.assertTrue(u'<label for="id_q">Search:</label>' in para)
        self.assertTrue(u'value="Search for..."' in para)

    def test_pagination(self):
        response = self.client.get(reverse('haystack_search'), {'q': 'haystack', 'page': 0})
        self.assertEqual(response.status_code, 404)
        response = self.client.get(reverse('haystack_search'), {'q': 'haystack', 'page': 1})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context[-1]['page'].object_list), 3)
        response = self.client.get(reverse('haystack_search'), {'q': 'haystack', 'page': 2})
        self.assertEqual(response.status_code, 404)

    def test_thread_safety(self):
        exceptions = []

        def threaded_view(resp_queue, view, request):
            time.sleep(2)

            try:
                view(request)
                resp_queue.put(request.GET['name'])
            except Exception as e:
                exceptions.append(e)
                raise

        class ThreadedSearchView(SearchView):
            def __call__(self, request):
                print("Name: %s" % request.GET['name'])
                return super(ThreadedSearchView, self).__call__(request)

        view = search_view_factory(view_class=ThreadedSearchView)
        resp_queue = queue.Queue()
        request_1 = HttpRequest()
        request_1.GET = {'name': 'foo'}
        request_2 = HttpRequest()
        request_2.GET = {'name': 'bar'}

        th1 = Thread(target=threaded_view, args=(resp_queue, view, request_1))
        th2 = Thread(target=threaded_view, args=(resp_queue, view, request_2))

        th1.start()
        th2.start()
        th1.join()
        th2.join()

        foo = resp_queue.get()
        bar = resp_queue.get()
        self.assertNotEqual(foo, bar)

    def test_spelling(self):
        # Stow.
        from django.conf import settings
        old = settings.HAYSTACK_CONNECTIONS['default'].get('INCLUDE_SPELLING', None)

        settings.HAYSTACK_CONNECTIONS['default']['INCLUDE_SPELLING'] = True

        sv = SearchView()
        sv.query = 'Nothing'
        sv.results = []
        sv.build_page = lambda: (None, None)
        sv.create_response()
        context = sv.get_context()

        self.assertIn('suggestion', context,
                      msg='Spelling suggestions should be present even if'
                          ' no results were returned')
        self.assertEqual(context['suggestion'], None)

        # Restore
        settings.HAYSTACK_CONNECTIONS['default']['INCLUDE_SPELLING'] = old

        if old is None:
            del settings.HAYSTACK_CONNECTIONS['default']['INCLUDE_SPELLING']
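
# ``reverse('haystack_search')`` in the tests above assumes a URL pattern
# along these lines (typical for the Django/Haystack versions this code
# targets; exact syntax depends on your Django version):
#
#     from django.conf.urls import url
#     from haystack.views import SearchView
#
#     urlpatterns = [
#         url(r'^search/$', SearchView(), name='haystack_search'),
#     ]
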
Example #36
class SolrSearchBackendTestCase(TestCase):
    def setUp(self):
        super(SolrSearchBackendTestCase, self).setUp()

        # Wipe it clean.
        self.raw_solr = pysolr.Solr(
            settings.HAYSTACK_CONNECTIONS['default']['URL'])
        clear_solr_index()

        # Stow.
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SolrMockSearchIndex()
        self.smtmmi = SolrMaintainTypeMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui
        self.sb = connections['default'].get_backend()

        self.sample_objs = []

        for i in xrange(1, 4):
            mock = MockModel()
            mock.id = i
            mock.author = 'daniel%s' % i
            mock.pub_date = datetime.date(2009, 2,
                                          25) - datetime.timedelta(days=i)
            self.sample_objs.append(mock)

    def tearDown(self):
        connections['default']._index = self.old_ui
        super(SolrSearchBackendTestCase, self).tearDown()

    def test_non_silent(self):
        bad_sb = connections['default'].backend(
            'bad',
            URL='http://omg.wtf.bbq:1000/solr',
            SILENTLY_FAIL=False,
            TIMEOUT=1)

        # A bare ``except:`` here would also swallow the AssertionError raised
        # by ``self.fail()``, so the failure could never surface;
        # ``assertRaises`` expresses the intent directly.
        with self.assertRaises(Exception):
            bad_sb.update(self.smmi, self.sample_objs)

        with self.assertRaises(Exception):
            bad_sb.remove('core.mockmodel.1')

        with self.assertRaises(Exception):
            bad_sb.clear()

        with self.assertRaises(Exception):
            bad_sb.search('foo')

    def test_update(self):
        self.sb.update(self.smmi, self.sample_objs)

        # Check what Solr thinks is there.
        self.assertEqual(self.raw_solr.search('*:*').hits, 3)
        self.assertEqual(
            self.raw_solr.search('*:*').docs, [{
                'django_id': '1',
                'django_ct': 'core.mockmodel',
                'name': 'daniel1',
                'name_exact': 'daniel1',
                'text': 'Indexed!\n1',
                'pub_date': '2009-02-24T00:00:00Z',
                'id': 'core.mockmodel.1'
            }, {
                'django_id': '2',
                'django_ct': 'core.mockmodel',
                'name': 'daniel2',
                'name_exact': 'daniel2',
                'text': 'Indexed!\n2',
                'pub_date': '2009-02-23T00:00:00Z',
                'id': 'core.mockmodel.2'
            }, {
                'django_id': '3',
                'django_ct': 'core.mockmodel',
                'name': 'daniel3',
                'name_exact': 'daniel3',
                'text': 'Indexed!\n3',
                'pub_date': '2009-02-22T00:00:00Z',
                'id': 'core.mockmodel.3'
            }])

    def test_remove(self):
        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_solr.search('*:*').hits, 3)

        self.sb.remove(self.sample_objs[0])
        self.assertEqual(self.raw_solr.search('*:*').hits, 2)
        self.assertEqual(
            self.raw_solr.search('*:*').docs, [{
                'django_id': '2',
                'django_ct': 'core.mockmodel',
                'name': 'daniel2',
                'name_exact': 'daniel2',
                'text': 'Indexed!\n2',
                'pub_date': '2009-02-23T00:00:00Z',
                'id': 'core.mockmodel.2'
            }, {
                'django_id': '3',
                'django_ct': 'core.mockmodel',
                'name': 'daniel3',
                'name_exact': 'daniel3',
                'text': 'Indexed!\n3',
                'pub_date': '2009-02-22T00:00:00Z',
                'id': 'core.mockmodel.3'
            }])

    def test_clear(self):
        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_solr.search('*:*').hits, 3)

        self.sb.clear()
        self.assertEqual(self.raw_solr.search('*:*').hits, 0)

        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_solr.search('*:*').hits, 3)

        self.sb.clear([AnotherMockModel])
        self.assertEqual(self.raw_solr.search('*:*').hits, 3)

        self.sb.clear([MockModel])
        self.assertEqual(self.raw_solr.search('*:*').hits, 0)

        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_solr.search('*:*').hits, 3)

        self.sb.clear([AnotherMockModel, MockModel])
        self.assertEqual(self.raw_solr.search('*:*').hits, 0)

    def test_search(self):
        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_solr.search('*:*').hits, 3)

        self.assertEqual(self.sb.search(''), {'hits': 0, 'results': []})
        self.assertEqual(self.sb.search('*:*')['hits'], 3)
        self.assertEqual(
            [result.pk for result in self.sb.search('*:*')['results']],
            ['1', '2', '3'])

        self.assertEqual(self.sb.search('', highlight=True), {
            'hits': 0,
            'results': []
        })
        self.assertEqual(self.sb.search('Index', highlight=True)['hits'], 3)
        self.assertEqual([
            result.highlighted['text'][0]
            for result in self.sb.search('Index', highlight=True)['results']
        ], [
            '<em>Indexed</em>!\n1', '<em>Indexed</em>!\n2',
            '<em>Indexed</em>!\n3'
        ])

        self.assertEqual(self.sb.search('Indx')['hits'], 0)
        self.assertEqual(
            self.sb.search('indax')['spelling_suggestion'], 'index')
        self.assertEqual(
            self.sb.search('Indx',
                           spelling_query='indexy')['spelling_suggestion'],
            'index')

        self.assertEqual(self.sb.search('', facets=['name']), {
            'hits': 0,
            'results': []
        })
        results = self.sb.search('Index', facets=['name'])
        self.assertEqual(results['hits'], 3)
        self.assertEqual(results['facets']['fields']['name'], [('daniel1', 1),
                                                               ('daniel2', 1),
                                                               ('daniel3', 1)])

        self.assertEqual(
            self.sb.search('',
                           date_facets={
                               'pub_date': {
                                   'start_date': datetime.date(2008, 2, 26),
                                   'end_date': datetime.date(2008, 3, 26),
                                   'gap_by': 'month',
                                   'gap_amount': 1
                               }
                           }), {
                               'hits': 0,
                               'results': []
                           })
        results = self.sb.search('Index',
                                 date_facets={
                                     'pub_date': {
                                         'start_date':
                                         datetime.date(2008, 2, 26),
                                         'end_date':
                                         datetime.date(2008, 3, 26),
                                         'gap_by': 'month',
                                         'gap_amount': 1
                                     }
                                 })
        self.assertEqual(results['hits'], 3)
        # DRL_TODO: Correct output but no counts. Another case of needing better test data?
        # self.assertEqual(results['facets']['dates']['pub_date'], {'end': '2008-02-26T00:00:00Z', 'gap': '/MONTH'})

        self.assertEqual(
            self.sb.search('', query_facets=[('name', '[* TO e]')]), {
                'hits': 0,
                'results': []
            })
        results = self.sb.search('Index', query_facets=[('name', '[* TO e]')])
        self.assertEqual(results['hits'], 3)
        self.assertEqual(results['facets']['queries'], {'name:[* TO e]': 3})

        self.assertEqual(
            self.sb.search('', narrow_queries=set(['name:daniel1'])), {
                'hits': 0,
                'results': []
            })
        results = self.sb.search('Index', narrow_queries=set(['name:daniel1']))
        self.assertEqual(results['hits'], 1)

        # Ensure that swapping the ``result_class`` works.
        self.assertTrue(
            isinstance(
                self.sb.search(u'index document',
                               result_class=MockSearchResult)['results'][0],
                MockSearchResult))

        # Check the use of ``limit_to_registered_models``.
        self.assertEqual(self.sb.search('', limit_to_registered_models=False),
                         {
                             'hits': 0,
                             'results': []
                         })
        self.assertEqual(
            self.sb.search('*:*', limit_to_registered_models=False)['hits'], 3)
        self.assertEqual([
            result.pk for result in self.sb.search(
                '*:*', limit_to_registered_models=False)['results']
        ], ['1', '2', '3'])

        # Stow.
        old_limit_to_registered_models = getattr(
            settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
        settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = False

        self.assertEqual(self.sb.search(''), {'hits': 0, 'results': []})
        self.assertEqual(self.sb.search('*:*')['hits'], 3)
        self.assertEqual(
            [result.pk for result in self.sb.search('*:*')['results']],
            ['1', '2', '3'])

        # Restore.
        settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = old_limit_to_registered_models

    def test_more_like_this(self):
        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_solr.search('*:*').hits, 3)

        # A functional MLT example with enough data to work is below. Rely on
        # this to ensure the API is correct enough.
        self.assertEqual(
            self.sb.more_like_this(self.sample_objs[0])['hits'], 0)
        self.assertEqual([
            result.pk for result in self.sb.more_like_this(self.sample_objs[0])
            ['results']
        ], [])

    def test_build_schema(self):
        old_ui = connections['default'].get_unified_index()

        (content_field_name,
         fields) = self.sb.build_schema(old_ui.all_searchfields())
        self.assertEqual(content_field_name, 'text')
        self.assertEqual(len(fields), 4)
        self.assertEqual(fields, [{
            'indexed': 'true',
            'type': 'text_en',
            'stored': 'true',
            'field_name': 'text',
            'multi_valued': 'false'
        }, {
            'indexed': 'true',
            'type': 'date',
            'stored': 'true',
            'field_name': 'pub_date',
            'multi_valued': 'false'
        }, {
            'indexed': 'true',
            'type': 'text_en',
            'stored': 'true',
            'field_name': 'name',
            'multi_valued': 'false'
        }, {
            'indexed': 'true',
            'field_name': 'name_exact',
            'stored': 'true',
            'type': 'string',
            'multi_valued': 'false'
        }])

        ui = UnifiedIndex()
        ui.build(indexes=[SolrComplexFacetsMockSearchIndex()])
        (content_field_name,
         fields) = self.sb.build_schema(ui.all_searchfields())
        self.assertEqual(content_field_name, 'text')
        self.assertEqual(len(fields), 15)
        fields = sorted(fields, key=lambda field: field['field_name'])
        self.assertEqual(fields, [{
            'field_name': 'average_rating',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'float'
        }, {
            'field_name': 'average_rating_exact',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'float'
        }, {
            'field_name': 'created',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'date'
        }, {
            'field_name': 'created_exact',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'date'
        }, {
            'field_name': 'is_active',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'boolean'
        }, {
            'field_name': 'is_active_exact',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'boolean'
        }, {
            'field_name': 'name',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'text_en'
        }, {
            'field_name': 'name_exact',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'string'
        }, {
            'field_name': 'post_count',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'long'
        }, {
            'field_name': 'post_count_i',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'long'
        }, {
            'field_name': 'pub_date',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'date'
        }, {
            'field_name': 'pub_date_exact',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'date'
        }, {
            'field_name': 'sites',
            'indexed': 'true',
            'multi_valued': 'true',
            'stored': 'true',
            'type': 'text_en'
        }, {
            'field_name': 'sites_exact',
            'indexed': 'true',
            'multi_valued': 'true',
            'stored': 'true',
            'type': 'string'
        }, {
            'field_name': 'text',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'text_en'
        }])

    def test_verify_type(self):
        old_ui = connections['default'].get_unified_index()
        ui = UnifiedIndex()
        smtmmi = SolrMaintainTypeMockSearchIndex()
        ui.build(indexes=[smtmmi])
        connections['default']._index = ui
        sb = connections['default'].get_backend()
        sb.update(smtmmi, self.sample_objs)

        self.assertEqual(sb.search('*:*')['hits'], 3)
        self.assertEqual(
            [result.month for result in sb.search('*:*')['results']],
            [u'02', u'02', u'02'])
        connections['default']._index = old_ui
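
# The keyword arguments exercised against the backend in ``test_search`` above
# map onto the public ``SearchQuerySet`` API roughly as follows (illustrative,
# not exhaustive):
#
#     sqs = SearchQuerySet().filter(content='Index')
#     sqs = sqs.facet('name')                          # facets=['name']
#     sqs = sqs.narrow('name:daniel1')                 # narrow_queries
#     sqs = sqs.query_facet('name', '[* TO e]')        # query_facets
#     sqs = sqs.date_facet('pub_date',
#                          start_date=datetime.date(2008, 2, 26),
#                          end_date=datetime.date(2008, 3, 26),
#                          gap_by='month')             # date_facets
#     sqs = sqs.result_class(MockSearchResult)         # result_class
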
class LiveElasticsearchSearchQuerySetTestCase(TestCase):
    """Used to test actual implementation details of the SearchQuerySet."""
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveElasticsearchSearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = ElasticsearchMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui

        self.sqs = SearchQuerySet()
        self.rsqs = RelatedSearchQuerySet()

        # Ugly but not constantly reindexing saves us almost 50% runtime.
        global lssqstc_all_loaded

        if lssqstc_all_loaded is None:
            lssqstc_all_loaded = True

            # Wipe it clean.
            clear_elasticsearch_index()

            # Force indexing of the content.
            self.smmi.update()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(LiveElasticsearchSearchQuerySetTestCase, self).tearDown()

    def test_load_all(self):
        sqs = self.sqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue(len(sqs) > 0)
        self.assertEqual(sqs[0].object.foo, u'In addition, you may specify other fields to be populated along with the document. In this case, we also index the user who authored the document as well as the date the document was published. The variable you assign the SearchField to should directly map to the field your search backend is expecting. You instantiate most search fields with a parameter that points to the attribute of the object to populate that field with.')

    def test_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.sqs.all()
        results = sorted([int(result.pk) for result in sqs])
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 3)

    def test_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual([int(result.pk) for result in results[1:11]], [7, 12, 17, 1, 6, 11, 16, 23, 5, 10])
        self.assertEqual(len(connections['default'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual(int(results[21].pk), 18)
        self.assertEqual(len(connections['default'].queries), 1)

    def test_count(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.sqs.all()
        self.assertEqual(sqs.count(), 23)
        self.assertEqual(sqs.count(), 23)
        self.assertEqual(len(sqs), 23)
        self.assertEqual(sqs.count(), 23)
        # Should only execute one query to count the length of the result set.
        self.assertEqual(len(connections['default'].queries), 1)

    def test_manual_iter(self):
        results = self.sqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = [int(result.pk) for result in results._manual_iter()]
        self.assertEqual(results, [2, 7, 12, 17, 1, 6, 11, 16, 23, 5, 10, 15, 22, 4, 9, 14, 19, 21, 3, 8, 13, 18, 20])
        self.assertEqual(len(connections['default'].queries), 3)

    def test_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.sqs._cache_is_full(), False)
        results = self.sqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 3)

    def test___and__(self):
        sqs1 = self.sqs.filter(content='foo')
        sqs2 = self.sqs.filter(content='bar')
        sqs = sqs1 & sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'((foo) AND (bar))')

        # Now for something more complex...
        sqs3 = self.sqs.exclude(title='moof').filter(SQ(content='foo') | SQ(content='baz'))
        sqs4 = self.sqs.filter(content='bar')
        sqs = sqs3 & sqs4

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 3)
        self.assertEqual(sqs.query.build_query(), u'(NOT (title:(moof)) AND ((foo) OR (baz)) AND (bar))')

    def test___or__(self):
        sqs1 = self.sqs.filter(content='foo')
        sqs2 = self.sqs.filter(content='bar')
        sqs = sqs1 | sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'((foo) OR (bar))')

        # Now for something more complex...
        sqs3 = self.sqs.exclude(title='moof').filter(SQ(content='foo') | SQ(content='baz'))
        sqs4 = self.sqs.filter(content='bar').models(MockModel)
        sqs = sqs3 | sqs4

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'((NOT (title:(moof)) AND ((foo) OR (baz))) OR (bar))')

    def test_auto_query(self):
        # Ensure bits in exact matches get escaped properly as well.
        # This will break horrifically if escaping isn't working.
        sqs = self.sqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains="pants:rule">')
        self.assertEqual(sqs.query.build_query(), u'("pants\\:rule")')
        self.assertEqual(len(sqs), 0)

    # Regressions

    @unittest.expectedFailure
    def test_regression_proper_start_offsets(self):
        sqs = self.sqs.filter(text='index')
        self.assertNotEqual(sqs.count(), 0)

        id_counts = {}

        for item in sqs:
            if item.id in id_counts:
                id_counts[item.id] += 1
            else:
                id_counts[item.id] = 1

        for key, value in id_counts.items():
            if value > 1:
                self.fail("Result with id '%s' seen more than once in the results." % key)

    def test_regression_raw_search_breaks_slicing(self):
        sqs = self.sqs.raw_search('text:index')
        page_1 = [result.pk for result in sqs[0:10]]
        page_2 = [result.pk for result in sqs[10:20]]

        for pk in page_2:
            if pk in page_1:
                self.fail("Result with id '%s' seen more than once in the results." % pk)

    # RelatedSearchQuerySet Tests

    def test_related_load_all(self):
        sqs = self.rsqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue(len(sqs) > 0)
        self.assertEqual(sqs[0].object.foo, u'In addition, you may specify other fields to be populated along with the document. In this case, we also index the user who authored the document as well as the date the document was published. The variable you assign the SearchField to should directly map to the field your search backend is expecting. You instantiate most search fields with a parameter that points to the attribute of the object to populate that field with.')

    def test_related_load_all_queryset(self):
        sqs = self.rsqs.load_all()
        self.assertEqual(len(sqs._load_all_querysets), 0)

        sqs = sqs.load_all_queryset(MockModel, MockModel.objects.filter(id__gt=1))
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        self.assertEqual(sorted([obj.object.id for obj in sqs]), range(2, 24))

        sqs = sqs.load_all_queryset(MockModel, MockModel.objects.filter(id__gt=10))
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        self.assertEqual([obj.object.id for obj in sqs], [12, 17, 11, 16, 23, 15, 22, 14, 19, 21, 13, 18, 20])
        self.assertEqual([obj.object.id for obj in sqs[10:20]], [13, 18, 20])

    def test_related_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.rsqs.all()
        results = [int(result.pk) for result in sqs]
        self.assertEqual(results, [2, 7, 12, 17, 1, 6, 11, 16, 23, 5, 10, 15, 22, 4, 9, 14, 19, 21, 3, 8, 13, 18, 20])
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual([int(result.pk) for result in results[1:11]], [7, 12, 17, 1, 6, 11, 16, 23, 5, 10])
        self.assertEqual(len(connections['default'].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual(int(results[21].pk), 18)
        self.assertEqual(len(connections['default'].queries), 4)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual([int(result.pk) for result in results[20:30]], [13, 18, 20])
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_manual_iter(self):
        results = self.rsqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = sorted([int(result.pk) for result in results._manual_iter()])
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

    def test_related_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.rsqs._cache_is_full(), False)
        results = self.rsqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 5)

    def test_quotes_regression(self):
        sqs = self.sqs.auto_query(u"44°48'40''N 20°28'32''E")
        # Should not have empty terms.
        self.assertEqual(sqs.query.build_query(), u"(44\xb048'40''N 20\xb028'32''E)")
        # Should not cause Elasticsearch to 500.
        self.assertEqual(sqs.count(), 0)

        sqs = self.sqs.auto_query('blazing')
        self.assertEqual(sqs.query.build_query(), u'(blazing)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('blazing saddles')
        self.assertEqual(sqs.query.build_query(), u'(blazing saddles)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles')
        self.assertEqual(sqs.query.build_query(), u'(\\"blazing saddles)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles"')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles"')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'saddles"')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing \'saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing \'\'saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"\'')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing \'\'saddles" \')')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"\'"')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing \'\'saddles" \'\\")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles" mel')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles" mel)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles" mel brooks')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles" mel brooks)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles" brooks')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing saddles" brooks)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles" "brooks')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing saddles" \\"brooks)')
        self.assertEqual(sqs.count(), 0)

    def test_query_generation(self):
        sqs = self.sqs.filter(SQ(content=AutoQuery("hello world")) | SQ(title=AutoQuery("hello world")))
        self.assertEqual(sqs.query.build_query(), u"((hello world) OR title:(hello world))")

    def test_result_class(self):
        # Assert that we're defaulting to ``SearchResult``.
        sqs = self.sqs.all()
        self.assertTrue(isinstance(sqs[0], SearchResult))

        # Custom class.
        sqs = self.sqs.result_class(MockSearchResult).all()
        self.assertTrue(isinstance(sqs[0], MockSearchResult))

        # Reset to default.
        sqs = self.sqs.result_class(None).all()
        self.assertTrue(isinstance(sqs[0], SearchResult))
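
# Both live SearchQuerySet test cases in these examples gate the expensive
# reindex on a module-level flag, so the bulk fixture is only indexed once per
# test run. Its definition is not shown in this excerpt; it is simply a module
# global initialised to None, e.g.:
#
#     lssqstc_all_loaded = None
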
class ElasticsearchSearchBackendTestCase(TestCase):
    def setUp(self):
        super(ElasticsearchSearchBackendTestCase, self).setUp()

        # Wipe it clean.
        self.raw_es = pyelasticsearch.ElasticSearch(settings.HAYSTACK_CONNECTIONS['default']['URL'])
        clear_elasticsearch_index()

        # Stow.
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = ElasticsearchMockSearchIndex()
        self.smtmmi = ElasticsearchMaintainTypeMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui
        self.sb = connections['default'].get_backend()

        # Force the backend to rebuild the mapping each time.
        self.sb.existing_mapping = {}
        self.sb.setup()

        self.sample_objs = []

        for i in xrange(1, 4):
            mock = MockModel()
            mock.id = i
            mock.author = 'daniel%s' % i
            mock.pub_date = datetime.date(2009, 2, 25) - datetime.timedelta(days=i)
            self.sample_objs.append(mock)

    def tearDown(self):
        connections['default']._index = self.old_ui
        super(ElasticsearchSearchBackendTestCase, self).tearDown()

    def raw_search(self, query):
        try:
            # Pass the requested query through rather than ignoring the
            # parameter; all callers currently use '*:*' so behaviour is
            # unchanged.
            return self.raw_es.search(query, index=settings.HAYSTACK_CONNECTIONS['default']['INDEX_NAME'])
        except (requests.RequestException, pyelasticsearch.ElasticHttpError):
            return {}

    def test_non_silent(self):
        bad_sb = connections['default'].backend('bad', URL='http://omg.wtf.bbq:1000/', INDEX_NAME='whatver', SILENTLY_FAIL=False, TIMEOUT=1)

        # As above, ``assertRaises`` avoids the bare ``except:`` that would
        # swallow the AssertionError from ``self.fail()``.
        with self.assertRaises(Exception):
            bad_sb.update(self.smmi, self.sample_objs)

        with self.assertRaises(Exception):
            bad_sb.remove('core.mockmodel.1')

        with self.assertRaises(Exception):
            bad_sb.clear()

        with self.assertRaises(Exception):
            bad_sb.search('foo')

    def test_update(self):
        self.sb.update(self.smmi, self.sample_objs)

        # Check what Elasticsearch thinks is there.
        self.assertEqual(self.raw_search('*:*')['hits']['total'], 3)
        self.assertEqual(sorted([res['_source'] for res in self.raw_search('*:*')['hits']['hits']], key=lambda x: x['id']), [
            {
                'django_id': '1',
                'django_ct': 'core.mockmodel',
                'name': 'daniel1',
                'name_exact': 'daniel1',
                'text': 'Indexed!\n1',
                'pub_date': '2009-02-24T00:00:00',
                'id': 'core.mockmodel.1'
            },
            {
                'django_id': '2',
                'django_ct': 'core.mockmodel',
                'name': 'daniel2',
                'name_exact': 'daniel2',
                'text': 'Indexed!\n2',
                'pub_date': '2009-02-23T00:00:00',
                'id': 'core.mockmodel.2'
            },
            {
                'django_id': '3',
                'django_ct': 'core.mockmodel',
                'name': 'daniel3',
                'name_exact': 'daniel3',
                'text': 'Indexed!\n3',
                'pub_date': '2009-02-22T00:00:00',
                'id': 'core.mockmodel.3'
            }
        ])

    def test_remove(self):
        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_search('*:*')['hits']['total'], 3)

        self.sb.remove(self.sample_objs[0])
        self.assertEqual(self.raw_search('*:*')['hits']['total'], 2)
        self.assertEqual([res['_source'] for res in self.raw_search('*:*')['hits']['hits']], [
            {
                'django_id': '2',
                'django_ct': 'core.mockmodel',
                'name': 'daniel2',
                'name_exact': 'daniel2',
                'text': 'Indexed!\n2',
                'pub_date': '2009-02-23T00:00:00',
                'id': 'core.mockmodel.2'
            },
            {
                'django_id': '3',
                'django_ct': 'core.mockmodel',
                'name': 'daniel3',
                'name_exact': 'daniel3',
                'text': 'Indexed!\n3',
                'pub_date': '2009-02-22T00:00:00',
                'id': 'core.mockmodel.3'
            }
        ])

    def test_clear(self):
        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_search('*:*').get('hits', {}).get('total', 0), 3)

        self.sb.clear()
        self.assertEqual(self.raw_search('*:*').get('hits', {}).get('total', 0), 0)

        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_search('*:*').get('hits', {}).get('total', 0), 3)

        self.sb.clear([AnotherMockModel])
        self.assertEqual(self.raw_search('*:*').get('hits', {}).get('total', 0), 3)

        self.sb.clear([MockModel])
        self.assertEqual(self.raw_search('*:*').get('hits', {}).get('total', 0), 0)

        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_search('*:*').get('hits', {}).get('total', 0), 3)

        self.sb.clear([AnotherMockModel, MockModel])
        self.assertEqual(self.raw_search('*:*').get('hits', {}).get('total', 0), 0)

    def test_search(self):
        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_search('*:*')['hits']['total'], 3)

        self.assertEqual(self.sb.search(''), {'hits': 0, 'results': []})
        self.assertEqual(self.sb.search('*:*')['hits'], 3)
        self.assertEqual([result.pk for result in self.sb.search('*:*')['results']], [u'2', u'1', u'3'])

        self.assertEqual(self.sb.search('', highlight=True), {'hits': 0, 'results': []})
        self.assertEqual(self.sb.search('Index', highlight=True)['hits'], 3)
        self.assertEqual([result.highlighted for result in self.sb.search('Index', highlight=True)['results']],
            [[u'<em>Indexed</em>!\n2'], [u'<em>Indexed</em>!\n1'], [u'<em>Indexed</em>!\n3']])

        self.assertEqual(self.sb.search('Indx')['hits'], 0)
        self.assertEqual(self.sb.search('indaxed')['spelling_suggestion'], 'indexed')
        self.assertEqual(self.sb.search('arf', spelling_query='indexyd')['spelling_suggestion'], 'indexed')

        self.assertEqual(self.sb.search('', facets={'name': {}}), {'hits': 0, 'results': []})
        results = self.sb.search('Index', facets={'name': {}})
        self.assertEqual(results['hits'], 3)
        self.assertEqual(results['facets']['fields']['name'], [('daniel3', 1), ('daniel2', 1), ('daniel1', 1)])

        self.assertEqual(self.sb.search('', date_facets={'pub_date': {'start_date': datetime.date(2008, 1, 1), 'end_date': datetime.date(2009, 4, 1), 'gap_by': 'month', 'gap_amount': 1}}), {'hits': 0, 'results': []})
        results = self.sb.search('Index', date_facets={'pub_date': {'start_date': datetime.date(2008, 1, 1), 'end_date': datetime.date(2009, 4, 1), 'gap_by': 'month', 'gap_amount': 1}})
        self.assertEqual(results['hits'], 3)
        self.assertEqual(results['facets']['dates']['pub_date'], [(datetime.datetime(2009, 2, 1, 0, 0), 3)])

        self.assertEqual(self.sb.search('', query_facets=[('name', '[* TO e]')]), {'hits': 0, 'results': []})
        results = self.sb.search('Index', query_facets=[('name', '[* TO e]')])
        self.assertEqual(results['hits'], 3)
        self.assertEqual(results['facets']['queries'], {u'name': 3})

        self.assertEqual(self.sb.search('', narrow_queries=set(['name:daniel1'])), {'hits': 0, 'results': []})
        results = self.sb.search('Index', narrow_queries=set(['name:daniel1']))
        self.assertEqual(results['hits'], 1)

        # Ensure that swapping the ``result_class`` works.
        self.assertTrue(isinstance(self.sb.search(u'index', result_class=MockSearchResult)['results'][0], MockSearchResult))

        # Check the use of ``limit_to_registered_models``.
        self.assertEqual(self.sb.search('', limit_to_registered_models=False), {'hits': 0, 'results': []})
        self.assertEqual(self.sb.search('*:*', limit_to_registered_models=False)['hits'], 3)
        self.assertEqual(sorted([result.pk for result in self.sb.search('*:*', limit_to_registered_models=False)['results']]), ['1', '2', '3'])

        # Stow.
        old_limit_to_registered_models = getattr(settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
        settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = False

        self.assertEqual(self.sb.search(''), {'hits': 0, 'results': []})
        self.assertEqual(self.sb.search('*:*')['hits'], 3)
        self.assertEqual(sorted([result.pk for result in self.sb.search('*:*')['results']]), ['1', '2', '3'])

        # Restore.
        settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = old_limit_to_registered_models

    def test_more_like_this(self):
        self.sb.update(self.smmi, self.sample_objs)
        self.assertEqual(self.raw_search('*:*')['hits']['total'], 3)

        # A functional MLT example with enough data to work is below. Rely on
        # this to ensure the API is correct enough.
        self.assertEqual(self.sb.more_like_this(self.sample_objs[0])['hits'], 0)
        self.assertEqual([result.pk for result in self.sb.more_like_this(self.sample_objs[0])['results']], [])

    def test_build_schema(self):
        old_ui = connections['default'].get_unified_index()

        (content_field_name, mapping) = self.sb.build_schema(old_ui.all_searchfields())
        self.assertEqual(content_field_name, 'text')
        self.assertEqual(len(mapping), 4)
        self.assertEqual(mapping, {
            'text': {'index': 'analyzed', 'term_vector': 'with_positions_offsets', 'type': 'string', 'analyzer': 'snowball', 'boost': 1.0, 'store': 'yes'},
            'pub_date': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'date'},
            'name': {'index': 'analyzed', 'term_vector': 'with_positions_offsets', 'type': 'string', 'analyzer': 'snowball', 'boost': 1.0, 'store': 'yes'},
            'name_exact': {'index': 'not_analyzed', 'term_vector': 'with_positions_offsets', 'boost': 1.0, 'store': 'yes', 'type': 'string'}
        })

        ui = UnifiedIndex()
        ui.build(indexes=[ElasticsearchComplexFacetsMockSearchIndex()])
        (content_field_name, mapping) = self.sb.build_schema(ui.all_searchfields())
        self.assertEqual(content_field_name, 'text')
        self.assertEqual(len(mapping), 15)
        self.assertEqual(mapping, {
            'name': {'index': 'analyzed', 'term_vector': 'with_positions_offsets', 'type': 'string', 'analyzer': 'snowball', 'boost': 1.0, 'store': 'yes'},
            'is_active_exact': {'index': 'not_analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'boolean'},
            'created': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'date'},
            'post_count': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'long'},
            'created_exact': {'index': 'not_analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'date'},
            'sites_exact': {'index': 'not_analyzed', 'term_vector': 'with_positions_offsets', 'boost': 1.0, 'store': 'yes', 'type': 'string'},
            'is_active': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'boolean'},
            'sites': {'index': 'analyzed', 'term_vector': 'with_positions_offsets', 'type': 'string', 'analyzer': 'snowball', 'boost': 1.0, 'store': 'yes'},
            'post_count_i': {'index': 'not_analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'long'},
            'average_rating': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'float'},
            'text': {'index': 'analyzed', 'term_vector': 'with_positions_offsets', 'type': 'string', 'analyzer': 'snowball', 'boost': 1.0, 'store': 'yes'},
            'pub_date_exact': {'index': 'not_analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'date'},
            'name_exact': {'index': 'not_analyzed', 'term_vector': 'with_positions_offsets', 'boost': 1.0, 'store': 'yes', 'type': 'string'},
            'pub_date': {'index': 'analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'date'},
            'average_rating_exact': {'index': 'not_analyzed', 'boost': 1.0, 'store': 'yes', 'type': 'float'}
        })

    def test_verify_type(self):
        old_ui = connections['default'].get_unified_index()
        ui = UnifiedIndex()
        smtmmi = ElasticsearchMaintainTypeMockSearchIndex()
        ui.build(indexes=[smtmmi])
        connections['default']._index = ui
        sb = connections['default'].get_backend()
        sb.update(smtmmi, self.sample_objs)

        self.assertEqual(sb.search('*:*')['hits'], 3)
        self.assertEqual([result.month for result in sb.search('*:*')['results']], [u'02', u'02', u'02'])
        connections['default']._index = old_ui
Example #39
class LiveSolrSearchQuerySetTestCase(TestCase):
    """Used to test actual implementation details of the SearchQuerySet."""
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveSolrSearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SolrMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui

        self.sqs = SearchQuerySet()
        self.rsqs = RelatedSearchQuerySet()

        # Ugly but not constantly reindexing saves us almost 50% runtime.
        global lssqstc_all_loaded

        if lssqstc_all_loaded is None:
            print('Reloading data...')
            lssqstc_all_loaded = True

            # Wipe it clean.
            clear_solr_index()

            # Force indexing of the content.
            self.smmi.update()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(LiveSolrSearchQuerySetTestCase, self).tearDown()

    def test_load_all(self):
        sqs = self.sqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue(len(sqs) > 0)
        self.assertEqual(
            sqs[0].object.foo,
            u"Registering indexes in Haystack is very similar to registering models and ``ModelAdmin`` classes in the `Django admin site`_.  If you want to override the default indexing behavior for your model you can specify your own ``SearchIndex`` class.  This is useful for ensuring that future-dated or non-live content is not indexed and searchable. Our ``Note`` model has a ``pub_date`` field, so let's update our code to include our own ``SearchIndex`` to exclude indexing future-dated notes:"
        )

    def test_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.sqs.all()
        results = [int(result.pk) for result in sqs]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 3)

    def test_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual([int(result.pk) for result in results[1:11]],
                         [2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
        self.assertEqual(len(connections['default'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual(int(results[21].pk), 22)
        self.assertEqual(len(connections['default'].queries), 1)

    def test_count(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.sqs.all()
        self.assertEqual(sqs.count(), 23)
        self.assertEqual(sqs.count(), 23)
        self.assertEqual(len(sqs), 23)
        self.assertEqual(sqs.count(), 23)
        # Should only execute one query to count the length of the result set.
        self.assertEqual(len(connections['default'].queries), 1)

    def test_manual_iter(self):
        results = self.sqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = [int(result.pk) for result in results._manual_iter()]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 3)

    def test_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.sqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.sqs._cache_is_full(), False)
        results = self.sqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 3)

    def test___and__(self):
        sqs1 = self.sqs.filter(content='foo')
        sqs2 = self.sqs.filter(content='bar')
        sqs = sqs1 & sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'((foo) AND (bar))')

        # Now for something more complex...
        sqs3 = self.sqs.exclude(
            title='moof').filter(SQ(content='foo') | SQ(content='baz'))
        sqs4 = self.sqs.filter(content='bar')
        sqs = sqs3 & sqs4

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 3)
        self.assertEqual(
            sqs.query.build_query(),
            u'(NOT (title:(moof)) AND ((foo) OR (baz)) AND (bar))')

    def test___or__(self):
        sqs1 = self.sqs.filter(content='foo')
        sqs2 = self.sqs.filter(content='bar')
        sqs = sqs1 | sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(sqs.query.build_query(), u'((foo) OR (bar))')

        # Now for something more complex...
        sqs3 = self.sqs.exclude(
            title='moof').filter(SQ(content='foo') | SQ(content='baz'))
        sqs4 = self.sqs.filter(content='bar').models(MockModel)
        sqs = sqs3 | sqs4

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)
        self.assertEqual(
            sqs.query.build_query(),
            u'((NOT (title:(moof)) AND ((foo) OR (baz))) OR (bar))')

    def test_auto_query(self):
        # Ensure bits in exact matches get escaped properly as well.
        # This will break horrifically if escaping isn't working.
        sqs = self.sqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__contains="pants:rule">')
        self.assertEqual(sqs.query.build_query(), u'("pants\\:rule")')
        self.assertEqual(len(sqs), 0)

    # Regressions

    def test_regression_proper_start_offsets(self):
        sqs = self.sqs.filter(text='index')
        self.assertNotEqual(sqs.count(), 0)

        id_counts = {}

        for item in sqs:
            if item.id in id_counts:
                id_counts[item.id] += 1
            else:
                id_counts[item.id] = 1

        for key, value in id_counts.items():
            if value > 1:
                self.fail(
                    "Result with id '%s' seen more than once in the results." %
                    key)

    def test_regression_raw_search_breaks_slicing(self):
        sqs = self.sqs.raw_search('text: index')
        page_1 = [result.pk for result in sqs[0:10]]
        page_2 = [result.pk for result in sqs[10:20]]

        for pk in page_2:
            if pk in page_1:
                self.fail(
                    "Result with id '%s' seen more than once in the results." %
                    pk)

    # RelatedSearchQuerySet Tests

    def test_related_load_all(self):
        sqs = self.rsqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue(len(sqs) > 0)
        self.assertEqual(
            sqs[0].object.foo,
            u"Registering indexes in Haystack is very similar to registering models and ``ModelAdmin`` classes in the `Django admin site`_.  If you want to override the default indexing behavior for your model you can specify your own ``SearchIndex`` class.  This is useful for ensuring that future-dated or non-live content is not indexed and searchable. Our ``Note`` model has a ``pub_date`` field, so let's update our code to include our own ``SearchIndex`` to exclude indexing future-dated notes:"
        )

    def test_related_load_all_queryset(self):
        sqs = self.rsqs.load_all()
        self.assertEqual(len(sqs._load_all_querysets), 0)

        sqs = sqs.load_all_queryset(MockModel,
                                    MockModel.objects.filter(id__gt=1))
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        self.assertEqual([obj.object.id for obj in sqs], range(2, 24))

        sqs = sqs.load_all_queryset(MockModel,
                                    MockModel.objects.filter(id__gt=10))
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs._load_all_querysets), 1)
        self.assertEqual([obj.object.id for obj in sqs], range(11, 24))
        self.assertEqual([obj.object.id for obj in sqs[10:20]], [21, 22, 23])

    def test_related_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        sqs = self.rsqs.all()
        results = [int(result.pk) for result in sqs]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual([int(result.pk) for result in results[1:11]],
                         [2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
        self.assertEqual(len(connections['default'].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual(int(results[21].pk), 22)
        self.assertEqual(len(connections['default'].queries), 4)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual([int(result.pk) for result in results[20:30]],
                         [21, 22, 23])
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_manual_iter(self):
        results = self.rsqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = [int(result.pk) for result in results._manual_iter()]
        self.assertEqual(results, range(1, 24))
        self.assertEqual(len(connections['default'].queries), 4)

    def test_related_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.rsqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

    def test_related_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.rsqs._cache_is_full(), False)
        results = self.rsqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 5)

    def test_quotes_regression(self):
        sqs = self.sqs.auto_query(u"44°48'40''N 20°28'32''E")
        # Should not have empty terms.
        self.assertEqual(sqs.query.build_query(),
                         u"(44\xb048'40''N 20\xb028'32''E)")
        # Should not cause Solr to 500.
        self.assertEqual(sqs.count(), 0)

        sqs = self.sqs.auto_query('blazing')
        self.assertEqual(sqs.query.build_query(), u'(blazing)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('blazing saddles')
        self.assertEqual(sqs.query.build_query(), u'(blazing saddles)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles')
        self.assertEqual(sqs.query.build_query(), u'(\\"blazing saddles)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles"')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles"')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'saddles"')
        self.assertEqual(sqs.query.build_query(), u'(mel "blazing \'saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"')
        self.assertEqual(sqs.query.build_query(),
                         u'(mel "blazing \'\'saddles")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"\'')
        self.assertEqual(sqs.query.build_query(),
                         u'(mel "blazing \'\'saddles" \')')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing \'\'saddles"\'"')
        self.assertEqual(sqs.query.build_query(),
                         u'(mel "blazing \'\'saddles" \'\\")')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles" mel')
        self.assertEqual(sqs.query.build_query(), u'("blazing saddles" mel)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('"blazing saddles" mel brooks')
        self.assertEqual(sqs.query.build_query(),
                         u'("blazing saddles" mel brooks)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles" brooks')
        self.assertEqual(sqs.query.build_query(),
                         u'(mel "blazing saddles" brooks)')
        self.assertEqual(sqs.count(), 0)
        sqs = self.sqs.auto_query('mel "blazing saddles" "brooks')
        self.assertEqual(sqs.query.build_query(),
                         u'(mel "blazing saddles" \\"brooks)')
        self.assertEqual(sqs.count(), 0)

    def test_query_generation(self):
        sqs = self.sqs.filter(
            SQ(content=AutoQuery("hello world"))
            | SQ(title=AutoQuery("hello world")))
        self.assertEqual(sqs.query.build_query(),
                         u"((hello world) OR title:(hello world))")

    def test_result_class(self):
        # Assert that we're defaulting to ``SearchResult``.
        sqs = self.sqs.all()
        self.assertTrue(isinstance(sqs[0], SearchResult))

        # Custom class.
        sqs = self.sqs.result_class(MockSearchResult).all()
        self.assertTrue(isinstance(sqs[0], MockSearchResult))

        # Reset to default.
        sqs = self.sqs.result_class(None).all()
        self.assertTrue(isinstance(sqs[0], SearchResult))
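

# --- Illustrative sketch (not part of the original example) ---
# A minimal sketch of the ``load_all()`` pattern exercised above: attach the
# database objects to the search results so that ``result.object`` is
# populated. It assumes a configured 'default' Haystack connection with an
# up-to-date index; the function name is purely illustrative.
from haystack.query import SearchQuerySet


def first_page_with_objects(per_page=10):
    # ``load_all()`` fetches the backing model instances in bulk instead of
    # issuing one query per result; slicing only asks the backend for the
    # requested window.
    sqs = SearchQuerySet().all().load_all()
    return [(result.pk, result.object) for result in sqs[:per_page]]

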
class ManagementCommandTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(ManagementCommandTestCase, self).setUp()
        self.solr = pysolr.Solr(
            settings.HAYSTACK_CONNECTIONS['default']['URL'])

        # Stow.
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SolrMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui

    def tearDown(self):
        connections['default']._index = self.old_ui
        super(ManagementCommandTestCase, self).tearDown()

    def test_basic_commands(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)

        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('rebuild_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)

    def test_remove(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)

        # Remove a model instance.
        MockModel.objects.get(pk=1).delete()
        self.assertEqual(self.solr.search('*:*').hits, 23)

        # Plain ``update_index`` doesn't fix it.
        call_command('update_index', verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)

        # With the remove flag, it's gone.
        call_command('update_index', remove=True, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 22)

    def test_age(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        start = datetime.datetime.now() - datetime.timedelta(hours=3)
        end = datetime.datetime.now()

        mock = MockModel.objects.get(pk=1)
        mock.pub_date = datetime.datetime.now() - datetime.timedelta(hours=2)
        mock.save()
        self.assertEqual(
            MockModel.objects.filter(pub_date__range=(start, end)).count(), 1)

        call_command('update_index', age=3, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 1)

    def test_dates(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        start = datetime.datetime.now() - datetime.timedelta(hours=5,
                                                             minutes=30)
        end = datetime.datetime.now() - datetime.timedelta(hours=2)

        mock_1 = MockModel.objects.get(pk=1)
        mock_1.pub_date = datetime.datetime.now() - datetime.timedelta(
            hours=5, minutes=1)
        mock_1.save()
        mock_2 = MockModel.objects.get(pk=2)
        mock_2.pub_date = datetime.datetime.now() - datetime.timedelta(hours=3)
        mock_2.save()
        mock_3 = MockModel.objects.get(pk=3)
        mock_3.pub_date = datetime.datetime.now() - datetime.timedelta(hours=1)
        mock_3.save()
        self.assertEqual(
            MockModel.objects.filter(pub_date__range=(start, end)).count(), 2)

        call_command('update_index',
                     start_date=start.isoformat(),
                     end_date=end.isoformat(),
                     verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 2)

    def test_multiprocessing(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        # Watch the output and make sure multiple PIDs appear.
        call_command('update_index', verbosity=2, workers=2, batchsize=5)
        self.assertEqual(self.solr.search('*:*').hits, 23)
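

# --- Illustrative sketch (not part of the original example) ---
# A condensed sketch of the ``clear_index`` / ``update_index`` options
# exercised in the tests above, assuming a project with a configured
# 'default' Haystack connection. The function name is illustrative only.
from django.core.management import call_command


def refresh_search_index():
    # Wipe the index without prompting, then rebuild it from the database.
    call_command('clear_index', interactive=False, verbosity=0)
    call_command('update_index', verbosity=0)
    # Re-index only records touched in the last 3 hours and prune documents
    # whose database rows have been deleted.
    call_command('update_index', age=3, remove=True, verbosity=0)
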
Example #41
class XapianSearchBackendTestCase(TestCase):
    def setUp(self):
        super(XapianSearchBackendTestCase, self).setUp()

        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.index = XapianMockSearchIndex()
        self.ui.build(indexes=[self.index])
        self.backend = connections['default'].get_backend()
        connections['default']._index = self.ui

        self.sample_objs = []

        for i in xrange(1, 4):
            mock = XapianMockModel()
            mock.id = i
            mock.author = 'david%s' % i
            mock.pub_date = datetime.date(2009, 2,
                                          25) - datetime.timedelta(days=i)
            mock.exp_date = datetime.date(2009, 2,
                                          23) + datetime.timedelta(days=i)
            mock.value = i * 5
            mock.flag = bool(i % 2)
            mock.slug = 'http://example.com/%d/' % i
            mock.url = 'http://example.com/%d/' % i
            self.sample_objs.append(mock)

        self.sample_objs[0].popularity = 834.0
        self.sample_objs[1].popularity = 35.5
        self.sample_objs[2].popularity = 972.0

    def tearDown(self):
        if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
            shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])

        connections['default']._index = self.old_ui
        super(XapianSearchBackendTestCase, self).tearDown()

    def test_update(self):
        self.backend.update(self.index, self.sample_objs)

        self.assertEqual(self.backend.document_count(), 3)
        self.assertEqual([
            result.pk
            for result in self.backend.search(xapian.Query(''))['results']
        ], [1, 2, 3])

    def test_duplicate_update(self):
        self.backend.update(self.index, self.sample_objs)
        # Duplicates should be updated, not appended --
        # http://github.com/notanumber/xapian-haystack/issues/#issue/6
        self.backend.update(self.index, self.sample_objs)

        self.assertEqual(self.backend.document_count(), 3)

    def test_remove(self):
        self.backend.update(self.index, self.sample_objs)
        self.assertEqual(self.backend.document_count(), 3)

        self.backend.remove(self.sample_objs[0])
        self.assertEqual(self.backend.document_count(), 2)
        self.assertEqual([
            result.pk
            for result in self.backend.search(xapian.Query(''))['results']
        ], [2, 3])

    def test_clear(self):
        self.backend.update(self.index, self.sample_objs)
        self.assertEqual(self.backend.document_count(), 3)

        self.backend.clear()
        self.assertEqual(self.backend.document_count(), 0)

        self.backend.update(self.index, self.sample_objs)
        self.assertEqual(self.backend.document_count(), 3)

        self.backend.clear([AnotherMockModel])
        self.assertEqual(self.backend.document_count(), 3)

        self.backend.clear([XapianMockModel])
        self.assertEqual(self.backend.document_count(), 0)

        self.backend.update(self.index, self.sample_objs)
        self.assertEqual(self.backend.document_count(), 3)

        self.backend.clear([AnotherMockModel, XapianMockModel])
        self.assertEqual(self.backend.document_count(), 0)

    def test_search(self):
        self.backend.update(self.index, self.sample_objs)
        self.assertEqual(self.backend.document_count(), 3)

        self.assertEqual(self.backend.search(xapian.Query()), {
            'hits': 0,
            'results': []
        })
        self.assertEqual(self.backend.search(xapian.Query(''))['hits'], 3)
        self.assertEqual([
            result.pk
            for result in self.backend.search(xapian.Query(''))['results']
        ], [1, 2, 3])
        self.assertEqual(
            self.backend.search(xapian.Query('indexed'))['hits'], 3)
        self.assertEqual([
            result.pk
            for result in self.backend.search(xapian.Query(''))['results']
        ], [1, 2, 3])

        # Ensure that swapping the ``result_class`` works.
        self.assertTrue(
            isinstance(
                self.backend.search(
                    xapian.Query('indexed'),
                    result_class=MockSearchResult)['results'][0],
                MockSearchResult))

    def test_search_field_with_punctuation(self):
        self.backend.update(self.index, self.sample_objs)
        self.assertEqual(self.backend.document_count(), 3)

        # self.assertEqual(self.backend.search(xapian.Query('http://example.com/'))['hits'], 3)
        self.assertEqual([
            result.pk for result in self.backend.search(
                xapian.Query('http://example.com/1/'))['results']
        ], [1])

    def test_search_by_mvf(self):
        self.backend.update(self.index, self.sample_objs)
        self.assertEqual(self.backend.document_count(), 3)

        self.assertEqual(self.backend.search(xapian.Query('ab'))['hits'], 1)
        self.assertEqual(self.backend.search(xapian.Query('b'))['hits'], 1)
        self.assertEqual(self.backend.search(xapian.Query('to'))['hits'], 1)
        self.assertEqual(self.backend.search(xapian.Query('one'))['hits'], 3)

    def test_field_facets(self):
        self.backend.update(self.index, self.sample_objs)
        self.assertEqual(self.backend.document_count(), 3)

        self.assertEqual(self.backend.search(xapian.Query(), facets=['name']),
                         {
                             'hits': 0,
                             'results': []
                         })
        results = self.backend.search(xapian.Query('indexed'), facets=['name'])
        self.assertEqual(results['hits'], 3)
        self.assertEqual(results['facets']['fields']['name'], [('david1', 1),
                                                               ('david2', 1),
                                                               ('david3', 1)])

        results = self.backend.search(xapian.Query('indexed'), facets=['flag'])
        self.assertEqual(results['hits'], 3)
        self.assertEqual(results['facets']['fields']['flag'], [(False, 1),
                                                               (True, 2)])

        results = self.backend.search(xapian.Query('indexed'),
                                      facets=['sites'])
        self.assertEqual(results['hits'], 3)
        self.assertEqual(results['facets']['fields']['sites'], [('1', 1),
                                                                ('3', 2),
                                                                ('2', 2),
                                                                ('4', 1),
                                                                ('6', 2),
                                                                ('9', 1)])

    def test_date_facets(self):
        self.backend.update(self.index, self.sample_objs)
        self.assertEqual(self.backend.document_count(), 3)

        self.assertEqual(
            self.backend.search(xapian.Query(),
                                date_facets={
                                    'pub_date': {
                                        'start_date':
                                        datetime.datetime(2008, 10, 26),
                                        'end_date':
                                        datetime.datetime(2009, 3, 26),
                                        'gap_by':
                                        'month'
                                    }
                                }), {
                                    'hits': 0,
                                    'results': []
                                })
        results = self.backend.search(xapian.Query('indexed'),
                                      date_facets={
                                          'pub_date': {
                                              'start_date':
                                              datetime.datetime(2008, 10, 26),
                                              'end_date':
                                              datetime.datetime(2009, 3, 26),
                                              'gap_by':
                                              'month'
                                          }
                                      })
        self.assertEqual(results['hits'], 3)
        self.assertEqual(results['facets']['dates']['pub_date'], [
            ('2009-02-26T00:00:00', 0),
            ('2009-01-26T00:00:00', 3),
            ('2008-12-26T00:00:00', 0),
            ('2008-11-26T00:00:00', 0),
            ('2008-10-26T00:00:00', 0),
        ])

        results = self.backend.search(xapian.Query('indexed'),
                                      date_facets={
                                          'pub_date': {
                                              'start_date':
                                              datetime.datetime(2009, 02, 01),
                                              'end_date':
                                              datetime.datetime(2009, 3, 15),
                                              'gap_by':
                                              'day',
                                              'gap_amount':
                                              15
                                          }
                                      })
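

# --- Illustrative sketch (not part of the original example) ---
# The shape of the ``date_facets`` argument passed to the Xapian backend
# above: one entry per date field, each with start/end datetimes, a gap unit
# and an optional gap amount. The values below are arbitrary placeholders.
import datetime

EXAMPLE_DATE_FACETS = {
    'pub_date': {
        'start_date': datetime.datetime(2008, 10, 26),
        'end_date': datetime.datetime(2009, 3, 26),
        'gap_by': 'month',      # size of each facet bucket
        # 'gap_amount': 15,     # optional multiplier, e.g. 15-day buckets
    },
}

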
class WhooshSearchBackendTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(WhooshSearchBackendTestCase, self).setUp()

        # Stow.
        temp_path = os.path.join('tmp', 'test_whoosh_query')
        self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
        settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path

        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.wmmi = WhooshMockSearchIndex()
        self.wmtmmi = WhooshMaintainTypeMockSearchIndex()
        self.ui.build(indexes=[self.wmmi])
        self.sb = connections['default'].get_backend()
        connections['default']._index = self.ui

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name,
                                  schema=self.sb.schema)
        self.sb.delete_index()

        self.sample_objs = MockModel.objects.all()

    def tearDown(self):
        if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
            shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])

        settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
        connections['default']._index = self.old_ui
        super(WhooshSearchBackendTestCase, self).tearDown()

    def whoosh_search(self, query):
        self.raw_whoosh = self.raw_whoosh.refresh()
        searcher = self.raw_whoosh.searcher()
        return searcher.search(self.parser.parse(query), limit=1000)

    def test_non_silent(self):
        bad_sb = connections['default'].backend('bad',
                                                PATH='/tmp/bad_whoosh',
                                                SILENTLY_FAIL=False)
        bad_sb.use_file_storage = False
        bad_sb.storage = 'omg.wtf.bbq'

        # With ``SILENTLY_FAIL=False`` and a bogus storage, each of these
        # calls should raise rather than fail silently.
        self.assertRaises(Exception, bad_sb.update, self.wmmi, self.sample_objs)
        self.assertRaises(Exception, bad_sb.remove, 'core.mockmodel.1')
        self.assertRaises(Exception, bad_sb.clear)
        self.assertRaises(Exception, bad_sb.search, 'foo')

    def test_update(self):
        self.sb.update(self.wmmi, self.sample_objs)

        # Check what Whoosh thinks is there.
        self.assertEqual(len(self.whoosh_search(u'*')), 23)
        self.assertEqual(
            [doc.fields()['id'] for doc in self.whoosh_search(u'*')],
            [u'core.mockmodel.%s' % i for i in xrange(1, 24)])

    def test_remove(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(self.sb.index.doc_count(), 23)

        self.sb.remove(self.sample_objs[0])
        self.assertEqual(self.sb.index.doc_count(), 22)

    def test_clear(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(self.sb.index.doc_count(), 23)

        self.sb.clear()
        self.assertEqual(self.sb.index.doc_count(), 0)

        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(self.sb.index.doc_count(), 23)

        self.sb.clear([AnotherMockModel])
        self.assertEqual(self.sb.index.doc_count(), 23)

        self.sb.clear([MockModel])
        self.assertEqual(self.sb.index.doc_count(), 0)

        self.sb.index.refresh()
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(self.sb.index.doc_count(), 23)

        self.sb.clear([AnotherMockModel, MockModel])
        self.assertEqual(self.raw_whoosh.doc_count(), 0)

    def test_search(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(len(self.whoosh_search(u'*')), 23)

        # An empty query string should always yield zero results.
        self.assertEqual(self.sb.search(u''), {'hits': 0, 'results': []})

        # A one-letter query string gets caught by the stopword filter and
        # should always yield zero results.
        self.assertEqual(self.sb.search(u'a'), {'hits': 0, 'results': []})

        # Possible AttributeError?
        # self.assertEqual(self.sb.search(u'a b'), {'hits': 0, 'results': [], 'spelling_suggestion': '', 'facets': {}})

        self.assertEqual(self.sb.search(u'*')['hits'], 23)
        self.assertEqual(
            [result.pk for result in self.sb.search(u'*')['results']],
            [u'%s' % i for i in xrange(1, 24)])

        self.assertEqual(self.sb.search(u'', highlight=True), {
            'hits': 0,
            'results': []
        })
        self.assertEqual(self.sb.search(u'index*', highlight=True)['hits'], 23)
        # DRL_FIXME: Uncomment once highlighting works.
        # self.assertEqual([result.highlighted['text'][0] for result in self.sb.search('Index*', highlight=True)['results']], ['<em>Indexed</em>!\n3', '<em>Indexed</em>!\n2', '<em>Indexed</em>!\n1'])

        self.assertEqual(self.sb.search(u'Indx')['hits'], 0)
        self.assertEqual(
            self.sb.search(u'Indx')['spelling_suggestion'], u'index')

        self.assertEqual(self.sb.search(u'', facets=['name']), {
            'hits': 0,
            'results': []
        })
        results = self.sb.search(u'Index*', facets=['name'])
        results = self.sb.search(u'index*', facets=['name'])
        self.assertEqual(results['hits'], 23)
        self.assertEqual(results['facets'], {})

        self.assertEqual(
            self.sb.search(u'',
                           date_facets={
                               'pub_date': {
                                   'start_date': date(2008, 2, 26),
                                   'end_date': date(2008, 2, 26),
                                   'gap': '/MONTH'
                               }
                           }), {
                               'hits': 0,
                               'results': []
                           })
        results = self.sb.search(u'Index*',
                                 date_facets={
                                     'pub_date': {
                                         'start_date': date(2008, 2, 26),
                                         'end_date': date(2008, 2, 26),
                                         'gap': '/MONTH'
                                     }
                                 })
        results = self.sb.search(u'index*',
                                 date_facets={
                                     'pub_date': {
                                         'start_date': date(2008, 2, 26),
                                         'end_date': date(2008, 2, 26),
                                         'gap': '/MONTH'
                                     }
                                 })
        self.assertEqual(results['hits'], 23)
        self.assertEqual(results['facets'], {})

        self.assertEqual(
            self.sb.search(u'', query_facets={'name': '[* TO e]'}), {
                'hits': 0,
                'results': []
            })
        results = self.sb.search(u'Index*', query_facets={'name': '[* TO e]'})
        results = self.sb.search(u'index*', query_facets={'name': '[* TO e]'})
        self.assertEqual(results['hits'], 23)
        self.assertEqual(results['facets'], {})

        # self.assertEqual(self.sb.search('', narrow_queries=set(['name:daniel1'])), {'hits': 0, 'results': []})
        # results = self.sb.search('Index*', narrow_queries=set(['name:daniel1']))
        # self.assertEqual(results['hits'], 1)

        # Ensure that swapping the ``result_class`` works.
        self.assertTrue(
            isinstance(
                self.sb.search(u'Index*',
                               result_class=MockSearchResult)['results'][0],
                MockSearchResult))

        # Check the use of ``limit_to_registered_models``.
        self.assertEqual(self.sb.search(u'', limit_to_registered_models=False),
                         {
                             'hits': 0,
                             'results': []
                         })
        self.assertEqual(
            self.sb.search(u'*', limit_to_registered_models=False)['hits'], 23)
        self.assertEqual([
            result.pk for result in self.sb.search(
                u'*', limit_to_registered_models=False)['results']
        ], [u'%s' % i for i in xrange(1, 24)])

        # Stow.
        old_limit_to_registered_models = getattr(
            settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
        settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = False

        self.assertEqual(self.sb.search(u''), {'hits': 0, 'results': []})
        self.assertEqual(self.sb.search(u'*')['hits'], 23)
        self.assertEqual(
            [result.pk for result in self.sb.search(u'*')['results']],
            [u'%s' % i for i in xrange(1, 24)])

        # Restore.
        settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = old_limit_to_registered_models

    def test_search_all_models(self):
        wamsi = WhooshAnotherMockSearchIndex()
        self.ui.build(indexes=[self.wmmi, wamsi])

        self.sb.update(self.wmmi, self.sample_objs)
        self.sb.update(wamsi, AnotherMockModel.objects.all())

        self.assertEqual(len(self.whoosh_search(u'*')), 25)

        self.ui.build(indexes=[self.wmmi])

    def test_more_like_this(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(len(self.whoosh_search(u'*')), 23)

        # Now supported by Whoosh (as of 1.8.4). See the ``LiveWhooshMoreLikeThisTestCase``.
        self.assertEqual(
            self.sb.more_like_this(self.sample_objs[0])['hits'], 22)

        # Make sure that swapping the ``result_class`` doesn't blow up.
        try:
            self.sb.more_like_this(self.sample_objs[0],
                                   result_class=MockSearchResult)
        except:
            self.fail()

    def test_delete_index(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertTrue(self.sb.index.doc_count() > 0)

        self.sb.delete_index()
        self.assertEqual(self.sb.index.doc_count(), 0)

    def test_order_by(self):
        self.sb.update(self.wmmi, self.sample_objs)

        results = self.sb.search(u'*', sort_by=['pub_date'])
        self.assertEqual([result.pk for result in results['results']], [
            u'1', u'3', u'2', u'4', u'5', u'6', u'7', u'8', u'9', u'10', u'11',
            u'12', u'13', u'14', u'15', u'16', u'17', u'18', u'19', u'20',
            u'21', u'22', u'23'
        ])

        results = self.sb.search(u'*', sort_by=['-pub_date'])
        self.assertEqual([result.pk for result in results['results']], [
            u'23', u'22', u'21', u'20', u'19', u'18', u'17', u'16', u'15',
            u'14', u'13', u'12', u'11', u'10', u'9', u'8', u'7', u'6', u'5',
            u'4', u'2', u'3', u'1'
        ])

        results = self.sb.search(u'*', sort_by=['id'])
        self.assertEqual([result.pk for result in results['results']], [
            u'1', u'10', u'11', u'12', u'13', u'14', u'15', u'16', u'17',
            u'18', u'19', u'2', u'20', u'21', u'22', u'23', u'3', u'4', u'5',
            u'6', u'7', u'8', u'9'
        ])

        results = self.sb.search(u'*', sort_by=['-id'])
        self.assertEqual([result.pk for result in results['results']], [
            u'9', u'8', u'7', u'6', u'5', u'4', u'3', u'23', u'22', u'21',
            u'20', u'2', u'19', u'18', u'17', u'16', u'15', u'14', u'13',
            u'12', u'11', u'10', u'1'
        ])

    def test__from_python(self):
        self.assertEqual(self.sb._from_python('abc'), u'abc')
        self.assertEqual(self.sb._from_python(1), 1)
        self.assertEqual(self.sb._from_python(2653), 2653)
        self.assertEqual(self.sb._from_python(25.5), 25.5)
        self.assertEqual(self.sb._from_python([1, 2, 3]), u'1,2,3')
        self.assertEqual(self.sb._from_python({
            'a': 1,
            'c': 3,
            'b': 2
        }), u"{'a': 1, 'c': 3, 'b': 2}")
        self.assertEqual(self.sb._from_python(datetime(2009, 5, 9, 16, 14)),
                         datetime(2009, 5, 9, 16, 14))
        self.assertEqual(self.sb._from_python(datetime(2009, 5, 9, 0, 0)),
                         datetime(2009, 5, 9, 0, 0))
        self.assertEqual(self.sb._from_python(datetime(1899, 5, 18, 0, 0)),
                         datetime(1899, 5, 18, 0, 0))
        self.assertEqual(
            self.sb._from_python(datetime(2009, 5, 18, 1, 16, 30, 250)),
            datetime(2009, 5, 18, 1, 16, 30, 250))

    def test__to_python(self):
        self.assertEqual(self.sb._to_python('abc'), 'abc')
        self.assertEqual(self.sb._to_python('1'), 1)
        self.assertEqual(self.sb._to_python('2653'), 2653)
        self.assertEqual(self.sb._to_python('25.5'), 25.5)
        self.assertEqual(self.sb._to_python('[1, 2, 3]'), [1, 2, 3])
        self.assertEqual(self.sb._to_python('{"a": 1, "b": 2, "c": 3}'), {
            'a': 1,
            'c': 3,
            'b': 2
        })
        self.assertEqual(self.sb._to_python('2009-05-09T16:14:00'),
                         datetime(2009, 5, 9, 16, 14))
        self.assertEqual(self.sb._to_python('2009-05-09T00:00:00'),
                         datetime(2009, 5, 9, 0, 0))
        self.assertEqual(self.sb._to_python(None), None)

    def test_range_queries(self):
        self.sb.update(self.wmmi, self.sample_objs)

        self.assertEqual(len(self.whoosh_search(u'[d TO]')), 23)
        self.assertEqual(len(self.whoosh_search(u'name:[d TO]')), 23)
        self.assertEqual(len(self.whoosh_search(u'Ind* AND name:[d to]')), 23)
        self.assertEqual(len(self.whoosh_search(u'Ind* AND name:[to c]')), 0)

    def test_date_queries(self):
        self.sb.update(self.wmmi, self.sample_objs)

        self.assertEqual(len(self.whoosh_search(u"pub_date:20090717003000")),
                         1)
        self.assertEqual(len(self.whoosh_search(u"pub_date:20090717000000")),
                         0)
        self.assertEqual(
            len(self.whoosh_search(u'Ind* AND pub_date:[to 20090717003000]')),
            3)

    def test_escaped_characters_queries(self):
        self.sb.update(self.wmmi, self.sample_objs)

        self.assertEqual(len(self.whoosh_search(u"Indexed\!")), 23)
        self.assertEqual(
            len(self.whoosh_search(u"http\:\/\/www\.example\.com")), 0)

    def test_build_schema(self):
        ui = UnifiedIndex()
        ui.build(indexes=[AllTypesWhooshMockSearchIndex()])

        (content_field_name,
         schema) = self.sb.build_schema(ui.all_searchfields())
        self.assertEqual(content_field_name, 'text')
        self.assertEqual(len(schema.names()), 9)
        self.assertEqual(schema.names(), [
            'django_ct', 'django_id', 'id', 'is_active', 'name', 'pub_date',
            'seen_count', 'sites', 'text'
        ])
        self.assertTrue(isinstance(schema._fields['text'], TEXT))
        self.assertTrue(isinstance(schema._fields['pub_date'], DATETIME))
        self.assertTrue(isinstance(schema._fields['seen_count'], NUMERIC))
        self.assertTrue(isinstance(schema._fields['sites'], KEYWORD))
        self.assertTrue(isinstance(schema._fields['is_active'], BOOLEAN))

    def test_verify_type(self):
        old_ui = connections['default'].get_unified_index()
        ui = UnifiedIndex()
        wmtmmi = WhooshMaintainTypeMockSearchIndex()
        ui.build(indexes=[wmtmmi])
        connections['default']._index = ui
        sb = connections['default'].get_backend()
        sb.setup()
        sb.update(wmtmmi, self.sample_objs)

        self.assertEqual(sb.search(u'*')['hits'], 23)
        self.assertEqual(
            [result.month for result in sb.search(u'*')['results']], [
                u'06', u'07', u'06', u'07', u'07', u'07', u'07', u'07', u'07',
                u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07',
                u'07', u'07', u'07', u'07', u'07'
            ])
        connections['default']._index = old_ui

    def test_writable(self):
        if getattr(settings, 'HAYSTACK_WHOOSH_STORAGE', 'file') == 'file':
            if not os.path.exists(
                    settings.HAYSTACK_CONNECTIONS['default']['PATH']):
                os.makedirs(settings.HAYSTACK_CONNECTIONS['default']['PATH'])

            os.chmod(settings.HAYSTACK_CONNECTIONS['default']['PATH'], 0400)

            try:
                self.sb.setup()
                self.fail()
            except IOError:
                # Setup correctly failed against the read-only directory.
                pass

            os.chmod(settings.HAYSTACK_CONNECTIONS['default']['PATH'], 0755)

    def test_slicing(self):
        self.sb.update(self.wmmi, self.sample_objs)

        page_1 = self.sb.search(u'*', start_offset=0, end_offset=20)
        page_2 = self.sb.search(u'*', start_offset=20, end_offset=30)
        self.assertEqual(len(page_1['results']), 20)
        self.assertEqual([result.pk for result in page_1['results']],
                         [u'%s' % i for i in xrange(1, 21)])
        self.assertEqual(len(page_2['results']), 3)
        self.assertEqual([result.pk for result in page_2['results']],
                         [u'21', u'22', u'23'])

        # This used to throw an error.
        page_0 = self.sb.search(u'*', start_offset=0, end_offset=0)
        self.assertEqual(len(page_0['results']), 1)

    def test_scoring(self):
        self.sb.update(self.wmmi, self.sample_objs)

        page_1 = self.sb.search(u'index', start_offset=0, end_offset=20)
        page_2 = self.sb.search(u'index', start_offset=20, end_offset=30)
        self.assertEqual(len(page_1['results']), 20)
        self.assertEqual(
            ["%0.2f" % result.score for result in page_1['results']], [
                '0.51', '0.51', '0.51', '0.51', '0.51', '0.51', '0.51', '0.51',
                '0.51', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40',
                '0.40', '0.40', '0.40', '0.40'
            ])
        self.assertEqual(len(page_2['results']), 3)
        self.assertEqual(
            ["%0.2f" % result.score for result in page_2['results']],
            ['0.40', '0.40', '0.40'])
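

# --- Illustrative sketch (not part of the original example) ---
# Paginating directly against a backend with start/end offsets, mirroring
# ``test_slicing`` above. Assumes a configured 'default' connection whose
# index has already been populated; the helper name is illustrative.
from haystack import connections


def backend_page(query=u'*', page=0, per_page=20):
    backend = connections['default'].get_backend()
    start = page * per_page
    # ``search`` returns a dict with 'hits' and a 'results' list of
    # SearchResult objects for the requested window.
    return backend.search(query, start_offset=start, end_offset=start + per_page)

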
class WhooshBoostBackendTestCase(TestCase):
    def setUp(self):
        super(WhooshBoostBackendTestCase, self).setUp()

        # Stow.
        temp_path = os.path.join('tmp', 'test_whoosh_query')
        self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
        settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path

        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.wmmi = WhooshBoostMockSearchIndex()
        self.ui.build(indexes=[self.wmmi])
        self.sb = connections['default'].get_backend()
        connections['default']._index = self.ui

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name,
                                  schema=self.sb.schema)
        self.sb.delete_index()
        self.sample_objs = []

        for i in range(1, 5):
            mock = AFourthMockModel()
            mock.id = i

            if i % 2:
                mock.author = 'daniel'
                mock.editor = 'david'
            else:
                mock.author = 'david'
                mock.editor = 'daniel'

            mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
            self.sample_objs.append(mock)

    def tearDown(self):
        if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
            shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])

        settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
        connections['default']._index = self.old_ui
        super(WhooshBoostBackendTestCase, self).tearDown()

    @unittest.expectedFailure
    def test_boost(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.raw_whoosh = self.raw_whoosh.refresh()
        searcher = self.raw_whoosh.searcher()
        self.assertEqual(
            len(searcher.search(self.parser.parse(u'*'), limit=1000)), 2)

        results = SearchQuerySet().filter(
            SQ(author='daniel') | SQ(editor='daniel'))

        self.assertEqual([result.id for result in results], [
            'core.afourthmockmodel.1',
            'core.afourthmockmodel.3',
        ])
        self.assertEqual(results[0].boost, 1.1)
Example #44
class LiveSolrMoreLikeThisTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveSolrMoreLikeThisTestCase, self).setUp()

        # Wipe it clean.
        clear_solr_index()

        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SolrMockModelSearchIndex()
        self.sammi = SolrAnotherMockModelSearchIndex()
        self.ui.build(indexes=[self.smmi, self.sammi])
        connections['default']._index = self.ui

        self.sqs = SearchQuerySet()

        self.smmi.update()
        self.sammi.update()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        super(LiveSolrMoreLikeThisTestCase, self).tearDown()

    def test_more_like_this(self):
        mlt = self.sqs.more_like_this(MockModel.objects.get(pk=1))
        self.assertEqual(mlt.count(), 22)
        self.assertEqual([result.pk for result in mlt], [
            '14', '6', '10', '22', '4', '5', '3', '12', '2', '19', '18', '13',
            '15', '21', '7', '23', '20', '9', '1', '2', '17', '16'
        ])
        self.assertEqual(len([result.pk for result in mlt]), 22)

        alt_mlt = self.sqs.filter(name='daniel3').more_like_this(
            MockModel.objects.get(pk=3))
        self.assertEqual(alt_mlt.count(), 8)
        self.assertEqual([result.pk for result in alt_mlt],
                         ['17', '16', '19', '23', '22', '13', '1', '2'])
        self.assertEqual(len([result.pk for result in alt_mlt]), 8)

        alt_mlt_with_models = self.sqs.models(MockModel).more_like_this(
            MockModel.objects.get(pk=1))
        self.assertEqual(alt_mlt_with_models.count(), 20)
        self.assertEqual([result.pk for result in alt_mlt_with_models], [
            '14', '6', '10', '22', '4', '5', '3', '12', '2', '19', '18', '13',
            '15', '21', '7', '23', '20', '9', '17', '16'
        ])
        self.assertEqual(len([result.pk for result in alt_mlt_with_models]),
                         20)

        if hasattr(MockModel.objects, 'defer'):
            # Make sure MLT works with deferred bits.
            mi = MockModel.objects.defer('foo').get(pk=1)
            self.assertEqual(mi._deferred, True)
            deferred = self.sqs.models(MockModel).more_like_this(mi)
            self.assertEqual(deferred.count(), 0)
            self.assertEqual([result.pk for result in deferred], [])
            self.assertEqual(len([result.pk for result in deferred]), 0)

        # Ensure that swapping the ``result_class`` works.
        self.assertTrue(
            isinstance(
                self.sqs.result_class(MockSearchResult).more_like_this(
                    MockModel.objects.get(pk=1))[0], MockSearchResult))
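

# --- Illustrative sketch (not part of the original example) ---
# Finding documents similar to a given model instance with
# ``more_like_this``, optionally restricted to a single model, as exercised
# above. Assumes a Solr backend with MLT enabled and an up-to-date index;
# the helper name is illustrative.
from haystack.query import SearchQuerySet


def similar_to(instance, model=None):
    sqs = SearchQuerySet()
    if model is not None:
        # Restrict similarity matches to documents indexed for this model.
        sqs = sqs.models(model)
    return sqs.more_like_this(instance)
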
Example #45
class LiveWhooshSearchQuerySetTestCase(WhooshTestCase):
    def setUp(self):
        super().setUp()

        # Stow.
        self.old_ui = connections["whoosh"].get_unified_index()
        self.ui = UnifiedIndex()
        self.wmmi = WhooshMockSearchIndex()
        self.ui.build(indexes=[self.wmmi])
        self.sb = connections["whoosh"].get_backend()
        connections["whoosh"]._index = self.ui

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
        self.sb.delete_index()

        self.sample_objs = []

        for i in range(1, 4):
            mock = MockModel()
            mock.id = i
            mock.author = "daniel%s" % i
            mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
            self.sample_objs.append(mock)

        self.sq = connections["whoosh"].get_query()
        self.sqs = SearchQuerySet("whoosh")

    def tearDown(self):
        connections["whoosh"]._index = self.old_ui
        super().tearDown()

    def test_various_searchquerysets(self):
        self.sb.update(self.wmmi, self.sample_objs)

        sqs = self.sqs.filter(content="Index")
        self.assertEqual(sqs.query.build_query(), "(Index)")
        self.assertEqual(len(sqs), 3)

        sqs = self.sqs.auto_query("Indexed!")
        self.assertEqual(sqs.query.build_query(), "('Indexed!')")
        self.assertEqual(len(sqs), 3)

        sqs = self.sqs.auto_query("Indexed!").filter(pub_date__lte=date(2009, 8, 31))
        self.assertEqual(
            sqs.query.build_query(), "(('Indexed!') AND pub_date:([to 20090831000000]))"
        )
        self.assertEqual(len(sqs), 3)

        sqs = self.sqs.auto_query("Indexed!").filter(pub_date__lte=date(2009, 2, 23))
        self.assertEqual(
            sqs.query.build_query(), "(('Indexed!') AND pub_date:([to 20090223000000]))"
        )
        self.assertEqual(len(sqs), 2)

        sqs = (
            self.sqs.auto_query("Indexed!")
            .filter(pub_date__lte=date(2009, 2, 25))
            .filter(django_id__in=[1, 2])
            .exclude(name="daniel1")
        )
        self.assertEqual(
            sqs.query.build_query(),
            "(('Indexed!') AND pub_date:([to 20090225000000]) AND django_id:(1 OR 2) AND NOT (name:(daniel1)))",
        )
        self.assertEqual(len(sqs), 1)

        sqs = self.sqs.auto_query("re-inker")
        self.assertEqual(sqs.query.build_query(), "('re-inker')")
        self.assertEqual(len(sqs), 0)

        sqs = self.sqs.auto_query("0.7 wire")
        self.assertEqual(sqs.query.build_query(), "('0.7' wire)")
        self.assertEqual(len(sqs), 0)

        sqs = self.sqs.auto_query("daler-rowney pearlescent 'bell bronze'")
        self.assertEqual(
            sqs.query.build_query(), "('daler-rowney' pearlescent 'bell bronze')"
        )
        self.assertEqual(len(sqs), 0)

        sqs = self.sqs.models(MockModel)
        self.assertEqual(sqs.query.build_query(), "*")
        self.assertEqual(len(sqs), 3)

    def test_all_regression(self):
        sqs = SearchQuerySet("whoosh")
        self.assertEqual([result.pk for result in sqs], [])

        self.sb.update(self.wmmi, self.sample_objs)
        self.assertTrue(self.sb.index.doc_count() > 0)

        sqs = SearchQuerySet("whoosh")
        self.assertEqual(len(sqs), 3)
        self.assertEqual(sorted([result.pk for result in sqs]), ["1", "2", "3"])

        try:
            sqs = repr(SearchQuerySet("whoosh"))
        except:
            self.fail()

    def test_regression_space_query(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertTrue(self.sb.index.doc_count() > 0)

        sqs = SearchQuerySet("whoosh").auto_query(" ")
        self.assertEqual(len(sqs), 3)
        sqs = SearchQuerySet("whoosh").filter(content=" ")
        self.assertEqual(len(sqs), 0)

    def test_iter(self):
        self.sb.update(self.wmmi, self.sample_objs)

        reset_search_queries()
        self.assertEqual(len(connections["whoosh"].queries), 0)
        sqs = self.sqs.auto_query("Indexed!")
        results = [int(result.pk) for result in iter(sqs)]
        self.assertEqual(sorted(results), [1, 2, 3])
        self.assertEqual(len(connections["whoosh"].queries), 1)

    def test_slice(self):
        self.sb.update(self.wmmi, self.sample_objs)

        reset_search_queries()
        self.assertEqual(len(connections["whoosh"].queries), 0)
        results = self.sqs.auto_query("Indexed!")
        self.assertEqual(sorted([int(result.pk) for result in results[1:3]]), [1, 2])
        self.assertEqual(len(connections["whoosh"].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections["whoosh"].queries), 0)
        results = self.sqs.auto_query("Indexed!")
        self.assertEqual(int(results[0].pk), 1)
        self.assertEqual(len(connections["whoosh"].queries), 1)

    def test_values_slicing(self):
        self.sb.update(self.wmmi, self.sample_objs)

        reset_search_queries()
        self.assertEqual(len(connections["whoosh"].queries), 0)

        # TODO: this would be a good candidate for refactoring into a TestCase subclass shared across backends

        # The values will come back as strings because Haystack doesn't assume PKs are integers.
        # We'll prepare this set once since we're going to query the same results in multiple ways:
        expected_pks = ["3", "2", "1"]

        results = self.sqs.all().order_by("pub_date").values("pk")
        self.assertListEqual([i["pk"] for i in results[1:11]], expected_pks)

        results = self.sqs.all().order_by("pub_date").values_list("pk")
        self.assertListEqual([i[0] for i in results[1:11]], expected_pks)

        results = self.sqs.all().order_by("pub_date").values_list("pk", flat=True)
        self.assertListEqual(results[1:11], expected_pks)

        self.assertEqual(len(connections["whoosh"].queries), 3)

    def test_manual_iter(self):
        self.sb.update(self.wmmi, self.sample_objs)
        results = self.sqs.auto_query("Indexed!")

        reset_search_queries()
        self.assertEqual(len(connections["whoosh"].queries), 0)
        results = [int(result.pk) for result in results._manual_iter()]
        self.assertEqual(sorted(results), [1, 2, 3])
        self.assertEqual(len(connections["whoosh"].queries), 1)

    def test_fill_cache(self):
        self.sb.update(self.wmmi, self.sample_objs)

        reset_search_queries()
        self.assertEqual(len(connections["whoosh"].queries), 0)
        results = self.sqs.auto_query("Indexed!")
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections["whoosh"].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([result for result in results._result_cache if result is not None]), 3
        )
        self.assertEqual(len(connections["whoosh"].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([result for result in results._result_cache if result is not None]), 3
        )
        self.assertEqual(len(connections["whoosh"].queries), 2)

    def test_cache_is_full(self):
        self.sb.update(self.wmmi, self.sample_objs)

        reset_search_queries()
        self.assertEqual(len(connections["whoosh"].queries), 0)
        self.assertEqual(self.sqs._cache_is_full(), False)
        results = self.sqs.auto_query("Indexed!")
        result_list = [i for i in iter(results)]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections["whoosh"].queries), 1)

    def test_count(self):
        more_samples = []

        for i in range(1, 50):
            mock = MockModel()
            mock.id = i
            mock.author = "daniel%s" % i
            mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
            more_samples.append(mock)

        self.sb.update(self.wmmi, more_samples)

        reset_search_queries()
        self.assertEqual(len(connections["whoosh"].queries), 0)
        results = self.sqs.all()
        self.assertEqual(len(results), 49)
        self.assertEqual(results._cache_is_full(), False)
        self.assertEqual(len(connections["whoosh"].queries), 1)

    def test_query_generation(self):
        sqs = self.sqs.filter(
            SQ(content=AutoQuery("hello world")) | SQ(title=AutoQuery("hello world"))
        )
        self.assertEqual(
            sqs.query.build_query(), "((hello world) OR title:(hello world))"
        )

    def test_result_class(self):
        self.sb.update(self.wmmi, self.sample_objs)

        # Assert that we're defaulting to ``SearchResult``.
        sqs = self.sqs.all()
        self.assertTrue(isinstance(sqs[0], SearchResult))

        # Custom class.
        sqs = self.sqs.result_class(MockSearchResult).all()
        self.assertTrue(isinstance(sqs[0], MockSearchResult))

        # Reset to default.
        sqs = self.sqs.result_class(None).all()
        self.assertTrue(isinstance(sqs[0], SearchResult))
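
# A minimal sketch of the result_class() pattern exercised above, assuming
# MockSearchResult is simply a SearchResult subclass: any subclass can be
# swapped in per query, and passing None restores the default.
from haystack.models import SearchResult
from haystack.query import SearchQuerySet


class AnnotatedResult(SearchResult):  # hypothetical subclass for illustration
    def short_label(self):
        return "%s: %s" % (self.model_name, self.pk)


annotated = SearchQuerySet().result_class(AnnotatedResult).auto_query("indexed")
default_again = annotated.result_class(None)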
Example #46
class BaseSearchQueryTestCase(TestCase):
    fixtures = ['base_data.json', 'bulk_data.json']

    def setUp(self):
        super(BaseSearchQueryTestCase, self).setUp()
        self.bsq = BaseSearchQuery()

    def test_get_count(self):
        self.bsq.add_filter(SQ(foo='bar'))
        self.assertRaises(NotImplementedError, self.bsq.get_count)

    def test_build_query(self):
        self.bsq.add_filter(SQ(foo='bar'))
        self.assertRaises(NotImplementedError, self.bsq.build_query)

    def test_add_filter(self):
        self.assertEqual(len(self.bsq.query_filter), 0)

        self.bsq.add_filter(SQ(foo='bar'))
        self.assertEqual(len(self.bsq.query_filter), 1)

        self.bsq.add_filter(SQ(foo__lt='10'))

        self.bsq.add_filter(~SQ(claris='moof'))

        self.bsq.add_filter(SQ(claris='moof'), use_or=True)

        self.assertEqual(
            repr(self.bsq.query_filter),
            '<SQ: OR ((foo__content=bar AND foo__lt=10 AND NOT (claris__content=moof)) OR claris__content=moof)>'
        )

        self.bsq.add_filter(SQ(claris='moof'))

        self.assertEqual(
            repr(self.bsq.query_filter),
            '<SQ: AND (((foo__content=bar AND foo__lt=10 AND NOT (claris__content=moof)) OR claris__content=moof) AND claris__content=moof)>'
        )

        self.bsq.add_filter(SQ(claris='wtf mate'))

        self.assertEqual(
            repr(self.bsq.query_filter),
            '<SQ: AND (((foo__content=bar AND foo__lt=10 AND NOT (claris__content=moof)) OR claris__content=moof) AND claris__content=moof AND claris__content=wtf mate)>'
        )

    def test_add_order_by(self):
        self.assertEqual(len(self.bsq.order_by), 0)

        self.bsq.add_order_by('foo')
        self.assertEqual(len(self.bsq.order_by), 1)

    def test_clear_order_by(self):
        self.bsq.add_order_by('foo')
        self.assertEqual(len(self.bsq.order_by), 1)

        self.bsq.clear_order_by()
        self.assertEqual(len(self.bsq.order_by), 0)

    def test_add_model(self):
        self.assertEqual(len(self.bsq.models), 0)
        self.assertRaises(AttributeError, self.bsq.add_model, object)
        self.assertEqual(len(self.bsq.models), 0)

        self.bsq.add_model(MockModel)
        self.assertEqual(len(self.bsq.models), 1)

        self.bsq.add_model(AnotherMockModel)
        self.assertEqual(len(self.bsq.models), 2)

    def test_set_limits(self):
        self.assertEqual(self.bsq.start_offset, 0)
        self.assertEqual(self.bsq.end_offset, None)

        self.bsq.set_limits(10, 50)
        self.assertEqual(self.bsq.start_offset, 10)
        self.assertEqual(self.bsq.end_offset, 50)

    def test_clear_limits(self):
        self.bsq.set_limits(10, 50)
        self.assertEqual(self.bsq.start_offset, 10)
        self.assertEqual(self.bsq.end_offset, 50)

        self.bsq.clear_limits()
        self.assertEqual(self.bsq.start_offset, 0)
        self.assertEqual(self.bsq.end_offset, None)

    def test_add_boost(self):
        self.assertEqual(self.bsq.boost, {})

        self.bsq.add_boost('foo', 10)
        self.assertEqual(self.bsq.boost, {'foo': 10})

    def test_add_highlight(self):
        self.assertEqual(self.bsq.highlight, False)

        self.bsq.add_highlight()
        self.assertEqual(self.bsq.highlight, True)

    def test_more_like_this(self):
        mock = MockModel()
        mock.id = 1
        msq = MockSearchQuery()
        msq.backend = MockSearchBackend('mlt')
        ui = connections['default'].get_unified_index()
        bmmsi = BasicMockModelSearchIndex()
        ui.build(indexes=[bmmsi])
        bmmsi.update()
        msq.more_like_this(mock)

        self.assertEqual(msq.get_count(), 23)
        self.assertEqual(int(msq.get_results()[0].pk),
                         MOCK_SEARCH_RESULTS[0].pk)

    def test_add_field_facet(self):
        self.bsq.add_field_facet('foo')
        self.assertEqual(self.bsq.facets, {'foo': {}})

        self.bsq.add_field_facet('bar')
        self.assertEqual(self.bsq.facets, {'foo': {}, 'bar': {}})

    def test_add_date_facet(self):
        self.bsq.add_date_facet('foo',
                                start_date=datetime.date(2009, 2, 25),
                                end_date=datetime.date(2009, 3, 25),
                                gap_by='day')
        self.assertEqual(
            self.bsq.date_facets, {
                'foo': {
                    'gap_by': 'day',
                    'start_date': datetime.date(2009, 2, 25),
                    'end_date': datetime.date(2009, 3, 25),
                    'gap_amount': 1
                }
            })

        self.bsq.add_date_facet('bar',
                                start_date=datetime.date(2008, 1, 1),
                                end_date=datetime.date(2009, 12, 1),
                                gap_by='month')
        self.assertEqual(
            self.bsq.date_facets, {
                'foo': {
                    'gap_by': 'day',
                    'start_date': datetime.date(2009, 2, 25),
                    'end_date': datetime.date(2009, 3, 25),
                    'gap_amount': 1
                },
                'bar': {
                    'gap_by': 'month',
                    'start_date': datetime.date(2008, 1, 1),
                    'end_date': datetime.date(2009, 12, 1),
                    'gap_amount': 1
                }
            })

    def test_add_query_facet(self):
        self.bsq.add_query_facet('foo', 'bar')
        self.assertEqual(self.bsq.query_facets, [('foo', 'bar')])

        self.bsq.add_query_facet('moof', 'baz')
        self.assertEqual(self.bsq.query_facets, [('foo', 'bar'),
                                                 ('moof', 'baz')])

        self.bsq.add_query_facet('foo', 'baz')
        self.assertEqual(self.bsq.query_facets,
                         [('foo', 'bar'), ('moof', 'baz'), ('foo', 'baz')])

    def test_add_stats(self):
        self.bsq.add_stats_query('foo', ['bar'])
        self.assertEqual(self.bsq.stats, {'foo': ['bar']})

        self.bsq.add_stats_query('moof', ['bar', 'baz'])
        self.assertEqual(self.bsq.stats, {
            'foo': ['bar'],
            'moof': ['bar', 'baz']
        })

    def test_add_narrow_query(self):
        self.bsq.add_narrow_query('foo:bar')
        self.assertEqual(self.bsq.narrow_queries, set(['foo:bar']))

        self.bsq.add_narrow_query('moof:baz')
        self.assertEqual(self.bsq.narrow_queries, set(['foo:bar', 'moof:baz']))

    def test_set_result_class(self):
        # Assert that we're defaulting to ``SearchResult``.
        self.assertTrue(issubclass(self.bsq.result_class, SearchResult))

        # Custom class.
        class IttyBittyResult(object):
            pass

        self.bsq.set_result_class(IttyBittyResult)
        self.assertTrue(issubclass(self.bsq.result_class, IttyBittyResult))

        # Reset to default.
        self.bsq.set_result_class(None)
        self.assertTrue(issubclass(self.bsq.result_class, SearchResult))

    def test_run(self):
        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.bammsi = BasicAnotherMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.bammsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        msq = connections['default'].get_query()
        self.assertEqual(len(msq.get_results()), 23)
        self.assertEqual(int(msq.get_results()[0].pk),
                         MOCK_SEARCH_RESULTS[0].pk)

        # Restore.
        connections['default']._index = self.old_unified_index

    def test_clone(self):
        self.bsq.add_filter(SQ(foo='bar'))
        self.bsq.add_filter(SQ(foo__lt='10'))
        self.bsq.add_filter(~SQ(claris='moof'))
        self.bsq.add_filter(SQ(claris='moof'), use_or=True)
        self.bsq.add_order_by('foo')
        self.bsq.add_model(MockModel)
        self.bsq.add_boost('foo', 2)
        self.bsq.add_highlight()
        self.bsq.add_field_facet('foo')
        self.bsq.add_date_facet('foo',
                                start_date=datetime.date(2009, 1, 1),
                                end_date=datetime.date(2009, 1, 31),
                                gap_by='day')
        self.bsq.add_query_facet('foo', 'bar')
        self.bsq.add_stats_query('foo', 'bar')
        self.bsq.add_narrow_query('foo:bar')

        clone = self.bsq._clone()
        self.assertTrue(isinstance(clone, BaseSearchQuery))
        self.assertEqual(len(clone.query_filter), 2)
        self.assertEqual(len(clone.order_by), 1)
        self.assertEqual(len(clone.models), 1)
        self.assertEqual(len(clone.boost), 1)
        self.assertEqual(clone.highlight, True)
        self.assertEqual(len(clone.facets), 1)
        self.assertEqual(len(clone.date_facets), 1)
        self.assertEqual(len(clone.query_facets), 1)
        self.assertEqual(len(clone.narrow_queries), 1)
        self.assertEqual(clone.start_offset, self.bsq.start_offset)
        self.assertEqual(clone.end_offset, self.bsq.end_offset)
        self.assertEqual(clone.backend.__class__, self.bsq.backend.__class__)

    def test_log_query(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        self.bmmsi.update()

        with self.settings(DEBUG=False):
            msq = connections['default'].get_query()
            self.assertEqual(len(msq.get_results()), 23)
            self.assertEqual(len(connections['default'].queries), 0)

        with self.settings(DEBUG=True):
            # Redefine it to clear out the cached results.
            msq2 = connections['default'].get_query()
            self.assertEqual(len(msq2.get_results()), 23)
            self.assertEqual(len(connections['default'].queries), 1)
            self.assertEqual(connections['default'].queries[0]['query_string'],
                             '')

            msq3 = connections['default'].get_query()
            msq3.add_filter(SQ(foo='bar'))
            len(msq3.get_results())
            self.assertEqual(len(connections['default'].queries), 2)
            self.assertEqual(connections['default'].queries[0]['query_string'],
                             '')
            self.assertEqual(connections['default'].queries[1]['query_string'],
                             '')

        # Restore.
        connections['default']._index = self.old_unified_index
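
# A short sketch of the query logging exercised in test_log_query above: when
# DEBUG is True, Haystack appends one dict per executed query to the
# connection's ``queries`` list. Assumes the standard ``haystack.connections``
# registry; the ``query_string`` key matches the assertions above.
from django.conf import settings

from haystack import connections
from haystack.query import SearchQuerySet

if settings.DEBUG:
    list(SearchQuerySet().auto_query("hello world"))  # force the query to run
    for entry in connections["default"].queries:
        print(entry["query_string"])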
Example #47
class WhooshSearchBackendTestCase(WhooshTestCase):
    fixtures = ["bulk_data.json"]

    def setUp(self):
        super().setUp()

        self.old_ui = connections["whoosh"].get_unified_index()
        self.ui = UnifiedIndex()
        self.wmmi = WhooshMockSearchIndex()
        self.wmmidni = WhooshMockSearchIndexWithSkipDocument()
        self.wmtmmi = WhooshMaintainTypeMockSearchIndex()
        self.ui.build(indexes=[self.wmmi])
        self.sb = connections["whoosh"].get_backend()
        connections["whoosh"]._index = self.ui

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
        self.sb.delete_index()

        self.sample_objs = MockModel.objects.all()

    def tearDown(self):
        connections["whoosh"]._index = self.old_ui
        super().tearDown()

    def whoosh_search(self, query):
        self.raw_whoosh = self.raw_whoosh.refresh()
        searcher = self.raw_whoosh.searcher()
        return searcher.search(self.parser.parse(query), limit=1000)

    def test_non_silent(self):
        bad_sb = connections["whoosh"].backend(
            "bad", PATH="/tmp/bad_whoosh", SILENTLY_FAIL=False
        )
        bad_sb.use_file_storage = False
        bad_sb.storage = "omg.wtf.bbq"

        # With SILENTLY_FAIL=False, every operation should raise rather than
        # swallow the error.
        with self.assertRaises(Exception):
            bad_sb.update(self.wmmi, self.sample_objs)

        with self.assertRaises(Exception):
            bad_sb.remove("core.mockmodel.1")

        with self.assertRaises(Exception):
            bad_sb.clear()

        with self.assertRaises(Exception):
            bad_sb.search("foo")

    def test_update(self):
        self.sb.update(self.wmmi, self.sample_objs)

        # Check what Whoosh thinks is there.
        self.assertEqual(len(self.whoosh_search("*")), 23)
        self.assertEqual(
            [doc.fields()["id"] for doc in self.whoosh_search("*")],
            ["core.mockmodel.%s" % i for i in range(1, 24)],
        )

    def test_update_with_SkipDocument_raised(self):
        self.sb.update(self.wmmidni, self.sample_objs)

        # Check what Whoosh thinks is there. (A sketch of an index that raises
        # SkipDocument follows this example.)
        res = self.whoosh_search("*")
        self.assertEqual(len(res), 14)
        ids = [1, 2, 5, 6, 7, 8, 9, 11, 12, 14, 15, 18, 20, 21]
        self.assertListEqual(
            [doc.fields()["id"] for doc in res], ["core.mockmodel.%s" % i for i in ids]
        )

    def test_remove(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(self.sb.index.doc_count(), 23)

        self.sb.remove(self.sample_objs[0])
        self.assertEqual(self.sb.index.doc_count(), 22)

    def test_clear(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(self.sb.index.doc_count(), 23)

        self.sb.clear()
        self.assertEqual(self.sb.index.doc_count(), 0)

        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(self.sb.index.doc_count(), 23)

        self.sb.clear([AnotherMockModel])
        self.assertEqual(self.sb.index.doc_count(), 23)

        self.sb.clear([MockModel])
        self.assertEqual(self.sb.index.doc_count(), 0)

        self.sb.index.refresh()
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(self.sb.index.doc_count(), 23)

        self.sb.clear([AnotherMockModel, MockModel])
        self.assertEqual(self.raw_whoosh.doc_count(), 0)

    def test_search(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(len(self.whoosh_search("*")), 23)

        # No query string should always yield zero results.
        self.assertEqual(self.sb.search(""), {"hits": 0, "results": []})

        # A one-letter query string is swallowed by the stopword filter and
        # should always yield zero results.
        self.assertEqual(self.sb.search("a"), {"hits": 0, "results": []})

        # Possible AttributeError?
        # self.assertEqual(self.sb.search(u'a b'), {'hits': 0, 'results': [], 'spelling_suggestion': '', 'facets': {}})

        self.assertEqual(self.sb.search("*")["hits"], 23)
        self.assertEqual(
            [result.pk for result in self.sb.search("*")["results"]],
            ["%s" % i for i in range(1, 24)],
        )

        self.assertEqual(self.sb.search("Indexe")["hits"], 23)
        self.assertEqual(self.sb.search("Indexe")["spelling_suggestion"], "indexed")

        self.assertEqual(
            self.sb.search("", facets=["name"]), {"hits": 0, "results": []}
        )
        results = self.sb.search("Index*", facets=["name"])
        results = self.sb.search("index*", facets=["name"])
        self.assertEqual(results["hits"], 23)
        self.assertEqual(results["facets"], {})

        self.assertEqual(
            self.sb.search(
                "",
                date_facets={
                    "pub_date": {
                        "start_date": date(2008, 2, 26),
                        "end_date": date(2008, 2, 26),
                        "gap": "/MONTH",
                    }
                },
            ),
            {"hits": 0, "results": []},
        )
        results = self.sb.search(
            "Index*",
            date_facets={
                "pub_date": {
                    "start_date": date(2008, 2, 26),
                    "end_date": date(2008, 2, 26),
                    "gap": "/MONTH",
                }
            },
        )
        results = self.sb.search(
            "index*",
            date_facets={
                "pub_date": {
                    "start_date": date(2008, 2, 26),
                    "end_date": date(2008, 2, 26),
                    "gap": "/MONTH",
                }
            },
        )
        self.assertEqual(results["hits"], 23)
        self.assertEqual(results["facets"], {})

        self.assertEqual(
            self.sb.search("", query_facets={"name": "[* TO e]"}),
            {"hits": 0, "results": []},
        )
        results = self.sb.search("Index*", query_facets={"name": "[* TO e]"})
        results = self.sb.search("index*", query_facets={"name": "[* TO e]"})
        self.assertEqual(results["hits"], 23)
        self.assertEqual(results["facets"], {})

        # self.assertEqual(self.sb.search('', narrow_queries=set(['name:daniel1'])), {'hits': 0, 'results': []})
        # results = self.sb.search('Index*', narrow_queries=set(['name:daniel1']))
        # self.assertEqual(results['hits'], 1)

        # Ensure that swapping the ``result_class`` works.
        self.assertTrue(
            isinstance(
                self.sb.search("Index*", result_class=MockSearchResult)["results"][0],
                MockSearchResult,
            )
        )

        # Check the use of ``limit_to_registered_models``.
        self.assertEqual(
            self.sb.search("", limit_to_registered_models=False),
            {"hits": 0, "results": []},
        )
        self.assertEqual(
            self.sb.search("*", limit_to_registered_models=False)["hits"], 23
        )
        self.assertEqual(
            [
                result.pk
                for result in self.sb.search("*", limit_to_registered_models=False)[
                    "results"
                ]
            ],
            ["%s" % i for i in range(1, 24)],
        )

        # Stow.
        old_limit_to_registered_models = getattr(
            settings, "HAYSTACK_LIMIT_TO_REGISTERED_MODELS", True
        )
        settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = False

        self.assertEqual(self.sb.search(""), {"hits": 0, "results": []})
        self.assertEqual(self.sb.search("*")["hits"], 23)
        self.assertEqual(
            [result.pk for result in self.sb.search("*")["results"]],
            ["%s" % i for i in range(1, 24)],
        )

        # Restore.
        settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = old_limit_to_registered_models

    def test_highlight(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(len(self.whoosh_search("*")), 23)

        self.assertEqual(self.sb.search("", highlight=True), {"hits": 0, "results": []})
        self.assertEqual(self.sb.search("index*", highlight=True)["hits"], 23)

        query = self.sb.search("Index*", highlight=True)["results"]
        result = [result.highlighted["text"][0] for result in query]

        self.assertEqual(result, ["<em>Indexed</em>!\n%d" % i for i in range(1, 24)])
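
    # Usage sketch (not part of the original test): the same highlighting
    # behaviour through the public SearchQuerySet API. Assumes the standard
    # highlight() call; the "text" key matches the document field name above.
    def _highlight_usage_sketch(self):
        results = SearchQuerySet(using="whoosh").filter(content="index").highlight()
        return [result.highlighted["text"][0] for result in results]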

    def test_search_all_models(self):
        wamsi = WhooshAnotherMockSearchIndex()
        self.ui.build(indexes=[self.wmmi, wamsi])

        self.sb.update(self.wmmi, self.sample_objs)
        self.sb.update(wamsi, AnotherMockModel.objects.all())

        self.assertEqual(len(self.whoosh_search("*")), 25)

        self.ui.build(indexes=[self.wmmi])

    def test_more_like_this(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertEqual(len(self.whoosh_search("*")), 23)

        # Now supported by Whoosh (as of 1.8.4). See the ``LiveWhooshMoreLikeThisTestCase``.
        self.assertEqual(self.sb.more_like_this(self.sample_objs[0])["hits"], 22)

        # Make sure that swapping the ``result_class`` doesn't blow up.
        try:
            self.sb.more_like_this(self.sample_objs[0], result_class=MockSearchResult)
        except Exception:
            self.fail("more_like_this should accept a custom result_class")

    def test_delete_index(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertTrue(self.sb.index.doc_count() > 0)

        self.sb.delete_index()
        self.assertEqual(self.sb.index.doc_count(), 0)

    def test_order_by(self):
        self.sb.update(self.wmmi, self.sample_objs)

        results = self.sb.search("*", sort_by=["pub_date"])
        self.assertEqual(
            [result.pk for result in results["results"]],
            [
                "1",
                "3",
                "2",
                "4",
                "5",
                "6",
                "7",
                "8",
                "9",
                "10",
                "11",
                "12",
                "13",
                "14",
                "15",
                "16",
                "17",
                "18",
                "19",
                "20",
                "21",
                "22",
                "23",
            ],
        )

        results = self.sb.search("*", sort_by=["-pub_date"])
        self.assertEqual(
            [result.pk for result in results["results"]],
            [
                "23",
                "22",
                "21",
                "20",
                "19",
                "18",
                "17",
                "16",
                "15",
                "14",
                "13",
                "12",
                "11",
                "10",
                "9",
                "8",
                "7",
                "6",
                "5",
                "4",
                "2",
                "3",
                "1",
            ],
        )

        results = self.sb.search("*", sort_by=["id"])
        self.assertEqual(
            [result.pk for result in results["results"]],
            [
                "1",
                "10",
                "11",
                "12",
                "13",
                "14",
                "15",
                "16",
                "17",
                "18",
                "19",
                "2",
                "20",
                "21",
                "22",
                "23",
                "3",
                "4",
                "5",
                "6",
                "7",
                "8",
                "9",
            ],
        )

        results = self.sb.search("*", sort_by=["-id"])
        self.assertEqual(
            [result.pk for result in results["results"]],
            [
                "9",
                "8",
                "7",
                "6",
                "5",
                "4",
                "3",
                "23",
                "22",
                "21",
                "20",
                "2",
                "19",
                "18",
                "17",
                "16",
                "15",
                "14",
                "13",
                "12",
                "11",
                "10",
                "1",
            ],
        )

        results = self.sb.search("*", sort_by=["-pub_date", "-id"])
        self.assertEqual(
            [result.pk for result in results["results"]],
            [
                "23",
                "22",
                "21",
                "20",
                "19",
                "18",
                "17",
                "16",
                "15",
                "14",
                "13",
                "12",
                "11",
                "10",
                "9",
                "8",
                "7",
                "6",
                "5",
                "4",
                "2",
                "3",
                "1",
            ],
        )

        self.assertRaises(
            SearchBackendError, self.sb.search, "*", sort_by=["-pub_date", "id"]
        )

    def test__from_python(self):
        self.assertEqual(self.sb._from_python("abc"), "abc")
        self.assertEqual(self.sb._from_python(1), 1)
        self.assertEqual(self.sb._from_python(2653), 2653)
        self.assertEqual(self.sb._from_python(25.5), 25.5)
        self.assertEqual(self.sb._from_python([1, 2, 3]), "1,2,3")
        self.assertTrue("a': 1" in self.sb._from_python({"a": 1, "c": 3, "b": 2}))
        self.assertEqual(
            self.sb._from_python(datetime(2009, 5, 9, 16, 14)),
            datetime(2009, 5, 9, 16, 14),
        )
        self.assertEqual(
            self.sb._from_python(datetime(2009, 5, 9, 0, 0)), datetime(2009, 5, 9, 0, 0)
        )
        self.assertEqual(
            self.sb._from_python(datetime(1899, 5, 18, 0, 0)),
            datetime(1899, 5, 18, 0, 0),
        )
        self.assertEqual(
            self.sb._from_python(datetime(2009, 5, 18, 1, 16, 30, 250)),
            datetime(2009, 5, 18, 1, 16, 30, 250),
        )

    def test__to_python(self):
        self.assertEqual(self.sb._to_python("abc"), "abc")
        self.assertEqual(self.sb._to_python("1"), 1)
        self.assertEqual(self.sb._to_python("2653"), 2653)
        self.assertEqual(self.sb._to_python("25.5"), 25.5)
        self.assertEqual(self.sb._to_python("[1, 2, 3]"), [1, 2, 3])
        self.assertEqual(
            self.sb._to_python('{"a": 1, "b": 2, "c": 3}'), {"a": 1, "c": 3, "b": 2}
        )
        self.assertEqual(
            self.sb._to_python("2009-05-09T16:14:00"), datetime(2009, 5, 9, 16, 14)
        )
        self.assertEqual(
            self.sb._to_python("2009-05-09T00:00:00"), datetime(2009, 5, 9, 0, 0)
        )
        self.assertEqual(self.sb._to_python(None), None)

    def test_range_queries(self):
        self.sb.update(self.wmmi, self.sample_objs)

        self.assertEqual(len(self.whoosh_search("[d TO]")), 23)
        self.assertEqual(len(self.whoosh_search("name:[d TO]")), 23)
        self.assertEqual(len(self.whoosh_search("Ind* AND name:[d to]")), 23)
        self.assertEqual(len(self.whoosh_search("Ind* AND name:[to c]")), 0)

    def test_date_queries(self):
        self.sb.update(self.wmmi, self.sample_objs)

        self.assertEqual(len(self.whoosh_search("pub_date:20090717003000")), 1)
        self.assertEqual(len(self.whoosh_search("pub_date:20090717000000")), 0)
        self.assertEqual(
            len(self.whoosh_search("Ind* AND pub_date:[to 20090717003000]")), 3
        )

    def test_escaped_characters_queries(self):
        self.sb.update(self.wmmi, self.sample_objs)

        self.assertEqual(len(self.whoosh_search("Indexed\!")), 23)
        self.assertEqual(len(self.whoosh_search("http\:\/\/www\.example\.com")), 0)

    def test_build_schema(self):
        ui = UnifiedIndex()
        ui.build(indexes=[AllTypesWhooshMockSearchIndex()])

        (content_field_name, schema) = self.sb.build_schema(ui.all_searchfields())
        self.assertEqual(content_field_name, "text")

        schema_names = set(schema.names())
        required_schema = {
            "django_ct",
            "django_id",
            "id",
            "is_active",
            "name",
            "pub_date",
            "seen_count",
            "sites",
            "text",
        }
        self.assertTrue(required_schema.issubset(schema_names))

        self.assertIsInstance(schema._fields["text"], TEXT)
        self.assertIsInstance(schema._fields["pub_date"], DATETIME)
        self.assertIsInstance(schema._fields["seen_count"], NUMERIC)
        self.assertIsInstance(schema._fields["sites"], KEYWORD)
        self.assertIsInstance(schema._fields["is_active"], BOOLEAN)

    def test_verify_type(self):
        old_ui = connections["whoosh"].get_unified_index()
        ui = UnifiedIndex()
        wmtmmi = WhooshMaintainTypeMockSearchIndex()
        ui.build(indexes=[wmtmmi])
        connections["whoosh"]._index = ui
        sb = connections["whoosh"].get_backend()
        sb.setup()
        sb.update(wmtmmi, self.sample_objs)

        self.assertEqual(sb.search("*")["hits"], 23)
        self.assertEqual(
            [result.month for result in sb.search("*")["results"]],
            [
                "06",
                "07",
                "06",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
                "07",
            ],
        )
        connections["whoosh"]._index = old_ui

    @unittest.skipIf(
        settings.HAYSTACK_CONNECTIONS["whoosh"].get("STORAGE") != "file",
        "testing writability requires Whoosh to use STORAGE=file",
    )
    def test_writable(self):
        if not os.path.exists(settings.HAYSTACK_CONNECTIONS["whoosh"]["PATH"]):
            os.makedirs(settings.HAYSTACK_CONNECTIONS["whoosh"]["PATH"])

        os.chmod(settings.HAYSTACK_CONNECTIONS["whoosh"]["PATH"], 0o400)

        try:
            self.sb.setup()
            self.fail()
        except IOError:
            # Expected: the index directory is read-only, so setup() must fail.
            pass

        os.chmod(settings.HAYSTACK_CONNECTIONS["whoosh"]["PATH"], 0o755)

    def test_slicing(self):
        self.sb.update(self.wmmi, self.sample_objs)

        page_1 = self.sb.search("*", start_offset=0, end_offset=20)
        page_2 = self.sb.search("*", start_offset=20, end_offset=30)
        self.assertEqual(len(page_1["results"]), 20)
        self.assertEqual(
            [result.pk for result in page_1["results"]],
            ["%s" % i for i in range(1, 21)],
        )
        self.assertEqual(len(page_2["results"]), 3)
        self.assertEqual(
            [result.pk for result in page_2["results"]], ["21", "22", "23"]
        )

        # This used to throw an error.
        page_0 = self.sb.search("*", start_offset=0, end_offset=0)
        self.assertEqual(len(page_0["results"]), 1)

    @unittest.expectedFailure
    def test_scoring(self):
        self.sb.update(self.wmmi, self.sample_objs)

        page_1 = self.sb.search("index", start_offset=0, end_offset=20)
        page_2 = self.sb.search("index", start_offset=20, end_offset=30)
        self.assertEqual(len(page_1["results"]), 20)
        self.assertEqual(
            ["%0.2f" % result.score for result in page_1["results"]],
            [
                "0.51",
                "0.51",
                "0.51",
                "0.51",
                "0.51",
                "0.51",
                "0.51",
                "0.51",
                "0.51",
                "0.40",
                "0.40",
                "0.40",
                "0.40",
                "0.40",
                "0.40",
                "0.40",
                "0.40",
                "0.40",
                "0.40",
                "0.40",
            ],
        )
        self.assertEqual(len(page_2["results"]), 3)
        self.assertEqual(
            ["%0.2f" % result.score for result in page_2["results"]],
            ["0.40", "0.40", "0.40"],
        )
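
# Hedged sketch of the SkipDocument behaviour exercised in
# test_update_with_SkipDocument_raised above: raising SkipDocument from a
# prepare method tells the backend to leave that object out of the index, which
# is why only 14 of the 23 documents are indexed there. The model and field
# names below are assumptions for illustration.
from haystack import indexes
from haystack.exceptions import SkipDocument

from myapp.models import Note  # hypothetical model


class NoteIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, model_attr="body")

    def get_model(self):
        return Note

    def prepare_text(self, obj):
        if not obj.body.strip():  # arbitrary skip condition for the sketch
            raise SkipDocument
        return obj.body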
Example #48
class SearchQuerySetTestCase(TestCase):
    fixtures = ['base_data.json', 'bulk_data.json']

    def setUp(self):
        super(SearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.cpkmmsi = CharPKMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.cpkmmsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.msqs = SearchQuerySet()

        # Stow.
        reset_search_queries()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_unified_index
        super(SearchQuerySetTestCase, self).tearDown()

    def test_len(self):
        self.assertEqual(len(self.msqs), 23)

    def test_repr(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertRegexpMatches(
            repr(self.msqs),
            r'^<SearchQuerySet: query=<test_haystack.mocks.MockSearchQuery object'
            r' at 0x[0-9A-Fa-f]+>, using=None>$')

    def test_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        msqs = self.msqs.all()
        results = [int(res.pk) for res in iter(msqs)]
        self.assertEqual(results, [res.pk for res in MOCK_SEARCH_RESULTS[:23]])
        self.assertEqual(len(connections['default'].queries), 3)

    def test_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual([int(res.pk) for res in results[1:11]],
                         [res.pk for res in MOCK_SEARCH_RESULTS[1:11]])
        self.assertEqual(len(connections['default'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual(int(results[22].pk), MOCK_SEARCH_RESULTS[22].pk)
        self.assertEqual(len(connections['default'].queries), 1)

    def test_manual_iter(self):
        results = self.msqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        check = [result.pk for result in results._manual_iter()]
        self.assertEqual(check, [
            u'1', u'2', u'3', u'4', u'5', u'6', u'7', u'8', u'9', u'10', u'11',
            u'12', u'13', u'14', u'15', u'16', u'17', u'18', u'19', u'20',
            u'21', u'22', u'23'
        ])

        self.assertEqual(len(connections['default'].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        # This will hang indefinitely if broken.
        old_ui = self.ui
        self.ui.build(indexes=[self.cpkmmsi])
        connections['default']._index = self.ui
        self.cpkmmsi.update()

        results = self.msqs.all()
        loaded = [result.pk for result in results._manual_iter()]
        self.assertEqual(loaded, [u'sometext', u'1234'])
        self.assertEqual(len(connections['default'].queries), 1)

        connections['default']._index = old_ui

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.msqs._cache_is_full(), False)
        results = self.msqs.all()
        fire_the_iterator_and_fill_cache = list(results)
        self.assertEqual(23, len(fire_the_iterator_and_fill_cache))
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 4)

    def test_all(self):
        sqs = self.msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))

    def test_filter(self):
        sqs = self.msqs.filter(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_exclude(self):
        sqs = self.msqs.exclude(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_order_by(self):
        sqs = self.msqs.order_by('foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue('foo' in sqs.query.order_by)

    def test_models(self):
        # Stow.
        old_unified_index = connections['default']._index
        ui = UnifiedIndex()
        bmmsi = BasicMockModelSearchIndex()
        bammsi = BasicAnotherMockModelSearchIndex()
        ui.build(indexes=[bmmsi, bammsi])
        connections['default']._index = ui

        msqs = SearchQuerySet()

        sqs = msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 0)

        sqs = msqs.models(MockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

        sqs = msqs.models(MockModel, AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 2)

        # This will produce a warning.
        ui.build(indexes=[bmmsi])
        sqs = msqs.models(AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

    def test_result_class(self):
        sqs = self.msqs.all()
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

        # Custom class.
        class IttyBittyResult(object):
            pass

        sqs = self.msqs.result_class(IttyBittyResult)
        self.assertTrue(issubclass(sqs.query.result_class, IttyBittyResult))

        # Reset to default.
        sqs = self.msqs.result_class(None)
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

    def test_boost(self):
        sqs = self.msqs.boost('foo', 10)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.boost.keys()), 1)

    def test_highlight(self):
        sqs = self.msqs.highlight()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.highlight, True)

    def test_spelling_override(self):
        sqs = self.msqs.filter(content='not the spellchecking query')
        self.assertEqual(sqs.query.spelling_query, None)
        sqs = self.msqs.set_spelling_query('override')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.spelling_query, 'override')

    def test_spelling_suggestions(self):
        # Test the case where spelling support is disabled.
        sqs = self.msqs.filter(content='Indx')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.spelling_suggestion(), None)
        self.assertEqual(sqs.spelling_suggestion('indexy'), None)

    def test_raw_search(self):
        self.assertEqual(len(self.msqs.raw_search('foo')), 23)
        self.assertEqual(
            len(
                self.msqs.raw_search(
                    '(content__exact:hello AND content__exact:world)')), 23)

    def test_load_all(self):
        # Models with character primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = CharPKMockSearchBackend('charpk')
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # If nothing is handled, you get nothing.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        ui.build(indexes=[])
        connections['default']._index = ui

        sqs = self.msqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        connections['default']._index = old_ui

        # For full tests, see the solr_backend.

    def test_load_all_read_queryset(self):
        # Stow.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        gafmmsi = GhettoAFifthMockModelSearchIndex()
        ui.build(indexes=[gafmmsi])
        connections['default']._index = ui
        gafmmsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # The deleted result isn't returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 1)

        # Register a SearchIndex with a read_queryset that returns deleted items
        # (a sketch of such an index follows this example).
        rqstsi = TextReadQuerySetTestSearchIndex()
        ui.build(indexes=[rqstsi])
        rqstsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # Both the deleted and not deleted items are returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # Restore.
        connections['default']._index = old_ui

    def test_auto_query(self):
        sqs = self.msqs.auto_query('test search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__content=test search -stuff>')

        sqs = self.msqs.auto_query('test "my thing" search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__content=test "my thing" search -stuff>')

        sqs = self.msqs.auto_query(
            'test "my thing" search \'moar quotes\' -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__content=test "my thing" search \'moar quotes\' -stuff>'
        )

        sqs = self.msqs.auto_query(
            'test "my thing" search \'moar quotes\' "foo -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__content=test "my thing" search \'moar quotes\' "foo -stuff>'
        )

        sqs = self.msqs.auto_query('test - stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         "<SQ: AND content__content=test - stuff>")

        # Ensure bits in exact matches get escaped properly as well.
        sqs = self.msqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__content="pants:rule">')

        # Now with a different fieldname
        sqs = self.msqs.auto_query('test search -stuff', fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         "<SQ: AND title__content=test search -stuff>")

        sqs = self.msqs.auto_query('test "my thing" search -stuff',
                                   fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND title__content=test "my thing" search -stuff>')

    def test_count(self):
        self.assertEqual(self.msqs.count(), 23)

    def test_facet_counts(self):
        self.assertEqual(self.msqs.facet_counts(), {})

    def test_best_match(self):
        self.assertTrue(isinstance(self.msqs.best_match(), SearchResult))

    def test_latest(self):
        self.assertTrue(isinstance(self.msqs.latest('pub_date'), SearchResult))

    def test_more_like_this(self):
        mock = MockModel()
        mock.id = 1

        self.assertEqual(len(self.msqs.more_like_this(mock)), 23)

    def test_facets(self):
        sqs = self.msqs.facet('foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.facets), 1)

        sqs2 = self.msqs.facet('foo').facet('bar')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.facets), 2)

    def test_date_facets(self):
        try:
            sqs = self.msqs.date_facet('foo',
                                       start_date=datetime.date(2008, 2, 25),
                                       end_date=datetime.date(2009, 2, 25),
                                       gap_by='smarblaph')
            self.fail()
        except FacetingError as e:
            self.assertEqual(
                str(e),
                "The gap_by ('smarblaph') must be one of the following: year, month, day, hour, minute, second."
            )

        sqs = self.msqs.date_facet('foo',
                                   start_date=datetime.date(2008, 2, 25),
                                   end_date=datetime.date(2009, 2, 25),
                                   gap_by='month')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.date_facets), 1)

        sqs2 = self.msqs.date_facet('foo',
                                    start_date=datetime.date(2008, 2, 25),
                                    end_date=datetime.date(2009, 2, 25),
                                    gap_by='month').date_facet(
                                        'bar',
                                        start_date=datetime.date(2007, 2, 25),
                                        end_date=datetime.date(2009, 2, 25),
                                        gap_by='year')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.date_facets), 2)

    def test_query_facets(self):
        sqs = self.msqs.query_facet('foo', '[bar TO *]')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_facets), 1)

        sqs2 = self.msqs.query_facet('foo', '[bar TO *]').query_facet(
            'bar', '[100 TO 499]')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.query_facets), 2)

        # Test multiple query facets on a single field
        sqs3 = self.msqs.query_facet('foo', '[bar TO *]').query_facet(
            'bar', '[100 TO 499]').query_facet('foo', '[1000 TO 1499]')
        self.assertTrue(isinstance(sqs3, SearchQuerySet))
        self.assertEqual(len(sqs3.query.query_facets), 3)

    def test_stats(self):
        sqs = self.msqs.stats_facet('foo', 'bar')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.stats), 1)

        sqs2 = self.msqs.stats_facet('foo', 'bar').stats_facet('foo', 'baz')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.stats), 1)

        sqs3 = self.msqs.stats_facet('foo', 'bar').stats_facet('moof', 'baz')
        self.assertTrue(isinstance(sqs3, SearchQuerySet))
        self.assertEqual(len(sqs3.query.stats), 2)

    def test_narrow(self):
        sqs = self.msqs.narrow('foo:moof')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.narrow_queries), 1)

    def test_clone(self):
        results = self.msqs.filter(foo='bar', foo__lt='10')

        clone = results._clone()
        self.assertTrue(isinstance(clone, SearchQuerySet))
        self.assertEqual(str(clone.query), str(results.query))
        self.assertEqual(clone._result_cache, [])
        self.assertEqual(clone._result_count, None)
        self.assertEqual(clone._cache_full, False)
        self.assertEqual(clone._using, results._using)

    def test_using(self):
        sqs = SearchQuerySet(using='default')
        self.assertNotEqual(sqs.query, None)
        self.assertEqual(sqs.query._using, 'default')

    def test_chaining(self):
        sqs = self.msqs.filter(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

        # A second instance should inherit none of the changes from above.
        sqs = self.msqs.filter(content='bar')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_none(self):
        sqs = self.msqs.none()
        self.assertTrue(isinstance(sqs, EmptySearchQuerySet))
        self.assertEqual(len(sqs), 0)

    def test___and__(self):
        sqs1 = self.msqs.filter(content='foo')
        sqs2 = self.msqs.filter(content='bar')
        sqs = sqs1 & sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)

    def test___or__(self):
        sqs1 = self.msqs.filter(content='foo')
        sqs2 = self.msqs.filter(content='bar')
        sqs = sqs1 | sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)

    def test_and_or(self):
        """
        Combining AND queries with OR should give
            AND(OR(a, b), OR(c, d))
        """
        sqs1 = self.msqs.filter(content='foo').filter(content='oof')
        sqs2 = self.msqs.filter(content='bar').filter(content='rab')
        sqs = sqs1 | sqs2

        self.assertEqual(sqs.query.query_filter.connector, 'OR')
        self.assertEqual(repr(sqs.query.query_filter.children[0]),
                         repr(sqs1.query.query_filter))
        self.assertEqual(repr(sqs.query.query_filter.children[1]),
                         repr(sqs2.query.query_filter))

    def test_or_and(self):
        """
        Combining OR queries with AND should give
            OR(AND(a, b), AND(c, d))
        """
        sqs1 = self.msqs.filter(content='foo').filter_or(content='oof')
        sqs2 = self.msqs.filter(content='bar').filter_or(content='rab')
        sqs = sqs1 & sqs2

        self.assertEqual(sqs.query.query_filter.connector, 'AND')
        self.assertEqual(repr(sqs.query.query_filter.children[0]),
                         repr(sqs1.query.query_filter))
        self.assertEqual(repr(sqs.query.query_filter.children[1]),
                         repr(sqs2.query.query_filter))
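
# Hedged sketch of the read_queryset() hook exercised in
# test_load_all_read_queryset above: load_all() hydrates results through
# index.read_queryset() when it is defined, so an index can build the index
# from one queryset but attach model instances from another. The model and
# field names below are assumptions for illustration.
from haystack import indexes

from myapp.models import Article  # hypothetical model


class ArticleIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, model_attr="title")

    def get_model(self):
        return Article

    def index_queryset(self, using=None):
        # Only live rows are indexed...
        return self.get_model().objects.filter(deleted=False)

    def read_queryset(self, using=None):
        # ...but load_all() may still attach soft-deleted rows to results.
        return self.get_model().objects.all()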
Example #49
class ManagementCommandTestCase(TestCase):
    fixtures = ['base_data.json', 'bulk_data.json']

    def setUp(self):
        super(ManagementCommandTestCase, self).setUp()
        self.solr = pysolr.Solr(settings.HAYSTACK_CONNECTIONS['solr']['URL'])

        # Stow.
        self.old_ui = connections['solr'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SolrMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['solr']._index = self.ui

    def tearDown(self):
        connections['solr']._index = self.old_ui
        super(ManagementCommandTestCase, self).tearDown()

    def test_basic_commands(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', verbosity=0, commit=False)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)

        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('rebuild_index', interactive=False, verbosity=0, commit=False)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('rebuild_index', interactive=False, verbosity=0, commit=True)
        self.assertEqual(self.solr.search('*:*').hits, 23)

        call_command('clear_index', interactive=False, verbosity=0, commit=False)
        self.assertEqual(self.solr.search('*:*').hits, 23)

    def test_remove(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)

        # Remove several instances, two of which will fit in the same block:
        MockModel.objects.get(pk=1).delete()
        MockModel.objects.get(pk=2).delete()
        MockModel.objects.get(pk=8).delete()
        self.assertEqual(self.solr.search('*:*').hits, 23)

        # Plain ``update_index`` doesn't fix it.
        call_command('update_index', verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)

        # Remove without commit also doesn't affect queries:
        call_command('update_index', remove=True, verbosity=0, batchsize=2, commit=False)
        self.assertEqual(self.solr.search('*:*').hits, 23)

        # … but remove with commit does:
        call_command('update_index', remove=True, verbosity=0, batchsize=2)
        self.assertEqual(self.solr.search('*:*').hits, 20)

    def test_age(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        start = datetime.datetime.now() - datetime.timedelta(hours=3)
        end = datetime.datetime.now()

        mock = MockModel.objects.get(pk=1)
        mock.pub_date = datetime.datetime.now() - datetime.timedelta(hours=2)
        mock.save()
        self.assertEqual(MockModel.objects.filter(pub_date__range=(start, end)).count(), 1)

        call_command('update_index', age=3, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 1)

    def test_age_with_time_zones(self):
        """Haystack should use django.utils.timezone.now"""
        from django.utils.timezone import now as django_now
        from haystack.management.commands.update_index import now as haystack_now

        self.assertIs(haystack_now, django_now,
                      msg="update_index should use django.utils.timezone.now")

        with patch("haystack.management.commands.update_index.now") as m:
            m.return_value = django_now()
            self.test_age()
            assert m.called

    def test_dates(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        start = datetime.datetime.now() - datetime.timedelta(hours=5, minutes=30)
        end = datetime.datetime.now() - datetime.timedelta(hours=2)

        mock_1 = MockModel.objects.get(pk=1)
        mock_1.pub_date = datetime.datetime.now() - datetime.timedelta(hours=5, minutes=1)
        mock_1.save()
        mock_2 = MockModel.objects.get(pk=2)
        mock_2.pub_date = datetime.datetime.now() - datetime.timedelta(hours=3)
        mock_2.save()
        mock_3 = MockModel.objects.get(pk=3)
        mock_3.pub_date = datetime.datetime.now() - datetime.timedelta(hours=1)
        mock_3.save()
        self.assertEqual(MockModel.objects.filter(pub_date__range=(start, end)).count(), 2)

        call_command('update_index', start_date=start.isoformat(), end_date=end.isoformat(), verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 2)

    def test_multiprocessing(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        # TODO: Watch the output, make sure there are multiple pids.
        call_command('update_index', verbosity=2, workers=2, batchsize=5)
        self.assertEqual(self.solr.search('*:*').hits, 23)

        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', verbosity=2, workers=2, batchsize=5, commit=False)
        self.assertEqual(self.solr.search('*:*').hits, 0)
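        # With ``workers=2`` the command fans batches of ``batchsize`` objects
        # out to separate worker processes; as above, ``commit=False`` keeps
        # the updates invisible to searches until a later commit.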

    def test_build_schema_wrong_backend(self):
        settings.HAYSTACK_CONNECTIONS['whoosh'] = {'ENGINE': 'haystack.backends.whoosh_backend.WhooshEngine',
                                                   'PATH': mkdtemp(prefix='dummy-path-'), }

        connections['whoosh']._index = self.ui
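        # ``build_solr_schema`` is Solr-specific, so pointing it at the Whoosh
        # connection configured above should raise ``ImproperlyConfigured``.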
        self.assertRaises(ImproperlyConfigured, call_command, 'build_solr_schema', using='whoosh', interactive=False)
Example #50
class AppModelManagementCommandTestCase(TestCase):
    fixtures = ['base_data', 'bulk_data.json']

    def setUp(self):
        super(AppModelManagementCommandTestCase, self).setUp()
        self.solr = pysolr.Solr(settings.HAYSTACK_CONNECTIONS['solr']['URL'])

        # Stow.
        self.old_ui = connections['solr'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SolrMockSearchIndex()
        self.smtmi = SolrMockTagSearchIndex()
        self.ui.build(indexes=[self.smmi, self.smtmi])
        connections['solr']._index = self.ui

    def tearDown(self):
        connections['solr']._index = self.old_ui
        super(AppModelManagementCommandTestCase, self).tearDown()

    def test_app_model_variations(self):
        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 25)

        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', 'core', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 25)

        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        with self.assertRaises(ImproperlyConfigured):
            call_command('update_index', 'fake_app_thats_not_there', interactive=False)

        call_command('update_index', 'core', 'discovery', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 25)

        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', 'discovery', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', 'core.MockModel', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 23)

        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', 'core.MockTag', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 2)

        call_command('clear_index', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 0)

        call_command('update_index', 'core.MockTag', 'core.MockModel', interactive=False, verbosity=0)
        self.assertEqual(self.solr.search('*:*').hits, 25)
Example #51
class SearchModelAdminTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(SearchModelAdminTestCase, self).setUp()

        # With the models setup, you get the proper bits.
        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        smmsi = SolrMockModelSearchIndex()
        self.ui.build(indexes=[smmsi])
        connections['default']._index = self.ui

        # Wipe it clean.
        clear_solr_index()

        # Force indexing of the content.
        smmsi.update()

        superuser = User.objects.create_superuser(
            username='******',
            password='******',
            email='*****@*****.**',
        )

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(SearchModelAdminTestCase, self).tearDown()

    def test_usage(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        self.assertEqual(
            self.client.login(username='******', password='******'), True)

        # First, non-search behavior.
        resp = self.client.get('/admin/core/mockmodel/')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(resp.context['cl'].full_result_count, 23)

        # Then search behavior.
        resp = self.client.get('/admin/core/mockmodel/',
                               data={'q': 'Haystack'})
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(connections['default'].queries), 3)
        self.assertEqual(resp.context['cl'].full_result_count, 23)
        # Ensure they aren't search results.
        self.assertEqual(
            isinstance(resp.context['cl'].result_list[0], MockModel), True)

        result_pks = [i.pk for i in resp.context['cl'].result_list]
        self.assertIn(5, result_pks)

        # Make sure only changelist is affected.
        resp = self.client.get('/admin/core/mockmodel/1/')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(connections['default'].queries), 3)
        self.assertEqual(resp.context['original'].id, 1)
Example #52
class LiveWhooshMoreLikeThisTestCase(WhooshTestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveWhooshMoreLikeThisTestCase, self).setUp()

        # Stow.
        self.old_ui = connections['whoosh'].get_unified_index()
        self.ui = UnifiedIndex()
        self.wmmi = WhooshMockSearchIndex()
        self.wamsi = WhooshAnotherMockSearchIndex()
        self.ui.build(indexes=[self.wmmi, self.wamsi])
        self.sb = connections['whoosh'].get_backend()
        connections['whoosh']._index = self.ui

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name,
                                  schema=self.sb.schema)
        self.sb.delete_index()

        self.wmmi.update()
        self.wamsi.update()

        self.sqs = SearchQuerySet('whoosh')

    def tearDown(self):
        connections['whoosh']._index = self.old_ui
        super(LiveWhooshMoreLikeThisTestCase, self).tearDown()

    # We expect failure here because, despite not changing the code, Whoosh
    # 2.5.1 returns incorrect counts/results. Huzzah.
    @unittest.expectedFailure
    def test_more_like_this(self):
        mlt = self.sqs.more_like_this(MockModel.objects.get(pk=22))
        self.assertEqual(mlt.count(), 22)
        self.assertEqual(
            sorted([result.pk for result in mlt]),
            sorted([
                u'9', u'8', u'7', u'6', u'5', u'4', u'3', u'2', u'1', u'21',
                u'20', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12',
                u'11', u'10', u'23'
            ]))
        self.assertEqual(len([result.pk for result in mlt]), 22)

        alt_mlt = self.sqs.filter(name='daniel3').more_like_this(
            MockModel.objects.get(pk=13))
        self.assertEqual(alt_mlt.count(), 8)
        self.assertEqual(
            sorted([result.pk for result in alt_mlt]),
            sorted([u'4', u'3', u'22', u'19', u'17', u'16', u'10', u'23']))
        self.assertEqual(len([result.pk for result in alt_mlt]), 8)

        alt_mlt_with_models = self.sqs.models(MockModel).more_like_this(
            MockModel.objects.get(pk=11))
        self.assertEqual(alt_mlt_with_models.count(), 22)
        self.assertEqual(
            sorted([result.pk for result in alt_mlt_with_models]),
            sorted([
                u'9', u'8', u'7', u'6', u'5', u'4', u'3', u'2', u'1', u'22',
                u'21', u'20', u'19', u'18', u'17', u'16', u'15', u'14', u'13',
                u'12', u'10', u'23'
            ]))
        self.assertEqual(len([result.pk for result in alt_mlt_with_models]),
                         22)

        if hasattr(MockModel.objects, 'defer'):
            # Make sure MLT works with deferred bits.
            mi = MockModel.objects.defer('foo').get(pk=21)
            self.assertEqual(mi._deferred, True)
            deferred = self.sqs.models(MockModel).more_like_this(mi)
            self.assertEqual(deferred.count(), 0)
            self.assertEqual([result.pk for result in deferred], [])
            self.assertEqual(len([result.pk for result in deferred]), 0)

        # Ensure that swapping the ``result_class`` works.
        self.assertTrue(
            isinstance(
                self.sqs.result_class(MockSearchResult).more_like_this(
                    MockModel.objects.get(pk=21))[0], MockSearchResult))
Example #53
class SearchModelAdminTestCase(TestCase):
    fixtures = ["base_data.json", "bulk_data.json"]

    def setUp(self):
        super().setUp()

        # With the models setup, you get the proper bits.
        # Stow.
        self.old_ui = connections["solr"].get_unified_index()
        self.ui = UnifiedIndex()
        smmsi = SolrMockModelSearchIndex()
        self.ui.build(indexes=[smmsi])
        connections["solr"]._index = self.ui

        # Wipe it clean.
        clear_solr_index()

        # Force indexing of the content.
        smmsi.update(using="solr")

        superuser = User.objects.create_superuser(username="******",
                                                  password="******",
                                                  email="*****@*****.**")

    def tearDown(self):
        # Restore.
        connections["solr"]._index = self.old_ui
        super().tearDown()

    def test_usage(self):
        reset_search_queries()
        self.assertEqual(len(connections["solr"].queries), 0)

        self.assertEqual(
            self.client.login(username="******", password="******"), True)

        # First, non-search behavior.
        resp = self.client.get("/admin/core/mockmodel/")
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(connections["solr"].queries), 0)
        self.assertEqual(resp.context["cl"].full_result_count, 23)

        # Then search behavior.
        resp = self.client.get("/admin/core/mockmodel/",
                               data={"q": "Haystack"})
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(connections["solr"].queries), 3)
        self.assertEqual(resp.context["cl"].full_result_count, 23)
        # Ensure they aren't search results.
        self.assertEqual(
            isinstance(resp.context["cl"].result_list[0], MockModel), True)

        result_pks = [i.pk for i in resp.context["cl"].result_list]
        self.assertIn(5, result_pks)

        # Make sure only changelist is affected.
        resp = self.client.get(
            reverse("admin:core_mockmodel_change", args=(1, )))
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.context["original"].id, 1)
        self.assertTemplateUsed(resp, "admin/change_form.html")

        # The Solr query count should be unchanged:
        self.assertEqual(len(connections["solr"].queries), 3)
Example #54
class LiveWhooshSearchQuerySetTestCase(WhooshTestCase):
    def setUp(self):
        super(LiveWhooshSearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_ui = connections['whoosh'].get_unified_index()
        self.ui = UnifiedIndex()
        self.wmmi = WhooshMockSearchIndex()
        self.ui.build(indexes=[self.wmmi])
        self.sb = connections['whoosh'].get_backend()
        connections['whoosh']._index = self.ui

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name,
                                  schema=self.sb.schema)
        self.sb.delete_index()

        self.sample_objs = []

        for i in range(1, 4):
            mock = MockModel()
            mock.id = i
            mock.author = 'daniel%s' % i
            mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
            self.sample_objs.append(mock)

        self.sq = connections['whoosh'].get_query()
        self.sqs = SearchQuerySet('whoosh')

    def tearDown(self):
        connections['whoosh']._index = self.old_ui
        super(LiveWhooshSearchQuerySetTestCase, self).tearDown()

    def test_various_searchquerysets(self):
        self.sb.update(self.wmmi, self.sample_objs)

        sqs = self.sqs.filter(content='Index')
        self.assertEqual(sqs.query.build_query(), u'(Index)')
        self.assertEqual(len(sqs), 3)

        sqs = self.sqs.auto_query('Indexed!')
        self.assertEqual(sqs.query.build_query(), u"('Indexed!')")
        self.assertEqual(len(sqs), 3)

        sqs = self.sqs.auto_query('Indexed!').filter(
            pub_date__lte=date(2009, 8, 31))
        self.assertEqual(sqs.query.build_query(),
                         u"(('Indexed!') AND pub_date:([to 20090831000000]))")
        self.assertEqual(len(sqs), 3)

        sqs = self.sqs.auto_query('Indexed!').filter(
            pub_date__lte=date(2009, 2, 23))
        self.assertEqual(sqs.query.build_query(),
                         u"(('Indexed!') AND pub_date:([to 20090223000000]))")
        self.assertEqual(len(sqs), 2)

        sqs = self.sqs.auto_query('Indexed!').filter(
            pub_date__lte=date(2009, 2, 25)).filter(
                django_id__in=[1, 2]).exclude(name='daniel1')
        self.assertEqual(
            sqs.query.build_query(),
            u'((\'Indexed!\') AND pub_date:([to 20090225000000]) AND django_id:(1 OR 2) AND NOT (name:(daniel1)))'
        )
        self.assertEqual(len(sqs), 1)

        sqs = self.sqs.auto_query('re-inker')
        self.assertEqual(sqs.query.build_query(), u"('re-inker')")
        self.assertEqual(len(sqs), 0)

        sqs = self.sqs.auto_query('0.7 wire')
        self.assertEqual(sqs.query.build_query(), u"('0.7' wire)")
        self.assertEqual(len(sqs), 0)

        sqs = self.sqs.auto_query("daler-rowney pearlescent 'bell bronze'")
        self.assertEqual(sqs.query.build_query(),
                         u"('daler-rowney' pearlescent 'bell bronze')")
        self.assertEqual(len(sqs), 0)

        sqs = self.sqs.models(MockModel)
        self.assertEqual(sqs.query.build_query(), u'*')
        self.assertEqual(len(sqs), 3)
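        # Note how ``auto_query`` keeps punctuated terms ('Indexed!',
        # 're-inker', '0.7') quoted as single tokens rather than letting the
        # backend split them.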

    def test_all_regression(self):
        sqs = SearchQuerySet('whoosh')
        self.assertEqual([result.pk for result in sqs], [])

        self.sb.update(self.wmmi, self.sample_objs)
        self.assertTrue(self.sb.index.doc_count() > 0)

        sqs = SearchQuerySet('whoosh')
        self.assertEqual(len(sqs), 3)
        self.assertEqual(sorted([result.pk for result in sqs]),
                         [u'1', u'2', u'3'])

        try:
            repr(SearchQuerySet('whoosh'))
        except Exception:
            self.fail("repr() on a SearchQuerySet should not raise")

    def test_regression_space_query(self):
        self.sb.update(self.wmmi, self.sample_objs)
        self.assertTrue(self.sb.index.doc_count() > 0)

        sqs = SearchQuerySet('whoosh').auto_query(" ")
        self.assertEqual(len(sqs), 3)
        sqs = SearchQuerySet('whoosh').filter(content=" ")
        self.assertEqual(len(sqs), 0)

    def test_iter(self):
        self.sb.update(self.wmmi, self.sample_objs)

        reset_search_queries()
        self.assertEqual(len(connections['whoosh'].queries), 0)
        sqs = self.sqs.auto_query('Indexed!')
        results = [int(result.pk) for result in sqs]
        self.assertEqual(sorted(results), [1, 2, 3])
        self.assertEqual(len(connections['whoosh'].queries), 1)

    def test_slice(self):
        self.sb.update(self.wmmi, self.sample_objs)

        reset_search_queries()
        self.assertEqual(len(connections['whoosh'].queries), 0)
        results = self.sqs.auto_query('Indexed!')
        self.assertEqual(sorted([int(result.pk) for result in results[1:3]]),
                         [1, 2])
        self.assertEqual(len(connections['whoosh'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['whoosh'].queries), 0)
        results = self.sqs.auto_query('Indexed!')
        self.assertEqual(int(results[0].pk), 1)
        self.assertEqual(len(connections['whoosh'].queries), 1)

    def test_manual_iter(self):
        self.sb.update(self.wmmi, self.sample_objs)
        results = self.sqs.auto_query('Indexed!')

        reset_search_queries()
        self.assertEqual(len(connections['whoosh'].queries), 0)
        results = [int(result.pk) for result in results._manual_iter()]
        self.assertEqual(sorted(results), [1, 2, 3])
        self.assertEqual(len(connections['whoosh'].queries), 1)

    def test_fill_cache(self):
        self.sb.update(self.wmmi, self.sample_objs)

        reset_search_queries()
        self.assertEqual(len(connections['whoosh'].queries), 0)
        results = self.sqs.auto_query('Indexed!')
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['whoosh'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 3)
        self.assertEqual(len(connections['whoosh'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 3)
        self.assertEqual(len(connections['whoosh'].queries), 2)
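        # Only three documents exist, so the second pass cannot add results,
        # but it still counts as an additional backend query.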

    def test_cache_is_full(self):
        self.sb.update(self.wmmi, self.sample_objs)

        reset_search_queries()
        self.assertEqual(len(connections['whoosh'].queries), 0)
        self.assertEqual(self.sqs._cache_is_full(), False)
        results = self.sqs.auto_query('Indexed!')
        [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['whoosh'].queries), 1)

    def test_count(self):
        more_samples = []

        for i in range(1, 50):
            mock = MockModel()
            mock.id = i
            mock.author = 'daniel%s' % i
            mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
            more_samples.append(mock)

        self.sb.update(self.wmmi, more_samples)

        reset_search_queries()
        self.assertEqual(len(connections['whoosh'].queries), 0)
        results = self.sqs.all()
        self.assertEqual(len(results), 49)
        self.assertEqual(results._cache_is_full(), False)
        self.assertEqual(len(connections['whoosh'].queries), 1)

    def test_query_generation(self):
        sqs = self.sqs.filter(
            SQ(content=AutoQuery("hello world"))
            | SQ(title=AutoQuery("hello world")))
        self.assertEqual(sqs.query.build_query(),
                         u"((hello world) OR title:(hello world))")

    def test_result_class(self):
        self.sb.update(self.wmmi, self.sample_objs)

        # Assert that we're defaulting to ``SearchResult``.
        sqs = self.sqs.all()
        self.assertTrue(isinstance(sqs[0], SearchResult))

        # Custom class.
        sqs = self.sqs.result_class(MockSearchResult).all()
        self.assertTrue(isinstance(sqs[0], MockSearchResult))

        # Reset to default.
        sqs = self.sqs.result_class(None).all()
        self.assertTrue(isinstance(sqs[0], SearchResult))
Example #55
class SearchIndexTestCase(TestCase):
    def setUp(self):
        super(SearchIndexTestCase, self).setUp()
        self.sb = connections['default'].get_backend()
        self.mi = GoodMockSearchIndex()
        self.cmi = GoodCustomMockSearchIndex()
        self.cnmi = GoodNullableMockSearchIndex()
        self.gfmsi = GoodFacetedMockSearchIndex()

        # Fake the unified index.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.ui.build(indexes=[self.mi])
        connections['default']._index = self.ui

        self.sample_docs = {
            u'core.mockmodel.1': {
                'text': u'Indexed!\n1',
                'django_id': u'1',
                'django_ct': u'core.mockmodel',
                'extra': u'Stored!\n1',
                'author': u'daniel1',
                'pub_date': datetime.datetime(2009, 3, 17, 6, 0),
                'id': u'core.mockmodel.1'
            },
            u'core.mockmodel.2': {
                'text': u'Indexed!\n2',
                'django_id': u'2',
                'django_ct': u'core.mockmodel',
                'extra': u'Stored!\n2',
                'author': u'daniel2',
                'pub_date': datetime.datetime(2009, 3, 17, 7, 0),
                'id': u'core.mockmodel.2'
            },
            u'core.mockmodel.3': {
                'text': u'Indexed!\n3',
                'django_id': u'3',
                'django_ct': u'core.mockmodel',
                'extra': u'Stored!\n3',
                'author': u'daniel3',
                'pub_date': datetime.datetime(2009, 3, 17, 8, 0),
                'id': u'core.mockmodel.3'
            }
        }

    def tearDown(self):
        connections['default']._index = self.old_unified_index
        super(SearchIndexTestCase, self).tearDown()

    def test_no_contentfield_present(self):
        self.assertRaises(SearchFieldError, BadSearchIndex1)

    def test_too_many_contentfields_present(self):
        self.assertRaises(SearchFieldError, BadSearchIndex2)

    def test_contentfield_present(self):
        try:
            GoodMockSearchIndex()
        except Exception:
            self.fail("GoodMockSearchIndex should instantiate without error")

    def test_proper_fields(self):
        self.assertEqual(len(self.mi.fields), 4)
        self.assertTrue('text' in self.mi.fields)
        self.assertTrue(isinstance(self.mi.fields['text'], indexes.CharField))
        self.assertTrue('author' in self.mi.fields)
        self.assertTrue(isinstance(self.mi.fields['author'],
                                   indexes.CharField))
        self.assertTrue('pub_date' in self.mi.fields)
        self.assertTrue(
            isinstance(self.mi.fields['pub_date'], indexes.DateTimeField))
        self.assertTrue('extra' in self.mi.fields)
        self.assertTrue(isinstance(self.mi.fields['extra'], indexes.CharField))

        self.assertEqual(len(self.cmi.fields), 7)
        self.assertTrue('text' in self.cmi.fields)
        self.assertTrue(isinstance(self.cmi.fields['text'], indexes.CharField))
        self.assertTrue('author' in self.cmi.fields)
        self.assertTrue(
            isinstance(self.cmi.fields['author'], indexes.CharField))
        self.assertTrue('author_exact' in self.cmi.fields)
        self.assertTrue(
            isinstance(self.cmi.fields['author_exact'],
                       indexes.FacetCharField))
        self.assertTrue('pub_date' in self.cmi.fields)
        self.assertTrue(
            isinstance(self.cmi.fields['pub_date'], indexes.DateTimeField))
        self.assertTrue('pub_date_exact' in self.cmi.fields)
        self.assertTrue(
            isinstance(self.cmi.fields['pub_date_exact'],
                       indexes.FacetDateTimeField))
        self.assertTrue('extra' in self.cmi.fields)
        self.assertTrue(isinstance(self.cmi.fields['extra'],
                                   indexes.CharField))
        self.assertTrue('hello' in self.cmi.fields)
        self.assertTrue(isinstance(self.cmi.fields['extra'],
                                   indexes.CharField))

    def test_index_queryset(self):
        self.assertEqual(len(self.cmi.index_queryset()), 3)

    def test_read_queryset(self):
        self.assertEqual(len(self.cmi.read_queryset()), 2)

    def test_build_queryset(self):
        # The custom SearchIndex.build_queryset returns the same records as
        # the read_queryset
        self.assertEqual(len(self.cmi.build_queryset()), 2)

        # Store a reference to the original method
        old_guf = self.mi.__class__.get_updated_field

        self.mi.__class__.get_updated_field = lambda self: 'pub_date'

        # With an updated field, we should get filtered results
        sd = datetime.datetime(2009, 3, 17, 7, 0)
        self.assertEqual(len(self.mi.build_queryset(start_date=sd)), 2)

        ed = datetime.datetime(2009, 3, 17, 7, 59)
        self.assertEqual(len(self.mi.build_queryset(end_date=ed)), 2)

        sd = datetime.datetime(2009, 3, 17, 6, 0)
        ed = datetime.datetime(2009, 3, 17, 6, 59)
        self.assertEqual(
            len(self.mi.build_queryset(start_date=sd, end_date=ed)), 1)

        # Remove the updated field for the next test
        del self.mi.__class__.get_updated_field

        # The default should return all 3 even if we specify a start date
        # because there is no updated field specified
        self.assertEqual(len(self.mi.build_queryset(start_date=sd)), 3)

        # Restore the original attribute
        self.mi.__class__.get_updated_field = old_guf

    def test_prepare(self):
        mock = MockModel()
        mock.pk = 20
        mock.author = 'daniel%s' % mock.id
        mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

        self.assertEqual(len(self.mi.prepare(mock)), 7)
        self.assertEqual(sorted(self.mi.prepare(mock).keys()), [
            'author', 'django_ct', 'django_id', 'extra', 'id', 'pub_date',
            'text'
        ])

    def test_custom_prepare(self):
        mock = MockModel()
        mock.pk = 20
        mock.author = 'daniel%s' % mock.id
        mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

        self.assertEqual(len(self.cmi.prepare(mock)), 11)
        self.assertEqual(sorted(self.cmi.prepare(mock).keys()), [
            'author', 'author_exact', 'django_ct', 'django_id', 'extra',
            'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'
        ])

        self.assertEqual(len(self.cmi.full_prepare(mock)), 11)
        self.assertEqual(sorted(self.cmi.full_prepare(mock).keys()), [
            'author', 'author_exact', 'django_ct', 'django_id', 'extra',
            'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'
        ])

    def test_thread_safety(self):
        # This is a regression test. ``SearchIndex`` used to write to
        # ``self.prepared_data``, which would leak between threads if things
        # went too fast.
        exceptions = []

        def threaded_prepare(index_queue, index, model):
            try:
                index.queue = index_queue
                prepped = index.prepare(model)
            except Exception as e:
                exceptions.append(e)
                raise

        class ThreadedSearchIndex(GoodMockSearchIndex):
            def prepare_author(self, obj):
                if obj.pk == 20:
                    time.sleep(0.1)
                else:
                    time.sleep(0.5)

                index_queue.put(self.prepared_data['author'])
                return self.prepared_data['author']

        tmi = ThreadedSearchIndex()
        index_queue = queue.Queue()
        mock_1 = MockModel()
        mock_1.pk = 20
        mock_1.author = 'foo'
        mock_1.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
        mock_2 = MockModel()
        mock_2.pk = 21
        mock_2.author = 'daniel%s' % mock_2.id
        mock_2.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

        th1 = Thread(target=threaded_prepare, args=(index_queue, tmi, mock_1))
        th2 = Thread(target=threaded_prepare, args=(index_queue, tmi, mock_2))

        th1.start()
        th2.start()
        th1.join()
        th2.join()

        mock_1_result = index_queue.get()
        mock_2_result = index_queue.get()
        self.assertEqual(mock_1_result, u'foo')
        self.assertEqual(mock_2_result, u'daniel21')

    def test_custom_prepare_author(self):
        mock = MockModel()
        mock.pk = 20
        mock.author = 'daniel%s' % mock.id
        mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

        self.assertEqual(len(self.cmi.prepare(mock)), 11)
        self.assertEqual(sorted(self.cmi.prepare(mock).keys()), [
            'author', 'author_exact', 'django_ct', 'django_id', 'extra',
            'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'
        ])

        self.assertEqual(len(self.cmi.full_prepare(mock)), 11)
        self.assertEqual(sorted(self.cmi.full_prepare(mock).keys()), [
            'author', 'author_exact', 'django_ct', 'django_id', 'extra',
            'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'
        ])
        self.assertEqual(self.cmi.prepared_data['author'], "Hi, I'm daniel20")
        self.assertEqual(self.cmi.prepared_data['author_exact'],
                         "Hi, I'm daniel20")

    def test_custom_model_attr(self):
        mock = MockModel()
        mock.pk = 20
        mock.author = 'daniel%s' % mock.id
        mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

        self.assertEqual(len(self.cmi.prepare(mock)), 11)
        self.assertEqual(sorted(self.cmi.prepare(mock).keys()), [
            'author', 'author_exact', 'django_ct', 'django_id', 'extra',
            'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'
        ])

        self.assertEqual(len(self.cmi.full_prepare(mock)), 11)
        self.assertEqual(sorted(self.cmi.full_prepare(mock).keys()), [
            'author', 'author_exact', 'django_ct', 'django_id', 'extra',
            'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'
        ])
        self.assertEqual(self.cmi.prepared_data['hello'], u'World!')

    def test_custom_index_fieldname(self):
        mock = MockModel()
        mock.pk = 20
        mock.author = 'daniel%s' % mock.id
        mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

        cofnmi = GoodOverriddenFieldNameMockSearchIndex()
        self.assertEqual(len(cofnmi.prepare(mock)), 6)
        self.assertEqual(sorted(cofnmi.prepare(mock).keys()), [
            'django_ct', 'django_id', 'hello', 'id', 'more_content', 'name_s'
        ])
        self.assertEqual(cofnmi.prepared_data['name_s'], u'daniel20')
        self.assertEqual(cofnmi.get_content_field(), 'more_content')

    def test_get_content_field(self):
        self.assertEqual(self.mi.get_content_field(), 'text')

    def test_update(self):
        self.sb.clear()
        self.assertEqual(self.sb.search('*')['hits'], 0)
        self.mi.update()
        self.assertEqual(self.sb.search('*')['hits'], 3)
        self.sb.clear()

    def test_update_object(self):
        self.sb.clear()
        self.assertEqual(self.sb.search('*')['hits'], 0)

        mock = MockModel()
        mock.pk = 20
        mock.author = 'daniel%s' % mock.id
        mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

        self.mi.update_object(mock)
        self.assertEqual([(res.content_type(), res.pk)
                          for res in self.sb.search('*')['results']],
                         [(u'core.mockmodel', u'20')])
        self.sb.clear()

    def test_remove_object(self):
        self.mi.update()
        self.assertEqual(self.sb.search('*')['hits'], 3)

        mock = MockModel()
        mock.pk = 20
        mock.author = 'daniel%s' % mock.id
        mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

        self.mi.update_object(mock)
        self.assertEqual(self.sb.search('*')['hits'], 4)

        self.mi.remove_object(mock)
        self.assertEqual([(res.content_type(), res.pk)
                          for res in self.sb.search('*')['results']],
                         [(u'core.mockmodel', u'1'), (u'core.mockmodel', u'2'),
                          (u'core.mockmodel', u'3')])

        # Put it back so we can test passing kwargs.
        mock = MockModel()
        mock.pk = 20
        mock.author = 'daniel%s' % mock.id
        mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

        self.mi.update_object(mock)
        self.assertEqual(self.sb.search('*')['hits'], 4)

        self.mi.remove_object(mock, commit=False)
        self.assertEqual([(res.content_type(), res.pk)
                          for res in self.sb.search('*')['results']],
                         [(u'core.mockmodel', u'1'), (u'core.mockmodel', u'2'),
                          (u'core.mockmodel', u'3'),
                          (u'core.mockmodel', u'20')])

        self.sb.clear()

    def test_clear(self):
        self.mi.update()
        self.assertGreater(self.sb.search('*')['hits'], 0)

        self.mi.clear()
        self.assertEqual(self.sb.search('*')['hits'], 0)

    def test_reindex(self):
        self.mi.reindex()
        self.assertEqual([(res.content_type(), res.pk)
                          for res in self.sb.search('*')['results']],
                         [(u'core.mockmodel', u'1'), (u'core.mockmodel', u'2'),
                          (u'core.mockmodel', u'3')])
        self.sb.clear()

    def test_inheritance(self):
        try:
            agmi = AltGoodMockSearchIndex()
        except Exception:
            self.fail("AltGoodMockSearchIndex should instantiate without error")

        self.assertEqual(len(agmi.fields), 5)
        self.assertTrue('text' in agmi.fields)
        self.assertTrue(isinstance(agmi.fields['text'], indexes.CharField))
        self.assertTrue('author' in agmi.fields)
        self.assertTrue(isinstance(agmi.fields['author'], indexes.CharField))
        self.assertTrue('pub_date' in agmi.fields)
        self.assertTrue(
            isinstance(agmi.fields['pub_date'], indexes.DateTimeField))
        self.assertTrue('extra' in agmi.fields)
        self.assertTrue(isinstance(agmi.fields['extra'], indexes.CharField))
        self.assertTrue('additional' in agmi.fields)
        self.assertTrue(
            isinstance(agmi.fields['additional'], indexes.CharField))

    def test_proper_field_resolution(self):
        mrofsc = MROFieldsSearchChild()
        mock = MockModel()
        mock.pk = 20
        mock.author = 'daniel%s' % mock.id
        mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
        mock.test_a = 'This is A'
        mock.test_b = 'This is B'

        self.assertEqual(len(mrofsc.fields), 1)
        prepped_data = mrofsc.prepare(mock)
        self.assertEqual(len(prepped_data), 4)
        self.assertEqual(prepped_data['text'], 'This is A')

    def test_load_all_queryset(self):
        self.assertEqual([obj.id for obj in self.cmi.load_all_queryset()],
                         [2, 3])

    def test_nullable(self):
        mock = MockModel()
        mock.pk = 20
        mock.author = None
        mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

        prepared_data = self.cnmi.prepare(mock)
        self.assertEqual(len(prepared_data), 6)
        self.assertEqual(
            sorted(prepared_data.keys()),
            ['author', 'author_exact', 'django_ct', 'django_id', 'id', 'text'])

        prepared_data = self.cnmi.full_prepare(mock)
        self.assertEqual(len(prepared_data), 4)
        self.assertEqual(sorted(prepared_data.keys()),
                         ['django_ct', 'django_id', 'id', 'text'])
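        # ``prepare()`` keeps the ``None``-valued author fields, while
        # ``full_prepare()`` drops nullable fields (here ``author`` and
        # ``author_exact``, presumably declared with ``null=True``) whose
        # prepared value is ``None``, hence 6 keys versus 4.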

    def test_custom_facet_fields(self):
        mock = MockModel()
        mock.pk = 20
        mock.author = 'daniel'
        mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

        prepared_data = self.gfmsi.prepare(mock)
        self.assertEqual(len(prepared_data), 8)
        self.assertEqual(sorted(prepared_data.keys()), [
            'author', 'author_foo', 'django_ct', 'django_id', 'id', 'pub_date',
            'pub_date_exact', 'text'
        ])

        prepared_data = self.gfmsi.full_prepare(mock)
        self.assertEqual(len(prepared_data), 8)
        self.assertEqual(sorted(prepared_data.keys()), [
            'author', 'author_foo', 'django_ct', 'django_id', 'id', 'pub_date',
            'pub_date_exact', 'text'
        ])
        self.assertEqual(prepared_data['author_foo'], u"Hi, I'm daniel")
        self.assertEqual(prepared_data['pub_date_exact'],
                         '2010-10-26T01:54:32')
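        # The hard-coded ``pub_date_exact`` value suggests this faceted index
        # overrides its preparation (e.g. with a ``prepare_pub_date_exact()``
        # method) instead of copying ``mock.pub_date``.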
Example #56
class LiveWhooshSearchQueryTestCase(WhooshTestCase):
    def setUp(self):
        super(LiveWhooshSearchQueryTestCase, self).setUp()

        # Stow.
        self.old_ui = connections['whoosh'].get_unified_index()
        self.ui = UnifiedIndex()
        self.wmmi = WhooshMockSearchIndex()
        self.wmtmmi = WhooshMaintainTypeMockSearchIndex()
        self.ui.build(indexes=[self.wmmi])
        self.sb = connections['whoosh'].get_backend()
        connections['whoosh']._index = self.ui

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name,
                                  schema=self.sb.schema)
        self.sb.delete_index()

        self.sample_objs = []

        for i in range(1, 4):
            mock = MockModel()
            mock.id = i
            mock.author = 'daniel%s' % i
            mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
            self.sample_objs.append(mock)

        self.sq = connections['whoosh'].get_query()

    def tearDown(self):
        connections['whoosh']._index = self.old_ui
        super(LiveWhooshSearchQueryTestCase, self).tearDown()

    def test_get_spelling(self):
        self.sb.update(self.wmmi, self.sample_objs)

        self.sq.add_filter(SQ(content='Indexe'))
        self.assertEqual(self.sq.get_spelling_suggestion(), u'indexed')

    def test_log_query(self):
        from django.conf import settings
        reset_search_queries()
        self.assertEqual(len(connections['whoosh'].queries), 0)

        # Stow.

        with self.settings(DEBUG=False):
            len(self.sq.get_results())
            self.assertEqual(len(connections['whoosh'].queries), 0)

        with self.settings(DEBUG=True):
            # Redefine it to clear out the cached results.
            self.sq = connections['whoosh'].get_query()
            self.sq.add_filter(SQ(name='bar'))
            len(self.sq.get_results())
            self.assertEqual(len(connections['whoosh'].queries), 1)
            self.assertEqual(connections['whoosh'].queries[0]['query_string'],
                             'name:(bar)')

            # And again, for good measure.
            self.sq = connections['whoosh'].get_query()
            self.sq.add_filter(SQ(name='baz'))
            self.sq.add_filter(SQ(text='foo'))
            len(self.sq.get_results())
            self.assertEqual(len(connections['whoosh'].queries), 2)
            self.assertEqual(connections['whoosh'].queries[0]['query_string'],
                             'name:(bar)')
            self.assertEqual(connections['whoosh'].queries[1]['query_string'],
                             u'(name:(baz) AND text:(foo))')
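            # Query logging is tied to ``DEBUG``: nothing is recorded with
            # ``DEBUG=False``, and each executed query is logged once when
            # ``DEBUG=True``.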
Example #57
class SearchQuerySetTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(SearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.cpkmmsi = CharPKMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.cpkmmsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.msqs = SearchQuerySet()

        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True

        reset_search_queries()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_unified_index
        settings.DEBUG = self.old_debug
        super(SearchQuerySetTestCase, self).tearDown()

    def test_len(self):
        self.assertEqual(len(self.msqs), 23)

    def test_repr(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(
            repr(self.msqs),
            "[<SearchResult: core.mockmodel (pk=u'1')>, <SearchResult: core.mockmodel (pk=u'2')>, <SearchResult: core.mockmodel (pk=u'3')>, <SearchResult: core.mockmodel (pk=u'4')>, <SearchResult: core.mockmodel (pk=u'5')>, <SearchResult: core.mockmodel (pk=u'6')>, <SearchResult: core.mockmodel (pk=u'7')>, <SearchResult: core.mockmodel (pk=u'8')>, <SearchResult: core.mockmodel (pk=u'9')>, <SearchResult: core.mockmodel (pk=u'10')>, <SearchResult: core.mockmodel (pk=u'11')>, <SearchResult: core.mockmodel (pk=u'12')>, <SearchResult: core.mockmodel (pk=u'13')>, <SearchResult: core.mockmodel (pk=u'14')>, <SearchResult: core.mockmodel (pk=u'15')>, <SearchResult: core.mockmodel (pk=u'16')>, <SearchResult: core.mockmodel (pk=u'17')>, <SearchResult: core.mockmodel (pk=u'18')>, <SearchResult: core.mockmodel (pk=u'19')>, '...(remaining elements truncated)...']"
        )
        self.assertEqual(len(connections['default'].queries), 1)

    def test_iter(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        msqs = self.msqs.all()
        results = [int(res.pk) for res in msqs]
        self.assertEqual(results, [res.pk for res in MOCK_SEARCH_RESULTS[:23]])
        self.assertEqual(len(connections['default'].queries), 3)

    def test_slice(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual([int(res.pk) for res in results[1:11]],
                         [res.pk for res in MOCK_SEARCH_RESULTS[1:11]])
        self.assertEqual(len(connections['default'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual(int(results[22].pk), MOCK_SEARCH_RESULTS[22].pk)
        self.assertEqual(len(connections['default'].queries), 1)

    def test_manual_iter(self):
        results = self.msqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        check = [result.pk for result in results._manual_iter()]
        self.assertEqual(check, [
            u'1', u'2', u'3', u'4', u'5', u'6', u'7', u'8', u'9', u'10', u'11',
            u'12', u'13', u'14', u'15', u'16', u'17', u'18', u'19', u'20',
            u'21', u'22', u'23'
        ])

        self.assertEqual(len(connections['default'].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        # This will hang indefinitely if broken.
        old_ui = self.ui
        self.ui.build(indexes=[self.cpkmmsi])
        connections['default']._index = self.ui
        self.cpkmmsi.update()

        results = self.msqs.all()
        loaded = [result.pk for result in results._manual_iter()]
        self.assertEqual(loaded, [u'sometext', u'1234'])
        self.assertEqual(len(connections['default'].queries), 1)

        connections['default']._index = old_ui

    def test_fill_cache(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        sqs = SearchQuerySet().all()
        sqs.query.backend = MixedMockSearchBackend('default')
        results = sqs
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 0)
        self.assertEqual([
            int(result.pk)
            for result in results._result_cache if result is not None
        ], [])
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 9)
        self.assertEqual([
            int(result.pk)
            for result in results._result_cache if result is not None
        ], [1, 2, 3, 4, 5, 6, 7, 8, 10])
        self.assertEqual(len(connections['default'].queries), 2)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 17)
        self.assertEqual([
            int(result.pk)
            for result in results._result_cache if result is not None
        ], [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20])
        self.assertEqual(len(connections['default'].queries), 4)
        results._fill_cache(20, 30)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 20)
        self.assertEqual([
            int(result.pk)
            for result in results._result_cache if result is not None
        ], [
            1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20, 21, 22,
            23
        ])
        self.assertEqual(len(connections['default'].queries), 6)
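        # ``MixedMockSearchBackend`` reports hits for a model the current
        # ``UnifiedIndex`` does not handle, so the cache stays sparse (``None``
        # placeholders) and each ``_fill_cache()`` pass issues extra backend
        # queries to top the slice back up, hence two queries per pass.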

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.msqs._cache_is_full(), False)
        results = self.msqs.all()
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 3)

    def test_all(self):
        sqs = self.msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))

    def test_filter(self):
        sqs = self.msqs.filter(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_exclude(self):
        sqs = self.msqs.exclude(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_order_by(self):
        sqs = self.msqs.order_by('foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue('foo' in sqs.query.order_by)

    def test_models(self):
        # Stow.
        old_unified_index = connections['default']._index
        ui = UnifiedIndex()
        bmmsi = BasicMockModelSearchIndex()
        bammsi = BasicAnotherMockModelSearchIndex()
        ui.build(indexes=[bmmsi, bammsi])
        connections['default']._index = ui

        msqs = SearchQuerySet()

        sqs = msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 0)

        sqs = msqs.models(MockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

        sqs = msqs.models(MockModel, AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 2)

        # This will produce a warning.
        ui.build(indexes=[bmmsi])
        sqs = msqs.models(AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

    def test_result_class(self):
        sqs = self.msqs.all()
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

        # Custom class.
        class IttyBittyResult(object):
            pass

        sqs = self.msqs.result_class(IttyBittyResult)
        self.assertTrue(issubclass(sqs.query.result_class, IttyBittyResult))

        # Reset to default.
        sqs = self.msqs.result_class(None)
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

    def test_boost(self):
        sqs = self.msqs.boost('foo', 10)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.boost.keys()), 1)

    def test_highlight(self):
        sqs = self.msqs.highlight()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.highlight, True)

    def test_spelling(self):
        # Test the case where spelling support is disabled.
        sqs = self.msqs.filter(content='Indx')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.spelling_suggestion(), None)
        self.assertEqual(sqs.spelling_suggestion('indexy'), None)

    def test_raw_search(self):
        self.assertEqual(len(self.msqs.raw_search('foo')), 23)
        self.assertEqual(
            len(
                self.msqs.raw_search(
                    '(content__exact:hello AND content__exact:world)')), 23)

    def test_load_all(self):
        # Models with character primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = CharPKMockSearchBackend('charpk')
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # If nothing is handled, you get nothing.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        ui.build(indexes=[])
        connections['default']._index = ui

        sqs = self.msqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        connections['default']._index = old_ui

        # For full tests, see the solr_backend.

    def test_load_all_read_queryset(self):
        # Stow.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        gafmmsi = GhettoAFifthMockModelSearchIndex()
        ui.build(indexes=[gafmmsi])
        connections['default']._index = ui
        gafmmsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # The deleted result isn't returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 1)

        # Register a SearchIndex with a read_queryset that returns deleted items
        rqstsi = TextReadQuerySetTestSearchIndex()
        ui.build(indexes=[rqstsi])
        rqstsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # Both the deleted and not deleted items are returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # Restore.
        connections['default']._index = old_ui

    def test_auto_query(self):
        sqs = self.msqs.auto_query('test search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__contains=test search -stuff>')

        sqs = self.msqs.auto_query('test "my thing" search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__contains=test "my thing" search -stuff>')

        sqs = self.msqs.auto_query(
            'test "my thing" search \'moar quotes\' -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__contains=test "my thing" search \'moar quotes\' -stuff>'
        )

        sqs = self.msqs.auto_query(
            'test "my thing" search \'moar quotes\' "foo -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__contains=test "my thing" search \'moar quotes\' "foo -stuff>'
        )

        sqs = self.msqs.auto_query('test - stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         "<SQ: AND content__contains=test - stuff>")

        # Ensure bits in exact matches get escaped properly as well.
        sqs = self.msqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__contains="pants:rule">')

        # Now with a different fieldname
        sqs = self.msqs.auto_query('test search -stuff', fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         "<SQ: AND title__contains=test search -stuff>")

        sqs = self.msqs.auto_query('test "my thing" search -stuff',
                                   fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND title__contains=test "my thing" search -stuff>')

    def test_count(self):
        self.assertEqual(self.msqs.count(), 23)

    def test_facet_counts(self):
        self.assertEqual(self.msqs.facet_counts(), {})

    def test_best_match(self):
        self.assertTrue(isinstance(self.msqs.best_match(), SearchResult))

    def test_latest(self):
        self.assertTrue(isinstance(self.msqs.latest('pub_date'), SearchResult))

    def test_more_like_this(self):
        mock = MockModel()
        mock.id = 1

        self.assertEqual(len(self.msqs.more_like_this(mock)), 23)

    def test_facets(self):
        sqs = self.msqs.facet('foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.facets), 1)

        sqs2 = self.msqs.facet('foo').facet('bar')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.facets), 2)

    def test_date_facets(self):
        try:
            sqs = self.msqs.date_facet('foo',
                                       start_date=datetime.date(2008, 2, 25),
                                       end_date=datetime.date(2009, 2, 25),
                                       gap_by='smarblaph')
            self.fail()
        except FacetingError as e:
            self.assertEqual(
                str(e),
                "The gap_by ('smarblaph') must be one of the following: year, month, day, hour, minute, second."
            )

        sqs = self.msqs.date_facet('foo',
                                   start_date=datetime.date(2008, 2, 25),
                                   end_date=datetime.date(2009, 2, 25),
                                   gap_by='month')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.date_facets), 1)

        sqs2 = self.msqs.date_facet('foo',
                                    start_date=datetime.date(2008, 2, 25),
                                    end_date=datetime.date(2009, 2, 25),
                                    gap_by='month').date_facet(
                                        'bar',
                                        start_date=datetime.date(2007, 2, 25),
                                        end_date=datetime.date(2009, 2, 25),
                                        gap_by='year')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.date_facets), 2)
Example #58
class LiveWhooshRamStorageTestCase(TestCase):
    def setUp(self):
        super(LiveWhooshRamStorageTestCase, self).setUp()

        # Stow.
        self.old_whoosh_storage = settings.HAYSTACK_CONNECTIONS['whoosh'].get(
            'STORAGE', 'file')
        settings.HAYSTACK_CONNECTIONS['whoosh']['STORAGE'] = 'ram'
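        # With 'ram' storage the Whoosh index is held entirely in memory, so the test never writes to disk.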

        self.old_ui = connections['whoosh'].get_unified_index()
        self.ui = UnifiedIndex()
        self.wrtsi = WhooshRoundTripSearchIndex()
        self.ui.build(indexes=[self.wrtsi])
        self.sb = connections['whoosh'].get_backend()
        connections['whoosh']._index = self.ui

        import haystack

        self.sb.setup()
        self.raw_whoosh = self.sb.index
        self.parser = QueryParser(self.sb.content_field_name,
                                  schema=self.sb.schema)

        self.sqs = SearchQuerySet('whoosh')

        # Wipe it clean.
        self.sqs.query.backend.clear()

        # Fake indexing.
        mock = MockModel()
        mock.id = 1
        self.sb.update(self.wrtsi, [mock])

    def tearDown(self):
        self.sqs.query.backend.clear()

        settings.HAYSTACK_CONNECTIONS['whoosh'][
            'STORAGE'] = self.old_whoosh_storage
        connections['whoosh']._index = self.old_ui
        super(LiveWhooshRamStorageTestCase, self).tearDown()

    def test_ram_storage(self):
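        # The single faked document should round-trip with every field type intact (text, bool, int, float, datetime, lists).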
        results = self.sqs.filter(id='core.mockmodel.1')

        # Sanity check.
        self.assertEqual(results.count(), 1)

        # Check the individual fields.
        result = results[0]
        self.assertEqual(result.id, 'core.mockmodel.1')
        self.assertEqual(result.text, 'This is some example text.')
        self.assertEqual(result.name, 'Mister Pants')
        self.assertEqual(result.is_active, True)
        self.assertEqual(result.post_count, 25)
        self.assertEqual(result.average_rating, 3.6)
        self.assertEqual(result.pub_date, datetime(2009, 11, 21, 0, 0))
        self.assertEqual(result.created, datetime(2009, 11, 21, 21, 31, 00))
        self.assertEqual(result.tags,
                         ['staff', 'outdoor', 'activist', 'scientist'])
        self.assertEqual(result.sites, [u'3', u'5', u'1'])
        self.assertEqual(result.empty_list, [])
Example #59
class LiveElasticsearchAutocompleteTestCase(TestCase):
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveElasticsearchAutocompleteTestCase, self).setUp()

        # Stow.
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = ElasticsearchAutocompleteMockModelSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui

        self.sqs = SearchQuerySet()

        # Wipe it clean.
        clear_elasticsearch_index()

        # Reboot the schema.
        self.sb = connections['default'].get_backend()
        self.sb.setup()

        self.smmi.update()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        super(LiveElasticsearchAutocompleteTestCase, self).tearDown()

    def test_build_schema(self):
        self.sb = connections['default'].get_backend()
        content_name, mapping = self.sb.build_schema(self.ui.all_searchfields())
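        # The *_auto fields come from EdgeNgramField, so they map to the edgengram_analyzer; plain text fields keep snowball.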
        self.assertEqual(mapping, {
            'name_auto': {
                'index': 'analyzed',
                'term_vector': 'with_positions_offsets',
                'type': 'string',
                'analyzer': 'edgengram_analyzer',
                'boost': 1.0,
                'store': 'yes'
            },
            'text': {
                'index': 'analyzed',
                'term_vector': 'with_positions_offsets',
                'type': 'string',
                'analyzer': 'snowball',
                'boost': 1.0,
                'store': 'yes'
            },
            'pub_date': {
                'index': 'analyzed',
                'boost': 1.0,
                'store': 'yes',
                'type': 'date'
            },
            'name': {
                'index': 'analyzed',
                'term_vector': 'with_positions_offsets',
                'type': 'string',
                'analyzer': 'snowball',
                'boost': 1.0,
                'store': 'yes'
            },
            'text_auto': {
                'index': 'analyzed',
                'term_vector': 'with_positions_offsets',
                'type': 'string',
                'analyzer': 'edgengram_analyzer',
                'boost': 1.0,
                'store': 'yes'
            }
        })

    def test_autocomplete(self):
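        # Partial input is prefix-matched against the EdgeNgram-analyzed text_auto field.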
        autocomplete = self.sqs.autocomplete(text_auto='mod')
        self.assertEqual(autocomplete.count(), 5)
        self.assertEqual([result.pk for result in autocomplete], [u'1', u'12', u'14', u'6', u'7'])
        self.assertTrue('mod' in autocomplete[0].text.lower())
        self.assertTrue('mod' in autocomplete[1].text.lower())
        self.assertTrue('mod' in autocomplete[2].text.lower())
        self.assertTrue('mod' in autocomplete[3].text.lower())
        self.assertTrue('mod' in autocomplete[4].text.lower())
        self.assertEqual(len([result.pk for result in autocomplete]), 5)

        # Test multiple words.
        autocomplete_2 = self.sqs.autocomplete(text_auto='your mod')
        self.assertEqual(autocomplete_2.count(), 3)
        self.assertEqual([result.pk for result in autocomplete_2], ['1', '14', '6'])
        self.assertTrue('your' in autocomplete_2[0].text.lower())
        self.assertTrue('mod' in autocomplete_2[0].text.lower())
        self.assertTrue('your' in autocomplete_2[1].text.lower())
        self.assertTrue('mod' in autocomplete_2[1].text.lower())
        self.assertTrue('your' in autocomplete_2[2].text.lower())
        self.assertTrue('mod' in autocomplete_2[2].text.lower())
        self.assertEqual(len([result.pk for result in autocomplete_2]), 3)

        # Test multiple fields.
        autocomplete_3 = self.sqs.autocomplete(text_auto='Django', name_auto='dan')
        self.assertEqual(autocomplete_3.count(), 4)
        self.assertEqual([result.pk for result in autocomplete_3], ['12', '1', '14', '22'])
        self.assertEqual(len([result.pk for result in autocomplete_3]), 4)
Example #60
    def test_build_schema(self):
        old_ui = connections['default'].get_unified_index()

        (content_field_name,
         fields) = self.sb.build_schema(old_ui.all_searchfields())
        self.assertEqual(content_field_name, 'text')
        self.assertEqual(len(fields), 4)
        self.assertEqual(fields, [{
            'indexed': 'true',
            'type': 'text_en',
            'stored': 'true',
            'field_name': 'text',
            'multi_valued': 'false'
        }, {
            'indexed': 'true',
            'type': 'date',
            'stored': 'true',
            'field_name': 'pub_date',
            'multi_valued': 'false'
        }, {
            'indexed': 'true',
            'type': 'text_en',
            'stored': 'true',
            'field_name': 'name',
            'multi_valued': 'false'
        }, {
            'indexed': 'true',
            'field_name': 'name_exact',
            'stored': 'true',
            'type': 'string',
            'multi_valued': 'false'
        }])

        ui = UnifiedIndex()
        ui.build(indexes=[SolrComplexFacetsMockSearchIndex()])
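        # The faceted index should add an '_exact' companion for each faceted field; text fields switch from 'text_en' to the non-analyzed 'string' type.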
        (content_field_name,
         fields) = self.sb.build_schema(ui.all_searchfields())
        self.assertEqual(content_field_name, 'text')
        self.assertEqual(len(fields), 15)
        fields = sorted(fields, key=lambda field: field['field_name'])
        self.assertEqual(fields, [{
            'field_name': 'average_rating',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'float'
        }, {
            'field_name': 'average_rating_exact',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'float'
        }, {
            'field_name': 'created',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'date'
        }, {
            'field_name': 'created_exact',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'date'
        }, {
            'field_name': 'is_active',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'boolean'
        }, {
            'field_name': 'is_active_exact',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'boolean'
        }, {
            'field_name': 'name',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'text_en'
        }, {
            'field_name': 'name_exact',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'string'
        }, {
            'field_name': 'post_count',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'long'
        }, {
            'field_name': 'post_count_i',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'long'
        }, {
            'field_name': 'pub_date',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'date'
        }, {
            'field_name': 'pub_date_exact',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'date'
        }, {
            'field_name': 'sites',
            'indexed': 'true',
            'multi_valued': 'true',
            'stored': 'true',
            'type': 'text_en'
        }, {
            'field_name': 'sites_exact',
            'indexed': 'true',
            'multi_valued': 'true',
            'stored': 'true',
            'type': 'string'
        }, {
            'field_name': 'text',
            'indexed': 'true',
            'multi_valued': 'false',
            'stored': 'true',
            'type': 'text_en'
        }])