def test_models(self):
        """``models()`` must restrict the queryset to the given models.

        Swaps in a unified index handling both mock models; restores the
        stowed index in a ``finally`` so the swap cannot leak into other
        tests (previously the stowed index was never restored).
        """
        # Stow.
        old_unified_index = connections['default']._index
        ui = UnifiedIndex()
        bmmsi = BasicMockModelSearchIndex()
        bammsi = BasicAnotherMockModelSearchIndex()
        ui.build(indexes=[bmmsi, bammsi])
        connections['default']._index = ui

        try:
            msqs = SearchQuerySet()

            sqs = msqs.all()
            self.assertTrue(isinstance(sqs, SearchQuerySet))
            self.assertEqual(len(sqs.query.models), 0)

            sqs = msqs.models(MockModel)
            self.assertTrue(isinstance(sqs, SearchQuerySet))
            self.assertEqual(len(sqs.query.models), 1)

            sqs = msqs.models(MockModel, AnotherMockModel)
            self.assertTrue(isinstance(sqs, SearchQuerySet))
            self.assertEqual(len(sqs.query.models), 2)

            # This will produce a warning (AnotherMockModel is no longer
            # handled), but the model is still recorded on the query.
            ui.build(indexes=[bmmsi])
            sqs = msqs.models(AnotherMockModel)
            self.assertTrue(isinstance(sqs, SearchQuerySet))
            self.assertEqual(len(sqs.query.models), 1)
        finally:
            # Restore the original unified index.
            connections['default']._index = old_unified_index
def test_load_all_read_queryset(self):
        """``load_all()`` must honor each index's ``read_queryset()``.

        First installs an index whose read queryset excludes the deleted
        object (only 1 hydrated result survives), then one whose read
        queryset includes deleted objects (both results survive).
        """
        # Stow.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        gafmmsi = GhettoAFifthMockModelSearchIndex()
        ui.build(indexes=[gafmmsi])
        connections['default']._index = ui
        gafmmsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # The deleted result isn't returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 1)

        # Register a SearchIndex with a read_queryset that returns deleted items
        rqstsi = TextReadQuerySetTestSearchIndex()
        ui.build(indexes=[rqstsi])
        rqstsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # Both the deleted and not deleted items are returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # Restore.
        connections['default']._index = old_ui
def test_load_all(self):
        """``load_all()`` hydrates results, including char-primary-key models.

        Also verifies that an empty unified index (no handled models)
        yields zero results rather than erroring.
        """
        # Models with character primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = CharPKMockSearchBackend('charpk')
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # If nothing is handled, you get nothing.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        ui.build(indexes=[])
        connections['default']._index = ui

        sqs = self.msqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        connections['default']._index = old_ui
def setUp(self):
        """Reindex the ``Checkin`` data and prepare spatial reference points.

        NOTE(review): this duplicates ``SpatialSolrTestCase.setUp`` later
        in the file — likely an extraction artifact; confirm which copy is
        canonical.
        """
        super(SpatialSolrTestCase, self).setUp()
        self.ui = connections[self.using].get_unified_index()
        self.checkindex = self.ui.get_index(Checkin)
        self.checkindex.reindex(using=self.using)
        self.sqs = SearchQuerySet().using(self.using)

        # Reference point and bounding boxes, as Point(longitude, latitude).
        self.downtown_pnt = Point(-95.23592948913574, 38.97127105172941)
        self.downtown_bottom_left = Point(-95.23947, 38.9637903)
        self.downtown_top_right = Point(-95.23362278938293, 38.973081081164715)
        self.lawrence_bottom_left = Point(-95.345535, 39.002643)
        self.lawrence_top_right = Point(-95.202713, 38.923626)
    def test_searchqueryset_using(self):
        # Using the default.
        sqs = SearchQuerySet('solr')
        self.assertEqual(sqs.count(), 5)
        self.assertEqual(sqs.models(Foo).count(), 2)
        self.assertEqual(sqs.models(Bar).count(), 3)

        self.assertEqual(sqs.using('solr').count(), 5)
        self.assertEqual(sqs.using('solr').models(Foo).count(), 2)
        self.assertEqual(sqs.using('solr').models(Bar).count(), 3)

        self.assertEqual(sqs.using('elasticsearch').count(), 2)
        self.assertEqual(sqs.using('elasticsearch').models(Foo).count(), 2)
        self.assertEqual(sqs.using('elasticsearch').models(Bar).count(), 0)
def setUp(self):
        """Swap in a mock unified index and repopulate the backend.

        NOTE(review): this duplicates ``PickleSearchQuerySetTestCase.setUp``
        immediately below — likely an extraction artifact; confirm which
        copy is canonical.
        """
        super(PickleSearchQuerySetTestCase, self).setUp()
        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.cpkmmsi = CharPKMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.cpkmmsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.msqs = SearchQuerySet()

        # Reset query counters so each test starts from zero.
        reset_search_queries()
class PickleSearchQuerySetTestCase(TestCase):
    """Checks that a SearchQuerySet survives a pickle round-trip."""

    fixtures = ['initial_data.json']

    def setUp(self):
        super(PickleSearchQuerySetTestCase, self).setUp()
        # Remember the real unified index so tearDown can put it back,
        # then install one that only knows the two mock indexes.
        self.old_unified_index = connections['default']._index
        self.bmmsi = BasicMockModelSearchIndex()
        self.cpkmmsi = CharPKMockModelSearchIndex()
        self.ui = UnifiedIndex()
        self.ui.build(indexes=[self.bmmsi, self.cpkmmsi])
        connections['default']._index = self.ui

        # Rebuild the backend's data from scratch.
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.msqs = SearchQuerySet()

        # Start each test with a clean query log.
        reset_search_queries()

    def tearDown(self):
        # Reinstall the unified index stowed in setUp.
        connections['default']._index = self.old_unified_index
        super(PickleSearchQuerySetTestCase, self).tearDown()

    def test_pickling(self):
        """A fully-cached queryset must round-trip through pickle intact."""
        results = self.msqs.all()

        # Iterate once so the result cache is completely populated.
        for _ in results:
            pass

        restored = pickle.loads(pickle.dumps(results))
        self.assertEqual(len(restored), len(results))
        self.assertEqual(restored[0].id, results[0].id)
class SpatialSolrTestCase(TestCase):
    """End-to-end tests of the spatial API (within/dwithin/distance) on Solr."""

    fixtures = ['sample_spatial_data.json']
    using = 'solr'

    def setUp(self):
        super(SpatialSolrTestCase, self).setUp()
        self.ui = connections[self.using].get_unified_index()
        self.checkindex = self.ui.get_index(Checkin)
        self.checkindex.reindex(using=self.using)
        self.sqs = SearchQuerySet().using(self.using)

        # Reference point and bounding boxes, as Point(longitude, latitude).
        self.downtown_pnt = Point(-95.23592948913574, 38.97127105172941)
        self.downtown_bottom_left = Point(-95.23947, 38.9637903)
        self.downtown_top_right = Point(-95.23362278938293, 38.973081081164715)
        self.lawrence_bottom_left = Point(-95.345535, 39.002643)
        self.lawrence_top_right = Point(-95.202713, 38.923626)

    def tearDown(self):
        self.checkindex.clear(using=self.using)
        # BUGFIX: this previously called ``super(...).setUp()``, re-running
        # the parent's setup during teardown and skipping its real teardown.
        super(SpatialSolrTestCase, self).tearDown()

    def test_indexing(self):
        """Indexed location data must round-trip faithfully per instance."""
        # Make sure the indexed data looks correct.
        first = Checkin.objects.get(pk=1)
        sqs = self.sqs.models(Checkin).filter(django_id=first.pk)
        self.assertEqual(sqs.count(), 1)
        self.assertEqual(sqs[0].username, first.username)
        # Make sure we've got a proper ``Point`` object.
        self.assertAlmostEqual(sqs[0].location.get_coords()[0], first.longitude)
        self.assertAlmostEqual(sqs[0].location.get_coords()[1], first.latitude)

        # Double-check, to make sure there was nothing accidentally copied
        # between instances.
        second = Checkin.objects.get(pk=2)
        self.assertNotEqual(second.latitude, first.latitude)
        sqs = self.sqs.models(Checkin).filter(django_id=second.pk)
        self.assertEqual(sqs.count(), 1)
        self.assertEqual(sqs[0].username, second.username)
        self.assertAlmostEqual(sqs[0].location.get_coords()[0], second.longitude)
        self.assertAlmostEqual(sqs[0].location.get_coords()[1], second.latitude)

    def test_within(self):
        """``within`` filters results to a bounding box."""
        self.assertEqual(self.sqs.all().count(), 10)

        sqs = self.sqs.within('location', self.downtown_bottom_left, self.downtown_top_right)
        self.assertEqual(sqs.count(), 7)

        sqs = self.sqs.within('location', self.lawrence_bottom_left, self.lawrence_top_right)
        self.assertEqual(sqs.count(), 9)

    def test_dwithin(self):
        """``dwithin`` filters results to a radius around a point."""
        self.assertEqual(self.sqs.all().count(), 10)

        sqs = self.sqs.dwithin('location', self.downtown_pnt, D(mi=0.1))
        self.assertEqual(sqs.count(), 5)

        sqs = self.sqs.dwithin('location', self.downtown_pnt, D(mi=0.5))
        self.assertEqual(sqs.count(), 7)

        sqs = self.sqs.dwithin('location', self.downtown_pnt, D(mi=100))
        self.assertEqual(sqs.count(), 10)

    def test_distance_added(self):
        """``distance`` annotates each result with a distance object."""
        sqs = self.sqs.within('location', self.downtown_bottom_left,
                              self.downtown_top_right).distance('location', self.downtown_pnt)
        self.assertEqual(sqs.count(), 7)
        self.assertAlmostEqual(sqs[0].distance.mi, 0.01985226)
        self.assertAlmostEqual(sqs[1].distance.mi, 0.03385863)
        self.assertAlmostEqual(sqs[2].distance.mi, 0.04539100)
        self.assertAlmostEqual(sqs[3].distance.mi, 0.04831436)
        self.assertAlmostEqual(sqs[4].distance.mi, 0.41116546)
        self.assertAlmostEqual(sqs[5].distance.mi, 0.25098114)
        self.assertAlmostEqual(sqs[6].distance.mi, 0.04831436)

        sqs = self.sqs.dwithin('location', self.downtown_pnt,
                               D(mi=0.1)).distance('location', self.downtown_pnt)
        self.assertEqual(sqs.count(), 5)
        self.assertAlmostEqual(sqs[0].distance.mi, 0.01985226)
        self.assertAlmostEqual(sqs[1].distance.mi, 0.03385863)
        self.assertAlmostEqual(sqs[2].distance.mi, 0.04539100)
        self.assertAlmostEqual(sqs[3].distance.mi, 0.04831436)
        self.assertAlmostEqual(sqs[4].distance.mi, 0.04831436)

    def test_order_by_distance(self):
        """``order_by('distance')`` sorts results by the annotated distance."""
        sqs = self.sqs.within('location', self.downtown_bottom_left, self.downtown_top_right)
        sqs = sqs.distance('location', self.downtown_pnt).order_by('distance')
        self.assertEqual(sqs.count(), 7)
        self.assertEqual([result.pk for result in sqs], ['8', '9', '6', '3', '1', '2', '5'])
        self.assertEqual(["%0.04f" % result.distance.mi for result in sqs],
                         ['0.0199', '0.0339', '0.0454', '0.0483', '0.0483', '0.2510', '0.4112'])

        sqs = self.sqs.dwithin('location', self.downtown_pnt, D(mi=0.1))
        sqs = sqs.distance('location', self.downtown_pnt).order_by('distance')
        self.assertEqual(sqs.count(), 5)
        self.assertEqual([result.pk for result in sqs], ['8', '9', '6', '3', '1'])
        self.assertEqual(["%0.04f" % result.distance.mi for result in sqs],
                         ['0.0199', '0.0339', '0.0454', '0.0483', '0.0483'])

        sqs = self.sqs.dwithin('location', self.downtown_pnt, D(mi=0.1))
        sqs = sqs.distance('location', self.downtown_pnt).order_by('-distance')
        self.assertEqual(sqs.count(), 5)
        self.assertEqual([result.pk for result in sqs], ['3', '1', '6', '9', '8'])
        self.assertEqual(["%0.04f" % result.distance.mi for result in sqs],
                         ['0.0483', '0.0483', '0.0454', '0.0339', '0.0199'])

    def test_complex(self):
        """Spatial filters compose with auto_query and other orderings."""
        sqs = self.sqs.auto_query('coffee')
        sqs = sqs.within('location', self.downtown_bottom_left, self.downtown_top_right)
        sqs = sqs.distance('location', self.downtown_pnt).order_by('distance')
        self.assertEqual(sqs.count(), 5)
        self.assertEqual([result.pk for result in sqs], ['8', '6', '3', '1', '2'])
        self.assertEqual(["%0.04f" % result.distance.mi for result in sqs],
                         ['0.0199', '0.0454', '0.0483', '0.0483', '0.2510'])

        sqs = self.sqs.auto_query('coffee').dwithin('location', self.downtown_pnt, D(mi=0.1))
        sqs = sqs.distance('location', self.downtown_pnt).order_by('distance')
        self.assertEqual(sqs.count(), 4)
        self.assertEqual([result.pk for result in sqs], ['8', '6', '3', '1'])
        self.assertEqual(["%0.04f" % result.distance.mi for result in sqs],
                         ['0.0199', '0.0454', '0.0483', '0.0483'])

        sqs = self.sqs.auto_query('coffee').dwithin('location', self.downtown_pnt, D(mi=0.1))
        sqs = sqs.distance('location', self.downtown_pnt).order_by('-distance')
        self.assertEqual(sqs.count(), 4)
        self.assertEqual([result.pk for result in sqs], ['3', '1', '6', '8'])
        self.assertEqual(["%0.04f" % result.distance.mi for result in sqs],
                         ['0.0483', '0.0483', '0.0454', '0.0199'])

        sqs = self.sqs.auto_query('coffee')
        sqs = sqs.within('location', self.downtown_bottom_left, self.downtown_top_right)
        sqs = sqs.distance('location', self.downtown_pnt).order_by('-created')
        self.assertEqual(sqs.count(), 5)
        self.assertEqual([result.pk for result in sqs], ['8', '6', '3', '2', '1'])

        sqs = self.sqs.auto_query('coffee').dwithin('location', self.downtown_pnt, D(mi=0.1))
        sqs = sqs.distance('location', self.downtown_pnt).order_by('-created')
        self.assertEqual(sqs.count(), 4)
        self.assertEqual([result.pk for result in sqs], ['8', '6', '3', '1'])
class SearchQuerySetTestCase(TestCase):
    fixtures = ['initial_data.json', 'bulk_data.json']

    def setUp(self):
        """Install a mock unified index + backend data for every test.

        Stows the real unified index so ``tearDown`` can restore it.
        """
        super(SearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.cpkmmsi = CharPKMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.cpkmmsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.msqs = SearchQuerySet()

        # Reset query counters so each test starts from zero.
        reset_search_queries()

    def tearDown(self):
        # Restore the unified index stowed in setUp.
        connections['default']._index = self.old_unified_index
        super(SearchQuerySetTestCase, self).tearDown()

    def test_len(self):
        """len() reflects the mock backend's 23 hits."""
        hit_count = len(self.msqs)
        self.assertEqual(hit_count, 23)

    def test_repr(self):
        """repr() shows the query object and ``using``, running no queries."""
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertRegexpMatches(repr(self.msqs),
                                r'^<SearchQuerySet: query=<test_searchstack.mocks.MockSearchQuery object'
                                r' at 0x[0-9A-Fa-f]+>, using=None>$')

    def test_iter(self):
        """Iterating yields every mock result, issuing three backend queries."""
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        everything = self.msqs.all()
        got = [int(item.pk) for item in everything]
        expected = [item.pk for item in MOCK_SEARCH_RESULTS[:23]]
        self.assertEqual(got, expected)
        self.assertEqual(len(connections['default'].queries), 3)

    def test_slice(self):
        """Slicing and single-index access trigger exactly one query each."""
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual([int(res.pk) for res in results[1:11]], [res.pk for res in MOCK_SEARCH_RESULTS[1:11]])
        self.assertEqual(len(connections['default'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        # Indexing near the end still costs a single backend query.
        self.assertEqual(int(results[22].pk), MOCK_SEARCH_RESULTS[22].pk)
        self.assertEqual(len(connections['default'].queries), 1)

    def test_manual_iter(self):
        """``_manual_iter()`` streams results, batching backend queries.

        The second half ensures iteration terminates even when fewer
        results come back than the hit count promises.
        """
        results = self.msqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        check = [result.pk for result in results._manual_iter()]
        self.assertEqual(check, ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23'])

        self.assertEqual(len(connections['default'].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        # This will hang indefinitely if broken.
        # NOTE(review): ``old_ui`` aliases ``self.ui`` and ``build`` mutates
        # it in place, so the "restore" at the end is a no-op; the real
        # restore happens in tearDown. Confirm this is intended.
        old_ui = self.ui
        self.ui.build(indexes=[self.cpkmmsi])
        connections['default']._index = self.ui
        self.cpkmmsi.update()

        results = self.msqs.all()
        loaded = [result.pk for result in results._manual_iter()]
        self.assertEqual(loaded, ['sometext', '1234'])
        self.assertEqual(len(connections['default'].queries), 1)

        connections['default']._index = old_ui

    def test_fill_cache(self):
        """``_fill_cache()`` populates the result cache incrementally.

        The second half verifies that when the backend returns unhandled
        models (holes in the results), the cache still reaches the
        requested size — at the cost of extra backend queries.
        """
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        sqs = SearchQuerySet().all()
        sqs.query.backend = MixedMockSearchBackend('default')
        results = sqs
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 0)
        self.assertEqual([int(result.pk) for result in results._result_cache if result is not None], [])
        self.assertEqual(len(connections['default'].queries), 0)
        # Each window needs two queries here because the first pass comes
        # back short (pk 9, 13, 14 are unhandled by the mixed backend).
        results._fill_cache(0, 10)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 9)
        self.assertEqual([int(result.pk) for result in results._result_cache if result is not None], [1, 2, 3, 4, 5, 6, 7, 8, 10])
        self.assertEqual(len(connections['default'].queries), 2)
        results._fill_cache(10, 20)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 17)
        self.assertEqual([int(result.pk) for result in results._result_cache if result is not None], [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20])
        self.assertEqual(len(connections['default'].queries), 4)
        results._fill_cache(20, 30)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
        self.assertEqual([int(result.pk) for result in results._result_cache if result is not None], [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20, 21, 22, 23])
        self.assertEqual(len(connections['default'].queries), 6)

    def test_cache_is_full(self):
        """``_cache_is_full()`` flips to True once the queryset is iterated."""
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.msqs._cache_is_full(), False)
        results = self.msqs.all()
        # Consume the iterator so the cache fills; the materialized list
        # itself is irrelevant (previously bound to an unused variable).
        for _ in results:
            pass
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 3)

    def test_all(self):
        """all() hands back a SearchQuerySet clone."""
        everything = self.msqs.all()
        self.assertIsInstance(everything, SearchQuerySet)

    def test_filter(self):
        """filter() clones the queryset and records one filter node."""
        filtered = self.msqs.filter(content='foo')
        self.assertIsInstance(filtered, SearchQuerySet)
        self.assertEqual(len(filtered.query.query_filter), 1)

    def test_exclude(self):
        """exclude() clones the queryset and records one filter node."""
        excluded = self.msqs.exclude(content='foo')
        self.assertIsInstance(excluded, SearchQuerySet)
        self.assertEqual(len(excluded.query.query_filter), 1)

    def test_order_by(self):
        """order_by() records the field on the underlying query."""
        ordered = self.msqs.order_by('foo')
        self.assertIsInstance(ordered, SearchQuerySet)
        self.assertIn('foo', ordered.query.order_by)

    def test_models(self):
        """``models()`` must restrict the queryset to the given models.

        Restores the stowed index in a ``finally`` so a failing assertion
        can't leak the modified index mid-test (previously the stowed
        ``old_unified_index`` was never used; only tearDown restored it).
        """
        # Stow.
        old_unified_index = connections['default']._index
        ui = UnifiedIndex()
        bmmsi = BasicMockModelSearchIndex()
        bammsi = BasicAnotherMockModelSearchIndex()
        ui.build(indexes=[bmmsi, bammsi])
        connections['default']._index = ui

        try:
            msqs = SearchQuerySet()

            sqs = msqs.all()
            self.assertTrue(isinstance(sqs, SearchQuerySet))
            self.assertEqual(len(sqs.query.models), 0)

            sqs = msqs.models(MockModel)
            self.assertTrue(isinstance(sqs, SearchQuerySet))
            self.assertEqual(len(sqs.query.models), 1)

            sqs = msqs.models(MockModel, AnotherMockModel)
            self.assertTrue(isinstance(sqs, SearchQuerySet))
            self.assertEqual(len(sqs.query.models), 2)

            # This will produce a warning (AnotherMockModel is no longer
            # handled), but the model is still recorded on the query.
            ui.build(indexes=[bmmsi])
            sqs = msqs.models(AnotherMockModel)
            self.assertTrue(isinstance(sqs, SearchQuerySet))
            self.assertEqual(len(sqs.query.models), 1)
        finally:
            # Restore.
            connections['default']._index = old_unified_index

    def test_result_class(self):
        """result_class() overrides and resets the hydration class."""
        # The default hydration class is SearchResult.
        default_sqs = self.msqs.all()
        self.assertTrue(issubclass(default_sqs.query.result_class, SearchResult))

        # A custom class is honored.
        class IttyBittyResult(object):
            pass

        custom_sqs = self.msqs.result_class(IttyBittyResult)
        self.assertTrue(issubclass(custom_sqs.query.result_class, IttyBittyResult))

        # Passing None restores the default.
        reset_sqs = self.msqs.result_class(None)
        self.assertTrue(issubclass(reset_sqs.query.result_class, SearchResult))

    def test_boost(self):
        """boost() stores exactly one boost entry on the query."""
        boosted = self.msqs.boost('foo', 10)
        self.assertIsInstance(boosted, SearchQuerySet)
        self.assertEqual(len(boosted.query.boost.keys()), 1)

    def test_highlight(self):
        """highlight() flips the query's highlight flag on."""
        highlighted = self.msqs.highlight()
        self.assertIsInstance(highlighted, SearchQuerySet)
        self.assertEqual(highlighted.query.highlight, True)

    def test_spelling(self):
        """With spelling support disabled, suggestions come back as None."""
        misspelled = self.msqs.filter(content='Indx')
        self.assertIsInstance(misspelled, SearchQuerySet)
        self.assertIsNone(misspelled.spelling_suggestion())
        self.assertIsNone(misspelled.spelling_suggestion('indexy'))

    def test_raw_search(self):
        """raw_search() passes queries straight through to the backend."""
        for raw_query in ('foo', '(content__exact:hello AND content__exact:world)'):
            self.assertEqual(len(self.msqs.raw_search(raw_query)), 23)

    def test_load_all(self):
        """``load_all()`` hydrates results, including char-primary-key models.

        Also verifies that an empty unified index (no handled models)
        yields zero results rather than erroring.
        """
        # Models with character primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = CharPKMockSearchBackend('charpk')
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # If nothing is handled, you get nothing.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        ui.build(indexes=[])
        connections['default']._index = ui

        sqs = self.msqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        connections['default']._index = old_ui

        # For full tests, see the solr_backend.

    def test_load_all_read_queryset(self):
        """``load_all()`` must honor each index's ``read_queryset()``.

        First installs an index whose read queryset excludes the deleted
        object (only 1 hydrated result survives), then one whose read
        queryset includes deleted objects (both results survive).
        """
        # Stow.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        gafmmsi = GhettoAFifthMockModelSearchIndex()
        ui.build(indexes=[gafmmsi])
        connections['default']._index = ui
        gafmmsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # The deleted result isn't returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 1)

        # Register a SearchIndex with a read_queryset that returns deleted items
        rqstsi = TextReadQuerySetTestSearchIndex()
        ui.build(indexes=[rqstsi])
        rqstsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # Both the deleted and not deleted items are returned
        self.assertEqual(len([result for result in results._result_cache if result is not None]), 2)

        # Restore.
        connections['default']._index = old_ui

    def test_auto_query(self):
        """``auto_query()`` builds a content filter preserving the raw text.

        Quotes, apostrophes, dashes and unbalanced quotes must all pass
        through into the query filter unchanged; ``fieldname`` targets a
        field other than ``content``.
        """
        sqs = self.msqs.auto_query('test search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains=test search -stuff>')

        sqs = self.msqs.auto_query('test "my thing" search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains=test "my thing" search -stuff>')

        sqs = self.msqs.auto_query('test "my thing" search \'moar quotes\' -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains=test "my thing" search \'moar quotes\' -stuff>')

        sqs = self.msqs.auto_query('test "my thing" search \'moar quotes\' "foo -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains=test "my thing" search \'moar quotes\' "foo -stuff>')

        sqs = self.msqs.auto_query('test - stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), "<SQ: AND content__contains=test - stuff>")

        # Ensure bits in exact matches get escaped properly as well.
        sqs = self.msqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND content__contains="pants:rule">')

        # Now with a different fieldname
        sqs = self.msqs.auto_query('test search -stuff', fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), "<SQ: AND title__contains=test search -stuff>")

        sqs = self.msqs.auto_query('test "my thing" search -stuff', fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter), '<SQ: AND title__contains=test "my thing" search -stuff>')

    def test_count(self):
        """count() reports the mock backend's 23 total hits."""
        total = self.msqs.count()
        self.assertEqual(total, 23)

    def test_facet_counts(self):
        """With no facets requested, facet_counts() is an empty dict."""
        counts = self.msqs.facet_counts()
        self.assertEqual(counts, {})

    def test_best_match(self):
        """best_match() returns a single SearchResult."""
        best = self.msqs.best_match()
        self.assertIsInstance(best, SearchResult)

    def test_latest(self):
        """latest() on a date field returns a single SearchResult."""
        newest = self.msqs.latest('pub_date')
        self.assertIsInstance(newest, SearchResult)

    def test_more_like_this(self):
        """more_like_this() returns the full mock result set."""
        mock = MockModel()
        mock.id = 1

        similar = self.msqs.more_like_this(mock)
        self.assertEqual(len(similar), 23)

    def test_facets(self):
        """Each facet() call adds one facet to the cloned query."""
        single = self.msqs.facet('foo')
        self.assertIsInstance(single, SearchQuerySet)
        self.assertEqual(len(single.query.facets), 1)

        double = self.msqs.facet('foo').facet('bar')
        self.assertIsInstance(double, SearchQuerySet)
        self.assertEqual(len(double.query.facets), 2)

    def test_date_facets(self):
        """``date_facet()`` validates ``gap_by`` and accumulates per call.

        Uses ``assertRaises`` as a context manager instead of the old
        try/``self.fail()``/except pattern.
        """
        # An unknown gap_by must raise FacetingError with a helpful message.
        with self.assertRaises(FacetingError) as cm:
            self.msqs.date_facet('foo', start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='smarblaph')
        self.assertEqual(str(cm.exception), "The gap_by ('smarblaph') must be one of the following: year, month, day, hour, minute, second.")

        sqs = self.msqs.date_facet('foo', start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='month')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.date_facets), 1)

        sqs2 = self.msqs.date_facet('foo', start_date=datetime.date(2008, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='month').date_facet('bar', start_date=datetime.date(2007, 2, 25), end_date=datetime.date(2009, 2, 25), gap_by='year')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.date_facets), 2)

    def test_query_facets(self):
        """query_facet() accumulates, and repeats on one field are allowed."""
        one = self.msqs.query_facet('foo', '[bar TO *]')
        self.assertIsInstance(one, SearchQuerySet)
        self.assertEqual(len(one.query.query_facets), 1)

        two = self.msqs.query_facet('foo', '[bar TO *]').query_facet('bar', '[100 TO 499]')
        self.assertIsInstance(two, SearchQuerySet)
        self.assertEqual(len(two.query.query_facets), 2)

        # The same field may carry multiple query facets.
        three = self.msqs.query_facet('foo', '[bar TO *]').query_facet('bar', '[100 TO 499]').query_facet('foo', '[1000 TO 1499]')
        self.assertIsInstance(three, SearchQuerySet)
        self.assertEqual(len(three.query.query_facets), 3)

    def test_stats(self):
        """stats_facet() keys stats by field, so repeated fields don't grow it."""
        one_field = self.msqs.stats_facet('foo','bar')
        self.assertIsInstance(one_field, SearchQuerySet)
        self.assertEqual(len(one_field.query.stats), 1)

        # Two facets on the same field occupy a single stats slot.
        same_field = self.msqs.stats_facet('foo','bar').stats_facet('foo','baz')
        self.assertIsInstance(same_field, SearchQuerySet)
        self.assertEqual(len(same_field.query.stats), 1)

        two_fields = self.msqs.stats_facet('foo','bar').stats_facet('moof','baz')
        self.assertIsInstance(two_fields, SearchQuerySet)
        self.assertEqual(len(two_fields.query.stats), 2)

    def test_narrow(self):
        """narrow() records one narrow query on the clone."""
        narrowed = self.msqs.narrow('foo:moof')
        self.assertIsInstance(narrowed, SearchQuerySet)
        self.assertEqual(len(narrowed.query.narrow_queries), 1)

    def test_clone(self):
        """_clone() copies the query but resets all result caches."""
        original = self.msqs.filter(foo='bar', foo__lt='10')
        copied = original._clone()

        self.assertIsInstance(copied, SearchQuerySet)
        self.assertEqual(str(copied.query), str(original.query))
        # The clone starts with empty caches and no counted results.
        self.assertEqual(copied._result_cache, [])
        self.assertEqual(copied._result_count, None)
        self.assertEqual(copied._cache_full, False)
        self.assertEqual(copied._using, original._using)

    def test_using(self):
        """Constructing with using= pins the query to that connection."""
        pinned = SearchQuerySet(using='default')
        self.assertIsNotNone(pinned.query)
        self.assertEqual(pinned.query._using, 'default')

    def test_chaining(self):
        """Chained filters never leak back into the shared base queryset."""
        first = self.msqs.filter(content='foo')
        self.assertIsInstance(first, SearchQuerySet)
        self.assertEqual(len(first.query.query_filter), 1)

        # A second, independent chain sees none of the first's filters.
        second = self.msqs.filter(content='bar')
        self.assertIsInstance(second, SearchQuerySet)
        self.assertEqual(len(second.query.query_filter), 1)

    def test_none(self):
        """none() yields an EmptySearchQuerySet of length zero."""
        empty = self.msqs.none()
        self.assertIsInstance(empty, EmptySearchQuerySet)
        self.assertEqual(len(empty), 0)

    def test___and__(self):
        """The & operator merges the filters of both querysets."""
        left = self.msqs.filter(content='foo')
        right = self.msqs.filter(content='bar')
        combined = left & right

        self.assertIsInstance(combined, SearchQuerySet)
        self.assertEqual(len(combined.query.query_filter), 2)

    def test___or__(self):
        """The | operator merges the filters of both querysets."""
        left = self.msqs.filter(content='foo')
        right = self.msqs.filter(content='bar')
        combined = left | right

        self.assertIsInstance(combined, SearchQuerySet)
        self.assertEqual(len(combined.query.query_filter), 2)

    def test_and_or(self):
        """
        Combining AND queries with OR should give
            AND(OR(a, b), OR(c, d))
        """
        left = self.msqs.filter(content='foo').filter(content='oof')
        right = self.msqs.filter(content='bar').filter(content='rab')
        combined = left | right

        self.assertEqual(combined.query.query_filter.connector, 'OR')
        self.assertEqual(repr(combined.query.query_filter.children[0]), repr(left.query.query_filter))
        self.assertEqual(repr(combined.query.query_filter.children[1]), repr(right.query.query_filter))

    def test_or_and(self):
        """
        AND-ing two OR-connected querysets should produce
            OR(AND(a, b), AND(c, d))
        """
        left = self.msqs.filter(content='foo').filter_or(content='oof')
        right = self.msqs.filter(content='bar').filter_or(content='rab')
        combined = left & right

        combined_filter = combined.query.query_filter
        self.assertEqual(combined_filter.connector, 'AND')
        # Each original filter tree should survive intact as a child node.
        self.assertEqual(repr(combined_filter.children[0]), repr(left.query.query_filter))
        self.assertEqual(repr(combined_filter.children[1]), repr(right.query.query_filter))
    def test_handle_delete(self):
        """Deleting a model instance removes its record only from the backends
        that index it, leaving other models and backends untouched."""
        # The signal wiring is fairly leaky (abstraction-wise), so verify the
        # actual setup: exactly one post_delete receiver must be registered.
        self.assertEqual(len(models.signals.post_delete.receivers), 1)

        # Baseline: confirm the pre-delete index contents on both backends.
        sqs = SearchQuerySet('solr')
        solr = sqs.using('solr')
        elastic = sqs.using('elasticsearch')
        self.assertEqual(solr.count(), 5)
        self.assertEqual(solr.models(Foo).count(), 2)
        self.assertEqual(solr.models(Bar).count(), 3)
        self.assertEqual(elastic.count(), 2)
        self.assertEqual(elastic.models(Foo).count(), 2)

        self.assertEqual(solr.models(Foo).order_by('django_id')[0].text, 'foo 1')
        self.assertEqual(elastic.models(Foo).order_by('django_id')[0].text, 'foo 1')

        # Delete the model, which should fire the signal & remove the record
        # from the index.
        self.foo_1.delete()

        # Re-check the indexes: only the Solr backend (which routes Foo
        # deletions) should have dropped the record.
        sqs = SearchQuerySet('solr')
        solr = sqs.using('solr')
        elastic = sqs.using('elasticsearch')
        self.assertEqual(solr.count(), 4)
        self.assertEqual(solr.models(Foo).count(), 1)
        self.assertEqual(solr.models(Bar).count(), 3)
        self.assertEqual(elastic.count(), 2)
        self.assertEqual(elastic.models(Foo).count(), 2)

        self.assertEqual(solr.models(Foo).order_by('django_id')[0].text, 'foo 2')
        self.assertEqual(elastic.models(Foo).order_by('django_id')[0].text, 'foo 1')
    def test_handle_save(self):
        """Saving a model instance re-indexes its record only on the backends
        that route it, leaving counts and other backends unchanged."""
        # The signal wiring is fairly leaky (abstraction-wise), so verify the
        # actual setup: exactly one post_save receiver must be registered.
        self.assertEqual(len(models.signals.post_save.receivers), 1)

        # Baseline: confirm the pre-save index contents on both backends.
        sqs = SearchQuerySet('solr')
        solr = sqs.using('solr')
        elastic = sqs.using('elasticsearch')
        self.assertEqual(solr.count(), 5)
        self.assertEqual(solr.models(Foo).count(), 2)
        self.assertEqual(solr.models(Bar).count(), 3)
        self.assertEqual(elastic.count(), 2)
        self.assertEqual(elastic.models(Foo).count(), 2)

        self.assertEqual(solr.models(Foo).order_by('django_id')[0].text, 'foo 1')
        self.assertEqual(elastic.models(Foo).order_by('django_id')[0].text, 'foo 1')

        # Save the model, which should fire the signal & index the new data.
        self.foo_1.body = 'A different body'
        self.foo_1.save()

        # Re-check the indexes: counts stay the same, but the Solr backend
        # (which routes Foo saves) now holds the updated text.
        sqs = SearchQuerySet('solr')
        solr = sqs.using('solr')
        elastic = sqs.using('elasticsearch')
        self.assertEqual(solr.count(), 5)
        self.assertEqual(solr.models(Foo).count(), 2)
        self.assertEqual(solr.models(Bar).count(), 3)
        self.assertEqual(elastic.count(), 2)
        self.assertEqual(elastic.models(Foo).count(), 2)

        self.assertEqual(solr.models(Foo).order_by('django_id')[0].text, 'A different body')
        self.assertEqual(elastic.models(Foo).order_by('django_id')[0].text, 'foo 1')