def test_iter(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    sqs = self.sqs.all()
    results = sorted([int(result.pk) for result in sqs])
    self.assertEqual(results, list(range(1, 24)))
    self.assertEqual(len(connections['default'].queries), 3)
def test_log_query(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    # Stow.
    old_debug = settings.DEBUG
    settings.DEBUG = False

    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 0)

    settings.DEBUG = True
    # Redefine it to clear out the cached results.
    self.sq = connections['default'].get_query()
    self.sq.add_filter(SQ(name='bar'))
    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 1)
    self.assertEqual(str(connections['default'].queries[0]['query_string']),
                     u'Xapian::Query((ZXNAMEbar OR XNAMEbar))')

    # And again, for good measure.
    self.sq = connections['default'].get_query()
    self.sq.add_filter(SQ(name='bar'))
    self.sq.add_filter(SQ(text='moof'))
    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 2)
    self.assertEqual(str(connections['default'].queries[0]['query_string']),
                     u'Xapian::Query((ZXNAMEbar OR XNAMEbar))')
    self.assertEqual(str(connections['default'].queries[1]['query_string']),
                     u'Xapian::Query(((ZXNAMEbar OR XNAMEbar) AND (ZXTEXTmoof OR XTEXTmoof)))')

    # Restore.
    settings.DEBUG = old_debug
def test_usage(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    self.assertEqual(self.client.login(username='******', password='******'), True)

    # First, non-search behavior.
    resp = self.client.get('/admin/core/mockmodel/')
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(connections['default'].queries), 0)
    self.assertEqual(resp.context['cl'].full_result_count, 23)

    # Then search behavior.
    resp = self.client.get('/admin/core/mockmodel/', data={'q': 'Haystack'})
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(connections['default'].queries), 3)
    self.assertEqual(resp.context['cl'].full_result_count, 23)

    # Ensure they aren't search results.
    self.assertEqual(isinstance(resp.context['cl'].result_list[0], MockModel), True)
    self.assertEqual(resp.context['cl'].result_list[0].id, 17)

    # Make sure only changelist is affected.
    resp = self.client.get('/admin/core/mockmodel/1/')
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(connections['default'].queries), 3)
    self.assertEqual(resp.context['original'].id, 1)
def test_log_query(self):
    from django.conf import settings

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    # Stow.
    old_debug = settings.DEBUG
    settings.DEBUG = False

    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 0)

    settings.DEBUG = True
    # Redefine it to clear out the cached results.
    self.sq = connections['default'].query()
    self.sq.add_filter(SQ(name='bar'))
    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 1)
    self.assertEqual(connections['default'].queries[0]['query_string'], 'name:bar')

    # And again, for good measure.
    self.sq = connections['default'].query()
    self.sq.add_filter(SQ(name='bar'))
    self.sq.add_filter(SQ(text='moof'))
    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 2)
    self.assertEqual(connections['default'].queries[0]['query_string'], 'name:bar')
    self.assertEqual(connections['default'].queries[1]['query_string'],
                     u'(name:bar AND text:moof)')

    # Restore.
    settings.DEBUG = old_debug
def test_usage(self):
    reset_search_queries()
    self.assertEqual(len(connections["solr"].queries), 0)

    self.assertEqual(
        self.client.login(username="******", password="******"), True
    )

    # First, non-search behavior.
    resp = self.client.get("/admin/core/mockmodel/")
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(connections["solr"].queries), 0)
    self.assertEqual(resp.context["cl"].full_result_count, 23)

    # Then search behavior.
    resp = self.client.get("/admin/core/mockmodel/", data={"q": "Haystack"})
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(connections["solr"].queries), 3)
    self.assertEqual(resp.context["cl"].full_result_count, 23)

    # Ensure they aren't search results.
    self.assertEqual(isinstance(resp.context["cl"].result_list[0], MockModel), True)

    result_pks = [i.pk for i in resp.context["cl"].result_list]
    self.assertIn(5, result_pks)

    # Make sure only changelist is affected.
    resp = self.client.get(reverse("admin:core_mockmodel_change", args=(1,)))
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp.context["original"].id, 1)
    self.assertTemplateUsed(resp, "admin/change_form.html")

    # The Solr query count should be unchanged:
    self.assertEqual(len(connections["solr"].queries), 3)
def test_related_iter(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    sqs = self.rsqs.all()
    results = [int(result.pk) for result in sqs]
    self.assertEqual(results, [2, 7, 12, 17, 1, 6, 11, 16, 23, 5, 10, 15, 22, 4, 9, 14, 19, 21, 3, 8, 13, 18, 20])
    self.assertEqual(len(connections['default'].queries), 4)
def test_related_iter(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    sqs = self.rsqs.all()
    results = [int(result.pk) for result in sqs]
    # Compare against a list, not a range object, so the assertion also holds on Python 3.
    self.assertEqual(results, list(range(1, 24)))
    self.assertEqual(len(connections['default'].queries), 4)
def test_log_query(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    # Stow.
    old_debug = settings.DEBUG
    settings.DEBUG = False

    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 0)

    settings.DEBUG = True
    # Redefine it to clear out the cached results.
    self.sq = connections['default'].get_query()
    self.sq.add_filter(SQ(name='bar'))
    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 1)
    self.assertExpectedQuery(connections['default'].queries[0]['query_string'],
                             '(XNAME^ PHRASE 3 XNAMEbar PHRASE 3 XNAME$)')

    # And again, for good measure.
    self.sq = connections['default'].get_query()
    self.sq.add_filter(SQ(name='bar'))
    self.sq.add_filter(SQ(text='moof'))
    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 2)
    self.assertExpectedQuery(connections['default'].queries[0]['query_string'],
                             '(XNAME^ PHRASE 3 XNAMEbar PHRASE 3 XNAME$)')
    self.assertExpectedQuery(connections['default'].queries[1]['query_string'],
                             '((XNAME^ PHRASE 3 XNAMEbar PHRASE 3 XNAME$) AND'
                             ' (XTEXT^ PHRASE 3 XTEXTmoof PHRASE 3 XTEXT$))')

    # Restore.
    settings.DEBUG = old_debug
def test_usage(self):
    reset_search_queries()
    self.assertEqual(len(connections['solr'].queries), 0)

    self.assertEqual(self.client.login(username='******', password='******'), True)

    # First, non-search behavior.
    resp = self.client.get('/admin/core/mockmodel/')
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(connections['solr'].queries), 0)
    self.assertEqual(resp.context['cl'].full_result_count, 23)

    # Then search behavior.
    resp = self.client.get('/admin/core/mockmodel/', data={'q': 'Haystack'})
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(connections['solr'].queries), 3)
    self.assertEqual(resp.context['cl'].full_result_count, 23)

    # Ensure they aren't search results.
    self.assertEqual(isinstance(resp.context['cl'].result_list[0], MockModel), True)

    result_pks = [i.pk for i in resp.context['cl'].result_list]
    self.assertIn(5, result_pks)

    # Make sure only changelist is affected.
    resp = self.client.get(reverse('admin:core_mockmodel_change', args=(1, )))
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp.context['original'].id, 1)
    self.assertTemplateUsed(resp, 'admin/change_form.html')

    # The Solr query count should be unchanged:
    self.assertEqual(len(connections['solr'].queries), 3)
def test_log_query(self):
    from django.conf import settings

    reset_search_queries()
    self.assertEqual(len(connections['whoosh'].queries), 0)

    # Stow.
    with self.settings(DEBUG=False):
        len(self.sq.get_results())
        self.assertEqual(len(connections['whoosh'].queries), 0)

    with self.settings(DEBUG=True):
        # Redefine it to clear out the cached results.
        self.sq = connections['whoosh'].get_query()
        self.sq.add_filter(SQ(name='bar'))
        len(self.sq.get_results())
        self.assertEqual(len(connections['whoosh'].queries), 1)
        self.assertEqual(connections['whoosh'].queries[0]['query_string'], 'name:(bar)')

        # And again, for good measure.
        self.sq = connections['whoosh'].get_query()
        self.sq.add_filter(SQ(name='baz'))
        self.sq.add_filter(SQ(text='foo'))
        len(self.sq.get_results())
        self.assertEqual(len(connections['whoosh'].queries), 2)
        self.assertEqual(connections['whoosh'].queries[0]['query_string'], 'name:(bar)')
        self.assertEqual(connections['whoosh'].queries[1]['query_string'],
                         u'(name:(baz) AND text:(foo))')
def test_usage(self):
    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)

    self.assertEqual(self.client.login(username="******", password="******"), True)

    # First, non-search behavior.
    resp = self.client.get("/admin/core/mockmodel/")
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(connections["default"].queries), 0)
    self.assertEqual(resp.context["cl"].full_result_count, 23)

    # Then search behavior.
    resp = self.client.get("/admin/core/mockmodel/", data={"q": "Haystack"})
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(connections["default"].queries), 3)
    self.assertEqual(resp.context["cl"].full_result_count, 23)

    # Ensure they aren't search results.
    self.assertEqual(isinstance(resp.context["cl"].result_list[0], MockModel), True)
    self.assertEqual(resp.context["cl"].result_list[0].id, 5)

    # Make sure only changelist is affected.
    resp = self.client.get("/admin/core/mockmodel/1/")
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(connections["default"].queries), 3)
    self.assertEqual(resp.context["original"].id, 1)
def test_manual_iter(self):
    results = self.msqs.all()

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    check = [result.pk for result in results._manual_iter()]
    self.assertEqual(check, [u'1', u'2', u'3', u'4', u'5', u'6', u'7', u'8', u'9', u'10', u'11', u'12',
                             u'13', u'14', u'15', u'16', u'17', u'18', u'19', u'20', u'21', u'22', u'23'])
    self.assertEqual(len(connections['default'].queries), 3)

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    # Test to ensure we properly fill the cache, even if we get fewer
    # results back (not a handled model) than the hit count indicates.
    # This will hang indefinitely if broken.
    old_ui = self.ui
    self.ui.build(indexes=[self.cpkmmsi])
    connections['default']._index = self.ui
    self.cpkmmsi.update()

    results = self.msqs.all()
    loaded = [result.pk for result in results._manual_iter()]
    self.assertEqual(loaded, [u'sometext', u'1234'])
    self.assertEqual(len(connections['default'].queries), 1)

    connections['default']._index = old_ui
def test_repr(self):
    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)
    self.assertRegexpMatches(
        repr(self.msqs),
        r"^<SearchQuerySet: query=<test_haystack.mocks.MockSearchQuery object"
        r" at 0x[0-9A-Fa-f]+>, using=None>$",
    )
def test_iter(self):
    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)

    msqs = self.msqs.all()
    results = [int(res.pk) for res in iter(msqs)]
    self.assertEqual(results, [res.pk for res in MOCK_SEARCH_RESULTS[:23]])
    self.assertEqual(len(connections["default"].queries), 3)
def test_repr(self):
    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)

    self.assertEqual(
        repr(self.msqs).replace("u'", "'"),
        "[<SearchResult: core.mockmodel (pk='1')>, <SearchResult: core.mockmodel (pk='2')>, <SearchResult: core.mockmodel (pk='3')>, <SearchResult: core.mockmodel (pk='4')>, <SearchResult: core.mockmodel (pk='5')>, <SearchResult: core.mockmodel (pk='6')>, <SearchResult: core.mockmodel (pk='7')>, <SearchResult: core.mockmodel (pk='8')>, <SearchResult: core.mockmodel (pk='9')>, <SearchResult: core.mockmodel (pk='10')>, <SearchResult: core.mockmodel (pk='11')>, <SearchResult: core.mockmodel (pk='12')>, <SearchResult: core.mockmodel (pk='13')>, <SearchResult: core.mockmodel (pk='14')>, <SearchResult: core.mockmodel (pk='15')>, <SearchResult: core.mockmodel (pk='16')>, <SearchResult: core.mockmodel (pk='17')>, <SearchResult: core.mockmodel (pk='18')>, <SearchResult: core.mockmodel (pk='19')>, '...(remaining elements truncated)...']",
    )
    self.assertEqual(len(connections["default"].queries), 1)
def test_manual_iter(self):
    results = self.sqs.all()

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    results = [int(result.pk) for result in results._manual_iter()]
    # Compare against a list, not a range object, so the assertion also holds on Python 3.
    self.assertEqual(results, list(range(1, 24)))
    self.assertEqual(len(connections['default'].queries), 3)
def test_cache_is_full(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    self.assertEqual(self.msqs._cache_is_full(), False)

    results = self.msqs.all()
    fire_the_iterator_and_fill_cache = [result for result in results]
    self.assertEqual(results._cache_is_full(), True)
    self.assertEqual(len(connections['default'].queries), 3)
def test_related_manual_iter(self):
    results = self.rsqs.all()

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    results = sorted([int(result.pk) for result in results._manual_iter()])
    self.assertEqual(results, list(range(1, 24)))
    self.assertEqual(len(connections['default'].queries), 4)
def test_cache_is_full(self):
    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)
    self.assertEqual(self.msqs._cache_is_full(), False)

    results = self.msqs.all()
    fire_the_iterator_and_fill_cache = list(results)
    self.assertEqual(23, len(fire_the_iterator_and_fill_cache))
    self.assertEqual(results._cache_is_full(), True)
    self.assertEqual(len(connections["default"].queries), 4)
def test_manual_iter(self):
    self.sb.update(self.wmmi, self.sample_objs)
    results = self.sqs.auto_query('Indexed!')

    reset_search_queries()
    self.assertEqual(len(connections['whoosh'].queries), 0)

    results = [int(result.pk) for result in results._manual_iter()]
    self.assertEqual(sorted(results), [1, 2, 3])
    self.assertEqual(len(connections['whoosh'].queries), 1)
def test_cache_is_full(self):
    self.sb.update(self.wmmi, self.sample_objs)

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    self.assertEqual(self.sqs._cache_is_full(), False)

    results = self.sqs.auto_query('Indexed!')
    fire_the_iterator_and_fill_cache = [result for result in results]
    self.assertEqual(results._cache_is_full(), True)
    self.assertEqual(len(connections['default'].queries), 1)
def test_cache_is_full(self):
    self.sb.update(self.wmmi, self.sample_objs)

    reset_search_queries()
    self.assertEqual(len(connections['whoosh'].queries), 0)
    self.assertEqual(self.sqs._cache_is_full(), False)

    results = self.sqs.auto_query('Indexed!')
    # Exhaust the iterator so the result cache gets filled.
    [result for result in results]
    self.assertEqual(results._cache_is_full(), True)
    self.assertEqual(len(connections['whoosh'].queries), 1)
def test_count(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    sqs = self.sqs.all()
    self.assertEqual(sqs.count(), 23)
    self.assertEqual(sqs.count(), 23)
    self.assertEqual(len(sqs), 23)
    self.assertEqual(sqs.count(), 23)
    # Should only execute one query to count the length of the result set.
    self.assertEqual(len(connections['default'].queries), 1)
def test_manual_iter(self):
    results = self.msqs.all()

    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)

    check = [result.pk for result in results._manual_iter()]
    self.assertEqual(
        check,
        ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12",
         "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23"],
    )
    self.assertEqual(len(connections["default"].queries), 3)

    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)

    # Test to ensure we properly fill the cache, even if we get fewer
    # results back (not a handled model) than the hit count indicates.
    # This will hang indefinitely if broken.
    old_ui = self.ui
    self.ui.build(indexes=[self.cpkmmsi])
    connections["default"]._index = self.ui
    self.cpkmmsi.update()

    results = self.msqs.all()
    loaded = [result.pk for result in results._manual_iter()]
    self.assertEqual(loaded, ["sometext", "1234"])
    self.assertEqual(len(connections["default"].queries), 1)

    connections["default"]._index = old_ui
def test_slice(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    results = self.sqs.all()
    self.assertEqual([int(result.pk) for result in results[1:11]], [7, 12, 17, 1, 6, 11, 16, 23, 5, 10])
    self.assertEqual(len(connections['default'].queries), 1)

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    results = self.sqs.all()
    self.assertEqual(int(results[21].pk), 18)
    self.assertEqual(len(connections['default'].queries), 1)
def test_slice(self):
    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)
    results = self.msqs.all()
    self.assertEqual(
        [int(res.pk) for res in results[1:11]],
        [res.pk for res in MOCK_SEARCH_RESULTS[1:11]],
    )
    self.assertEqual(len(connections["default"].queries), 1)

    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)
    results = self.msqs.all()
    self.assertEqual(int(results[22].pk), MOCK_SEARCH_RESULTS[22].pk)
    self.assertEqual(len(connections["default"].queries), 1)
def test_slice(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    results = self.sqs.all()
    self.assertEqual([int(result.pk) for result in results[1:11]], [2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
    self.assertEqual(len(connections['default'].queries), 1)

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    results = self.sqs.all()
    self.assertEqual(int(results[21].pk), 22)
    self.assertEqual(len(connections['default'].queries), 1)
def test_related_fill_cache(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    results = self.rsqs.all()
    self.assertEqual(len(results._result_cache), 0)
    self.assertEqual(len(connections['default'].queries), 0)

    results._fill_cache(0, 10)
    self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
    self.assertEqual(len(connections['default'].queries), 1)

    results._fill_cache(10, 20)
    self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
    self.assertEqual(len(connections['default'].queries), 2)
def test_fill_cache(self):
    self.sb.update(self.wmmi, self.sample_objs)

    reset_search_queries()
    self.assertEqual(len(connections['whoosh'].queries), 0)

    results = self.sqs.auto_query('Indexed!')
    self.assertEqual(len(results._result_cache), 0)
    self.assertEqual(len(connections['whoosh'].queries), 0)

    results._fill_cache(0, 10)
    self.assertEqual(len([result for result in results._result_cache if result is not None]), 3)
    self.assertEqual(len(connections['whoosh'].queries), 1)

    results._fill_cache(10, 20)
    self.assertEqual(len([result for result in results._result_cache if result is not None]), 3)
    self.assertEqual(len(connections['whoosh'].queries), 2)
def test_slice(self):
    self.sb.update(self.wmmi, self.sample_objs)

    reset_search_queries()
    self.assertEqual(len(connections['whoosh'].queries), 0)
    results = self.sqs.auto_query('Indexed!')
    self.assertEqual(sorted([int(result.pk) for result in results[1:3]]), [1, 2])
    self.assertEqual(len(connections['whoosh'].queries), 1)

    reset_search_queries()
    self.assertEqual(len(connections['whoosh'].queries), 0)
    results = self.sqs.auto_query('Indexed!')
    self.assertEqual(int(results[0].pk), 1)
    self.assertEqual(len(connections['whoosh'].queries), 1)
def test_count(self):
    more_samples = []

    for i in range(1, 50):
        mock = MockModel()
        mock.id = i
        mock.author = 'daniel%s' % i
        mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
        more_samples.append(mock)

    self.sb.update(self.wmmi, more_samples)

    reset_search_queries()
    self.assertEqual(len(connections['whoosh'].queries), 0)

    results = self.sqs.all()
    self.assertEqual(len(results), 49)
    self.assertEqual(results._cache_is_full(), False)
    self.assertEqual(len(connections['whoosh'].queries), 1)
def test_fill_cache(self):
    self.sb.update(self.wmmi, self.sample_objs)

    reset_search_queries()
    self.assertEqual(len(connections["whoosh"].queries), 0)

    results = self.sqs.auto_query("Indexed!")
    self.assertEqual(len(results._result_cache), 0)
    self.assertEqual(len(connections["whoosh"].queries), 0)

    results._fill_cache(0, 10)
    self.assertEqual(
        len([result for result in results._result_cache if result is not None]), 3
    )
    self.assertEqual(len(connections["whoosh"].queries), 1)

    results._fill_cache(10, 20)
    self.assertEqual(
        len([result for result in results._result_cache if result is not None]), 3
    )
    self.assertEqual(len(connections["whoosh"].queries), 2)
def setUp(self):
    super(PickleSearchQuerySetTestCase, self).setUp()

    # Stow.
    self.old_unified_index = connections['default']._index
    self.ui = UnifiedIndex()
    self.bmmsi = BasicMockModelSearchIndex()
    self.cpkmmsi = CharPKMockModelSearchIndex()
    self.ui.build(indexes=[self.bmmsi, self.cpkmmsi])
    connections['default']._index = self.ui

    # Update the "index".
    backend = connections['default'].get_backend()
    backend.clear()
    backend.update(self.bmmsi, MockModel.objects.all())

    self.msqs = SearchQuerySet()

    # Clear the query log so each test starts from zero.
    reset_search_queries()
def test_related_fill_cache(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    results = self.rsqs.all()
    self.assertEqual(len(results._result_cache), 0)
    self.assertEqual(len(connections['default'].queries), 0)

    results._fill_cache(0, 10)
    self.assertEqual(
        len([result for result in results._result_cache if result is not None]), 10)
    self.assertEqual(len(connections['default'].queries), 1)

    results._fill_cache(10, 20)
    self.assertEqual(
        len([result for result in results._result_cache if result is not None]), 20)
    self.assertEqual(len(connections['default'].queries), 2)
def test_log_query(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    # Stow.
    self.old_unified_index = connections['default']._index
    self.ui = UnifiedIndex()
    self.bmmsi = BasicMockModelSearchIndex()
    self.ui.build(indexes=[self.bmmsi])
    connections['default']._index = self.ui

    # Update the "index".
    backend = connections['default'].get_backend()
    backend.clear()
    self.bmmsi.update()

    with self.settings(DEBUG=False):
        msq = connections['default'].get_query()
        self.assertEqual(len(msq.get_results()), 23)
        self.assertEqual(len(connections['default'].queries), 0)

    with self.settings(DEBUG=True):
        # Redefine it to clear out the cached results.
        msq2 = connections['default'].get_query()
        self.assertEqual(len(msq2.get_results()), 23)
        self.assertEqual(len(connections['default'].queries), 1)
        self.assertEqual(connections['default'].queries[0]['query_string'], '')

        msq3 = connections['default'].get_query()
        msq3.add_filter(SQ(foo='bar'))
        len(msq3.get_results())
        self.assertEqual(len(connections['default'].queries), 2)
        self.assertEqual(connections['default'].queries[0]['query_string'], '')
        self.assertEqual(connections['default'].queries[1]['query_string'], '')

    # Restore.
    connections['default']._index = self.old_unified_index
def test_log_query(self):
    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)

    # Stow.
    old_debug = settings.DEBUG
    settings.DEBUG = False

    len(self.sq.get_results())
    self.assertEqual(len(connections["default"].queries), 0)

    settings.DEBUG = True
    # Redefine it to clear out the cached results.
    self.sq = connections["default"].get_query()
    self.sq.add_filter(SQ(name="bar"))
    len(self.sq.get_results())
    self.assertEqual(len(connections["default"].queries), 1)
    self.assertExpectedQuery(
        connections["default"].queries[0]["query_string"],
        "(XNAME^ PHRASE 3 XNAMEbar PHRASE 3 XNAME$)",
    )

    # And again, for good measure.
    self.sq = connections["default"].get_query()
    self.sq.add_filter(SQ(name="bar"))
    self.sq.add_filter(SQ(text="moof"))
    len(self.sq.get_results())
    self.assertEqual(len(connections["default"].queries), 2)
    self.assertExpectedQuery(
        connections["default"].queries[0]["query_string"],
        "(XNAME^ PHRASE 3 XNAMEbar PHRASE 3 XNAME$)",
    )
    self.assertExpectedQuery(
        connections["default"].queries[1]["query_string"],
        "((XNAME^ PHRASE 3 XNAMEbar PHRASE 3 XNAME$) AND"
        " (XTEXT^ PHRASE 3 XTEXTmoof PHRASE 3 XTEXT$))",
    )

    # Restore.
    settings.DEBUG = old_debug
def test_log_query(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    # Stow.
    old_debug = settings.DEBUG
    settings.DEBUG = False

    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 0)

    settings.DEBUG = True
    # Redefine it to clear out the cached results.
    self.sq = connections['default'].get_query()
    self.sq.add_filter(SQ(name='bar'))
    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 1)
    self.assertEqual(
        str(connections['default'].queries[0]['query_string']),
        'Xapian::Query((ZXNAMEbar OR XNAMEbar))')

    # And again, for good measure.
    self.sq = connections['default'].get_query()
    self.sq.add_filter(SQ(name='bar'))
    self.sq.add_filter(SQ(text='moof'))
    len(self.sq.get_results())
    self.assertEqual(len(connections['default'].queries), 2)
    self.assertEqual(
        str(connections['default'].queries[0]['query_string']),
        'Xapian::Query(('
        'ZXNAMEbar OR '
        'XNAMEbar))')
    self.assertEqual(
        str(connections['default'].queries[1]['query_string']),
        'Xapian::Query(('
        '(ZXNAMEbar OR XNAMEbar) AND '
        '(ZXTEXTmoof OR XTEXTmoof)))')

    # Restore.
    settings.DEBUG = old_debug
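# A minimal sketch (not part of the original suite) of the same DEBUG
# stow/restore as the test above, expressed with Django's settings()
# override from SimpleTestCase -- the pattern the Whoosh and mock-engine
# variants in this file already use. The 'default' alias, the filtered
# field, and the expected query count of 1 are illustrative assumptions,
# not fixtures from this file.
def test_log_query_with_settings_override(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    # With DEBUG forced off, running the query should log nothing.
    with self.settings(DEBUG=False):
        len(self.sq.get_results())
        self.assertEqual(len(connections['default'].queries), 0)

    # With DEBUG forced on, a fresh query object should be logged once it runs.
    with self.settings(DEBUG=True):
        self.sq = connections['default'].get_query()
        self.sq.add_filter(SQ(name='bar'))
        len(self.sq.get_results())
        self.assertEqual(len(connections['default'].queries), 1)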
def test_values_slicing(self):
    self.sb.update(self.wmmi, self.sample_objs)

    reset_search_queries()
    self.assertEqual(len(connections['whoosh'].queries), 0)

    # TODO: this would be a good candidate for refactoring into a TestCase subclass shared across backends

    # The values will come back as strings because Haystack doesn't assume PKs are integers.
    # We'll prepare this set once since we're going to query the same results in multiple ways:
    expected_pks = ['3', '2', '1']

    results = self.sqs.all().order_by('pub_date').values('pk')
    self.assertListEqual([i['pk'] for i in results[1:11]], expected_pks)

    results = self.sqs.all().order_by('pub_date').values_list('pk')
    self.assertListEqual([i[0] for i in results[1:11]], expected_pks)

    results = self.sqs.all().order_by('pub_date').values_list('pk', flat=True)
    self.assertListEqual(results[1:11], expected_pks)

    self.assertEqual(len(connections['whoosh'].queries), 3)
def test_fill_cache(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    results = self.msqs.all()
    self.assertEqual(len(results._result_cache), 0)
    self.assertEqual(len(connections['default'].queries), 0)

    results._fill_cache(0, 10)
    self.assertEqual(len([result for result in results._result_cache if result is not None]), 10)
    self.assertEqual(len(connections['default'].queries), 1)

    results._fill_cache(10, 20)
    self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
    self.assertEqual(len(connections['default'].queries), 2)

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    # Test to ensure we properly fill the cache, even if we get fewer
    # results back (not a handled model) than the hit count indicates.
    sqs = SearchQuerySet().all()
    sqs.query.backend = MixedMockSearchBackend('default')
    results = sqs
    self.assertEqual(len([result for result in results._result_cache if result is not None]), 0)
    self.assertEqual([int(result.pk) for result in results._result_cache if result is not None], [])
    self.assertEqual(len(connections['default'].queries), 0)

    results._fill_cache(0, 10)
    self.assertEqual(len([result for result in results._result_cache if result is not None]), 9)
    self.assertEqual([int(result.pk) for result in results._result_cache if result is not None],
                     [1, 2, 3, 4, 5, 6, 7, 8, 10])
    self.assertEqual(len(connections['default'].queries), 2)

    results._fill_cache(10, 20)
    self.assertEqual(len([result for result in results._result_cache if result is not None]), 17)
    self.assertEqual([int(result.pk) for result in results._result_cache if result is not None],
                     [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20])
    self.assertEqual(len(connections['default'].queries), 4)

    results._fill_cache(20, 30)
    self.assertEqual(len([result for result in results._result_cache if result is not None]), 20)
    self.assertEqual([int(result.pk) for result in results._result_cache if result is not None],
                     [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20, 21, 22, 23])
    self.assertEqual(len(connections['default'].queries), 6)
def test_related_slice(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    results = self.rsqs.all()
    self.assertEqual([int(result.pk) for result in results[1:11]], [2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
    self.assertEqual(len(connections['default'].queries), 3)

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    results = self.rsqs.all()
    self.assertEqual(int(results[21].pk), 22)
    self.assertEqual(len(connections['default'].queries), 4)

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    results = self.rsqs.all()
    self.assertEqual([int(result.pk) for result in results[20:30]], [21, 22, 23])
    self.assertEqual(len(connections['default'].queries), 4)
def test_related_slice(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    results = self.rsqs.all()
    self.assertEqual([int(result.pk) for result in results[1:11]], [7, 12, 17, 1, 6, 11, 16, 23, 5, 10])
    self.assertEqual(len(connections['default'].queries), 3)

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    results = self.rsqs.all()
    self.assertEqual(int(results[21].pk), 18)
    self.assertEqual(len(connections['default'].queries), 4)

    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    results = self.rsqs.all()
    self.assertEqual([int(result.pk) for result in results[20:30]], [13, 18, 20])
    self.assertEqual(len(connections['default'].queries), 4)
def test_manual_iter(self):
    results = self.msqs.all()

    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)

    check = [result.pk for result in results._manual_iter()]
    self.assertEqual(
        check,
        ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12",
         "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23"],
    )
    self.assertEqual(len(connections["default"].queries), 3)

    reset_search_queries()
    self.assertEqual(len(connections["default"].queries), 0)

    # Test to ensure we properly fill the cache, even if we get fewer
    # results back (not a handled model) than the hit count indicates.
    # This will hang indefinitely if broken.

    # CharPK testing
    old_ui = self.ui
    self.ui.build(indexes=[self.cpkmmsi])
    connections["default"]._index = self.ui
    self.cpkmmsi.update()

    results = self.msqs.all()
    loaded = [result.pk for result in results._manual_iter()]
    self.assertEqual(loaded, ["sometext", "1234"])
    self.assertEqual(len(connections["default"].queries), 1)

    # UUID testing
    self.ui.build(indexes=[self.uuidmmsi])
    connections["default"]._index = self.ui
    self.uuidmmsi.update()

    results = self.msqs.all()
    loaded = [result.pk for result in results._manual_iter()]
    self.assertEqual(
        loaded,
        [
            "53554c58-7051-4350-bcc9-dad75eb248a9",
            "77554c58-7051-4350-bcc9-dad75eb24888",
        ],
    )

    connections["default"]._index = old_ui
def test_repr(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)

    self.assertEqual(
        repr(self.msqs),
        "[<SearchResult: core.mockmodel (pk=u'1')>, <SearchResult: core.mockmodel (pk=u'2')>, <SearchResult: core.mockmodel (pk=u'3')>, <SearchResult: core.mockmodel (pk=u'4')>, <SearchResult: core.mockmodel (pk=u'5')>, <SearchResult: core.mockmodel (pk=u'6')>, <SearchResult: core.mockmodel (pk=u'7')>, <SearchResult: core.mockmodel (pk=u'8')>, <SearchResult: core.mockmodel (pk=u'9')>, <SearchResult: core.mockmodel (pk=u'10')>, <SearchResult: core.mockmodel (pk=u'11')>, <SearchResult: core.mockmodel (pk=u'12')>, <SearchResult: core.mockmodel (pk=u'13')>, <SearchResult: core.mockmodel (pk=u'14')>, <SearchResult: core.mockmodel (pk=u'15')>, <SearchResult: core.mockmodel (pk=u'16')>, <SearchResult: core.mockmodel (pk=u'17')>, <SearchResult: core.mockmodel (pk=u'18')>, <SearchResult: core.mockmodel (pk=u'19')>, '...(remaining elements truncated)...']")
    self.assertEqual(len(connections['default'].queries), 1)
def test_repr(self):
    reset_search_queries()
    self.assertEqual(len(connections['default'].queries), 0)
    self.assertRegexpMatches(
        repr(self.msqs),
        r'^<SearchQuerySet: query=<test_haystack.mocks.MockSearchQuery object'
        r' at 0x[0-9A-Fa-f]+>, using=None>$')