def test_cjk_separation(self):
    """Each CJK character becomes its own word; Latin runs stay grouped."""
    pure_cjk = text_query.TextQuery(u'\u4f59\u5609\u5e73')
    assert [u'\u4f59', u'\u5609', u'\u5e73'] == pure_cjk.words
    assert pure_cjk.query_words == pure_cjk.words

    # Mixed input: Latin substrings are uppercased as whole tokens while
    # the CJK characters in between are still split one per word.
    mixed = text_query.TextQuery(u'foo\u4f59\u5609bar\u5e73')
    assert mixed.words == ['FOO', u'\u4f59', u'\u5609', 'BAR', u'\u5e73']
    assert mixed.query_words == mixed.words
def test_search_retry_time_out(self):
    """search() returns None once failures exhaust the retry budget.

    backend1 answers HTTP 500 (advancing the simulated clock 0.2s) and
    backend2 raises a fetch error (advancing it another 0.75s).  By then
    the elapsed time exceeds the retry deadline, so backend3 is never
    contacted, search() gives up with None, and the three expected
    messages appear in the info log.
    """
    # NOTE: removed unused good_response / deactivation_message_html
    # locals that were copied from sibling tests but never referenced.
    bad_response = MockUrlFetchResponse(500, '')
    urlfetch.fetch('http://backend1/?q=mori', deadline=IsSeconds(0.9))\
        .WithSideEffects(lambda url, deadline: self.advance_seconds(0.2))\
        .AndReturn(bad_response)
    urlfetch.fetch('http://backend2/?q=mori', deadline=IsSeconds(0.8))\
        .WithSideEffects(lambda url, deadline: self.advance_seconds(0.75))\
        .AndRaise(urlfetch_errors.Error('bad'))
    self.mox.ReplayAll()
    results = external_search.search(
        'japan', text_query.TextQuery('mori'), 100, [
            'http://backend1/?q=%s',
            'http://backend2/?q=%s',
            'http://backend3/?q=%s'
        ])
    self.assertEquals(None, results)
    self.assertEquals([
        'Bad status code: 500',
        'Failed to fetch: bad',
        'Fetch retry timed out.'
    ], self.mock_logging_handler.messages['info'])
    self.mox.VerifyAll()
def test_search_recover_from_fetch_failure(self):
    """search() falls through failing backends until one succeeds.

    backend1 and backend2 each raise a fetch error; backend3 then
    returns a good payload, so the caller still gets one result and
    the two failures are recorded in the info log.
    """
    good_response = MockUrlFetchResponse(200, {
        'name_entries': [{
            'person_record_id': 'test/1'
        }],
        'all_entries': [],
    })
    # Two consecutive failures, each consuming 0.2s of simulated time.
    for backend, deadline in [('backend1', 0.9), ('backend2', 0.8)]:
        urlfetch.fetch('http://%s/?q=mori' % backend,
                       deadline=IsSeconds(deadline))\
            .WithSideEffects(
                lambda url, deadline: self.advance_seconds(0.2))\
            .AndRaise(urlfetch_errors.Error('bad'))
    # Third backend finally answers.
    urlfetch.fetch('http://backend3/?q=mori', deadline=IsSeconds(0.6))\
        .WithSideEffects(lambda url, deadline: self.advance_seconds(0.2))\
        .AndReturn(good_response)
    self.mox.ReplayAll()
    results = external_search.search(
        'japan', text_query.TextQuery('mori'), 100, [
            'http://backend1/?q=%s',
            'http://backend2/?q=%s',
            'http://backend3/?q=%s'
        ])
    self.assertEquals(1, len(results))
    self.assertEquals('test/1', results[0].record_id)
    self.assertEquals(['Failed to fetch: bad', 'Failed to fetch: bad'],
                      self.mock_logging_handler.messages['info'])
    self.mox.VerifyAll()
def test_search_remove_non_name_matches(self):
    """Entries matched only on address are filtered and flagged.

    The backend returns no name matches and five address-only matches;
    search() keeps a subset of them (test/1, test/3, test/4 here) and
    marks each survivor with is_address_match.
    """
    all_entries = [{'person_record_id': 'test/%d' % n}
                   for n in range(1, 6)]
    response = MockUrlFetchResponse(
        200, {
            'name_entries': [],
            'all_entries': all_entries,
        })
    urlfetch.fetch('http://backend/?q=mori',
                   deadline=IsSeconds(0.9)).AndReturn(response)
    self.mox.ReplayAll()
    results = external_search.search('japan', text_query.TextQuery('mori'),
                                     100, ['http://backend/?q=%s'])
    self.assertEquals(3, len(results))
    for index, record_id in enumerate(['test/1', 'test/3', 'test/4']):
        self.assertEquals(record_id, results[index].record_id)
        self.assertTrue(results[index].is_address_match)
    self.mox.VerifyAll()
def test_search_broken_content(self):
    """An HTTP 200 body that fails to parse yields None plus a warning."""
    response = MockUrlFetchResponse(200, '')
    # Overwrite the serialized body with something unparseable.
    response.content = 'broken'
    urlfetch.fetch('http://backend/?q=mori',
                   deadline=IsSeconds(0.9)).AndReturn(response)
    self.mox.ReplayAll()
    results = external_search.search('japan', text_query.TextQuery('mori'),
                                     100, ['http://backend/?q=%s'])
    self.assertEquals(None, results)
    self.assertEquals(['Fetched content is broken.'],
                      self.mock_logging_handler.messages['warning'])
    self.mox.VerifyAll()
def __init__(self, repo, record_id, given_name, family_name,
             is_expired=False):
    """Initialize a fake person record for use in search tests.

    Args:
        repo: repository name the record belongs to.
        record_id: record identifier within the repository.
        given_name: the person's given name.
        family_name: the person's family name.
        is_expired: whether the record should count as expired.
    """
    self.repo = repo
    self.record_id = record_id
    # Datastore-style key: "<repo>:<record_id>".
    self.key_name = '%s:%s' % (repo, record_id)
    self.given_name = given_name
    self.family_name = family_name
    self.full_name = self.primary_full_name = ''
    self.alternate_names = ''
    # Prefix tokens derived from the combined name, mimicking what the
    # real indexer would store.
    self.names_prefixes = text_query.TextQuery(
        '%s %s' % (given_name, family_name)).query_words
    self.is_expired = is_expired
def test_search_shuffle_backends(self):
    """Backends may be tried in any order; all failing yields None.

    Every backend returns HTTP 500.  InAnyOrder() tolerates whatever
    (possibly shuffled) order search() contacts them in.
    """
    bad_response = MockUrlFetchResponse(500, '')
    for backend in ['backend1', 'backend2', 'backend3']:
        urlfetch.fetch('http://%s/?q=mori' % backend,
                       deadline=IsSeconds(0.9))\
            .InAnyOrder().AndReturn(bad_response)
    self.mox.ReplayAll()
    results = external_search.search(
        'japan', text_query.TextQuery('mori'), 100, [
            'http://backend1/?q=%s',
            'http://backend2/?q=%s',
            'http://backend3/?q=%s'
        ])
    self.assertEquals(None, results)
    self.mox.VerifyAll()
def test_parsing(self):
    """A plain ASCII query splits on whitespace and is uppercased."""
    parsed = text_query.TextQuery('abcd e fghij')
    assert ['ABCD', 'E', 'FGHIJ'] == parsed.words
    assert parsed.query_words == parsed.words