    def search(self, query_txt, role=None):
        """Performs a search and adds view_url attributes to the results."""
        # Default to the role from the request parameters when no explicit
        # role is passed in.
        role = role or self.params.role
        results = None
        if self.config.external_search_backends:
            results = external_search.search(
                self.repo, TextQuery(query_txt), MAX_RESULTS,
                self.config.external_search_backends)

        # External search backends are not always complete. Fall back to the
        # original search when they fail or return no results.
        if not results:
            if config.get('enable_fulltext_search'):
                results = full_text_search.search(self.repo,
                                                  query_txt, MAX_RESULTS)
            else:
                results = indexing.search(
                    self.repo, TextQuery(query_txt), MAX_RESULTS, role=role)

        for result in results:
            result.view_url = self.get_url('/view',
                                           id=result.record_id,
                                           role=self.params.role,
                                           query=self.params.query,
                                           given_name=self.params.given_name,
                                           family_name=self.params.family_name)
            result.latest_note_status = get_person_status_text(result)
            if result.is_clone():
                result.provider_name = result.get_original_domain()
            sanitize_urls(result)
        return results
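The search() variants in this listing all share the same fallback chain: query any configured external search backends first, then fall back to full_text_search or indexing when they fail or return nothing. A minimal standalone sketch of that chain, with the handler and config lookups replaced by plain arguments (an assumption for illustration only; the module names follow the snippets):

import external_search
import full_text_search
import indexing
from text_query import TextQuery


def search_with_fallback(repo, query_txt, max_results,
                         external_backends=None, enable_fulltext=False):
    """Sketch of the fallback chain shared by the handlers in this listing."""
    results = None
    if external_backends:
        # Try the configured external search backends first.
        results = external_search.search(
            repo, TextQuery(query_txt), max_results, external_backends)
    if not results:
        # External backends are not always complete: fall back to the
        # original search when they fail or return no results.
        if enable_fulltext:
            results = full_text_search.search(repo, query_txt, max_results)
        else:
            results = indexing.search(repo, TextQuery(query_txt), max_results)
    return results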
Example #2
    def search(self, query_txt):
        """Performs a search and adds view_url attributes to the results."""
        results = None
        if self.config.external_search_backends:
            results = external_search.search(
                self.repo, TextQuery(query_txt), MAX_RESULTS,
                self.config.external_search_backends)

        # External search backends are not always complete. Fall back to the
        # original search when they fail or return no results.
        if not results:
            if config.get('enable_fulltext_search'):
                results = full_text_search.search(self.repo, query_txt,
                                                  MAX_RESULTS)
            else:
                results = indexing.search(self.repo, TextQuery(query_txt),
                                          MAX_RESULTS)

        for result in results:
            result.view_url = self.get_url('/view',
                                           id=result.record_id,
                                           role=self.params.role,
                                           query=self.params.query,
                                           given_name=self.params.given_name,
                                           family_name=self.params.family_name)
            result.latest_note_status = get_person_status_text(result)
            if result.is_clone():
                result.provider_name = result.get_original_domain()
            result.should_show_inline_photo = self.should_show_inline_photo(
                result.photo_url)
            sanitize_urls(result)
        return results
 def test_search_recover_from_bad_response(self):
     good_response = MockUrlFetchResponse(200, {
         'name_entries': [{'person_record_id': 'test/1'}],
         'all_entries': [],
     })
     bad_response = MockUrlFetchResponse(500, '')
     urlfetch.fetch('http://backend1/?q=mori', deadline=IsSeconds(0.9))\
         .WithSideEffects(lambda url, deadline: self.advance_seconds(0.2))\
         .AndReturn(bad_response)
     urlfetch.fetch('http://backend2/?q=mori', deadline=IsSeconds(0.8))\
         .WithSideEffects(lambda url, deadline: self.advance_seconds(0.2))\
         .AndReturn(bad_response)
     urlfetch.fetch('http://backend3/?q=mori', deadline=IsSeconds(0.6))\
         .WithSideEffects(lambda url, deadline: self.advance_seconds(0.2))\
         .AndReturn(good_response)
     self.mox.ReplayAll()
     results = external_search.search(
         'japan', text_query.TextQuery('mori'), 100,
         ['http://backend1/?q=%s', 'http://backend2/?q=%s',
          'http://backend3/?q=%s'])
     self.assertEquals(1, len(results))
     self.assertEquals('test/1', results[0].record_id)
     self.assertEquals(['Bad status code: 500', 'Bad status code: 500'],
                       self.mock_logging_handler.messages['info'])
     self.mox.VerifyAll()
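The tests in this listing rely on a MockUrlFetchResponse helper that is not shown. A hypothetical sketch of it, assuming the backends return JSON payloads (the real test module may define it differently):

import json


class MockUrlFetchResponse(object):
    """Hypothetical stand-in for the object returned by urlfetch.fetch().

    Only the attributes the tests rely on are modelled: status_code and
    content. Dict payloads are serialized as JSON, the way a backend
    response would arrive; string payloads (e.g. '') pass through unchanged.
    """

    def __init__(self, status_code, content):
        self.status_code = status_code
        if isinstance(content, dict):
            content = json.dumps(content)
        self.content = content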
 def test_search_remove_non_name_matches(self):
     response = MockUrlFetchResponse(200, {
         'name_entries': [],
         'all_entries': [
             {'person_record_id': 'test/1'},
             {'person_record_id': 'test/2'},
             {'person_record_id': 'test/3'},
             {'person_record_id': 'test/4'},
             {'person_record_id': 'test/5'},
         ],
     })
     urlfetch.fetch('http://backend/?q=mori',
                    deadline=IsSeconds(0.9)).AndReturn(response)
     self.mox.ReplayAll()
     results = external_search.search(
         'japan', text_query.TextQuery('mori'), 100,
         ['http://backend/?q=%s'])
     self.assertEquals(3, len(results))
     self.assertEquals('test/1', results[0].record_id)
     self.assertEquals('test/3', results[1].record_id)
     self.assertEquals('test/4', results[2].record_id)
     self.assertTrue(results[0].is_address_match)
     self.assertTrue(results[1].is_address_match)
     self.assertTrue(results[2].is_address_match)
     self.mox.VerifyAll()
Example #5
 def test_search_retry_time_out(self):
     good_response = MockUrlFetchResponse(200, {
         'name_entries': [{
             'person_record_id': 'test/1'
         }],
         'all_entries': [],
     })
     deactivation_message_html = 'de<i>acti</i>vated'
     bad_response = MockUrlFetchResponse(500, '')
     urlfetch.fetch('http://backend1/?q=mori', deadline=IsSeconds(0.9))\
         .WithSideEffects(lambda url, deadline: self.advance_seconds(0.2))\
         .AndReturn(bad_response)
     urlfetch.fetch('http://backend2/?q=mori', deadline=IsSeconds(0.8))\
         .WithSideEffects(lambda url, deadline: self.advance_seconds(0.75))\
         .AndRaise(urlfetch_errors.Error('bad'))
     self.mox.ReplayAll()
     results = external_search.search(
         'japan', text_query.TextQuery('mori'), 100, [
             'http://backend1/?q=%s', 'http://backend2/?q=%s',
             'http://backend3/?q=%s'
         ])
     self.assertEquals(None, results)
     self.assertEquals([
         'Bad status code: 500', 'Failed to fetch: bad',
         'Fetch retry timed out.'
     ], self.mock_logging_handler.messages['info'])
     self.mox.VerifyAll()
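The deadline assertions use an IsSeconds helper that is also not shown. A hedged sketch, assuming it is a mox comparator that matches the float deadline passed to urlfetch.fetch() within a small tolerance:

import mox


class IsSeconds(mox.Comparator):
    """Hypothetical comparator: matches a deadline within a small tolerance."""

    def __init__(self, seconds):
        self.seconds = seconds

    def equals(self, rhs):
        # Accept small floating-point drift in the computed deadline.
        return abs(rhs - self.seconds) < 0.01

    def __repr__(self):
        return 'IsSeconds(%r)' % self.seconds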
Example #6
    def search(self, query_name, query_location=None):
        """Get results for a query.

        Args:
          query_name: A name to query for (string).
          query_location: A location to query for (optional, string).
        """
        text_query = TextQuery(
            '%s %s' % (query_name, query_location)
            if query_location else query_name)
        results = None
        if self._external_search_backends:
            results = external_search.search(self._repo, text_query,
                                             self._max_results,
                                             self._external_search_backends)
        # External search backends are not always complete. Fall back to the
        # original search when they fail or return no results.
        if not results:
            query_dict = {'name': query_name}
            if query_location:
                query_dict['location'] = query_location
            if self._enable_fulltext_search:
                results = full_text_search.search(self._repo, query_dict,
                                                  self._max_results)
            else:
                results = indexing.search(self._repo, text_query,
                                          self._max_results)
        return results
Example #7
 def test_search_recover_from_fetch_failure(self):
     good_response = MockUrlFetchResponse(200, {
         'name_entries': [{
             'person_record_id': 'test/1'
         }],
         'all_entries': [],
     })
     urlfetch.fetch('http://backend1/?q=mori', deadline=IsSeconds(0.9))\
         .WithSideEffects(lambda url, deadline: self.advance_seconds(0.2))\
         .AndRaise(urlfetch_errors.Error('bad'))
     urlfetch.fetch('http://backend2/?q=mori', deadline=IsSeconds(0.8))\
         .WithSideEffects(lambda url, deadline: self.advance_seconds(0.2))\
         .AndRaise(urlfetch_errors.Error('bad'))
     urlfetch.fetch('http://backend3/?q=mori', deadline=IsSeconds(0.6))\
         .WithSideEffects(lambda url, deadline: self.advance_seconds(0.2))\
         .AndReturn(good_response)
     self.mox.ReplayAll()
     results = external_search.search(
         'japan', text_query.TextQuery('mori'), 100, [
             'http://backend1/?q=%s', 'http://backend2/?q=%s',
             'http://backend3/?q=%s'
         ])
     self.assertEquals(1, len(results))
     self.assertEquals('test/1', results[0].record_id)
     self.assertEquals(['Failed to fetch: bad', 'Failed to fetch: bad'],
                       self.mock_logging_handler.messages['info'])
     self.mox.VerifyAll()
Example #8
 def test_search_remove_non_name_matches(self):
     response = MockUrlFetchResponse(
         200, {
             'name_entries': [],
             'all_entries': [
                 {
                     'person_record_id': 'test/1'
                 },
                 {
                     'person_record_id': 'test/2'
                 },
                 {
                     'person_record_id': 'test/3'
                 },
                 {
                     'person_record_id': 'test/4'
                 },
                 {
                     'person_record_id': 'test/5'
                 },
             ],
         })
     urlfetch.fetch('http://backend/?q=mori',
                    deadline=IsSeconds(0.9)).AndReturn(response)
     self.mox.ReplayAll()
     results = external_search.search('japan', text_query.TextQuery('mori'),
                                      100, ['http://backend/?q=%s'])
     self.assertEquals(3, len(results))
     self.assertEquals('test/1', results[0].record_id)
     self.assertEquals('test/3', results[1].record_id)
     self.assertEquals('test/4', results[2].record_id)
     self.assertTrue(results[0].is_address_match)
     self.assertTrue(results[1].is_address_match)
     self.assertTrue(results[2].is_address_match)
     self.mox.VerifyAll()
Example #9
    def get(self):
        if self.config.search_auth_key_required and not (
                self.auth and self.auth.search_permission):
            self.info(403,
                      message='Missing or invalid authorization key',
                      style='plain')
            return

        pfif_version = self.params.version

        # Retrieve parameters and do some sanity checks on them.
        record_id = self.request.get('id')
        query_string = self.request.get('q')
        max_results = min(self.params.max_results or 100, HARD_MAX_RESULTS)

        results = []
        if record_id:
            # Search by record ID (always returns just 1 result or nothing).
            person = model.Person.get(self.repo, record_id)
            if person:
                results = [person]
        elif query_string:
            # Search by query words.
            if self.config.external_search_backends:
                query = TextQuery(query_string)
                results = external_search.search(
                    self.repo, query, max_results,
                    self.config.external_search_backends)
            # External search backends are not always complete. Fall back to
            # the original search when they fail or return no results.
            if not results:
                if config.get('enable_fulltext_search'):
                    results = full_text_search.search(self.repo, query_string,
                                                      max_results)
                else:
                    results = indexing.search(self.repo,
                                              TextQuery(query_string),
                                              max_results)
        else:
            self.info(400,
                      message='Neither id nor q parameter specified',
                      style='plain')
            return

        records = [pfif_version.person_to_dict(result) for result in results]
        utils.optionally_filter_sensitive_fields(records, self.auth)

        # Define the function to retrieve notes for a person.
        def get_notes_for_person(person):
            notes = model.Note.get_by_person_record_id(
                self.repo, person['person_record_id'])
            notes = [note for note in notes if not note.hidden]
            records = map(pfif_version.note_to_dict, notes)
            utils.optionally_filter_sensitive_fields(records, self.auth)
            return records

        self.response.headers[
            'Content-Type'] = 'application/xml; charset=utf-8'
        pfif_version.write_file(self.response.out, records,
                                get_notes_for_person)
        utils.log_api_action(self, ApiActionLog.SEARCH, len(records))
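For reference, a hedged sketch of how a client might call this search API. The host and path are placeholders; only the parameter names (key, q, id, max_results) come from the handler above:

import urllib
import urllib2

# Placeholder host and repository; the query parameters mirror the handler
# above (q or id, plus max_results and the search auth key).
params = urllib.urlencode({'key': 'SEARCH_KEY', 'q': 'mori', 'max_results': 10})
url = 'https://example.appspot.com/personfinder/japan/api/search?' + params
xml = urllib2.urlopen(url).read()  # PFIF XML written by pfif_version.write_file()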
Example #10
    def get(self):
        if self.config.search_auth_key_required and not (
            self.auth and self.auth.search_permission):
            self.info(
                403,
                message='Missing or invalid authorization key',
                style='plain')
            return

        pfif_version = self.params.version

        # Retrieve parameters and do some sanity checks on them.
        record_id = self.request.get('id')
        query_string = self.request.get('q')
        max_results = min(self.params.max_results or 100, HARD_MAX_RESULTS)

        results = []
        if record_id:
            # Search by record ID (always returns just 1 result or nothing).
            person = model.Person.get(self.repo, record_id)
            if person:
                results = [person]
        elif query_string:
            # Search by query words.
            if self.config.external_search_backends:
                query = TextQuery(query_string)
                results = external_search.search(self.repo, query, max_results,
                    self.config.external_search_backends)
            # External search backends are not always complete. Fall back to
            # the original search when they fail or return no results.
            if not results:
                if config.get('enable_fulltext_search'):
                    results = full_text_search.search(
                        self.repo, query_string, max_results)
                else:
                    results = indexing.search(
                        self.repo, TextQuery(query_string), max_results)
        else:
            self.info(
                400,
                message='Neither id nor q parameter specified',
                style='plain')
            return

        records = [pfif_version.person_to_dict(result) for result in results]
        utils.optionally_filter_sensitive_fields(records, self.auth)

        # Define the function to retrieve notes for a person.
        def get_notes_for_person(person):
            notes = model.Note.get_by_person_record_id(
                self.repo, person['person_record_id'])
            notes = [note for note in notes if not note.hidden]
            records = map(pfif_version.note_to_dict, notes)
            utils.optionally_filter_sensitive_fields(records, self.auth)
            return records

        self.response.headers['Content-Type'] = 'application/xml; charset=utf-8'
        pfif_version.write_file(
            self.response.out, records, get_notes_for_person)
        utils.log_api_action(self, ApiActionLog.SEARCH, len(records))
Example #11
 def test_search_broken_content(self):
     response = MockUrlFetchResponse(200, '')
     response.content = 'broken'
     urlfetch.fetch('http://backend/?q=mori',
                    deadline=IsSeconds(0.9)).AndReturn(response)
     self.mox.ReplayAll()
     results = external_search.search('japan', text_query.TextQuery('mori'),
                                      100, ['http://backend/?q=%s'])
     self.assertEquals(None, results)
     self.assertEquals(['Fetched content is broken.'],
                       self.mock_logging_handler.messages['warning'])
     self.mox.VerifyAll()
 def test_search_shuffle_backends(self):
     bad_response = MockUrlFetchResponse(500, '')
     urlfetch.fetch('http://backend1/?q=mori', deadline=IsSeconds(0.9))\
         .InAnyOrder().AndReturn(bad_response)
     urlfetch.fetch('http://backend2/?q=mori', deadline=IsSeconds(0.9))\
         .InAnyOrder().AndReturn(bad_response)
     urlfetch.fetch('http://backend3/?q=mori', deadline=IsSeconds(0.9))\
         .InAnyOrder().AndReturn(bad_response)
     self.mox.ReplayAll()
     results = external_search.search(
         'japan', text_query.TextQuery('mori'), 100,
         ['http://backend1/?q=%s', 'http://backend2/?q=%s',
          'http://backend3/?q=%s'])
     self.assertEquals(None, results)
     self.mox.VerifyAll()
Example #14
 def test_search_shuffle_backends(self):
     bad_response = MockUrlFetchResponse(500, '')
     urlfetch.fetch('http://backend1/?q=mori', deadline=IsSeconds(0.9))\
         .InAnyOrder().AndReturn(bad_response)
     urlfetch.fetch('http://backend2/?q=mori', deadline=IsSeconds(0.9))\
         .InAnyOrder().AndReturn(bad_response)
     urlfetch.fetch('http://backend3/?q=mori', deadline=IsSeconds(0.9))\
         .InAnyOrder().AndReturn(bad_response)
     self.mox.ReplayAll()
     results = external_search.search(
         'japan', text_query.TextQuery('mori'), 100, [
             'http://backend1/?q=%s', 'http://backend2/?q=%s',
             'http://backend3/?q=%s'
         ])
     self.assertEquals(None, results)
     self.mox.VerifyAll()
Example #15
    def search(self, query_dict):
        """
        Performs a search and adds view_url attributes to the results.
        Args:
            query_dict: A list contains two queries: Name query and Location query
        """

        query_txt = " ".join(query_dict.values())
        results = None
        if self.config.external_search_backends:
            results = external_search.search(
                self.repo, TextQuery(query_txt), MAX_RESULTS,
                self.config.external_search_backends)

        # External search backends are not always complete. Fall back to the
        # original search when they fail or return no results.
        if not results:
            if config.get('enable_fulltext_search'):
                results = full_text_search.search(self.repo,
                                                  query_dict, MAX_RESULTS)
            else:
                results = indexing.search(self.repo,
                                          TextQuery(query_txt), MAX_RESULTS)

        query_name = self.get_query_value()
        for result in results:
            result.view_url = self.get_url(
                '/view',
                id=result.record_id,
                role=self.params.role,
                query_name=query_name,
                query_location=self.params.query_location,
                given_name=self.params.given_name,
                family_name=self.params.family_name)
            result.latest_note_status = get_person_status_text(result)
            if result.is_clone():
                result.provider_name = result.get_original_domain()
            result.should_show_inline_photo = (
                self.should_show_inline_photo(result.photo_url))
            if result.should_show_inline_photo and result.photo:
                # Only use a thumbnail URL if the photo was uploaded; we don't
                # have thumbnails for other photos.
                result.thumbnail_url = self.get_thumbnail_url(result.photo_url)
            sanitize_urls(result)
        return results
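For reference, a hedged sketch of the query_dict this variant expects, based only on how it is used above and on the key names in Example #6:

# Hypothetical input; the key names follow the location-aware variant in
# Example #6.
query_dict = {'name': 'mori', 'location': 'tokyo'}
# query_txt becomes the space-joined values, e.g. 'mori tokyo' (the exact
# order depends on the dict's iteration order under Python 2).
query_txt = " ".join(query_dict.values())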
 def test_search_retry_time_out(self):
     good_response = MockUrlFetchResponse(200, {
         'name_entries': [{'person_record_id': 'test/1'}],
         'all_entries': [],
     })
     deactivation_message_html = 'de<i>acti</i>vated'
     bad_response = MockUrlFetchResponse(500, '')
     urlfetch.fetch('http://backend1/?q=mori', deadline=IsSeconds(0.9))\
         .WithSideEffects(lambda url, deadline: self.advance_seconds(0.2))\
         .AndReturn(bad_response)
     urlfetch.fetch('http://backend2/?q=mori', deadline=IsSeconds(0.8))\
         .WithSideEffects(lambda url, deadline: self.advance_seconds(0.75))\
         .AndRaise(urlfetch_errors.Error('bad'))
     self.mox.ReplayAll()
     results = external_search.search(
         'japan', text_query.TextQuery('mori'), 100,
         ['http://backend1/?q=%s', 'http://backend2/?q=%s',
          'http://backend3/?q=%s'])
     self.assertEquals(None, results)
     self.assertEquals(['Bad status code: 500', 'Failed to fetch: bad',
                        'Fetch retry timed out.'],
                       self.mock_logging_handler.messages['info'])
     self.mox.VerifyAll()