def test_demo_explorations_are_added_to_search_index(self):
    """Checks that loading a demo exploration makes it searchable."""
    # Before the demo is loaded, the search index has no matches.
    exp_ids, _ = search_services.search_explorations('Welcome', [], [], 2)
    self.assertEqual(exp_ids, [])

    exp_services.load_demo('0')

    # After loading, the demo exploration appears in search results.
    exp_ids, _ = search_services.search_explorations('Welcome', [], [], 2)
    self.assertEqual(exp_ids, ['0'])
def test_clear_exploration_search_index(self):
    """Verifies that clearing the index removes indexed explorations."""
    exp_services.load_demo('0')
    found_ids = search_services.search_explorations('Welcome', [], [], 2)[0]
    self.assertEqual(found_ids, ['0'])

    search_services.clear_exploration_search_index()

    # The previously-indexed exploration is no longer returned.
    found_ids = search_services.search_explorations('Welcome', [], [], 2)[0]
    self.assertEqual(found_ids, [])
def test_search_explorations(self):
    """Ensures search_explorations forwards its arguments unchanged to
    the underlying gae_search_services.search call and relays its result.
    """
    query = 'a query string'
    offset_in = 0
    size_in = 30
    offset_out = 30
    fake_doc_ids = ['id1', 'id2']

    def mock_search(
            query_string, index, categories, language_codes,
            offset=None, size=20, ids_only=False, retries=3):
        # Every argument must arrive exactly as supplied above.
        self.assertEqual(query_string, query)
        self.assertEqual(index, search_services.SEARCH_INDEX_EXPLORATIONS)
        self.assertEqual(categories, [])
        self.assertEqual(language_codes, [])
        self.assertEqual(offset, offset_in)
        self.assertEqual(size, size_in)
        self.assertEqual(ids_only, True)
        self.assertEqual(retries, 3)
        return fake_doc_ids, offset_out

    with self.swap(gae_search_services, 'search', mock_search):
        doc_ids, new_offset = search_services.search_explorations(
            query, [], [], size_in, offset=offset_in)

    self.assertEqual(new_offset, offset_out)
    self.assertEqual(doc_ids, fake_doc_ids)
def test_search_explorations(self):
    """Ensures search_explorations passes its arguments through to the
    underlying gae_search_services.search call and relays its result.
    """
    query = 'a query string'
    cursor_in = 'cursor'
    sort_in = 'title'
    limit_in = 30
    cursor_out = 'rcursor'
    fake_doc_ids = ['id1', 'id2']

    def mock_search(
            query_string, index, cursor=None, limit=20, sort='',
            ids_only=False, retries=3):
        # Every argument must arrive exactly as supplied above.
        self.assertEqual(query_string, query)
        self.assertEqual(index, search_services.SEARCH_INDEX_EXPLORATIONS)
        self.assertEqual(cursor, cursor_in)
        self.assertEqual(limit, limit_in)
        self.assertEqual(sort, sort_in)
        self.assertEqual(ids_only, True)
        self.assertEqual(retries, 3)
        return fake_doc_ids, cursor_out

    with self.swap(gae_search_services, 'search', mock_search):
        doc_ids, new_cursor = search_services.search_explorations(
            query, limit_in, sort=sort_in, cursor=cursor_in)

    self.assertEqual(new_cursor, cursor_out)
    self.assertEqual(doc_ids, fake_doc_ids)
def test_clear_search_index(self):
    """Checks the admin handler action that wipes both search indices."""
    # Seed both indices with a demo exploration and a demo collection.
    exp_services.load_demo('0')
    self.assertEqual(
        search_services.search_explorations('Welcome', 2)[0], ['0'])

    collection_services.load_demo('0')
    self.assertEqual(
        search_services.search_collections('Welcome', 2)[0], ['0'])

    self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
    self.login(self.ADMIN_EMAIL, is_super_admin=True)
    csrf_token = self.get_new_csrf_token()

    response = self.post_json(
        '/adminhandler', {'action': 'clear_search_index'},
        csrf_token=csrf_token)
    self.assertEqual(response, {})

    # After the handler runs, both indices are empty.
    self.assertEqual(
        search_services.search_explorations('Welcome', 2)[0], [])
    self.assertEqual(
        search_services.search_collections('Welcome', 2)[0], [])
def test_search_explorations(self) -> None:
    """Ensures search_explorations forwards its arguments unchanged to
    the underlying gae_search_services.search call and relays its result.
    """
    query = 'a query string'
    offset_in = 0
    size_in = 30
    offset_out = 30
    fake_doc_ids = ['id1', 'id2']

    def mock_search(
            query_string: str, index: str, categories: List[str],
            language_codes: List[str], offset: Optional[int] = None,
            size: int = 20, ids_only: bool = False,
            retries: int = 3) -> Tuple[List[str], Optional[int]]:
        # Every argument must arrive exactly as supplied above.
        self.assertEqual(query_string, query)
        self.assertEqual(index, search_services.SEARCH_INDEX_EXPLORATIONS)
        self.assertEqual(categories, [])
        self.assertEqual(language_codes, [])
        self.assertEqual(offset, offset_in)
        self.assertEqual(size, size_in)
        self.assertEqual(ids_only, True)
        self.assertEqual(retries, 3)
        return fake_doc_ids, offset_out

    with self.swap(gae_search_services, 'search', mock_search):
        doc_ids, new_offset = search_services.search_explorations(
            query, [], [], size_in, offset=offset_in)

    self.assertEqual(new_offset, offset_out)
    self.assertEqual(doc_ids, fake_doc_ids)
def get_library_groups(language_codes):
    """Returns a list of groups for the library index page. Each group has
    a header and a list of dicts representing activity summaries.

    Args:
        language_codes: list(str). A list of language codes. Only
            explorations with these languages will be returned.

    Returns:
        list(dict). A list of groups for the library index page. Each group
        is represented by a dict with the following keys and values:
            - activity_summary_dicts: list(dict). A list of dicts
                representing activity summaries.
            - categories: list(str). The list of group categories.
            - header_i18n_id: str. The i18n id for the header of the
                category.
            - has_full_results_page: bool. Whether the group header links
                to a "full results" page. This is always True for the
                "exploration category" groups.
            - full_results_url: str. The URL to the corresponding "full
                results" page.
    """
    language_codes_suffix = ''
    if language_codes:
        language_codes_suffix = ' language_code=("%s")' % (
            '" OR "'.join(language_codes))

    def _generate_query(categories):
        """Generates query based on the categories and language codes.

        Args:
            categories: list(str). List of categories.

        Returns:
            str. Generated query.
        """
        # This assumes that 'categories' is non-empty.
        return 'category=("%s")%s' % (
            '" OR "'.join(categories), language_codes_suffix)

    # Collect all collection ids so that the summary details can be
    # retrieved with a single get_multi() call.
    all_collection_ids = []
    header_id_to_collection_ids = {}
    for group in _LIBRARY_INDEX_GROUPS:
        collection_ids = search_services.search_collections(
            _generate_query(group['search_categories']), 8)[0]
        header_id_to_collection_ids[group['header_i18n_id']] = collection_ids
        all_collection_ids += collection_ids

    collection_summaries = [
        summary for summary in
        collection_services.get_collection_summaries_matching_ids(
            all_collection_ids)
        if summary is not None]
    collection_summary_dicts = {
        summary_dict['id']: summary_dict
        for summary_dict in _get_displayable_collection_summary_dicts(
            collection_summaries)
    }

    # Collect all exp ids so that the summary details can be retrieved with
    # a single get_multi() call.
    all_exp_ids = []
    header_to_exp_ids = {}
    for group in _LIBRARY_INDEX_GROUPS:
        exp_ids = search_services.search_explorations(
            _generate_query(group['search_categories']), 8)[0]
        header_to_exp_ids[group['header_i18n_id']] = exp_ids
        all_exp_ids += exp_ids

    exp_summaries = [
        summary for summary in
        exp_fetchers.get_exploration_summaries_matching_ids(all_exp_ids)
        if summary is not None]
    exp_summary_dicts = {
        summary_dict['id']: summary_dict
        for summary_dict in get_displayable_exp_summary_dicts(exp_summaries)
    }

    results = []
    for group in _LIBRARY_INDEX_GROUPS:
        # Collections are listed first, then explorations. Ids without a
        # (public) summary are silently skipped. Note: a redundant
        # 'summary_dicts = []' pre-assignment was removed here; the list
        # is built directly from the collection ids below.
        collection_ids_to_display = (
            header_id_to_collection_ids[group['header_i18n_id']])
        summary_dicts = [
            collection_summary_dicts[collection_id]
            for collection_id in collection_ids_to_display
            if collection_id in collection_summary_dicts]

        exp_ids_to_display = header_to_exp_ids[group['header_i18n_id']]
        summary_dicts += [
            exp_summary_dicts[exp_id] for exp_id in exp_ids_to_display
            if exp_id in exp_summary_dicts]

        # Groups with nothing to show are omitted entirely.
        if not summary_dicts:
            continue

        results.append({
            'header_i18n_id': group['header_i18n_id'],
            'categories': group['search_categories'],
            'activity_summary_dicts': summary_dicts,
            'has_full_results_page': True,
            'full_results_url': None,
        })

    return results