def test_index_manager_regenerate_indices_from_broken_state(self, *args):
    """
    `regenerate_indices` should succeed and give us a working ElasticSearch
    when it runs and finds a broken state (eg. with an existing, incorrect
    index with the name of an alias).

    This can occur when ES restarts and an update signal is triggered before
    Richie had a chance to bootstrap ES.
    """
    # Index a freshly published course: the triggered signal creates a broken
    # "richie_test_courses" index (an index where an alias should live)
    course = CourseFactory(should_publish=True)
    apply_es_action_to_course(course.extended_object, "index", "en")
    self.assertIsNotNone(ES_INDICES_CLIENT.get("richie_test_courses"))

    # Run `regenerate_indices` with a frozen clock so index names are predictable
    frozen_now = datetime(2010, 1, 1, tzinfo=timezone.utc)
    suffix = frozen_now.strftime("%Y-%m-%d-%Hh%Mm%S.%fs")
    with mock.patch.object(timezone, "now", return_value=frozen_now):
        regenerate_indices(None)

    # No error was thrown, the courses index (like all others) was bootstrapped
    self.assertIsNotNone(ES_INDICES_CLIENT.get(f"richie_test_courses_{suffix}"))
    # The alias now points to the timestamped index
    self.assertEqual(
        list(ES_INDICES_CLIENT.get_alias("richie_test_courses").keys())[0],
        f"richie_test_courses_{suffix}",
    )
def test_partial_mappings_code(self):
    """Make sure our code analyzer works as expected."""
    index_name = "stub_index"

    # Create the index and apply our analysis settings; Elasticsearch only
    # accepts analyzer changes while the index is closed.
    ES_INDICES_CLIENT.create(index=index_name)
    ES_INDICES_CLIENT.close(index=index_name)
    ES_INDICES_CLIENT.put_settings(body=ANALYSIS_SETTINGS, index=index_name)
    ES_INDICES_CLIENT.open(index=index_name)

    # The trigram analyzer should emit every 3+ character substring
    trigram_response = ES_INDICES_CLIENT.analyze(
        body='{"analyzer": "code_trigram", "text": "003rst"}',
        index=index_name,
    )
    self.assertEqual(
        [token["token"] for token in trigram_response["tokens"]],
        [
            "003",
            "003r",
            "003rs",
            "003rst",
            "03r",
            "03rs",
            "03rst",
            "3rs",
            "3rst",
            "rst",
        ],
    )

    # The plain code analyzer should keep the input as a single token
    code_response = ES_INDICES_CLIENT.analyze(
        body='{"analyzer": "code", "text": "003rst"}',
        index=index_name,
    )
    self.assertEqual(
        [token["token"] for token in code_response["tokens"]],
        ["003rst"],
    )
def execute_query(self, querystring="", **extra):
    """
    Not a test.
    Prepare the ElasticSearch index and execute the query in it.
    """
    # Build the person documents from (id, name) pairs
    person_fixtures = [
        ("25", "Éponine Thénardier"),
        ("34", "Monseigneur Bienvenu Myriel"),
        ("52", "Fantine"),
    ]
    persons = [
        {
            "complete": {"en": slice_string_for_completion(name)},
            "id": person_id,
            "title": {"en": name},
        }
        for person_id, name in person_fixtures
    ]

    # Start from a clean slate, then bootstrap the persons index; the index
    # must be closed while analysis settings are applied
    ES_INDICES_CLIENT.delete(index="_all")
    ES_INDICES_CLIENT.create(index=PERSONS_INDEX)
    ES_INDICES_CLIENT.close(index=PERSONS_INDEX)
    ES_INDICES_CLIENT.put_settings(body=ANALYSIS_SETTINGS, index=PERSONS_INDEX)
    ES_INDICES_CLIENT.open(index=PERSONS_INDEX)
    # Apply the Indexer's default persons mapping
    ES_INDICES_CLIENT.put_mapping(body=PersonsIndexer.mapping, index=PERSONS_INDEX)

    # Index every person document, padded with stub shared fields
    bulk_compat(
        actions=[
            {
                "_id": person["id"],
                "_index": PERSONS_INDEX,
                "_op_type": "create",
                "absolute_url": {"en": "url"},
                "logo": {"en": "/some/img.png"},
                **person,
            }
            for person in persons
        ],
        chunk_size=500,
        client=ES_CLIENT,
    )
    ES_INDICES_CLIENT.refresh()

    response = self.client.get(
        f"/api/v1.0/persons/autocomplete/?{querystring:s}", **extra
    )
    self.assertEqual(response.status_code, 200)

    return persons, json.loads(response.content)
def test_indexable_filters_internationalization(self):
    """
    Indexable filters (such as categories and organizations by default) should
    have their names localized in the filter definitions in course search
    responses.
    """
    # Create the meta categories, each with a child category that should appear in facets
    subjects_meta = CategoryFactory(page_reverse_id="subjects", should_publish=True)
    subject = CategoryFactory(
        page_parent=subjects_meta.extended_object, should_publish=True
    )
    levels_meta = CategoryFactory(page_reverse_id="levels", should_publish=True)
    level = CategoryFactory(
        page_parent=levels_meta.extended_object, should_publish=True
    )

    # Create 2 organizations that should appear in facets
    org_meta = OrganizationFactory(
        page_reverse_id="organizations", should_publish=True
    )
    org_1 = OrganizationFactory(
        page_parent=org_meta.extended_object,
        page_title="First organization",
        should_publish=True,
    )
    org_2 = OrganizationFactory(
        page_parent=org_meta.extended_object,
        page_title="Second organization",
        should_publish=True,
    )

    # Create a course linked to our categories and organizations
    CourseFactory(
        fill_categories=[subject, level],
        fill_organizations=[org_1, org_2],
        should_publish=True,
    )

    # Index our objects into ES
    bulk_compat(
        actions=[
            *ES_INDICES.categories.get_es_documents(),
            *ES_INDICES.organizations.get_es_documents(),
            *ES_INDICES.courses.get_es_documents(),
        ],
        chunk_size=500,
        client=ES_CLIENT,
    )
    ES_INDICES_CLIENT.refresh()

    response = self.client.get("/api/v1.0/courses/?scope=filters")
    self.assertEqual(response.status_code, 200)
    # Read the payload once; every assertion below inspects the same filters
    filters = response.json()["filters"]

    self.assertEqual(
        filters["subjects"],
        {
            "base_path": "0001",
            "human_name": "Subjects",
            "is_autocompletable": True,
            "is_drilldown": False,
            "is_searchable": True,
            "name": "subjects",
            "position": 2,
            "has_more_values": False,
            "values": [
                {
                    "count": 1,
                    "human_name": subject.extended_object.get_title(),
                    "key": subject.get_es_id(),
                }
            ],
        },
    )
    self.assertEqual(
        filters["levels"],
        {
            "base_path": "0002",
            "human_name": "Levels",
            "is_autocompletable": True,
            "is_drilldown": False,
            "is_searchable": True,
            "name": "levels",
            "position": 3,
            "has_more_values": False,
            "values": [
                {
                    "count": 1,
                    "human_name": level.extended_object.get_title(),
                    "key": level.get_es_id(),
                }
            ],
        },
    )
    self.assertEqual(
        filters["organizations"],
        {
            "base_path": "0003",
            "human_name": "Organizations",
            "is_autocompletable": True,
            "is_drilldown": False,
            "is_searchable": True,
            "name": "organizations",
            "position": 4,
            "has_more_values": False,
            "values": [
                {
                    "count": 1,
                    "human_name": org_1.extended_object.get_title(),
                    "key": org_1.get_es_id(),
                },
                {
                    "count": 1,
                    "human_name": org_2.extended_object.get_title(),
                    "key": org_2.get_es_id(),
                },
            ],
        },
    )
def prepare_es():
    """
    Prepare the ES index so we only have to manage indexing and searches in
    our actual tests.
    """
    # Wipe everything so every run starts from a clean slate
    ES_INDICES_CLIENT.delete(index="_all")

    # Bootstrap every index used by the tests; analysis settings can only be
    # applied while the index is closed
    for index_name in (
        "richie_categories",
        "richie_courses",
        "richie_persons",
        "richie_organizations",
    ):
        ES_INDICES_CLIENT.create(index=index_name)
        ES_INDICES_CLIENT.close(index=index_name)
        ES_INDICES_CLIENT.put_settings(body=ANALYSIS_SETTINGS, index=index_name)
        ES_INDICES_CLIENT.open(index=index_name)

    # Courses get the default mapping from their Indexer
    ES_INDICES_CLIENT.put_mapping(body=CoursesIndexer.mapping, index="richie_courses")

    # Register the sorting scripts used by course queries
    for script_id in ("score", "state_field"):
        ES_CLIENT.put_script(id=script_id, body=CoursesIndexer.scripts[script_id])
def prepare_index(self, courses, organizations=None):
    """
    Not a test.
    This method is doing the heavy lifting for the tests in this class:
    preparing the Elasticsearch index so that individual tests just have to
    execute the query.
    """
    organizations = organizations or []
    self.create_filter_pages()

    # Start from a clean slate
    ES_INDICES_CLIENT.delete(index="_all")

    def bootstrap_index(index_name, mapping):
        """Create an index, apply analysis settings (needs a closed index) and a mapping."""
        ES_INDICES_CLIENT.create(index=index_name)
        ES_INDICES_CLIENT.close(index=index_name)
        ES_INDICES_CLIENT.put_settings(body=ANALYSIS_SETTINGS, index=index_name)
        ES_INDICES_CLIENT.open(index=index_name)
        ES_INDICES_CLIENT.put_mapping(body=mapping, index=index_name)

    # Organizations are fully configured since tests read results from them
    bootstrap_index("richie_organizations", OrganizationsIndexer.mapping)

    # Set up empty indices for other objects. They need to exist to avoid errors
    # but we do not use results from them in our tests.
    for index_name in ("richie_licences", "richie_categories", "richie_persons"):
        ES_INDICES_CLIENT.create(index=index_name)

    # The courses index is the one actually exercised by the tests
    bootstrap_index("test_courses", CoursesIndexer.mapping)

    # Register the sorting scripts used by course queries
    for script_id in ("score", "state_field"):
        ES_CLIENT.put_script(id=script_id, body=CoursesIndexer.scripts[script_id])

    # Insert our organizations and courses in their indices
    actions = [
        OrganizationsIndexer.get_es_document_for_organization(
            organization.public_extension
        )
        for organization in organizations
    ]
    actions += [
        {
            "_id": course["id"],
            "_index": "test_courses",
            "_op_type": "create",
            **course,
        }
        for course in courses
    ]
    bulk_compat(actions=actions, chunk_size=500, client=ES_CLIENT)
    ES_INDICES_CLIENT.refresh()
def execute_query(self, persons=None, querystring=""):
    """
    Not a test.
    This method is doing the heavy lifting for the tests in this class: create
    and fill the index with our persons so we can run our queries and check
    the results.
    It also executes the query and returns the result from the API.
    """
    index_name = "test_persons"

    # Clean slate, then bootstrap the persons index; analysis settings can
    # only be applied while the index is closed
    ES_INDICES_CLIENT.delete(index="_all")
    ES_INDICES_CLIENT.create(index=index_name)
    ES_INDICES_CLIENT.close(index=index_name)
    ES_INDICES_CLIENT.put_settings(body=ANALYSIS_SETTINGS, index=index_name)
    ES_INDICES_CLIENT.open(index=index_name)
    # Apply the Indexer's default persons mapping
    ES_INDICES_CLIENT.put_mapping(body=PersonsIndexer.mapping, index=index_name)

    # Index the requested persons (or the module-level defaults), padding each
    # document with stub shared fields
    actions = []
    for person in persons or PERSONS:
        actions.append(
            {
                "_id": person["id"],
                "_index": index_name,
                "_op_type": "create",
                "absolute_url": {"en": "en/url"},
                "bio": {"en": "en/bio"},
                "portrait": {"en": "en/image"},
                "title_raw": person["title"],
                **person,
            }
        )
    bulk_compat(actions=actions, chunk_size=500, client=ES_CLIENT)
    ES_INDICES_CLIENT.refresh()

    response = self.client.get(f"/api/v1.0/persons/?{querystring:s}")
    self.assertEqual(response.status_code, 200)
    return json.loads(response.content)
def execute_query(self, kind, categories=None, querystring=""):
    """
    Not a test.
    This method is doing the heavy lifting for the tests in this class: create
    and fill the index with our categories so we can run our queries and check
    the results.
    It also executes the query and returns the result from the API.
    """
    index_name = "test_categories"

    # Clean slate, then bootstrap the categories index; analysis settings can
    # only be applied while the index is closed
    ES_INDICES_CLIENT.delete(index="_all")
    ES_INDICES_CLIENT.create(index=index_name)
    ES_INDICES_CLIENT.close(index=index_name)
    ES_INDICES_CLIENT.put_settings(body=ANALYSIS_SETTINGS, index=index_name)
    ES_INDICES_CLIENT.open(index=index_name)
    # Apply the Indexer's default categories mapping
    ES_INDICES_CLIENT.put_mapping(body=CategoriesIndexer.mapping, index=index_name)

    # Index the requested categories (or the module-level defaults), padding
    # each document with stub shared fields
    bulk_compat(
        actions=[
            {
                "_id": category["id"],
                "_index": index_name,
                "_op_type": "create",
                "absolute_url": {"en": "en/url"},
                "description": {"en": "en/description"},
                "icon": {"en": "en/icon"},
                "is_meta": False,
                "logo": {"en": "en/logo"},
                "nb_children": 0,
                "path": category["id"],
                "title_raw": category["title"],
                **category,
            }
            for category in categories or CATEGORIES
        ],
        chunk_size=500,
        client=ES_CLIENT,
    )
    ES_INDICES_CLIENT.refresh()

    response = self.client.get(f"/api/v1.0/{kind:s}/?{querystring:s}")
    self.assertEqual(response.status_code, 200)
    return json.loads(response.content)
def test_partial_mappings_french_diacritics(self):
    """
    Make sure words ending in "icité" are analyzed the same way whether or not
    there is an accent.
    """
    index_name = "stub_index"

    # Create the index with our multilingual dynamic mapping
    ES_INDICES_CLIENT.create(index=index_name)
    ES_INDICES_CLIENT.put_mapping(
        index=index_name, body={"dynamic_templates": MULTILINGUAL_TEXT}
    )
    # Apply the analysis settings; the index must be closed meanwhile
    ES_INDICES_CLIENT.close(index=index_name)
    ES_INDICES_CLIENT.put_settings(body=ANALYSIS_SETTINGS, index=index_name)
    ES_INDICES_CLIENT.open(index=index_name)

    # Accented and unaccented spellings must stem to the same token
    for analyze_body in (
        '{"analyzer": "french", "text": "électricité"}',
        '{"analyzer": "french", "text": "electricite"}',
    ):
        tokens = ES_INDICES_CLIENT.analyze(body=analyze_body, index=index_name)[
            "tokens"
        ]
        self.assertEqual(tokens[0]["token"], "electricit")
def setUp(self):
    """
    Make sure all indices are deleted before each new test is run.
    """
    super().setUp()
    # Wildcard delete: removes every index so each test starts from scratch
    ES_INDICES_CLIENT.delete(index="_all")
def test_index_manager_regenerate_indices(self, *args):
    """
    Make sure indices are created, aliases updated and old, no longer useful
    indices are pruned when the `regenerate_elasticsearch` function is called.
    """
    # Create an unrelated index with an alias to make sure it is unaffected by our operations
    ES_INDICES_CLIENT.create(index="unrelated_index")
    ES_INDICES_CLIENT.put_alias(index="unrelated_index", name="unrelated_index_alias")
    self.assertIsNotNone(ES_INDICES_CLIENT.get("unrelated_index")["unrelated_index"])
    self.assertEqual(
        list(ES_INDICES_CLIENT.get_alias("unrelated_index_alias").keys())[0],
        "unrelated_index",
    )

    # Create all our indices from scratch
    # Use a mocked timezone.now to check the names of our indices as they include a datetime
    creation1_datetime = datetime(2010, 1, 1, tzinfo=timezone.utc)
    creation1_string = creation1_datetime.strftime("%Y-%m-%d-%Hh%Mm%S.%fs")
    with mock.patch.object(timezone, "now", return_value=creation1_datetime):
        regenerate_indices(None)

    expected_indices = [
        "richie_test_categories",
        "richie_test_courses",
        "richie_test_organizations",
        "richie_test_persons",
    ]
    # All indices were created and properly aliased
    for alias_name in expected_indices:
        new_index_name = f"{alias_name}_{creation1_string}"
        # The index is created
        self.assertIsNotNone(ES_INDICES_CLIENT.get(new_index_name)[new_index_name])
        # The expected alias is associated with the index
        self.assertEqual(
            list(ES_INDICES_CLIENT.get_alias(alias_name).keys())[0], new_index_name
        )

    # Now regenerate the indices, replacing the ones we just created
    creation2_datetime = datetime(2011, 2, 2, tzinfo=timezone.utc)
    creation2_string = creation2_datetime.strftime("%Y-%m-%d-%Hh%Mm%S.%fs")
    with mock.patch.object(timezone, "now", return_value=creation2_datetime):
        regenerate_indices(None)

    # All indices were replaced and aliases updated
    for alias_name in expected_indices:
        # The index is created
        new_index_name = f"{alias_name}_{creation2_string}"
        self.assertIsNotNone(ES_INDICES_CLIENT.get(new_index_name)[new_index_name])
        # The expected alias is associated with the new index
        self.assertEqual(
            list(ES_INDICES_CLIENT.get_alias(alias_name).keys())[0], new_index_name
        )
        # The previous version of the index is still around
        creation1_index_name = f"{alias_name}_{creation1_string}"
        self.assertIsNotNone(
            ES_INDICES_CLIENT.get(creation1_index_name)[creation1_index_name]
        )
        # But not aliased any more
        self.assertEqual(
            ES_INDICES_CLIENT.get(creation1_index_name)[creation1_index_name][
                "aliases"
            ],
            {},
        )

    # Regenerate indices again to make sure versions n-2 of indices are
    # deleted (not just unaliased)
    creation3_datetime = datetime(2012, 3, 3, tzinfo=timezone.utc)
    creation3_string = creation3_datetime.strftime("%Y-%m-%d-%Hh%Mm%S.%fs")
    with mock.patch.object(timezone, "now", return_value=creation3_datetime):
        regenerate_indices(None)

    # All indices were replaced and had their aliases changed
    for index_name in expected_indices:
        new_index_name = f"{index_name}_{creation3_string}"
        # The index is created
        self.assertIsNotNone(ES_INDICES_CLIENT.get(new_index_name)[new_index_name])
        # The expected alias is associated with the new index
        self.assertEqual(
            list(ES_INDICES_CLIENT.get_alias(index_name).keys())[0], new_index_name
        )
        # The previous version of the index is still around.
        # BUGFIX: the original built this name with the stale `alias_name`
        # variable left over from the previous loop, so every iteration
        # checked the same (last) index instead of the current `index_name`.
        creation2_index_name = f"{index_name}_{creation2_string}"
        self.assertIsNotNone(
            ES_INDICES_CLIENT.get(creation2_index_name)[creation2_index_name]
        )
        # But not aliased any more
        self.assertEqual(
            ES_INDICES_CLIENT.get(creation2_index_name)[creation2_index_name][
                "aliases"
            ],
            {},
        )
        # Version n-2 of the index does not exist any more
        with self.assertRaises(NotFoundError):
            ES_INDICES_CLIENT.get(f"{index_name}_{creation1_string}")

    # Make sure our unrelated index was unaffected through regenerations
    self.assertIsNotNone(ES_INDICES_CLIENT.get("unrelated_index")["unrelated_index"])
    self.assertEqual(
        list(ES_INDICES_CLIENT.get_alias("unrelated_index_alias").keys())[0],
        "unrelated_index",
    )
def test_index_manager_perform_create_index(self):
    """
    Perform all side-effects through the ES client and return the index name
    (incl. timestamp)
    """
    # Create an indexable from scratch that mimicks the expected shape of the dynamic
    # import in es_index
    class IndexableClass:
        """Indexable stub"""

        index_name = "richie_courses"
        mapping = {
            "properties": {
                "code": {"type": "keyword"},
                "name": {"type": "text"},
            }
        }

        # pylint: disable=no-self-use
        def get_es_documents(self, index, action="index"):
            """Stub method"""
            for i in range(0, 10):
                yield {
                    "_id": i,
                    "_index": index,
                    "_op_type": action,
                    "code": f"course-{i:d}",
                    "name": f"Course Number {i:d}",
                }

    indexable = IndexableClass()

    # Set a fake time to check the name of the index.
    # CONSISTENCY: use `timezone.utc` like the other tests in this file
    # (the original used `pytz.utc`; both denote UTC, so behavior is
    # unchanged and the block no longer depends on pytz).
    now = datetime(2016, 5, 4, 3, 12, 33, 123456, tzinfo=timezone.utc)

    # Make sure our index is empty before we call the function
    self.assertEqual(ES_INDICES_CLIENT.get_alias("*"), {})

    mock_logger = mock.Mock(spec=["info"])

    with mock.patch.object(timezone, "now", return_value=now):
        new_index = perform_create_index(indexable, mock_logger)
    ES_INDICES_CLIENT.refresh()

    self.assertEqual(new_index, "richie_courses_2016-05-04-03h12m33.123456s")
    self.assertEqual(ES_CLIENT.count()["count"], 10)
    self.assertEqual(
        ES_INDICES_CLIENT.get_mapping(),
        {
            "richie_courses_2016-05-04-03h12m33.123456s": {
                "mappings": {
                    "properties": {
                        "code": {"type": "keyword"},
                        "name": {"type": "text"},
                    }
                }
            }
        },
    )
    mock_logger.info.assert_called()
def execute_query(self, querystring="", **extra):
    """
    Not a test.
    Prepare the ElasticSearch index and execute the query in it.
    """
    # Build the category documents from (id, kind, path, en title, fr title)
    category_fixtures = [
        (
            "24",
            "subjects",
            "001000",
            "Electric Birdwatching",
            "Observation des oiseaux électriques",
        ),
        ("33", "subjects", "001001", "Ocean biking", "Cyclisme océanique"),
        (
            "51",
            "subjects",
            "001002",
            "Elegiac bikeshedding",
            "Élégie de l'abri à vélos",
        ),
        ("44", "not_subjects", "001003", "Electric Decoys", "Leurres électriques"),
    ]
    categories = [
        {
            "complete": {
                "en": slice_string_for_completion(title_en),
                "fr": slice_string_for_completion(title_fr),
            },
            "id": category_id,
            "kind": kind,
            "path": path,
            "title": {"en": title_en, "fr": title_fr},
        }
        for category_id, kind, path, title_en, title_fr in category_fixtures
    ]

    # Clean slate, then bootstrap the categories index; analysis settings can
    # only be applied while the index is closed
    ES_INDICES_CLIENT.delete(index="_all")
    ES_INDICES_CLIENT.create(index=CATEGORIES_INDEX)
    ES_INDICES_CLIENT.close(index=CATEGORIES_INDEX)
    ES_INDICES_CLIENT.put_settings(body=ANALYSIS_SETTINGS, index=CATEGORIES_INDEX)
    ES_INDICES_CLIENT.open(index=CATEGORIES_INDEX)
    # Apply the Indexer's default categories mapping
    ES_INDICES_CLIENT.put_mapping(
        body=CategoriesIndexer.mapping, index=CATEGORIES_INDEX
    )

    # Index every category, padded with stub shared fields
    bulk_compat(
        actions=[
            {
                "_id": category["id"],
                "_index": CATEGORIES_INDEX,
                "_op_type": "create",
                "absolute_url": {"en": "en/url", "fr": "fr/url"},
                "cover_image": {"en": "en/image", "fr": "fr/image"},
                "is_meta": False,
                "logo": {"en": "en/some/img.png", "fr": "fr/some/img.png"},
                "nb_children": 0,
                **category,
            }
            for category in categories
        ],
        chunk_size=500,
        client=ES_CLIENT,
    )
    ES_INDICES_CLIENT.refresh()

    response = self.client.get(
        f"/api/v1.0/subjects/autocomplete/?{querystring:s}", **extra
    )
    self.assertEqual(response.status_code, 200)

    return categories, json.loads(response.content)
def test_partial_mappings_multilingual_text(self):
    """
    Make sure our multilingual_text dynamic mapping results in the proper
    mappings being generated when objects with the expected format are indexed
    """
    index_name = "stub_index"
    mapping = {"dynamic_templates": MULTILINGUAL_TEXT}
    # Create the index and set a mapping that includes the pattern we want to test
    ES_INDICES_CLIENT.create(index=index_name)
    ES_INDICES_CLIENT.put_mapping(index=index_name, body=mapping)
    # The index needs to be closed before we set an analyzer
    ES_INDICES_CLIENT.close(index=index_name)
    ES_INDICES_CLIENT.put_settings(body=ANALYSIS_SETTINGS, index=index_name)
    ES_INDICES_CLIENT.open(index=index_name)

    # The stub mapping only contains our dynamic template
    mapping = ES_INDICES_CLIENT.get_mapping(index=index_name)
    self.assertEqual(
        mapping[index_name]["mappings"],
        {"dynamic_templates": MULTILINGUAL_TEXT},
    )

    # Index an object that should trigger a match for our dynamic template
    ES_CLIENT.index(
        index=index_name,
        doc_type="_doc",
        body={"title": {"fr": "Un titre en français à titre d'exemple"}},
    )

    # The stub mapping has been extended with a matching property for 'fr':
    # a "language" subfield using the french analyzer and a "trigram"
    # subfield indexed with french_trigram but searched with french
    mapping = ES_INDICES_CLIENT.get_mapping(index=index_name)
    self.assertEqual(
        mapping[index_name]["mappings"],
        {
            "dynamic_templates": MULTILINGUAL_TEXT,
            "properties": {
                "title": {
                    "properties": {
                        "fr": {
                            "type": "text",
                            "fields": {
                                "language": {"type": "text", "analyzer": "french"},
                                "trigram": {
                                    "type": "text",
                                    "analyzer": "french_trigram",
                                    "search_analyzer": "french",
                                },
                            },
                        }
                    }
                }
            },
        },
    )

    # Index an object that should trigger a different match for our dynamic template
    ES_CLIENT.index(
        index=index_name,
        doc_type="_doc",
        body={"title": {"en": "An English title as an example"}},
    )

    # The sub mapping has been extended with a matching property for 'en',
    # alongside the previously generated 'fr' property
    mapping = ES_INDICES_CLIENT.get_mapping(index=index_name)
    self.assertEqual(
        mapping[index_name]["mappings"],
        {
            "dynamic_templates": MULTILINGUAL_TEXT,
            "properties": {
                "title": {
                    "properties": {
                        "en": {
                            "type": "text",
                            "fields": {
                                "language": {"type": "text", "analyzer": "english"},
                                "trigram": {
                                    "type": "text",
                                    "analyzer": "english_trigram",
                                    "search_analyzer": "english",
                                },
                            },
                        },
                        "fr": {
                            "type": "text",
                            "fields": {
                                "language": {"type": "text", "analyzer": "french"},
                                "trigram": {
                                    "type": "text",
                                    "analyzer": "french_trigram",
                                    "search_analyzer": "french",
                                },
                            },
                        },
                    }
                }
            },
        },
    )
def execute_query(self, querystring="", **extra):
    """
    Not a test.
    Prepare the ElasticSearch index and execute the query in it.
    """
    licences = [
        {"id": "1", "title": {"en": "CC-BY-SA"}},
        {"id": "2", "title": {"en": "CC-BY-NC"}},
        {"id": "3", "title": {"en": "All Rights Résërvés"}},
    ]

    # Clean slate, then bootstrap the licences index; analysis settings can
    # only be applied while the index is closed
    ES_INDICES_CLIENT.delete(index="_all")
    ES_INDICES_CLIENT.create(index=LICENCES_INDEX)
    ES_INDICES_CLIENT.close(index=LICENCES_INDEX)
    ES_INDICES_CLIENT.put_settings(body=ANALYSIS_SETTINGS, index=LICENCES_INDEX)
    ES_INDICES_CLIENT.open(index=LICENCES_INDEX)
    # Apply the Indexer's default licences mapping
    ES_INDICES_CLIENT.put_mapping(body=LicencesIndexer.mapping, index=LICENCES_INDEX)

    # Index every licence, deriving its completion slices from its title
    licence_actions = []
    for licence in licences:
        licence_actions.append(
            {
                "_id": licence["id"],
                "_index": LICENCES_INDEX,
                "_op_type": "create",
                "complete": {
                    "en": slice_string_for_completion(licence["title"]["en"])
                },
                **licence,
            }
        )
    bulk_compat(actions=licence_actions, chunk_size=500, client=ES_CLIENT)
    ES_INDICES_CLIENT.refresh()

    response = self.client.get(
        f"/api/v1.0/licences/autocomplete/?{querystring:s}", **extra
    )
    self.assertEqual(response.status_code, 200)

    return licences, json.loads(response.content)
def execute_query(self, courses, querystring="", **extra):
    """
    Not a test.
    Prepare the ElasticSearch index and execute the query in it.
    """
    # Clean slate, then bootstrap the courses index; analysis settings can
    # only be applied while the index is closed
    ES_INDICES_CLIENT.delete(index="_all")
    ES_INDICES_CLIENT.create(index=COURSES_INDEX)
    ES_INDICES_CLIENT.close(index=COURSES_INDEX)
    ES_INDICES_CLIENT.put_settings(body=ANALYSIS_SETTINGS, index=COURSES_INDEX)
    ES_INDICES_CLIENT.open(index=COURSES_INDEX)
    # Apply the Indexer's default courses mapping
    ES_INDICES_CLIENT.put_mapping(body=CoursesIndexer.mapping, index=COURSES_INDEX)

    # Register the sorting scripts used by course queries
    for script_id in ("score", "state_field"):
        ES_CLIENT.put_script(id=script_id, body=CoursesIndexer.scripts[script_id])

    # Index every course, padded with stub shared fields
    bulk_compat(
        actions=[
            {
                "_id": course["id"],
                "_index": COURSES_INDEX,
                "_op_type": "create",
                "absolute_url": {"en": "en/url", "fr": "fr/url"},
                "categories": ["1", "2", "3"],
                "cover_image": {"en": "en/image", "fr": "fr/image"},
                "is_meta": False,
                "logo": {"en": "/en/some/img.png", "fr": "/fr/some/img.png"},
                "nb_children": 0,
                "organizations": ["11", "12", "13"],
                **course,
            }
            for course in courses
        ],
        chunk_size=500,
        client=ES_CLIENT,
    )
    ES_INDICES_CLIENT.refresh()

    results = self.client.get(
        f"/api/v1.0/courses/autocomplete/?{querystring:s}", **extra
    )
    self.assertEqual(results.status_code, 200)

    return json.loads(results.content)
def setUp(self):
    """
    Instantiate our ES client and make sure all indices are deleted before
    each test
    """
    super().setUp()
    # Wildcard delete so every test case starts with an empty Elasticsearch
    ES_INDICES_CLIENT.delete(index="_all")