def test_bad_index(self):
    """Check to make sure that the mappings are strict.

    Upserting a document with a field that is not declared in the mapping
    must be rejected by Elasticsearch, and the failed update must leave the
    stored mapping unchanged.
    """
    index_mapping = self.es.indices.get_mapping(
        index=ParentIndexable.get_index_name(),
        doc_type=ParentIndexable.get_mapping_type_name())
    # dict views are not subscriptable on Python 3; next(iter(...)) grabs
    # the first key portably (keys()[0] only worked on Python 2).
    alias_name = next(iter(index_mapping))
    mapping = index_mapping[alias_name]["mappings"]
    self.assertDictEqual(mapping, ParentIndexable.get_mapping())

    obj = ParentIndexable.objects.create(foo="Fighters")
    ParentIndexable.search_objects.refresh()

    # Extract the indexed document and sneak in a field the strict mapping
    # does not know about -- the upsert must raise.
    doc = obj.extract_document()
    doc["extra"] = "Just an additional string"
    with self.assertRaises(elasticsearch.RequestError):
        self.es.update(
            obj.get_index_name(), obj.get_mapping_type_name(), obj.id,
            body=dict(doc=doc, doc_as_upsert=True))

    # Re-fetch the mapping: the rejected update must not have mutated it.
    index_mapping = self.es.indices.get_mapping(
        index=ParentIndexable.get_index_name(),
        doc_type=ParentIndexable.get_mapping_type_name())
    alias_name = next(iter(index_mapping))
    mapping = index_mapping[alias_name]["mappings"]
    self.assertDictEqual(mapping, ParentIndexable.get_mapping())
def test_synces(self):
    """The ``synces`` command should push ES_SETTINGS overrides to the index.

    Installs a custom tokenizer in the (global) settings, runs the command,
    and verifies the tokenizer shows up in the live index settings.
    """
    # Deep-copy the backup so nested dicts cannot be mutated through the
    # live settings object while the override below is in place.
    backup_settings = copy.deepcopy(settings.ES_SETTINGS)
    test_tokenizer = {
        "type": "edgeNGram",
        "min_gram": "3",
        "max_gram": "4"
    }
    try:
        settings.ES_SETTINGS.update({
            "index": {
                "analysis": {
                    "tokenizer": {
                        "edge_ngram_test_tokenizer": test_tokenizer
                    }
                }
            }
        })
        call_command("synces", self.index_suffix, force=True)

        es_settings = self.es.indices.get_settings(
            index=ParentIndexable.get_index_name())
        # dict views are not subscriptable on Python 3; next(iter(...))
        # grabs the first key portably (keys()[0] only worked on Python 2).
        index_name = next(iter(es_settings))
        index_settings = es_settings[index_name]["settings"]
        self.assertEqual(
            index_settings["index"]["analysis"]["tokenizer"]
            ["edge_ngram_test_tokenizer"],
            test_tokenizer)
    finally:
        # Always restore the global settings, even if the command or an
        # assertion fails, so later tests see the original configuration.
        settings.ES_SETTINGS = backup_settings
def test_bad_index(self):
    """Check to make sure that the mappings are strict.

    An upsert carrying a field missing from the mapping must be rejected,
    and the mapping stored in Elasticsearch must remain unchanged.
    """
    index_mapping = self.es.indices.get_mapping(
        index=ParentIndexable.get_index_name(),
        doc_type=ParentIndexable.get_mapping_type_name())
    # dict views are not subscriptable on Python 3; next(iter(...)) grabs
    # the first key portably (keys()[0] only worked on Python 2).
    alias_name = next(iter(index_mapping))
    mapping = index_mapping[alias_name]["mappings"]
    self.assertDictEqual(mapping, ParentIndexable.get_mapping())

    obj = ParentIndexable.objects.create(foo="Fighters")
    ParentIndexable.search_objects.refresh()

    # Add a field the strict mapping does not declare -- the upsert must
    # raise a RequestError.
    doc = obj.extract_document()
    doc["extra"] = "Just an additional string"
    with self.assertRaises(elasticsearch.RequestError):
        self.es.update(
            obj.get_index_name(),
            obj.get_mapping_type_name(),
            obj.id,
            body=dict(doc=doc, doc_as_upsert=True)
        )

    # The rejected update must not have mutated the stored mapping.
    index_mapping = self.es.indices.get_mapping(
        index=ParentIndexable.get_index_name(),
        doc_type=ParentIndexable.get_mapping_type_name())
    alias_name = next(iter(index_mapping))
    mapping = index_mapping[alias_name]["mappings"]
    self.assertDictEqual(mapping, ParentIndexable.get_mapping())
def test_synces(self):
    """The ``synces`` command should push ES_SETTINGS overrides to the index.

    Registers a custom edge-ngram tokenizer in the global settings, runs
    the command, and asserts the index reports that tokenizer back.
    """
    # Deep-copy the backup so nested dicts cannot be mutated through the
    # live settings object while the override below is in place.
    backup_settings = copy.deepcopy(settings.ES_SETTINGS)
    test_tokenizer = {
        "type": "edgeNGram",
        "min_gram": "3",
        "max_gram": "4"
    }
    try:
        settings.ES_SETTINGS.update({
            "index": {
                "analysis": {
                    "tokenizer": {
                        "edge_ngram_test_tokenizer": test_tokenizer
                    }
                }
            }
        })
        call_command("synces", self.index_suffix, force=True)

        es_settings = self.es.indices.get_settings(
            index=ParentIndexable.get_index_name())
        # dict views are not subscriptable on Python 3; next(iter(...))
        # grabs the first key portably (keys()[0] only worked on Python 2).
        index_name = next(iter(es_settings))
        index_settings = es_settings[index_name]["settings"]
        self.assertEqual(
            index_settings["index"]["analysis"]["tokenizer"]
            ["edge_ngram_test_tokenizer"],
            test_tokenizer)
    finally:
        # Always restore the global settings, even on failure, so later
        # tests see the original configuration.
        settings.ES_SETTINGS = backup_settings