def update_in_index(instance, mapping):
    """
    Utility function for signal listeners to index to Elasticsearch.

    Currently uses synchronous tasks; because of that all exceptions are
    caught, so indexing failures never interfere with the regular model
    updates that triggered them.

    :param instance: model instance to (re)index.
    :param mapping: the MappingType class used to index the instance.
    """
    if settings.ES_DISABLED:
        return

    # Soft-deleted instances are removed from the index instead.
    if hasattr(instance, 'is_deleted') and instance.is_deleted:
        remove_from_index(instance, mapping)
        return

    # Lazy %-args so formatting only happens if the level is enabled.
    logger.info(u'Updating instance %s: %s',
                instance.__class__.__name__, instance.pk)

    # Extract all aliases available so we only touch indexes that exist.
    # .values() (not Py2-only .itervalues()) keeps this 2/3 compatible.
    aliases = list(itertools.chain.from_iterable(
        v['aliases'].keys()
        for v in es.indices.get_aliases().values()
        if 'aliases' in v
    ))

    for index in (DEFAULT_INDEX, NEW_INDEX):
        if index not in aliases:
            continue
        try:
            tasks.index_objects(mapping, [instance.id], es=es, index=index)
            es.indices.refresh(index)
        except Exception:
            # format_exc() takes no exception argument; it formats the
            # exception currently being handled.
            logger.error(traceback.format_exc())
def index(self, index_name):
    """Index objects from our index-enabled models into ``index_name``."""
    for mapping_class in self.MAPPINGS:
        model_class = mapping_class.get_model()
        self.stdout.write('Indexing %s from index %s' % (model_class, index_name))
        # Only live (non-soft-deleted) rows are indexed.
        pks = list(
            model_class.objects.filter(is_deleted=False)
            .values_list('pk', flat=True)
        )
        if pks:
            tasks.index_objects(mapping_class, pks, index=index_name)
def update_in_index(instance, mapping):
    """
    Utility function for signal listeners to index to Elasticsearch.

    Currently uses synchronous tasks; because of that all exceptions are
    caught, so indexing failures never interfere with the regular model
    updates that triggered them.

    :param instance: model instance to (re)index or unindex.
    :param mapping: the MappingType class used to index the instance.
    """
    logger.info('Updating instance %s', instance)
    if instance.is_deleted:
        try:
            tasks.unindex_objects(mapping, [instance.id],
                                  index=settings.ES_INDEXES['default'])
        except Exception:
            # Was a silent bare ``except: pass``; still best-effort, but
            # log the failure so it is visible instead of swallowed.
            logger.error(traceback.format_exc())
    else:
        try:
            tasks.index_objects(mapping, [instance.id],
                                index=settings.ES_INDEXES['default'])
        except Exception:
            # format_exc() takes no exception argument; it formats the
            # exception currently being handled.
            logger.error(traceback.format_exc())
def test_tasks_kwargs(self):
    """Test chunk size, es, and index parameters affects bulk_index"""
    for doc in ({'id': 1, 'name': 'odin skullcrusher'},
                {'id': 2, 'name': 'heimdall kneebiter'},
                {'id': 3, 'name': 'erik rose'}):
        FakeModel(**doc)

    class RecordingMappingType(FakeDjangoMappingType):
        # Captures how bulk_index gets invoked by index_objects.
        bulk_index_count = 0
        index_kwarg = None
        es_kwarg = None

        @classmethod
        def bulk_index(cls, *args, **kwargs):
            cls.bulk_index_count += 1
            cls.index_kwarg = kwargs.get('index')
            cls.es_kwarg = kwargs.get('es')

    # Default chunking sends everything in a single bulk call.
    index_objects(RecordingMappingType, [1, 2, 3])
    eq_(RecordingMappingType.bulk_index_count, 1)

    RecordingMappingType.bulk_index_count = 0
    index_objects(RecordingMappingType, [1, 2, 3], chunk_size=2)
    eq_(RecordingMappingType.bulk_index_count, 2)

    RecordingMappingType.bulk_index_count = 0
    index_objects(RecordingMappingType, [1, 2, 3], chunk_size=1)
    eq_(RecordingMappingType.bulk_index_count, 3)

    # es/index kwargs default to None and pass straight through.
    RecordingMappingType.index_kwarg = None
    RecordingMappingType.es_kwarg = None
    index_objects(RecordingMappingType, [1, 2, 3])
    eq_(RecordingMappingType.index_kwarg, None)
    eq_(RecordingMappingType.es_kwarg, None)

    index_objects(RecordingMappingType, [1, 2, 3],
                  es='crazy_es', index='crazy_index')
    eq_(RecordingMappingType.index_kwarg, 'crazy_index')
    eq_(RecordingMappingType.es_kwarg, 'crazy_es')
def test_tasks_kwargs(self):
    """Test chunk size, es, and index parameters affects bulk_index"""
    docs = [
        {"id": 1, "name": "odin skullcrusher"},
        {"id": 2, "name": "heimdall kneebiter"},
        {"id": 3, "name": "erik rose"},
    ]
    for fields in docs:
        FakeModel(**fields)

    class SpyMappingType(FakeDjangoMappingType):
        # Spy that records each bulk_index invocation.
        bulk_index_count = 0
        index_kwarg = None
        es_kwarg = None

        @classmethod
        def bulk_index(cls, *args, **kwargs):
            cls.bulk_index_count += 1
            cls.index_kwarg = kwargs.get("index")
            cls.es_kwarg = kwargs.get("es")

    # One bulk call per chunk: default (all), size 2 -> 2 calls, size 1 -> 3.
    for chunk_kwargs, expected_calls in (
        ({}, 1),
        ({"chunk_size": 2}, 2),
        ({"chunk_size": 1}, 3),
    ):
        SpyMappingType.bulk_index_count = 0
        index_objects(SpyMappingType, [1, 2, 3], **chunk_kwargs)
        eq_(SpyMappingType.bulk_index_count, expected_calls)

    # test index and es kwargs: None by default, forwarded when given.
    SpyMappingType.index_kwarg = None
    SpyMappingType.es_kwarg = None
    index_objects(SpyMappingType, [1, 2, 3])
    eq_(SpyMappingType.index_kwarg, None)
    eq_(SpyMappingType.es_kwarg, None)

    index_objects(SpyMappingType, [1, 2, 3], es="crazy_es", index="crazy_index")
    eq_(SpyMappingType.index_kwarg, "crazy_index")
    eq_(SpyMappingType.es_kwarg, "crazy_es")
def test_tasks(self):
    """Round-trip: index three docs, then unindex them again."""
    for doc in ({'id': 1, 'name': 'odin skullcrusher'},
                {'id': 2, 'name': 'heimdall kneebiter'},
                {'id': 3, 'name': 'erik rose'}):
        FakeModel(**doc)

    # Test index_objects task
    index_objects(FakeDjangoMappingType, [1, 2, 3])
    FakeDjangoMappingType.refresh_index()
    eq_(FakeDjangoMappingType.search().count(), 3)

    # Test unindex_objects task
    unindex_objects(FakeDjangoMappingType, [1, 2, 3])
    FakeDjangoMappingType.refresh_index()
    eq_(FakeDjangoMappingType.search().count(), 0)
def test_tasks(self):
    """Indexing three docs yields three hits; unindexing empties the index."""
    fixtures = [
        {'id': 1, 'name': 'odin skullcrusher'},
        {'id': 2, 'name': 'heimdall kneebiter'},
        {'id': 3, 'name': 'erik rose'},
    ]
    for fields in fixtures:
        FakeModel(**fields)

    ids = [1, 2, 3]

    # Test index_objects task
    index_objects(FakeDjangoMappingType, ids)
    FakeDjangoMappingType.refresh_index()
    eq_(FakeDjangoMappingType.search().count(), 3)

    # Test unindex_objects task
    unindex_objects(FakeDjangoMappingType, ids)
    FakeDjangoMappingType.refresh_index()
    eq_(FakeDjangoMappingType.search().count(), 0)
def update_in_index(instance, mapping):
    """
    Utility function for signal listeners to index to Elasticsearch.

    Currently uses synchronous tasks; because of that all exceptions are
    caught, so indexing failures never interfere with the regular model
    updates that triggered them.

    :param instance: model instance to (re)index.
    :param mapping: the MappingType class used to index the instance.
    """
    if settings.ES_DISABLED:
        return

    # Soft-deleted instances are removed from the index instead.
    if hasattr(instance, 'is_deleted') and instance.is_deleted:
        remove_from_index(instance, mapping)
        return

    # Lazy %-args so formatting only happens if the level is enabled.
    logger.info(u'Updating instance %s: %s',
                instance.__class__.__name__, instance.pk)
    try:
        # NOTE(review): ``main_index`` appears to be module-level state —
        # confirm it is defined alongside ``es`` in this module.
        main_index_with_type = get_index_name(main_index, mapping)
        tasks.index_objects(mapping, [instance.id], es=es,
                            index=main_index_with_type)
        es.indices.refresh(main_index_with_type)
    except Exception:
        # format_exc() takes no exception argument; it formats the
        # exception currently being handled.
        logger.error(traceback.format_exc())
def test_tasks(self):
    """index_objects adds all docs; unindex_objects removes them all."""
    names = ["odin skullcrusher", "heimdall kneebiter", "erik rose"]
    for pk, name in enumerate(names, start=1):
        FakeModel(id=pk, name=name)

    # Test index_objects task
    index_objects(FakeDjangoMappingType, [1, 2, 3])
    FakeDjangoMappingType.refresh_index()
    eq_(FakeDjangoMappingType.search().count(), 3)

    # Test unindex_objects task
    unindex_objects(FakeDjangoMappingType, [1, 2, 3])
    FakeDjangoMappingType.refresh_index()
    eq_(FakeDjangoMappingType.search().count(), 0)
def test_tasks_chunk_size(self):
    """Test chunk size affects bulk_index"""
    for doc in ({'id': 1, 'name': 'odin skullcrusher'},
                {'id': 2, 'name': 'heimdall kneebiter'},
                {'id': 3, 'name': 'erik rose'}):
        FakeModel(**doc)

    class CountingMappingType(FakeDjangoMappingType):
        # Counts how many bulk_index calls index_objects performs.
        bulk_index_count = 0

        @classmethod
        def bulk_index(cls, *args, **kwargs):
            cls.bulk_index_count += 1

    # ceil(3 / chunk_size) bulk calls are expected for 3 ids.
    for chunk_kwargs, expected_calls in (
        ({}, 1),
        ({'chunk_size': 2}, 2),
        ({'chunk_size': 1}, 3),
    ):
        CountingMappingType.bulk_index_count = 0
        index_objects(CountingMappingType, [1, 2, 3], **chunk_kwargs)
        eq_(CountingMappingType.bulk_index_count, expected_calls)
def update_in_index(instance, mapping):
    """
    Utility function for signal listeners to index to Elasticsearch.

    Currently uses synchronous tasks; because of that all exceptions are
    caught, so indexing failures never interfere with the regular model
    updates that triggered them.

    :param instance: model instance to (re)index.
    :param mapping: the MappingType class used to index the instance.
    """
    if settings.ES_DISABLED:
        return

    # Soft-deleted instances are removed from the index instead.
    if hasattr(instance, 'is_deleted') and instance.is_deleted:
        remove_from_index(instance, mapping)
        return

    # Lazy %-args so formatting only happens if the level is enabled.
    logger.info(u'Updating instance %s: %s',
                instance.__class__.__name__, instance.pk)

    # Extract all aliases available so we only touch indexes that exist.
    # .values() (not Py2-only .itervalues()) keeps this 2/3 compatible.
    aliases = list(itertools.chain.from_iterable(
        v['aliases'].keys()
        for v in es.indices.get_aliases().values()
        if 'aliases' in v
    ))

    for index in (DEFAULT_INDEX, NEW_INDEX):
        if index not in aliases:
            continue
        try:
            tasks.index_objects(mapping, [instance.id], es=es, index=index)
            es.indices.refresh(index)
        except Exception:
            # format_exc() takes no exception argument; it formats the
            # exception currently being handled.
            logger.error(traceback.format_exc())