def process_bulk_queue(self, es_bulk_kwargs=None, stats_only=True):
    """Process the bulk indexing queue.

    :param dict es_bulk_kwargs: Extra keyword arguments passed to
        :func:`elasticsearch:elasticsearch.helpers.bulk`.
    :param bool stats_only: if `True` only report the number of
        successful/failed operations; otherwise return the number of
        successful operations and a list of error responses.
    :returns: The result of :func:`elasticsearch.helpers.bulk`.
    """
    with current_celery_app.pool.acquire(block=True) as conn:
        consumer = Consumer(
            connection=conn,
            queue=self.mq_queue.name,
            exchange=self.mq_exchange.name,
            routing_key=self.mq_routing_key,
        )
        req_timeout = current_app.config['INDEXER_BULK_REQUEST_TIMEOUT']
        # Copy so the caller's dict is never mutated, and use setdefault so
        # callers may override the timeout/expand callback without triggering
        # a "got multiple values for keyword argument" TypeError.
        es_bulk_kwargs = dict(es_bulk_kwargs or {})
        es_bulk_kwargs.setdefault('request_timeout', req_timeout)
        es_bulk_kwargs.setdefault(
            'expand_action_callback',
            (_es7_expand_action if ES_VERSION[0] >= 7
             else default_expand_action),
        )
        try:
            count = bulk(
                self.client,
                self._actionsiter(consumer.iterqueue()),
                stats_only=stats_only,
                **es_bulk_kwargs
            )
        finally:
            # Always release the broker channel, even when bulk() raises.
            consumer.close()
        return count
def test_indexer_bulk_index(app, queue):
    """Test delay indexing."""
    with app.app_context():
        with establish_connection() as conn:
            indexer = RecordIndexer()
            first_id, second_id = uuid.uuid4(), uuid.uuid4()
            # Queue two index operations followed by two delete operations.
            indexer.bulk_index([first_id, second_id])
            indexer.bulk_delete([first_id, second_id])

            consumer = Consumer(
                connection=conn,
                queue=indexer.mq_queue.name,
                exchange=indexer.mq_exchange.name,
                routing_key=indexer.mq_routing_key)

            messages = list(consumer.iterqueue())
            for message in messages:
                message.ack()

            assert len(messages) == 4
            first_payload = messages[0].decode()
            assert first_payload['id'] == str(first_id)
            assert first_payload['op'] == 'index'
            third_payload = messages[2].decode()
            assert third_payload['id'] == str(first_id)
            assert third_payload['op'] == 'delete'
def process_notifications(cls, verbose=False):
    """Process the notifications queue.

    Consumes every queued message, dispatches the referenced notification
    and acknowledges the message; messages whose notification cannot be
    found or whose dispatch fails are rejected.

    :param bool verbose: Passed through to the dispatcher.
    :returns: dict with counts of sent, rejected and errored messages.
    """
    count = {'send': 0, 'reject': 0, 'error': 0}
    with current_celery_app.pool.acquire(block=True) as conn:
        consumer = Consumer(
            connection=conn,
            queue=cls.mq_queue.name,
            exchange=cls.mq_exchange.name,
            routing_key=cls.mq_routing_key,
        )
        try:
            for message in consumer.iterqueue():
                payload = message.decode()
                try:
                    pid = payload['pid']
                    notification = Notification.get_record_by_pid(pid)
                    Dispatcher().dispatch_notification(notification, verbose)
                    message.ack()
                    count['send'] += 1
                except NoResultFound:
                    # Notification no longer exists; drop the message.
                    message.reject()
                    count['reject'] += 1
                except Exception:
                    # Best-effort queue drain: reject and log, keep going.
                    message.reject()
                    current_app.logger.error(
                        "Failed to dispatch notification {pid}".format(
                            pid=payload.get('pid')
                        ),
                        exc_info=True
                    )
                    count['error'] += 1
        finally:
            # Close the consumer even if iterating the queue itself fails.
            consumer.close()
    return count
def process_bulk_queue(self, es_bulk_kwargs=None, stats_only=True):
    """Process the bulk indexing queue.

    :param dict es_bulk_kwargs: Extra keyword arguments passed to
        :func:`elasticsearch:elasticsearch.helpers.bulk`.
    :param bool stats_only: if `True` (the default, preserving previous
        behavior) only report the number of successful/failed operations;
        otherwise return the number of successful operations and a list of
        error responses.
    :returns: The result of :func:`elasticsearch.helpers.bulk`.
    """
    with current_celery_app.pool.acquire(block=True) as conn:
        consumer = Consumer(
            connection=conn,
            queue=self.mq_queue.name,
            exchange=self.mq_exchange.name,
            routing_key=self.mq_routing_key,
        )
        req_timeout = current_app.config['INDEXER_BULK_REQUEST_TIMEOUT']
        # Copy so the caller's dict is never mutated, and use setdefault so
        # a caller-supplied request_timeout does not raise a "got multiple
        # values for keyword argument" TypeError.
        es_bulk_kwargs = dict(es_bulk_kwargs or {})
        es_bulk_kwargs.setdefault('request_timeout', req_timeout)
        try:
            count = bulk(
                self.client,
                self._actionsiter(consumer.iterqueue()),
                stats_only=stats_only,
                **es_bulk_kwargs
            )
        finally:
            # Always release the broker channel, even when bulk() raises.
            consumer.close()
        return count
def test_bulkrecordindexer_index_delete_by_record(app, queue):
    """Test utility class BulkRecordIndexer index/delete by record object."""
    with app.app_context():
        with establish_connection() as conn:
            record_id = uuid.uuid4()
            record = Record.create({'title': 'Test'}, id_=record_id)
            db.session.commit()

            # Queue one index and one delete operation for the record.
            indexer = BulkRecordIndexer()
            indexer.index(record)
            indexer.delete(record)

            consumer = Consumer(
                connection=conn,
                queue=indexer.mq_queue.name,
                exchange=indexer.mq_exchange.name,
                routing_key=indexer.mq_routing_key)

            messages = list(consumer.iterqueue())
            for message in messages:
                message.ack()

            assert len(messages) == 2
            index_payload = messages[0].decode()
            assert index_payload['id'] == str(record_id)
            assert index_payload['op'] == 'index'
            delete_payload = messages[1].decode()
            assert delete_payload['id'] == str(record_id)
            assert delete_payload['op'] == 'delete'
def test_bulkrecordindexer_index_delete_by_record_id(app, queue):
    """Test utility class BulkRecordIndexer index/delete by record id."""
    with app.app_context():
        with establish_connection() as conn:
            indexer = BulkRecordIndexer()
            record_id = uuid.uuid4()
            # Queue one index and one delete operation by id.
            indexer.index_by_id(record_id)
            indexer.delete_by_id(record_id)

            consumer = Consumer(
                connection=conn,
                queue=indexer.mq_queue.name,
                exchange=indexer.mq_exchange.name,
                routing_key=indexer.mq_routing_key)

            messages = list(consumer.iterqueue())
            for message in messages:
                message.ack()

            assert len(messages) == 2
            index_payload = messages[0].decode()
            assert index_payload['id'] == str(record_id)
            assert index_payload['op'] == 'index'
            delete_payload = messages[1].decode()
            assert delete_payload['id'] == str(record_id)
            assert delete_payload['op'] == 'delete'
def consumer(self, conn):
    """Build a message consumer bound to the given connection."""
    consumer_kwargs = dict(
        connection=conn,
        queue=self.queue.name,
        exchange=self.exchange.name,
        routing_key=self.routing_key,
        no_ack=self.no_ack,
    )
    return Consumer(**consumer_kwargs)
def consumer(app, queue):
    """Get a consumer on the queue object for testing bulk operations."""
    # TODO: Move this fixture to pytest-invenio
    config = app.config
    with establish_connection() as connection:
        yield Consumer(
            connection=connection,
            queue=config['INDEXER_MQ_QUEUE'].name,
            exchange=config['INDEXER_MQ_EXCHANGE'].name,
            routing_key=config['INDEXER_MQ_ROUTING_KEY'],
        )
def consumer(app, queue):
    """Get a consumer on the queue object for testing bulk operations."""
    # TODO: Move this fixture to pytest-invenio
    with establish_connection() as connection:
        yield Consumer(
            connection=connection,
            queue=queue.name,
            exchange=queue.exchange.name,
            routing_key=queue.routing_key,
        )
def consumer(self, conn):
    """Build a message consumer bound to the given connection."""
    exchange = self.exchange
    consumer_kwargs = dict(
        connection=conn,
        queue=self.queue.name,
        exchange=exchange.name,
        exchange_type=exchange.type,
        durable=exchange.durable,
        auto_delete=exchange.auto_delete,
        routing_key=self.routing_key,
        no_ack=self.no_ack,
    )
    return Consumer(**consumer_kwargs)
def process_bulk_queue(self):
    """Process the bulk indexing queue.

    :returns: The result of :func:`elasticsearch.helpers.bulk` —
        counts of successful and failed operations (``stats_only``).
    """
    with establish_connection() as conn:
        consumer = Consumer(
            connection=conn,
            queue=self.mq_queue.name,
            exchange=self.mq_exchange.name,
            routing_key=self.mq_routing_key,
        )
        try:
            count = bulk(self.client,
                         self._actionsiter(consumer.iterqueue()),
                         stats_only=True)
        finally:
            # Always release the broker channel, even when bulk() raises.
            consumer.close()
        return count
def process_bulk_queue(self):
    """Process the bulk harvesting queue.

    Errors are reported on the console (best-effort drain); the method
    never raises.

    :returns: Number of records processed, 0 on failure.
    """
    from .tasks import bulk_records
    count = 0
    with current_celery_app.pool.acquire(block=True) as conn:
        try:
            consumer = Consumer(
                connection=conn,
                queue=self.mq_queue.name,
                exchange=self.mq_exchange.name,
                routing_key=self.mq_routing_key,
            )
            try:
                count = bulk_records(self._actionsiter(consumer.iterqueue()))
            finally:
                # Close the consumer even when bulk_records() raises;
                # previously it was only closed on the success path.
                consumer.close()
        except Exception as e:
            click.secho('Harvester Bulk queue Error: {e}'.format(e=e),
                        fg='red')
    return count
def process_bulk_queue(self):
    """Process the bulk indexing queue.

    :returns: The result of :func:`elasticsearch.helpers.bulk` —
        counts of successful and failed operations (``stats_only``).
    """
    with current_celery_app.pool.acquire(block=True) as conn:
        consumer = Consumer(
            connection=conn,
            queue=self.mq_queue.name,
            exchange=self.mq_exchange.name,
            routing_key=self.mq_routing_key,
        )
        req_timeout = current_app.config['INDEXER_BULK_REQUEST_TIMEOUT']
        try:
            count = bulk(
                self.client,
                self._actionsiter(consumer.iterqueue()),
                stats_only=True,
                request_timeout=req_timeout,
            )
        finally:
            # Always release the broker channel, even when bulk() raises.
            consumer.close()
        return count