Example #1
def test_bulkrecordindexer_index_delete_by_record_id(app, queue):
    """Test utility class BulkRecordIndexer index/delete by record id."""
    with app.app_context():
        with establish_connection() as c:
            indexer = BulkRecordIndexer()
            id1 = uuid.uuid4()
            indexer.index_by_id(id1)
            indexer.delete_by_id(id1)

            consumer = Consumer(
                connection=c,
                queue=indexer.mq_queue.name,
                exchange=indexer.mq_exchange.name,
                routing_key=indexer.mq_routing_key)

            messages = list(consumer.iterqueue())
            [m.ack() for m in messages]

            assert len(messages) == 2
            data0 = messages[0].decode()
            assert data0['id'] == str(id1)
            assert data0['op'] == 'index'
            data1 = messages[1].decode()
            assert data1['id'] == str(id1)
            assert data1['op'] == 'delete'
Example #2
def test_bulkrecordindexer_index_delete_by_record(app, queue):
    """Test utility class BulkRecordIndexer index/delete by record object."""
    with app.app_context():
        with establish_connection() as c:
            recid = uuid.uuid4()
            record = Record.create({'title': 'Test'}, id_=recid)
            db.session.commit()
            indexer = BulkRecordIndexer()
            indexer.index(record)
            indexer.delete(record)

            consumer = Consumer(
                connection=c,
                queue=indexer.mq_queue.name,
                exchange=indexer.mq_exchange.name,
                routing_key=indexer.mq_routing_key)

            messages = list(consumer.iterqueue())
            [m.ack() for m in messages]

            assert len(messages) == 2
            data0 = messages[0].decode()
            assert data0['id'] == str(recid)
            assert data0['op'] == 'index'
            data1 = messages[1].decode()
            assert data1['id'] == str(recid)
            assert data1['op'] == 'delete'
Example #3
def process_data():
    connection = establish_connection()
    consumer = Consumer(connection=connection,
                        queue="data",
                        exchange="data",
                        routing_key="increment_data",
                        exchange_type="direct")

    data_to_save = {}
    for message in consumer.iterqueue():
        data = message.body
        data = loads(data)
        checksum = data['checksum']
        data_to_save.setdefault(checksum, []).append(data)

    from sentry.models import Group
    for checksum in data_to_save:
        data = data_to_save[checksum]
        merged_times_seen = len(data)
        data = merge_data(data)
        data['timestamp'] = datetime.strptime(data['timestamp'], '%Y-%m-%d %H:%M:%S')
        event = Group.objects.from_kwargs(**data)
        event.group.update(times_seen=F('times_seen') + merged_times_seen - 1)
    
    consumer.close()
    connection.close()
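The merge_data helper used above is not shown. A minimal sketch of what it might do, assuming each message gathered for a checksum is an event dict and later messages win on conflicting keys (both assumptions, not confirmed by the source):

def merge_data(events):
    """Hypothetical merge: fold the event dicts gathered for one checksum
    into a single dict, letting later messages override earlier keys."""
    merged = {}
    for event in events:
        merged.update(event)
    return merged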
Example #4
    def process_notifications(cls, verbose=False):
        """Process notifications queue."""
        count = {'send': 0, 'reject': 0, 'error': 0}
        with current_celery_app.pool.acquire(block=True) as conn:
            consumer = Consumer(
                connection=conn,
                queue=cls.mq_queue.name,
                exchange=cls.mq_exchange.name,
                routing_key=cls.mq_routing_key,
            )

            for message in consumer.iterqueue():
                payload = message.decode()
                try:
                    pid = payload['pid']
                    notification = Notification.get_record_by_pid(pid)
                    Dispatcher().dispatch_notification(notification, verbose)
                    message.ack()
                    count['send'] += 1
                except NoResultFound:
                    message.reject()
                    count['reject'] += 1
                except Exception:
                    message.reject()
                    current_app.logger.error(
                        "Failed to dispatch notification {pid}".format(
                            pid=payload.get('pid')
                        ),
                        exc_info=True
                    )
                    count['error'] += 1
            consumer.close()

        return count
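Given the counters this method returns, a caller might report the outcome like so (a sketch; NotificationDispatcher is a hypothetical name for the enclosing class, which the source does not show):

counts = NotificationDispatcher.process_notifications(verbose=True)
print('sent={send} rejected={reject} errors={error}'.format(**counts))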
Example #5
def test_indexer_bulk_index(app, queue):
    """Test delay indexing."""
    with app.app_context():
        with establish_connection() as c:
            indexer = RecordIndexer()
            id1 = uuid.uuid4()
            id2 = uuid.uuid4()
            indexer.bulk_index([id1, id2])
            indexer.bulk_delete([id1, id2])

            consumer = Consumer(
                connection=c,
                queue=indexer.mq_queue.name,
                exchange=indexer.mq_exchange.name,
                routing_key=indexer.mq_routing_key)

            messages = list(consumer.iterqueue())
            [m.ack() for m in messages]

            assert len(messages) == 4
            data0 = messages[0].decode()
            assert data0['id'] == str(id1)
            assert data0['op'] == 'index'
            data2 = messages[2].decode()
            assert data2['id'] == str(id1)
            assert data2['op'] == 'delete'
Example #6
def process_upvotes():
    """Process all currently gathered upvotes by saving them to the
    database."""
    connection = establish_connection()
    consumer = Consumer(
        connection=connection, queue="test", exchange="test", routing_key="test", exchange_type="direct"
    )
    # First process the messages: tally the number of upvotes
    # for every post.
    upvotes_for_post = {}
    messages_for_post = {}
    for message in consumer.iterqueue():
        id = message.body
        upvotes_for_post[id] = upvotes_for_post.get(id, 0) + 1
        # We also need to keep the message objects so we can ack the
        # messages as processed when we are finished with them.
        if id in messages_for_post:
            messages_for_post[id].append(message)
        else:
            messages_for_post[id] = [message]

    # Then increment the upvote counts in the database so we only need
    # one UPDATE/INSERT for each post.
    for id, vote_count in upvotes_for_post.items():
        p = Post.objects.get(id=int(id))  # message bodies arrive as strings, so cast
        p.upvotes += vote_count
        p.save()
        # Now that the upvotes have been recorded for this post we can
        # acknowledge the messages
        [message.ack() for message in messages_for_post[id]]

    consumer.close()
    connection.close()
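The examples so far only drain queues. The matching producer side, assuming the classic carrot API where Publisher is the counterpart of Consumer, might look like this (a sketch, not taken from the source):

from carrot.messaging import Publisher

def register_upvote(post_id):
    """Enqueue one upvote; bodies are read back via message.body."""
    connection = establish_connection()
    publisher = Publisher(connection=connection,
                          exchange="test",
                          routing_key="test",
                          exchange_type="direct")
    publisher.send(str(post_id))
    publisher.close()
    connection.close()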
Example #7
    def process_bulk_queue(self, es_bulk_kwargs=None, stats_only=True):
        """Process bulk indexing queue.

        :param dict es_bulk_kwargs: Passed to
            :func:`elasticsearch:elasticsearch.helpers.bulk`.
        :param boolean stats_only: if `True`, report only the number of
            successful/failed operations; otherwise return the number of
            successful operations and a list of error responses.
        """
        with current_celery_app.pool.acquire(block=True) as conn:
            consumer = Consumer(
                connection=conn,
                queue=self.mq_queue.name,
                exchange=self.mq_exchange.name,
                routing_key=self.mq_routing_key,
            )

            req_timeout = current_app.config['INDEXER_BULK_REQUEST_TIMEOUT']

            es_bulk_kwargs = es_bulk_kwargs or {}
            count = bulk(self.client,
                         self._actionsiter(consumer.iterqueue()),
                         stats_only=stats_only,
                         request_timeout=req_timeout,
                         expand_action_callback=(_es7_expand_action
                                                 if ES_VERSION[0] >= 7 else
                                                 default_expand_action),
                         **es_bulk_kwargs)

            consumer.close()

        return count
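Because es_bulk_kwargs is forwarded verbatim to elasticsearch.helpers.bulk, a caller can tune the helper without touching this method. A hypothetical call site (raise_on_error and chunk_size are standard options of the bulk helper):

count = indexer.process_bulk_queue(
    es_bulk_kwargs={'raise_on_error': False, 'chunk_size': 100})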
Example #8
    def process_bulk_queue(self, es_bulk_kwargs=None):
        """Process bulk indexing queue.

        :param dict es_bulk_kwargs: Passed to
            :func:`elasticsearch:elasticsearch.helpers.bulk`.
        """
        with current_celery_app.pool.acquire(block=True) as conn:
            consumer = Consumer(
                connection=conn,
                queue=self.mq_queue.name,
                exchange=self.mq_exchange.name,
                routing_key=self.mq_routing_key,
            )

            req_timeout = current_app.config['INDEXER_BULK_REQUEST_TIMEOUT']

            es_bulk_kwargs = es_bulk_kwargs or {}
            count = bulk(self.client,
                         self._actionsiter(consumer.iterqueue()),
                         stats_only=True,
                         request_timeout=req_timeout,
                         **es_bulk_kwargs)

            consumer.close()

        return count
Example #9
def process_stool(debug=False):
	""" Process all of the gathered increments by saving them
	to the database. """
	connection = establish_connection()
	consumer = Consumer(connection=connection,
						queue="dogstool",
						exchange="dogstool",
						routing_key="increment_stool",
						exchange_type="direct")

	clicks_for_dog = {}
	messages_for_dog = {}
	for message in consumer.iterqueue():
		# Sketch following the pattern of Example #6: tally one stool per
		# message body (the dog's name) and keep the message object so it
		# can be acknowledged once the count is saved.
		dog = message.body
		clicks_for_dog[dog] = clicks_for_dog.get(dog, 0) + 1
		messages_for_dog.setdefault(dog, []).append(message)

	# Saving the counts and acking the stored messages would follow here,
	# as in Example #6.
	consumer.close()
	connection.close()
Example #10
    def process_bulk_queue(self):
        """Process bulk indexing queue."""
        with establish_connection() as conn:
            consumer = Consumer(
                connection=conn,
                queue=self.mq_queue.name,
                exchange=self.mq_exchange.name,
                routing_key=self.mq_routing_key,
            )

            count = bulk(self.client,
                         self._actionsiter(consumer.iterqueue()),
                         stats_only=True)

            consumer.close()

        return count
Example #11
    def declare(self, exchange, exchange_type, binding="", queue=""):
        """declares the exchange, the queue and binds the queue to the exchange
        
        exchange        - exchange name
        exchange_type   - direct, topic, fanout
        binding         - binding to queue (optional)
        queue           - queue to bind to exchange using binding (optional)
        """
        # binding and queue must be supplied together; only a fanout
        # exchange may take a queue without a binding key.
        if (binding and not queue) or (queue and not binding):
            if queue and not exchange_type == "fanout":
                raise Error("binding and queue must be supplied together")

        consumer = Consumer(connection=self.broker,
                            exchange=exchange, exchange_type=exchange_type,
                            routing_key=binding, queue=queue)
        consumer.declare()
        consumer.close()
Example #12
    def consumer(self, conn):
        """Get a consumer for a connection."""
        return Consumer(
            connection=conn,
            queue=self.queue.name,
            exchange=self.exchange.name,
            routing_key=self.routing_key,
            no_ack=self.no_ack,
        )
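A factory like this keeps connection handling at the call site. Inside another method of the same class it could be combined with the pool-acquisition pattern from Examples #4 and #7 (a sketch, assuming current_celery_app is available as in those examples):

        with current_celery_app.pool.acquire(block=True) as conn:
            consumer = self.consumer(conn)
            for message in consumer.iterqueue():
                message.ack()
            consumer.close()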
Example #13
    def process_bulk_queue(self):
        """Process bulk indexing queue."""
        with establish_connection() as conn:
            consumer = Consumer(
                connection=conn,
                queue=self.mq_queue.name,
                exchange=self.mq_exchange.name,
                routing_key=self.mq_routing_key,
            )

            count = bulk(
                self.client,
                self._actionsiter(consumer.iterqueue()),
                stats_only=True)

            consumer.close()

        return count
Example #14
def consumer(app, queue):
    """Get a consumer on the queue object for testing bulk operations."""
    # TODO: Move this fixture to pytest-invenio
    with establish_connection() as c:
        yield Consumer(
            connection=c,
            queue=queue.name,
            exchange=queue.exchange.name,
            routing_key=queue.routing_key
        )
Example #15
def consumer(app, queue):
    """Get a consumer on the queue object for testing bulk operations."""
    # TODO: Move this fixture to pytest-invenio
    with establish_connection() as c:
        yield Consumer(
            connection=c,
            queue=app.config['INDEXER_MQ_QUEUE'].name,
            exchange=app.config['INDEXER_MQ_EXCHANGE'].name,
            routing_key=app.config['INDEXER_MQ_ROUTING_KEY'],
        )
Example #16
    def process_bulk_queue(self):
        """Process bulk harvesting queue."""
        from .tasks import bulk_records
        count = 0
        with current_celery_app.pool.acquire(block=True) as conn:
            try:
                consumer = Consumer(
                    connection=conn,
                    queue=self.mq_queue.name,
                    exchange=self.mq_exchange.name,
                    routing_key=self.mq_routing_key,
                )

                count = bulk_records(self._actionsiter(consumer.iterqueue()))
                consumer.close()
            except Exception as e:
                click.secho('Harvester Bulk queue Error: {e}'.format(e=e),
                            fg='red')
        return count
Example #17
    def consumer(self, conn):
        """Get a consumer for a connection."""
        return Consumer(
            connection=conn,
            queue=self.queue.name,
            exchange=self.exchange.name,
            exchange_type=self.exchange.type,
            durable=self.exchange.durable,
            auto_delete=self.exchange.auto_delete,
            routing_key=self.routing_key,
            no_ack=self.no_ack,
        )
Example #18
def check_end(num):
    connection = BrokerConnection(hostname='myhost',
                                  userid='webfis',
                                  password='******',
                                  virtual_host='webfishost',
                                  port=5672)
    consumer = Consumer(connection=connection,
                        queue="end"+str(num),
                        exchange="end",
                        routing_key="end"+str(num),
                        exchange_type="direct")

    message = consumer.fetch()
    if message and message.payload == "end":
        end = True
    else:
        end = False

    consumer.close()
    connection.release()
    return end
Example #19
def process_proximos_trenes():
    from lineas.documents import Linea, ProximoTren
    connection = establish_connection()
    consumer = Consumer(
        connection=connection,
        queue="proximos_trenes",
        exchange="proximos_trenes",
        routing_key="save_proximos_trenes",
        exchange_type="direct"
    )
    before = datetime.now()
    for message in consumer.iterqueue():
        d = json.loads(message.body)
        try:
            pt = ProximoTren()
            pt.linea = Linea.objects.get(id=d['linea'])
            pt._estacion = d['estacion']
            pt.proximos_origen = d['proximos_origen']
            pt.proximos_destino = d['proximos_destino']
            pt.save()
        except Exception as e:
            logger.debug('Failed to save ProximoTren %s: %s', d, e)
        message.ack()
    pts = ProximoTren.objects.filter(created__gt=before)
    pts = pts.order_by('-id')
    pts = [
        {
            'linea': str(pt.linea.nombre),
            'estacion': str(pt.estacion),
            'proximos_origen': pt.proximos_origen,
            'proximos_destino': pt.proximos_destino,
        } for pt in pts
    ]
    r = get_redis_client()
    r.publish(
        'proximos-trenes',
        json.dumps(pts),
    )
    consumer.close()
    connection.close()
Example #20
    def process_bulk_queue(self):
        """Process bulk indexing queue."""
        with current_celery_app.pool.acquire(block=True) as conn:
            consumer = Consumer(
                connection=conn,
                queue=self.mq_queue.name,
                exchange=self.mq_exchange.name,
                routing_key=self.mq_routing_key,
            )

            req_timeout = current_app.config['INDEXER_BULK_REQUEST_TIMEOUT']

            count = bulk(
                self.client,
                self._actionsiter(consumer.iterqueue()),
                stats_only=True,
                request_timeout=req_timeout,
            )

            consumer.close()

        return count
Example #21
    def process_bulk_queue(self):
        """Process bulk indexing queue."""
        with establish_connection() as conn:
            consumer = Consumer(
                connection=conn,
                queue=self.mq_queue.name,
                exchange=self.mq_exchange.name,
                routing_key=self.mq_routing_key,
            )

            req_timeout = current_app.config['INDEXER_BULK_REQUEST_TIMEOUT']

            count = bulk(
                self.client,
                self._actionsiter(consumer.iterqueue()),
                stats_only=True,
                request_timeout=req_timeout,
            )

            consumer.close()

        return count
Example #22
def get_error(num):
    connection = BrokerConnection(hostname='myhost',
                                  userid='webfis',
                                  password='******',
                                  virtual_host='webfishost',
                                  port=5672)
    consumer = Consumer(connection=connection,
                        queue=str(num),
                        exchange="error",
                        routing_key=str(num),
                        exchange_type="direct")

    message = consumer.fetch()
    if message:
        error = message.payload
        message.ack()
        print("geterror: " + str(error))
    else:
        error = "wait"

    consumer.close()
    connection.release()
    return error
Example #23
    def consume(self, queue, limit=None, callback=None, auto_declare=False):
        """consume messages in queue
        
        queue           - name of queue
        limit           - amount of messages to iterate through (default: no limit)

        callback        - method to call when a new message is received
                          must take two arguments: message_data, message
                          must send the acknowledgement: message.ack()
                          default: print message to stdout and send ack

        auto_declare    - automatically declare the queue (default: false)
        """
        if not callback:
            callback = _consume_callback

        consumer = Consumer(connection=self.broker, queue=queue,
                            auto_declare=auto_declare)

        consumer.register_callback(callback)
        for message in consumer.iterqueue(limit=limit, infinite=False):
            consumer.receive(message.payload, message)

        consumer.close()
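_consume_callback is referenced above but not shown. Matching the docstring (print the message and send the acknowledgement), a minimal implementation could be:

def _consume_callback(message_data, message):
    """Default callback: print the decoded payload and acknowledge it."""
    print(message_data)
    message.ack()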
Example #24
	def test_process_message(self):
		""" Test sending messages. Items will be dogs
		and I will count how many times each one
		creates a lovely stool. Scenario:
		 - Dopey will poop 5 times
		 - Jiggles will poop 7 times
		"""

		termprint("INFO", "Sending messages")
		dopey = [self.__send_message("dopey") for x in range(0,5)]
		jiggles = [self.__send_message("jiggles") for x in range(0,7)]

		connection = establish_connection()
		consumer = Consumer(connection=connection,
							queue="test_messages",
							exchange="test_messages",
							routing_key="test_increment_number",
							exchange_type="direct")
		clicks_for_item = {}
		messages_for_item = {}
		

		# save number of clicks for each 'item'
		termprint("ERROR", consumer)
		termprint("WARNING", dir(consumer))
		messages_count = 0
		for message in consumer.iterqueue():
			data = message.body
			messages_count += 1
			self.assertTrue(data)
			termprint("WARNING", dir(message))
			termprint("WARNING", "\n\tMessage body: %s" % data)
			clicks_for_item[data] = clicks_for_item.get(data, 0) + 1

			# store the message objects too so we can ack them after
			if data in messages_for_item:
				messages_for_item[data].append(message)
			else:
				messages_for_item[data] = [message]

		# Acknowledge only after the queue has been drained; acking inside
		# the message loop above would ack the same messages repeatedly.
		for item, click_count in clicks_for_item.items():
			termprint("INFO", "\n%s has %s clicks" % (item, click_count))
			[msg.ack() for msg in messages_for_item[item]]


		self.assertEqual(messages_count, 12)
		self.assertTrue("dopey" in clicks_for_item)
		self.assertTrue("jiggles" in clicks_for_item)
		self.assertTrue("dopey" in messages_for_item)
		self.assertTrue("jiggles" in messages_for_item)
		self.assertEqual(clicks_for_item.get("dopey"), 5)
		self.assertEqual(clicks_for_item.get("jiggles"), 7)

		# queue should now be empty
		messages2_count = sum(1 for _ in consumer.iterqueue())
		self.assertEqual(messages2_count, 0)

		consumer.close()
		connection.close()