def main(exchange, routing_key, payload):
    exchange = Exchange(exchange, type='topic')
    publisher = PoolPublisher(cfg.CONF.messaging.url)

    with Connection(cfg.CONF.messaging.url):
        publisher.publish(payload=payload, exchange=exchange, routing_key=routing_key)
def main(exchange, routing_key, payload): exchange = Exchange(exchange, type="topic") publisher = PoolPublisher() publisher.publish(payload=payload, exchange=exchange, routing_key=routing_key) eventlet.sleep(0.5)
def test_stop_consumption_on_shutdown(self):
    exchange = Exchange("st2.execution.test", type="topic")
    queue_name = "test-" + str(random.randint(1, 10000))
    queue = Queue(
        name=queue_name, exchange=exchange, routing_key="#", auto_delete=True
    )

    publisher = PoolPublisher()

    with transport_utils.get_connection() as connection:
        connection.connect()
        watcher = ActionsQueueConsumer(
            connection=connection, queues=queue, handler=self
        )
        watcher_thread = eventlet.greenthread.spawn(watcher.run)

        # Give it some time to start up since we are publishing on a new queue
        eventlet.sleep(0.5)

        body = LiveActionDB(
            status="scheduled", action="core.local", action_is_workflow=False
        )
        publisher.publish(payload=body, exchange=exchange)
        eventlet.sleep(0.2)
        self.assertEqual(self.message_count, 1)

        body = LiveActionDB(
            status="scheduled", action="core.local", action_is_workflow=True
        )
        watcher.shutdown()
        eventlet.sleep(1)
        publisher.publish(payload=body, exchange=exchange)

        # Second published message won't be consumed.
        self.assertEqual(self.message_count, 1)

        watcher_thread.kill()
def main(exchange, routing_key, payload):
    exchange = Exchange(exchange, type='topic')
    publisher = PoolPublisher(urls=transport_utils.get_messaging_urls())
    publisher.publish(payload=payload, exchange=exchange, routing_key=routing_key)
def test_publish_compression(self):
    live_action_db = LiveActionDB()
    live_action_db.id = ObjectId()
    live_action_db.status = "succeeded"
    live_action_db.action = "core.local"
    live_action_db.result = {"foo": "bar"}

    exchange = Exchange("st2.execution.test", type="topic")
    queue_name = "test-" + str(random.randint(1, 10000))
    queue = Queue(
        name=queue_name, exchange=exchange, routing_key="#", auto_delete=True
    )

    publisher = PoolPublisher()

    with transport_utils.get_connection() as connection:
        connection.connect()
        watcher = QueueConsumer(connection=connection, queue=queue)
        watcher_thread = eventlet.greenthread.spawn(watcher.run)

        # Give it some time to start up since we are publishing on a new queue
        eventlet.sleep(0.5)

        self.assertEqual(len(watcher.received_messages), 0)

        # 1. Verify compression is off as a default
        publisher.publish(payload=live_action_db, exchange=exchange)

        eventlet.sleep(0.2)

        self.assertEqual(len(watcher.received_messages), 1)
        self.assertEqual(
            watcher.received_messages[0][1].properties["content_type"],
            "application/x-python-serialize",
        )
        self.assertEqual(
            watcher.received_messages[0][1].properties["content_encoding"], "binary"
        )
        self.assertEqual(
            watcher.received_messages[0][1].properties["application_headers"], {}
        )
        self.assertEqual(watcher.received_messages[0][0].id, live_action_db.id)

        # 2. Verify config level option is used
        cfg.CONF.set_override(name="compression", group="messaging", override="zstd")
        publisher.publish(payload=live_action_db, exchange=exchange)

        eventlet.sleep(0.2)

        self.assertEqual(len(watcher.received_messages), 2)
        self.assertEqual(
            watcher.received_messages[1][1].properties["content_type"],
            "application/x-python-serialize",
        )
        self.assertEqual(
            watcher.received_messages[1][1].properties["content_encoding"], "binary"
        )
        self.assertEqual(
            watcher.received_messages[1][1].properties["application_headers"],
            {"compression": "application/zstd"},
        )
        self.assertEqual(watcher.received_messages[1][0].id, live_action_db.id)

        # 3. Verify argument level option is used and has precedence over config one
        cfg.CONF.set_override(name="compression", group="messaging", override="zstd")
        publisher.publish(payload=live_action_db, exchange=exchange, compression="gzip")

        eventlet.sleep(0.2)

        self.assertEqual(len(watcher.received_messages), 3)
        self.assertEqual(
            watcher.received_messages[2][1].properties["content_type"],
            "application/x-python-serialize",
        )
        self.assertEqual(
            watcher.received_messages[2][1].properties["content_encoding"], "binary"
        )
        self.assertEqual(
            watcher.received_messages[2][1].properties["application_headers"],
            {"compression": "application/x-gzip"},
        )
        self.assertEqual(watcher.received_messages[2][0].id, live_action_db.id)

        watcher_thread.kill()
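# A minimal sketch of the two compression paths exercised by the test above,
# assuming the st2 config has already been loaded so the [messaging] compression
# option is registered; the exchange name and dict payload are placeholders.
from kombu import Exchange
from oslo_config import cfg

from st2common.transport.publishers import PoolPublisher

exchange = Exchange("st2.execution.test", type="topic")
publisher = PoolPublisher()

# Config-level default: applies to every publish() call.
cfg.CONF.set_override(name="compression", group="messaging", override="zstd")
publisher.publish(payload={"foo": "bar"}, exchange=exchange)

# Call-level override: takes precedence over the config value.
publisher.publish(payload={"foo": "bar"}, exchange=exchange, compression="gzip")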
def main(exchange, routing_key, payload):
    exchange = Exchange(exchange, type='topic')
    publisher = PoolPublisher()
    publisher.publish(payload=payload, exchange=exchange, routing_key=routing_key)
    eventlet.sleep(0.5)