def test_read_execution(benchmark, fixture_file: str, compression):
    """Benchmark reading a LiveAction model back from the database.

    Runs once per (fixture_file, compression) combination; the database
    connection is re-established after each config override so the new
    compressor setting actually takes effect.
    """
    fixture_path = os.path.join(FIXTURES_DIR, fixture_file)
    with open(fixture_path, "rb") as fp:
        content = fp.read()

    cfg.CONF.set_override(name="compressors", group="database", override=compression)

    # NOTE: It's important we correctly reestablish connection before each setting change
    disconnect()
    connection = db_setup()

    # Sanity-check that the override is reflected in the connection settings.
    connection_repr = str(connection)
    if compression is None:
        assert "compressors" not in connection_repr
    elif compression == "zstd":
        assert "compressors=['zstd']" in connection_repr

    model = LiveActionDB()
    model.status = "succeeded"
    model.action = "core.local"
    model.result = content
    saved_model = LiveAction.add_or_update(model)

    def run_benchmark():
        # Only the read path is measured.
        return LiveAction.get_by_id(saved_model.id)

    fetched_model = benchmark(run_benchmark)

    # Assert that result is correctly converted back to dict on retrieval
    assert fetched_model == saved_model
def run_benchmark():
    """Insert a fresh LiveAction model whose result is the fixture payload.

    NOTE(review): ``content`` is a free variable captured from the
    enclosing benchmark function's scope.
    """
    model = LiveActionDB()
    model.status = "succeeded"
    model.action = "core.local"
    model.result = content
    return LiveAction.add_or_update(model)
def _create_inquiry(self, ttl, timestamp):
    """Persist a pending Inquiry liveaction plus its execution object.

    :param ttl: TTL value stored in the liveaction result.
    :param timestamp: Start timestamp assigned to the liveaction.
    """
    action_db = self.models['actions']['ask.yaml']

    liveaction_db = LiveActionDB()
    liveaction_db.status = action_constants.LIVEACTION_STATUS_PENDING
    liveaction_db.start_timestamp = timestamp

    # Reference the action by its fully qualified "pack.name" ref.
    reference = ResourceReference(name=action_db.name, pack=action_db.pack)
    liveaction_db.action = reference.ref
    liveaction_db.result = {'ttl': ttl}

    liveaction_db = LiveAction.add_or_update(liveaction_db)
    executions.create_execution_object(liveaction_db)
def run_benchmark():
    """Publish a LiveAction model using the selected compression algorithm.

    NOTE(review): ``data``, ``publisher``, ``exchange`` and ``compression``
    are free variables captured from the enclosing benchmark's scope.
    """
    model = LiveActionDB()
    model.status = "succeeded"
    model.action = "core.local"
    model.result = data

    publisher.publish(payload=model, exchange=exchange, compression=compression)
def run_benchmark():
    """Pickle a LiveAction model, optionally compressing the result.

    NOTE(review): ``data`` and ``algorithm`` are free variables captured
    from the enclosing benchmark's scope.
    """
    model = LiveActionDB()
    model.status = "succeeded"
    model.action = "core.local"
    model.result = data

    serialized = pickle.dumps(model)
    if algorithm == "zstandard":
        serialized = zstd.ZstdCompressor().compress(serialized)
    return serialized
def get_liveaction_instance(self, status=None, result=None):
    """Build a ``core.local`` LiveActionDB stub with a Mistral callback.

    :param status: Optional status; only set when truthy.
    :param result: Optional result; only set when truthy.
    """
    callback = {
        'source': MISTRAL_RUNNER_NAME,
        'url': 'http://127.0.0.1:8989/v2/action_executions/12345'
    }
    liveaction = LiveActionDB(
        action='core.local',
        parameters={'cmd': 'uname -a'},
        callback=callback,
        context=dict()
    )

    # Preserve truthiness semantics: falsy values are deliberately skipped.
    for attr, value in (('status', status), ('result', result)):
        if value:
            setattr(liveaction, attr, value)
    return liveaction
def get_liveaction_instance(self, status=None, result=None):
    """Return a ``core.local`` LiveActionDB with a Mistral callback attached.

    :param status: Optional status; assigned only when truthy.
    :param result: Optional result; assigned only when truthy.
    """
    liveaction = LiveActionDB(
        action='core.local',
        parameters={'cmd': 'uname -a'},
        callback={
            'source': MISTRAL_RUNNER_NAME,
            'url': 'http://127.0.0.1:8989/v2/action_executions/12345'
        },
        context=dict()
    )

    if status:
        liveaction.status = status
    if result:
        liveaction.result = result
    return liveaction
def test_publish_compression(self):
    """Verify PoolPublisher compression behavior end to end.

    Covers three scenarios in order: no compression by default, the
    ``messaging.compression`` config option, and the per-call
    ``compression`` argument overriding the config option.
    """
    # Model under test; id is pre-assigned so it can be compared on receipt.
    live_action_db = LiveActionDB()
    live_action_db.id = ObjectId()
    live_action_db.status = "succeeded"
    live_action_db.action = "core.local"
    live_action_db.result = {"foo": "bar"}

    exchange = Exchange("st2.execution.test", type="topic")
    # Random queue name so concurrent/repeated runs don't collide.
    queue_name = "test-" + str(random.randint(1, 10000))
    queue = Queue(
        name=queue_name, exchange=exchange, routing_key="#", auto_delete=True
    )
    publisher = PoolPublisher()
    with transport_utils.get_connection() as connection:
        connection.connect()
        # Consume in a background greenthread so published messages can be
        # inspected via watcher.received_messages.
        watcher = QueueConsumer(connection=connection, queue=queue)
        watcher_thread = eventlet.greenthread.spawn(watcher.run)

        # Give it some time to start up since we are publishing on a new queue
        eventlet.sleep(0.5)

        self.assertEqual(len(watcher.received_messages), 0)

        # 1. Verify compression is off as a default
        publisher.publish(payload=live_action_db, exchange=exchange)

        eventlet.sleep(0.2)

        self.assertEqual(len(watcher.received_messages), 1)
        self.assertEqual(
            watcher.received_messages[0][1].properties["content_type"],
            "application/x-python-serialize",
        )
        self.assertEqual(
            watcher.received_messages[0][1].properties["content_encoding"], "binary"
        )
        # No compression header when compression is disabled.
        self.assertEqual(
            watcher.received_messages[0][1].properties["application_headers"], {}
        )
        self.assertEqual(watcher.received_messages[0][0].id, live_action_db.id)

        # 2. Verify config level option is used
        cfg.CONF.set_override(name="compression", group="messaging", override="zstd")

        publisher.publish(payload=live_action_db, exchange=exchange)

        eventlet.sleep(0.2)

        self.assertEqual(len(watcher.received_messages), 2)
        self.assertEqual(
            watcher.received_messages[1][1].properties["content_type"],
            "application/x-python-serialize",
        )
        self.assertEqual(
            watcher.received_messages[1][1].properties["content_encoding"], "binary"
        )
        self.assertEqual(
            watcher.received_messages[1][1].properties["application_headers"],
            {"compression": "application/zstd"},
        )
        self.assertEqual(watcher.received_messages[1][0].id, live_action_db.id)

        # 3. Verify argument level option is used and has precedence over config one
        cfg.CONF.set_override(name="compression", group="messaging", override="zstd")

        publisher.publish(payload=live_action_db, exchange=exchange, compression="gzip")

        eventlet.sleep(0.2)

        self.assertEqual(len(watcher.received_messages), 3)
        self.assertEqual(
            watcher.received_messages[2][1].properties["content_type"],
            "application/x-python-serialize",
        )
        self.assertEqual(
            watcher.received_messages[2][1].properties["content_encoding"], "binary"
        )
        self.assertEqual(
            watcher.received_messages[2][1].properties["application_headers"],
            {"compression": "application/x-gzip"},
        )
        self.assertEqual(watcher.received_messages[2][0].id, live_action_db.id)

        watcher_thread.kill()