def test_ssl_connect_client_side_cert_authentication(self):
    # 1. Success, valid client side cert provided
    ssl_keyfile = os.path.join(CERTS_FIXTURES_PATH, 'client/private_key.pem')
    ssl_certfile = os.path.join(CERTS_FIXTURES_PATH, 'client/client_certificate.pem')
    ca_cert_path = os.path.join(CERTS_FIXTURES_PATH, 'ca/ca_certificate_bundle.pem')

    cfg.CONF.set_override(name='ssl_keyfile', override=ssl_keyfile, group='messaging')
    cfg.CONF.set_override(name='ssl_certfile', override=ssl_certfile, group='messaging')
    cfg.CONF.set_override(name='ssl_cert_reqs', override='required', group='messaging')
    cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging')

    connection = transport_utils.get_connection(urls='amqp://*****:*****@127.0.0.1:5671/')

    try:
        self.assertTrue(connection.connect())
        self.assertTrue(connection.connected)
    finally:
        if connection:
            connection.release()

    # 2. Invalid client side cert provided - failure
    ssl_keyfile = os.path.join(CERTS_FIXTURES_PATH, 'client/private_key.pem')
    ssl_certfile = os.path.join(CERTS_FIXTURES_PATH, 'server/server_certificate.pem')
    ca_cert_path = os.path.join(CERTS_FIXTURES_PATH, 'ca/ca_certificate_bundle.pem')

    cfg.CONF.set_override(name='ssl_keyfile', override=ssl_keyfile, group='messaging')
    cfg.CONF.set_override(name='ssl_certfile', override=ssl_certfile, group='messaging')
    cfg.CONF.set_override(name='ssl_cert_reqs', override='required', group='messaging')
    cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging')

    connection = transport_utils.get_connection(urls='amqp://*****:*****@127.0.0.1:5671/')

    expected_msg = r'\[X509: KEY_VALUES_MISMATCH\] key values mismatch'
    self.assertRaisesRegexp(ssl.SSLError, expected_msg, connection.connect)
def test_ssl_connection_on_ssl_listener_success(self): # Using query param notation urls = "amqp://*****:*****@127.0.0.1:5671/?ssl=true" connection = transport_utils.get_connection(urls=urls) try: self.assertTrue(connection.connect()) self.assertTrue(connection.connected) finally: if connection: connection.release() # Using messaging.ssl config option cfg.CONF.set_override(name="ssl", override=True, group="messaging") connection = transport_utils.get_connection( urls="amqp://*****:*****@127.0.0.1:5671/" ) try: self.assertTrue(connection.connect()) self.assertTrue(connection.connected) finally: if connection: connection.release()
def test_ssl_connection_ca_certs_provided(self):
    ca_cert_path = os.path.join(CERTS_FIXTURES_PATH, 'ca/ca_certificate_bundle.pem')

    cfg.CONF.set_override(name='ssl', override=True, group='messaging')
    cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging')

    # 1. Validate server cert against a valid CA bundle (success) - cert required
    cfg.CONF.set_override(name='ssl_cert_reqs', override='required', group='messaging')

    connection = transport_utils.get_connection(urls='amqp://*****:*****@127.0.0.1:5671/')

    try:
        self.assertTrue(connection.connect())
        self.assertTrue(connection.connected)
    finally:
        if connection:
            connection.release()

    # 2. Validate server cert against other CA bundle (failure)
    # CA bundle which was not used to sign the server cert
    ca_cert_path = os.path.join('/etc/ssl/certs/thawte_Primary_Root_CA.pem')

    cfg.CONF.set_override(name='ssl_cert_reqs', override='required', group='messaging')
    cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging')

    connection = transport_utils.get_connection(urls='amqp://*****:*****@127.0.0.1:5671/')

    expected_msg = r'\[SSL: CERTIFICATE_VERIFY_FAILED\] certificate verify failed'
    self.assertRaisesRegexp(ssl.SSLError, expected_msg, connection.connect)

    # 3. Validate server cert against other CA bundle (failure) - cert optional
    ca_cert_path = os.path.join('/etc/ssl/certs/thawte_Primary_Root_CA.pem')

    cfg.CONF.set_override(name='ssl_cert_reqs', override='optional', group='messaging')
    cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging')

    connection = transport_utils.get_connection(urls='amqp://*****:*****@127.0.0.1:5671/')

    expected_msg = r'\[SSL: CERTIFICATE_VERIFY_FAILED\] certificate verify failed'
    self.assertRaisesRegexp(ssl.SSLError, expected_msg, connection.connect)

    # 4. Validate server cert against other CA bundle (success)
    # We use an invalid bundle, but cert_reqs is "none" so no verification is
    # performed and the connection succeeds
    ca_cert_path = os.path.join('/etc/ssl/certs/thawte_Primary_Root_CA.pem')

    cfg.CONF.set_override(name='ssl_cert_reqs', override='none', group='messaging')
    cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging')

    connection = transport_utils.get_connection(urls='amqp://*****:*****@127.0.0.1:5671/')

    try:
        self.assertTrue(connection.connect())
        self.assertTrue(connection.connected)
    finally:
        if connection:
            connection.release()
def test_stop_consumption_on_shutdown(self):
    exchange = Exchange("st2.execution.test", type="topic")
    queue_name = "test-" + str(random.randint(1, 10000))
    queue = Queue(
        name=queue_name, exchange=exchange, routing_key="#", auto_delete=True
    )
    publisher = PoolPublisher()

    with transport_utils.get_connection() as connection:
        connection.connect()
        watcher = ActionsQueueConsumer(
            connection=connection, queues=queue, handler=self
        )
        watcher_thread = eventlet.greenthread.spawn(watcher.run)

        # Give it some time to start up since we are publishing on a new queue
        eventlet.sleep(0.5)

        body = LiveActionDB(
            status="scheduled", action="core.local", action_is_workflow=False
        )
        publisher.publish(payload=body, exchange=exchange)
        eventlet.sleep(0.2)
        self.assertEqual(self.message_count, 1)

        body = LiveActionDB(
            status="scheduled", action="core.local", action_is_workflow=True
        )
        watcher.shutdown()
        eventlet.sleep(1)
        publisher.publish(payload=body, exchange=exchange)

        # Second published message won't be consumed.
        self.assertEqual(self.message_count, 1)

        watcher_thread.kill()
def start(self): try: self.connection = transport_utils.get_connection() self._updates_thread = concurrency.spawn(self.run) except: LOG.exception("Failed to start sensor_watcher.") self.connection.release()
def get_notifier():
    with transport_utils.get_connection() as conn:
        return Notifier(
            conn,
            [NOTIFIER_ACTIONUPDATE_WORK_QUEUE],
            trigger_dispatcher=TriggerDispatcher(LOG),
        )
def start(self):
    try:
        self.connection = transport_utils.get_connection()
        self._updates_thread = eventlet.spawn(self.run)
    except:
        LOG.exception('Failed to start sensor_watcher.')
        self.connection.release()
def test_non_ssl_connection_on_ssl_listener_port_failure(self): connection = transport_utils.get_connection( urls="amqp://*****:*****@127.0.0.1:5671/" ) expected_msg_1 = ( "[Errno 104]" # followed by: ' Connection reset by peer' or ' ECONNRESET' ) expected_msg_2 = "Socket closed" expected_msg_3 = "Server unexpectedly closed connection" try: connection.connect() except Exception as e: self.assertFalse(connection.connected) self.assertIsInstance(e, (IOError, socket.error)) self.assertTrue( expected_msg_1 in six.text_type(e) or expected_msg_2 in six.text_type(e) or expected_msg_3 in six.text_type(e) ) else: self.fail("Exception was not thrown") if connection: connection.release()
def register_exchanges(): LOG.debug("Registering exchanges...") connection_urls = transport_utils.get_messaging_urls() with transport_utils.get_connection() as conn: # Use ConnectionRetryWrapper to deal with rmq clustering etc. retry_wrapper = ConnectionRetryWrapper( cluster_size=len(connection_urls), logger=LOG ) def wrapped_register_exchanges(connection, channel): for exchange in EXCHANGES: _do_register_exchange( exchange=exchange, connection=connection, channel=channel, retry_wrapper=retry_wrapper, ) retry_wrapper.run(connection=conn, wrapped_callback=wrapped_register_exchanges) def wrapped_predeclare_queues(connection, channel): for queue in QUEUES: _do_predeclare_queue(channel=channel, queue=queue) retry_wrapper.run(connection=conn, wrapped_callback=wrapped_predeclare_queues)
def start(self):
    try:
        self.connection = transport_utils.get_connection()
        self._updates_thread = concurrency.spawn(self.run)
        self._load_thread = concurrency.spawn(self._load_triggers_from_db)
    except:
        LOG.exception('Failed to start watcher.')
        self.connection.release()
def start(self):
    try:
        self.connection = transport_utils.get_connection()
        self._updates_thread = eventlet.spawn(self.run)
        self._load_thread = eventlet.spawn(self._load_triggers_from_db)
    except:
        LOG.exception('Failed to start watcher.')
        self.connection.release()
def get_listener(name):
    global _stream_listener
    global _execution_output_listener

    if name == 'stream':
        if not _stream_listener:
            with transport_utils.get_connection() as conn:
                _stream_listener = StreamListener(conn)
                eventlet.spawn_n(listen, _stream_listener)
        return _stream_listener
    elif name == 'execution_output':
        if not _execution_output_listener:
            with transport_utils.get_connection() as conn:
                _execution_output_listener = ExecutionOutputListener(conn)
                eventlet.spawn_n(listen, _execution_output_listener)
        return _execution_output_listener
    else:
        raise ValueError('Invalid listener name: %s' % (name))
def main(queue, exchange, routing_key='#'):
    exchange = Exchange(exchange, type='topic')
    queue = Queue(name=queue, exchange=exchange, routing_key=routing_key, auto_delete=True)

    with transport_utils.get_connection() as connection:
        connection.connect()
        watcher = QueueConsumer(connection=connection, queue=queue)
        watcher.run()
def _cleanup_old_queues(self):
    with transport_utils.get_connection() as connection:
        for q in self.OLD_QS:
            bound_q = q(connection.default_channel)
            try:
                bound_q.delete()
            except:
                print('Failed to delete %s.' % q.name)
                traceback.print_exc()
def test_process_message(self):
    with transport_utils.get_connection() as conn:
        tracker = ResultsTracker(conn, [RESULTSTRACKER_ACTIONSTATE_WORK_QUEUE])
        tracker._bootstrap()
        state = ActionStateConsumerTests.get_state(
            ActionStateConsumerTests.liveactions['liveaction1.yaml'])
        tracker._queue_consumer._process_message(state)
        querier = tracker.get_querier('test_querymodule')
        self.assertEqual(querier._query_contexts.qsize(), 1)
def __init__(self, urls=None): """ :param urls: Connection URLs to use. If not provided it uses a default value from th config. :type urls: ``list`` """ urls = urls or transport_utils.get_messaging_urls() connection = transport_utils.get_connection( urls=urls, connection_kwargs={"failover_strategy": "round-robin"}) self.pool = connection.Pool(limit=10) self.cluster_size = len(urls)
def __init__(self, urls=None): """ :param urls: Connection URLs to use. If not provided it uses a default value from th config. :type urls: ``list`` """ urls = urls or transport_utils.get_messaging_urls() connection = transport_utils.get_connection(urls=urls, connection_kwargs={'failover_strategy': 'round-robin'}) self.pool = connection.Pool(limit=10) self.cluster_size = len(urls)
def test_ssl_connection_on_ssl_listener_success(self):
    # Using query param notation
    urls = 'amqp://*****:*****@127.0.0.1:5671/?ssl=true'
    connection = transport_utils.get_connection(urls=urls)

    try:
        self.assertTrue(connection.connect())
        self.assertTrue(connection.connected)
    finally:
        if connection:
            connection.release()

    # Using messaging.ssl config option
    cfg.CONF.set_override(name='ssl', override=True, group='messaging')

    connection = transport_utils.get_connection(urls='amqp://*****:*****@127.0.0.1:5671/')

    try:
        self.assertTrue(connection.connect())
        self.assertTrue(connection.connected)
    finally:
        if connection:
            connection.release()
def test_non_ssl_connection_on_ssl_listener_port_failure(self):
    connection = transport_utils.get_connection(urls='amqp://*****:*****@127.0.0.1:5671/')

    expected_msg_1 = '[Errno 104] Connection reset by peer'
    expected_msg_2 = 'Socket closed'

    try:
        connection.connect()
    except Exception as e:
        self.assertFalse(connection.connected)
        self.assertTrue(isinstance(e, (IOError, socket.error)))
        self.assertTrue(expected_msg_1 in six.text_type(e) or
                        expected_msg_2 in six.text_type(e))
    else:
        self.fail('Exception was not thrown')

    if connection:
        connection.release()
def test_ssl_connection_on_ssl_listener_success(self):
    ca_cert_path = os.path.join(CERTS_FIXTURES_PATH, "ca/ca_certificate_bundle.pem")

    cfg.CONF.set_override(name="ssl", override=True, group="messaging")
    cfg.CONF.set_override(
        name="ssl_ca_certs", override=ca_cert_path, group="messaging"
    )

    urls = "amqp://*****:*****@127.0.0.1:5671/"
    connection = transport_utils.get_connection(urls=urls)

    try:
        self.assertTrue(connection.connect())
        self.assertTrue(connection.connected)
    finally:
        if connection:
            connection.release()
def test_non_ssl_connection_on_ssl_listener_port_failure(self):
    connection = transport_utils.get_connection(
        urls='amqp://*****:*****@127.0.0.1:5671/')

    expected_msg_1 = '[Errno 104] Connection reset by peer'
    expected_msg_2 = 'Socket closed'

    try:
        connection.connect()
    except Exception as e:
        self.assertFalse(connection.connected)
        self.assertTrue(isinstance(e, (IOError, socket.error)))
        self.assertTrue(expected_msg_1 in six.text_type(e) or
                        expected_msg_2 in six.text_type(e))
    else:
        self.fail('Exception was not thrown')

    if connection:
        connection.release()
def register_exchanges():
    LOG.debug('Registering exchanges...')
    connection_urls = transport_utils.get_messaging_urls()

    with transport_utils.get_connection() as conn:
        # Use ConnectionRetryWrapper to deal with rmq clustering etc.
        retry_wrapper = ConnectionRetryWrapper(cluster_size=len(connection_urls),
                                               logger=LOG)

        def wrapped_register_exchanges(connection, channel):
            for exchange in EXCHANGES:
                _do_register_exchange(exchange=exchange, connection=connection,
                                      channel=channel, retry_wrapper=retry_wrapper)

        retry_wrapper.run(connection=conn, wrapped_callback=wrapped_register_exchanges)

        def wrapped_predeclare_queues(connection, channel):
            for queue in QUEUES:
                _do_predeclare_queue(channel=channel, queue=queue)

        retry_wrapper.run(connection=conn, wrapped_callback=wrapped_predeclare_queues)
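For context, a simplified, hypothetical stand-in for the _do_register_exchange helper called above, assuming kombu's Exchange bind/declare API; the real helper in the codebase may do more (error handling, skipping already-declared exchanges).

# Hypothetical, simplified sketch of _do_register_exchange for illustration only.
# It binds the kombu Exchange to the open channel and performs an idempotent declare.
def _do_register_exchange(exchange, connection, channel, retry_wrapper):
    bound_exchange = exchange(channel)   # bind the Exchange object to the channel
    bound_exchange.declare()             # declare is idempotent on the broker
    LOG.debug('Registered exchange %s.', exchange.name)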
def get_worker():
    with transport_utils.get_connection() as conn:
        return TriggerInstanceDispatcher(conn, [RULESENGINE_WORK_QUEUE])
def get_notifier():
    with transport_utils.get_connection() as conn:
        return Notifier(conn, [NOTIFIER_ACTIONUPDATE_WORK_QUEUE],
                        trigger_dispatcher=TriggerDispatcher(LOG))
def get_scheduler_entrypoint():
    with transport_utils.get_connection() as conn:
        return SchedulerEntrypoint(conn, [ACTIONSCHEDULER_REQUEST_QUEUE])
def get_tracker():
    with transport_utils.get_connection() as conn:
        return ResultsTracker(conn, [RESULTSTRACKER_ACTIONSTATE_WORK_QUEUE])
def get_worker():
    with transport_utils.get_connection() as conn:
        return ActionExecutionDispatcher(conn, ACTIONRUNNER_QUEUES)
def test_ssl_connection_ca_certs_provided(self): ca_cert_path = os.path.join(CERTS_FIXTURES_PATH, "ca/ca_certificate_bundle.pem") cfg.CONF.set_override(name="ssl", override=True, group="messaging") cfg.CONF.set_override( name="ssl_ca_certs", override=ca_cert_path, group="messaging" ) # 1. Validate server cert against a valid CA bundle (success) - cert required cfg.CONF.set_override( name="ssl_cert_reqs", override="required", group="messaging" ) connection = transport_utils.get_connection( urls="amqp://*****:*****@127.0.0.1:5671/" ) try: self.assertTrue(connection.connect()) self.assertTrue(connection.connected) finally: if connection: connection.release() # 2. Validate server cert against other CA bundle (failure) # CA bundle which was not used to sign the server cert ca_cert_path = os.path.join("/etc/ssl/certs/SecureTrust_CA.pem") cfg.CONF.set_override( name="ssl_cert_reqs", override="required", group="messaging" ) cfg.CONF.set_override( name="ssl_ca_certs", override=ca_cert_path, group="messaging" ) connection = transport_utils.get_connection( urls="amqp://*****:*****@127.0.0.1:5671/" ) expected_msg = r"\[SSL: CERTIFICATE_VERIFY_FAILED\] certificate verify failed" self.assertRaisesRegexp(ssl.SSLError, expected_msg, connection.connect) # 3. Validate server cert against other CA bundle (failure) ca_cert_path = os.path.join("/etc/ssl/certs/SecureTrust_CA.pem") cfg.CONF.set_override( name="ssl_cert_reqs", override="optional", group="messaging" ) cfg.CONF.set_override( name="ssl_ca_certs", override=ca_cert_path, group="messaging" ) connection = transport_utils.get_connection( urls="amqp://*****:*****@127.0.0.1:5671/" ) expected_msg = r"\[SSL: CERTIFICATE_VERIFY_FAILED\] certificate verify failed" self.assertRaisesRegexp(ssl.SSLError, expected_msg, connection.connect) # 4. Validate server cert against other CA bundle (failure) # We use invalid bundle but cert_reqs is none ca_cert_path = os.path.join("/etc/ssl/certs/SecureTrust_CA.pem") cfg.CONF.set_override(name="ssl_cert_reqs", override="none", group="messaging") cfg.CONF.set_override( name="ssl_ca_certs", override=ca_cert_path, group="messaging" ) connection = transport_utils.get_connection( urls="amqp://*****:*****@127.0.0.1:5671/" ) try: self.assertTrue(connection.connect()) self.assertTrue(connection.connected) finally: if connection: connection.release()
def test_publish_compression(self):
    live_action_db = LiveActionDB()
    live_action_db.id = ObjectId()
    live_action_db.status = "succeeded"
    live_action_db.action = "core.local"
    live_action_db.result = {"foo": "bar"}

    exchange = Exchange("st2.execution.test", type="topic")
    queue_name = "test-" + str(random.randint(1, 10000))
    queue = Queue(
        name=queue_name, exchange=exchange, routing_key="#", auto_delete=True
    )
    publisher = PoolPublisher()

    with transport_utils.get_connection() as connection:
        connection.connect()
        watcher = QueueConsumer(connection=connection, queue=queue)
        watcher_thread = eventlet.greenthread.spawn(watcher.run)

        # Give it some time to start up since we are publishing on a new queue
        eventlet.sleep(0.5)

        self.assertEqual(len(watcher.received_messages), 0)

        # 1. Verify compression is off as a default
        publisher.publish(payload=live_action_db, exchange=exchange)
        eventlet.sleep(0.2)

        self.assertEqual(len(watcher.received_messages), 1)
        self.assertEqual(
            watcher.received_messages[0][1].properties["content_type"],
            "application/x-python-serialize",
        )
        self.assertEqual(
            watcher.received_messages[0][1].properties["content_encoding"], "binary"
        )
        self.assertEqual(
            watcher.received_messages[0][1].properties["application_headers"], {}
        )
        self.assertEqual(watcher.received_messages[0][0].id, live_action_db.id)

        # 2. Verify config level option is used
        cfg.CONF.set_override(name="compression", group="messaging", override="zstd")
        publisher.publish(payload=live_action_db, exchange=exchange)
        eventlet.sleep(0.2)

        self.assertEqual(len(watcher.received_messages), 2)
        self.assertEqual(
            watcher.received_messages[1][1].properties["content_type"],
            "application/x-python-serialize",
        )
        self.assertEqual(
            watcher.received_messages[1][1].properties["content_encoding"], "binary"
        )
        self.assertEqual(
            watcher.received_messages[1][1].properties["application_headers"],
            {"compression": "application/zstd"},
        )
        self.assertEqual(watcher.received_messages[1][0].id, live_action_db.id)

        # 3. Verify argument level option is used and has precedence over config one
        cfg.CONF.set_override(name="compression", group="messaging", override="zstd")
        publisher.publish(
            payload=live_action_db, exchange=exchange, compression="gzip"
        )
        eventlet.sleep(0.2)

        self.assertEqual(len(watcher.received_messages), 3)
        self.assertEqual(
            watcher.received_messages[2][1].properties["content_type"],
            "application/x-python-serialize",
        )
        self.assertEqual(
            watcher.received_messages[2][1].properties["content_encoding"], "binary"
        )
        self.assertEqual(
            watcher.received_messages[2][1].properties["application_headers"],
            {"compression": "application/x-gzip"},
        )
        self.assertEqual(watcher.received_messages[2][0].id, live_action_db.id)

        watcher_thread.kill()
def get_worker():
    with transport_utils.get_connection() as conn:
        return ExecutionsExporter(conn, [EXPORTER_WORK_QUEUE])
def get_engine():
    with txpt_utils.get_connection() as conn:
        return WorkflowExecutionHandler(conn, WORKFLOW_EXECUTION_QUEUES)