def test_create_ssl_context_py34(self):
    # Verify create_ssl_context() on Python 3.4+: default context,
    # `cafile` loading, `cadata` in both PEM and DER form, and client
    # certificate/key loading with a password.
    cafile, certfile, keyfile = self._check_ssl_dir()

    # Default context must verify peers and check hostnames.
    context = create_ssl_context()
    self.assertEqual(context.verify_mode, ssl.CERT_REQUIRED)
    self.assertEqual(context.check_hostname, True)

    # Loading a CA file keeps strict verification and yields CA certs.
    context = create_ssl_context(cafile=str(cafile))
    self.assertEqual(context.verify_mode, ssl.CERT_REQUIRED)
    self.assertEqual(context.check_hostname, True)
    der_ca = context.get_ca_certs(binary_form=True)
    self.assertTrue(der_ca)

    # Same with `cadata` argument
    with cafile.open("rb") as f:
        data = f.read()
    context = create_ssl_context(cadata=data.decode("ascii"))
    self.assertEqual(context.get_ca_certs(binary_form=True), der_ca)

    # And with DER encoded binary form
    context = create_ssl_context(cadata=der_ca[0])
    self.assertEqual(context.get_ca_certs(binary_form=True), der_ca)

    # Full client-auth setup: CA + signed cert + encrypted private key.
    context = create_ssl_context(cafile=str(cafile),
                                 certfile=str(certfile),
                                 keyfile=str(keyfile),
                                 password="******")
    self.assertEqual(context.verify_mode, ssl.CERT_REQUIRED)
    self.assertEqual(context.check_hostname, True)
    self.assertTrue(context.get_ca_certs())
async def get_aiokafka_producer(
    bootstrap_servers: List[str] = None,
    security_protocol: str = None,
    ssl_cafile: str = None,
    ssl_certfile: str = None,
    ssl_keyfile: str = None,
    sasl_mechanism: str = None,
    sasl_plain_username: str = None,
    sasl_plain_password: str = None,
) -> AIOKafkaProducer:
    """Build, start and return an AIOKafkaProducer for the given settings.

    When no ``ssl_cafile`` is supplied, the certifi CA bundle is used so
    TLS verification still has a trust root.
    """
    if not ssl_cafile:
        ssl_cafile = certifi.where()
    context = create_ssl_context(
        cafile=ssl_cafile, certfile=ssl_certfile, keyfile=ssl_keyfile)
    producer = AIOKafkaProducer(
        bootstrap_servers=bootstrap_servers,
        security_protocol=security_protocol,
        ssl_context=context,
        sasl_mechanism=sasl_mechanism,
        sasl_plain_username=sasl_plain_username,
        sasl_plain_password=sasl_plain_password,
    )
    # The producer is returned already started and ready to send.
    await producer.start()
    return producer
async def main(kafka_topic=KAFKA_TOPIC, sites=SITES, run_total=None):
    """Schedule per-second site checks and publish results to Kafka.

    :param kafka_topic: topic the check results are sent to.
    :param sites: iterable of site dicts; each must have a 'url' key.
    :param run_total: optional cap on the number of runs per cron job
        (used by tests).
    """
    ssl_context = None
    security_protocol = 'PLAINTEXT'
    if KAFKA_SSL_CA is not None:
        # Switch to TLS with client authentication when a CA is configured.
        ssl_context = create_ssl_context(cafile=KAFKA_SSL_CA,
                                         certfile=KAFKA_SSL_CERT,
                                         keyfile=KAFKA_SSL_KEY)
        security_protocol = 'SSL'
    producer = AIOKafkaProducer(bootstrap_servers=KAFKA_HOST,
                                value_serializer=serializer,
                                security_protocol=security_protocol,
                                ssl_context=ssl_context)
    msh = Scheduler()
    try:
        for site in sites:
            url = site['url']
            log.info(f'Monitoring {url}')
            msh.add_job(
                CronJob(run_total=run_total, name=f'check_{url}').every().second.go(
                    check_site, producer, site, kafka_topic))
        await producer.start()
        await msh.start()
    except Exception as e:
        log.error(f'Got error starting scheduler: {e}')
        # Bare `raise` re-raises the active exception with its original
        # traceback; the previous `raise e` appended a redundant frame.
        raise
    finally:
        # Always release the producer's connections, started or not.
        await producer.stop()
async def test_send_message():
    """End-to-end check: Website.send_message() publishes to Kafka and the
    same payload is read back by a TLS consumer on the topic."""
    set_envs()
    url = 'localhost'
    test_message_broker_credentials = form_message_broker_credentials()
    # Certificate paths are relative to the working directory —
    # presumably the test runner starts in the project root (verify).
    context = create_ssl_context(
        cafile="./.ca-cert",
        certfile="./.cert-signed",
        keyfile="./.cert-key",
    )
    consumer = AIOKafkaConsumer(
        kafka_topic,
        bootstrap_servers=test_message_broker_credentials,
        security_protocol="SSL",
        ssl_context=context)
    await consumer.start()
    website = Website(url, test_message_broker_credentials, 'bar')
    test_message = website.form_message(True)
    await website.send_message(test_message)
    # Only the first message is inspected: stopping the consumer inside
    # the loop ends the async iteration.
    async for msg in consumer:
        received_message = json.loads(msg.value)
        await consumer.stop()
    assert received_message == test_message
def __post_init__(
    self,
    ssl_cafile: Optional[str] = None,
    ssl_certfile: Optional[str] = None,
    ssl_keyfile: Optional[str] = None,
    ssl_password: Optional[str] = None,
) -> None:
    """Normalize server list, build an optional SSL context, and
    pre-assemble the kwargs shared by Kafka client construction."""
    # Build a TLS context only when at least one certificate path was
    # supplied; otherwise self.ssl_context keeps its field default
    # (declared on the dataclass, not visible here — verify).
    if ssl_cafile or ssl_certfile or ssl_keyfile:
        self.ssl_context = create_ssl_context(
            cafile=ssl_cafile,
            certfile=ssl_certfile,
            keyfile=ssl_keyfile,
            password=ssl_password,
        )
    # Accept a comma-separated string as well as a list of servers.
    if isinstance(self.bootstrap_servers, str):
        self.bootstrap_servers = [
            s.strip() for s in self.bootstrap_servers.split(",")
        ]
    # Kwargs reused whenever a producer/consumer is created from this
    # config object.
    self._client_kwargs = dict(
        bootstrap_servers=self.bootstrap_servers,
        security_protocol=self.security_protocol,
        ssl_context=self.ssl_context,
        sasl_mechanism=self.sasl_mechanism,
        sasl_plain_password=self.sasl_plain_password,
        sasl_plain_username=self.sasl_plain_username,
        sasl_kerberos_service_name=self.sasl_kerberos_service_name,
        sasl_kerberos_domain_name=self.sasl_kerberos_domain_name,
    )
    # client_id is only forwarded when explicitly set.
    if self.client_id:
        self._client_kwargs["client_id"] = self.client_id
def client(loop, aiohttp_client):
    # pytest fixture: build the app with its routes, create a Postgres
    # pool and (for the "aiven" environment) a Kafka SSL context, then
    # yield a running test client. Teardown stops any started consumer
    # and closes the pool.
    app = web.Application()
    app.add_routes([
        web.get('/api/v1/system/is_alive', is_alive),
        web.get('/api/v1/producer/send', send_to_kafka_topic),
        web.get('/api/v1/consumer/start', start_consumer),
        web.get('/api/v1/consumer/stop', stop_consumer),
        web.get('/api/v1/postgres/events', get_events_from_pg)
    ])
    app.pool = loop.run_until_complete(
        asyncpg.create_pool(dsn=settings.POSTGRES_URL,
                            min_size=2,
                            max_size=20,
                            loop=loop))
    if settings.ENVIRONMENT == "aiven":
        app.ssl_context = create_ssl_context(
            cafile=settings.SSL_CAFILE_KAFKA,
            certfile=settings.SSL_CERTFILE_KAFKA,
            keyfile=settings.SSL_KEYFILE)
    else:
        app.ssl_context = None
    yield loop.run_until_complete(aiohttp_client(app))
    # `app.consumer` is presumably set by the /consumer/start handler
    # during a test — only stop it if it exists (verify against handler).
    if hasattr(app, "consumer"):
        loop.run_until_complete(app.consumer.stop())
    loop.run_until_complete(app.pool.close())
async def init_app(loop):
    """Assemble the aiohttp application: routes, a Postgres pool, and
    (for the "aiven" environment) a Kafka SSL context."""
    app = web.Application(loop=loop)
    routes = [
        web.get('/api/v1/system/is_alive', is_alive),
        web.get('/api/v1/producer/send', send_to_kafka_topic),
        web.get('/api/v1/consumer/start', start_consumer),
        web.get('/api/v1/consumer/stop', stop_consumer),
        web.get('/api/v1/postgres/events', get_events_from_pg),
    ]
    app.add_routes(routes)
    app.pool = await asyncpg.create_pool(
        dsn=settings.POSTGRES_URL, min_size=2, max_size=20, loop=loop)
    if settings.ENVIRONMENT != "aiven":
        app.ssl_context = None
    else:
        app.ssl_context = create_ssl_context(
            cafile=settings.SSL_CAFILE_KAFKA,
            certfile=settings.SSL_CERTFILE_KAFKA,
            keyfile=settings.SSL_KEYFILE)
    return app
async def kafka_producer_factory(config):
    """Create and start an AIOKafkaProducer from a config mapping.

    If ``config["ssl_context"]`` holds kwargs for ``create_ssl_context()``,
    it is replaced with a real SSLContext before the producer is built.
    """
    # .get() tolerates configs that omit the key entirely (the original
    # raised KeyError); falsy values still mean "no TLS".
    ssl_kwargs = config.get("ssl_context")
    if ssl_kwargs:
        config = dict(config, ssl_context=create_ssl_context(**ssl_kwargs))
    producer = aiokafka.AIOKafkaProducer(**config)
    await producer.start()
    return producer
async def kafka_consumer_factory(topic, config):
    """Create and start an AIOKafkaConsumer for ``topic`` from a config
    mapping.

    If ``config["ssl_context"]`` holds kwargs for ``create_ssl_context()``,
    it is replaced with a real SSLContext before the consumer is built.
    """
    # .get() tolerates configs that omit the key entirely (the original
    # raised KeyError); falsy values still mean "no TLS".
    ssl_kwargs = config.get("ssl_context")
    if ssl_kwargs:
        config = dict(config, ssl_context=create_ssl_context(**ssl_kwargs))
    consumer = aiokafka.AIOKafkaConsumer(topic, **config)
    await consumer.start()
    return consumer
def ssl_context(self):
    """Return an SSL context when cafile, certfile and keyfile are all
    set; otherwise None (plaintext connection)."""
    if not (self.cafile and self.certfile and self.keyfile):
        return None
    return helpers.create_ssl_context(
        cafile=self.cafile,
        certfile=self.certfile,
        keyfile=self.keyfile,
    )
def _set_kafka_ssl_legacy(self):
    """Configure SASL_SSL with SCRAM-SHA-512 credentials taken from the
    environment (KAFKA_CA_CERT, KAFKA_USERNAME, KAFKA_PASSWORD)."""
    self.security_protocol = "SASL_SSL"
    ca_cert = os.getenv("KAFKA_CA_CERT", "/opt/certs/kafka-cacert")
    self.ssl_context = create_ssl_context(cafile=ca_cert)
    self.sasl_mechanism = "SCRAM-SHA-512"
    # Empty-string defaults keep the attributes defined even when the
    # environment variables are absent.
    self.sasl_plain_username = os.getenv("KAFKA_USERNAME", "")
    self.sasl_plain_password = os.getenv("KAFKA_PASSWORD", "")
def create_ssl_context(self):
    """Build a client TLS context from the certificate files under
    ``ssl_folder``, with hostname checking turned off."""
    folder = self.ssl_folder
    ctx = create_ssl_context(
        cafile=str(folder / "ca-cert"),
        certfile=str(folder / "cl_client.pem"),
        keyfile=str(folder / "cl_client.key"),
        password="******",
    )
    # NOTE(review): check_hostname=False weakens TLS verification —
    # presumably intentional for local/test brokers; confirm before
    # reusing this context elsewhere.
    ctx.check_hostname = False
    return ctx
def get_kafka_security_context() -> SSLContext:
    """Get the security context required to connect to Kafka.

    Uses the module-level KAFKA_CA / KAFKA_CERT / KAFKA_KEY paths.

    :return: The security context required to connect to Kafka.
    """
    context = create_ssl_context(
        cafile=KAFKA_CA,
        certfile=KAFKA_CERT,
        keyfile=KAFKA_KEY,
    )
    return context
def kafka_ssl_context():
    """Build an SSL context from KAFKA_SETTINGS, or None on any failure.

    NOTE(review): the bare ``except Exception`` silently downgrades any
    error (bad paths, wrong password, even typos) to a None context —
    consider logging the exception.
    """
    try:
        return create_ssl_context(
            cafile=KAFKA_SETTINGS.get("cafile"),
            certfile=KAFKA_SETTINGS.get("certfile"),
            keyfile=KAFKA_SETTINGS.get("keyfile"),
            password=KAFKA_SETTINGS.get("cert_password"),
        )
    except Exception:
        return None
def get_context(config):
    """Return an SSL context when ``config.proto`` is 'SSL', otherwise
    None (plaintext)."""
    if config.proto != 'SSL':
        return None
    return create_ssl_context(
        cafile=config.cafile,
        certfile=config.certfile,
        keyfile=config.keyfile,
        password=config.password,
    )
async def kafka_consumer(
    kafka_servers: str,
    kafka_topic: str,
    queue: asyncio.Queue[Response],
    *,
    deserializer: Optional[Callable] = None,
    kafka_ssl_cafile: str = None,
    kafka_ssl_certfile: str = None,
    kafka_ssl_keyfile: str = None,
) -> None:
    """
    kafka_consumer reads data from kafka and send it to a queue
    """
    loop = asyncio.get_event_loop()
    kafka_kwargs = {
        "loop": loop,
        "bootstrap_servers": kafka_servers,
        "client_id": "client-storage",
        "group_id": "my-group",
        "enable_auto_commit": True,
        "auto_commit_interval_ms": 1000,  # Autocommit every second
        "auto_offset_reset": "earliest",  # start from beginning
        "value_deserializer": deserializer,
    }
    if not kafka_ssl_cafile:
        consumer = AIOKafkaConsumer(kafka_topic, **kafka_kwargs)
    else:
        context = create_ssl_context(
            cafile=kafka_ssl_cafile,
            certfile=kafka_ssl_certfile,
            keyfile=kafka_ssl_keyfile,
        )
        consumer = AIOKafkaConsumer(
            kafka_topic,
            security_protocol="SSL",
            ssl_context=context,
            **kafka_kwargs,
        )
    await consumer.start()
    try:
        # Consume messages
        async for msg in consumer:
            if msg.value is not None:
                logger.debug(f"Message received: {msg.value} at {msg.timestamp}")
                try:
                    # This coroutine runs on the queue's event loop, so a
                    # direct put_nowait is safe and raises QueueFull here.
                    # The original deferred the put via
                    # call_soon_threadsafe, which meant QueueFull was
                    # raised later inside the loop callback and this
                    # except clause could never fire.
                    queue.put_nowait(msg.value)
                except asyncio.QueueFull as err:
                    # NOTE(review): brace-style placeholder suggests a
                    # loguru-style logger — confirm; stdlib logging would
                    # not interpolate it.
                    logger.error("queue is full cannot send a response - {}", err)
    finally:
        # Will leave consumer group; perform autocommit if enabled.
        await consumer.stop()
def _kafka_ssl(sslconf: Dict): ret = {} if sslconf.get('enabled', False): context = create_ssl_context( cafile=sslconf['cafile'], certfile=sslconf['certfile'], keyfile=sslconf['keyfile'], password=sslconf['password'], ) ret['security_protocol'] = 'SSL' ret['ssl_context'] = context return ret
def from_file(path: Path = CONFIG_PATH) -> "KafkaConfig":
    """Load Kafka settings from a YAML file and attach an SSL context
    built from the certificate files in ``KAFKA_CONFIG_DIR``."""
    with open(str(path), "r") as file:
        config_yml: Dict = yaml.safe_load(file)
    ssl_context: SSLContext = create_ssl_context(
        cafile=KAFKA_CONFIG_DIR / "ca.pem",
        certfile=KAFKA_CONFIG_DIR / "service.cert",
        keyfile=KAFKA_CONFIG_DIR / "service.key",
    )
    kafka_section = config_yml["kafka"]
    return KafkaConfig(
        bootstrap_servers=kafka_section["bootstrap_servers"],
        metrics_topic=kafka_section["metrics_topic"],
        ssl_context=ssl_context,
    )
def get_ssl_context(cafile, certfile, keyfile):
    """Create an SSL context for a mutual-TLS Kafka connection.

    :param cafile: CA used to sign the certificate.
    :param certfile: signed client certificate.
    :param keyfile: private key file of the ``certfile`` certificate.
    :return: the configured SSL context.
    """
    context = create_ssl_context(cafile=cafile,
                                 certfile=certfile,
                                 keyfile=keyfile)
    return context
def test_create_ssl_context(self):
    # Verifies create_ssl_context() argument handling on Python 3.3,
    # where `cadata` is unsupported and must raise ValueError.
    # TODO: I would really want to check that proper certificates load, but
    # the API is fuzzy, and 3.3 misses a lot of functions, so
    # for now this test only checks that no error are raised during
    # context creation...
    cafile, certfile, keyfile = self._check_ssl_dir()
    context = create_ssl_context()
    self.assertEqual(context.verify_mode, ssl.CERT_REQUIRED)
    context = create_ssl_context(cafile=str(cafile))
    self.assertEqual(context.verify_mode, ssl.CERT_REQUIRED)
    context = create_ssl_context(cafile=str(cafile),
                                 certfile=str(certfile),
                                 keyfile=str(keyfile),
                                 password="******")
    self.assertEqual(context.verify_mode, ssl.CERT_REQUIRED)
    # NOTE(review): assertRaisesRegexp is the deprecated alias of
    # assertRaisesRegex — kept as-is since this targets old Pythons.
    with self.assertRaisesRegexp(ValueError,
                                 "`cadata` not supported by Python3.3"):
        with cafile.open("rb") as f:
            data = f.read()
        create_ssl_context(cadata=data.decode("ascii"))
def __init__(self, config: Config, logger: logging.Logger,
             event_loop: asyncio.AbstractEventLoop,
             queue: asyncio.Queue):
    """Set up a mutual-TLS Kafka producer from ``config.kafka``."""
    super().__init__(config, logger, event_loop, queue)
    # Mutual TLS: CA bundle plus client cert/key (key password supplied
    # separately as `passwd`).
    context = create_ssl_context(
        cafile=self.config.kafka.cafile,
        certfile=self.config.kafka.cert,
        keyfile=self.config.kafka.key,
        password=self.config.kafka.passwd,
    )
    # NOTE(review): the `loop` kwarg is deprecated/removed in newer
    # aiokafka releases — confirm the pinned version still accepts it.
    self.producer = aiokafka.AIOKafkaProducer(
        loop=self.loop,
        bootstrap_servers=self.config.kafka.servers,
        security_protocol="SSL",
        ssl_context=context,
    )
def __post_init__(self):
    """Create (and keep) an AIOKafkaProducer: TLS when a CA file is
    configured, plaintext otherwise."""
    # NOTE(review): get_event_loop() and the `loop` kwarg are deprecated
    # in newer asyncio/aiokafka — confirm the pinned versions accept them.
    loop = asyncio.get_event_loop()
    if not self.kafka_ssl_cafile:
        # No CA configured: plain, unencrypted connection.
        self.producer = AIOKafkaProducer(
            loop=loop,
            bootstrap_servers=self.bootstrap_servers)
    else:
        context = create_ssl_context(
            cafile=self.kafka_ssl_cafile,
            certfile=self.kafka_ssl_certfile,
            keyfile=self.kafka_ssl_keyfile,
        )
        self.producer = AIOKafkaProducer(
            loop=loop,
            bootstrap_servers=self.bootstrap_servers,
            security_protocol="SSL",
            ssl_context=context,
        )
def _setup_connection(self) -> aiokafka.AIOKafkaConsumer:
    """Build (but do not start) the metrics consumer, enabling mutual
    TLS when ``kafka_ssl_auth`` is set in the settings."""
    topics = self.settings.metrics_topics.split(",")
    kwargs = {"bootstrap_servers": self.settings.bootstrap_servers}
    if self.settings.kafka_ssl_auth:
        # Certificate paths are fixed, relative to the working directory.
        kwargs["ssl_context"] = create_ssl_context(
            cafile="init/kafka/ca.pem",
            certfile="init/kafka/service.cert",
            keyfile="init/kafka/service.key",
        )
        kwargs["security_protocol"] = "SSL"
    return aiokafka.AIOKafkaConsumer(*topics, **kwargs)
async def get_aiokafka_consumer(
    topics: List[str],
    bootstrap_servers: List[str] = None,
    security_protocol: str = None,
    ssl_cafile: str = None,
    ssl_certfile: str = None,
    ssl_keyfile: str = None,
    sasl_mechanism: str = None,
    sasl_plain_username: str = None,
    sasl_plain_password: str = None,
    group_id: str = None,
    auto_offset_reset="latest",
    enable_auto_commit: bool = False,
    offset: int = 0,
):
    """
    Simply create and return a KafkaConsumer using given arguments.
    Use seek_to_offset() to subscribe to given topic(s) and seek to default offset 0.
    Note: the consumer is already started here, thus it suffices to simply
    start consuming messages in the main app.
    """
    # Fall back to the certifi CA bundle when no CA file is given, so TLS
    # verification still has a trust root.
    ssl_cafile = ssl_cafile or certifi.where()
    ssl_context = create_ssl_context(cafile=ssl_cafile,
                                     certfile=ssl_certfile,
                                     keyfile=ssl_keyfile)
    kc = AIOKafkaConsumer(
        *topics,
        bootstrap_servers=bootstrap_servers,
        security_protocol=security_protocol,
        ssl_context=ssl_context,
        sasl_mechanism=sasl_mechanism,
        sasl_plain_username=sasl_plain_username,
        sasl_plain_password=sasl_plain_password,
        group_id=group_id,
        auto_offset_reset=auto_offset_reset,
        enable_auto_commit=enable_auto_commit,
    )
    logging.info("Starting KafkaConsumer and subscribing to topics: {}".format(topics))
    await kc.start()
    # Position every subscribed topic at the requested starting offset.
    for topic in topics:
        await seek_to_offset(kc, topic, offset)
    return kc
def createSSLConext(cafile, certfile, keyfile):
    """Create an SSL context for Kafka, exiting the process when the
    certificate files are missing or invalid.

    :param cafile: CA bundle path.
    :param certfile: signed client certificate path.
    :param keyfile: private key path for ``certfile``.
    :return: the SSL context on success; calls ``sys.exit(1)`` on the
        two handled errors.
    """
    # NOTE(review): the original returned / exited from a ``finally``
    # block, which also converted any *unexpected* exception into
    # sys.exit(1), hiding it entirely. Restructured so only the two
    # handled error types terminate the process; everything else
    # propagates with its traceback.
    try:
        return create_ssl_context(cafile=cafile, certfile=certfile,
                                  keyfile=keyfile)
    except FileNotFoundError as error:
        logger.critical(
            'File not found while creating SSL context for Kafka - ensure your CA, Certificate file and Private Key are configured: %s'
            % (error, ))
    except SSLError as error:
        # Message typo fixed: "contact" -> "context".
        logger.critical(
            'Unable to create SSL context for Kafka - ensure your CA, Certificate file and Private key are valid: %s'
            % (error, ))
    sys.exit(1)
async def consume(pool, kafka_topic):
    """Consume messages from ``kafka_topic`` and dispatch each one to
    ``handle_message()`` as a concurrent task, using TLS when
    KAFKA_SSL_CA is configured.

    :param pool: database pool handed through to the message handler.
    :param kafka_topic: topic to subscribe to.
    """
    ssl_context = None
    security_protocol = 'PLAINTEXT'
    if KAFKA_SSL_CA is not None:
        ssl_context = create_ssl_context(cafile=KAFKA_SSL_CA,
                                         certfile=KAFKA_SSL_CERT,
                                         keyfile=KAFKA_SSL_KEY)
        security_protocol = 'SSL'
    consumer = AIOKafkaConsumer(kafka_topic,
                                auto_offset_reset='latest',
                                value_deserializer=deserializer,
                                bootstrap_servers=kafka_host,
                                security_protocol=security_protocol,
                                ssl_context=ssl_context)
    await consumer.start()
    try:
        async for msg in consumer:
            # Fire-and-forget: handling runs concurrently with consumption.
            asyncio.create_task(handle_message(pool, msg))
    finally:
        # The original's `except Exception as e: raise e` was a no-op
        # that only mangled the traceback; exceptions now propagate
        # naturally and the consumer is still stopped on the way out.
        await consumer.stop()
import asyncio from aiokafka import AIOKafkaProducer, AIOKafkaConsumer from aiokafka.helpers import create_ssl_context from kafka.common import TopicPartition context = create_ssl_context( cafile="./ca-cert", # CA used to sign certificate. # `CARoot` of JKS store container certfile="./cert-signed", # Signed certificate keyfile="./cert-key", # Private Key file of `certfile` certificate password="******" ) async def produce_and_consume(loop): # Produce producer = AIOKafkaProducer( loop=loop, bootstrap_servers='localhost:9093', security_protocol="SSL", ssl_context=context) await producer.start() try: msg = await producer.send_and_wait( 'my_topic', b"Super Message", partition=0) finally: await producer.stop() consumer = AIOKafkaConsumer( "my_topic", loop=loop, bootstrap_servers='localhost:9093', security_protocol="SSL", ssl_context=context) await consumer.start() try:
async def main():
    """Entry point: parse CLI arguments, configure logging, create the
    Postgres pool and a TLS Kafka producer, then run the site checks.

    Exits via the finally block with the producer stopped and the pool
    terminated.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    default_conf = "site_checker.yaml"
    parser.add_argument(
        "--config-path",
        "-c",
        default=default_conf,
        help="specify path to site checker YAML config",
    )
    parser.add_argument(
        "--log-level",
        "-l",
        default="info",
        choices=("info", "debug", "error", "warning"),
    )
    parser.add_argument(
        "--check-site-id",
        type=int,
        default=None,
        help="specify check_site_info.id for check(for tests)",
    )
    parser.add_argument(
        "--max-check-count",
        type=int,
        default=None,
        # Typo fixed: "maximium" -> "maximum".
        help="specify maximum number of checks to run(for tests)",
    )
    args = parser.parse_args()
    logging.basicConfig(
        level=LOG_LEVELS[args.log_level],
        format=
        "%(levelname)s, %(asctime)s, %(filename)s +%(lineno)s, %(message)s",
    )
    # Close the config file deterministically — the original leaked the
    # handle by passing open() straight into safe_load().
    with open(args.config_path) as config_file:
        conf = yaml.safe_load(config_file)
    db_conf = {
        "database": conf["database"]["name"],
        "host": conf["database"]["host"],
        "port": conf["database"]["port"],
        "user": conf["database"]["user"],
        "password": conf["database"]["password"],
    }
    if conf["database"]["ssl"]:
        # Database TLS uses the stdlib default context with the
        # configured CA plus the certifi bundle as capath.
        db_ssl_ctx = ssl.create_default_context(
            cafile=conf["database"]["cafile"], capath=certifi.where())
        db_conf["ssl"] = db_ssl_ctx
    db_pool = await asyncpg.create_pool(
        min_size=conf["database"]["pool_min_size"],
        max_size=conf["database"]["pool_max_size"],
        **db_conf,
    )
    kafka_server = "{}:{}".format(conf["kafka"]["host"], conf["kafka"]["port"])
    kafka_ssl_context = create_ssl_context(
        cafile=conf["kafka"]["cafile"],      # CA used to sign certificate
        certfile=conf["kafka"]["certfile"],  # signed client certificate
        keyfile=conf["kafka"]["keyfile"],    # private key of `certfile`
    )
    kafka_producer = AIOKafkaProducer(
        bootstrap_servers=kafka_server,
        security_protocol="SSL",
        ssl_context=kafka_ssl_context,
    )
    await kafka_producer.start()
    # Shared context object handed to the check runners.
    context = {
        "args": args,
        "conf": conf,
        "db_pool": db_pool,
        "kafka_producer": kafka_producer,
        "event_loop": asyncio.get_event_loop(),
        "check_stats": CheckStats(total_limit=args.max_check_count),
    }
    try:
        if args.check_site_id:
            await run_one_check(context)
        else:
            await run_checks(context)
    finally:
        await kafka_producer.stop()
        db_pool.terminate()
def create(self):
    """Create SSL context based on this description.

    Every field of this dataclass is forwarded as a keyword argument to
    ``create_ssl_context``.
    """
    kwargs = asdict(self)
    return create_ssl_context(**kwargs)
import json
import re
from typing import Dict

from aiohttp import ClientSession, TraceConfig
from aiokafka import AIOKafkaProducer
from aiokafka.helpers import create_ssl_context

from config import kafka_topic
from utils.logger import init_sys_logger

logger = init_sys_logger(__name__)

# Module-level TLS context shared by all Kafka connections.
# NOTE(review): certificate paths are relative to the working directory —
# confirm the process is always started from the project root.
context = create_ssl_context(
    cafile="./.ca-cert",
    certfile="./.cert-signed",
    keyfile="./.cert-key",
)


class Website:
    """ Class for checking info and storing check data """

    def __init__(self, url: str, message_broker: str, regexp: str):
        # Target URL to check and the regexp looked for in its response.
        self.url = url
        self.message_broker = message_broker
        self.regexp = regexp
        # -1 sentinels mean "no check performed yet".
        self.latency = -1
        self.error_code = -1
        # aiohttp tracing hook, presumably used to measure latency —
        # verify against the request method (not visible here).
        self.trace_config = TraceConfig()
def create_ssl_context(self):
    """Return a client TLS context built from the ca-cert,
    cl_client.pem and cl_client.key files under ``ssl_folder``."""
    base = self.ssl_folder
    return create_ssl_context(
        cafile=str(base / "ca-cert"),
        certfile=str(base / "cl_client.pem"),
        keyfile=str(base / "cl_client.key"),
        password="******",
    )