Example #1
 def test_legacy_wrap_socket(self):
     """Test socket-wrapping without SSLContext"""
     config = SslConfig(cafile=self.kafka.certs.root_cert)
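     # swap in the legacy wrapper so wrapping happens without an SSLContext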
     config._wrap_socket = config._legacy_wrap_socket()
     client = KafkaClient(self.kafka.brokers_ssl, ssl_config=config,
                          broker_version=kafka_version)
     self.roundtrip_test(client)
Example #2
    def __init__(self,
                 kafka_host,
                 kafka_topic,
                 pg_host,
                 pg_port,
                 pg_username,
                 pg_password,
                 logger=None):
        self.logger = logger if logger else logging.getLogger(
            self.__class__.__name__)

        config = SslConfig(cafile='./ca.pem',
                           certfile='./service.cert',
                           keyfile='./service.key')

        self.kafka_client = KafkaClient(hosts=kafka_host, ssl_config=config)
        self.logger.log(logging.INFO, 'connected to kafka')

        self.pg_con = psycopg2.connect(host=pg_host,
                                       port=pg_port,
                                       user=pg_username,
                                       password=pg_password,
                                       dbname="os_metrics",
                                       sslmode='require')
        self.logger.log(logging.INFO, 'connected to pgsql')

        self.KAFKA_TOPIC = kafka_topic
Example #3
def create_certificate():
    """
    Creating certificate
    :return certificate:
    :return type: :class: `pykafka.connection.SslConfig`
    """
    ca_path = 'ca.cert'  # type: ignore
    client_path = 'client.cert'
    client_key_path = 'client_key.key'
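    # SslConfig takes file paths, so any in-memory PEM data (the CA_CERT,
    # CLIENT_CERT and CLIENT_CERT_KEY globals) is written to disk first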
    if CA_CERT:
        with open(ca_path, 'wb') as file:
            file.write(CA_CERT)
            ca_path = os.path.abspath(ca_path)
        if CLIENT_CERT:
            with open(client_path, 'wb') as file:
                file.write(CLIENT_CERT)
                client_path = os.path.abspath(client_path)
        else:
            client_path = None  # type: ignore
        if CLIENT_CERT_KEY:
            with open(client_key_path, 'wb') as file:
                file.write(CLIENT_CERT_KEY)
        else:
            client_key_path = None  # type: ignore
    else:
        # without a CA cert nothing was written to disk, so clear every path
        ca_path = None  # type: ignore
        client_path = None  # type: ignore
        client_key_path = None  # type: ignore

    return SslConfig(cafile=ca_path,
                     certfile=client_path,
                     keyfile=client_key_path,
                     password=PASSWORD)
Example #4
def create_certificate(ca_cert=None,
                       client_cert=None,
                       client_cert_key=None,
                       password=None):
    """
    Creating certificate
    :return certificate:
    :return type: :class: `pykafka.connection.SslConfig`
    """
    ca_path = None
    client_path = None
    client_key_path = None
    if ca_cert:
        ca_path = 'ca.cert'  # type: ignore
        with open(ca_path, 'wb') as file:
            file.write(ca_cert)
            ca_path = os.path.abspath(ca_path)
    if client_cert:
        client_path = 'client.cert'
        with open(client_path, 'wb') as file:
            file.write(client_cert)
            client_path = os.path.abspath(client_path)
    if client_cert_key:
        client_key_path = 'client_key.key'
        with open(client_key_path, 'wb') as file:
            file.write(client_cert_key)
    return SslConfig(cafile=ca_path,
                     certfile=client_path,
                     keyfile=client_key_path,
                     password=password)
Example #5
 def test_ca_only(self):
     """Connect with CA cert only (ie no client cert)"""
     config = SslConfig(cafile=self.kafka.certs.root_cert)
     client = KafkaClient(self.kafka.brokers_ssl,
                          ssl_config=config,
                          broker_version=kafka_version)
     self.roundtrip_test(client)
Example #6
File: __init__.py Project: Temikus/esque
 def _get_pykafka_ssl_conf(self) -> Optional[SslConfig]:
     if not self.ssl_enabled:
         return None
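     # cafile may be None here; the optional parameters are added only when configured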
     ssl_params = {"cafile": self.ssl_params.get("cafile", None)}
     if self.ssl_params.get("certfile"):
         ssl_params["certfile"] = self.ssl_params["certfile"]
     if self.ssl_params.get("keyfile"):
         ssl_params["keyfile"] = self.ssl_params["keyfile"]
     if self.ssl_params.get("password"):
         ssl_params["password"] = self.ssl_params["password"]
     return SslConfig(**ssl_params)
Example #7
File: def_kafka.py Project: danlg/zato
    def _init_client(self):

        with self.update_lock:

            if self.is_connected:
                return

            # TLS is optional
            if self.config.is_tls_enabled:
                tls_config = SslConfig(
                    **{
                        'certfile': self.config.tls_cert_file,
                        'keyfile': self.config.tls_private_key_file,
                        'password': self.config.tls_pem_passphrase,
                        'cafile': self.config.tls_ca_certs_file,
                    })
            else:
                tls_config = None

            # Our server list needs to be reformatted in accordance with what KafkaClient expects
            # and it may be turned into a Kafka or ZooKeeper server list.

            server_list = self.config.server_list.splitlines()
            server_list = ','.join(server_list)

            if self.config.should_use_zookeeper:
                hosts = None
                zookeeper_hosts = server_list
            else:
                hosts = server_list
                zookeeper_hosts = None

            client_config = {
                'hosts': hosts,
                'zookeeper_hosts': zookeeper_hosts,
                'socket_timeout_ms': self.config.socket_timeout * 1000,
                'offsets_channel_socket_timeout_ms': self.config.offset_timeout * 1000,
                'use_greenlets': True,
                'exclude_internal_topics': self.config.should_exclude_internal_topics,
                'source_address': self.config.source_address or '',
                'ssl_config': tls_config,
                'broker_version': self.config.broker_version,
            }

            # Create the actual connection object
            self._impl = KafkaClient(**client_config)

            # Confirm the connection was established
            self.ping()

            # We can assume we are connected now
            self.is_connected = True
Example #8
 def test_client_cert(self):
     """Connect with client certificate"""
     # This would be a more potent test if we could on-the-fly reconfigure
     # the test cluster to refuse connections without client certs, but
     # that's not easy to achieve with our current setup
     certs = self.kafka.certs
     config = SslConfig(cafile=certs.root_cert,
                        certfile=certs.client_cert,
                        keyfile=certs.client_key,
                        password=certs.client_pass)
     client = KafkaClient(self.kafka.brokers_ssl, ssl_config=config)
     self.roundtrip_test(client)
Example #9
def hello(event, context):

    # init with Certs n' stuff
    config = SslConfig(
        cafile='certs/ca.pem',
        certfile='certs/cert.pem',  # optional
        keyfile='certs/key.pem',    # optional
        # password='******',       # optional
    )
    client = KafkaClient(
        hosts="steamer-01.srvs.cloudkafka.com:9093,"
              "steamer-03.srvs.cloudkafka.com:9093,"
              "steamer-02.srvs.cloudkafka.com:9093",
        ssl_config=config)

    topic = client.topics[b"47x9-default"]
    message = ''

    with topic.get_sync_producer() as producer:
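        # produce() takes the payload first; the second positional
        # argument is the partition key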
        for i in range(100):
            message = 'test message ' + str(i)

            producer.produce(message.encode(), b'data')

    # with topic.get_producer(delivery_reports=True) as producer:
    #   count = 0
    #   while True:
    #       count += 1
    #       producer.produce('test msg', partition_key='{}'.format(count))
    #       if count % 10 ** 5 == 0:  # adjust this or bring lots of RAM ;)
    #           while True:
    #               try:
    #                   msg, exc = producer.get_delivery_report(block=False)
    #                   if exc is not None:
    #                       print('Failed to deliver msg {}: {}'.format(msg.partition_key, repr(exc)))
    #                   else:
    #                       print('Successfully delivered msg {}'.format(msg.partition_key))
    #               except: # Queue.Empty:
    #                   break

    body = {
        "message": "Go Serverless v1.0! Your function executed successfully!",
        "input": event
    }

    response = {"statusCode": 200, "body": json.dumps(body)}

    return response

Example #10
    def __init__(self,
                 kafka_host,
                 kafka_topic,
                 machine_identifier,
                 logger=None):
        self.logger = logger if logger else logging.getLogger(
            self.__class__.__name__)

        self.KAFKA_TOPIC = kafka_topic
        self.MACHINE_IDENTIFIER = machine_identifier

        config = SslConfig(cafile='./ca.pem',
                           certfile='./service.cert',
                           keyfile='./service.key')

        self.kafka_client = KafkaClient(hosts=kafka_host, ssl_config=config)

        self.logger.log(logging.INFO, 'connected to kafka')
Example #11
parser.add_argument('--keyfile',
                    dest='keyfile',
                    help='The private key file if using a SSL context')

args = parser.parse_args()

brokers = args.brokers
topic = args.topic
is_using_latest_offset = args.follow
cafile = args.cafile
certfile = args.certfile
keyfile = args.keyfile
message_count = 0

# Enable TLS only when all three certificate files were supplied
if cafile is not None and certfile is not None and keyfile is not None:
    config = SslConfig(cafile=cafile, certfile=certfile, keyfile=keyfile)
    client = KafkaClient(hosts=brokers, ssl_config=config)
else:
    client = KafkaClient(hosts=brokers)

topic = client.topics[str.encode(topic)]

signal.signal(signal.SIGINT, signal_handler)

auto_offset_reset = OffsetType.LATEST if is_using_latest_offset else OffsetType.EARLIEST
consumer = topic.get_simple_consumer(auto_offset_reset=auto_offset_reset)
for message in consumer:
    if message is not None:
        print(message.offset, message.value)
        message_count += 1
Example #12
 def ssl_config(self) -> SslConfig:
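     # build the SslConfig lazily on first access and cache it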
     if self._ssl_config is None:
         self._ssl_config = SslConfig(cafile=self._config.cafile,
                                      certfile=self._config.certfile,
                                      keyfile=self._config.keyfile)
     return self._ssl_config
Example #13
    'with OpenSSL version:',
    OPENSSL_VERSION
]

print('\n'.join([lineblock]*3+message+[lineblock]*3))

sleep(1)

stdout.flush()

sleep(1)

import logging
import os
logging.basicConfig(level=os.environ.get("LOGLEVEL", "DEBUG"))

KAFKA_ADDRESS = 'kafka:9092' # Probably 'kafka:9092' or 'kafka:9093'
SSL_PATH = 'mre.pem' # if not using, change to None

from pykafka import KafkaClient, SslConfig
import pykafka

if SSL_PATH is not None:
    ssl_config = SslConfig(
        cafile=SSL_PATH,
        certfile=SSL_PATH,
        keyfile=SSL_PATH,
    )
else:
    ssl_config = None
    
client = KafkaClient(KAFKA_ADDRESS, ssl_config=ssl_config)
Example #14
    def __init__(self,
                 brokers=[],
                 zookeeper_server="",
                 topic="",
                 initial_offset=OffsetType.EARLIEST,
                 consumer_group="",
                 use_rdkafka=False,
                 kafka_use_ssl=False,
                 kafka_ssl_cafile="",
                 kafka_ssl_certfile="",
                 kafka_ssl_keyfile="",
                 kafka_ssl_password="",
                 splunk_server="",
                 splunk_hec_port="8088",
                 splunk_hec_channel="",
                 splunk_hec_token="",
                 splunk_sourcetype="",
                 splunk_source="",
                 use_https=True,
                 verify_ssl=True,
                 use_compression=False,
                 compresslevel=9,
                 batch_size=1024,
                 retry_attempts=5,
                 sleeptime=60,
                 max_sleeptime=300,
                 sleepscale=1.5,
                 jitter=1,
                 loglevel="warning"):
        """
        Keyword Arguments:
        brokers (list) -- list of Kafka brokers <host>:<port>
        zookeeper_server (string) -- zookeeper server of Kafka cluster
        topic (string) --  Kafka topic to be consumed
        initial_offset (OffsetType) -- Initial offset for new topic: earliest or latest offset (default: earliest)
        consumer_group (string) -- Arbitrary group name to manage balanced consumption of topic
        use_rdkafka (boolean) -- Use librdkafka for speed increase
        kafka_use_ssl (boolean) -- Use SSL to communicate with secured Kafka brokers
        kafka_ssl_cafile (string) -- Path to trusted CA certificate
        kafka_ssl_certfile (string) --  Path to client certificate
        kafka_ssl_keyfile (string) --  Path to client private-key file
        kafka_ssl_password (string) -- Password for private key
        splunk_server (string) -- Hostname or IP address of HEC server or load balancer
        splunk_hec_port (string) -- Port of Splunk HEC server (default 8088)
        splunk_hec_channel (string) -- UUID used by Splunk HEC on per application basis
        splunk_hec_token (string) -- UUID of token generated by Splunk for HEC input
        splunk_sourcetype (string) -- Splunk sourcetype for data source
        splunk_source (string) -- Splunk source
        use_https (boolean) -- Use HTTPS or HTTP protocol to send to HEC
        verify_ssl (boolean) -- Verify SSL certificate of Splunk HEC endpoint (default True)
        use_compression (boolean) -- Use gzip compression sending data to HEC
        compresslevel (int) -- Compression level 0-9; 0=none, 1=fastest/least, 9=slowest/most (default: 9)
        batch_size (int) -- Number of messages to consume before attempting to send to Splunk HEC
        retry_attempts (int) -- Number of retry attempts before quitting (default 5)
        sleeptime (int) -- Sleeptime between retries (default 60)
        max_sleeptime (int) --  Maximum sleeptime for any retry (default 300)
        sleepscale (float) -- Sleep time multiplier applied to each iteration
        jitter (int) -- Random jitter introduced to each iteration, random between [-jitter, +jitter]
        loglevel (string) -- (debug|info|warning|error|critical)

        Returns:
        kafkaConsumer class instance
        """
        self.messages = []
        self.brokers = brokers

        # Create SSL client if configured
        if kafka_use_ssl:
            self.kafka_ssl_config = SslConfig(cafile=kafka_ssl_cafile,
                                              certfile=kafka_ssl_certfile,
                                              keyfile=kafka_ssl_keyfile,
                                              password=kafka_ssl_password)
            self.client = KafkaClient(hosts=','.join(self.brokers),
                                      ssl_config=self.kafka_ssl_config)
        # Create plaintext client
        else:
            self.client = KafkaClient(hosts=','.join(self.brokers))

        self.zookeeper_server = zookeeper_server
        self.topic = topic
        # initial_offset may be an OffsetType constant (the default) or the
        # string 'earliest'/'latest'; only strings support .lower()
        if isinstance(initial_offset, str):
            self.initial_offset = (OffsetType.EARLIEST
                                   if initial_offset.lower() == 'earliest'
                                   else OffsetType.LATEST)
        else:
            self.initial_offset = initial_offset
        self.consumer_group = consumer_group
        self.use_rdkafka = use_rdkafka
        self.splunk_server = splunk_server
        self.splunk_hec_port = splunk_hec_port
        self.splunk_hec_channel = splunk_hec_channel
        self.splunk_hec_token = splunk_hec_token
        self.splunk_sourcetype = splunk_sourcetype
        self.splunk_source = splunk_source
        self.use_https = use_https
        self.verify_ssl = verify_ssl
        self.use_compression = use_compression
        self.compresslevel = compresslevel
        self.batch_size = batch_size
        self.retry_attempts = retry_attempts
        self.sleeptime = sleeptime
        self.max_sleeptime = max_sleeptime
        self.sleepscale = sleepscale
        self.jitter = jitter
        self.loglevel = loglevel
        self.consumer_started = False
        self.loglevels = {
            'debug': logging.DEBUG,
            'info': logging.INFO,
            'warning': logging.WARNING,
            'error': logging.ERROR,
            'critical': logging.CRITICAL
        }
        self.initLogger()
Example #15
import json
from django.conf import settings
from pykafka import KafkaClient, SslConfig

kafka_settings = settings.KAFKA
config = None
product_producer = None


def json_serializer(message, partition_key):
    return json.dumps(message).encode('utf-8'), partition_key


# Build an SslConfig only when SSL settings are present; otherwise connect in plaintext
if kafka_settings.get('SSL', None):
    ssl_settings = kafka_settings['SSL']
    config = SslConfig(cafile=ssl_settings['CAFILE'],
                       certfile=ssl_settings.get('CERTFILE', None),
                       keyfile=ssl_settings.get('KEYFILE', None),
                       password=ssl_settings.get('PASSWORD', None))

client = KafkaClient(hosts=",".join(kafka_settings['HOSTS']),
                     ssl_config=config)

if client.topics:
    product_topic = client.topics[b'product']  # pykafka topic names are bytes
    product_producer = product_topic.get_producer(serializer=json_serializer,
                                                  min_queued_messages=1,
                                                  linger_ms=0)