# Imports assumed by this excerpt (not shown in the original):
#   from confluent_kafka import SerializingProducer
#   from confluent_kafka.schema_registry import SchemaRegistryClient
#   from confluent_kafka.schema_registry.protobuf import ProtobufSerializer
#   import sensor_pb2
def __init__(self, driver, nameSalt):
        self.driver = driver
        self.fileName = "travis_correct_confluent_protobuf_protobuf"
        self.topic = self.fileName + nameSalt

        self.sensor = sensor_pb2.SensorReading()
        self.sensor.dateTime = 1234
        self.sensor.reading = 321.321
        self.sensor.device.deviceID = "555-4321"
        self.sensor.device.enabled = True

        self.sensor.float_val = 4321.4321
        self.sensor.int32_val = (1 << 31) - 1   # max signed 32-bit value
        self.sensor.sint32_val = (1 << 31) - 1  # max signed 32-bit value
        self.sensor.sint64_val = (1 << 63) - 1  # max signed 64-bit value
        self.sensor.uint32_val = (1 << 32) - 1  # max unsigned 32-bit value

        self.sensor.bytes_val = b'\xDE\xAD'
        self.sensor.double_array_val.extend([1 / 3, 32.21, 434324321])
        self.sensor.uint64_val = (1 << 64) - 1  # max unsigned 64-bit value

        self.schema_registry_client = SchemaRegistryClient(
            {'url': driver.schemaRegistryAddress})
        self.keyProtobufSerializer = ProtobufSerializer(
            sensor_pb2.SensorReading, self.schema_registry_client)
        self.valueProtobufSerializer = ProtobufSerializer(
            sensor_pb2.SensorReading, self.schema_registry_client)
        producer_conf = {
            'bootstrap.servers': driver.kafkaAddress,
            'key.serializer': self.keyProtobufSerializer,
            'value.serializer': self.valueProtobufSerializer
        }

        self.protobufProducer = SerializingProducer(producer_conf)
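# A minimal (assumed) follow-up showing how the producer built above could be
# used; the method name "send" is illustrative, not from the original excerpt:
#
#     def send(self):
#         self.protobufProducer.produce(self.topic, key=self.sensor,
#                                       value=self.sensor)
#         self.protobufProducer.flush()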
Example #2
def test_index_encoder(msg_idx, expected_hex):
    # msg_idx and expected_hex come from a pytest.mark.parametrize decorator
    # omitted in this excerpt; requires io.BytesIO, binascii and the
    # confluent_kafka.schema_registry.protobuf serializer classes.
    buf = BytesIO()
    ProtobufSerializer._encode_uvarints(buf, msg_idx)
    buf.flush()
    # ignore array length prefix
    buf.seek(1)
    assert binascii.b2a_hex(buf.read()) == expected_hex

    # reset reader and test decoder
    buf.seek(0)
    assert msg_idx == ProtobufDeserializer._decode_index(buf)
Example #3
def test_index_encoder(msg_idx, zigzag, expected_hex):
    # Variant of the test above that also exercises zigzag (signed) varint
    # encoding; parameters again come from an omitted parametrize decorator.
    buf = BytesIO()
    ProtobufSerializer._encode_varints(buf, msg_idx, zigzag=zigzag)
    buf.flush()
    buf.seek(0)
    assert binascii.b2a_hex(buf.read()) == expected_hex

    # reset reader and test decoder
    buf.seek(0)
    decoded_msg_idx = ProtobufDeserializer._decode_index(buf, zigzag=zigzag)
    assert decoded_msg_idx == msg_idx
Example #4
def test_index_serialization(pb2):
    # Round-trips the Confluent message index for pb2's descriptor through the
    # private encode/decode helpers; pb2 comes from an omitted parametrize
    # decorator.
    msg_idx = _create_msg_index(pb2.DESCRIPTOR)
    buf = BytesIO()
    ProtobufSerializer._encode_uvarints(buf, msg_idx)
    buf.flush()

    # reset buffer cursor
    buf.seek(0)
    decoded_msg_idx = ProtobufDeserializer._decode_index(buf)
    buf.close()

    assert decoded_msg_idx == msg_idx
Example #5
def _make_serializer(self):
    return {
        SchemaType.AVRO: AvroSerializer(self.sr_client, AVRO_SCHEMA),
        SchemaType.PROTOBUF: ProtobufSerializer(ProtobufPayloadClass,
                                                self.sr_client)
    }[self.schema_type]
Example #6
def test_protobuf_deserializer_type_mismatch(kafka_cluster):
    """
    Ensures an Exception is raised when deserializing an unexpected type.

    """
    pb2_1 = PublicTestProto_pb2.TestMessage
    pb2_2 = metadata_proto_pb2.HDFSOptions

    sr = kafka_cluster.schema_registry()
    topic = kafka_cluster.create_topic("serialization-proto-refs")
    serializer = ProtobufSerializer(pb2_1, sr)
    deserializer = ProtobufDeserializer(pb2_2)

    producer = kafka_cluster.producer(key_serializer=serializer)
    consumer = kafka_cluster.consumer(key_deserializer=deserializer)
    consumer.assign([TopicPartition(topic, 0)])

    def dr(err, msg):
        print("dr msg {} {}".format(msg.key(), msg.value()))

    producer.produce(topic,
                     key=pb2_1(test_string='abc',
                               test_bool=True,
                               test_bytes=b'def'),
                     partition=0)
    producer.flush()

    with pytest.raises(ConsumeError, match="Error parsing message"):
        consumer.poll()
Example #7
def getConfigs():
    sr_client_props = {
        'url': '<CCLOUD_SR_DNS>',
        'basic.auth.user.info': '<CCLOUD_SR_KEY>:<CCLOUD_SR_SECRET>'
    }

    sr_client = SchemaRegistryClient(sr_client_props)
    value_serializer = ProtobufSerializer(FoodPreferences_pb2.PersonFood,
                                          sr_client)

    configs = {
        'bootstrap.servers': '<CCLOUD_DNS>',
        'security.protocol': 'SASL_SSL',
        'sasl.mechanism': 'PLAIN',
        'sasl.username': '******',
        'sasl.password': '******',
        'client.id': 'pythonProduction',
        'compression.type': 'zstd',
        'retries': '10',
        'linger.ms': '5',
        'key.serializer': StringSerializer('utf_8'),
        'value.serializer': value_serializer
    }

    return configs
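# Typical (assumed) usage of the configs above:
#
#     producer = SerializingProducer(getConfigs())
#     producer.produce(topic, key='key',
#                      value=FoodPreferences_pb2.PersonFood())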
Example #8
def test_protobuf_message_serialization(kafka_cluster, pb2, data):
    """
    Validates that we get the same message back that we put in.

    """
    topic = kafka_cluster.create_topic("serialization-proto")
    sr = kafka_cluster.schema_registry()

    value_serializer = ProtobufSerializer(pb2, sr,
                                          {'use.deprecated.format': False})
    value_deserializer = ProtobufDeserializer(pb2,
                                              {'use.deprecated.format': False})

    producer = kafka_cluster.producer(value_serializer=value_serializer)
    consumer = kafka_cluster.consumer(value_deserializer=value_deserializer)
    consumer.assign([TopicPartition(topic, 0)])

    expect = pb2(**data)
    producer.produce(topic, value=expect, partition=0)
    producer.flush()

    msg = consumer.poll()
    actual = msg.value()

    # all() is required here; asserting a bare non-empty list always passes
    assert all([getattr(expect, k) == getattr(actual, k) for k in data.keys()])
Example #9
    def __protobuf_serializer(self):
        schema_registry_conf = {'url': self.config['schemaregistry.url']}
        schema_registry_client = SchemaRegistryClient(schema_registry_conf)

        _proto_conf = {
            'auto.register.schemas': self.config['auto.register.schemas'],
        }

        return ProtobufSerializer(self.config['proto_msg_type'],
                                  schema_registry_client,
                                  conf=_proto_conf)
Example #10
def test_protobuf_reference_registration(kafka_cluster, pb2, expected_refs):
    """
    Registers multiple messages with dependencies then queries the Schema
    Registry to ensure the references match up.

    """
    sr = kafka_cluster.schema_registry()
    topic = kafka_cluster.create_topic("serialization-proto-refs")
    serializer = ProtobufSerializer(pb2, sr)
    producer = kafka_cluster.producer(key_serializer=serializer)

    producer.produce(topic, key=pb2(), partition=0)
    producer.flush()

    registered_refs = sr.get_schema(serializer._schema_id).references

    # list.sort() returns None, so comparing its results would always pass;
    # sorted() returns the sorted lists for a real comparison
    assert sorted(expected_refs) == sorted(ref.name for ref in registered_refs)
Example #11
def test_protobuf_serializer_type_mismatch(kafka_cluster):
    """
    Ensures an Exception is raised when deserializing an unexpected type.

    """
    pb2_1 = TestProto_pb2.TestMessage
    pb2_2 = NestedTestProto_pb2.NestedMessage

    sr = kafka_cluster.schema_registry()
    topic = kafka_cluster.create_topic("serialization-proto-refs")
    serializer = ProtobufSerializer(pb2_1, sr)

    producer = kafka_cluster.producer(key_serializer=serializer)

    with pytest.raises(KafkaException,
                       match=r"message must be of type <class"
                       r" 'TestProto_pb2.TestMessage'\> not \<class"
                       r" 'NestedTestProto_pb2.NestedMessage'\>"):
        producer.produce(topic, key=pb2_2())
Example #12
def main(args):
    topic = args.topic

    schema_registry_conf = {'url': args.schema_registry}
    schema_registry_client = SchemaRegistryClient(schema_registry_conf)

    protobuf_serializer = ProtobufSerializer(user_pb2.User,
                                             schema_registry_client,
                                             {'use.deprecated.format': True})

    producer_conf = {
        'bootstrap.servers': args.bootstrap_servers,
        'key.serializer': StringSerializer('utf_8'),
        'value.serializer': protobuf_serializer
    }

    producer = SerializingProducer(producer_conf)

    print("Producing user records to topic {}. ^C to exit.".format(topic))
    while True:
        # Serve on_delivery callbacks from previous calls to produce()
        producer.poll(0.0)
        try:
            user_name = input("Enter name: ")
            user_favorite_number = int(input("Enter favorite number: "))
            user_favorite_color = input("Enter favorite color: ")
            user = user_pb2.User(name=user_name,
                                 favorite_color=user_favorite_color,
                                 favorite_number=user_favorite_number)
            producer.produce(topic=topic,
                             partition=0,
                             key=str(uuid4()),
                             value=user,
                             on_delivery=delivery_report)
        except (KeyboardInterrupt, EOFError):
            break
        except ValueError:
            print("Invalid input, discarding record...")
            continue

    print("\nFlushing records...")
    producer.flush()
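# main() expects parsed args; a minimal (assumed) entry point, with flag names
# chosen for illustration:
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(description="ProtobufSerializer example")
    parser.add_argument('-b', dest="bootstrap_servers", required=True,
                        help="Bootstrap broker(s) (host[:port])")
    parser.add_argument('-s', dest="schema_registry", required=True,
                        help="Schema Registry (http(s)://host[:port])")
    parser.add_argument('-t', dest="topic", default="example_serde_protobuf",
                        help="Topic name")
    main(parser.parse_args())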
Example #13
from uuid import uuid4

# Protobuf generated class; resides at ./meal_pb2.py
# Create it by running
# protoc -I=. --python_out=. ./meal.proto

import meal_pb2
from confluent_kafka import SerializingProducer
from confluent_kafka.serialization import StringSerializer
from confluent_kafka.schema_registry import SchemaRegistryClient
from confluent_kafka.schema_registry.protobuf import ProtobufSerializer

topic = 'meal'
schema_registry_client = SchemaRegistryClient({'url': 'http://t620.lan:8081'})
protobuf_serializer = ProtobufSerializer(meal_pb2.Meal, schema_registry_client)

producer_conf = {
    'bootstrap.servers': 't620.lan:9092',
    'key.serializer': StringSerializer('utf_8'),
    'value.serializer': protobuf_serializer
}

producer = SerializingProducer(producer_conf)

producer.poll(0.0)

mybeer = meal_pb2.Meal.DrinkItems(drink_name="beer")
mywine = meal_pb2.Meal.DrinkItems(drink_name="wine")

meal = meal_pb2.Meal(name='pizza', drink=[mybeer, mywine])
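# The excerpt stops after building the message; a minimal (assumed) completion
# produces it and waits for delivery:
producer.produce(topic=topic, key=str(uuid4()), value=meal)
producer.flush()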
Example #14
def get_protobuf_serializer(self, msg_type, schema_registry_client, conf=None):
    return ProtobufSerializer(msg_type, schema_registry_client, conf)
Example #15
from uuid import uuid4

epic = "CHART:CS.D.EURUSD.MINI.IP:1MINUTE"
config_file = 'librdkafka.config'
topic = "eurusd"
conf = ccloud_lib.read_ccloud_config(config_file)

# Create topic if needed
ccloud_lib.create_topic(conf, topic)
delivered_records = 0
schema_registry_conf = {
    'url': conf['schema.registry.url'],
    'basic.auth.user.info': conf['schema.registry.basic.auth.user.info']
}
schema_registry_client = SchemaRegistryClient(schema_registry_conf)
protobuf_serializer = ProtobufSerializer(lightbringer_pb2.CandlePrice,
                                         schema_registry_client)
producer_conf = {
    'bootstrap.servers': conf['bootstrap.servers'],
    'sasl.mechanisms': conf['sasl.mechanisms'],
    'security.protocol': conf['security.protocol'],
    'sasl.username': conf['sasl.username'],
    'sasl.password': conf['sasl.password'],
    'key.serializer': StringSerializer('utf_8'),
    'value.serializer': protobuf_serializer
}
producer = SerializingProducer(producer_conf)
print("Producing user records to topic {}. ^C to exit.".format(topic))


#///////////////////////////////////////////////
def delivery_report(err, msg):
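    # The excerpt ends here; a minimal (assumed) callback body, consistent
    # with the delivered_records counter defined above:
    global delivered_records
    if err is not None:
        print("Delivery failed for record {}: {}".format(msg.key(), err))
        return
    delivered_records += 1
    print("Record {} produced to {} [{}] @ offset {}".format(
        msg.key(), msg.topic(), msg.partition(), msg.offset()))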