Example #1
def test_protobuf_deserializer_type_mismatch(kafka_cluster):
    """
    Ensures an Exception is raised when deserializing an unexpected type.

    """
    pb2_1 = PublicTestProto_pb2.TestMessage
    pb2_2 = metadata_proto_pb2.HDFSOptions

    sr = kafka_cluster.schema_registry()
    topic = kafka_cluster.create_topic("serialization-proto-refs")
    serializer = ProtobufSerializer(pb2_1, sr)
    deserializer = ProtobufDeserializer(pb2_2)

    producer = kafka_cluster.producer(key_serializer=serializer)
    consumer = kafka_cluster.consumer(key_deserializer=deserializer)
    consumer.assign([TopicPartition(topic, 0)])

    def dr(err, msg):
        print("dr msg {} {}".format(msg.key(), msg.value()))

    producer.produce(topic,
                     key=pb2_1(test_string='abc',
                               test_bool=True,
                               test_bytes=b'def'),
                     partition=0)
    producer.flush()

    with pytest.raises(ConsumeError, match="Error parsing message"):
        consumer.poll()
Example #2
def main(args):
    topic = args.topic

    protobuf_deserializer = ProtobufDeserializer(user_pb2.User)
    string_deserializer = StringDeserializer('utf_8')

    consumer_conf = {'bootstrap.servers': args.bootstrap_servers,
                     'key.deserializer': string_deserializer,
                     'value.deserializer': protobuf_deserializer,
                     'group.id': args.group,
                     'auto.offset.reset': "earliest"}

    consumer = DeserializingConsumer(consumer_conf)
    consumer.subscribe([topic])

    while True:
        try:
            msg = consumer.poll(1.0)
            if msg is None:
                continue

            user = msg.value()
            if user is not None:
                print(f"User record {msg.key()}:\n name: {user.name}\n"
                      f"\tfavorite_number: {user.favorite_color}\n"
                      f"\tfavorite_color: {user.favorite_number}\n")
        except KeyboardInterrupt:
            break

    consumer.close()
Example #3
def test_protobuf_message_serialization(kafka_cluster, pb2, data):
    """
    Validates that we get the same message back that we put in.

    """
    topic = kafka_cluster.create_topic("serialization-proto")
    sr = kafka_cluster.schema_registry()

    value_serializer = ProtobufSerializer(pb2, sr,
                                          {'use.deprecated.format': False})
    value_deserializer = ProtobufDeserializer(pb2,
                                              {'use.deprecated.format': False})

    producer = kafka_cluster.producer(value_serializer=value_serializer)
    consumer = kafka_cluster.consumer(value_deserializer=value_deserializer)
    consumer.assign([TopicPartition(topic, 0)])

    expect = pb2(**data)
    producer.produce(topic, value=expect, partition=0)
    producer.flush()

    msg = consumer.poll()
    actual = msg.value()

    assert all(getattr(expect, k) == getattr(actual, k) for k in data.keys())
Example #4
def main(args):
    topic = args.topic

    protobuf_deserializer = ProtobufDeserializer(user_pb2.User)
    string_deserializer = StringDeserializer('utf_8')

    consumer_conf = {'bootstrap.servers': args.bootstrap_servers,
                     'key.deserializer': string_deserializer,
                     'value.deserializer': protobuf_deserializer,
                     'group.id': args.group,
                     'auto.offset.reset': "earliest"}

    consumer = DeserializingConsumer(consumer_conf)
    consumer.subscribe([topic])

    while True:
        try:
            # SIGINT can't be handled while poll() is blocking, so limit the timeout to 1 second.
            msg = consumer.poll(1.0)
            if msg is None:
                continue

            user = msg.value()
            if user is not None:
                print("User record {}: name: {}\n"
                      "\tfavorite_number: {}\n"
                      "\tfavorite_color: {}\n"
                      .format(msg.key(), user.name,
                              user.favorite_color,
                              user.favorite_number))
        except KeyboardInterrupt:
            break

    consumer.close()
Example #5
def _make_deserializer(self):
    return {
        SchemaType.AVRO:
            AvroDeserializer(self.sr_client,
                             AVRO_SCHEMA,
                             from_dict=lambda d, _: AvroPayload(d['val'])),
        SchemaType.PROTOBUF:
            ProtobufDeserializer(ProtobufPayloadClass)
    }[self.schema_type]
Example #6
def test_index_encoder(msg_idx, expected_hex):
    buf = BytesIO()
    ProtobufSerializer._encode_uvarints(buf, msg_idx)
    buf.flush()
    # ignore array length prefix
    buf.seek(1)
    assert binascii.b2a_hex(buf.read()) == expected_hex

    # reset reader and test decoder
    buf.seek(0)
    assert msg_idx == ProtobufDeserializer._decode_index(buf)
Example #7
def test_index_encoder(msg_idx, zigzag, expected_hex):
    buf = BytesIO()
    ProtobufSerializer._encode_varints(buf, msg_idx, zigzag=zigzag)
    buf.flush()
    buf.seek(0)
    assert binascii.b2a_hex(buf.read()) == expected_hex

    # reset reader and test decoder
    buf.seek(0)
    decoded_msg_idx = ProtobufDeserializer._decode_index(buf, zigzag=zigzag)
    assert decoded_msg_idx == msg_idx
Example #8
def test_index_serialization(pb2):
    msg_idx = _create_msg_index(pb2.DESCRIPTOR)
    buf = BytesIO()
    ProtobufSerializer._encode_uvarints(buf, msg_idx)
    buf.flush()

    # reset buffer cursor
    buf.seek(0)
    decoded_msg_idx = ProtobufDeserializer._decode_index(buf)
    buf.close()

    assert decoded_msg_idx == msg_idx
Example #9
def getConfigs():
    value_deserializer = ProtobufDeserializer(FoodPreferences_pb2.PersonFood)

    configs = {
        'bootstrap.servers': '<CCLOUD_DNS>',
        'security.protocol': 'SASL_SSL',
        'sasl.mechanism': 'PLAIN',
        'sasl.username': '******',
        'sasl.password': '******',
        'group.id': 'consumingPythonWorld',
        'client.id': 'pythonConsumption',
        'key.deserializer': StringDeserializer('utf_8'),
        'value.deserializer': value_deserializer
    }

    return configs
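The dict returned by getConfigs() follows the DeserializingConsumer configuration shape used in Examples #2 and #4. A minimal consumption sketch under that assumption (the topic name and poll loop below are illustrative, not part of the original example):

from confluent_kafka import DeserializingConsumer

# Hypothetical wiring of the configs above; 'food-preferences' is an assumed topic name.
consumer = DeserializingConsumer(getConfigs())
consumer.subscribe(['food-preferences'])

while True:
    try:
        msg = consumer.poll(1.0)
        if msg is None:
            continue
        preference = msg.value()  # a deserialized FoodPreferences_pb2.PersonFood message
        if preference is not None:
            print(f"{msg.key()}: {preference}")
    except KeyboardInterrupt:
        break

consumer.close()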
Example #10
def get_protobuf_deserializer(self, message_type):
    return ProtobufDeserializer(message_type)
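A hypothetical call site for this factory method (the registry_helper instance name and the user_pb2.User message class are assumptions for illustration):

# Hypothetical usage: obtain a value deserializer from the factory and plug it
# into a DeserializingConsumer config, as in the other examples.
value_deserializer = registry_helper.get_protobuf_deserializer(user_pb2.User)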
Example #11
epic = "CHART:CS.D.EURUSD.MINI.IP:1MINUTE"

if __name__ == "__main__":
    # Read arguments and configurations and initialize
    config_file = 'librdkafka.config'
    topic = "eurusd"
    conf = ccloud_lib.read_ccloud_config(config_file)

    schema_registry_conf = {
        'url': conf['schema.registry.url'],
        'basic.auth.user.info': conf['schema.registry.basic.auth.user.info']
    }
    schema_registry_client = SchemaRegistryClient(schema_registry_conf)

    value_deserializer = ProtobufDeserializer(lightbringer_pb2.CandlePrice)
    string_deserializer = StringDeserializer('utf_8')

    # for full list of configurations, see:
    #   https://docs.confluent.io/current/clients/confluent-kafka-python/#deserializingconsumer
    consumer_conf = {
        'bootstrap.servers': conf['bootstrap.servers'],
        'sasl.mechanisms': conf['sasl.mechanisms'],
        'security.protocol': conf['security.protocol'],
        'sasl.username': conf['sasl.username'],
        'sasl.password': conf['sasl.password'],
        'key.deserializer': string_deserializer,
        'value.deserializer': value_deserializer,
        'group.id': 'consumer_test',
        'auto.offset.reset': 'earliest'
    }