def main(req: func.HttpRequest) -> func.HttpResponse:
    """HTTP-triggered entry point demonstrating a Schema Registry round trip.

    Builds a Schema Registry client and an Avro serializer on top of it,
    serializes two sample records, deserializes them back, closes the
    serializer, and returns a plain 200 response.
    """
    logging.info('Python HTTP trigger function processed a request.')

    registry_client = SchemaRegistryClient(
        endpoint=SCHEMA_REGISTRY_ENDPOINT, credential=token_credential)
    avro_serializer = SchemaRegistryAvroSerializer(registry_client, SCHEMA_GROUP)

    ben_payload, alice_payload = serialize(avro_serializer)
    deserialize(avro_serializer, ben_payload)
    deserialize(avro_serializer, alice_payload)
    avro_serializer.close()

    return func.HttpResponse(
        "Schema Registry Executed.",
        status_code=200
    )
# Example #2
    def test_basic_sr_avro_serializer(self, schemaregistry_endpoint,
                                      schemaregistry_group, **kwargs):
        """Round-trip one record through the Avro serializer and check caches.

        Serializes a dict against an explicit schema string, verifies the
        serializer's internal caches, checks the encoded frame layout
        (4 zero bytes followed by the UTF-8 schema id in bytes 4..36),
        then deserializes and compares each field.
        """
        registry_client = self.create_basic_client(
            SchemaRegistryClient, endpoint=schemaregistry_endpoint)
        serializer = SchemaRegistryAvroSerializer(
            registry_client, schemaregistry_group, auto_register_schemas=True)

        schema_str = """{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string"},{"name":"favorite_number","type":["int","null"]},{"name":"favorite_color","type":["string","null"]}]}"""
        parsed_schema = avro.schema.parse(schema_str)

        record = {
            "name": u"Ben",
            "favorite_number": 7,
            "favorite_color": u"red"
        }
        payload = serializer.serialize(record, schema_str)

        # Both the raw schema string and its canonical parsed form are cached.
        assert schema_str in serializer._user_input_schema_cache
        assert str(avro.schema.parse(schema_str)) in serializer._schema_to_id

        # Frame layout: 4 reserved zero bytes, then the 32-char schema id.
        assert payload[0:4] == b'\0\0\0\0'
        schema_id = registry_client.get_schema_id(
            schemaregistry_group, parsed_schema.fullname, "Avro",
            str(parsed_schema)).schema_id
        assert payload[4:36] == schema_id.encode("utf-8")

        assert schema_id in serializer._id_to_schema

        decoded = serializer.deserialize(payload)
        assert decoded["name"] == u"Ben"
        assert decoded["favorite_number"] == 7
        assert decoded["favorite_color"] == u"red"

        serializer.close()
# Example #3
    }

    # Schema would be automatically registered into Schema Registry and cached locally.
    payload_ben = serializer.serialize(dict_data_ben, SCHEMA_STRING)
    # The second call won't trigger a service call.
    payload_alice = serializer.serialize(dict_data_alice, SCHEMA_STRING)

    print('Encoded bytes are: ', payload_ben)
    print('Encoded bytes are: ', payload_alice)
    return [payload_ben, payload_alice]


def deserialize(serializer, bytes_payload):
    """Decode an Avro-encoded payload back into a dict and print it.

    The serializer extracts the schema id embedded in the payload and
    fetches the matching schema from Schema Registry, caching it locally;
    a payload whose schema id is already cached triggers no service call.
    """
    decoded = serializer.deserialize(bytes_payload)
    print('Deserialized data is: ', decoded)
    return decoded


if __name__ == '__main__':
    # Build a Schema Registry client and an Avro serializer on top of it.
    schema_registry = SchemaRegistryClient(endpoint=SCHEMA_REGISTRY_ENDPOINT,
                                           credential=token_credential)
    serializer = SchemaRegistryAvroSerializer(schema_registry, SCHEMA_GROUP)
    try:
        # Round-trip two sample records through the serializer.
        bytes_data_ben, bytes_data_alice = serialize(serializer)
        dict_data_ben = deserialize(serializer, bytes_data_ben)
        dict_data_alice = deserialize(serializer, bytes_data_alice)
    finally:
        # Always release the serializer's underlying resources, even if
        # serialization or deserialization raised.
        serializer.close()
    # NOTE(review): the original continued after close() with
    # serializer.serialize(...) on an undefined `dict_data` and pushed the
    # result into undefined `event_data_batch` / `producer` objects — a
    # fragment fused in from a different example. Using the serializer
    # after close() is invalid, so that dead tail has been removed.


# Producer client bound to the target Event Hub.
eventhub_producer = EventHubProducerClient.from_connection_string(
    eventhub_name=EVENTHUB_NAME,
    conn_str=EVENTHUB_CONNECTION_STR,
)


# Avro serializer backed by a Schema Registry client authenticated with
# the default Azure credential chain.
registry_client = SchemaRegistryClient(
    endpoint=SCHEMA_REGISTRY_ENDPOINT,
    credential=DefaultAzureCredential(),
)
avro_serializer = SchemaRegistryAvroSerializer(
    schema_registry=registry_client,
    schema_group=SCHEMA_GROUP,
)


# Both objects are context managers: their resources are released on exit.
with eventhub_producer, avro_serializer:
    send_event_data_batch(eventhub_producer, avro_serializer)