Example #1
from faust.serializers.codecs import Codec, codecs, get_codec, register


def test_register():
    try:
        class MyCodec(Codec):
            ...

        register('mine', MyCodec)
        assert get_codec('mine') is MyCodec
    finally:
        # remove the codec from the global registry so other tests stay isolated
        codecs.pop('mine')
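For context, a real codec overrides the _dumps/_loads hooks of Codec rather than being an empty stub. A minimal sketch (the class name and the registered name are illustrative):

import json

from faust.serializers import codecs


class JsonSketchCodec(codecs.Codec):
    """Illustrative codec that round-trips Python objects as JSON bytes."""

    def _dumps(self, obj) -> bytes:
        # called on the producer side; must return bytes
        return json.dumps(obj).encode("utf-8")

    def _loads(self, s: bytes):
        # called on the consumer side with the raw bytes
        return json.loads(s)


codecs.register("json_sketch", JsonSketchCodec())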
Example #2
import logging
import os

import faust
from faust.serializers import codecs

# load_config, SETTINGS, parse_schema_descriptor, FaustAvroSerializer and
# CachedSchemaRegistryClient come from project-specific modules whose imports
# were omitted in the original snippet.

logger = logging.getLogger(__name__)

CONFIG = load_config(os.environ.get('CONFIG'))()

KAFKA_BROKER = SETTINGS.get("confluent", "bootstrap.servers")
SCHEMA_REGISTRY_URL = SETTINGS.get("confluent", "schema.registry.url")

app = faust.App(id=CONFIG.consumer_name, broker="kafka://" + KAFKA_BROKER)

source_topic = app.topic(CONFIG.source_topic, value_serializer="FaustAvroSerializer")
out_topic = app.topic(
    CONFIG.output_topic,
    value_serializer="FaustAvroSerializer",
    key_serializer="FaustAvroKeySerializer",
)

schema_registry_client = CachedSchemaRegistryClient(url=SCHEMA_REGISTRY_URL)
out_schema = parse_schema_descriptor(CONFIG.output_value_schema)
out_key_schema = parse_schema_descriptor(CONFIG.output_key_schema)

codecs.register("FaustAvroSerializer", FaustAvroSerializer(schema_registry_client=schema_registry_client,
                                                           destination_topic=out_topic.topics[0],
                                                           schema=out_schema.schema))

codecs.register("FaustAvroKeySerializer", FaustAvroSerializer(schema_registry_client=schema_registry_client,
                                                              destination_topic=out_topic.topics[0],
                                                              schema=out_key_schema.schema,
                                                              is_key=True))


@app.agent(source_topic)
async def score(stream):
    # take() buffers up to 1000 events and yields them as a list, flushing
    # early if 30 seconds elapse first
    async for records in stream.take(1000, within=30):
        ...  # your processing code here
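The snippet declares out_topic but elides the processing body. A minimal sketch of the same agent with the body filled in, assuming the goal is to forward records to the output topic (the pass-through is a placeholder):

@app.agent(source_topic)
async def score(stream):
    async for records in stream.take(1000, within=30):
        for record in records:
            # placeholder: real scoring/transformation logic goes here
            await out_topic.send(value=record)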
Example #3
import logging
import os
import ssl
from typing import Optional, Type

import faust
from faust.serializers import codecs

from aiodeu.codecs import AvroJsonCodec
from aiodeu.config import Config
from aiodeu.utils import write_to_file

codecs.register('AvroJsonCodec', AvroJsonCodec())

logger = logging.getLogger(__name__)


def create_app(config: Type[Config], faust_app_kwargs: Optional[dict] = None) -> faust.App:
    app_kwargs = {
        "broker": config.BROKER_LIST,
        "value_serializer": "AvroJsonCodec",
        "store": "memory://",
        "topic_replication_factor": 3,
        "topic_partitions": 12,
        "topic_allow_declare": False,
        "topic_disable_leader": True,
        "consumer_auto_offset_reset": "earliest",
        # "stream_wait_empty": False
    }
    app_kwargs.update(faust_app_kwargs or {})
    # the original snippet is truncated here; presumably it constructs and
    # returns the app (using APP_NAME as the app id is an assumption)
    return faust.App(config.APP_NAME, **app_kwargs)
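A hypothetical caller might look like this; MyConfig and its APP_NAME attribute are assumptions (only BROKER_LIST appears in the snippet above):

class MyConfig(Config):
    APP_NAME = "demo-app"  # hypothetical app id
    BROKER_LIST = "kafka://localhost:9092"


app = create_app(MyConfig, faust_app_kwargs={"topic_partitions": 6})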
Example #4
from faust.serializers import codecs
from faust_s3_backed_serializer import S3BackedSerializer

import faust

broker = "kafka://localhost:9094"
input_topic = "texts"

app = faust.App("faust-s3-backed-demo", broker=broker)

value_serializer = "s3_backed_str_serializer"
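# Faust composes codecs with "|": on serialize the left-hand codec runs
# first, then the S3-backed serializer, which offloads large payloads to S3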
str_serializer = codecs.get_codec("raw")
s3_serializer = S3BackedSerializer()
codecs.register(value_serializer, str_serializer | s3_serializer)

schema = faust.Schema(key_type=str,
                      key_serializer="raw",
                      value_type=str,
                      value_serializer=value_serializer)
texts_topic = app.topic(input_topic, schema=schema)


@app.agent(texts_topic)
async def print_length(texts):
    async for key, text in texts.items():
        print("{} has {} characters".format(key, len(text)))


if __name__ == '__main__':
    app.main()
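app.main() hands control to Faust's command-line interface, so a worker for this app can be started with: python demo.py worker -l info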