import time
from faust import App
from datetime import datetime
from config import BROKER_URLS, ROCKS_DB_URL

# Main Faust application for example #1.
# Broker and RocksDB store endpoints come from the project config module;
# autodiscover=True lets Faust scan for agents, and reply_create_topic=True
# auto-creates the reply topic used by RPC-style agent calls.
app = App(
    'app_main',
    broker=BROKER_URLS,
    store=ROCKS_DB_URL,
    autodiscover=True,
    reply_create_topic=True,
)

# Single-partition topic carrying plain-string payloads.
topic = app.topic(
    'sample_topic',
    value_type=str,
    partitions=1,
)


@app.agent(topic)
async def read_topic(streams):
    """Agent that drains 'sample_topic', echoing each value to stdout."""
    async for message in streams:
        print("RECEIVED:", message)
        print("DONE")


# Script entry point: hand control to Faust's CLI (e.g. `worker` command).
if __name__ == '__main__':
    app.main()
# ---- Example #2 ----
import aiotools
import asyncio
import faust
from faust import App
from faust.worker import Worker
from broker.models import Answer, Question
from broker.settings import KAFKA_BROKER
import logging
import os

logger = logging.getLogger(__name__)
# Second example app: in-memory table store; stream_wait_empty=False allows
# the worker to shut down without waiting for streams to drain.
app = App('ship-app', stream_wait_empty=False, broker=KAFKA_BROKER, store='memory://', autodiscover=True)
# Table keyed by question id; a missing key yields a default-constructed Answer.
answer_table = app.Table('answers_table', default=Answer)
# internal=True: the topic is created and managed by this app itself.
ship_topic = app.topic('ship-topic', value_type=str, internal=True)


@app.agent(ship_topic)
async def process_answers(questions):
    """Consume questions from ``ship_topic``, build an :class:`Answer` for
    each, store it in ``answer_table`` keyed by question id, and re-yield
    the original question downstream.

    NOTE(review): the topic is declared with ``value_type=str`` but records
    are accessed as ``Question`` models and grouped by ``Question.id`` —
    the intended value_type is presumably ``Question``; confirm upstream.
    """
    logger.info("LET US SEE - WE GOT SOMETHING")

    # Bug fix: was misspelled `anser_text`, so the reference below raised
    # NameError on the first message. Also dropped the unused `answers` list.
    answer_text = "our answer to "
    async for question in questions.group_by(Question.id):
        answer = Answer(
            id=1,
            question_id=question.id,
            slack_id=question.slack_id,
            question=question.text,
            answer=answer_text + f" - {question.slack_id} {question.text}",
        )
        answer_table[question.id] = answer
        # Lazy %-style args keep string interpolation off the hot path.
        logger.info("Question received. Question Id %s", question.id)
        yield question