Example #1
# Helper method from a test class: builds a faust.App with the given id,
# finalizes its configuration and returns it (App in the body is faust.App).
def App(self, id='myid', **kwargs):
    app = App(id, **kwargs)
    app.finalize()
    return app
Example #2
import pytest
from faust import App
from faust.exceptions import ImproperlyConfigured

def test_version_cannot_be_zero():
    app = App('id', version=0)
    with pytest.raises(ImproperlyConfigured):
        app.finalize()
Example #3
from faust import App

from config import BROKER_URLS, ROCKS_DB_URL

app = App(
    'app_main',
    broker=BROKER_URLS,
    store=ROCKS_DB_URL,
    autodiscover=True,
    reply_create_topic=True,
)

topic = app.topic(
    'sample_topic',
    # value_type=bytes,
    value_type=str,
    partitions=1,
)


@app.agent(topic)
async def read_topic(streams):
    async for payload in streams:
        print("RECEIVED:", payload)
        print("DONE")


if __name__ == '__main__':
    app.main()
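The agent above only consumes. For a quick local test, a periodic producer can live in the same module; a minimal sketch reusing the app and topic objects defined above (interval and payload are illustrative). Because app.main() is called, the module is started as a worker with python <module>.py worker -l info.

@app.timer(interval=5.0)
async def produce_sample():
    # Push a test string onto sample_topic every few seconds so that
    # read_topic has something to print; topic.send() enqueues the message.
    await topic.send(value='hello from the timer')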
Example #4
from os import getenv
from faust import App
import logging
from app.db.handler import pgQuery
from datetime import datetime

pg = pgQuery()

redis_server = getenv('REDIS_SERVER', 'redis://redis:6385/0')
kafka_broker = getenv('KAFKA_SERVER', 'kafka://kafka:9092')

faust_app = App(
    'main_app',
    version=1,
    autodiscover=True,
    origin='app',
    broker=kafka_broker,
    store=redis_server,
    key_serializer='json',
    value_serializer='json',
)


@faust_app.task
async def on_started():
    print('Faust Main APP STARTED . . .\n\n')


logging.basicConfig(
    filename='faustLogs.log',
    level=logging.DEBUG,
    format="[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s",
)
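The on_started task above runs once when the worker has fully started. For recurring work Faust also provides app.timer; a hedged sketch, assuming it is added to this module (interval and message are illustrative):

@faust_app.timer(interval=60.0)
async def heartbeat():
    # Illustrative periodic task: prints a heartbeat once a minute while
    # the worker is running.
    print(f'[{datetime.utcnow().isoformat()}] main_app heartbeat')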
Example #5
from faust import App


app = App(
    'app_main',
    broker='kafka://kafka:9094',
    store='rocksdb://',
)

topic = app.topic(
    'sample_topic',
    value_type=bytes,
    partitions=1,
)


@app.agent(topic)
async def read_topic(streams):
    async for payload in streams:
        print("RECEIVED:", payload)
        print("DONE")


if __name__ == '__main__':
    app.main()
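This example configures a RocksDB store but declares no tables, so nothing is actually persisted. A hedged sketch of a changelog-backed table that would exercise the rocksdb:// store, assuming it is added to the module above; the topic and table names are illustrative:

counts_topic = app.topic('counts_topic', value_type=str, partitions=1)
value_counts = app.Table('value_counts', default=int, partitions=1)


@app.agent(counts_topic)
async def count_values(stream):
    async for value in stream:
        # Each table mutation is written to a changelog topic and persisted
        # locally by the RocksDB store configured on the app.
        value_counts[value] += 1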
Example #6
import faust

def create_livecheck(app: faust.App):
    return app.LiveCheck()
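app.LiveCheck() returns a LiveCheck instance, which is itself a Faust app dedicated to end-to-end checks of a running system. A hedged sketch of typical usage, following the pattern in the faust LiveCheck docs; the app name, broker URL and case name are illustrative:

app = faust.App('orders', broker='kafka://localhost:9092')
livecheck = create_livecheck(app)


@livecheck.case()
class test_order_flow(livecheck.Case):
    async def run(self, *args, **kwargs):
        # Assertions about the end-to-end order flow would go here.
        ...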
Example #7
import aiotools
import asyncio
import faust
from faust import App
from faust.worker import Worker
from broker.models import Answer, Question
from broker.settings import KAFKA_BROKER
import logging
import os

logger = logging.getLogger(__name__)
app = App('ship-app', stream_wait_empty=False, broker=KAFKA_BROKER, store='memory://', autodiscover=True)
answer_table = app.Table('answers_table', default=Answer)
# The agent below consumes Question records, so the topic's value_type matches.
ship_topic = app.topic('ship-topic', value_type=Question, internal=True)


@app.agent(ship_topic)
async def process_answers(questions):
    logger.info("LET US SEE - WE GOT SOMETHING")
    
    answers = []
    answer_text = "our answer to "
    async for question in questions.group_by(Question.id):
        answer = Answer(id=1, question_id=question.id,
                        slack_id=question.slack_id,
                        question=question.text,
                        answer=answer_text + f" - {question.slack_id} {question.text}")
        answer_table[question.id] = answer
        logger.info(f"Question received. Question Id {question.id}")
        yield question
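The snippet stops before anything produces to ship_topic. A hedged sketch of one way to feed the agent, assuming it sits in the same module and that Question is a faust.Record exposing the id, slack_id and text fields used above; the interval and field values are illustrative:

@app.timer(interval=30.0)
async def ask_periodically():
    # Agent.send() enqueues a value on the agent's topic (ship_topic here).
    await process_answers.send(
        value=Question(id=1, slack_id='U123', text='what is the ETA?'),
    )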