# Exemplo n.º 1 (scraped example separator; stray vote-count line removed)
from app.db.redis import get_redis_database
from app.services.kline_service import KLineService
from fastapi import APIRouter
from loguru import logger
from starlette.endpoints import WebSocketEndpoint
from starlette.types import Scope, Receive, Send

# Router shared by the handlers below; mounted into the app elsewhere.
router = APIRouter()


async def init():
    """Startup hook: warm the k-line cache for the 1m, 1h and 1d intervals."""
    client = await get_redis_database()
    await KLineService.init_kline(client, [60, 3600, 86400])


# Run the k-line cache warm-up when the application starts.
router.add_event_handler("startup", init)


@router.websocket_route("/subscribe/market/{market}/interval/{interval}")
class WebsocketConsumer(WebSocketEndpoint):
    """WebSocket endpoint streaming k-line updates for one market/interval.

    On construction it creates a Kafka consumer subscribed (by regex
    pattern) to every "latest" k-line topic matching the market and
    interval taken from the request path.
    """

    def __init__(self, scope: Scope, receive: Receive, send: Send):
        super().__init__(scope, receive, send)
        # NOTE(review): `asyncio`, `settings` and `create_kafka_consumer` are
        # not imported in this snippet — confirm they are in scope in the
        # full module.
        loop = asyncio.get_event_loop()
        bootstrap_server = settings.KAFKA_INTERNAL_HOST_PORT
        # _get_market/_get_interval are not shown here; presumably they parse
        # the {market}/{interval} segments out of the URL path — verify.
        market = self._get_market(scope["path"])
        interval = self._get_interval(scope["path"])
        # Regex: any topic named "kline-<market>...-<interval>...-latest".
        topic = f"^kline-{market}.*-{interval}.*-latest$"
        logger.info(f"subscribing to topic:: {topic}")
        self.kafka_consumer = create_kafka_consumer(loop, f"{topic}_consumer",
                                                    bootstrap_server)
        self.kafka_consumer.subscribe(pattern=topic)
# Exemplo n.º 2 (scraped example separator; stray vote-count line removed)
async def shut():
    """Shutdown hook: close the module-level Kafka producer.

    NOTE(review): `kafka_producer` and `close_kafka_producer` are defined
    outside this snippet — confirm the global is populated at startup
    before this hook runs.
    """
    global kafka_producer
    await close_kafka_producer(kafka_producer)


def get_kafka_producer() -> AIOKafkaProducer:
    """Build a Kafka producer bound to the current event loop.

    Uses the internal broker address from settings and a fixed client id.
    """
    return create_kafka_producer(
        loop=asyncio.get_event_loop(),
        client_id="balanced_kafka_producer",
        bootstrap_server=settings.KAFKA_INTERNAL_HOST_PORT,
    )


# NOTE(review): "startup" -> init is also registered earlier in this file;
# if both registrations are in the same module, init would run twice on
# startup — verify (these may be two separately-scraped examples).
router.add_event_handler("startup", init)
router.add_event_handler("shutdown", shut)


@router.post("/event")
async def event(
    _event_or_trade: Union[models.EventLog, models.TradeLog],
    mongodb_client: AsyncIOMotorClient = Depends(get_mongodb_database),
    redis_client: Redis = Depends(get_redis_database)):
    # save raw input in mongodb
    # logger.info(f"mongodb_client db => {mongodb_client}")
    # is_saved = await TradeService.save_raw_event(redis_client, _event_or_trade)
    # logger.info(f"TradeService.save_raw_event - is_saved ? {is_saved}")

    # 1. update "depth", then send new depth value to kafka topic
    # 2. update "kline", then send new kline(add 1min) to kafka topic