async def update_status_history(payload: Something, context: EventContext) -> Something:
    """
    Archive the current status (if any) into history, then mark payload as LOADED.
    :param payload: Something, object whose status is updated in place
    :param context: EventContext
    :return: the same payload, mutated
    """
    previous = payload.status
    if previous:
        payload.history.append(previous)
    payload.status = Status(
        ts=datetime.now(tz=timezone.utc),
        type=StatusType.LOADED,
    )
    return payload
def stream_event(payload: Something, context: EventContext) -> Something:
    """
    Archive the current status (if any) into history and mark payload as SUBMITTED
    before it is streamed.
    :param payload: Something, object whose status is updated in place
    :param context: EventContext
    :return: the same payload, mutated
    """
    logger.info(context, "streaming event", extra=extra(something_id=payload.id))
    previous = payload.status
    if previous:
        payload.history.append(previous)
    payload.status = Status(
        ts=datetime.now(tz=timezone.utc),
        type=StatusType.SUBMITTED,
    )
    return payload
async def fork_something(payload: Something, context: EventContext) -> Spawn[Union[FirstPart, SecondPart]]:
    """
    Produces 2 variants from payload to be processed in parallel.
    Archives the current status (if any) into history and marks payload
    as SUBMITTED before wrapping it in each variant type.
    :param payload: Something, object wrapped into both variants
    :param context: EventContext
    """
    logger.info(context, "producing 2 variants of payload", extra=extra(something_id=payload.id))
    previous = payload.status
    if previous:
        payload.history.append(previous)
    payload.status = Status(
        ts=datetime.now(tz=timezone.utc),
        type=StatusType.SUBMITTED,
    )
    # Both variants wrap the same (already stamped) payload object.
    for variant in (FirstPart, SecondPart):
        yield variant(payload)
def update_status(payload: Something, context: EventContext) -> Something:
    """
    Updates status of payload to PROCESSED and puts previous status in history.
    :param payload: Something, object whose status is updated in place
    :param context: EventContext
    :return: the same payload, mutated
    """
    logger.info(context, "updating something status", extra=extra(something_id=payload.id))
    if payload.status:
        payload.history.append(payload.status)
    # Fix: use a timezone-aware UTC timestamp, consistent with every other
    # handler in this file — the original naive datetime.now() would mix
    # naive and aware datetimes in the status history.
    payload.status = Status(ts=datetime.now(tz=timezone.utc), type=StatusType.PROCESSED)
    return payload
async def spawn_many_events(payload: Something, context: EventContext) -> Spawn[Something]:
    """
    Produces 3 events to be published to stream.
    The prior status (if any) is archived once; each emitted event carries a
    fresh SUBMITTED status and an id of "0", "1" or "2".
    :param payload: Something, object mutated and yielded repeatedly
    :param context: EventContext
    """
    logger.info(context, "spawning event 3 times", extra=extra(something_id=payload.id))
    previous = payload.status
    if previous:
        payload.history.append(previous)
    # NOTE(review): the same payload object is mutated and yielded each time;
    # presumably each yield is serialized/consumed before the next mutation — confirm.
    for index in range(3):
        payload.status = Status(
            ts=datetime.now(tz=timezone.utc),
            type=StatusType.SUBMITTED,
        )
        payload.id = str(index)
        yield payload