async def data_subscribe(session: BaseSession,
                         stream: Union[DataStream, str, int]) -> Pipe:
    # make sure the input is compatible
    src_stream = await data_stream_get(session, stream)
    if isinstance(stream, DataStream):
        if stream.layout != src_stream.layout:
            raise errors.ApiError(
                "Input [%s] configured for [%s] but source is [%s]" %
                (stream, stream.layout, src_stream.layout))
    # make sure the stream is being produced
    if not src_stream.is_destination:
        raise errors.ApiError(
            "DataStream [%s] is not being produced, specify time bounds for historic execution" %
            src_stream.name)
    # replace the stub stream (from config file) with the actual stream;
    # do not let the buffer grow beyond 5 server chunks
    pipe = LocalPipe(src_stream.layout, name=src_stream.name,
                     stream=src_stream, write_limit=5)
    pipe.stream = src_stream
    task = asyncio.create_task(_live_reader(session, src_stream, pipe))

    async def close():
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            pass

    pipe.close_cb = close
    return pipe
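
# A minimal usage sketch (not part of this module): consume a few chunks of
# live data from the pipe returned by data_subscribe(). The read()/consume()/
# close() calls follow the Pipe interface used above; the stream path
# "/demo/sensor", the helper name sample_live, and the chunk count are
# hypothetical placeholders.
async def sample_live(session: BaseSession, n_chunks: int = 10) -> None:
    pipe = await data_subscribe(session, "/demo/sensor")
    try:
        for _ in range(n_chunks):
            data = await pipe.read()  # blocks until the server sends data
            pipe.consume(len(data))   # mark the rows as processed
    finally:
        await pipe.close()  # runs close_cb, cancelling the _live_reader task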
async def data_read(session: BaseSession,
                    stream: Union[DataStream, str, int],
                    start: Optional[int] = None,
                    end: Optional[int] = None,
                    max_rows: Optional[int] = None) -> Pipe:
    # make sure the input is compatible
    src_stream = await data_stream_get(session, stream)
    if isinstance(stream, DataStream):
        if stream.layout != src_stream.layout:
            raise errors.ApiError(
                "Input [%s] configured for [%s] but source is [%s]" %
                (stream, stream.layout, src_stream.layout))
    # replace the stub stream (from config file) with the actual stream;
    # do not let the buffer grow beyond 5 server chunks
    pipe = LocalPipe(src_stream.layout, name=src_stream.name,
                     stream=src_stream, write_limit=5)
    pipe.stream = src_stream
    task = asyncio.create_task(
        _historic_reader(session, src_stream, pipe, start, end, max_rows))

    async def close():
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            pass

    pipe.close_cb = close
    return pipe
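
# A minimal usage sketch (not part of this module): read a bounded slice of
# historic data and count the rows. Timestamps are assumed to be UNIX
# microseconds, consistent with the start/end parameters above; the stream
# path and the exhausted-pipe exception name are assumptions, so substitute
# this codebase's actual empty-pipe error.
async def dump_range(session: BaseSession, start: int, end: int) -> None:
    pipe = await data_read(session, "/demo/sensor",
                           start=start, end=end, max_rows=10_000)
    total = 0
    try:
        while True:
            data = await pipe.read()  # raises once the pipe is exhausted
            pipe.consume(len(data))
            total += len(data)
    except errors.EmptyPipeError:  # assumed name for the exhausted-pipe error
        pass
    finally:
        await pipe.close()
    print("read %d rows between %d and %d" % (total, start, end))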