async def connect(self, retry=3):
    """Open the SSE connection and start forwarding incoming events.

    Does nothing when a connection already exists.  ``retry`` is clamped
    to a minimum of 0 before being handed to the event source.
    """
    if self._evt is not None:
        # Already connected; connecting twice would leak the first source.
        return

    self._evt = EventSource(
        self.url,
        session=self.session,
        timeout=-1,
        on_open=partial(self.emit, 'connected'),
        on_error=partial(self.emit, 'error'),
        **self._options(),
    )
    await self._evt.connect(max(retry, 0))

    async def _pump(source, emit, loop):
        # Forward every server-sent event; JSON decoding is off-loaded to
        # the default executor so the event loop stays responsive.
        try:
            async for raw in source:
                emit("rawdata", raw)
                decoded = await loop.run_in_executor(None, json.loads, raw.data)
                emit("message", decoded)
        except ClientConnectionError as exc:
            emit("disconnected", exc)
        except Exception as exc:
            emit("clientError", exc)

    self._loop.create_task(_pump(self._evt, self.emit, self._loop))
async def stream(
        self, url: str, params: Dict[str, str] = None
) -> AsyncGenerator[Dict[str, Any], None]:
    """Open a server-sent-events stream against *url* and yield decoded messages.

    :param url: the SSE endpoint to connect to
    :param params: optional query parameters; a ``cursor`` of ``"now"``
        is injected when the caller does not supply one
    :return: an async generator of JSON-decoded event payloads
    """
    if self._sse_session is None:
        # No timeout and no special connector; headers such as
        # "Accept: text/event-stream" are added by the SSEClient itself.
        self._sse_session = aiohttp.ClientSession()

    query_params = {**params} if params else dict()
    if query_params.get("cursor") is None:
        query_params["cursor"] = "now"  # Start monitoring from now.
    query_params.update(**IDENTIFICATION_HEADERS)

    retry = 0.1
    while True:
        try:
            # Create a new SSEClient using the last seen event id as the
            # cursor.  Headers are passed explicitly because of a bug that
            # makes "params" override the default headers.
            async with EventSource(
                    url,
                    session=self._sse_session,
                    params=query_params,
                    headers=self.headers.copy(),
            ) as client:
                # We want a TimeoutError if no event arrived in the last x
                # seconds; aiohttp's read_timeout is not implemented
                # correctly (https://github.com/aio-libs/aiohttp/issues/1954)
                # so we roll our own.  Note the timeout only starts counting
                # after the first event — there is no timeout until then.
                async for event in client:
                    if event.last_event_id != "":
                        # Events that don't have an id (hello/byebye
                        # events) are not useful for us.
                        query_params["cursor"] = event.last_event_id
                        retry = client._reconnection_time.total_seconds()
                    try:
                        data = event.data
                        if data != '"hello"' and data != '"byebye"':
                            yield json.loads(data)
                    except json.JSONDecodeError:
                        # Content was not json-decodable.
                        pass
        except aiohttp.ClientConnectionError:
            # Retry if the connection dropped after the initial response.
            # Lazy %s args: the message is only formatted if it is logged.
            logger.warning(
                "We have encountered an error and we will try to reconnect, "
                "cursor = %s", query_params["cursor"])
            await asyncio.sleep(retry)
async def events(self):
    """Yield parsed events from the streaming API.

    Connects to ``url`` (presumably module-level — confirm against the
    enclosing file) with this instance's params, then forwards every
    event extracted from 'put' server messages.
    """
    logger.info('Connecting to the streaming API…')
    async with EventSource(url, params=self.params,
                           headers=headers, timeout=None) as source:
        logger.info('Connected.')
        async for message in source:
            if message.type != 'put':
                continue
            for parsed in yield_events(message):
                yield parsed
async def stream(  # type: ignore[override]
        self, url: str, params: Dict[str, str] = None
) -> AsyncGenerator[Dict[str, Any], None]:
    """Perform Stream request.

    :param url: the request url
    :param params: the request params
    :return: the stream response from server
    :raise: :exc:`StreamClientError <kuknos_sdk.exceptions.StreamClientError>` - Failed to fetch stream resource.
    """
    # Init the sse session lazily on first use.
    if self._sse_session is None:
        # No special connector.  Other headers such as
        # "Accept: text/event-stream" are added by the SSEClient itself.
        timeout = aiohttp.ClientTimeout(total=60 * 5)
        self._sse_session = aiohttp.ClientSession(timeout=timeout)

    query_params = {**params} if params else dict()
    if query_params.get("cursor") is None:
        query_params["cursor"] = "now"  # Start monitoring from now.
    query_params.update(**IDENTIFICATION_HEADERS)

    retry = 0.1
    while True:
        try:
            # Create a new SSEClient using the last event id as the cursor.
            # Headers are passed explicitly because of a bug that makes
            # "params" override the default headers.
            async with EventSource(
                    url,
                    session=self._sse_session,
                    params=query_params,
                    headers=self.headers.copy(),
            ) as client:
                # We want a TimeoutError when no event arrives within the
                # session timeout; aiohttp's read_timeout is not implemented
                # correctly (https://github.com/aio-libs/aiohttp/issues/1954)
                # so the ClientTimeout above plus the handler below implement
                # our own reconnect-on-timeout.  Note the timeout only starts
                # counting after the first event.
                async for event in client:
                    if event.last_event_id:
                        # Events without an id (hello/byebye events) are
                        # not useful for us.
                        query_params["cursor"] = event.last_event_id
                        retry = client._reconnection_time.total_seconds()
                    try:
                        data = event.data
                        if data != '"hello"' and data != '"byebye"':
                            yield json.loads(data)
                    except json.JSONDecodeError:
                        # Content was not json-decodable.
                        pass  # pragma: no cover
        except asyncio.TimeoutError:
            # BUGFIX: this handler must run BEFORE aiohttp.ClientError —
            # aiohttp.ServerTimeoutError inherits from both ClientError and
            # asyncio.TimeoutError, so with the old ordering a read timeout
            # raised StreamClientError instead of reconnecting as intended.
            logger.warning(
                f"We have encountered an timeout error and we will try to reconnect, cursor = {query_params.get('cursor')}"
            )
            await asyncio.sleep(retry)
        except aiohttp.ClientError as e:
            raise StreamClientError(query_params["cursor"],
                                    "Failed to get stream message.") from e