def listen(self, events=None, **kwargs):
    """Yield parsed event payloads from the server's SSE stream.

    :param events: optional event name (str) or list of event names to
        subscribe to; a single string is wrapped in a list.
    :param kwargs: may carry ``token`` / ``api_key`` auth credentials which
        are forwarded as query parameters.
    """
    # Late import to avoid very expensive in-direct import (~1 second) when
    # this function is not called / used
    from sseclient import SSEClient

    # Accept a bare event name for convenience.
    if events and isinstance(events, six.string_types):
        events = [events]

    query_params = {}
    if 'token' in kwargs:
        query_params['x-auth-token'] = kwargs.get('token')
    if 'api_key' in kwargs:
        query_params['st2-api-key'] = kwargs.get('api_key')
    if events:
        query_params['events'] = ','.join(events)

    request_params = {}
    if self.cacert is not None:
        request_params['verify'] = self.cacert

    url = self._url + '?' + urllib.parse.urlencode(query_params)

    response = requests.get(url, stream=True, **request_params)
    stream = SSEClient(response)
    for message in stream.events():
        # If the execution on the API server takes too long, the message
        # can be empty. In this case, rerun the query.
        if not message.data:
            continue
        yield json.loads(message.data)
def iter_sse_stream(cls, resp, yield_sse_elts=False):
    """Iterate the SSE events in *resp*, yielding parsed payloads.

    When ``yield_sse_elts`` is true, each item also carries the raw event and
    the client. The stream is closed once an ``end`` event is observed.
    """
    sse = SSEClient(resp)
    for evt in sse.events():
        payload = cls._event_to_dict(evt)
        if yield_sse_elts:
            yield {'data': payload, 'event': evt, 'client': sse}
        else:
            yield payload
        # The server signals completion with a dedicated 'end' event.
        if evt.event == 'end':
            sse.close()
            break
def operation_follow(self, operation):
    """Yield progress payloads for *operation* until it reports completion.

    The SSE client is always closed, whether the stream ends normally,
    the operation completes, or the consumer abandons the generator.
    """
    resp = self.get_stream(url='/operations/{}/follow'.format(operation))
    sse = SSEClient(event_source=resp)
    try:
        for evt in sse.events():
            if not evt.data:
                continue
            progress = loads(evt.data)
            yield progress
            # Stop once the server marks the operation as completed.
            if 'completed' in progress and progress['completed']:
                return
    finally:
        sse.close()
def sse_client(wrap):
    """Consume one event from the local test log stream and count matches.

    Increments ``wrap.count`` when the first received SSE event's data
    contains the module-level ``LOG_MESSAGE`` constant.
    """
    from sseclient import SSEClient

    def with_requests(url):
        """Get a streaming response for the given event feed using requests."""
        import requests
        return requests.get(url, stream=True)

    # RANDOM_PORT is a module-level constant; the server under test listens here.
    url = f'http://0.0.0.0:{RANDOM_PORT}/stream/log'
    response = with_requests(url)
    client = SSEClient(response)
    events = client.events()
    # Only the first event is inspected; the stream is then abandoned.
    event = next(events)
    if LOG_MESSAGE in event.data:
        wrap.count += 1
def handle(self, **options): for addon in Addon.objects.all_with_deleted().filter(deleted=True).all(): print 'Try to recover volumes for addons', addon try: recover_volumes(addon) except Exception as e: print e url = settings.MARATHON_SERVERS[0] + "/v2/events" if settings.MARATHON_USERNAME and settings.MARATHON_PASSWORD: client = SSEClient(url, auth=(settings.MARATHON_USERNAME, settings.MARATHON_PASSWORD)) else: client = SSEClient(url) for event in client: try: # logger.error("received event: {0}".format(event)) # marathon might also send empty messages as keepalive... if (event.data.strip() != ''): # marathon sometimes sends more than one json per event # e.g. {}\r\n{}\r\n\r\n for real_event_data in re.split(r'\r\n', event.data): data = json.loads(real_event_data) print "received event of type {0}".format(data['eventType']) if data['eventType'] == 'app_terminated_event': app_id = data['appId'] # /jldd-wond-meiye try: name, namespace = app_id[1:].rsplit("-", 1) except Exception, e: print 'Error', e continue print 'Try to recover volumes for addons', app_id try: addon = Addon.objects.all_with_deleted().get(name=name, namespace=namespace) except Addon.DoesNotExist: continue if addon.deleted: try: recover_volumes(addon) except Exception, e: print e else: print "skipping empty message"
def get_event_stream(self, timeout):
    """Get the Server Side Event (SSE) event stream.

    :param timeout: read timeout (seconds) passed through to SSEClient.
    :return: an SSEClient connected to the Marathon ``/v2/events`` endpoint.
    """
    url = self.host + "/v2/events"
    # BUG FIX: log message previously read "events from from {0}".
    logger.info("SSE Active, trying fetch events from {0}".format(url))
    return SSEClient(url, auth=self.__auth, verify=self.__verify,
                     timeout=timeout)
def _query(self, url, params=None, sse=False):
    """Query Horizon: a one-shot JSON GET by default, or an SSE stream.

    :raises HorizonError: when Horizon returns an undecodable reply.
    :raises HorizonRequestError: when the request itself fails.
    :raises ImportError: when *sse* is requested but the client is missing.
    """
    if sse:
        # SSE connection
        if SSEClient is None:
            raise ImportError(
                'SSE not supported, missing `stellar-base-sseclient` module')
        return SSEClient(url, retry=0, session=self._sse_session,
                         connect_retry=-1, params=params)

    reply = None
    try:
        reply = self._session.get(url, params=params,
                                  timeout=self.request_timeout)
        return reply.json()
    except (RequestException, NewConnectionError, ValueError) as exc:
        # A non-None reply means Horizon answered but with bad content.
        if reply is None:
            raise HorizonRequestError(exc)
        raise HorizonError(
            'Invalid horizon reply: [{}] {}'.format(
                reply.status_code, reply.text),
            reply.status_code)
def _subscribe_to_event(self, event, callback):
    """Stream SSE messages for *event*, invoking *callback* per payload."""
    url = MeatHook.api_sse_url.substitute(
        dict(event=event, token=self.token_id))
    for msg in SSEClient(url):
        # Skip keepalive frames with no payload.
        if msg.data:
            callback(json.loads(msg.data))
def poll_chat():
    """Follow the Firebase SSE feed for the demo chat node and print messages.

    Handles both the initial snapshot (path == "/") and incremental updates.
    """
    sse = SSEClient(FIREBASE_URL + "PythonChatDemo/Messages.json")
    print("Watching Firebase node - %s" % (FIREBASE_URL + "PythonChatDemo/Messages.json"))
    for new_message in sse:
        message_data = json.loads(new_message.data)
        if message_data is None:
            # Keep alive
            continue
        if message_data["data"] is None:
            continue
        # print("message_data = %s\n" %(message_data))
        if message_data["path"] == "/":
            # Initial Read for old messages
            print("Previous messages")
            for (nodeid, message) in message_data["data"].items():
                try:
                    # print("message = %s" % (message))
                    print("%s says: %s" % (message["name"], message["message"]))
                except:
                    # NOTE(review): bare except silently drops malformed
                    # entries — consider narrowing to KeyError/TypeError.
                    pass
        else:
            # New Message
            try:
                print("%s says: %s" % (message_data["data"]["name"],
                                       message_data["data"]["message"]))
            except:
                # NOTE(review): same bare-except concern as above.
                pass
def __event(self, client):
    """Follow the Home Assistant event stream and push Chinese-language
    state-change notifications through *client*.

    ``self.states`` maps entity_id to a human-readable name used in the
    notification text (runtime strings left untranslated on purpose).
    """
    messages = SSEClient(self.api + 'stream', headers=self.headers)
    for message in messages:
        # Home Assistant sends a literal "ping" as keepalive.
        if message.data == "ping":
            continue
        msg = json.loads(message.data)
        if msg["event_type"] == "state_changed":
            if "motion" in msg["data"]["entity_id"]:
                # Motion sensors: only the 'on' transition is reported ("triggered").
                if msg["data"]["new_state"]["state"] == "on":
                    client.send_text_message(
                        self.states[msg["data"]["entity_id"]] + "被触发了")
            elif msg["data"]["new_state"]["state"] == "on":
                if msg["data"]["old_state"]["state"] == "unavailable":
                    # Came back online ("became available") — do not also
                    # report it as switched on.
                    client.send_text_message(
                        self.states[msg["data"]["entity_id"]] + "可用了")
                    continue
                # Switched on ("was turned on").
                client.send_text_message(
                    self.states[msg["data"]["entity_id"]] + "被打开了")
            elif msg["data"]["new_state"]["state"] == "off":
                if msg["data"]["old_state"]["state"] == "unavailable":
                    # Came back online ("became available").
                    client.send_text_message(
                        self.states[msg["data"]["entity_id"]] + "可用了")
                    continue
                # Switched off ("was turned off").
                client.send_text_message(
                    self.states[msg["data"]["entity_id"]] + "被关闭了")
            elif msg["data"]["new_state"]["state"] == "unavailable":
                # Dropped offline ("became unavailable").
                client.send_text_message(
                    self.states[msg["data"]["entity_id"]] + "不可用")
def _streamSSE(url, cnx):
    '''
    Receive deal messages from an SSE feed and insert them into the DB.

    :param url: SSE endpoint emitting one JSON deal per message
    :param cnx: open MySQL connection
    :return: never returns normally (blocks on the event stream)
    '''
    # primary key
    deal_id = 20001
    # dict to store primary key of each unique ctpy/instrument name
    ctpy_name_to_id = insert_counterparty(cnx)
    instrument_name_to_id = insert_instrument(cnx)

    # Parameterized statement (safe w.r.t. SQL injection); hoisted out of
    # the loop since it never changes.
    sql = ("INSERT IGNORE INTO deal (deal_id, deal_time, deal_counterparty_id, "
           "deal_instrument_id, deal_type, deal_amount, deal_quantity) "
           "VALUES (%s, %s, %s, %s, %s, %s, %s)")

    messages = SSEClient(url)
    for msg in messages:
        output_msg = msg.data
        # Only parse real string payloads (isinstance, not `type(...) is str`).
        if isinstance(output_msg, str):
            output_json = json.loads(output_msg)
            time = convert_to_datetime(output_json['time'])
            val = (deal_id, time,
                   ctpy_name_to_id[output_json['cpty']],
                   instrument_name_to_id[output_json['instrumentName']],
                   output_json['type'], output_json['price'],
                   output_json['quantity'])
            # insert deal data into deal table; the cursor is closed promptly
            # to avoid leaking one cursor per message (original never closed it).
            mycursor = cnx.cursor()
            try:
                mycursor.execute(sql, val)
                cnx.commit()
                print(mycursor.rowcount, "record inserted.")
            finally:
                mycursor.close()
            # increment primary key
            deal_id += 1
def _listen_and_notify_alarms(self):
    """Follow the analytics alarm SSE stream forever, e-mailing (or locally
    logging) a notification for every alarm update; reconnects after any
    failure with a one-second backoff.
    """
    while True:
        try:
            alarm_stream_url = \
                'http://{0}:{1}/analytics/alarm-stream'.format(
                    self._args.analytics_api_server,
                    self._args.analytics_api_server_port)
            alarm_stream = SSEClient(alarm_stream_url)
            for alarm in alarm_stream:
                # Only 'update' events carry alarm state we care about.
                if alarm.event != 'update':
                    continue
                alarm_data = self._parse_alarm(alarm)
                if alarm_data:
                    if alarm_data.cleared:
                        subject = \
                            '[Contrail Alarm] {0}:{1} - Cleared'.format(
                                alarm_data.table, alarm_data.key)
                        # Fall back to local logging if e-mail fails.
                        if not self._try_sending_email(subject, ''):
                            self._log_alarm({'subject': subject, 'msg': ''})
                    else:
                        # One e-mail per alarm element in the update.
                        for alarm_elt in alarm_data.alarms:
                            subject = \
                                '[Contrail Alarm] {0} -- {1}:{2}'.format(
                                    alarm_elt.summary, alarm_data.table,
                                    alarm_data.key)
                            body = self._format_email_body(alarm_data,
                                                           alarm_elt)
                            if not self._try_sending_email(subject, body):
                                self._log_alarm({'subject': subject,
                                                 'msg': body})
        except Exception as e:
            # NOTE(review): the exception is swallowed without logging —
            # consider logging ``e`` before the retry sleep.
            time.sleep(1)
def fetch(self, endpoint, params=None, stream=False):
    """Fetch *endpoint* either as a one-shot JSON request or an SSE stream.

    :param endpoint: API endpoint name (also the key into the JSON reply).
    :param params: optional extra query parameters (default: none).
    :param stream: when True, return a live ``SSEClient`` instead of data.
    :return: the decoded ``endpoint`` payload for ``self.symbol``, or the
        ``SSEClient`` when *stream* is true.
    """
    # BUG FIX: the default used to be a shared mutable ``{}``, which leaks
    # state across calls if the dict is ever mutated downstream.
    if params is None:
        params = {}
    if stream:
        return SSEClient(
            self.URLS['stream'].format(endpoint, self.token, self.symbol))
    response = self.session.get(
        url=self.URLS['base'], params=self.params(endpoint, params))
    return response.json()[self.symbol][endpoint]
def events(self, eventname):
    """Return an SSE event iterator for the given database event name."""
    endpoint = '/'.join([
        'db', self.__id_safe, 'events', urlquote(eventname, safe=''),
    ])
    raw = self.__client._call_raw('get', endpoint, stream=True)
    return SSEClient(raw).events()
def get(
    self, path, stream=False, iterator=False, yield_sse_elts=False,
    return_sse_client=False
):
    """GET *path*, transparently re-authenticating once on failure.

    :param stream: request a streaming (SSE) response.
    :param iterator: with *stream*, return a lazy generator of events.
    :param yield_sse_elts: with *iterator*, yield raw SSE elements too.
    :param return_sse_client: with *stream*, return the SSEClient itself.
    :return: decoded JSON for non-stream responses; an SSEClient, generator,
        or fully-drained list of events for streaming ones.
    :raises requests.HTTPError: via ``raise_for_status`` on non-OK replies.
    """
    url = self._make_api_url(path)
    r = self.requests.get(url, headers=self.headers, stream=stream)
    # One retry after re-auth when credentials are available.
    if not self._is_response_ok(r) and self.has_username_and_passwd:
        self.auth()
        r = self.requests.get(url, headers=self.headers, stream=stream)
    if r.status_code == requests.codes.ok:
        if stream:
            if return_sse_client:
                return SSEClient(r)
            if iterator:
                return self.iter_sse_stream(r, yield_sse_elts=yield_sse_elts)
            # Drain the stream eagerly (idiomatic list() instead of a
            # copy-comprehension).
            return list(self.iter_sse_stream(r))
        return json.loads(r.text)
    r.raise_for_status()
    return False
def _query(self, url, params=None, sse=False):
    """Query Horizon: a one-shot JSON GET by default, or an SSE stream.

    :raises HorizonError: when Horizon returns an undecodable reply.
    :raises HorizonRequestError: when the request itself fails.
    :raises ImportError: when *sse* is requested but the client is missing.
    """
    reply = None
    if not sse:
        try:
            reply = self._session.get(url, params=params,
                                      timeout=self.request_timeout)
            return reply.json()
        except (RequestException, NewConnectionError, ValueError) as e:
            if reply is not None:
                raise HorizonError(
                    'Invalid horizon reply: [{}] {}'.format(
                        reply.status_code, reply.text),
                    reply.status_code)
            else:
                raise HorizonRequestError(e)
    # SSE connection
    if SSEClient is None:
        raise ImportError(
            'SSE not supported, missing `stellar-base-sseclient` module')
    # If SSE is enabled, Horizon will fetch the user-agent from the URL query
    # params. BUG FIX: ``params`` defaults to None, so the unconditional
    # ``params.update(...)`` raised AttributeError on the default path and
    # mutated the caller's dict otherwise — copy into a fresh dict instead.
    params = dict(params) if params else {}
    params.update(self.user_agent)
    return SSEClient(url, retry=0, session=self._sse_session,
                     connect_retry=-1, params=params)
def _streamSSE(url, on_data=print, exit=None):
    """internal

    Stream SSE messages from *url*, dispatching each JSON payload to
    *on_data*. When *exit* is a ``threading.Event``, the stream runs on a
    daemon thread and the underlying response is closed once the event is
    set; otherwise this call blocks until the stream ends or the callback
    raises ``PyEXStopSSE``.

    BUG FIX: removed leftover debug output (``print("HERE3/4/5")``); the
    explicit re-raise handlers for JSONDecodeError/KeyboardInterrupt/other
    exceptions were no-ops (re-raising is the default) and are dropped.
    """
    messages = SSEClient(url, proxies=_PYEX_PROXIES,
                         headers={"keep_alive": "false"})

    def _runner(messages=messages, on_data=on_data):
        for msg in messages:
            data = msg.data
            try:
                on_data(json.loads(data))
            except PyEXStopSSE:
                # stop listening and return
                return

    def _exit(messages=messages, exit=exit):
        # run runner in wrapper
        runthread = Thread(target=_runner)
        # die with parent
        runthread.daemon = True
        # start the runner
        runthread.start()
        # wait for exit event
        exit.wait()
        # kill the stream by closing the underlying response
        killerthread = Thread(target=lambda: messages.resp.close())
        # die with parent
        killerthread.daemon = True
        # start the killer
        killerthread.start()
        return

    if isinstance(exit, Event):
        # run on thread, stop when exit set
        exitthread = Thread(target=_exit)
        # start the thread
        exitthread.start()
        # return the thread so callers can join it
        return exitthread
    else:
        # just call and return the function
        return _runner()
def run():
    """Main loop: stream Home Assistant events, handing each message to
    ``process_message`` and reconnecting forever on failure (5s backoff).
    """
    global ha_url
    global ha_key
    global dash_host
    global logger
    while True:
        try:
            headers = {'Content-Type': 'application/json'}
            if ha_key != "":
                # Legacy Home Assistant API-password header.
                headers['x-ha-access'] = ha_key
            messages = SSEClient(ha_url + "/api/stream", verify=False,
                                 headers=headers, retry=3000)
            for msg in messages:
                process_message(msg)
        except requests.exceptions.ConnectionError:
            logger.warning(
                "Unable to connect to Home Assistant, retrying in 5 seconds")
        except Exception:
            # BUG FIX: was a bare ``except:``, which also swallowed
            # SystemExit/KeyboardInterrupt and made the process unkillable.
            logger.fatal("Unexpected error:")
            logger.fatal('-' * 60)
            logger.fatal(traceback.format_exc())
            logger.fatal('-' * 60)
        time.sleep(5)
def __enter__(self):
    """Open the SSE stream, optionally bounded by ``from_time``."""
    url = self.endpoint
    if self.from_time is not None:
        # The server expects 'Z'-suffixed UTC timestamps, not '+00:00'.
        since = self.from_time.isoformat().replace('+00:00', 'Z')
        url = '{}?since={}'.format(url, since)
    self.stream = SSEClient(url)
    return self
def download(self, symbol: SecuritySymbol, currency: str) -> PriceModel:
    ''' download the price

    Streams SSE messages from the FWB quote endpoint until a payload that
    does not contain "EMPTY" arrives, then parses it into a PriceModel.
    ``currency`` is accepted but not used in this method.
    '''
    import urllib.parse
    import urllib.request
    from sseclient import SSEClient

    if not symbol.namespace:
        raise ValueError(f"Namespace not sent for {symbol}")

    self.logger.debug(f"fetching price from FWB.")
    url = self.get_security_url(symbol)

    # download
    messages = SSEClient(url)
    if not messages:
        # NOTE(review): SSEClient instances are normally always truthy, so
        # this guard is likely dead code — confirm before relying on it.
        return None
    data = None
    for msg in messages:
        if msg.data:
            data = msg.data
            # The feed sends "EMPTY" placeholders until a real quote arrives.
            if "EMPTY" not in data:
                break

    # parse the last received payload into a PriceModel
    price = self.parse_price(data)
    price.symbol = symbol
    return price
def sse(self, frequency, wifinfo_addr):
    """ Connect to the SSE server and store the measurements it sends.

    Reconnects after 10s on timeout or connection failure. Messages arriving
    more often than once per *frequency* seconds are printed dimmed and
    skipped (not inserted).
    """
    while True:
        try:
            # SSE source
            messages = SSEClient(f"http://{wifinfo_addr}/tic", timeout=31)
            last_write = 0
            for msg in messages:
                sys.stdout.flush()
                now = int(time.time() / frequency)
                if now == last_write:
                    # too frequent: ignore this message (print it dimmed)
                    print(f"\033[2m{msg}\033[0m")
                    continue
                print(msg)
                last_write = now
                self.insert_data(json.loads(msg.data))
        except (socket.timeout, requests.exceptions.ConnectionError) as exception:
            print(exception)
            print("retry in 10s")
            time.sleep(10)
def run(stream_url, ores_url, metrics_collector, config, delay, verbose):
    """Follow the event stream at *stream_url* and submit each JSON change
    to the ORES precache endpoint via a thread pool.

    :param delay: optional per-change sleep before posting.
    (``metrics_collector``, ``config`` and ``verbose`` are accepted for
    interface compatibility; they are not read in this body.)
    """
    # What to do in case of a change
    def precache_a_change(change):
        session = requests.Session()
        if delay:
            time.sleep(delay)
        start = time.time()
        response = session.post(ores_url + "/v3/precache/", json=change,
                                headers={'Content-Type': "Application/JSON"})
        # BUG FIX: requests exposes ``status_code``; ``response.status``
        # does not exist and raised AttributeError on every response.
        if response.status_code == 200:
            logger.info("Scored {0} in {1} seconds.".format(
                json.dumps(change), round(time.time() - start, 3)))
        elif response.status_code == 204:
            logger.debug("Nothing to do for {0}".format(json.dumps(change)))
        else:
            logger.error(
                "Scoring {0} and got an error in response:\n{1}".format(
                    json.dumps(change), response.content))

    # Execute changes!
    with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
        for event in SSEClient(stream_url):
            if event.event == 'message':
                try:
                    change = json.loads(event.data)
                except ValueError:
                    # Malformed event payload — skip it.
                    continue
                executor.submit(precache_a_change, change)
def _do_sse_request(self, path, params=None, data=None):
    """Open an SSE stream against the first reachable Marathon server.

    :raises MarathonError: when no configured server can be reached.
    """
    headers = {'Accept': 'text/event-stream'}
    if self.dcos:  # was ``== True``; truthiness is the idiomatic check
        headers['Authorization'] = "token=%s" % (self._token())
        self.auth = None

    messages = None
    servers = list(self.servers)
    # Try each server in order until one yields a working stream.
    while servers and messages is None:
        server = servers.pop(0)
        url = ''.join([server.rstrip('/'), path])
        try:
            messages = SSEClient(url, params=params, data=data,
                                 headers=headers, auth=self.auth)
        except Exception as e:
            # BUG FIX: ``e.message`` does not exist on Python 3 exceptions;
            # use str(e) instead.
            marathon.log.error('Error while calling %s: %s', url, str(e))
    if messages is None:
        raise MarathonError('No remaining Marathon servers to try')
    return messages
def events(self, eventname):
    """Return an SSE event iterator for the named database event.

    Raises via ``raise_for_status`` on non-2xx responses.
    """
    endpoint = '/'.join(
        ['db', self.__id_safe, 'events', urlquote(eventname, safe='')])
    res = self.__client._call_raw('GET', endpoint, stream=True)
    res.raise_for_status()
    # NOTE(review): SSEClient is fed ``res.stream()`` (a raw byte iterator)
    # rather than the response object itself — confirm the sseclient variant
    # in use accepts an iterator here.
    return SSEClient(res.stream()).events()
def listen(self, events=None, **kwargs):
    """Yield parsed event payloads from the st2 event stream.

    :param events: optional event name (str) or list of event names; a bare
        string is wrapped in a list.
    :param kwargs: may carry ``token`` / ``api_key`` auth credentials.
    """
    # Late import to avoid very expensive in-direct import (~1 second) when
    # this function is not called / used
    from sseclient import SSEClient

    if events and isinstance(events, six.string_types):
        events = [events]

    query_params = {}
    if 'token' in kwargs:
        query_params['x-auth-token'] = kwargs.get('token')
    if 'api_key' in kwargs:
        query_params['st2-api-key'] = kwargs.get('api_key')
    if events:
        query_params['events'] = ','.join(events)

    url = '{}?{}'.format(self._url, urllib.parse.urlencode(query_params))

    for message in SSEClient(url):
        # If the execution on the API server takes too long, the message
        # can be empty. In this case, rerun the query.
        if not message.data:
            continue
        yield json.loads(message.data)
def main():
    """ Consumes URL data and persists it to mongodb. """
    mongo_client = pymongo.MongoClient(MONGO_HOST, 27017)
    collection = mongo_client.pymongo_sse.events
    stream = SSEClient(requests.get(URL, stream=True))
    try:
        print("\033[1mInserting results to database. Press Ctrl+C to abort.\033[0m")
        for event in stream.events():
            collection.insert_one(json.loads(event.data))
    except KeyboardInterrupt:
        # Graceful shutdown on Ctrl+C.
        mongo_client.close()
        print("Inserting interrupted. Aborting.")
def listener_factory(args):
    """Build a Listener wired to the SSE endpoint named by the
    ``CWT_MESSAGE_SSE_ENDPOINT`` environment variable.

    :param args: positional arguments forwarded to the Listener constructor.
    """
    import requests
    from sseclient import SSEClient

    logger.info("args given to listener_factory: %s", args)
    endpoint = os.getenv("CWT_MESSAGE_SSE_ENDPOINT")
    logger.info("Will be listening to %s", endpoint)
    # The lambda defers opening the stream until the Listener asks for it.
    return Listener(*args, node_runner,
                    lambda: SSEClient(requests.get(endpoint, stream=True)))
def print_notifications(sdk_key):
    # SSEClient (https://pypi.org/project/sseclient/) streams Server-Sent
    # Events from the notifications endpoint (/v1/notifications/event-stream),
    # and we print each one as it arrives.
    headers = {'X-Optimizely-Sdk-Key': sdk_key}
    stream = SSEClient('http://localhost:8080/v1/notifications/event-stream',
                       headers=headers)
    for msg in stream:
        print("Notification: {}".format(msg))
def sse_handler():
    """Follow the location event stream, validating each message as JSON.

    ``api`` and ``token`` are module-level values.
    """
    messages = SSEClient(f"{api}/location/events?token={token}")
    for msg in messages:
        try:
            # NOTE(review): relies on the sseclient Event's __str__ returning
            # its data payload — confirm against the installed sseclient
            # version before trusting the validation.
            json.loads(f"{msg}")
        except Exception as inst:
            print(f"error : {type(inst)}, {inst.args}, {inst}")
        finally:
            # Echo every message, valid JSON or not.
            print(msg)
def __event_listener(self, func: Callable[[Dict[str, Any]], Any], event_types: List[int]) -> None:
    """Listens for events and passes event data to the user-defined function.

    :param func: callback invoked with each decoded event payload.
    :param event_types: numeric event-type ids used to filter the stream.
    """
    # BUG FIX: the old string-concat loop left a trailing comma that was
    # sliced off with url[:-1]; with an empty ``event_types`` that slice
    # chopped the '=' off the query string instead. join() handles both.
    url = self.url + "/events?id=" + ",".join(str(t) for t in event_types)
    client = SSEClient(url)
    # Renamed the loop variable: it previously shadowed ``event`` from the
    # URL-building loop above.
    for event in client:
        func(json.loads(str(event)))
def connect_to_nr_of_measurements_stream(context, stream):
    """Get the number of measurements server-sent-events.

    With ``stream == "stream"`` only the first message is captured; otherwise
    the step triggers one extra measurement and then rebinds ``stream`` so the
    loop breaks after the *next* message (i.e. exactly two are captured).
    """
    context.sse_messages = []
    for message in SSEClient(f"{context.base_api_url}/nr_measurements" ):  # pragma: no cover-behave
        context.sse_messages.append(message)
        if stream == "stream":
            break
        context.execute_steps('when the collector measures "42"')
        # Force the break on the next iteration (not dead code).
        stream = "stream"
import process
from sseclient import SSEClient

if __name__=="__main__":
    # Python 2 script: subscribe to the Particle cloud event firehose and
    # print each item produced by process.parse.
    # NOTE(review): the access token is hard-coded in the URL — move it to an
    # environment variable or config file before publishing.
    messages=SSEClient('https://api.particle.io/v1/devices/events?access_token=6a61e063f79781dddcc39729e77ed76696f23bfc')
    a=process.parse(messages.__iter__())
    for item in a:
        print item
from sseclient import SSEClient

if __name__ == "__main__":
    # Subscribe to the Particle cloud event stream and echo every event,
    # flushing so output appears immediately when piped.
    # NOTE(review): access token is hard-coded — move it to an env var.
    event_stream = SSEClient('https://api.particle.io/v1/devices/events?access_token=7883544edea996822936af401fad4209c2ba5627')
    for item in event_stream:
        print(item, flush=True)
async def internal_generator():
    """Yield JSON-decoded payloads from the surrounding response's SSE stream.

    NOTE(review): ``response`` is a free variable from the enclosing scope;
    this appears to use an async-capable SSE client (``async for`` over
    ``client.events()``) — confirm which SSEClient implementation is imported.
    """
    client = SSEClient(response.content)
    async for event in client.events():
        yield json.loads(event.data)