def start_notify(self):
    response = self.request_get(self.urlNotification, stream=True)
    client = sseclient.SSEClient(response)
    for event in client.events():
        notification = json.loads(event.data)
        value = notification.get('value', '')
        if value.find("020101") != -1:
            mac = notification.get('id', '')
            if mac != '':
                heart_rate = int(value[6:], 16)
                return_obj = {'mac': mac, 'heart_rate': heart_rate}
                self.heart_rate_callback(return_obj)
def logs(self, follow=False) -> Iterable[Event]:
    if follow:
        url = f"{self._BASE_URL}/{self.name}/logs?follow=true"
        resp = self.cluster.request("GET", url, stream=True)
    else:
        url = f"{self._BASE_URL}/{self.name}/logs"
        resp = self.cluster.request("GET", url)
    handle_request_error(
        resp, f"Failed to retrieve logs for {self}. {resp.status_code} {resp.text}")
    return sseclient.SSEClient(resp).events()
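A minimal usage sketch for the method above; the `workload` object is a hypothetical holder of `logs()`, not part of the original snippet. Each yielded item is an sseclient-py Event, so `.event` and `.data` are available:

# Hypothetical consumer of logs(follow=True); `workload` is assumed for illustration.
for event in workload.logs(follow=True):
    print(event.event, event.data)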
def test_last_id_remembered(monkeypatch):
    content = 'data: message 1\nid: abcdef\n\ndata: message 2\n\n'
    fake_get = mock.Mock(return_value=FakeResponse(200, content))
    monkeypatch.setattr(requests, 'get', fake_get)
    c = sseclient.SSEClient('http://blah.com')
    m1 = next(c)
    m2 = next(c)
    assert m1.id == 'abcdef'
    assert m2.id is None
    assert c.last_id == 'abcdef'
def subscribe(self):
    """
    Subscribe to the Server Sent Event Stream

    :return: sseclient.SSEClient Object
    """
    logger.debug('subscribe:')
    self.__sse_client = sseclient.SSEClient(
        "{0}://{1}:{2}/api/updates/subscribe".format(
            self.__ap_scheme, self.__ap_server, self.__ap_port),
        session=self.__http_session,
        chunk_size=1,
        verify=self.__ap_verifyssl)
    return self.__sse_client
def getTargetData():
    data = {}
    response = with_urllib3(url)  # or with_requests(url)
    client = sseclient.SSEClient(response)
    for msg in client.events():
        if msg.data is not None:
            data = json.loads(msg.data)["data"]["diff"]["0"]
        else:
            data = None
        break
    client.close()
    return data
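Several snippets here (including the one above) call with_urllib3(url) or with_requests(url) without defining them. A minimal sketch of those helpers, modelled on the sseclient-py documentation rather than taken from the original sources, could look like this:

# Sketch of the streaming-response helpers assumed by the surrounding snippets.
import urllib3
import requests

def with_urllib3(url, headers=None):
    """Return a streaming urllib3 response suitable for sseclient.SSEClient."""
    http = urllib3.PoolManager()
    return http.request('GET', url, preload_content=False, headers=headers)

def with_requests(url, headers=None):
    """Return a streaming requests response suitable for sseclient.SSEClient."""
    return requests.get(url, stream=True, headers=headers)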
def expect_network_cmd():
    url = 'https://api.particle.io/v1/devices/events?access_token=' + particle_token
    response = with_urllib3(url)  # or with_requests(url)
    client = sseclient.SSEClient(response)
    # acts like a while loop
    for event in client.events():
        data = json.loads(event.data)  # e.g. data['data'] = 'heartbeat'
        # when an event arrives, fire Spanner.assertEqual
        Spanner.assertEqual("heartbeat", data['data'])
        sys.exit(0)
def facebookLiveChat(self):
    """Print out the Facebook live chat messages."""
    url = ("https://streaming-graph.facebook.com/" + Config.FACEBOOK_LIVE_VIDEO_ID +
           "/live_comments?access_token=" + Config.FACEBOOK_ACCESS_TOKEN +
           "&comment_rate=one_per_two_seconds&fields=from{name,id},message")
    response = with_urllib3(url)
    client = sseclient.SSEClient(response)
    for event in client.events():
        data = json.loads(event.data)
        ssePublish('facebook', data['from']['name'], data['message'])
        if Config.SPEECH_ACTIVE:
            voice.voice(data['message'])
def sse_stream():
    while True:
        try:
            logging.info("Connecting to sse stream at %s", args.api_url)
            response = requests.get(
                "{}/events/measurements".format(args.api_url), stream=True)
            client = sseclient.SSEClient(response)
            yield from client.events()
        except BaseException as e:
            logging.exception(e)
            time.sleep(1)
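A minimal consumption sketch for the reconnecting generator above; it assumes the API emits JSON payloads, which is not shown in the original snippet:

# Hypothetical consumer: sse_stream() reconnects internally, so this loop runs until interrupted.
import json

for event in sse_stream():
    measurement = json.loads(event.data)  # assumption: the stream carries JSON
    print(measurement)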
def __init__(self, config: LBClientConfig = None):
    if config is None:
        raise BadConfigurationException(
            "Message source must be initialized with a config")
    self._config = config
    headers = {
        'X-API-Token': self._config.api_key,
    }
    self._client = sseclient.SSEClient(self._build_url(),
                                       chunk_size=64,
                                       headers=headers)
def watch_client():
    self.stream_response = requests.get(self.url, params=param, stream=True)
    client = sseclient.SSEClient(self.stream_response)
    for event in client.events():
        logger.info('%s: %s' % (event.event, event.data))
        if self.event_index < len(expected_event_list):
            self.assertThat(
                event.event,
                Equals(expected_event_list[self.event_index]))
            self.event_index += 1
    return
def get_events(self, killer):
    response = requests.get(url, stream=True)
    client = sseclient.SSEClient(response)
    events = open(filename, 'a+')
    for event in client.events():
        if killer.kill_now:
            events.close()
            break
        data = json.loads(event.data)
        print(data)
        json.dump(data, events)
        events.write('\n')
def Subscribe(self, basestation):
    basestation_id = basestation.get('deviceId')

    def Register(self):
        if basestation_id in self.event_streams and self.event_streams[basestation_id].connected:
            self.Notify(basestation, {
                "action": "set",
                "resource": "subscriptions/" + self.user_id + "_web",
                "publishResponse": False,
                "properties": {"devices": [basestation_id]}
            })
            event = self.event_streams[basestation_id].Get(block=True, timeout=120)
            if event:
                self.event_streams[basestation_id].Register()
            return event

    def QueueEvents(self, event_stream):
        for event in event_stream:
            response = json.loads(event.data)
            if basestation_id in self.event_streams:
                if self.event_streams[basestation_id].connected:
                    if response.get('action') == 'logout':
                        self.event_streams[basestation_id].Disconnect()
                    else:
                        self.event_streams[basestation_id].queue.put(response)
                elif response.get('status') == 'connected':
                    self.event_streams[basestation_id].Connect()

    if basestation_id not in self.event_streams or not self.event_streams[basestation_id].connected:
        event_stream = sseclient.SSEClient(
            'https://arlo.netgear.com/hmsweb/client/subscribe?token=' + self.headers['Authorization'],
            cookies=self.cookies)
        self.event_streams[basestation_id] = EventStream(QueueEvents, args=(self, event_stream, ))
        self.event_streams[basestation_id].Start()
        while not self.event_streams[basestation_id].connected:
            time.sleep(1)

    if not self.event_streams[basestation_id].registered:
        Register(self)
def __init__(self, msg_processor, params):
    """Starts the SSE subscription and registers for commands on RefreshItem.
    Expects the following params:
    - "URL": base URL of the openHAB instance. NOTE: does not support TLS.
    - "RefreshItem": name of the openHAB Item that, when it receives a command,
      will cause sensor_reporter to publish the most recent states of all the sensors.
    - msg_processor: message handler for commands to the RefreshItem
    """
    super().__init__(msg_processor, params)
    self.log.info("Initializing openHAB REST Connection...")

    self.openhab_url = params("URL")
    self.refresh_item = params("RefreshItem")
    self.registered[self.refresh_item] = msg_processor

    # optional openHAB version and optional API token for connections with authentication
    try:
        self.OH_version = float(params("openHAB-Version"))
    except NoOptionError:
        self.log.info("No openHAB-Version specified, falling back to version 2.0")
        self.OH_version = 2.0
    if self.OH_version >= 3.0:
        try:
            self.api_token = params("API-Token")
        except NoOptionError:
            self.api_token = ""
        if not bool(self.api_token):
            self.log.info(
                "No API-Token specified, connecting to openHAB without authentication")

    # Subscribe to SSE events and start processing the events.
    # If an API token is provided and supported, include it in the request.
    if self.OH_version >= 3.0 and bool(self.api_token):
        header = {'Authorization': 'Bearer ' + self.api_token}
        stream = requests.get("{}/rest/events".format(self.openhab_url),
                              headers=header, stream=True)
    else:
        stream = requests.get("{}/rest/events".format(self.openhab_url),
                              stream=True)
    self.client = sseclient.SSEClient(stream)
    self.thread = Thread(target=self._get_messages)
    self.thread.start()
    self.stop = False
def process_events(self, callback=None, filter_for_task_id=None, yield_cb=False):
    '''
    Processes all received streaming events in a blocking manner until the
    'finish-task' event is reached, which marks the end of a build execution.
    An optional callback may be specified, which is called for each received
    event with the parsed event data (wrapped into a dictionary).

    @param callback: callable accepting exactly one positional argument
    '''
    client = sseclient.SSEClient(self.response)
    should_stop = False
    # pylint: disable=no-member
    # events attrib is added by response
    for event in client.events():
        if event is None or not event.data or len(event.data.strip()) == 0:
            return True
        parsed = json.loads(event.data)
        data = parsed.get('data')
        if not data:
            continue
        if filter_for_task_id:
            if data.get('origin') and data['origin'].get('id') == filter_for_task_id:
                matches_task_filter = True
            else:
                matches_task_filter = False
        else:
            matches_task_filter = True
        if matches_task_filter and parsed.get('event') == 'finish-task':
            should_stop = True  # do not wait any longer as our task has finished
        if callback and matches_task_filter:
            result = callback(data)
            if result and yield_cb:
                yield result
        # if the 'end' event is reached, we always want to stop
        if not should_stop and data.get('event') == 'end':
            should_stop = True
        if should_stop:
            client.close()
            return True
def __matebot_watch_thread(self):
    self.matebot_text = ''
    while True:
        try:
            # peoples = os.popen("curl -s https://g0t001.uber.space/matebot/").read()
            url = 'https://g0t001.uber.space/matebot/stream'
            response = requests.get(url, stream=True)
            client = sseclient.SSEClient(response)
            for event in client.events():
                peoples = json.loads(event.data)
                # peoples = json.loads(peoples)
                peoples = sorted(peoples, key=lambda x: int(x['konsumiert']), reverse=True)
                rank = 3
                if rank > len(peoples):
                    rank = len(peoples)
                leaderboard = {}
                i = 0
                while i < rank and len(peoples) > 0:
                    dudes = []
                    dudes.append(peoples.pop(0))
                    while (len(peoples) > 0 and
                           int(peoples[0]['konsumiert']) == int(dudes[0]['konsumiert'])):
                        dudes.append(peoples.pop(0))
                    leaderboard[i] = dudes
                    i += len(dudes)
                textArray = []
                leadershipEmoji = {0: u'🥇', 1: u'🥈', 2: u'🥉'}
                for i in range(rank, -1, -1):
                    if i in leadershipEmoji:
                        emoji = leadershipEmoji[i]
                    else:
                        emoji = ''.join([u'💩' for x in range(2, i)])
                    if i in leaderboard:
                        dudesText = ', '.join(
                            list(map(lambda dude: "%s (%s)" % (dude['name'], dude['konsumiert']),
                                     leaderboard[i])))
                        textArray.append(emoji + ' ' + dudesText)
                textArray = textArray[::-1]
                self.matebot_text = ', '.join(textArray)
        except Exception as e:
            print('Mate: ' + str(e), file=sys.stderr)
            matebot_text = ''
            time.sleep(self.__cachedUntil)
def Subscribe(self, device_id, xcloud_id):
    def Register(self, device_id, xcloud_id):
        if device_id in self.event_streams and self.event_streams[device_id].connected:
            self.Notify(device_id, xcloud_id, {
                "action": "set",
                "resource": "subscriptions/" + self.user_id + "_web",
                "publishResponse": "false",
                "properties": {"devices": [device_id]}
            })
            event = self.event_streams[device_id].queue.get(block=True, timeout=1000)
            self.event_streams[device_id].Register()
            self.event_streams[device_id].queue.task_done()
            return event

    def QueueEvents(self, event_stream):
        for event in event_stream:
            response = json.loads(event.data)
            if device_id in self.event_streams and self.event_streams[device_id].connected:
                if 'action' in response and response['action'] == 'logout':
                    self.event_streams[device_id].Disconnect()
                else:
                    message = json.loads(event.data)
                    # Only queue messages that start with our transaction id prefix.
                    if message['transId'].startswith(Arlo.TRANSID_PREFIX):
                        self.event_streams[device_id].queue.put(message)
            elif 'status' in response and response['status'] == 'connected':
                self.event_streams[device_id].Connect()

    if device_id not in self.event_streams or not self.event_streams[device_id].connected:
        event_stream = sseclient.SSEClient(
            'https://arlo.netgear.com/hmsweb/client/subscribe?token=' + self.headers['Authorization'],
            cookies=self.cookies)
        self.event_streams[device_id] = EventStream(QueueEvents, args=(self, event_stream, ))
        while not self.event_streams[device_id].connected:
            time.sleep(1)

    if not self.event_streams[device_id].registered:
        Register(self, device_id, xcloud_id)
def get_output(self, assessment, options):
    """
    Get the output of an Assessment.

    :param assessment: string
    """
    responseFormat = None
    if options and 'format' in options and options['format'] is not None:
        responseFormat = options['format']
        options['format'] = None
    url = '/assessment/' + str(assessment) + '/output'
    response = self.http.downstream(url, responseFormat)
    stream = sseclient.SSEClient(response)
    return stream
def __init__(self, binderURL, timeout=10, message='step'):
    response = with_urllib3(binderURL)  # or with_requests(url)
    client = sseclient.SSEClient(response)
    for event in client.events():
        msg = json.loads(event.data)
        if message == 'full':
            pprint.pprint(msg)
        elif message == 'step':
            if msg['message'].startswith(('Step', 'Built image', 'Launching server')):
                pprint.pprint(msg)

    resp = json.loads(event.data)
    self._binder = resp
    randsessionId = random.getrandbits(32)
    r = requests.post('{}api/kernels?{}'.format(resp['url'], randsessionId),
                      headers={'Authorization': 'token {}'.format(resp['token'])})
    wss = 'wss://{}api/kernels/{}/channels?token={}'.format(
        resp['url'].split('//')[1], r.json()['id'], resp['token'])
    self.kernel_url = wss
    print(self.kernel_url)

    # websocket.setdefaulttimeout(timeout)
    self._ws = websocket.WebSocket()
    self._ws.connect(wss)
    # The WebSocketApp connection appears to close?
    # self._ws = websocket.WebSocketApp(wss)
    # I think we need to create a heartbeat that pings the server every so often
    # to keep the session alive. Will a simple websocket ping do that?
    # The following is DEFINED on .WebSocketApp but that route appears to break connections?
    # self._ws.run_forever(ping_interval=70, ping_timeout=10)
    # Do we also need a heartbeat sent to the Binder server / kernel so it knows
    # our client is still alive?

    # initialize our list of messages
    self.shell_messages = []
    self.iopub_messages = []
    self.cnt = 0
    self.keep_running = True
    # self.heart = threading.Thread(target=self.keep_alive)
    # self.heart.start()
    self.keep_alive()
def home(main_pid):
    log.logInfo('Initializing feature: home')
    log.logInfo('Connect address: {}'.format(config['instance']['address']))
    # SSE status test
    try:
        requests.get(
            f'https://{config["instance"]["address"]}/api/v1/streaming/health'
        ).raise_for_status()
    except requests.exceptions.HTTPError as exc:
        log.logCritical('*Server-sent events are not available! Yuu-chan is going to sleep!')
        os.kill(main_pid, signal.SIGKILL)
        return
    try:
        res = mastodon.account_verify_credentials()
        log.logInfo('Fetched account: @{}'.format(res.acct))
        while True:
            client = sseclient.SSEClient(
                requests.get(
                    f'https://{config["instance"]["address"]}/api/v1/streaming/user?access_token={config["instance"]["access_token"]}',
                    stream=True))
            for event in client.events():
                if event.event == "update":
                    home_onUpdate(json.loads(event.data))
                elif event.event == "notification":
                    home_onNotification(json.loads(event.data))
                elif event.event == "delete":
                    pass
                else:
                    log.logWarn(f"Unknown event: {event.event}")
            log.logErr('Lost the connection to the server! Retrying in 1 minute!')
            time.sleep(60)
    except OperationalError as exc:
        log.logCritical('*Cannot access the database! Yuu-chan is going to sleep!')
        os.kill(main_pid, signal.SIGKILL)
        return
    except (requests.exceptions.ReadTimeout,
            requests.exceptions.ChunkedEncodingError,
            MastodonNetworkError, MastodonServerError, MastodonBadGatewayError):
        log.logErr("*The home timeline doesn't seem to connect... Retrying in 1 minute!")
        time.sleep(60)
        home(main_pid)
    except KeyboardInterrupt:
        pass
    except:
        KotohiraUtil.PANIC(sys.exc_info())
        log.logErr('Waiting ten seconds, then reloading the home timeline!')
        time.sleep(10)
        home(main_pid)
def get_data_stream(token, api_endpoint):
    """ Start REST streaming device events given a Nest token. """
    headers = {
        'Authorization': "Bearer {0}".format(token),
        'Accept': 'text/event-stream'
    }
    url = api_endpoint
    http = urllib3.PoolManager()
    # get response, handling redirects (307) if needed
    response = http.request('GET', url, headers=headers,
                            preload_content=False, redirect=False)
    if response.status == 307:
        redirect_url = response.headers.get("Location")
        response = http.request('GET', redirect_url, headers=headers,
                                preload_content=False, redirect=False)
    if response.status != 200:
        print("An error occurred! Response code is ", response.status)
    client = sseclient.SSEClient(response)
    for event in client.events():  # returns a generator
        event_type = event.event
        print("event: ", event_type)
        if event_type == 'open':  # not always received here
            print("The event stream has been opened")
        elif event_type == 'put':
            print("The data has changed (or initial data sent)")
            print("data: ", event.data)
        elif event_type == 'keep-alive':
            print("No data updates. Receiving an HTTP header to keep the connection open.")
        elif event_type == 'auth_revoked':
            print("The API authorization has been revoked.")
            print("revoked token: ", event.data)
        elif event_type == 'error':
            print("Error occurred, such as connection closed.")
            print("error message: ", event.data)
        else:
            print("Unknown event, no handler for it.")

# get_data_stream(token, NEST_API_URL)
def run():
    """Launch client."""
    with requests.get(URL, stream=True) as response:
        client = sseclient.SSEClient(response)
        for event in client.events():
            if event.event == "data":
                print("Data event received")
                data = json.loads(event.data)
            elif event.event == "patch":
                print("Patch event received")
                patch = jsonpatch.JsonPatch.from_string(event.data)
                patch.apply(data, in_place=True)
            else:
                print("Unhandled event received.")
            print_table(data)
def servico_de_notificacao(self):
    request_url: str = ClienteHttp.server_url + ClienteHttp.server_endpoints['notificacao']
    payload: dict = {'nomeDeUsuario': self.username}
    print("Starting notification service")
    response = requests.get(request_url, params=payload, stream=True)
    if response.status_code not in (200, 201):
        raise ServerException()
    client = sseclient.SSEClient(response)
    for event in client.events():
        if event.data != "-1":
            pprint.pprint(event.data)
    print("Stopping notification service")
def _post(self, url, fields=None, body=None):
    r = self._http.request_encode_url('POST', url, fields=fields, body=body,
                                      preload_content=False)
    if r.status != 200:
        try:
            if r.headers['Content-Type'] == 'application/json':
                raise errors.SignalFlowException(**json.loads(r.read()))
            raise errors.SignalFlowException(r.status)
        finally:
            r.close()
    return sseclient.SSEClient(r)
def start(self):
    connect_url = self._get_url('connect')
    self.__response = iter(sseclient.SSEClient(connect_url, session=self._session))
    self._session.get(self._get_url('start'))

    def _receive():
        try:
            notification = next(self.__response)
        except StopIteration:
            return
        else:
            if notification.data != 'initialized':
                self._handle_notification(notification.data)

    return _receive
def __init__(self, event_handler, heartbeat_handler, args):
    self.connected = False
    self.registered = False
    self.queue = queue.Queue()
    self.heartbeat_stop_event = threading.Event()
    self.event_stream_stop_event = threading.Event()
    self.arlo = args[0]
    self.heartbeat_handler = heartbeat_handler
    try:
        event_stream = sseclient.SSEClient(
            'https://arlo.netgear.com/hmsweb/client/subscribe?token=' +
            self.arlo.request.session.headers.get('Authorization'),
            session=self.arlo.request.session)
        self.event_stream_thread = threading.Thread(
            name="EventStream",
            target=event_handler,
            args=(self.arlo, event_stream, self.event_stream_stop_event, ))
        self.event_stream_thread.setDaemon(True)
    except:
        pass
def run(self):
    # Open up a connection and stream data from a DT cloud
    sse_client = SB_SSEClient()
    response = sse_client.getResponse(self.sensor_filter)
    client = sseclient.SSEClient(response)

    # Producer thread. Stream data and insert into queue
    producer_thread = threading.Thread(target=self.stream_data, args=(client, ))
    producer_thread.start()

    # Perform use-case logic
    self.surveil_sensorstate()

    producer_thread.join()
def start_event(self):
    response = self.request_get(self.urlScanDevices, stream=True)
    client = sseclient.SSEClient(response)
    for event in client.events():
        scan_time = time.time()
        device_info_json = json.loads(event.data)
        name = device_info_json.get("name", "")
        if name == self.wristBandName:
            mac = device_info_json.get("bdaddrs", "")[0]['bdaddr']
            device = {'name': name, "scan_time": scan_time}
            if mac not in self.scanned_device_list:
                self.scanned_device_list[mac] = device
                self.serial += 1
            else:
                self.scanned_device_list[mac] = device
def test_event_stream():
    """Check whether event.data can be loaded."""
    limit = 50
    url = 'https://stream.wikimedia.org/v2/stream/recentchange'
    source = sseclient.SSEClient(url)
    for n, event in enumerate(source, start=1):
        if event.event != 'message' or not event.data:
            continue
        try:
            element = json.loads(event.data)
        except ValueError as e:
            source.resp.close()
            raise e
        if n == limit:
            break
    assert True
def run(self):
    # Open up a connection and stream data from a DT cloud
    sse_client = SB_SSEClient()
    response = sse_client.getResponse(self.sensor_filter)
    client = sseclient.SSEClient(response)

    # Producer thread. Stream data and insert into queue
    producer_thread = threading.Thread(target=self.stream_data, args=(client, ))
    producer_thread.start()

    allowed_days, allowed_hours = self.load_other_config_parameters()
    self.surveil_door_state(allowed_days, allowed_hours)

    producer_thread.join()
def _get_event_stream(self):
    """
    Gets the event stream by making a GET request to the Marathon /events endpoint
    """
    events_url = self._marathon.get_url('service/marathon/v2/events')
    messages = sseclient.SSEClient(events_url)
    for msg in messages:
        if self.stopped:
            break
        try:
            json_data = json.loads(msg.data)
        except ValueError:
            logging.debug('Failed to parse event: %s', msg.data)
            continue
        event = MarathonEvent(json_data)
        yield event