async def main():
    """Pretty-print every non-bot English-Wikipedia change from the Wikimedia SSE feed."""
    stream_url = 'https://stream.wikimedia.org/v2/stream/recentchange'
    async for event in aiosseclient(stream_url):
        change = json.loads(event.data)
        # Only human (non-bot) edits on enwiki are of interest here.
        if change['wiki'] == 'enwiki' and not change['bot']:
            pretty = json.dumps(change, indent=4, sort_keys=True)
            print(pretty)
async def main():
    """Watch the Wikimedia recent-changes SSE stream for English-Wikipedia
    article edits and hand each one to process_event() to look for DOIs.

    Prints a running tally of processed events, DOIs found, and DOIs missing
    in Wikidata.  Stops (SystemExit) after config.max_events events when that
    limit is positive.
    """
    print("Running main")
    if config.import_mode:
        finish_all_in_list()
    print("Looking for new DOIs from the Event stream")
    mediawikiapi = MediaWikiAPI()
    count = 0
    count_dois_found = 0
    count_missing_dois = 0
    async for event in aiosseclient(
        'https://stream.wikimedia.org/v2/stream/recentchange',
    ):
        data = json.loads(str(event))
        server_name = data['server_name']
        namespace = int(data['namespace'])
        language_code = server_name.replace(".wikipedia.org", "")
        # Only English Wikipedia is handled for now.
        if language_code != "en":
            continue
        # Namespace 0 == main/article namespace.
        if "wikipedia" in server_name and namespace == 0:
            title = data['title']
            bot = "(bot)" if data['bot'] else "(!bot)"
            # Was `type`, which shadows the builtin; only new pages and
            # edits are processed.
            event_kind = {"new": "(new)", "edit": "(edit)"}.get(data['type'])
            if event_kind is not None:
                print(f"{event_kind}\t{server_name}\t{bot}\t\"{title}\"")
                print(f"http://{server_name}/wiki/{quote(title)}")
                dois_count_tuple = process_event(
                    mediawikiapi,
                    language_code=language_code,
                    title=title,
                )
                # Tuple layout per process_event(): (found, missing-in-WD).
                if dois_count_tuple[0] > 0:
                    count_dois_found += dois_count_tuple[0]
                if dois_count_tuple[1] > 0:
                    count_missing_dois += dois_count_tuple[1]
                count += 1
                print(
                    f"Processed {count} events and found {count_dois_found}"
                    f" DOIs where {count_missing_dois} were missing in WD.")
                if config.max_events > 0 and count == config.max_events:
                    # Equivalent to exit(0) but does not rely on the
                    # site-module `exit` helper.
                    raise SystemExit(0)
async def read_stream(session):
    """Consume the Wikimedia recent-changes SSE stream forever.

    For every event carrying a parent revision id, track per-wiki min-id and
    event counts in the module-level `wikis` dict and fetch the raw revision
    text via fetch().  Reconnects after any error.

    Fixes vs. the previous version:
    - reconnect loop instead of tail recursion (the old version re-entered
      itself after every stream drop, growing the call stack without bound);
    - a newly seen wiki is initialized with count 0 so the unconditional
      increment below counts its first event exactly once (previously it
      started at 1 and was then incremented to 2).
    """
    global count
    while True:
        try:
            async for event in aiosseclient(
                    'https://stream.wikimedia.org/v2/stream/recentchange'):
                d = json.loads(event.data)
                w = d['wiki']
                # Only events that reference a parent ("old") revision
                # are usable; skip everything else.
                if 'revision' not in d:
                    continue
                _id = d['revision']['old']
                if _id is None:
                    continue
                if w not in wikis:
                    wikis[w] = {'min': _id, 'max': _id, 'count': 0}
                if wikis[w]['min'] > _id:
                    wikis[w]['min'] = _id
                wikis[w]['count'] += 1
                server_script_path = d['server_script_path']
                server_url = d['server_url']
                url = (server_url + server_script_path
                       + '/index.php?oldid=' + str(_id) + '&action=raw')
                status = await fetch(session, d, url)
                count = count + 1
                # Lightweight progress marker every 1000 events.
                if count % 1000 == 0:
                    print(".")
        except Exception as e:
            # Best-effort stream: log and reconnect on the next loop pass.
            print(e)
async def read_via_restconf(hostname, queue):
    """Stream YANG-push telemetry notifications from *hostname* over SSE.

    Each notification's datastore contents are flattened by extract_data();
    a spectrum scan is additionally processed by extract_spectrum(), but only
    when it differs from the previously seen scan (they arrive repeatedly).
    Collected lines are pushed onto *queue*.  On any error the connection is
    retried after a one-second pause, forever.
    """
    print(f'Handling {hostname}')
    url = urllib.parse.urlunparse(
        ('http', hostname, '/telemetry/optics', None, None, None))
    while True:
        # Reset per connection so the first scan after a reconnect is
        # always emitted.
        last_spectrum = None
        try:
            async for block in aiosseclient(url):
                ds = json.loads(block)['ietf-restconf:notification'][
                    'ietf-yang-push:push-update']['datastore-contents']
                buf = extract_data(ds, hostname)
                spectrum = ds.get('czechlight-roadm-device:spectrum-scan')
                # Reuse the already-fetched value instead of re-indexing
                # the datastore dict for every comparison.
                if spectrum is not None and last_spectrum != spectrum:
                    extract_spectrum(buf, ds, hostname)
                    last_spectrum = spectrum
                if buf:
                    print(f'{hostname} -> {len(buf)}')
                    await queue.put('\n'.join(buf))
        except Exception as e:
            print(hostname, e)
            await asyncio.sleep(1)
async def extract(url):
    """Run transform() on every 'message'-typed SSE event from *url*."""
    async for sse_event in aiosseclient(url):
        if sse_event.event != 'message':
            continue
        transform(sse_event)
async def main(self):
    """Relay raw wiki SSE messages into the producer's raw channel."""
    async for incoming in aiosseclient(constants.WIKI_URL):
        # Ignore everything except plain 'message' events.
        if incoming.event != "message":
            continue
        logger.info("Posted raw message")
        self.producer.send(constants.RAW_CHANNEL, value=incoming.data)
async def main():
    """Dump non-bot enwiki recent-change events as formatted JSON."""
    async for event in aiosseclient(
            'https://stream.wikimedia.org/v2/stream/recentchange'):
        data = json.loads(event.data)
        # Skip other wikis and bot-flagged edits.
        if data['wiki'] != 'enwiki' or data['bot']:
            continue
        print(json.dumps(data, indent=4, sort_keys=True))
async def _fastapi_client():
    """Print each event/data pair received from a local FastAPI SSE endpoint."""
    endpoint = "http://localhost:8000/stream"
    # timeout=None: keep the connection open indefinitely.
    async for sse in aiosseclient(endpoint, timeout=None):
        print(f"Event: {sse.event}")
        print(f"Data: {sse.data}")