def asJson(self, result):
    '''convert the given result to JSON'''
    events = []
    for title in result:
        events.extend(title.events)
    jsonResult = {"count": len(events), "events": events}
    jsons.suppress_warnings()
    jsonText = jsons.dumps(jsonResult, indent=4, sort_keys=True)
    return jsonText
def __init__(self):
    self.polling_time = 1
    self.save_interval = 300
    self.tracked_applications = Tracker.get_tracked_applications()
    pid = os.getpid()
    Tracker.save_pid(pid)
    jsons.suppress_warnings(True)
    atexit.register(self.handle_exit)
    self.kill_now = False
    signal.signal(signal.SIGTERM, self.handle_exit)
    signal.signal(signal.SIGINT, self.handle_exit)
    self.tracking_loop()
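# handle_exit is registered for atexit, SIGTERM and SIGINT above but not
# shown in this snippet; a minimal sketch of what such a handler might
# look like (hypothetical, assuming tracking_loop polls self.kill_now):
def handle_exit(self, *args):
    # atexit calls with no arguments, signal handlers with (signum, frame);
    # *args absorbs both. Flag the loop so it can save state and stop.
    self.kill_now = True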
def test_suppress_warnings(self):
    fork_inst = jsons.fork()
    jsons.suppress_warnings(True, fork_inst)

    with warnings.catch_warnings(record=True) as w:
        fork_inst._warn('Some warning', 'some-warning')
        self.assertEqual(0, len(w))

    jsons.suppress_warnings(False, fork_inst)

    with warnings.catch_warnings(record=True) as w:
        fork_inst._warn('Some warning', 'some-warning')
        self.assertEqual(1, len(w))
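# jsons.fork() gives an isolated copy of the serializer state, so the
# suppression toggled on the fork should not leak into the global jsons
# instance. A minimal sketch of that assumption, using the naive-datetime
# warning from the test further below as the trigger:
import datetime
import warnings

import jsons

fork_inst = jsons.fork()
jsons.suppress_warnings(True, fork_inst)

with warnings.catch_warnings(record=True) as w:
    warnings.simplefilter("always")
    jsons.dump(datetime.datetime.now(), fork_inst=fork_inst)  # suppressed
    jsons.dump(datetime.datetime.now())                       # still warns
    assert len(w) == 1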
def to_json(self, strip_privates: bool = True) -> str:
    """
    Serialize to json
    :param strip_privates: strip private variables
    :return: the json representation of this object
    """
    set_serializers()
    suppress_warnings()
    try:
        return cast(
            str,
            dumps(self, strip_privates=strip_privates,
                  strip_nulls=True)).replace(FROM_JSON_FILE, "")
    except JsonsError:
        return JSON_PARSE_ERROR
def write_json(object_to_write: object, strip_privates: bool = True) -> str:
    """
    Serialize to json string
    :param object_to_write: object to write to json
    :param strip_privates: strip private variables
    :return: the json string representation of this object
    """
    set_serializers()
    suppress_warnings()
    try:
        json_object = write_json_object(object_to_write, strip_privates)
        json_string = cast(
            str,
            dumps(json_object, strip_privates=strip_privates,
                  strip_nulls=True))
        return json_string
    except JsonsError:
        return JSON_PARSE_ERROR
def write_json_object(object_to_write: object,
                      strip_privates: bool = True) -> object:
    """
    Serialize to json object
    :param object_to_write: object to write to json
    :param strip_privates: strip private variables
    :return: the json representation of this object
    """
    set_serializers()
    suppress_warnings()
    try:
        json_object = dump(object_to_write, strip_privates=strip_privates,
                           strip_nulls=True)
        for key in KEYS_TO_REMOVE:
            _remove_key(json_object, key)
        return json_object
    except JsonsError:
        return JSON_PARSE_ERROR
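# _remove_key and KEYS_TO_REMOVE are used above but not defined in this
# snippet; a minimal sketch of a recursive key remover (hypothetical,
# assuming dump() returns nested dicts and lists):
def _remove_key(json_object: object, key: str) -> None:
    if isinstance(json_object, dict):
        json_object.pop(key, None)
        for value in json_object.values():
            _remove_key(value, key)
    elif isinstance(json_object, list):
        for item in json_object:
            _remove_key(item, key)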
def test_dump_datetime_without_tz(self):
    # By default, naive datetimes trigger a warning.
    with warnings.catch_warnings(record=True) as w:
        jsons.dump(datetime.datetime.now())
        self.assertEqual(1, len(w))
        self.assertTrue(issubclass(w[0].category, UserWarning))

    jsons.suppress_warnings()

    # Warnings are now suppressed.
    with warnings.catch_warnings(record=True) as w:
        jsons.dump(datetime.datetime.now())
        self.assertEqual(0, len(w))

    jsons.suppress_warnings(False)

    # The warnings are back on now.
    with warnings.catch_warnings(record=True) as w:
        jsons.dump(datetime.datetime.now())
        self.assertEqual(1, len(w))
        self.assertTrue(issubclass(w[0].category, UserWarning))
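# The warning exists because a naive datetime carries no timezone to
# encode. An alternative to suppress_warnings() is to only dump
# timezone-aware datetimes; a minimal sketch of that approach:
import datetime

import jsons

aware = datetime.datetime.now(tz=datetime.timezone.utc)
jsons.dump(aware)  # serialized with an explicit offset; no warning raised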
from confluent_kafka import Producer
import time
import requests
import json
import jsons
jsons.suppress_warnings()
import pandas as pd
import sys
from datetime import datetime

from LambdArchitecture_OpenWeather.properties import PROJ_DIR, TTL, TOPIC

appId = sys.argv[1]
INDEX = 0


def delivery_report(err, msg):
    """
    Called once for each message produced to indicate delivery result.
    Triggered by poll() or flush().
    """
    if err is not None:
        print('Message delivery failed: {}'.format(err))
    else:
        print('Message delivered to {} [{}]'.format(msg.topic(),
                                                    msg.partition()))


def toCelsius(temp):
    # OpenWeather reports temperatures in Kelvin by default.
    return float(temp) - 273.15


def conv_date(date):
    # OpenWeather timestamps are Unix epoch seconds.
    return datetime.fromtimestamp(date)
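# A minimal sketch of how these helpers might be wired together
# (hypothetical: the broker address and payload fields are assumptions,
# not taken from this module):
producer = Producer({'bootstrap.servers': 'localhost:9092'})
reading = {'temp': toCelsius(293.15), 'time': str(conv_date(1609459200))}
producer.produce(TOPIC, json.dumps(reading).encode('utf-8'),
                 callback=delivery_report)
producer.flush()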
def main(argv):
    if len(sys.argv) < 2:
        print("pass filename")
        sys.exit(2)

    print("loading " + argv[0])
    texts = loadFile(argv[0])
    # debug
    # texts = ['Rami Eid is studying at Stony Brook University in New York.',
    #          'Blounts Creek is a small unincorporated rural community in Beaufort County, North Carolina, United States, near a creek with the same name.']

    # task 1
    # task1(texts[0])

    nlp = spacy.load("en_core_web_sm")
    for idx, doc in enumerate(nlp.pipe(texts, disable=["tagger", "parser"])):
        print("Named Entities:", [(ent.text, ent.label_) for ent in doc.ents])

        # Represent entity graph as dictionary: <Entity name, Node>
        nodes = buildEntityGraph(doc, texts[idx])

        # verifying graph
        print("Graph:")
        printGraph(nodes)

        # Find maximal cliques and clique weights
        print("BRON-KERBOSCH")
        sys.setrecursionlimit(2000)
        cliques = bron_kerbosch(list(nodes.values()))
        print("cliques:", cliques)

        # If the clique contains certain types of relations, fill them
        # into the complex relation / template.
        workTemplates = []
        partTemplates = []
        for clique in cliques:
            for node in clique:
                for edge in node.weightedEdges:
                    if edge.dst in clique:
                        tryAddWorkTemplate(edge, workTemplates)
                        tryAddPartTemplate(edge, partTemplates)
                        # tryAddBuyTemplate(edge, partTemplates)

        # verifying template filling
        for work in workTemplates:
            print('Work:', work.person, work.org, work.title, work.location,
                  sep=', ')
        for part in partTemplates:
            print(part.part, part.whole, sep=' part of ')

        # writing templates to json output
        out = []
        for template in workTemplates:
            arguments = {}
            arguments['1'] = template.person or ""
            arguments['2'] = template.org or ""
            arguments['3'] = template.title or ""
            arguments['4'] = template.location or ""
            extraction = Extraction('WORK', [token.text for token in doc],
                                    arguments)
            output = Output(argv[0], extraction)
            out.append(output)
        for template in partTemplates:
            arguments = {}
            arguments['1'] = template.part or ""
            arguments['2'] = template.whole or ""
            extraction = Extraction('PART', [token.text for token in doc],
                                    arguments)
            output = Output(argv[0], extraction)
            out.append(output)

        # Write new relations to data file
        jsons.suppress_warnings()
        with open(str(argv[0])[:-4] + '.json', 'a') as the_file:
            for output in out:
                the_file.write(json.dumps(jsons.dump(output)) + '\n')
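# Extraction and Output are constructed above but not defined in this
# snippet; a minimal sketch of shapes jsons.dump() could serialize
# (hypothetical field names inferred from the constructor calls):
class Extraction:
    def __init__(self, template: str, tokens: list, arguments: dict):
        self.template = template
        self.tokens = tokens
        self.arguments = arguments


class Output:
    def __init__(self, document: str, extraction: 'Extraction'):
        self.document = document
        self.extraction = extraction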