class JSONService:
    """JSON-RPC client: POSTs encoded requests to a URL asynchronously.

    If no handler is supplied, calls are sent as fire-and-forget
    notifications (id = None); with a handler, a request id is allocated
    and the handler receives the eventual response.
    """

    def __init__(self, url, handler=None):
        self.parser = JSONParser()
        self.url = url
        self.handler = handler

    def callMethod(self, method, params, handler=None):
        """Invoke `method` with `params`.

        Returns 1/-1 for a notification (sent / send failed), or the
        request id / -1 for a handled request.
        """
        # `is None`, not `== None`: identity test for the None singleton.
        if handler is None:
            handler = self.handler
        if handler is None:
            return self.__sendNotify(method, params)
        return self.__sendRequest(method, params, handler)

    def onCompletion(self):
        # Callback target for notification posts; nothing to do.
        pass

    def __sendNotify(self, method, params):
        # JSON-RPC notification: id is None, no response is expected.
        msg = {"id": None, "method": method, "params": params}
        msg_data = self.parser.encode(msg)
        if not HTTPRequest().asyncPost(self.url, msg_data, self):
            return -1
        return 1

    def __sendRequest(self, method, params, handler):
        # Renamed from `id` to avoid shadowing the builtin.
        call_id = pygwt.getNextHashId()
        msg = {"id": call_id, "method": method, "params": params}
        msg_data = self.parser.encode(msg)
        request_info = JSONRequestInfo(call_id, method, handler)
        if not HTTPRequest().asyncPost(self.url, msg_data,
                                       JSONResponseTextHandler(request_info)):
            return -1
        return call_id
def test_if_can_get_a_dict(self):
    """get_dict() returns a nested mapping addressable by its inner keys."""
    path = './JSONParser/nested.json'
    json_parser = JSONParser(path)
    json_parser.parse()
    dict_retrieved = json_parser.get_dict('breakhis_label_to_integer')
    item = dict_retrieved['benign_tubular_adenoma']
    # assertEqual's msg is printed on FAILURE; the original message claimed
    # success (and misspelled it), which would mislead when the test breaks.
    self.assertEqual(item, 4,
                     "Expected 'benign_tubular_adenoma' to map to 4")
def test_if_item_can_be_retrieved(self):
    """get_item() retrieves a child value through a parent key."""
    path = './JSONParser/test.json'
    json_parser = JSONParser(path)
    json_parser.parse()
    item = json_parser.get_item('parent', 'child')
    # Failure message describes the expectation, not a false success claim.
    self.assertEqual(item, 'item',
                     "Expected get_item('parent', 'child') to return 'item'")
def traducir(json_text):
    """Parse `json_text` with the ANTLR JSON grammar and return the parse tree.

    Syntax errors are reported through MyErrorListener (the default console
    listeners are replaced).
    """
    # Renamed from `input`, which shadowed the builtin.
    char_stream = InputStream(json_text)
    lexer = JSONLexer(char_stream)
    token_stream = CommonTokenStream(lexer)
    parser = JSONParser(token_stream)
    parser._listeners = [MyErrorListener()]
    # Original built the tree and discarded it; returning it is backward
    # compatible and lets callers actually use the result.
    return parser.json()
def main():
    """Read JSON text from stdin, parse it, and walk it with CustomVisitor."""
    char_stream = InputStream(sys.stdin.read())
    token_stream = CommonTokenStream(JSONLexer(char_stream))
    json_parser = JSONParser(token_stream)
    parse_tree = json_parser.json()
    CustomVisitor().visit(parse_tree)
def parse_result_json(input_magazines, complete_list_magazines, keywords=None, keyWordsFreqs=None):
    """Delegate result parsing to a fresh JSONParser instance."""
    json_parser = JSONParser()
    return json_parser.parse(
        input_magazines,
        complete_list_magazines,
        keywords,
        keyWordsFreqs,
    )
class JSONService:
    """Asynchronous JSON-RPC client bound to a single endpoint URL.

    A call without a handler is sent as a notification (no reply expected);
    a call with a handler is sent as a request carrying a fresh id.
    """

    def __init__(self, url, handler=None):
        self.parser = JSONParser()
        self.url = url
        self.handler = handler

    def callMethod(self, method, params, handler=None):
        """Send `method(params)`; returns 1/-1 (notify) or request id/-1."""
        # Identity comparison with None, per PEP 8 (was `== None`).
        if handler is None:
            handler = self.handler
        if handler is None:
            return self.__sendNotify(method, params)
        return self.__sendRequest(method, params, handler)

    def onCompletion(self):
        # Completion callback for notifications; intentionally a no-op.
        pass

    def __sendNotify(self, method, params):
        # id None marks a JSON-RPC notification.
        msg = {"id": None, "method": method, "params": params}
        msg_data = self.parser.encode(msg)
        if not HTTPRequest().asyncPost(self.url, msg_data, self):
            return -1
        return 1

    def __sendRequest(self, method, params, handler):
        # `call_id` instead of `id` (builtin shadowing).
        call_id = pygwt.getNextHashId()
        msg = {"id": call_id, "method": method, "params": params}
        msg_data = self.parser.encode(msg)
        request_info = JSONRequestInfo(call_id, method, handler)
        if not HTTPRequest().asyncPost(self.url, msg_data,
                                       JSONResponseTextHandler(request_info)):
            return -1
        return call_id
def test_if_can_work_with_nested_dict(self):
    """get_item() resolves a key inside a nested dictionary."""
    path = './JSONParser/nested.json'
    json_parser = JSONParser(path)
    json_parser.parse()
    item = json_parser.get_item('breakhis_label_to_integer',
                                'benign_tubular_adenoma')
    # The msg argument is displayed on failure, so it must describe the
    # unmet expectation, not assert success.
    self.assertEqual(item, 4,
                     "Expected nested key 'benign_tubular_adenoma' to be 4")
def test_if_json_is_loaded(self):
    """Parsing an existing JSON file yields a non-None settings object."""
    json_parser = JSONParser('./JSONParser/test.json')
    json_parser.parse()
    setting = json_parser.get_setting()
    self.assertIsNotNone(setting, "There is a json file and \ have been loaded")
def parse_result_json(self, input_magazines, complete_list_magazines, targetUri, arpaName, stopwordlist, keywords=None, keyWordsFreqs=None):
    """Hand the result set off to a fresh JSONParser and return its output."""
    json_parser = JSONParser()
    # Debug traces kept exactly as in the original.
    print("ANNO: JSON PARSERIIN")
    print(stopwordlist)
    return json_parser.parse(
        input_magazines,
        complete_list_magazines,
        targetUri,
        arpaName,
        stopwordlist,
        keywords,
        keyWordsFreqs,
    )
def main(args):
    """Parse the JSON file named in `args.filename` and print it as XML.

    With --iterative the MyXmlEmitter listener is used instead of XmlEmitter.
    """
    lexer = JSONLexer(FileStream(args.filename))
    parser = JSONParser(CommonTokenStream(lexer))
    tree = parser.json()
    emitter_cls = MyXmlEmitter if args.iterative else XmlEmitter
    xml = emitter_cls()
    ParseTreeWalker().walk(xml, tree)
    print(xml.result())
def get_parser(config, text_format=None, **kwargs):
    """Build a JSONParser whose column order follows the numeric fieldConfig indices.

    config["fieldConfig"] maps stringified integer indices to per-field
    settings; entries carrying a "path" contribute a column at that index.

    Fix: the original relied on dict iteration order matching the numeric
    index — `list.insert(i, x)` past the current end silently appends, so
    out-of-order keys scrambled the columns.  Sorting the keys numerically
    yields the intended order and is identical for already-ordered configs.
    """
    field_config = config["fieldConfig"]
    column_paths = [
        field_config[index]["path"]
        for index in sorted(field_config, key=int)
        if "path" in field_config[index]
    ]
    return JSONParser(column_paths, **kwargs)
class JSONService:
    def __init__(self, url, handler=None):
        """Create a JSON remote service object.

        The url is the URL that will receive POST data with the JSON
        request.  See the JSON-RPC spec for more information.

        The handler object should implement
        onRemoteResponse(value, requestInfo) to accept the return value
        of the remote method, and onRemoteError(code, message, requestInfo)
        to handle errors.
        """
        self.parser = JSONParser()
        self.url = url
        self.handler = handler

    def callMethod(self, method, params, handler=None):
        """Send `method(params)`; returns 1/-1 (notify) or request id/-1."""
        # `is None` identity tests replace the original `== None`.
        if handler is None:
            handler = self.handler
        if handler is None:
            return self.__sendNotify(method, params)
        return self.__sendRequest(method, params, handler)

    def onCompletion(self):
        # No-op completion callback for notification posts.
        pass

    def __sendNotify(self, method, params):
        # JSON-RPC notification: id None, fire-and-forget.
        msg = {"id": None, "method": method, "params": params}
        msg_data = self.parser.encode(msg)
        if not HTTPRequest().asyncPost(self.url, msg_data, self):
            return -1
        return 1

    def __sendRequest(self, method, params, handler):
        # `call_id` avoids shadowing the `id` builtin.
        call_id = pygwt.getNextHashId()
        msg = {"id": call_id, "method": method, "params": params}
        msg_data = self.parser.encode(msg)
        request_info = JSONRequestInfo(call_id, method, handler)
        if not HTTPRequest().asyncPost(self.url, msg_data,
                                       JSONResponseTextHandler(request_info)):
            return -1
        return call_id
def __init__(self, url, handler=None):
    """Create a JSON remote service object.

    `url` receives the POSTed JSON-RPC request body (see the JSON-RPC
    spec).  `handler`, if given, must provide
    onRemoteResponse(value, requestInfo) for results and
    onRemoteError(code, message, requestInfo) for failures.
    """
    self.url = url
    self.handler = handler
    self.parser = JSONParser()
def onCompletion(self, json_str):
    """Decode a JSON-RPC response string and dispatch to the request's handler.

    An undecodable/empty response or a populated "error" member is routed
    to onRemoteError; otherwise the "result" goes to onRemoteResponse.
    """
    response = JSONParser().decodeAsObject(json_str)
    handler = self.request.handler
    if not response:
        handler.onRemoteError(
            0, "Server Error or Invalid Response", self.request)
        return
    error = response["error"]
    if error:
        handler.onRemoteError(error["code"], error["message"], self.request)
    else:
        handler.onRemoteResponse(response["result"], self.request)
def draw(json_parser, if_print):
    """Render the parsed drawing: background rectangle plus every shape.

    The figure is sized in pixels via figsize at dpi=1.  When `if_print`
    is 1 the result is saved to argv[3]; otherwise it is shown on screen.
    """
    palette = json_parser.get_palette()
    screen = json_parser.get_screen()
    shapes = json_parser.get_shapes()
    width, height = screen["width"], screen["height"]
    fig = plt.figure(figsize=(width, height), dpi=1)
    background = plt.Rectangle(
        (0, 0), width, height,
        facecolor=JSONParser.return_color(palette, screen["bg_color"]),
        zorder=0,
    )
    fig.artists.append(background)
    for shape in shapes:
        shape.add_shape(fig)
    if if_print == 1:
        fig.savefig(argv[3])
        plt.close(fig)
    else:
        plt.show()
def main():
    """CLI entry point: parse the JSON file named in argv[1] and draw it.

    Usage: prog input.json [-o output] — with "-o", the result is saved
    to argv[3] instead of shown.  Returns 1 on any error.
    """
    if_to_print = 0
    if not exists(str(argv[1])):
        print("Given file doesnt exist.")
        return 1
    if len(argv) == 4 and argv[2] == "-o":
        if_to_print = 1
    elif len(argv) != 2:
        print("Bad amount of arguments.")
        return 1
    # `with` guarantees the handle is closed; the original leaked it and
    # concatenated line-by-line where a single read() suffices.
    with open(str(argv[1]), "r") as source:
        all_lines = source.read()
    try:
        json_my = JSONParser(loads(all_lines))
    except ValueError:
        print("Bad JSON", exc_info())
        return 1
    draw(json_my, if_to_print)
def main():
    """Scan a folder of tweet-JSON dumps and record reply-thread statistics.

    Each tweet line in every .json file is re-fetched through the Twitter
    helper, its chain of reply ancestors measured, and results appended to
    four report files (stats / summary / full / frequency).

    NOTE(review): the source arrived collapsed onto single lines; the
    nesting below is a reconstruction of the apparent control flow.
    """
    folderIO = FolderIO()
    # Hard-coded input folder; non-recursive scan for .json files.
    files = folderIO.get_files("D:/DLSU/Masters/MS Thesis/data-2016/02/", False, ".json")
    print("Found {} files.".format(len(files)))
    # Report files opened in append mode so repeated runs accumulate.
    file_stats = open('results_stats.txt', 'a')
    file_summary = open('results_summary.txt', 'a')
    file_full = open('results_full.txt', 'a')
    file_frequency = open('results_frequency.txt', 'a')
    max_count = 0        # longest reply thread seen so far (across files)
    max_tweet_id = None  # id of the tweet heading that thread
    max_node = None      # only used by the commented-out tree variant below
    json_parser = JSONParser()
    for file in files:
        print("\nProcessing {}".format(file))
        # Append date-time to the result files
        file_stats.write('\n{}-{}\n'.format(datetime.datetime.now(), file.name))
        file_summary.write('\n{}-{}\n'.format(datetime.datetime.now(), file.name))
        file_full.write('\n{}-{}\n'.format(datetime.datetime.now(), file.name))
        file_frequency.write('\n{}-{}\n'.format(datetime.datetime.now(), file.name))
        thread_length_freq = {}      # thread length -> number of such threads
        processed_tweet_ids = set()  # avoid counting the same tweet twice
        tweet_helper = TweetHelper()
        api = tweet_helper.api  # NOTE(review): `api` appears unused below
        lines_processed = 0
        tweets_processed = 0
        for tweet_json in json_parser.parse_file_into_json_generator(file):
            curr_tweet = tweet_helper.retrieve_tweet(tweet_json["id"])
            lines_processed += 1
            if curr_tweet is not None and curr_tweet.id not in processed_tweet_ids:
                processed_tweet_ids.add(curr_tweet.id)
                # Length of the reply-ancestor chain is the thread score.
                curr_reply_thread = tweet_helper.list_reply_ancestors(curr_tweet)
                curr_reply_thread_count = len(curr_reply_thread)
                thread_length_freq[curr_reply_thread_count] = thread_length_freq.get(curr_reply_thread_count, 0) + 1
                if curr_reply_thread_count > max_count:
                    # New record-length thread: log its tweet ids to the summary.
                    max_count = curr_reply_thread_count
                    max_tweet_id = curr_tweet.id
                    file_summary.write("{}:\n{}\n\n".format(max_count, "\n".join(str(reply.id) for reply in curr_reply_thread)))
                    file_summary.flush()
                if curr_reply_thread_count >= 3:
                    # Threads of length 3+ get author/text/id dumped in full.
                    file_full.write("{}:\n{}\n\n".format(curr_reply_thread_count, "\n".join(str(("@"+reply.user.screen_name + ": "+str(reply.text)).encode("utf-8"))+"\n"+str(reply.id)+"\n" for reply in curr_reply_thread)))
                    file_full.flush()
                tweets_processed += 1
            # Unused code for constructing reply tree
            # curr_reply_thread_tree = tweet_helper.construct_reply_thread(curr_tweet)
            # curr_reply_thread_count = count_nodes(curr_reply_thread_tree)
            # print("{} with {} nodes\n".format(curr_reply_thread_tree.data.id, curr_reply_thread_count))
            # print("{}\n".format(curr_reply_thread_tree.__str__()))
            # if curr_reply_thread_count > max_count:
            #     max_count = curr_reply_thread_count
            #     max_node = curr_reply_thread_tree
            #     max_tweet_id = max_node.data.id
            #     file_summary.write("{} with {} nodes\n".format(max_tweet_id, max_count))
            #     file_full.write("{}\n".format(max_node.__str__()))
            #     print("{} with {} nodes\n".format(max_tweet_id, max_count))
            #     print("{}\n".format(max_node.__str__()))
            # Progress report every 10 input lines.
            if lines_processed % 10 == 0:
                print("Processed {} lines now with {} tweets".format(lines_processed, tweets_processed))
                file_stats.write('{} lines with {} successfully processed tweets\n'.format(lines_processed, tweets_processed))
                file_stats.flush()
        # Write reply thread length frequency counts to the results_frequency file
        for count, frequency in sorted(thread_length_freq.items()):
            file_frequency.write('{} - {}\n'.format(count, frequency))
            file_frequency.flush()
def __init__(self, split_ratio, dataframe):
    """Record the (test, train, validation) proportions and the dataframe,
    then load the label-to-integer mapping from setting.json."""
    self.test_prop, self.train_prop, self.validation_prop = split_ratio
    self.dataframe = dataframe
    parser = JSONParser.JSONParser('setting.json')
    parser.parse()
    self.json_parser = parser
    self.label_to_int_dict = parser.get_dict('breakhis_label_to_integer')
def __init__(self, url, handler=None):
    """Remember the endpoint URL, the optional default handler, and a codec."""
    self.url = url
    self.handler = handler
    self.parser = JSONParser()
def test_JSONParser_constructible(self):
    """Constructing a JSONParser from a path yields a non-None object."""
    json_parser = JSONParser('c:/dummy_path/')
    self.assertIsNotNone(json_parser)
self.setXML(ctx, self.getXML(ctx.array())) def exitEmptyArray(self, ctx: JSONParser.EmptyArrayContext): self.setXML(ctx, "") def exitJson(self, ctx: JSONParser.JsonContext): self.setXML(ctx, self.getXML(ctx.getChild(0))) if __name__ == '__main__': if len(sys.argv) > 1: input_stream = FileStream(sys.argv[1]) else: input_stream = InputStream(sys.stdin.read()) lexer = JSONLexer(input_stream) token_stream = CommonTokenStream(lexer) parser = JSONParser(token_stream) tree = parser.json() lisp_tree_str = tree.toStringTree(recog=parser) print(lisp_tree_str) # listener print("Start Walking...") listener = XmlEmitter() walker = ParseTreeWalker() walker.walk(listener, tree) print(listener.getXML(tree))
def __init__(self, url, handler=None):
    """Bind this service to `url`, keeping an encoder and default handler."""
    self.parser = JSONParser()
    self.handler = handler
    self.url = url
('{"a":True}', 1), ('{"a":Null}', 1), ('{"a":foobar}', 2), ("{'a':1}", 3), ('{1:1}', 2), ('{true:1}', 2), ('{"a":{}', 2), ('{"a":-}', 1), ('{"a":[,]}', 2), ('{"a":.1}', 1), ('{"a":+123}', 2), ('{"a":"""}', 1), ('{"a":"\\"}', 1), ] a1 = JSONParser() a2 = JSONParser() a3 = JSONParser() total = 0 expect = 0 errors = [] for s, score in chain(json_ok, json_ex): expect += score try: dst = json.loads(s) except Exception: dst = Exception try: a1.loads(s)