def test_parse_index_row_returns_dict(self):
    """parse_index_row should produce a dict for a sample row element."""
    p = Parser()
    elem = make_tree(SAMPLE_ROW)
    result = p.parse_index_row(elem)
    # assertIsInstance is the idiomatic unittest type check (clearer failure
    # message than comparing type() objects with assertEqual).
    self.assertIsInstance(result, dict)
def test_add_rule(self):
    """A parser constructed with two rules holds three after add_rule."""
    pattern = r"^[-+]?\d*\.\d+$"
    first = RegexCriterion(pattern)
    second = RegexCriterion(pattern)
    rule = ParserRule(criteria=[first, second], allow_partial=False)
    parser = Parser(rules=[rule, rule])
    parser.add_rule(rule)
    assert len(parser.rules) == 3
def test_numbers(self):
    """Single INT and FLOAT tokens parse into the matching literal nodes."""
    int_node = Parser([Token(TokenType.INT, 4)]).parse()
    self.assertEqual(int_node, IntNode(4))
    float_node = Parser([Token(TokenType.FLOAT, 3.0)]).parse()
    self.assertEqual(float_node, FloatNode(3.0))
def handle(app_config):
    """Render the view page for the slug given in the ':' query parameter.

    Returns a meta-refresh redirect snippet when no slug is supplied and a
    plain error string for malformed slugs; otherwise compiles the page
    tree plus note ASTs and renders 'view.html'.
    """
    slug = request.args.get(':')
    if not slug:
        # No page requested: client-side meta-refresh to the home page.
        return '<meta http-equiv="refresh" content="2; url=?:=home"> redirect'
    if not is_valid_slug(slug):
        return 'Invalid page ID'
    with DbTree(app_config['db_uri']) as db:
        page_info = db.get_page_info(slug)
    tree_html = compile_tree(page_info.tree)
    parser = Parser(slug)
    # Parse every (string, content_id) pair of the page into an AST.
    ast_list = [parser.parse_string(string, content_id)
                for string, content_id in page_info.content_pair_iter()]
    with DbTitle(app_config['db_uri']) as db:
        # parser.acc accumulates titles during parsing; persist them.
        db.put_titles_in(parser.acc)
    notes_html_list = (compile_notes(cons, parser.acc) for cons in ast_list)
    # use the "directory" part only
    page_info.path.pop()
    return render_template(
        'view.html',
        title=page_info.title,
        nav_edit=url_for('edit') + slug_to_link(slug),
        unlisted=page_info.unlisted,
        directory_of_page=page_info.path,
        prev_article=page_info.prev,
        next_article=page_info.next,
        mtime_str=page_info.mtime_str,
        sidebar_html=tree_html,
        notes_html_list=notes_html_list
    )
def main():
    """Menu driver: parse a sequence (1) or a PIF (2) with an LL(1) parser."""
    print_menu()
    cmd = input(">")
    if cmd == "1":
        parser = Parser("gr1.txt")
        seq = read_seq("seq.txt")
        parser.create_the_nightmare_table()
        parsing = parser.parse(seq, ["eps", "S"])
        if len(parsing) == 0:
            print("amu ii bai")
        elif isinstance(parsing[0], str):
            print("amu ii bai si incepe la tokenul:")
            # BUG FIX: list.reverse() reverses in place and returns None, so
            # the original print(parsing.reverse()) always printed None.
            # Reverse first, then print — mirroring the cmd == "2" branch.
            parsing.reverse()
            print(parsing)
        else:
            tree = ParseTree(parser.ll1, parsing, parser.productions)
            print(str(tree))
    if cmd == "2":
        parser = Parser("grammar-ioana.txt")
        out = read_pif("PIFU_BUN2.txt")
        # print(out)
        parser.create_the_nightmare_table()
        parsing = parser.parse(out, ["eps", "START"])
        print(parsing)
        if len(parsing) == 0:
            print("amu ii bai")
        elif isinstance(parsing[0], str):
            print("amu ii bai si incepe la tokenul:")
            parsing.reverse()
            print(parsing)
        else:
            tree = ParseTree(parser.ll1, parsing, parser.productions)
            print(str(tree))
def _parse_arguments(self, args):
    """Parse the (hours, date, project) triple from positional arguments.

    :param args: sequence whose first three items are the raw hour, date
        and project strings.
    :returns: tuple of the three parsed values.
    :raises BadArgumentException: when any field fails format validation.
    """
    try:
        hours = Parser.hours(args[0])
        date = Parser.date(args[1])
        project = Parser.project(args[2])
        return hours, date, project
    except parsing.BadFormatException as e:
        # Chain the low-level parsing error so its context is preserved
        # in tracebacks (PEP 3134).
        raise BadArgumentException(e.message) from e
def test_reparsing(self):
    """Parsing the string form of a parsed expression round-trips."""
    first_pass = Parser(Tokenizer.tokenize('+2+2')).parse()
    rendered = str(first_pass)
    second_pass = Parser(Tokenizer.tokenize(rendered)).parse()
    self.assertEqual(str(second_pass), str(first_pass))
def test_parse_index_table_returns_all_rows(self):
    """All ten rows inside the index table must be returned."""
    row = '<tr class="tCenter hl-tr"></tr>'
    html = '<table id="tor-tbl">' + row * 10 + '</table>'
    parsed_rows = Parser().parse_index_table(html)
    self.assertEqual(len(parsed_rows), 10)
def test_5():
    """A malformed regexp ('=' inside a group) should trigger ParseError."""
    try:
        broken = Parser(Scanner("01('=34567)"))
        broken.parse()
    except ParseError:
        print("test 5: Incorrect regexp")
def test_transformation_failure(self):
    """A substitution given a non-matching pattern must raise."""
    template = Parser(Tokenizer.tokenize('x+y')).parse()
    transformation = ExpressionSubstitution(template, template)
    target = Parser(Tokenizer.tokenize('a + b')).parse()
    pattern = SubstitutionPattern({'x': 'xyz'})
    self.assertRaises(Exception, transformation.transform, [target, pattern])
def test_equation_cancellation_is_applicable(self):
    """PLUS/MINUS cancellation applies to 'x + 4 = y' but not its flip."""
    equation = Equation(
        Parser(Tokenizer.tokenize('x + 4')).parse(),
        Parser(Tokenizer.tokenize('y')).parse())
    cancellation = EquationCancellation(OperationType.PLUS(), OperationType.MINUS())
    self.assertTrue(cancellation.is_applicable_to(equation))
    self.assertFalse(cancellation.is_applicable_to(equation.flip()))
def process_event(self, event: dict, active_ids: list):
    """Process an event to check for shield power pool usage"""
    category = Parser.get_event_category(event, active_ids)
    if event["self"] is True:
        # Only own engine events are recorded; other self events are ignored.
        if category == "engine":
            self._internal_q.put(("Engine", event["time"]))
        return
    if Parser.compare_ids(event["target"], active_ids) is False:
        return
    if "Damage" not in event["effect"]:
        return
    # event: Damage dealt to self
    self._internal_q.put(("Shield", event["time"]))
def test_equation_cancellation_with_negative(self):
    """Cancellation still applies when the added term is negative."""
    equation = Equation(
        Parser(Tokenizer.tokenize('x + -4')).parse(),
        Parser(Tokenizer.tokenize('y')).parse())
    cancellation = EquationCancellation(OperationType.PLUS(), OperationType.MINUS())
    self.assertTrue(cancellation.is_applicable_to(equation))
    verify(str(cancellation.apply(equation)), self.reporter)
def calcul(eq, var_list):
    """Tokenize and evaluate *eq*; on failure return the exception object."""
    try:
        stripped = eq.replace(" ", "")
        tokens = tokenize(stripped, var_list)
        evaluator = Parser(tokens, var_list)
        return evaluator.eval()
    except Exception as error:
        # Deliberately hands the exception back instead of raising.
        return error
def test_equation_cancellation(self):
    """TIMES/DIVIDE cancellation applies to and rewrites 'x * 4 = y'."""
    equation = Equation(
        Parser(Tokenizer.tokenize('x * 4')).parse(),
        Parser(Tokenizer.tokenize('y')).parse())
    cancellation = EquationCancellation(
        OperationType.TIMES(), OperationType.DIVIDE())
    self.assertTrue(cancellation.is_applicable_to(equation))
    verify(str(cancellation.apply(equation)), self.reporter)
def send_recent_files(self, window: tk.Tk):
    """Send only the files of today"""
    # Hoisted: the original re-evaluated datetime.now().date() on every
    # iteration, which wastes work and could change mid-loop at midnight.
    today = datetime.now().date()
    result = list()
    for file in Parser.gsf_combatlogs():
        file_date = Parser.parse_filename(file)
        if file_date is None:
            # Filename did not contain a parseable date
            continue
        if file_date.date() == today:
            result.append(file)
    self.send_files(window, result)
def test_6():
    """DFA built from '(z*)' must reject strings made of other symbols."""
    regexp_parser = Parser(Scanner("(z*)"))
    regexp_parser.parse()
    nfa = NFA_Constructor().construct_nfa(regexp_parser.token_list)
    dfa = DFA_Constructor().construct_dfa(nfa)
    for candidate in ("a", "4", "26546", "Owskemg"):
        assert dfa.walk_dfa(candidate) == False
def test_identity_transformation(self):
    """Substituting x -> x maps 'abc' onto itself."""
    identity = Parser(Tokenizer.tokenize('x')).parse()
    transformation = ExpressionSubstitution(identity, identity)
    target = Parser(Tokenizer.tokenize('abc')).parse()
    pattern = SubstitutionPattern({'x': 'abc'})
    verify(
        '{} -> {}'.format(
            'abc', transformation.transform(target, pattern)),
        self.reporter)
def test_4():
    """DFA for 'ytut(a|g)*lk' accepts 'ytutalk' and rejects prefixes."""
    regexp_parser = Parser(Scanner("ytut(a|g)*lk"))
    regexp_parser.parse()
    dfa = DFA_Constructor().construct_dfa(
        NFA_Constructor().construct_nfa(regexp_parser.token_list))
    assert dfa.walk_dfa("a") == False
    assert dfa.walk_dfa("ytu") == False
    assert dfa.walk_dfa("ytutalk") == True
def test_all_equation_substitutions_addition_by_same(self):
    """'a + a -> 2*a' rewrites the self-addition on the right-hand side."""
    equation = Parser.parse_equation('3.0 = (x)+(x)')
    substitution = ExpressionSubstitution(
        Parser.parse_expression('a + a'),
        Parser.parse_expression('2*a'))
    result = Transformation.apply_all_substitution_transformations(
        substitution).apply(equation)
    verify(str(result), self.reporter)
def test_all_equation_substitutions_simple(self):
    """'a - a -> 0' collapses the self-difference on the right-hand side."""
    equation = Parser.parse_equation('y = (x)-(x)')
    substitution = ExpressionSubstitution(
        Parser.parse_expression('a - a'),
        Parser.parse_expression('0'))
    result = Transformation.apply_all_substitution_transformations(
        substitution).apply(equation)
    verify(str(result), self.reporter)
def test_1():
    """DFA for 'a|b*' accepts a single 'a' or any run of b's only."""
    regexp_parser = Parser(Scanner("a|b*"))
    regexp_parser.parse()
    dfa = DFA_Constructor().construct_dfa(
        NFA_Constructor().construct_nfa(regexp_parser.token_list))
    expectations = {"ab": False, "a": True, "aa": False, "ba": False, "bbbb": True}
    for word, accepted in expectations.items():
        assert dfa.walk_dfa(word) == accepted
def test_3():
    """DFA for '(cd*|bha)*jk' accepts 'jk' and rejects stray symbols."""
    regexp_parser = Parser(Scanner("(cd*|bha)*jk"))
    regexp_parser.parse()
    dfa = DFA_Constructor().construct_dfa(
        NFA_Constructor().construct_nfa(regexp_parser.token_list))
    expectations = {"k": False, "mf": False, "jk": True, "e": False, "t": False}
    for word, accepted in expectations.items():
        assert dfa.walk_dfa(word) == accepted
def run(code):
    """Scan, parse, resolve and interpret *code*, stopping at the first
    stage that flags an error."""
    tokens = Scanner(code).scan_tokens()
    if error.had_error:
        return
    stmts = Parser(tokens).parse()
    if error.had_error:
        return
    resolver.Resolver(interp).resolve(stmts)
    if error.had_error:
        return
    interp.interpret(stmts)
def getAllRSS():
    """Return every stored feed as a JSON Response with permissive CORS
    headers.

    Feeds whose XML fails to convert are silently skipped.
    """
    resp = getAllDB()
    datas = newFetcher.fetchAll(resp)
    rssList = []
    for data in datas:
        rss = Parser.convertFromXML(data['xml'])
        # convertFromXML returns the sentinel False on parse failure;
        # identity comparison is the correct check for that sentinel.
        if rss is not False:
            rssList.append({
                'cid': data['cid'],
                'url': data['url'],
                'title': rss.title,
                'link': rss.link,
                'description': rss.desc,
                'itemList': rss.itemList
            })
    r = {'success': True, 'data': rssList}
    resp = Response(json.dumps(r))
    resp.headers['Content-Type'] = 'application/json'
    resp.headers['Access-Control-Allow-Origin'] = '*'  #! CORS
    resp.headers['Access-Control-Allow-Methods'] = 'GET'
    resp.headers[
        'Access-Control-Allow-Headers'] = 'Origin, X-Requested-With, Content-Type, Accept'
    return resp
def do_get(self, arg):
    """ Get an entry value and metadata from the current directory: - `get entry1`: get entry1 value """
    entry_name = arg
    # Build the remote path from the current path and directory name.
    if self.path == '':
        path = self.dir_name
    else:
        path = f'{self.path}/{self.dir_name}'
    r = self.caller.get_entry(path, entry_name)
    if r.status_code != 200:
        # Surface the server-side error message and bail out.
        self.logger.error(r.json()['message'])
        return
    entry = Entry(r.json()['body'])
    if entry.created_with_cli:
        # CLI-created entries are stored ciphered; decipher with the
        # derived private key before displaying.
        ciphered_nonce = entry.value
        plain_value = Parser.decipher_decode(ciphered_nonce, self.p_key_derived)
        Printer.print_entry_encrypted(entry, plain_value)
    else:
        self.logger.warning(
            'this entry has not been created with this CLI, the value is transferred to the '
            'remote server in plain text')
        Printer.print_entry(entry)
def webapp_login(): global caller # logging in if request.method == 'POST': host_param = request.form['host'] try: if not Caller.check_up(host_param): return render_template("login.html", msg='host is down', host=host_param) private_key_df = request.form['private_key'] private_key_df_derived = Parser.derive(private_key_df) public_key_df = gen_public_key(private_key_df_derived) caller = Caller(host_param, public_key_df) return redirect(url_for('webapp.webapp_main', path='', dirname='')) except TypeError as te: return render_template("login.html", msg=str(te), host=host_param) elif request.method == 'GET': if caller is not None: if caller.get_session().status_code == 200: return redirect(url_for('webapp.webapp_main', path='', dirname='')) return render_template("login.html")
def test_full_exp(self):
    """-1*2%2 + (3.0/10) - 4//1**3 parses into the expected nested tree."""
    token_spec = [
        (TokenType.MINUS,), (TokenType.INT, 1), (TokenType.MULTIPLY,),
        (TokenType.INT, 2), (TokenType.MOD,), (TokenType.INT, 2),
        (TokenType.PLUS,), (TokenType.LPAREN,), (TokenType.FLOAT, 3.0),
        (TokenType.DIVIDE,), (TokenType.INT, 10), (TokenType.RPAREN,),
        (TokenType.MINUS,), (TokenType.INT, 4), (TokenType.INTDIVIDE,),
        (TokenType.INT, 1), (TokenType.EXPONENT,), (TokenType.INT, 3),
    ]
    node = Parser([Token(*spec) for spec in token_spec]).parse()
    expected = SubtractNode(
        AddNode(
            ModNode(MultiplyNode(NegateNode(IntNode(1)), IntNode(2)),
                    IntNode(2)),
            DivideNode(FloatNode(3.0), IntNode(10))),
        IntegerDivideNode(IntNode(4), ExponentNode(IntNode(1), IntNode(3))))
    self.assertEqual(node, expected)
def change_x_to_num(eq, var_list, string):
    """Substitute the assignments in *string* (e.g. 'x=1,y=2') into *eq*
    and evaluate it.

    Returns the evaluated result, or a string starting with '*Error*' on
    any failure.
    """
    try:
        eq = eq.replace(" ", "")
        eq_list = tokenize(eq, var_list)
        variable = []
        value = []
        string = string.replace(" ", "")
        string = string.split(',')
        # Validate and collect each 'name=number' assignment.
        for n in range(len(string)):
            temp = string[n].split('=')
            if temp[0] not in var_list:
                raise Exception('%s is not in current variable list' % (temp[0]))
            if len(temp) != 2 or not is_digit(temp[1]):
                raise Exception('Wrong input')
            variable.append(temp[0])
            value.append(temp[1])
        # Rebuild the token list, replacing each variable token with its
        # numeric value.
        for n in range(len(variable)):
            eq_temp = []
            for m in range(len(eq_list)):
                if eq_list[m] == variable[n]:
                    eq_temp.append(float(value[n]))
                else:
                    eq_temp.append(eq_list[m])
            eq_list = eq_temp
        if eq_list[0] == 'sig':
            # Special case: sigma (summation) expressions are delegated to
            # sigma() with the inner expression re-joined as a string.
            # NOTE(review): the 4 / -6 / -5 / -3 offsets encode the sigma
            # token layout — confirm against tokenize()'s output format.
            sig_eq = ''
            for n in range(4, len(eq_list)-6):
                sig_eq += str(eq_list[n])
            ans, _, _ = sigma(eq_list[2], sig_eq, eq_list[-5], eq_list[-3], var_list)
            return ans[0]
        E = Parser(eq_list, var_list)
        ans = E.eval()
        return from_list_to_str('', ans)
    except Exception as e:
        return '*Error* ' + str(e)
def test_single_variable_solver_handles_distribution(self):
    """Solver must distribute over (1 - p) to isolate p."""
    solved = Solver.single_variable(
        Parser.parse_equation('p * -25 + (1 - p) * 5 = 0'),
        'p', print_out=True, max_iterations=20)
    verify(str(solved), self.reporter)
def test_all_substitutions_same_variable(self):
    """Enumerate every way 'a + a -> 2*a' matches inside 'x + x + x'."""
    expression = Parser(Tokenizer.tokenize('x + x + x')).parse()
    substitution = ExpressionSubstitution(
        Parser(Tokenizer.tokenize('a + a')).parse(),
        Parser(Tokenizer.tokenize('2 * a')).parse())
    lines = list()
    for pattern, result in substitution.get_all_substitutions(expression):
        bindings = ', '.join(
            '{} : {}'.format(key, pattern[key]) for key in sorted(pattern.keys()))
        lines.append('{' + bindings + '} => ' + str(result))
    verify('\n'.join(lines), self.reporter)
def test_single_variable_solver_evaluates(self):
    """Solver reduces the constant arithmetic while isolating 'a'."""
    solved = Solver.single_variable(
        Parser.parse_equation('0 = 0.5*a + 3*4'),
        'a', print_out=True, max_iterations=5)
    verify(str(solved), self.reporter)
def test_single_variable_solver_handles_same_variable(self):
    """'x + x = 3' solves to x = 1.5."""
    solved = Solver.single_variable(
        Parser.parse_equation('x + x = 3'),
        'x', print_out=True, max_iterations=5)
    self.assertEqual(str(solved), 'x = 1.5')
def test_single_variable_solver_handles_same_variable_complex(self):
    """'2*x - 7 = 4*x + 5' solves to x = -6.0."""
    solved = Solver.single_variable(
        Parser.parse_equation('2*x - 7 = 4*x + 5'),
        'x', print_out=True, max_iterations=20)
    self.assertEqual(str(solved), 'x = -6.0')
def process_event(self, event: dict, active_id: str):
    """Process a given event dictionary for display"""
    if event is None:
        return
    category = Parser.get_event_category(event, active_id)
    widget: Gtk.Grid = self.generate_widget(event, category)
    if widget is None:
        # Event does not produce a displayable widget
        return
    # Stamp the event's time-of-day onto today's date for queue ordering
    stamp = datetime.combine(datetime.now().date(), event["time"].time())
    self._queue.put((stamp, widget))
class Compiler():
    """Compiles source text by parsing it and writing the AST out."""

    def __init__(self):
        # Parser instance reused across compile() calls.
        self.parser = Parser()

    def compile(self, text):
        """Parse *text*, emit it through a Writer, and print the output."""
        ast = self.parser.parse(text)
        out = Writer()
        ast.write(out)
        # BUG FIX: 'print out.str' is Python 2 statement syntax and a
        # SyntaxError on Python 3; use the print() function instead.
        print(out.str)
def send_files(self, window: tk.Tk, files: list = None):
    """
    Send the match data found in CombatLogs in the CombatLogs folder to
    the Discord Bot Server. For the actual sending, the send_file
    function is used to send each individual file. If Discord Sharing is
    not enabled, the function returns immediately.

    This function is meant to be executed during start-up of the GSF
    Parser, while still at the SplashScreen. Running it in a separate
    Thread would be a bad idea given data access of the
    MainWindow.characters_frame, and thus this function interrupts the
    mainloop for at least a few seconds — longer when files must be
    synchronized.

    :param window: MainWindow instance (its splash is used as parent)
    :param files: explicit list of files to send; defaults to all
        CombatLogs found by Parser.gsf_combatlogs()
    """
    # A 'development' marker file disables sharing entirely.
    if os.path.exists("development"):
        return
    splash = DiscordSplash(window.splash if window.splash is not None else window)
    splash.update_state()
    if settings["sharing"]["enabled"] is False or self.validate_tag(settings["sharing"]["discord"]) is False:
        return
    files = list(Parser.gsf_combatlogs()) if files is None else files
    if len(self.db) == 0:
        # First-ever synchronization: warn the user about the duration.
        mb.showinfo("Notice", "This is the first time data is being synchronized with the Discord Bot Server. "
                              "This may take a while.")
    elif len(files) - len(self.db) > 10:
        mb.showinfo("Notice", "There are quite many files to synchronize. Please stand by.")
    print("[DiscordClient] Initiating sending of match data of {} CombatLogs".format(len(files)))
    for file_name in files:
        try:
            self.send_file(file_name, window)
        except Exception:
            mb.showerror("Error", "There was an error processing {}.".format(file_name))
        splash.update_state()
        # Abort after repeated failures to avoid hammering the server.
        if self.failed > 5:
            break
    splash.destroy()
    print("[DiscordClient] Done sending files.")
def test_index_nbytes_returns_nbytes(self):
    """The size cell's integer content is parsed as the byte count."""
    markup = '<td class="tor-size"><u>123456</u></td>'
    self.assertEqual(Parser().index_nbytes(make_tree(markup)), 123456)
def test_index_timestamp_returns_timestamp(self):
    """The second cell's <u> content is parsed as the timestamp."""
    markup = '<tr><td></td><td><u>123456</u></td></tr>'
    self.assertEqual(Parser().index_timestamp(make_tree(markup)), 123456)
def test_index_title_returns_title(self):
    """The anchor text inside the t-title cell is returned as the title."""
    markup = '<td class="t-title"><div class="t-title"><a>Blah</a></div></td>'
    self.assertEqual(Parser().index_title(make_tree(markup)), 'Blah')
def test_index_tid_returns_tid(self):
    """The data-topic_id attribute is parsed as the integer topic id."""
    markup = '<td class="t-title"><div class="t-title"><a data-topic_id="12345"></a></div></td>'
    self.assertEqual(Parser().index_tid(make_tree(markup)), 12345)
def __init__(self):
    # Create the Parser instance this object delegates parsing to.
    self.parser = Parser()
def send_file(self, file_name, window):
    """
    Send the data in a single file to the Discord Bot Server. This is
    done in a few steps.
    # TODO: Optimize amount of times the file is looped over
    # TODO: Split this into multiple shorter functions
    1. Retrieve basic information for this CombatLog that is required
       for sending it, including the date it was created and the player
       name.
    2. Check the requirement that the server for this character is
       known. This is only the case if the character name is unique for
       this system across all servers. If the server cannot reliably be
       determined, the CombatLog data cannot be sent.
    3. Check the files.db database in the temporary data directory if
       this file is in it. If it is not, this is the first time this
       file is processed.
    4. Parse the file, determining individual matches and the times
       they started at.
    5. Retrieve data from the character database (managed by the
       MainWindow.characters_frame in the :characters: attribute).
    6. If Discord sharing is enabled for the character, register the
       character with the Discord Bot Server (duplicate registrations
       are ignored server-side).
    7. Loop over the matches to send:
       7.1. Retrieve match-specific information (player ID format,
            results).
       7.2. Skip tutorial matches (single participant).
       7.3. Send non-personal match data if not already sent.
            # TODO: Extend this part for sending the map type
       7.4. Send personal match data if sharing is enabled and it has
            not already been sent.
    8. Update the files.db database with whether the sharing of data
       was successful. Only if *all* matches were successfully
       synchronized will the state be set to True. If the state is
       False, a new attempt will be made at some later point.
    9. Save the database to file to prevent loss of data if the user
       exits the process unexpectedly.
    :param file_name: Absolute path to the CombatLog to sync
    :param window: MainWindow instance of this GSF Parser
    """
    date = Parser.parse_filename(file_name)
    lines = Parser.read_file(file_name)
    player_name = Parser.get_player_name(lines)
    server = window.characters_frame.characters.get_server_for_character(player_name)
    basename = os.path.basename(file_name)
    # Actually send the file data to the server
    if date is None or server is None:
        # Date unparseable or server ambiguous: cannot sync this file.
        return
    print("[DiscordClient] Synchronizing file: {}".format(basename))
    if basename not in self.db:
        self.db[basename] = {"match": False, "char": False}
    match_s, char_s = self.db[basename]["match"], self.db[basename]["char"]
    player_id_list = Parser.get_player_id_list(lines)
    file_cube, matches, spawns = Parser.split_combatlog(lines, player_id_list)
    character = window.characters_frame.characters[(server, player_name)]
    character_enabled = character["Discord"]
    if character_enabled is True:
        server, name, faction = character["Server"], character["Name"], character["Faction"]
        self.send_character(server, faction, name)
    print("[DiscordClient] Character sharing {} for {} on {}".format(
        "enabled" if character_enabled else "disabled", player_name, server))
    # matches holds alternating start/end times; pair them up per match.
    for index, (start, end) in enumerate(zip(matches[::2], matches[1::2])):
        match = file_cube[index]
        id_fmt = Parser.get_id_format(match[0])
        # Combine the file's date with the match's time-of-day.
        start, end = map(lambda time: datetime.combine(date.date(), time.time()), (start, end))
        results = Parser.parse_match(match, player_id_list)
        abls, dmg_d, dmg_t, _, _, _, _, _, enemies, _, _, ships, _ = results
        if Parser.is_tutorial(match):
            # Tutorial matches have a single participant; skip them.
            continue
        if self.db[basename]["match"] is False:
            match_s = self.send_match_start(server, date, start, id_fmt) and match_s
            match_s = self.send_match_end(server, date, start, id_fmt, end) and match_s
        else:
            print("[DiscordClient] Ignored {}".format(basename))
        data = FileHandler.get_data_dictionary()
        spawn_dict = None
        # Keep the last spawn dictionary found for this match.
        for spawn in spawns[index]:
            result = FileHandler.get_spawn_dictionary(data, basename, start, spawn)
            if isinstance(result, dict):
                spawn_dict = result
        if isinstance(spawn_dict, dict):
            if "map" in spawn_dict and isinstance(spawn_dict["map"], tuple) and None not in spawn_dict["map"]:
                self.send_match_map(server, date, start, id_fmt, spawn_dict["map"])
            if "score" in spawn_dict and isinstance(spawn_dict["score"], float):
                self.send_match_score(server, date, start, id_fmt, character["Faction"], spawn_dict["score"])
        if character_enabled is True:
            if self.db[basename]["char"] is False:
                # Parse the file with results and send the results
                ship = ship_tiers[max(ships, key=ships.__getitem__)] if len(ships) != 0 else "Unknown"
                deaths = len(match) - 1
                char_s = self.send_result(
                    server, date, start, id_fmt, player_name, len(enemies), dmg_d, dmg_t, deaths, ship)
                print("[DiscordClient] {} to send character result for ({}, {})".format(
                    "Succeeded" if char_s is True else "Failed", server, player_name))
            else:
                print("[DiscordClient] Not sending character result because already sent.")
        else:
            print("[DiscordClient] Not sending character result because not enabled.")
    self.db[basename] = {"match": match_s, "char": char_s}
    self.save_database()
    "lt": "C_ARITHMETIC",
    "and": "C_ARITHMETIC",
    "or": "C_ARITHMETIC",
    "not": "C_ARITHMETIC",
}


def usage():
    # NOTE: Python 2 print statement — this module targets Python 2.
    print"""
python S1VM.py <source>
where <source> is a valid vm file or a folder containing vm files.
"""


if __name__=="__main__":
    # Translate one .vm file, or every .vm file in a directory, to ASM.
    translator=Translator()
    parser=Parser(instdict)
    outASM=[]
    try:
        givenpath = sys.argv[1]
    except IndexError:
        # No source argument given: show usage and quit.
        usage()
        exit()
    if os.path.isdir(givenpath):
        for file in os.listdir(givenpath):
            if file[-3:] == ".vm":
                outASM.append(translator.translate(parser.parse(os.path.join(givenpath, file))))
    elif os.path.isfile(givenpath) and givenpath[-3:] == ".vm":
        outASM.append(translator.translate(parser.parse(givenpath)))
Copyright (C) 2016-2018 RedFantom
"""
# Standard Library
import pickle
# Project Modules
from parsing import Parser

# Load the pickled realtime results database and write a plain-text
# summary of which data features were recorded per match.
with open("realtime.db", "rb") as fi:
    data = pickle.load(fi)
fo = open("output.txt", "w")
for file, file_data in data.items():
    if file is None or len(file) == 0:
        continue
    # NOTE(review): "%H:%m" formats hour:month — probably "%M" (minutes)
    # was intended; confirm before relying on this header.
    fo.write("# {}\n".format(Parser.parse_filename(file).strftime("%Y-%m-%d - %H:%m")))
    for match, match_data in file_data.items():
        match_features = list()
        for spawn, spawn_data in match_data.items():
            for key, key_data in spawn_data.items():
                if isinstance(key_data, dict):
                    # Dict-valued features count only when non-empty.
                    if len(key_data) > 0:
                        match_features.append(key)
                    continue
                if key_data is not None:
                    match_features.append(key)
        string = ", ".join(set(match_features)) if len(match_features) > 0 else "No data recorded"
        fo.write("{}: {}\n".format(match.strftime("%H:%M:%S"), string))
fo.close()