def main():
    """Entry point: show the menu and run the selected LL(1) parsing demo.

    Option "1" parses the token sequence in ``seq.txt`` with grammar
    ``gr1.txt``; option "2" parses a PIF file with ``grammar-ioana.txt``.
    Either prints an error message or the resulting parse tree.
    """
    print_menu()
    cmd = input(">")
    if cmd == "1":
        parser = Parser("gr1.txt")
        seq = read_seq("seq.txt")
        parser.create_the_nightmare_table()
        parsing = parser.parse(seq, ["eps", "S"])
        if len(parsing) == 0:
            print("amu ii bai")
        elif isinstance(parsing[0], str):
            # A string entry marks a parse error at a specific token.
            print("amu ii bai si incepe la tokenul:")
            # BUG FIX: list.reverse() mutates in place and returns None,
            # so the original printed None. Reverse first, then print
            # (matching the cmd == "2" branch below).
            parsing.reverse()
            print(parsing)
        else:
            tree = ParseTree(parser.ll1, parsing, parser.productions)
            print(str(tree))
    if cmd == "2":
        parser = Parser("grammar-ioana.txt")
        out = read_pif("PIFU_BUN2.txt")
        parser.create_the_nightmare_table()
        parsing = parser.parse(out, ["eps", "START"])
        print(parsing)
        if len(parsing) == 0:
            print("amu ii bai")
        elif isinstance(parsing[0], str):
            print("amu ii bai si incepe la tokenul:")
            parsing.reverse()
            print(parsing)
        else:
            tree = ParseTree(parser.ll1, parsing, parser.productions)
            print(str(tree))
def test_numbers(self):
    """A lone numeric token parses to the matching literal node."""
    int_result = Parser([Token(TokenType.INT, 4)]).parse()
    self.assertEqual(int_result, IntNode(4))

    float_result = Parser([Token(TokenType.FLOAT, 3.0)]).parse()
    self.assertEqual(float_result, FloatNode(3.0))
def test_reparsing(self):
    """Stringifying a parsed AST and re-parsing it yields the same tree."""
    original = Parser(Tokenizer.tokenize('+2+2')).parse()
    round_tripped = Parser(Tokenizer.tokenize(str(original))).parse()
    self.assertEqual(str(round_tripped), str(original))
def test_transformation_failure(self):
    """Transforming with a non-matching substitution pattern raises."""
    identity_expr = Parser(Tokenizer.tokenize('x+y')).parse()
    transformation = ExpressionSubstitution(identity_expr, identity_expr)
    instance = Parser(Tokenizer.tokenize('a + b')).parse()
    bad_pattern = SubstitutionPattern({'x': 'xyz'})
    self.assertRaises(Exception, transformation.transform,
                      [instance, bad_pattern])
def test_equation_cancellation_is_applicable(self):
    """PLUS cancellation applies to 'x + 4 = y' but not to its flip."""
    equation = Equation(
        Parser(Tokenizer.tokenize('x + 4')).parse(),
        Parser(Tokenizer.tokenize('y')).parse())
    cancellation = EquationCancellation(OperationType.PLUS(),
                                        OperationType.MINUS())
    self.assertTrue(cancellation.is_applicable_to(equation))
    self.assertFalse(cancellation.is_applicable_to(equation.flip()))
def test_equation_cancellation(self):
    """Cancelling TIMES with DIVIDE on 'x * 4 = y'; result is approved."""
    equation = Equation(
        Parser(Tokenizer.tokenize('x * 4')).parse(),
        Parser(Tokenizer.tokenize('y')).parse())
    cancellation = EquationCancellation(
        OperationType.TIMES(), OperationType.DIVIDE())
    self.assertTrue(cancellation.is_applicable_to(equation))
    cancelled = cancellation.apply(equation)
    verify(str(cancelled), self.reporter)
def test_equation_cancellation_with_negative(self):
    """PLUS cancellation also handles a negative addend ('x + -4 = y')."""
    equation = Equation(
        Parser(Tokenizer.tokenize('x + -4')).parse(),
        Parser(Tokenizer.tokenize('y')).parse())
    cancellation = EquationCancellation(OperationType.PLUS(),
                                        OperationType.MINUS())
    self.assertTrue(cancellation.is_applicable_to(equation))
    cancelled = cancellation.apply(equation)
    verify(str(cancelled), self.reporter)
def test_identity_transformation(self):
    """An x -> x substitution maps any instance onto itself."""
    identity_expr = Parser(Tokenizer.tokenize('x')).parse()
    transformation = ExpressionSubstitution(identity_expr, identity_expr)
    instance = Parser(Tokenizer.tokenize('abc')).parse()
    binding = SubstitutionPattern({'x': 'abc'})
    transformed = transformation.transform(instance, binding)
    verify('{} -> {}'.format('abc', transformed), self.reporter)
def handle(app_config):
    # Render the page whose slug arrives in the ':' query parameter.
    # Returns rendered HTML, a redirect stub, or an error string.
    slug = request.args.get(':')
    if not slug:
        # No slug given: meta-refresh the browser to the home page.
        return '<meta http-equiv="refresh" content="2; url=?:=home"> redirect'
    if not is_valid_slug(slug):
        return 'Invalid page ID'
    with DbTree(app_config['db_uri']) as db:
        page_info = db.get_page_info(slug)
        tree_html = compile_tree(page_info.tree)
        parser = Parser(slug)
        # Parse every (text, content_id) pair while the DB is still open;
        # parser.acc accumulates titles as a side effect of parsing.
        ast_list = [parser.parse_string(string, content_id)
                    for string, content_id in page_info.content_pair_iter()]
    with DbTitle(app_config['db_uri']) as db:
        # Persist the titles collected during parsing.
        db.put_titles_in(parser.acc)
    # Lazy generator: compiled only when the template iterates it,
    # after both DB connections are closed (uses parser.acc only).
    notes_html_list = (compile_notes(cons, parser.acc) for cons in ast_list)
    # use the "directory" part only
    page_info.path.pop()
    return render_template(
        'view.html',
        title=page_info.title,
        nav_edit=url_for('edit') + slug_to_link(slug),
        unlisted=page_info.unlisted,
        directory_of_page=page_info.path,
        prev_article=page_info.prev,
        next_article=page_info.next,
        mtime_str=page_info.mtime_str,
        sidebar_html=tree_html,
        notes_html_list=notes_html_list
    )
def test_full_exp(self):
    """A long mixed-operator expression parses with correct precedence.

    Expression: -1 * 2 % 2 + (3.0 / 10) - 4 // 1 ** 3
    """
    spec = [
        (TokenType.MINUS, None), (TokenType.INT, 1),
        (TokenType.MULTIPLY, None), (TokenType.INT, 2),
        (TokenType.MOD, None), (TokenType.INT, 2),
        (TokenType.PLUS, None), (TokenType.LPAREN, None),
        (TokenType.FLOAT, 3.0), (TokenType.DIVIDE, None),
        (TokenType.INT, 10), (TokenType.RPAREN, None),
        (TokenType.MINUS, None), (TokenType.INT, 4),
        (TokenType.INTDIVIDE, None), (TokenType.INT, 1),
        (TokenType.EXPONENT, None), (TokenType.INT, 3),
    ]
    tokens = [Token(kind) if value is None else Token(kind, value)
              for kind, value in spec]
    parsed = Parser(tokens).parse()
    self.assertEqual(
        parsed,
        SubtractNode(
            AddNode(
                ModNode(MultiplyNode(NegateNode(IntNode(1)), IntNode(2)),
                        IntNode(2)),
                DivideNode(FloatNode(3.0), IntNode(10))),
            IntegerDivideNode(IntNode(4),
                              ExponentNode(IntNode(1), IntNode(3)))))
def test_add_rule(self):
    """add_rule appends to the rule list, duplicates included."""
    float_pattern = r"^[-+]?\d*\.\d+$"
    first_criterion = RegexCriterion(float_pattern)
    second_criterion = RegexCriterion(float_pattern)
    shared_rule = ParserRule(criteria=[first_criterion, second_criterion],
                             allow_partial=False)
    parser = Parser(rules=[shared_rule, shared_rule])
    parser.add_rule(shared_rule)
    assert len(parser.rules) == 3
def test_5():
    """Parsing the malformed regexp "01('=34567)" must raise ParseError.

    Prints the expected message when the error is raised; otherwise
    reports that the parser failed to reject the input.
    """
    regexp = "01('=34567)"
    try:
        my_scan = Scanner(regexp)
        my_parser = Parser(my_scan)
        my_parser.parse()
    except ParseError:
        print("test 5: Incorrect regexp")
    else:
        # BUG FIX: the original swallowed the success path silently, so a
        # parser regression that stopped raising ParseError went unnoticed.
        print("test 5: FAILED - ParseError was not raised")
def test_all_substitutions_same_variable(self):
    """Enumerate every 'a + a -> 2 * a' match inside 'x + x + x'."""
    expression = Parser(Tokenizer.tokenize('x + x + x')).parse()
    transformation = ExpressionSubstitution(
        Parser(Tokenizer.tokenize('a + a')).parse(),
        Parser(Tokenizer.tokenize('2 * a')).parse())
    report_lines = []
    for pattern, result in transformation.get_all_substitutions(expression):
        bindings = ', '.join('{} : {}'.format(key, pattern[key])
                             for key in sorted(pattern.keys()))
        report_lines.append('{' + bindings + '} => ' + str(result))
    verify('\n'.join(report_lines), self.reporter)
def test_transformation_with_expression(self):
    """Substitution values may themselves be parsed sub-expressions."""
    swap = ExpressionSubstitution(
        Parser(Tokenizer.tokenize('x + y')).parse(),
        Parser(Tokenizer.tokenize('y + x')).parse())
    instance = Parser(Tokenizer.tokenize('1+(2+3+4)')).parse()
    binding = SubstitutionPattern({
        'x': '1',
        'y': Parser(Tokenizer.tokenize('2+3+4')).parse()
    })
    swapped = swap.transform(instance, binding)
    verify('{} -> {}'.format(str(instance), swapped), self.reporter)
def test_ops(self):
    """Each binary operator token builds the matching binary AST node.

    Same assertions as seven copy-pasted stanzas, expressed as one
    data-driven loop over (operator token, expected node class) pairs.
    """
    cases = [
        (TokenType.PLUS, AddNode),
        (TokenType.MINUS, SubtractNode),
        (TokenType.DIVIDE, DivideNode),
        (TokenType.MULTIPLY, MultiplyNode),
        (TokenType.INTDIVIDE, IntegerDivideNode),
        (TokenType.EXPONENT, ExponentNode),
        (TokenType.MOD, ModNode),
    ]
    for op_type, node_class in cases:
        tokens = [
            Token(TokenType.INT, 4),
            Token(op_type),
            Token(TokenType.FLOAT, 3.0)
        ]
        parsed = Parser(tokens).parse()
        self.assertEqual(parsed, node_class(IntNode(4), FloatNode(3.0)))
def calcul(eq, var_list):
    """Tokenize and evaluate expression *eq* against *var_list*.

    Returns the evaluated result, or the raised exception object when
    anything goes wrong (caller inspects the return value).
    """
    try:
        tokens = tokenize(eq.replace(" ", ""), var_list)
        evaluator = Parser(tokens, var_list)
        return evaluator.eval()
    except Exception as e:
        return e
def main():
    """Command-line driver: load a CSV, cluster its rows with the chosen
    algorithm (0=RecAgglo, 1=SampleClust, 2=AggloClust), and write the
    resulting clusters to the output CSV.
    """
    args = Parser().args
    weights = [float(w) for w in args.weight.strip('[]').split(',')]
    frame = pd.read_csv(args.infile, dtype='str')
    if len(weights) != frame.shape[1]:
        # Weight list does not match column count: fall back to uniform.
        weights = np.ones(frame.shape[1])
    if args.skip_index:
        weights[0] = 0.  # index column weight set to 0 and not considered during clustering
    if args.verbose:
        print("ARGS:")
        print("\tinfile:", args.infile)
        print("\toutfile:", args.outfile)
        print("\tweight:", weights)
        print("\tdelta_a:", args.delta_a)
        print("\tdelta_fc:", args.delta_fc)
        print("\td_max:", args.d_max)
        print("\trho_mc:", args.rho_mc)
        print("\trho_s:", args.rho_s)
        print("\ninput shape:", frame.shape)
    if args.algo == 0:
        # RecAgglo expects an extra label column appended to the data.
        merged = np.append(frame.values, np.zeros((frame.shape[0], 1)),
                           axis=1)
        clusters = RecAgglo(merged, args.delta_a, args.delta_fc, args.d_max,
                            args.rho_s, args.rho_mc, weights, args.verbose)
    elif args.algo == 1:
        clusters = SampleClust(frame.values, args.rho_s, args.rho_mc,
                               weights)
    elif args.algo == 2:
        clusters = AggloClust(frame.values, 'single', args.d_max, weights)
    pd.DataFrame(clusters).to_csv(args.outfile)
def test_parsing_error(self):
    """Each malformed input file prints exactly the recorded output."""
    path = 'tests/input/parsing_error'
    filenames = os.listdir(path) + ['error_31_file_not_found.txt']
    for filename in filenames:
        with open('tests/output/out_{}'.format(filename), 'r') as fd:
            expected_out = fd.read()
        with ListStream() as print_list:
            Parser('{}/{}'.format(path, filename)).parse_file()
        actual_out = "".join(print_list.data)
        self.assertEqual(actual_out, expected_out)
def test_4():
    """DFA for 'ytut(a|g)*lk' accepts 'ytutalk' and rejects prefixes."""
    my_parser = Parser(Scanner("ytut(a|g)*lk"))
    my_parser.parse()
    nfa = NFA_Constructor().construct_nfa(my_parser.token_list)
    dfa = DFA_Constructor().construct_dfa(nfa)
    assert dfa.walk_dfa("a") == False
    assert dfa.walk_dfa("ytu") == False
    assert dfa.walk_dfa("ytutalk") == True
def test_6():
    """DFA for '(z*)' rejects every string containing non-'z' symbols."""
    my_parser = Parser(Scanner("(z*)"))
    my_parser.parse()
    nfa = NFA_Constructor().construct_nfa(my_parser.token_list)
    dfa = DFA_Constructor().construct_dfa(nfa)
    for rejected in ("a", "4", "26546", "Owskemg"):
        assert dfa.walk_dfa(rejected) == False
def run(code):
    """Interpret *code*: scan, parse, resolve, then execute.

    Bails out after each phase if the global error flag was set.
    """
    tokens = Scanner(code).scan_tokens()
    if error.had_error:
        return
    stmts = Parser(tokens).parse()
    if error.had_error:
        return
    resolver.Resolver(interp).resolve(stmts)
    if error.had_error:
        return
    interp.interpret(stmts)
def test_3():
    """DFA for '(cd*|bha)*jk' accepts 'jk' and rejects other strings."""
    my_parser = Parser(Scanner("(cd*|bha)*jk"))
    my_parser.parse()
    nfa = NFA_Constructor().construct_nfa(my_parser.token_list)
    dfa = DFA_Constructor().construct_dfa(nfa)
    assert dfa.walk_dfa("k") == False
    assert dfa.walk_dfa("mf") == False
    assert dfa.walk_dfa("jk") == True
    assert dfa.walk_dfa("e") == False
    assert dfa.walk_dfa("t") == False
def test_1():
    """DFA for 'a|b*' accepts one 'a' or any run of 'b', nothing mixed."""
    my_parser = Parser(Scanner("a|b*"))
    my_parser.parse()
    nfa = NFA_Constructor().construct_nfa(my_parser.token_list)
    dfa = DFA_Constructor().construct_dfa(nfa)
    assert dfa.walk_dfa("ab") == False
    assert dfa.walk_dfa("a") == True
    assert dfa.walk_dfa("aa") == False
    assert dfa.walk_dfa("ba") == False
    assert dfa.walk_dfa("bbbb") == True
def test_complex_single_solution_solve(self):
    """Apply cancellations repeatedly until the LHS is reduced to 'x'."""
    equation = Equation(
        Parser(Tokenizer.tokenize('x * 4 - 18')).parse(),
        Parser(Tokenizer.tokenize('2')).parse())
    op_pairs = [
        (OperationType.PLUS(), OperationType.MINUS()),
        (OperationType.MINUS(), OperationType.PLUS()),
        (OperationType.TIMES(), OperationType.DIVIDE()),
        (OperationType.DIVIDE(), OperationType.TIMES()),
    ]
    transformations = [
        EquationCancellation(forward, inverse).as_transformation()
        for forward, inverse in op_pairs
    ]
    step = SolverStep(transformations)
    step.next_step = step  # loop on itself until the condition holds
    is_solved = lambda eq: str(eq.lhs) == 'x'
    solved = step.execute_until(equation, is_solved)
    verify(str(solved), self.reporter)
def change_x_to_num(eq, var_list, string):
    """Substitute 'name=value' assignments from *string* into *eq*, then
    evaluate the result.

    *string* is a comma-separated list such as "x=1, y=2". Returns the
    formatted answer, or an '*Error* ...' message on any failure.
    """
    try:
        tokens = tokenize(eq.replace(" ", ""), var_list)
        names = []
        values = []
        for assignment in string.replace(" ", "").split(','):
            parts = assignment.split('=')
            if parts[0] not in var_list:
                raise Exception('%s is not in current variable list' % (parts[0]))
            if len(parts) != 2 or not is_digit(parts[1]):
                raise Exception('Wrong input')
            names.append(parts[0])
            values.append(parts[1])
        # Replace each bound variable token with its numeric value.
        for name, value in zip(names, values):
            tokens = [float(value) if tok == name else tok for tok in tokens]
        if tokens[0] == 'sig':
            # Sigma form: the body lives between fixed header/footer tokens.
            sig_eq = ''.join(str(tok) for tok in tokens[4:len(tokens) - 6])
            ans, _, _ = sigma(tokens[2], sig_eq, tokens[-5], tokens[-3],
                              var_list)
            return ans[0]
        evaluator = Parser(tokens, var_list)
        return from_list_to_str('', evaluator.eval())
    except Exception as e:
        return '*Error* ' + str(e)
def __init__(self, number):
    # Game-client driver: set up party state and the server parser, then
    # run the read/compute/respond loop forever.
    # number: client/player identifier passed through to PartyInformations.
    self.partyInfos = PartyInformations(number)
    self.parser = Parser(self.partyInfos)
    # Busy-wait until the first read from the server succeeds.
    while self.parser.tryRead() == False:
        continue
    while True:
        start = timer()
        self.parser.updateInfos()
        if number == 1:
            # NOTE(review): only client 1 fetches the "ghost" — presumably
            # a role-specific extra read; confirm against the protocol.
            self.parser.getGhost()
        self.parser.parseTourInfos()
        if self.parser.readQuestion() == False:
            # No question available yet: restart the loop iteration.
            continue
        if self.parser.questionType == self.parser.tileQuestion:
            # Only tile questions require the AI to compute an answer.
            IA.compute(self.partyInfos)
        end = timer()
        print("time elapsed = " + str(end - start))
        self.parser.sendResponse()
def test_imply(self):
    """Implication inputs: the first three files are checked via their
    printed output, the rest via the backward-chaining solver's return.
    """
    path = 'tests/input/imply'
    printed_cases = ['imply_0.txt', 'imply_1.txt', 'imply_2.txt']
    for filename in os.listdir(path):
        with open('tests/output/out_{}'.format(filename), 'r') as fd:
            expected = fd.read()
        if filename in printed_cases:
            with ListStream() as print_list:
                Parser('{}/{}'.format(path, filename)).parse_file()
            self.assertEqual("".join(print_list.data), expected)
        else:
            actual = backward_chaining_solver.treat_entry(
                '{}/{}'.format(path, filename))
            self.assertEqual(actual, expected)
def analyse(self, device_id, artists_number):
    """The main method that analysis Google Play Music account, and returns results of parsing concert.ua
    :param device_id: str
    :param artists_number: int
    :return: results: list
    """
    api = Mobileclient()
    try:
        artists_number = int(artists_number)
        device_id = str(device_id)
    except ValueError:
        # Unparsable arguments: signal failure with an error code.
        return -1
    api.oauth_login(str(device_id))

    # Collect every song from the library and from all user playlists.
    main_playlist = Playlist()
    for song in api.get_all_songs():
        main_playlist.append(song)
    for playlist in api.get_all_user_playlist_contents():
        for song in playlist['tracks']:
            main_playlist.append(song)

    # Resolve the top artist ids to display names.
    artists = []
    for artist_id in main_playlist.top_n(artists_number):
        info = api.get_artist_info(include_albums=False,
                                   artist_id=artist_id,
                                   max_rel_artist=0,
                                   max_top_tracks=0)
        artists.append(info['name'])

    # Keep only artists for which the concert site returned something.
    results = []
    for name in artists:
        parsed = Parser().parse(name)
        if parsed:
            results.append(parsed)
    return results
def main():
    """Load booking and fare-rule data, compute all fares, and persist them.

    If either input is missing, prints and writes an error marker instead.
    """
    booking = get_json_data('booking')
    fare_rules = get_json_data('fare_rules')
    # Idiom fix: compare against None with 'is not', not '!='.
    if booking is not None and fare_rules is not None:
        parser = Parser(booking, fare_rules)
        data = parser.calculate_all()
        print(data)
        write_data(data)
    else:
        print({'Error': 'Error'})
        write_data({'Error': 'Error'})
def calcul(eq, var_list):
    """Evaluate *eq*: either a 'sig' (summation) form or a plain
    expression, returning [answer, domain, in_domain].

    On failure returns the triple ('Error', exception, 0).
    """
    try:
        tokens = tokenize(eq.replace(" ", ""), var_list)
        if tokens[0] == 'sig':
            # Sigma form: the body lives between fixed header/footer tokens.
            sig_eq = ''.join(str(tok) for tok in tokens[4:len(tokens) - 6])
            return sigma(tokens[2], sig_eq, tokens[-5], tokens[-3], var_list)
        evaluator = Parser(tokens, var_list)
        ans = evaluator.eval()
        domain, in_domain = evaluator.get_domain()
        return [ans, domain, in_domain]
    except Exception as e:
        return 'Error', e, 0