def main():
    # Load in test code
    with open("Test/testcode.txt", "r") as testcode:
        contents = testcode.read()
    scan = Scanner()
    for symbol in contents:
        scan.read_symbol(symbol)
    print("identifiers:")
    print(scan.identifiers)
    print("\n")
    print("consts:")
    print(scan.consts)
    print("________")
    for line in scan.lineCollection:
        for token in line:
            print("type: " + str(token.enum_type) + " id: " + str(token.id), end=' | ')
        print("")
    print("________")
    parser = Parser(scan.lineCollection)
    parsed_block = parser.parse()
    print("")
    print("________")
    print("")
    Interpreter.consts = scan.consts
    Interpreter.identifiers = scan.identifiers
    Interpreter.interpret(parsed_block.statements)
def __init__(self):
    self.number = 100
    self.urlManager = UrlManager()
    self.downloader = Downloader()
    self.parser = Parser()
    self.dataManager = DataManager()
    self.dbManager = MongoUtils()
def test_something(self):
    string_source = 'i = 100;\n' \
                    'while (a>1){\n' \
                    '   print(a-1);};'
    parser = Parser(StringSource(string_source))
    program = parser.parse_program()
    ast_dumper = AstDumper()
    ast_dumper.add_child(lambda: ast_dumper.visit(program), str(program))
def main(filename):
    print("----------Parser Debug-------------")
    tokenizer = Tokenizer(filename)
    tokenizer.tokenize()
    lexer = Lexer(tokenizer.getTokens())
    lexer.lex()
    parser = Parser(lexer.getTokens())
    parser.parse()
    print(parser.getTree())
    print("\n----------Execution Stack-----------")
    interpreter = Interpreter(parser.getTree())
    interpreter.interpret()
    print("\n----------Program Output------------")
    return interpreter.output()
def should_fail(tester, string_source, expected_error_code=None,
                expected_token_type=None, expected_description=None):
    parser = Parser(StringSource(string_source))
    with tester.assertRaises(ParserError) as e:
        parser.parse_program()
    if expected_error_code is not None:
        tester.assertEqual(expected_error_code, e.exception.error_code)
    if expected_token_type is not None:
        tester.assertEqual(expected_token_type, e.exception.expected_token_type)
    if expected_description:
        tester.assertEqual(expected_description, e.exception.description)
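# A minimal usage sketch of should_fail inside a unittest.TestCase; the source snippet and the
# ErrorCode.MISSING_SEMICOLON member are hypothetical placeholders, not taken from the project.
import unittest


class ParserErrorTest(unittest.TestCase):
    def test_missing_semicolon_is_reported(self):
        # 'a = 1' lacks the terminating ';', so parse_program() is expected to raise ParserError
        should_fail(self, 'a = 1', expected_error_code=ErrorCode.MISSING_SEMICOLON)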
def test_microterm(self):
    source = 'a**b;'
    program = Parser(StringSource(source)).parse_program()
    actual = program.objects[0]
    expected = create_expected_binary_operator(
        [(('id', 'a',), ('op', '**',), ('id', 'b',))]
    )
    self.visit_and_compare(expected, actual)

    source = '5**4.5;'
    program = Parser(StringSource(source)).parse_program()
    actual = program.objects[0]
    expected = create_expected_binary_operator(
        [(('scalar', 5.0,), ('op', '**',), ('scalar', 4.5,))]
    )
    self.visit_and_compare(expected, actual)
class SpiderMan(object):

    def __init__(self):
        self.number = 100
        self.urlManager = UrlManager()
        self.downloader = Downloader()
        self.parser = Parser()
        self.dataManager = DataManager()
        self.dbManager = MongoUtils()

    def set_crawler_number(self, num):
        if num is None or int(num) < 0:
            return
        self.number = int(num)

    def crawler(self, root_url):
        self.urlManager.add_new_url(root_url)
        while (self.urlManager.has_new_url()
               and self.urlManager.old_urls_size() < self.number):
            try:
                new_url = self.urlManager.get_new_url()
                html = self.downloader.download(new_url)
                result = self.parser.parser(new_url, html)
                self.urlManager.add_new_urls(result[0])
                self.dataManager.store_data(result[1])
            except Exception as err:
                print("crawl failed: " + str(err))
        self.dataManager.output_data()
        datas = self.dataManager.get_data()
        self.dbManager.insert_baike_many(datas)
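# A minimal driver sketch for SpiderMan, not part of the original module; the page budget
# and the root URL below are placeholders to be replaced with the real crawl entry point.
if __name__ == '__main__':
    spider = SpiderMan()
    spider.set_crawler_number(10)          # stop once 10 URLs have been crawled
    spider.crawler("https://example.com")  # placeholder root URL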
def test_equality_expression(self):
    source = 'a == b != c == 0;'
    program = Parser(StringSource(source)).parse_program()
    actual = program.objects[0]
    expected = create_expected_binary_operator(
        [(('id', 'a',), ('op', '==',), ('id', 'b',)),
         (('prev', None,), ('op', '!=',), ('id', 'c',)),
         (('prev', None,), ('op', '==',), ('scalar', 0.0,))]
    )
    self.visit_and_compare(expected, actual)
def parse_file(self, file_path):
    global f_counter
    p_name = "#NUM_" + str(f_counter)
    p = Parser(self.hash_stopwords, self.hash_punc)
    self.get_exam_from_file(file_path, p)
    with open(self.post_path + '/temp_hash_objects/file_hash_' + p_name + '.pkl',
              'wb') as output:
        pickle.dump(p.hash_temp_words, output, pickle.HIGHEST_PROTOCOL)
    self.indexer.write_dict_exc(p.hash_exc)
    self.indexer.write_dict_exc_in_exams(p.hash_exc_in_exams)
def test_term(self):
    source = 'a mod b / c div d * 0;'
    program = Parser(StringSource(source)).parse_program()
    actual = program.objects[0]
    expected = create_expected_binary_operator(
        [(('id', 'a',), ('op', 'mod',), ('id', 'b',)),
         (('prev', None,), ('op', '/',), ('id', 'c',)),
         (('prev', None,), ('op', 'div',), ('id', 'd',)),
         (('prev', None,), ('op', '*',), ('scalar', 0.0,))]
    )
    self.visit_and_compare(expected, actual)
def test_relative_expression(self):
    source = 'a >= b > c < d <= 0;'
    program = Parser(StringSource(source)).parse_program()
    actual = program.objects[0]
    expected = create_expected_binary_operator(
        [(('id', 'a',), ('op', '>=',), ('id', 'b',)),
         (('prev', None,), ('op', '>',), ('id', 'c',)),
         (('prev', None,), ('op', '<',), ('id', 'd',)),
         (('prev', None,), ('op', '<=',), ('scalar', 0.0,))]
    )
    self.visit_and_compare(expected, actual)
def test_new_examples():
    # Example 1 - loop2.c
    ps = Parser(
        r'C:\Users\przem\OneDrive\PycharmProjects\S2_SEM1\Projekt\TRACOParser\Examples\loop2\loop2.c',
        r'C:\Users\przem\OneDrive\PycharmProjects\S2_SEM1\Projekt\TRACOParser\Examples\loop2\values.json'
    )
    _ = ps.readfile()
    print(_.instructions)
    print(_.instructions[0].Constr)
    print(_.instructions[0].Constr.instructions[0])
    print(_.instructions[0].Constr.instructions[0].Constr)
    print(_.instructions[0].Constr.instructions[0].Constr.instructions[0].Constr)
    print(_.instructions[0].Constr.instructions[0].Constr.instructions[0].Constr.instructions[0])

    # Example 2
    ps = Parser(
        r'C:\Users\przem\OneDrive\PycharmProjects\S2_SEM1\Projekt\TRACOParser\Examples\loop2\loop2.c',
        r'C:\Users\przem\OneDrive\PycharmProjects\S2_SEM1\Projekt\TRACOParser\Examples\loop2\values.json'
    )
def test_something(self):
    string_source = ' fun partition(arr, low, high)' \
                    ' {' \
                    '     i = (low -1);' \
                    '     pivot = arr[high];' \
                    '     for (j in range)' \
                    '         if (arr[j] < pivot)' \
                    '         {' \
                    '             i = i+1;' \
                    '             temp = arr[i];' \
                    '             arr[i] = arr[j];' \
                    '             arr[j] = temp;' \
                    '         }' \
                    '     temp = arr[i+1];' \
                    '     arr[i+1] = arr[high];' \
                    '     arr[high] = temp;' \
                    '     ret (i+1);' \
                    ' }' \
                    ' fun quickSort(arr, low, high)' \
                    '     if (low < high)' \
                    '     {' \
                    '         pi = partition(arr, low, high);' \
                    '         quickSort(arr, low, pi-1);' \
                    '         quickSort(arr, pi+1, high);' \
                    '     }'
    s = 'x[2];\n' \
        'a[1, 2];\n' \
        'b[a];\n' \
        'c[:];\n' \
        'd[:, 1];\n' \
        'e[1, :];\n' \
        'f[:, :];'
    parser = Parser(StringSource(string_source))
    program = parser.parse_program()
    ast_dumper = AstDumper()
    ast_dumper.add_child(lambda: ast_dumper.visit(program), str(program))
def test_parser(parser=Parser(parse_arguments().infile)):
    print("RESULT OF PARSING: ")
    for x in parser.readfile().instructions:
        if isinstance(x, Constructions):
            pprint(dict(vars(x))['Constr'])
            if x.Constr.instructions:
                print("====inside for===")
                for ins in x.Constr.instructions:
                    if isinstance(ins, Constructions):
                        pprint(dict(vars(ins))['Constr'])
                    else:
                        print(ins)
                print("====end of for===")
        else:
            print(x)
def test_arithmetic_expression(self):
    source = 'a+b-c+d+5-4-7+4+3;'
    program = Parser(StringSource(source)).parse_program()
    actual = program.objects[0]
    expected = create_expected_binary_operator(
        [(('id', 'a',), ('op', '+',), ('id', 'b',)),
         (('prev', None,), ('op', '-',), ('id', 'c',)),
         (('prev', None,), ('op', '+',), ('id', 'd',)),
         (('prev', None,), ('op', '+',), ('scalar', 5.0,)),
         (('prev', None,), ('op', '-',), ('scalar', 4.0,)),
         (('prev', None,), ('op', '-',), ('scalar', 7.0,)),
         (('prev', None,), ('op', '+',), ('scalar', 4.0,)),
         (('prev', None,), ('op', '+',), ('scalar', 3.0,))]
    )
    self.visit_and_compare(expected, actual)
def main():
    print('hello')
    parser = Parser('./data/train')

    parser.taskFlag = 1
    parser.train()
    parser.classifyEmails('./data/test')

    parser.taskFlag = 2
    parser.train()
    parser.classifyEmails('./data/test')

    parser.taskFlag = 3
    parser.train()
    parser.classifyEmails('./data/test')
from Parser.Parser import Parser
from Utils.Utils import Utils
from Equation.Equation import Equation

test_str = "4 + 3 * X^2 + X^1 = - 5 * X^0"
term_re = r"(?P<term>[-+]?(?P<coef>\d*\.?\d*)\*?(?P<expo>X{1}\^\d+)?)"
# equation_re = "(^(\s*([+-]?)\s*\d*((\d*)(\.\d+)?\s*(\*{1}))?\s*(X(\^[012]?)?)? *[+-]?)+$)"
# equation_re = r'(^(([+-]?)\d*((\d*)(\.\d+)?[*]{1})?[^*](X(\^[012]?)?)?[^+-])+$)'
equation_re = r'(?P<term>[-+]?(?P<coef>\d*\.?\d*)\*?(?P<expo>X{1}\^\d+)?)+'

test_str = test_str.replace(' ', '')
both_sides = Utils.check_input(str_=test_str, reg=equation_re)
print(f'Both_sides: {both_sides}')
if both_sides:
    left_side = Parser.match_in_string(both_sides[0], term_re)
    # print(left_side)
    # right_side = Parser.match_in_string(both_sides[1], term_re)
    # Utils.switch_sign(right_side)
    # print(right_side)
    # equation_terms = left_side + right_side
    # print(equation_terms)
    # eq = Equation(equation_terms)
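# Illustration only, using just the standard-library re module: how term_re above splits a
# whitespace-free polynomial side into signed terms. Zero-length matches are filtered out
# because every group in the pattern is optional.
import re

side = "4+3*X^2+X^1"
terms = [m.groupdict() for m in re.finditer(term_re, side) if m.group('term')]
# terms == [{'term': '4', 'coef': '4', 'expo': None},
#           {'term': '+3*X^2', 'coef': '3', 'expo': 'X^2'},
#           {'term': '+X^1', 'coef': '', 'expo': 'X^1'}]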
def test_parse_program_with_string(self):
    s = 'a = 1 + 2;' \
        's = "some string";'
    Parser(StringSource(s)).parse_program()
def test_simple(self):
    s = '"Hello world";'
    self.assertEqual(String, type(Parser(StringSource(s)).try_to_parse_string()))
def test_filesource(self):
    filesource = FileSource('test.txt')
    parser = Parser(filesource)
    program = parser.parse_program()
    ast_dumper = AstDumper()
    ast_dumper.add_child(lambda: ast_dumper.visit(program), str(program))
def empezar():
    import sys
    archivoEntrada = open(sys.argv[1], 'r')
    analizadorLexico = Lexer()
    entrada = archivoEntrada.read()
    analizadorLexico.input(entrada)
    lineatokens = ''
    try:
        for token in iter(analizadorLexico.lexer.token, None):
            lineatokens += repr(token.type) + ' ' + repr(token.value) + ' ' + repr(token.lineno)
            lineatokens += '\n'
    except Error.LexicalError.LexicalError as error:
        lineatokens = error.mensaje
    archivoEntrada.close()

    analizadorSintactico = Parser(analizadorLexico.tokens)
    analizadorLexico.lexer.lineno = 1
    try:
        raiz = analizadorSintactico.parse(entrada)
        Parser.linea += 'Programa:\n'
        for clase in raiz:
            clase.imprimir()
    except Error.SyntacticalError.SyntacticalError as error:
        Parser.linea = error.mensaje

    analizadorSemantico = VisitanteTabla()
    try:
        analizadorSemantico.visitarProgram(raiz)
    except Error.SemanticError.SemanticError as error:
        try:
            analizadorSemantico.linea = error.mensaje
            analizadorSemantico.linea += '\nEl error se dio en la clase: ' + \
                analizadorSemantico.metodoActual.tabla.padre.nombre + \
                ', metodo: ' + analizadorSemantico.metodoActual.tabla.nombre
        except BaseException:
            pass

    analizadorCodigo = VisitanteLir(analizadorSemantico.tablaPrincipal)
    analizadorCodigo.visitarProgram(raiz)

    dump_tokens = 0
    dump_ast = 0
    dump_symtab = 0
    dump_lir = 0
    for parametro in sys.argv:
        if parametro[0] == '-':
            if parametro == '-dump-tokens':
                dump_tokens = 1
            if parametro == '-dump-ast':
                dump_ast = 1
            if parametro == '-dump-symtab':
                dump_symtab = 1
            if parametro == '-dump-lir':
                dump_lir = 1

    if dump_tokens:
        archivoSalida = open(sys.argv[1][:-3] + '.tok', 'w')
        archivoSalida.write(lineatokens)
        print("Tokens fueron escritos a: %s" % str(sys.argv[1][:-3] + '.tok'))
        archivoSalida.close()
    if dump_ast:
        archivoSalida = open(sys.argv[1][:-3] + '.ast', 'w')
        archivoSalida.write(Parser.linea)
        print("Salida del AST fue escrita a: %s" % str(sys.argv[1][:-3] + '.ast'))
        archivoSalida.close()
    if dump_symtab:
        archivoSalida = open(sys.argv[1][:-3] + '.sym', 'w')
        archivoSalida.write(analizadorSemantico.linea)
        print("Salida de la tabla de simbolos fue escrita a: %s" % str(sys.argv[1][:-3] + '.sym'))
        archivoSalida.close()
    if dump_lir:
        archivoSalida = open(sys.argv[1][:-3] + '.lir', 'w')
        archivoSalida.write(analizadorCodigo.linea)
        print("Codigo intermedio escrito a: %s" % str(sys.argv[1][:-3] + '.lir'))
        archivoSalida.close()

    listabat = analizadorCodigo.linea.split('\n')
    lineabat = '@ECHO OFF\n'
    if listabat[0] == '':
        listabat = listabat[1:]
    lineabat += 'echo ' + listabat[0] + '>codigo.tmp\n'
    for linea in listabat[1:]:
        if linea == '':
            lineabat += 'echo+>>codigo.tmp\n'
        else:
            lineabat += 'echo ' + linea + '>>codigo.tmp\n'
    lineabat += 'java -jar microLIR.jar codigo.tmp\necho+\npause\ndel codigo.tmp'
    archivoSalida = open(sys.argv[1][:-3] + '.bat', 'w')
    archivoSalida.write(lineabat)
    print('Archivo batch ejecutable escrito a: ' + sys.argv[1][:-3] + '.bat')
    archivoSalida.close()
from Parser.Parser import Parser

if __name__ == '__main__':
    parser = Parser()
    while True:
        input_str = input("#: ")
        print(parser.input(input_str))
def parse_matrix(string):
    parser = Parser(StringSource(string))
    return parser.try_to_parse_matrix()
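# A minimal usage sketch for parse_matrix; the bracketed, semicolon-terminated literal below
# is illustrative only and may not match the project's actual matrix grammar.
matrix = parse_matrix('[1, 2; 3, 4];')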
from Fetcher.Fetcher import Fetcher
from Parser.Parser import Parser

fetch = Fetcher()
filename, data = fetch.run()
print(data.keys())

par = Parser(data=data)
print(par.parse_regions())
import discord
from discord.ext.commands import Bot
from discord.ext import commands
import asyncio
import time
from Parser.Parser import Parser

Client = discord.Client()
client = commands.Bot(command_prefix="!")
url = ""
parser = Parser(url)


@client.event
async def on_ready():
    print("Feedy is online!")


@client.event
async def on_message(message):
    if message.content.lower() == "!help":
        response = "I don't do much at the moment, but here's what I've got:\n" \
                   " -> !top5 : Get the latest 5 gaming news"
        await client.send_message(message.channel, response)
    elif message.content == "!top5":
        await client.send_message(message.channel, "Getting top 5 news. Please wait...")
        parser.update()
        top5 = parser.top5()
        for x in top5:
            response = x["title"] + "\n\n"
            response += x["description"][0:200] + "... \n"
            response += x["media"]
import sys

from Lexer.Lexer import Lexer
from Parser.Parser import Parser
from Interpreter.Interpreter import Interpreter
from Lexer.LexerHash import LexerHash
from Lexer.LexerQueue import LexerQueue

Lexer.run(sys.argv)
Parser.run()
Interpreter.run()

for arg in sys.argv:
    if arg == "-v":
        print("Tabela de variáveis")
        LexerHash.shared().verbose()
        print("\nLista de instruções")
        LexerQueue.shared().verbose()
import sys

if __name__ == "__main__":
    path = sys.argv[1]
    try:
        fonte = open(path, 'r')
        programa = ''.join(fonte.readlines())
        fonte.close()
    except Exception:
        print("Código Fonte não encontrado")
        sys.exit(1)

    lexer = Scanner(programa)
    tabtokens = lexer.scan()
    # for i in tabtokens:
    #     print(i)

    # False = a Boolean may receive an int: anything other than 0 is true, and vice versa
    # True  = a Boolean only accepts booleans, and vice versa
    parser = Parser(tabtokens, True)
    try:
        parser.start()
    except Exception as e:
        print(e)

    # for i in parser.tabSimbolos:
    #     print(i)
    # print('-------------------------------')
    # for i in parser.tabTresEnderecos:
    #     print(i)

    parser.gerarArqCod()
import sys
import os

sys.path.append(os.path.join(sys.path[0], "../interpreted-interpreter"))

from Parser.Parser import Parser
from Tree.TreeBuilder import TreeBuilder
from Tree.Environment import Environment
from Tree.BuiltInNode import BuiltInNode
from Tree.IdentNode import IdentNode

parser = Parser()
builder = TreeBuilder()
built_in_env = Environment()
built_ins = [
    "newline", "write", "car", "cdr", "null?", "pair?", "procedure?",
    "symbol?", "number?", "display", "b-", "b+", "b*", "b/", "b=", "b<",
    "eq?", "cons", "apply", "eval", "set-car!", "set-cdr!",
    "builtin-env", "global-env"
]

for built_in in built_ins:
    built_in_env.define(IdentNode(built_in), BuiltInNode(built_in))

global_env = Environment(built_in_env)

# define a few methods based on builtins
parser.feed(
    """
    (define (b>= x y)
      (if (b< x y) #f #t))

    (define (b> x y)
def dumpAST(source: Source):
    parser = Parser(source)
    program = parser.parse_program()
    ast_dumper = AstDumper()
    ast_dumper.add_child(lambda: ast_dumper.visit(program), str(program))
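# A minimal usage sketch for dumpAST, mirroring the FileSource/StringSource tests above;
# 'test.txt' is the same placeholder file name used in test_filesource.
dumpAST(FileSource('test.txt'))
dumpAST(StringSource('a = 1 + 2;'))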
import sys
from Parser.Parser import Parser

if __name__ == "__main__":
    path = sys.argv[1]
    try:
        fonte = open(path, 'r')
        programa = ''.join(fonte.readlines())
        fonte.close()
    except Exception:
        print("Código Fonte não encontrado")
        sys.exit(1)

    lexer = Scanner(programa)
    tabTokens = lexer.scan()
    '''
    for i in tabTokens:
        print(i)
    '''
    parser = Parser(tabTokens)
    try:
        parser.start()
    except Exception as e:
        print(e)
else:
    print("Executado como um módulo")
def test_fix_dump(self):
    s = 'a = 0;'
    parser = Parser(StringSource(s))
    program = parser.parse_program()
    ast_dumper = AstDumper()
    ast_dumper.add_child(lambda: ast_dumper.visit(program), str(program))