def main():
    from lexer import lex
    from tree_to_dot import tree_to_dot, view

    json_example = open('json_example.json').read()
    print(json_example)
    tokens = lex(json_example)
    parser = JsonParser(tokens)
    parse_tree = parser.parse()
    dot = tree_to_dot(parse_tree)
    open('json_example.gv', 'w').write(dot)
    view(dot)

    # --- MODIFY HERE TO ADD MORE TEST CASES ---
    # TODO: delete this?
    json_example = open('json_array_example.json').read()
    print(json_example)
    tokens = lex(json_example)
    parser = JsonParser(tokens)
    parse_tree = parser.parse()
    dot = tree_to_dot(parse_tree)
    open('json_array_example.gv', 'w').write(dot)
    view(dot)

def test_atom_definition():
    atom_test_1 = lexer.lex('big_kahuna_burger')
    assert atom_test_1[0][1] == 'ATOM'

    atom_test_2 = lexer.lex("'forest gump'")
    assert atom_test_2[0][1] == 'ATOM'

    atom_test_3 = lexer.lex('@==>')
    assert atom_test_3[0][1] == 'ATOM'

def main():
    from lexer import lex
    from tree_to_dot import tree_to_dot, view

    # The same lex/parse/render pipeline runs over each example file.
    for filename in ('json_example.json', 'json_array_example.json',
                     'json_bad_example.json'):
        source = open(filename).read()
        print(source)
        tokens = lex(source)
        parser = JsonParser(tokens)
        parse_tree = parser.parse()
        dot = tree_to_dot(parse_tree)
        open(filename.replace('.json', '.gv'), 'w').write(dot)
        view(dot)

def parseTopLevel(mod, source, forJIT=False):
    # Classify the block
    try:
        blockHead = lexer.lex(source.peek(), False)
    except Exception:
        # Clear the bad line from the buffer
        source.getLine()
        raise
    if blockHead[0].name == 'def':
        # Determine function name and return type
        blockHead = lexer.lex(source.getLine(), mod.debugLexer)
        dtype = dtypes.getType(blockHead[1].data)
        funcName = blockHead[2].data[0]
        args = [[dtypes.getType(i.data), j.data]
                for i, j in ast.splitArguments(blockHead[2].data[1])]
        if funcName in mod.userFunctions:
            raise ValueError(
                "ERROR: Function {} is already defined.".format(funcName))
        # Handle function arguments
        mod.body += [
            "define {} @{}({})".format(
                dtype.irname, funcName,
                ",".join([i.irname + " %arg_" + j for i, j in args])) + "{"
        ]
        mod.body += ["entry:"]
        for argType, argName in args:
            mem = mod.newVariable(argName, argType)
            mod.body += [
                "store {} {}, {}* {}".format(argType.irname,
                                             "%arg_" + argName,
                                             mem.irname, mem.addr)
            ]
        # Read in the function body
        output = None
        if source.end(1):
            raise ValueError(
                "ERROR: Expected a block (maybe you forgot to indent?)")
        while not source.end(1):
            result = parseBlock(mod, source, 1)
            if result is not None:
                output = result
        # Check that the return type is correct and end the function definition
        if output is None or (dtype.name != output.name):
            # Guard against output being None so the error message itself
            # cannot raise an AttributeError.
            raise ValueError(
                "ERROR: Return type {} does not match declaration {}.".format(
                    output.name if output is not None else None, dtype.name))
        mod.userFunctions[funcName] = (dtype, args)
        mod.alreadyDeclared.append(funcName)
        mod.endScope()
        mod.body += ["ret {} {}".format(output.irname, output.addr)]
        mod.body += ["}"]
    else:
        # Top-level statement
        mod.isGlobal = forJIT
        mod.out = mod.main
        mod.lastOutput = parseBlock(mod, source, 0, forJIT)
        mod.out = mod.body
        mod.isGlobal = False
    return

def run():
    if len(argv) > 1:
        ps.parse(lx.lex(FR.open_file(argv[1])))
        # evaluate()
    else:
        while True:
            data = input("A&A >")
            ps.parse(lx.lex(data))

def test_calc():
    tokens = list(lex("80*8+1"))
    parser = EarleyParser(tokens)
    res = parser.parse()
    assert res.evaluate() == 641

    tokens = list(lex("(1+2.4)*1e-2/(8-9)"))
    parser = EarleyParser(tokens)
    res = parser.parse()
    assert res.evaluate() == -0.034

def test_lex():
    with pytest.raises(ParseError):
        list(lex("1+2f"))
    with pytest.raises(ParseError):
        list(lex("hello!"))
    with pytest.raises(ValueError):
        list(lex("1.1.1"))
    with pytest.raises(ValueError):
        list(lex("1e-1.2"))
    assert len(list(lex("(1+2.4)*1e-2/(8-9)"))) == 14

def check_all_brackets(behav_dict, spell_dict):
    """Check proper bracketing in all behaviours and spells."""
    for key in behav_dict:
        tokens = lex(behav_dict[key])
        if not check_brackets(tokens):
            sys.exit("Bad bracketing in behav " + key)
    for key in spell_dict:
        tokens = lex(spell_dict[key]['spell'])
        if not check_brackets(tokens):
            sys.exit("Bad bracketing in spell " + key)

def test_fail_lex_object_key(self):
    # Note: a comma was missing after "\\uGGGG", which silently
    # concatenated it with the following string literal.
    tests = [
        "\\",
        "\"",
        "\\u1",
        "\\x",
        "\\uGGGG",
        ''' "this should not work": '''
    ]
    for test in tests:
        with self.assertRaises(TokenError):
            lex(test)

def analyzeFile(fn):
    tokenClass = {
        'class': 'keyword',
        'primType': 'keyword',
        'propType': 'keyword',
        'methodCategory': 'keyword',
        'if': 'keyword',
        'do': 'keyword',
        'while': 'keyword',
        'let': 'keyword',
        'return': 'keyword',
        'else': 'keyword',
        'void': 'keyword',
        'var': 'keyword',
        'primVal': 'keyword',
        'identifier': 'identifier',
        'integerConstant': 'integerConstant',
        'stringConstant': 'stringConstant',
        'lb': 'symbol',
        'rb': 'symbol',
        'binOp': 'symbol',
        'equals': 'symbol',
        'minus': 'symbol',
        'not': 'symbol',
        'lp': 'symbol',
        'rp': 'symbol',
        'c': 'symbol',
        'dot': 'symbol',
        'sc': 'symbol',
        'lbr': 'symbol',
        'rbr': 'symbol',
    }
    print(f"<!-- Analyzing {fn} #-->")

    # Lexer part
    with open(fn) as f:
        tokens = lex(f.read(), debug)
    tok, s = next(tokens)
    print('<tokens>')
    while tok is not TokenType.eof:
        t = tokenClass[tok.name[4:]]
        if t == 'symbol':
            s = escape(s)
        print(f'<{t}> {s} </{t}>')
        tok, s = next(tokens)
    print('</tokens>')

    # Parser part
    with open(fn) as f:
        tokens = lex(f.read(), debug)
    parser = Parser(debug)
    expr = parser.parse(tokens)
    res = "\n".join(filter(lambda x: len(x) > 0, str(expr).split("\n")))
    print(res)

def test_all_data(self):
    expect_fail = ["test7.oreo"]
    for filename in os.listdir(get_data_dir()):
        path = os.path.join(get_data_dir(), filename)
        if os.path.isfile(path):
            with open(path, "r") as f:
                s = f.read()
            if filename in expect_fail:
                self.assertRaises(ParseError, lexer.lex, s)
            else:
                lexer.lex(s)  # just check no exceptions

def test_complex_term_definition():
    complex_term_test_1 = lexer.lex('playsAirGuitar(bob)')
    assert complex_term_test_1[0][0][1] == 'FUNCTOR'
    assert complex_term_test_1[0][1][1] == 'ATOM'

    complex_term_test_2 = lexer.lex('hide(X,father(father(father(butch))))')
    assert complex_term_test_2[0][0][1] == 'FUNCTOR'   # 'hide', functor
    assert complex_term_test_2[0][1][1] == 'VARIABLE'  # 'X', variable
    assert complex_term_test_2[0][2][1] == [('father', 'FUNCTOR'),
                                            [('father', 'FUNCTOR'),
                                             ('butch', 'ATOM')]]

    complex_term_test_3 = lexer.lex('test(X)')
    assert complex_term_test_3[0][0][1] == 'FUNCTOR'
    assert complex_term_test_3[0][1][1] == 'VARIABLE'

def test_parse():
    tokens = list(lex("(1+2.4)*1e-2/(8-9)"))
    parser = EarleyParser(tokens)
    assert parser.parse() is not None

    tokens = list(lex("(1+2.4*1e-2/(8-9)"))  # unbalanced parenthesis
    parser = EarleyParser(tokens)
    assert parser.parse() is None

    tokens = list(lex("(1+2.4)1e-2/(8-9)"))  # missing operator
    parser = EarleyParser(tokens)
    assert parser.parse() is None

def onOpen(self, event):
    """
    Runs when you try to open a file:\n
    * Lexes and parses the document and loads it into the webview.
    """
    fd = wx.FileDialog(self, "Open...",
                       wildcard="AlmostMarkdown files (*.amd)|*.amd",
                       style=wx.FD_OPEN)
    if fd.ShowModal() == wx.ID_CANCEL:
        return
    pathname = fd.GetPath()
    try:
        f = open(pathname, 'r')
        f.close()
        self.currentAMD = pathname
        self.edit.LoadFile(pathname)
        self.onKeyUp(wx.KeyEvent(wx.wxEVT_NULL))
        tokens = lexer.lex(self.edit.GetValue())
        self.generateHtml(tokens, self.html, self.exeDir)
        self.wv.LoadURL(f"file://{self.html}")
    except IOError:
        wx.LogError("Cannot open the specified file '%s'." % pathname)

def test_block_sequential_literals(self):
    sequential_types = (List, Vector)
    item_specs = ((int, IntegerAtom, (1, 2, 3)),
                  (lambda s: s.strip('"'), StringAtom,
                   ('"Garnet"', '"Amethyst"', '"Pearl"')))
    for sequential_type in sequential_types:
        for item_purifier, item_class, item_sequence in item_specs:
            with self.subTest(sequential_type=sequential_type,
                              item_sequence=item_sequence):
                source = """
{}…{}—
  {}
  {}
  {}
""".format(sequential_type.open_delimiter_character,
           sequential_type.close_delimiter_character,
           *item_sequence)
                parsed = list(parse(lex(source)))
                annotated = list(annotate(parsed))
                try:
                    sequential, = annotated
                except ValueError:  # bad unpack: drop into the debugger
                    from pudb import set_trace as debug
                    debug()
                self.assertEqual(sequential_type, sequential.__class__)
                self.assertEqual(
                    [item_class(item_purifier(i)) for i in item_sequence],
                    sequential.elements
                )

def test_mixed(self):
    s = '''
    {
        "name": "Brent Pappas",
        "age": 22,
        "interests": ["juggling", "programming", "reading"]
    }
    '''
    self.assertEqual(lex(s), [
        Token(Tag.LEFT_BRACE),
        ObjectKey("name"), Literal("Brent Pappas"), Token(Tag.COMMA),
        ObjectKey("age"), Number(22), Token(Tag.COMMA),
        ObjectKey("interests"), Token(Tag.LEFT_BRACKET),
        Literal("juggling"), Token(Tag.COMMA),
        Literal("programming"), Token(Tag.COMMA),
        Literal("reading"), Token(Tag.RIGHT_BRACKET),
        Token(Tag.RIGHT_BRACE)
    ])

def main():
    argparser = argparse.ArgumentParser(
        description='The cinch language compiler')
    argparser.add_argument('file',
                           help='The file to be compiled or interpreted')
    argparser.add_argument('-c', help='Produce an object file',
                           action='store_true')
    argparser.add_argument('-S', help='Produce an assembly file',
                           action='store_true')
    argparser.add_argument('-i', '--interpret',
                           help='Interpret the file. Do not compile it',
                           action='store_true')
    argparser.add_argument('-o', help='The file which should be output to')
    argparser.add_argument('-m', help='The machine to target. May be x86',
                           choices=['x86'], default='x86')
    args = argparser.parse_args()

    with open(args.file, 'r') as f:
        source = f.read()
    tokens = lexer.lex(source)
    ast = parser.parse(tokens)  # noqa - this is a stub

    if args.interpret:  # argparse stores the flag under its long name
        # from cinch import interpreter
        # exit_code = interpreter.interpret(ast)
        # sys.exit(exit_code)
        pass

    # assembly = compiler.compile(ast, args.m)
    if args.S:
        # write out assembly
        pass
    elif args.c:
        # write out object file
        pass
    else:
        # actually link the thing
        pass

def assembleFile(f_in_name, f_out_name):
    """
    Okay, so this is the entry point for the assembler.

    This calls the lexer, which calls the parser, which calls the
    assembler, which calls the s19 generator. The flow looks like this:

        f_in_name >> lexer >> parser >> assembler >> s19_gen >> f_out_name

    The lexer takes in the input file and returns a token stream.
    The parser takes in the token stream and returns an AST.
    The assembler takes in an AST and returns a list of binary data,
    each with its memory location.
    The s19 generator takes that list and returns the list of records.
    Then this function, now that it has the list of records, writes
    them to the output file.
    """
    print(f_in_name + " >> " + f_out_name)
    toks = lex(f_in_name)
    # print("\n".join(str(x) for x in toks))
    ast = parse(toks)
    # print("\n".join([str(x) for x in ast]))
    outFile = link([assemble(ast)])
    with open(f_out_name, "w+") as f:
        f.write(outFile)
        f.write("\n")

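# A minimal sketch of driving the assembleFile pipeline above from the
# command line. The fallback file names "prog.asm" and "prog.s19" are
# hypothetical examples for illustration, not part of the original project.
if __name__ == "__main__":
    import sys
    if len(sys.argv) == 3:
        # Input and output paths supplied by the caller.
        assembleFile(sys.argv[1], sys.argv[2])
    else:
        # Illustrative defaults (assumed names, not from the source).
        assembleFile("prog.asm", "prog.s19")
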
def compile(source):
    vprint("Lexing ... \n")
    tokens = lexer.lex(source)
    show(lexemes, "Token List : \n", tokens)

    vprint("Parsing ... \n")
    tree = parser.parse(tokens)
    showTree(parseTree, "Parse Tree : \n", tree)

    vprint("Unsugaring ... \n")
    tree = unsugarer.unsugar(tree)
    showTree(unsugTree, "Unsugared Tree : \n", tree)

    tree = simplifier.simplify(tree)

    if Optimize:
        vprint("Optimizing ... " + str(optimizer.level) + "\n")
        tree = optimizer.optimize(tree)
        showTree(optimTree, "Optimized Tree : \n", tree)

    if flattened:
        vprint("Flattening ... \n")
        show(flattened, "Flattened Tree : \n", flattener.flatten(tree))

    if Generate:
        vprint("Translating ... \n")
        translator.addfunc(tree)
        show(translation, "Generated Assembly ... \n", translator.getresult())
    return translator.getresult()

def dir_stats(d):
    path = "datasets/" + d
    fs = os.listdir(path)
    lens = []
    vocab = []
    for f in fs:
        if f[-4:] in file_endings:
            continue
        f_str = fopen(path + "/" + f)
        ts = lex(f_str)
        lens.append(len(ts))
        vocab = vocab + ts
    lens = np.array(lens)
    vocab = list(set(vocab))
    print("lens:")
    print(lens)
    print("stats for dir '" + d + "'")
    print("  vocab len               : " + str(len(vocab)))
    print("  prg len-tokens          : " + str(lens.shape[0]))
    print("  prg max-len-tokens      : " + str(lens.max()))
    print("  prg min-len-tokens      : " + str(lens.min()))
    print("  prg avg-len-tokens      : " + str(round(lens.mean())))
    # np.std is the standard deviation, so label it as such.
    print("  prg std-len-tokens      : " + str(round(np.std(lens))))
    print("  prg len-tokens<=5000    : " + str(np.count_nonzero(lens <= 5000)))

def run(self, env):
    for i in range(len(self.args[1::2])):
        # Initialize all our variables
        env.vars[self.args[1::2][i]] = (self.args[i * 2], '0')
    env.funcs[self.name] = self.args  # Put our function in the dictionary
    env.write('jmp e%s' % self.name)  # Jump over our function until it is called
    env.write('%s:' % self.name)  # Add our label
    env.indent += 1
    env.pos += 1
    level = 1  # The bracket level
    while level != 0:
        if parser.end(env.lines[env.pos]):
            # A statement ended; decrement the bracket level
            level -= 1
        else:
            start = '{' in env.lines[env.pos]  # Check if a statement started
            stream = lexer.lex(env.lines[env.pos])  # Lex the current line
            func, stream = parser.parse(stream)  # Parse it
            stream = [token[0] for token in stream]  # Remove the tags
            func = mapping[func](stream)
            func.run(env)  # Run the function; this adds stuff to the file
            if '{' in env.lines[env.pos]:
                # A statement began; increment the bracket level
                level += 1
        if level == 0:
            env.pos += 1
            break
    env.write('ret')  # Return
    env.indent -= 1
    env.write('e%s:' % self.name)  # Label used to jump over our function
    env.indent += 1
    env.write('nop')  # Do nothing
    env.indent -= 1

def test_dictionary_literal_annotated_with_definition(self):
    source = """
    := dee {"rah" 1; "hey" 2;}"""
    annotated = list(annotate(parse(lex(source))))
    def_dee, = annotated
    dictionary_literal_node = def_dee.identified
    self.assertEqual(IdentifierAtom("dee"),
                     dictionary_literal_node.identifier)

def main():
    parse(process(lex("pi = 3.14"))[0])
    # Count from 1 so the first extra argument reports as "1st", not "0th".
    for i, a in enumerate(argv[1:], start=1):
        try:
            parse(process(lex(a))[0])
        except Exception:
            if str(i)[-1] == "1":
                k = "st"
            elif str(i)[-1] == "2":
                k = "nd"
            elif str(i)[-1] == "3":
                k = "rd"
            else:
                k = "th"
            print("%s%s argument is invalid!" % (i, k))
    cli()

def test_lex_bools(self):
    tests = [
        ("false", [Boolean(False)]),
        ("true", [Boolean(True)]),
        ("true false", [Boolean(True), Boolean(False)]),
    ]
    for test, expected in tests:
        self.assertEqual(lex(test), expected)

def main():
    source = input()
    token = lex(source)
    print(list(map(str, token)))
    term = Parser(token).parse()
    print(term)
    ans = eval_term(term)
    print('ans: ' + str(ans))

def test_lex_brackets(self):
    def l():
        return Token(Tag.LEFT_BRACKET)

    def r():
        return Token(Tag.RIGHT_BRACKET)

    s = "[][][[]]"
    self.assertEqual(lex(s), [l(), r(), l(), r(), l(), l(), r(), r()])

def get_ast(filename, opts, debug=False):
    """Returns the AST for a file.

    filename - filename to parse
    opts - user options

    -- returns: ast
    """
    if opts.type == 'C':
        from c.ixcode import (lang_lex_dict, lang_parse_dict)
        lex = lexer.lex(filename, lang_lex_dict, debug)
        return parser.parse(lex, filename, lang_parse_dict, debug)
    elif opts.type == 'Python':
        # Use the names imported above; the bare `python` module itself
        # is never imported here.
        from python.ixcode import (lang_lex_dict, lang_parse_dict)
        lex = lexer.lex(filename, lang_lex_dict, debug)
        return parser.parse(lex, filename, lang_parse_dict, debug)
    arg_err("Unable to parse %s language files." % opts.type)

def test_lex_braces(self):
    def l():
        return Token(Tag.LEFT_BRACE)

    def r():
        return Token(Tag.RIGHT_BRACE)

    s = "{}{}{{}}"
    self.assertEqual(lex(s), [l(), r(), l(), r(), l(), l(), r(), r()])

def compile_code(source):
    """Compile the provided source code into assembly.

    source - The C source code to compile.
    return - The asm output
    """
    tokens = lex(source)
    for token in tokens:
        print(token)
    return tokens

def process_file(filename):
    try:
        filetext = open(filename, 'r').read()
    except IOError:  # narrowed from a bare except
        err.invalid_file(filename)  # errors.py
    nodeList = parse(lex(filetext))  # lexer.py, parser.py
    return nodeList

def test_lex_negatives(self):
    s = "-123 -45.6 -3e-4"
    # This may just be a quirk of the way this works, but the input
    # s = "-123-45.6-3e-4" works too. Hmm...
    # Ah, should be fine since this is just lexing.
    # In fact, I think this is actually how this should work.
    self.assertEqual(
        lex(s), [Number(-123), Number(-45.6), Number(-0.0003)])

def test_lex_sci(self):
    s = "1e2 2.1e3 3e-4 1E+2 2.1E+3 3E-4"
    self.assertEqual(lex(s), [
        Number(100), Number(2100.0), Number(0.0003),
        Number(100), Number(2100.0), Number(0.0003)
    ])

def test_parse_object(self):
    for test, expected in [
        ('{}', {}),
        ('{"name": "brent"}', {"name": "brent"}),
        ('{"name": "brent","age":22}', {"name": "brent", "age": 22}),
        ('{"name": "brent","age":22,"interests":["juggling","programming","reading"]}',
         {"name": "brent", "age": 22,
          "interests": ["juggling", "programming", "reading"]}),
        ('''{
            "name": "brent",
            "age":22,
            "interests":[
                "juggling","programming","reading"
            ],
            "key1":{"key2":"value"}}
         ''',
         {"name": "brent", "age": 22,
          "interests": ["juggling", "programming", "reading"],
          "key1": {"key2": "value"}}),
        ('''
         {
            "test" : {}
         }''',
         {"test": {}}),
        ('''
         {
            "first name":"Arthur",
            "last name": "Dent",
            "age":42,
            "interests": [
                "flying", "sandwich making"
            ]
         }
         ''',
         {"first name": "Arthur", "last name": "Dent", "age": 42,
          "interests": ["flying", "sandwich making"]})
    ]:
        self.assertEqual(parse(lex(test)), expected)

def list_pattern_func(x):
    if isinstance(x, lit) and x.val == "()":
        # shouldn't happen..
        return lex("nil")
    elif isinstance(x, seq) and len(x) >= 2 and \
            isinstance(x[0], lit) and isinstance(x[-1], lit) and \
            x[0].val == "(" and x[-1].val == ")":
        if not all(isinstance(i, lit) and i.val == "," for i in x[2:-1:2]):
            return Fail
        items = x[1:-1:2]
        items = items[::-1]
        ret = lex("nil")
        for i in items:
            if isinstance(i, expr):
                ret = seq(lex("cons(") + seq([i]) + lex(",")
                          + seq([ret]) + lex(")"))
            else:
                return Fail
        return ret
    else:
        return Fail

def main(argc, argv):
    filename = 'test.txt'
    f_content = read_all(filename)
    tokens = lex(f_content)
    tf, qf, rules = parse(tokens)
    print("tf", tf, "\n")
    print("qf", qf, "\n")
    for r in rules:
        print(json.dumps(r, indent=2))

def execute(script):
    script = lexer.lex(script)
    data = [st.Stack()]
    i = 0
    cont = True
    while cont and i < len(script):
        com = script[i]
        if com == '':
            pass

def run(path):
    data = open_file(path)
    toks = lex(data)
    print("\n####### Console #######\n ")
    file = open("out.txt", "w+")
    file.write("\n####### Console #######\n ")
    file.close()
    parse(toks, labels)
    file = open("out.txt", "a")
    file.write(str(register.flag))
    file.close()

def main():
    from lexer import lex
    from tree_to_dot import tree_to_dot, view

    json_example = open('json_example.json').read()
    print(json_example)
    tokens = lex(json_example)
    parser = JsonParser(tokens)
    parse_tree = parser.parse()
    # print(parse_tree)
    dot = tree_to_dot(parse_tree)
    open('json_example.gv', 'w').write(dot)
    view(dot)

    # Test cases:
    ## json_example = open('json_bad_example.json').read()
    ## print(json_example)
    ## tokens = lex(json_example)
    ## parser = JsonParser(tokens)
    ## parse_tree = parser.parse()

    ## json_example = open('json_example_empty.json').read()
    ## print(json_example)
    ## tokens = lex(json_example)
    ## parser = JsonParser(tokens)
    ## parse_tree = parser.parse()
    ## dot = tree_to_dot(parse_tree)
    ## open('json_example_empty.gv', 'w').write(dot)
    ## view(dot)

    json_example = open('json_array_example.json').read()
    print(json_example)
    tokens = lex(json_example)
    parser = JsonParser(tokens)
    parse_tree = parser.parse()
    dot = tree_to_dot(parse_tree)
    open('json_array_example.gv', 'w').write(dot)
    view(dot)

def execute(text, print_result, ctx):
    tokens = l.lex(text)
    parser = p.Parser(tokens)
    program = parser.parse_program()
    if len(parser.errors) > 0:
        parser.print_errors()
    else:
        result = e.evaluate(program, ctx)
        if (print_result and type(result) != o.Null) or type(result) == o.Error:
            print(result)

def test_definition_sets_subsequent_global_environment(self):
    source = """
    := a [1 2 3]
    for |i a|—
      (println a)
    """
    annotated = list(annotate(parse(lex(source))))
    def_a, for_i_in_a = annotated
    self.assertIsNone(def_a.global_environment.get('a'))
    self.assertEqual(
        List([IntegerAtom(1), IntegerAtom(2), IntegerAtom(3)]),
        for_i_in_a.global_environment['a']
    )

def main():
    from lexer import lex
    from tree_to_dot import tree_to_dot, view

    json_example = open('json_example.json').read()
    print(json_example)
    tokens = lex(json_example)
    parser = JsonParser(tokens)
    parse_tree = parser.parse()
    dot = tree_to_dot(parse_tree)
    open('json_example.gv', 'w').write(dot)
    view(dot)

    print("\n*************************************")
    print("Bad Example:\n")
    json_bad_example = open('json_bad_example.json').read()
    print(json_bad_example)
    tokens = lex(json_bad_example)
    parser = JsonParser(tokens)
    try:
        parser.parse()
    except SyntaxError as e:
        print(e)

def check(stream, filename):
    """Checks if a stream is lexically and syntactically valid Joos1W code.

    Args:
        stream: The stream object to check

    Returns:
        0 if the stream is lexically and syntactically valid Joos1W code,
        42 if the stream is not lexically and syntactically valid Joos1W code.
    """
    try:
        parse(lexer.lex(stream.read()), filename)
        return 0
    except JoosSyntaxException as e:
        if not Testing.testing:
            print(e.msg)
        return 42

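# A minimal sketch of calling check() and propagating its result as the
# process exit status; the 0/42 convention comes from the docstring above.
# The file name "Test.java" is a hypothetical example, not from the source.
if __name__ == "__main__":
    import sys
    with open("Test.java") as stream:
        sys.exit(check(stream, "Test.java"))
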
def main(argc, argv):
    if argc != 3:
        print("Usage: %s <File Path> [Key]" % argv[0])
        return
    # End if
    sourceFile = open(argv[1], "r")
    source = sourceFile.read()
    symbols = lexer.lex(source)
    statements = parser.parse(symbols)
    # .digest was missing its call parentheses; md5 also needs bytes in Python 3.
    key = hashlib.md5(argv[2].encode()).digest()
    mangledStatements = mangler.mangle_hard(statements, key)

def parse(self, fp, bar):
    """Create threads to tag a chatlog."""
    chat_que = lexer.lex(fp)
    ret = [None] * len(chat_que)
    bar.full = len(chat_que)
    bar.start()
    pool = [Thread(target=self.tagging, args=(ret, chat_que, bar))
            for _ in range(self.thread_cnt)]
    for t in pool:
        t.daemon = True
        t.start()
    for t in pool:
        t.join()
    bar.done = True
    return ret

def cli(name="left", version="0.1", prompt="| %s v%s |> "):
    try:
        while True:
            code = input(warn + prompt % (name, version) + endc)
            if len(code) == 0:
                continue
            out = parse(process(lex(code))[0][0])
            if out:
                print(bold + green + str(toStr(out)) + endc)
    except (KeyboardInterrupt, EOFError):
        print(warn + "\n\nProcess finished!\n" + endc)
    except IOError:
        raise
    except Exception:
        getError()

def str_pattern_func(x, exprs=None):
    if isinstance(x, lit) and x.val == '""':
        return lex("str(nil)")
    elif (isinstance(x, seq) and len(x) == 1 and isinstance(x[0], lit)
          and x[0].val[0] == '"' and x[0].val[-1] == '"'):
        try:
            lit_val = literal_eval(x[0].val)
        except ValueError:
            return Fail
        items = lit_val[::-1]
        ret = lex("nil")
        for i in items:
            ret = seq(lex("cons(")
                      + seq([seq(lex("char(") + (lark_int(ord(i)),) + lex(")"))])
                      + lex(",") + seq([ret]) + lex(")"))
        return seq(lex("str(") + seq([ret]) + lex(")"))
    else:
        return Fail

def general_parse(code, safe_mode):
    # Parsing
    args_list = []
    tokens = lex(code)
    while tokens != []:
        to_print = add_print(tokens)
        parsed, tokens = parse(tokens, safe_mode)
        if to_print:
            parsed = 'imp_print(' + parsed + ')'
        # Finish semicolon parsing
        if tokens and tokens[0] == ';':
            tokens = tokens[1:]
        args_list.append(parsed)
    # Build the output string.
    args_list = add_preps(preps_used, safe_mode) + args_list
    py_code = '\n'.join(args_list)
    return py_code

def run(script, env={'__loops__': [], '__functions__': {}}, lex=True):
    script = lexer.lex(script)
    i = 0
    c = None
    ignore = ['return', 'debug', 'endfunc']
    while c not in ignore:
        o = (None, env)
        v = script[i].varname
        c = script[i].com
        args = evalargs(script[i].args, env)
        print(script[i].args)
        if '_' + c in dir(evaluator):
            o = getattr(evaluator, '_' + c)(env, *args)

def lexandparse(string):
    statement_stream = lexer.lex(string)
    for statement in statement_stream:
        statement = lexer.assignNameSpecs(statement, state.names)
        if statement is not None:
            ptree = parser.parse(statement, grammar)
            if ptree is not None:
                ptree = parser.massage(ptree, grammar, lexer.punctuation)
                output = state.evaluate(ptree)
                if output is not None:
                    if isinstance(output, list):
                        for i in output:
                            print(i)
                    else:
                        print(output)

def build_envs(files):
    """Lexes/parses/does checking for all files in files."""
    global find_all_in_package_cache
    find_all_in_package_cache = {}
    trees = [ElementTree.ElementTree(file="Array.xml").getroot()]
    files = ["$Array.java"] + files
    trees[0].filename = "$Array.java"
    for f in files[1:]:
        if f in cached_trees:
            trees += [cached_trees[f]]
        else:
            CurrentFile.name = f
            tree = parse(lex(open(f).read()), f)
            cached_trees[f] = tree
            trees += [tree]

    name_to_class = {}
    for tree in trees:
        CurrentFile.name = tree.filename
        clazz = find_type_decl(tree)
        name = clazz.get("canonical_name")
        error_if(name in name_to_class, "Duplicate class defined.")
        if name.find(".") != -1:
            name_to_class[name] = clazz

    for x in range(0, len(trees)):
        CurrentFile.name = trees[x].filename
        if trees[x] not in cached_environments:
            build_environments(trees[x], trees, name_to_class)
            cached_environments[files[x]] = trees[x].env

    for tree in trees:
        CurrentFile.name = tree.filename
        clazz = find_type_decl(tree)
        clazz.env.add_superclass_methods()
        clazz.env.add_superclass_fields()
        check_types(tree)
        check_hierarchy(tree)
    return trees

def test_named_function_definition(self):
    source = """
    :=λ first_plus_square_of_second |a ^int b ^int| → ^int
      (+ a (⋅ b b))
    := we_assert "a and b were in the fn body's env., but not here"
    """
    defn_first_plus, def_we_assert = annotate(parse(lex(source)))
    # The local environment has our arguments,
    self.assertEqual(
        Argument(IdentifierAtom('a'), TypeSpecifierAtom("^int")),
        defn_first_plus.expressions[0].local_environment['a']
    )
    # and they don't leak.
    self.assertIsNone(def_we_assert.local_environment.get('a'))
    # And we can see the defined function from the environment of
    # the subsequent expression.
    self.assertEqual(
        type(def_we_assert.environment['first_plus_square_of_second']),
        NamedFunctionDefinition
    )

def main():
    parser = argparse.ArgumentParser(
        description="The interpreter for pattern-based-language")
    parser.add_argument("-f", "--file", action="store", dest="file",
                        type=str, help="the file to execute")
    parser.add_argument("-p", "--parse", action="store_true", default=False,
                        help="just parse the file - don't execute it")
    parser.add_argument("-t", "--tree", action="store_true", default=False,
                        help="print the parse tree")
    parser.add_argument("-i", "--interactive", action="store_true",
                        default=False,
                        help="enter interactive mode after the file has been run")
    parser.add_argument("-v", "--version", action="version",
                        version="pattern-based-language v1.0 -- Copyright Zac Garby © 2017")
    args = parser.parse_args()

    if args.file is None:
        repl(c.Context())
    else:
        try:
            text = open(args.file).read()
            if args.parse or args.tree:
                tokens = l.lex(text)
                parse = p.Parser(tokens)
                program = parse.parse_program()
                if len(parse.errors) > 0:
                    parse.print_errors()
                elif args.tree:
                    print(program)
                return
            ctx = c.Context()
            execute(text, False, ctx)
            if args.interactive:
                repl(ctx)
        except FileNotFoundError:
            print("File not found: %s" % args.file)
            return

def imp_lex(characters):
    return lexer.lex(characters, token_exprs)

from parser import parse

__author__ = 'ay27'

from table import init_token_table

if __name__ == '__main__':
    try:
        g.pas_src = open(g.src_file_name)
    except OSError:
        err('cannot open file %s' % g.src_file_name)
        exit(-1)
    if g.pas_src is None:
        err('cannot open file %s' % g.src_file_name)
        exit(-1)

    init_token_table()
    debug('token table init finish')

    try:
        lex(g.pas_src)
    except IOError:
        err('something went wrong')
    debug('lex finish')

    g.dyd = open('test.dyd')
    try:
        parse(g.dyd)
    except StopIteration:
        print('finish')

def pyth_eval(a):
    if not isinstance(a, str):
        raise BadTypeCombinationError(".v", a)
    return eval(parse(lex(a, safe_mode))[0], environment)

        self.msg = msg
        self.callback = callback

        def func_send_socket(matched_dict, exprs):
            sock = matched_dict[self.socket].socket
            msg = matched_dict[self.msg]
            print("sending", str_val(msg))
            # sendall???
            sock.sendall(str.encode(str_val(msg)))  # converts to bytes in Python 3
            return matched_dict[self.callback].lazy_exe(exprs)

        self.func = func_send_socket

    def __repr__(self):
        return self.socket.__repr__() + ".send " + self.msg.__repr__() + \
            " " + self.callback.__repr__()


lark_true = lex("True")
lark_false = lex("False")

import select


class has_data_socket_func(builtin_func):
    def __init__(self, socket):
        self.socket = socket

        def func_has_data_socket(matched_dict, exprs):
            sock = matched_dict[self.socket].socket
            rs, ws, es = select.select([sock], [sock], [sock])
            if len(rs) == 1:
                return lark_true
            else:
                return lark_false

        self.func = func_has_data_socket

    parser.add_option('-I', '--print-lexical', dest='p_lexical',
                      action='store_true', default=False,
                      help='print the outcome of the lexical analysis')
    parser.add_option('-s', '--print-syntax', dest='p_syntax',
                      action='store_true', default=False,
                      help='print the outcome of the syntax analysis')
    return parser


if __name__ == '__main__':
    pars = generate_parser()
    options, args = pars.parse_args()

    # Open the file; in Python 3 the explicit unicode() decode becomes an
    # encoding argument to open().
    f = open(args[0], encoding='utf8')
    char_stream = f.read()
    tokens = list(lexer.lex(char_stream))
    if options.p_lexical:
        for token in tokens:
            print(token)

    ast = parser.parse(TokenStream(tokens))
    if options.p_syntax:
        print(ast)