class LexFileTest(LexerTest):

    def setUp(self):
        self.Lexer = Lexer()
        self.Lexer.lex_file('test/Lexer_test.plw', error_path='test')

    def test_add_token(self):
        self.Lexer.add_token(position=(1, 0, 'test'), type='id', value='test')
        self.assertEqual(
            self.Lexer.tokens[-1],
            Token(position=(1, 0, 'test'), type='id', value='test'))
class LexerTest(unittest.TestCase):

    def setUp(self):
        self.Lexer = Lexer()
        self.Lexer.lex(RAW_TEXT, path='test')

    def tearDown(self):
        del self.Lexer

    def test_lex(self):
        generated_tokens = tuple(self.Lexer.tokens)
        self.assertTupleEqual(generated_tokens, TOKENS)
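# A minimal sketch (not part of the original tests) of how the two test
# classes above might be run when the module is executed directly.
if __name__ == '__main__':
    import unittest
    unittest.main()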
def makeInterpreter(self, text):
    from src.Lexer import Lexer
    from src.Parser import Parser
    from src.Interpreter import Interpreter

    lexer = Lexer(text)
    parser = Parser(lexer)
    interpreter = Interpreter(parser)
    return interpreter
def makeInterpreter(self, text):
    from src.Lexer import Lexer
    from src.Parser import Parser
    from src.Interpreter import Interpreter
    from src.SymbolTable import SymbolTableBuilder

    lexer = Lexer(text)
    parser = Parser(lexer)
    tree = parser.parse()

    symtab_builder = SymbolTableBuilder()
    symtab_builder.visit(tree)

    interpreter = Interpreter(tree)
    return interpreter
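# A hedged usage sketch for the two-pass helper above, as it might appear in
# a test method: the symbol-table pass runs before execution, so undeclared
# identifiers fail early. It assumes Interpreter exposes an interpret()
# method, which these snippets do not show.
def test_empty_program(self):
    interpreter = self.makeInterpreter('PROGRAM Test; BEGIN END.')
    interpreter.interpret()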
def get_tokens_from_raw(raw: str) -> List[Token]:
    '''
    Get a list of tokens from raw text

    Parameters
    ----------
    raw : str
        Raw text

    Returns
    -------
    list[Token]
        A list of all tokens found in `raw`

    Raises
    ------
    PyllowException
        If `raw` contains a syntax error or similar lexical problem
    '''
    lexer = Lexer()
    return lexer.lex(raw)
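# A short usage sketch for get_tokens_from_raw; the input string is
# illustrative, and the type/value fields match the Token shape used in the
# tests above.
tokens = get_tokens_from_raw('x = 1 + 2')
for token in tokens:
    print(token.type, token.value)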
def main():
    argparser = argparse.ArgumentParser(
        description='Generate an AST DOT file.')
    argparser.add_argument('fname', help='Pascal source file')
    args = argparser.parse_args()
    fname = args.fname
    text = open(fname, 'r').read()

    lexer = Lexer(text)
    parser = Parser(lexer)
    viz = ASTVisualizer(parser)
    content = viz.gendot()
    print(content)
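# The DOT text printed by main() is meant for Graphviz. A sketch of rendering
# a previously saved DOT file to PNG from Python; it assumes the Graphviz
# `dot` binary is installed and on PATH.
import subprocess

subprocess.run(['dot', '-Tpng', '-o', 'ast.png', 'ast.dot'], check=True)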
class Preprocessor:

    def __init__(self, path, target):
        self.path = path
        self.target = target
        self.lexer = Lexer()
        self.parser = Parser()
        self.builder = Builder()

    def compile(self, minify):
        print(f'Compiling {self.path}')
        with open(self.path, 'r') as file:
            raw = file.read()
        self.lexer.tokenize(raw)
        self.parser.parse(self.lexer.tokens)
        html = self.builder.build(self.parser.dom)
        if minify:
            html = self.builder.minify(html)
        with open(self.target, 'w', newline='\n') as file:
            file.write(html)

    def watch(self, minify):
        print(f'Watching {self.path}')
        path = self.getSanitizedPath()
        eventHandler = EventHandler(path.fn, self.compile, [minify])
        observer = Observer()
        observer.schedule(eventHandler, path.dir, recursive=False)
        observer.start()
        try:
            while True:
                sleep(.1)
        except KeyboardInterrupt:
            observer.stop()
        observer.join()

    def getSanitizedPath(self):
        pathChunks = self.path.replace('\\', '/').split('/')
        return Namespace(dir='/'.join(pathChunks[:-1]), fn=pathChunks[-1])
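# A hedged usage sketch for the Preprocessor above; the source and target
# paths are hypothetical placeholders.
preprocessor = Preprocessor('pages/index.pre', 'build/index.html')
preprocessor.compile(minify=True)   # one-shot build
# preprocessor.watch(minify=True)   # or: rebuild whenever the source changes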
def main(fname):
    # argparser = argparse.ArgumentParser(
    #     description='Generate an AST DOT file.'
    # )
    # argparser.add_argument(
    #     'fname',
    #     help='Pascal source file'
    # )
    # args = argparser.parse_args()
    # fname = args.fname
    text = open(fname, 'r').read()

    lexer = Lexer(text)
    parser = Parser(lexer)
    viz = ASTVisualizer(parser)
    content = viz.gendot()
    print(content)

    # write the DOT output; the with-block ensures the file is flushed and closed
    with open('ast.dot', 'w') as w:
        w.write(content)
        self.scope.insert(var_symbol)

    def visit_Assign(self, node):
        # right-hand side
        self.visit(node.right)
        # left-hand side
        self.visit(node.left)

    def visit_Var(self, node):
        var_name = node.value
        var_symbol = self.scope.lookup(var_name)
        if var_symbol is None:
            raise Exception(
                "Error: Symbol(identifier) not found '%s'" % var_name)


if __name__ == '__main__':
    text = """
program Main;
   var x, y : integer;
begin
   x := x + y;
end.
"""
    lexer = Lexer(text)
    parser = Parser(lexer)
    tree = parser.parse()

    semantic_analyzer = SemanticAnalyzer()
    semantic_analyzer.visit(tree)

    print(semantic_analyzer.scope)
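# A worked failure case for the analyzer above (a sketch reusing the same
# Lexer/Parser pipeline): `y` is never declared, so visit_Var's lookup fails
# on the right-hand side of the assignment and the exception is raised.
text = """
program Main;
   var x : integer;
begin
   x := y;
end.
"""
tree = Parser(Lexer(text)).parse()
SemanticAnalyzer().visit(tree)  # Exception: Symbol(identifier) not found 'y'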
def makeLexer(self, text):
    from src.Lexer import Lexer
    lexer = Lexer(text)
    return lexer
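# A hedged sketch of how makeLexer might be exercised in a test; it assumes
# the Lexer hands out tokens via a get_next_token() method, which these
# snippets do not show.
def test_lexer_integer(self):
    lexer = self.makeLexer('234')
    token = lexer.get_next_token()
    self.assertEqual(token.value, 234)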
from src.Lexer import Lexer
from src.Parser import Parser

lexer = Lexer()
parser = Parser()


def interpret(query):
    output, symbol_table = lexer.analyze(query)
    parser.parse(output)
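# A minimal REPL sketch driving interpret() above; the prompt string is
# illustrative and each query is assumed to fit on one line.
if __name__ == '__main__':
    while True:
        query = input('> ')
        if not query:
            break
        interpret(query)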
def set_stream(self, raw):
    lexer = Lexer()
    lexed = lexer.lex(raw)
    self.tree._stream = TokenStream(lexed)
    self.tree._stream.next()
from pprint import pprint

from src.AST import AST
from src.Lexer import Lexer

lexer = Lexer()
lexer.lex_file('test/binaryexpr.plw')
pprint(lexer.tokens)

tree = AST(lexer.tokens)
if not tree.parse():
    # exit()
    pass

print('-' * 100)
# pprint(tree.tree.pprint_list(), indent=4)
print('-' * 100)

tree.execute()
pprint(tree.tree._scope)
print('-' * 100)