def testScanVariable(self):
    """A plain variable definition lexes to DEF ID = ID without errors."""
    lx = Lexer()
    lx.input("def x = y")
    tokens, errors = lx.allTokens()
    # lexing must succeed cleanly
    self.assertEqual([], errors)
    # and produce the expected token-type sequence
    self.assertEqual('DEF ID = ID', types(tokens))
def testScanBooleanTrue(self):
    """A `true` literal is scanned as a BOOL token."""
    lx = Lexer()
    lx.input("def boolean = true")
    tokens, errors = lx.allTokens()
    # lexing must succeed cleanly
    self.assertEqual([], errors)
    # and produce the expected token-type sequence
    self.assertEqual('DEF ID = BOOL', types(tokens))
def testScanLambda(self):
    """A lambda expression lexes to the LAMBDA keyword plus its parts."""
    lx = Lexer()
    lx.input("def f(x) = lambda(y) x")
    tokens, errors = lx.allTokens()
    # lexing must succeed cleanly
    self.assertEqual([], errors)
    # and produce the expected token-type sequence
    self.assertEqual('DEF ID ( ID ) = LAMBDA ( ID ) ID', types(tokens))
def testScanNegativeInteger(self):
    """A negative integer literal is scanned as a single INT token."""
    lx = Lexer()
    lx.input("def number = -102")
    tokens, errors = lx.allTokens()
    # lexing must succeed cleanly
    self.assertEqual([], errors)
    # and produce the expected token-type sequence
    self.assertEqual('DEF ID = INT', types(tokens))
def testScanList(self):
    """A bracketed list lexes to its punctuation and element tokens."""
    lx = Lexer()
    lx.input("def t = [a, b]")
    tokens, errors = lx.allTokens()
    # lexing must succeed cleanly
    self.assertEqual([], errors)
    # and produce the expected token-type sequence
    self.assertEqual('DEF ID = [ ID , ID ]', types(tokens))
def testScanTuple(self):
    """A parenthesized tuple lexes to its punctuation and element tokens."""
    lx = Lexer()
    lx.input("def t = (a, b)")
    tokens, errors = lx.allTokens()
    # lexing must succeed cleanly
    self.assertEqual([], errors)
    # and produce the expected token-type sequence
    self.assertEqual('DEF ID = ( ID , ID )', types(tokens))
def testScanFunction(self):
    """A one-argument function definition lexes to DEF ID ( ID ) = ID."""
    lx = Lexer()
    lx.input("def f(x) = x")
    tokens, errors = lx.allTokens()
    # lexing must succeed cleanly
    self.assertEqual([], errors)
    # and produce the expected token-type sequence
    self.assertEqual('DEF ID ( ID ) = ID', types(tokens))
def testScanString(self):
    """A double-quoted literal (commas and all) is scanned as one STRING token."""
    lx = Lexer()
    lx.input("def string = \"hello, world!\"")
    tokens, errors = lx.allTokens()
    # lexing must succeed cleanly
    self.assertEqual([], errors)
    # and produce the expected token-type sequence
    self.assertEqual('DEF ID = STRING', types(tokens))
('\*', 'MULTIPLY'),
('\/', 'DIVIDE'),
('\(', 'LP'),
('\)', 'RP'),
# '==' is listed before '=' so the two-character operator is matched first
('==', 'EQUAL'),
('=', 'ASSIGN'),
]
# NOTE(review): the opening of this rule list (`rules = [ ...`) is above the
# visible chunk — presumably it also defines NUMBER/ID rules; verify upstream.
# Driver: lex a user-named file line by line, filling a SymbolTable.
lx = Lexer(rules, skip_whitespace=True)
inp = input('File name: ')
# NOTE(review): `source` is opened but never closed — consider `with open(inp) as source:`.
source = open(inp)
table = SymbolTable()
for line in source:
    # feed one line to the lexer and advance the table's line counter
    lx.input(line)
    table.pos+=1
    try:
        for tok in lx.tokens():
            token = tok.getToken()
            table.insert(token)
    except LexerError as err:
        # report and keep going with the next line (best-effort lexing)
        print('LexerError at position %s' % err.pos)
# persist the finished symbol table (pickle is fine here: we wrote the data ourselves)
symbol_table = table.getTable()
with open('table.pickle', 'wb') as f:
    pickle.dump(symbol_table, f)
lexical_box = table.getLexicalBox()
# NOTE(review): `output` is opened but neither written to nor closed in this
# chunk — presumably used below; if not, remove, or wrap in `with`.
output = open('output.txt','w')
from Lexer import Lexer
from Yacc import Parser

# Driver: lex and parse the contents of the fixed test input file.
# Fix: the input file was opened/read/closed manually, which leaks the handle
# if read() raises; a `with` block guarantees closure. Dead commented-out
# token-dump loop removed.
with open('test3.txt') as source:
    text_input = source.read()

lexer = Lexer().build()
lexer.input(text_input)

# Build the parser and run it over the same text with our pre-fed lexer;
# the final False flag is passed through unchanged (debug off, per original).
parser = Parser()
parser.build().parse(text_input, lexer, False)