示例#1
0
    def testScanVariable(self):
        """A plain variable definition scans to the type sequence DEF ID = ID."""
        lx = Lexer()
        lx.input("def x = y")
        toks, errs = lx.allTokens()

        # valid input must produce no lexer errors
        self.assertEqual(errs, [])

        # the token stream must match the expected type sequence
        self.assertEqual(types(toks), 'DEF ID = ID')
示例#2
0
    def testScanBooleanTrue(self):
        """The literal `true` scans as a BOOL token."""
        lx = Lexer()
        lx.input("def boolean = true")
        toks, errs = lx.allTokens()

        # valid input must produce no lexer errors
        self.assertEqual(errs, [])

        # the token stream must match the expected type sequence
        self.assertEqual(types(toks), 'DEF ID = BOOL')
示例#3
0
    def testScanLambda(self):
        """A lambda expression on the right-hand side scans with a LAMBDA token."""
        lx = Lexer()
        lx.input("def f(x) = lambda(y) x")
        toks, errs = lx.allTokens()

        # valid input must produce no lexer errors
        self.assertEqual(errs, [])

        # the token stream must match the expected type sequence
        self.assertEqual(types(toks), 'DEF ID ( ID ) = LAMBDA ( ID ) ID')
示例#4
0
    def testScanNegativeInteger(self):
        """A negative integer literal scans as a single INT token."""
        lx = Lexer()
        lx.input("def number = -102")
        toks, errs = lx.allTokens()

        # valid input must produce no lexer errors
        self.assertEqual(errs, [])

        # the token stream must match the expected type sequence
        self.assertEqual(types(toks), 'DEF ID = INT')
示例#5
0
    def testScanList(self):
        """A list literal scans to bracket, identifier and comma tokens."""
        lx = Lexer()
        lx.input("def t = [a, b]")
        toks, errs = lx.allTokens()

        # valid input must produce no lexer errors
        self.assertEqual(errs, [])

        # the token stream must match the expected type sequence
        self.assertEqual(types(toks), 'DEF ID = [ ID , ID ]')
示例#6
0
    def testScanTuple(self):
        """A tuple literal scans to parenthesis, identifier and comma tokens."""
        lx = Lexer()
        lx.input("def t = (a, b)")
        toks, errs = lx.allTokens()

        # valid input must produce no lexer errors
        self.assertEqual(errs, [])

        # the token stream must match the expected type sequence
        self.assertEqual(types(toks), 'DEF ID = ( ID , ID )')
示例#7
0
    def testScanFunction(self):
        """A one-parameter function definition scans with parenthesised ID."""
        lx = Lexer()
        lx.input("def f(x) = x")
        toks, errs = lx.allTokens()

        # valid input must produce no lexer errors
        self.assertEqual(errs, [])

        # the token stream must match the expected type sequence
        self.assertEqual(types(toks), 'DEF ID ( ID ) = ID')
示例#8
0
    def testScanString(self):
        """A double-quoted string literal scans as a single STRING token."""
        lx = Lexer()
        lx.input("def string = \"hello, world!\"")
        toks, errs = lx.allTokens()

        # valid input must produce no lexer errors
        self.assertEqual(errs, [])

        # the token stream must match the expected type sequence
        self.assertEqual(types(toks), 'DEF ID = STRING')
示例#9
0
    ('\*',              'MULTIPLY'),
    ('\/',              'DIVIDE'),
    ('\(',              'LP'),
    ('\)',              'RP'),
    ('==',               'EQUAL'),
    ('=',               'ASSIGN'),
                    
]

# Driver: tokenize a user-supplied source file, build the symbol table,
# and persist it to 'table.pickle'.
lx = Lexer(rules, skip_whitespace=True)
inp = input('File name: ')
table = SymbolTable()

# `with` guarantees the source file is closed even if lexing raises
# (the original opened it and never closed it).
with open(inp) as source:
    for line in source:
        lx.input(line)
        table.pos += 1  # track the current line/position in the table
        try:
            for tok in lx.tokens():
                table.insert(tok.getToken())
        except LexerError as err:
            # best-effort: report the error and continue with the next line
            print('LexerError at position %s' % err.pos)

# Persist the finished symbol table for later stages.
symbol_table = table.getTable()
with open('table.pickle', 'wb') as f:
    pickle.dump(symbol_table, f)

lexical_box = table.getLexicalBox()
# NOTE(review): `output` appears to be consumed by code beyond this chunk;
# it is never closed here — confirm it is closed (or use `with`) downstream.
output = open('output.txt', 'w')
示例#10
0
from Lexer import Lexer
from Yacc import Parser

# Driver: lex and parse the contents of 'test3.txt'.
lexer = Lexer().build()

# `with` guarantees the file is closed even if read() raises
# (the original open/read/close leaked the handle on error).
with open('test3.txt') as file:
    text_input = file.read()

lexer.input(text_input)

# Debug aid: uncomment to dump the raw token stream.
# while True:
#     tok = lexer.token()
#     if not tok: break
#     print(tok)

parser = Parser()
parser.build().parse(text_input, lexer, False)