Example #1
# Assumes module-level imports (pprint, lexer, parser, argv from sys) and the
# helpers DATA_FILE, parse_kb_data_file, open_code_file and answer_queries
# defined elsewhere in the module.
def run():
    # Lex and parse the knowledge-base data file.
    kb_data = parse_kb_data_file(DATA_FILE)
    kb_tokens = lexer.imp_lex(kb_data)

    kb_parsed = parser.parse_kb(kb_tokens)
    # pprint.pprint(kb_parsed)

    # Lex and parse the query file named on the command line, then answer the
    # queries against the parsed knowledge base.
    query_instructions = open_code_file(argv[1])
    query_tokens = lexer.imp_lex(query_instructions)
    queries_parsed = parser.parse_queries(query_tokens)
    pprint.pprint(queries_parsed)
    answers = answer_queries(queries_parsed, kb_parsed)
    pprint.pprint(answers)
Example #2
# Assumes the surrounding module imports sys, imp_lex from the lexer module,
# and imp_parse from the parser module.
def run():
    text = 's := 4 ; t := s - 5'
    tokens = imp_lex(text)
    print('tokens: ', tokens)
    parse_result = imp_parse(tokens)
    print('result_stmt and pos: ', parse_result)
    if not parse_result:
        sys.stderr.write('Parse error!\n')
        sys.exit(1)
    # Evaluate the AST against an initially empty environment.
    ast = parse_result.value
    env = {}
    ast.eval(env)
    print('eval result: ', env)

    sys.stdout.write('Final variable values:\n')
    for name in env:
        sys.stdout.write("%s: %s\n" % (name, env[name]))
Example #3
        # So '; + b' should yield the value (;, b), with pos advanced to the second ';'.
        # Then ^ applies process_next to (;, b).
        next_parser = self.separator + self.parser ^ process_next  # + first, then ^
        next_result = result

        while next_result:
            next_result = next_parser(tokens, result.pos)
            if next_result:
                result = next_result
        return result


if __name__ == '__main__':
    import lexer
    s = 'a ; b ; c'
    tokens = lexer.imp_lex(s)
    # output: [('a', 'ID'), (';', 'RESERVED'), ('b', 'ID'), (';', 'RESERVED'), ('c', 'ID')]
    print(tokens)

    parser = Tag('ID')
    result = parser(tokens, 0)
    # output: Result(a, 1)
    print(result)

    separator = Reserved(';', 'RESERVED')

    def post_proc(parsed):
        (left, right) = parsed
        # output: left:  ;
        print('left: ', left)
        # output: right:  b
        print('right: ', right)
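The demo above assumes the surrounding combinator machinery: Result, Reserved, Tag, and the + / ^ operators. The following is a minimal, self-contained sketch of that machinery under the token format shown in the comments; it follows the common parser-combinator pattern and is an assumption, not necessarily this project's exact implementation.

# Minimal parser-combinator sketch (an assumption, not this project's exact code).
class Result:
    def __init__(self, value, pos):
        self.value = value
        self.pos = pos
    def __repr__(self):
        return 'Result(%s, %d)' % (self.value, self.pos)

class Parser:
    def __add__(self, other):      # p + q: run p, then q; value is (p_value, q_value)
        return Concat(self, other)
    def __xor__(self, function):   # p ^ f: run p, then apply f to its value
        return Process(self, function)

class Reserved(Parser):
    def __init__(self, value, tag):
        self.value, self.tag = value, tag
    def __call__(self, tokens, pos):
        if pos < len(tokens) and tokens[pos] == (self.value, self.tag):
            return Result(tokens[pos][0], pos + 1)
        return None

class Tag(Parser):
    def __init__(self, tag):
        self.tag = tag
    def __call__(self, tokens, pos):
        if pos < len(tokens) and tokens[pos][1] == self.tag:
            return Result(tokens[pos][0], pos + 1)
        return None

class Concat(Parser):
    def __init__(self, left, right):
        self.left, self.right = left, right
    def __call__(self, tokens, pos):
        left_result = self.left(tokens, pos)
        if left_result:
            right_result = self.right(tokens, left_result.pos)
            if right_result:
                return Result((left_result.value, right_result.value),
                              right_result.pos)
        return None

class Process(Parser):
    def __init__(self, parser, function):
        self.parser, self.function = parser, function
    def __call__(self, tokens, pos):
        result = self.parser(tokens, pos)
        if result:
            return Result(self.function(result.value), result.pos)
        return None

# Usage against the token list from the demo:
tokens = [('a', 'ID'), (';', 'RESERVED'), ('b', 'ID'),
          (';', 'RESERVED'), ('c', 'ID')]
pair = (Reserved(';', 'RESERVED') + Tag('ID'))(tokens, 1)
print(pair)  # Result((';', 'b'), 3)
print((Reserved(';', 'RESERVED') + Tag('ID') ^ (lambda p: p[1]))(tokens, 1))  # Result(b, 3)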
Example #4
import sys
from lexer import Lexer, imp_lex
from parser import parse

if __name__ == '__main__':
    # Read the source file named on the command line.
    filename = sys.argv[1]
    with open(filename) as f:
        characters = f.read()

    # Lex it; imp_lex signals a lexing error by returning -1 here.
    tokens = imp_lex(characters)
    if tokens == -1:
        print("Encountered lexing error, aborting...")
        sys.exit(1)

    print("Lexer Results:")
    for token in tokens:
        print("Symbol: %s Token: %s Row: %s Column: %s"
              % (token.symbol, token.tag, token.row, token.column))

    print("\nParser Results:")
    parse(tokens)
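This driver expects each token object to carry symbol, tag, row, and column attributes. A minimal sketch of such a token type follows; it is hypothetical, and the project's Lexer may define its own class instead.

# Hypothetical token container matching the attributes the driver reads;
# the project's Lexer may define its own class instead.
from collections import namedtuple

Token = namedtuple('Token', ['symbol', 'tag', 'row', 'column'])

# A lexer producing such tokens would let the loop above print, for example:
# "Symbol: x Token: ID Row: 1 Column: 1"
t = Token(symbol='x', tag='ID', row=1, column=1)
print("Symbol: %s Token: %s Row: %s Column: %s" % (t.symbol, t.tag, t.row, t.column))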