Example 1
import parser
import evaluator


def calculate_file(filename):
    # Tokenize each line of the file, then evaluate the whole program.
    lines = []
    with open(filename, 'r') as f:
        for line in f:
            lines.append(parser.tokenizer(line))

    return evaluator.Evaluator(lines).run()
Example 2
def interpret_text(text: str):
    # Tokenize the source text and parse it into a list of assignments.
    lexer = tokenizer(text)
    parser = Parser(lexer)
    assign_list = parser.assign_list()

    # Execute the assignments and return the resulting variable bindings.
    interpreter = Interpreter()
    interpreter.interp(assign_list)
    return interpreter.variable_list
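
A minimal usage sketch for the pipeline above, assuming tokenizer, Parser, and Interpreter come from the same project; the input program and the printed bindings are illustrative assumptions:

# Hypothetical program; with the grammar exercised in the tests below
# this should bind c to 40 and a to (40*7)+7 = 287.
variables = interpret_text("c=2*5*4\na=(c*7)+7")
print(variables)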
Example 3
def test_parse_test2(self):
    text = """
    c=2*5*4
    a=(c*7)+7
    """
    lexer = tokenizer(text)
    parser = Parser(lexer)
    assign_res = parser.assign_list()
    self.assertEqual(len(assign_res), 2)
Example 4
def parse(self):
    # Scan the text-box contents into 'output.txt', then tokenize that file.
    scanner(self.text_box.get())
    lst_tokens = tokenizer(file='output.txt')

    # Parse the token list and draw the resulting parse tree.
    init_node = Node()
    parser = Parser(lst_tokens)
    parser.parents.append(init_node.index)
    parser.program()
    parser.draw_tree()
Example 5
def test_invalid_syntax2(self):
    # An unbalanced parenthesis should raise "Invalid syntax".
    text = """
           a=(2*7+7
           """
    lexer = tokenizer(text)
    parser = Parser(lexer)

    with self.assertRaises(Exception) as ctx:
        parser.assign_list()

    self.assertEqual(ctx.exception.args[0], "Invalid syntax")
Example 6
def test_invalid_syntax1(self):
    # A name starting with a digit ("1c") should raise "Invalid syntax".
    text = """
           1c=2*5*4
           a=c*7+7
           """
    lexer = tokenizer(text)
    parser = Parser(lexer)

    with self.assertRaises(Exception) as ctx:
        parser.assign_list()

    self.assertEqual(ctx.exception.args[0], "Invalid syntax")
Example 7
def execute(filename):
    try:
        with open(filename) as f:
            text = f.read()
    except OSError:
        print_msg("open file error\n", abort=True)

    # Tokenize and parse the source, then run the instructions on the VM.
    token = tokenizer(text)
    parser = Parser(token)
    instruction = parser.parse()

    vm = VM(instruction)
    vm.run()

    return text
Example 8
def set_tokenizer(self, presubvector, postsubvector, funcrepolist):
    '''
    Initialize the set of substitution vectors for the corpus to be
    digested.
    '''
    tokobj = tokenizer()
    for funcrepo in funcrepolist:
        tokobj.add_function_repo(funcrepo)
    # Each vector is a sequence of (type, file) pairs: 'r' registers a
    # regex file, 'f' a function list, anything else an abbreviation list.
    for loc, vector in (('pre', presubvector), ('post', postsubvector)):
        for vecttype, vectfile in vector:
            if vecttype.lower() == 'r':
                tokobj.add_regex(vectfile, op_seq=loc)
            elif vecttype.lower() == 'f':
                tokobj.add_function(vectfile, op_seq=loc)
            else:
                tokobj.add_abbrev(vectfile, op_seq=loc)
    self.tokenizer = tokobj
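
A hedged usage sketch for set_tokenizer, reusing the example files from the driver script in Example 10 below; the corpus object and the exact pairing of files are assumptions:

# Substitution vectors are sequences of (type, file) pairs:
# 'r' -> regex file, 'f' -> function list, anything else -> abbreviation list.
pre = [('r', 'example/pre_tokenizer.regexp'),
       ('f', 'example/pre_functions.list')]
post = [('f', 'example/post_functions.list')]
corpus.set_tokenizer(pre, post, ['example/fnscanner.py'])  # corpus is hypothetical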
Example 9
def calculate(expression):
    return evaluator.Evaluator([parser.tokenizer(expression)]).run()
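
A quick usage sketch, assuming the same parser and evaluator modules as in Example 1; the expression and its result are illustrative:

# Hypothetical call; 40 assumes the evaluator computes ordinary arithmetic.
result = calculate('2*5*4')
print(result)  # expected: 40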
Example 10
from parser import tokenizer
from pprint import pprint as pp

x = tokenizer()
x.add_regex('example/pre_tokenizer.regexp')
#x.add_abbrev('example/abbreviation.list')
x.add_function_repo('example/fnscanner.py', test=True)
x.add_function('example/pre_functions.list', op_seq='pre')
x.add_function('example/post_functions.list', op_seq='post')
pp(x.tokenize('I don\'t know.'))
Example 11
def parse(self, input, debug=False):
    self.is_debug = debug
    self.input = input
    self.p = parser.tokenizer(input)
    self.nextToken = None
    return self.parseProgram()
Example 12
def test_parse_test1(self):
    text = "c = 2 * 5"
    lexer = tokenizer(text)
    parser = Parser(lexer)
    assign_res = parser.assign_list()
    self.assertEqual(len(assign_res), 1)