def fetch_token(code):
    """Yield tokens from get_token(code), aborting on the first error token."""
    for t in get_token(code):
        if t.type == TokenType.ERRTOKEN:
            raise Exception('Error Token: ' + str(t))
        yield t
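
# Usage sketch (hedged; get_token and TokenType come from this project's
# lexer module): fetch_token is lazy, so the Exception is only raised once
# iteration actually reaches the bad token, e.g.
#
#     for tok in fetch_token("rot is -16+5^3/cos(T);"):
#         print(tok)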
            # Tail of the expression-parsing loop (the opening of this method
            # is cut off in the snippet): fold both operands into a binary
            # expression node, then advance to the next token if any remain.
            left = self.create_expression_node(token.type, left, right, token.type)
            if self.cur == len(self.stream):
                break
            token = self.stream[self.cur]

        return left


if __name__ == '__main__':
    '''Unit test of the parser: print the syntax tree with its hierarchy.'''
    # to_parse = "for t from -100 to 100 step 1 draw (t, 0);"
    to_parse = "rot is -16+5^3/cos(T);"

    # to_parse = "origin is (2, 3.5);"
    token_stream = get_token(to_parse)

    parser = Parser(token_stream, None)
    ret = parser.parse()
    # print(ret[0])

    for node in ret:
        outputSyntaxTree(node, 1)
        # print(getExpressionValue(node))
    # print("------------------------------")
    # for token in token_stream:
    #     print(token)
    # print("------------------------------")
    #
    # parser = Parser(token_stream)
    # node = parser.expression()
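The same driver can be packaged as a helper; a hedged sketch reusing this
snippet's names (parse_source itself is hypothetical):

def parse_source(src):
    # Lex `src`, hand the token stream to the Parser, and return the list
    # of top-level syntax-tree nodes.
    return Parser(get_token(src), None).parse()

for node in parse_source("rot is -16+5^3/cos(T);"):
    outputSyntaxTree(node, 1)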
Example #3
from flask import jsonify, request

def lex():
    # Flask view: tokenize the "s" query parameter and return it as JSON.
    string = request.args.get('s')
    return jsonify({'data': get_token(string)})
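A minimal harness for the view above, assuming it belongs to an ordinary
Flask app (the app object and route registration are not part of the snippet):

from flask import Flask

app = Flask(__name__)
app.add_url_rule('/lex', view_func=lex)

with app.test_client() as client:
    # Note: get_token's result must be JSON-serializable for jsonify.
    print(client.get('/lex', query_string={'s': 'origin is (2, 3.5);'}).get_json())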
Example #4
        elif action == 'WE':
            # end of a while block
            with open(self.quaternion_save_file, 'a') as f:
                f.write('we' + ',_,_,_')
                f.write('\n')
        elif action in ('+', '-', '*', '/', '<<', '>>', '&', '|', '||',
                        '&&', '!=', '==', '<=', '>=', '<', '>'):
            # for now only + - * / are actually handled
            self.semantic_sym = 't' + str(self.temporary_num)
            with open(self.quaternion_save_file, 'a') as f:
                f.write(action + ',' + op[0][-1]['value'] + ',' +
                        op[-1][-1]['value'] + ',' + self.semantic_sym)
                f.write('\n')
            self.temporary_num += 1

    def semantic_action_step2(self):
        """
        用于生成语义动作
        :return: None
        """
        if len(self.semantic_sym) > 0:
            self.token[-1][-1]['value'] = self.semantic_sym


if __name__ == '__main__':
    file_name = input('Enter the source program file name, without the .txt suffix\n') + '.txt'
    my_token = get_token('{0} {1}', file_name)
    sm = Automaton('0', my_token)
    sm.start()
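Every branch above appends one quadruple per line in op,arg1,arg2,result form
(e.g. "+,a,b,t0" or "we,_,_,_"), so the file can be read back with a small
helper (load_quadruples is a hypothetical name):

def load_quadruples(path):
    # One quadruple per line: (op, arg1, arg2, result).
    with open(path) as f:
        return [tuple(line.strip().split(',')) for line in f if line.strip()]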
Example #5
        return True


def SEMI():
    global list_line, list_syn, list_value, m
    if list_syn[m] == 'SEMI':
        # `list` is a module-level result list defined elsewhere in the
        # original file (it shadows the builtin of the same name).
        list.append('SEMI')
        m += 1
        return True
    return False
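
The per-terminal helpers in this parser all share this match-and-advance
shape; the pattern generalizes to one function (match is a hypothetical name
built on the snippet's globals):

def match(expected):
    # Consume the current token if its kind equals `expected`.
    global m
    if list_syn[m] == expected:
        m += 1
        return True
    return False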


if __name__ == '__main__':
    # `content` (the raw source), p (scan position), syn (current token kind)
    # and `line` (line counter) are initialized earlier in the original file;
    # the snippet starts mid-driver.
    content = lexer.get_code(content)
    content = lexer.clear_comment(content)
    while syn != "#":
        p, syn, value, line = lexer.get_token(content, p, line)
        if syn == 'error1':
            print('string ' + value + ' not closed! Error in line ' +
                  str(line))
        elif syn == 'error2':
            print('number ' + value + ' cannot start with 0! Error in line ' +
                  str(line))
        elif syn == 'error3':
            print('char ' + value + ' not closed! Error in line ' + str(line))
        elif syn == 'error4':
            print('number ' + value + ' illegal! Error in line ' + str(line))
        elif syn == 'error5':
            print('identifier ' + value +
                  ' contains illegal characters! Error in line ' + str(line))
        elif syn == 'error6':
            print('number ' + value + ' contains letters! Error in line ' +
                  str(line))
Example #6
import os

def read_test():
    # normpath(join(abspath('test.pl'), '')) reduces to plain abspath:
    # join only adds a trailing separator that normpath strips again.
    path_name = os.path.abspath('test.pl')
    for x in Reader(get_token(path_name)):
        print(x)