Example #1
    def open_lex_parse_compare(self, file_path):
        # Read the source file and decode it as UTF-8.
        with open(file_path, 'rb') as f:
            code = f.read().decode("utf-8")

        # Lex the source and drop the tokens the parser ignores.
        tokens = lex(code, search_match, TokenExpressions)
        tokens = list(
            filter(lambda token: token.tokentype_ != TokenTypes.NONE, tokens))

        # Parse the token stream and wrap the resulting nodes in a Program
        # root node whose location spans the whole source.
        parsed, leftover_token = parse(code, tokens)
        program = Program(
            loc_={
                'start': {'line': 1, 'index': 0},
                'end': {
                    'line': tokens[-1].loc_["start"]["line"],
                    'index': tokens[-1].loc_["start"]["index"]
                }
            },
            range_=[0, len(code)],
            body_=parsed)

        # Mirror the source file's folder layout under test_output/ and
        # required/, and derive the generated and expected JSON file paths.
        filename, _ = os.path.splitext(os.path.basename(file_path))
        folder_output = root_dir + "/tests/parser_tests/test_output/" + os.path.basename(
            os.path.dirname(file_path))
        folder_required = root_dir + "/tests/parser_tests/required/" + os.path.basename(
            os.path.dirname(file_path))
        output_file = folder_output + "/" + filename + ".json"
        required_file = folder_required + "/" + filename + ".json"

        if not os.path.exists(folder_output):
            os.makedirs(folder_output)

        # Serialize the AST to JSON and compare it against the expected file.
        with open(output_file, "w") as f:
            f.write(program.jsonify())
        self.compare_files(required_file, output_file)
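The compare_files helper used in the last line is not part of this excerpt. A minimal sketch, assuming it simply diffs the generated JSON against the stored JSON (an illustration, not the project's actual implementation):

    def compare_files(self, required_file, output_file):
        # Hypothetical helper: assert that the freshly generated JSON matches
        # the stored, required JSON exactly.
        with open(required_file, "r") as required, open(output_file, "r") as output:
            self.assertEqual(required.read(), output.read(), msg=output_file)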
Example #2
    def lex_and_compare_required_vs_output(self, file_path,
                                           required_token_order):
        # Read the source file and decode it as UTF-8.
        with open(file_path, 'rb') as f:
            code = f.read().decode("utf-8")

        # Lex the source and drop the tokens the parser ignores.
        tokens = lex(code, search_match, TokenExpressions)
        tokens = list(
            filter(lambda token: token.tokentype_ != TokenTypes.NONE, tokens))

        # Every produced token must have the expected token type, in order.
        for i in range(len(required_token_order)):
            self.assertEqual(tokens[i].tokentype_,
                             required_token_order[i],
                             msg=file_path)
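A test that drives this helper might look like the sketch below; the test name, the source file path, and the expected token order are illustrative assumptions (only the TokenTypes members themselves appear elsewhere in these examples):

    def test_function_declaration_tokens(self):
        # Hypothetical usage: point the helper at one source file and list
        # the token types it is expected to produce, in order.
        self.lex_and_compare_required_vs_output(
            "tests/lexer_tests/sample.code",
            [TokenTypes.FUNCTION_DECLARATION, TokenTypes.RETURN,
             TokenTypes.CALL, TokenTypes.EOF])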
Example #3
    # Dispatch on the head (current) token: terminator tokens end the
    # recursion, layout tokens are skipped, and every other branch hands the
    # token stream to the matching sub-parser, which returns a node plus the
    # remaining tokens.
    if   head.tokentype_ in (TokenTypes.EOF, *termination_tokens) : return [], tokens
    elif head.tokentype_ in (TokenTypes.NEW_LINE, TokenTypes.TAB) : return parse(characters, tail, termination_tokens)
    elif head.tokentype_ == TokenTypes.VARIABLE_DECLARATION       : node, tokens = parse_var_decl.parse_variable_declaration(characters, tokens)
    elif head.tokentype_ == TokenTypes.FUNCTION_DECLARATION       : node, tokens = parse_func_decl.parse_function_declaration(characters, tokens)
    elif head.tokentype_ == TokenTypes.IF                         : node, tokens = parse_if_stmt.parse_if_statement(characters, tokens)
    elif head.tokentype_ == TokenTypes.RETURN                     : node, tokens = parse_func_decl.parse_return_statement(characters, tokens)
    elif head.tokentype_ == TokenTypes.CALL                       : node, tokens = parse_func_call.parse_function_call(characters, tokens)
    else                                                          : return generate_error_message(head, characters, "Invalid Syntax", True)

    # Recursively parse the remaining tokens and prepend this node.
    nodes, tokens = parse(characters, tokens, termination_tokens)
    return [node] + nodes, tokens

if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("No source file provided")
        sys.exit()
    # os.chdir('..')

    # Read and decode the source file named on the command line.
    with open(sys.argv[1], 'rb') as f:
        code = f.read().decode("utf-8")

    # Lex the source and drop the tokens the parser ignores.
    lexed = lex(code, search_match, TokenExpressions)
    tokens = list(filter(lambda token: token.tokentype_ != TokenTypes.NONE, lexed))
    # list(map(print, lexed))

    # Parse the tokens and wrap the result in a Program root node spanning
    # the whole source.
    parsed, eof_token = parse(code, tokens)
    program = Program(
        loc_={
            'start': {'line': 1, 'index': 0},
            "end": {
                "line": tokens[-1].loc_["start"]["line"],
                "index": tokens[-1].loc_["start"]["index"]
            }
        },
        range_=[0, len(code)],
        body_=parsed)

    # print(str(program))
    # Write the pretty-printed AST as JSON to the hard-coded output path.
    with open("C:\\Users\\Nathan\\Documents\\ATP\\pretty_printed.json", "wb") as f:
        f.write(program.jsonify().encode("utf-8"))
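When run directly, the script expects the path of a source file as its first argument, for example python parser.py program.code (the script and file names here are assumptions); it then lexes and parses that file and writes the pretty-printed AST JSON to the hard-coded path above.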