Code Example #1
File: GenTokens.py Project: ltcmelo/uaiso
def write_token_names():

    token_names_file = "Parsing/TokenName.cpp"

    print "Creating %s" % token_names_file

    content = GenLib.cpp_file_header()
    content += (
        "#include \"Parsing/Token.h\"\n"
        "\n"
        "namespace uaiso {\n"
        "\n"
        "std::unordered_map<std::uint16_t, const char*> tokenName {\n"
    )

    # Put tokens and names in a map, following the naming convention from the enumeration file.
    for token in _sorted_tokens:
        if token[0].startswith("BEGIN_") or token[0].startswith("END_"):
            content += "    { %s, %s },\n" % (token[0], token[1][1])
        else:
            content += "    { TK_%s, %s },\n" % (token[0], token[1][1])

    content += (
        "};\n"
        "\n"
        "} // namespace uaiso\n"
    )

    with open(token_names_file, "w") as f:
        f.write(content)
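
Both generators depend on two module-level names the excerpt does not show: GenLib.cpp_file_header() and _sorted_tokens. The sketch below is a guess at their shape, inferred only from how the loop indexes them (token[0] is the token name, token[1][0] its numeric value, token[1][1] its quoted display string); the real definitions in ltcmelo/uaiso may differ.

# Hypothetical stand-ins so the snippet above can run in isolation.
class GenLib:
    @staticmethod
    def cpp_file_header():
        # The real helper presumably emits a "generated file" banner.
        return "/* This file was generated. Do not edit. */\n\n"

# Each entry: (token name, (numeric value, C string literal for its name)).
# BEGIN_*/END_* entries delimit token ranges and keep their names verbatim;
# everything else is emitted with a TK_ prefix.
_sorted_tokens = [
    ("BEGIN_KEYWORD", (1, '"<begin_keyword>"')),
    ("IF",            (2, '"if"')),
    ("ELSE",          (3, '"else"')),
    ("END_KEYWORD",   (4, '"<end_keyword>"')),
]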
Code Example #2
File: GenTokens.py Project: ltcmelo/uaiso
def write_tokens():

    token_file = "Parsing/Token.h"

    print "Creating %s" % token_file

    content = GenLib.cpp_file_header()
    content += (
        "#ifndef UAISO_TOKEN_H__\n"
        "#define UAISO_TOKEN_H__\n"
        "\n"
        "#include \"Common/Config.h\"\n"
        "#include <cstdint>\n"
        "#include <iostream>\n"
        "#include <unordered_map>\n"
        "\n"
        "namespace uaiso {\n"
        "\n"
        "/* Tokens are unified, it's reponsibility of a lexer to provide only\n"
        "   the relevant tokens for a particular language. */\n"
        "\n"
        "enum Token : std::uint16_t\n"
        "{\n"
    )

    # Declare enum items.
    for token in _sorted_tokens:
        if token[0].startswith("BEGIN_") or token[0].startswith("END_"):
            # These entries mark a token range, not a token itself.
            enum_item = "    %s = %s,\n" % (token[0], token[1][0])
        else:
            enum_item = "    TK_%s = %s,\n" % (token[0], token[1][0])
        content += enum_item

    content += (
        "};\n"
        "\n"
        "UAISO_API std::ostream& operator<<(std::ostream& os, Token tk);\n"
        "\n"
        "} // namespace uaiso\n"
        "\n"
        "#endif"
    )

    with open(token_file, "w") as f:
        f.write(content)
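
A hypothetical driver for the two generators, assuming both live in GenTokens.py and that the script runs from the project root so the relative Parsing/ paths resolve:

# Illustrative usage only; the actual build integration in uaiso may differ.
import os

os.makedirs("Parsing", exist_ok=True)  # ensure the output directory exists
write_tokens()        # writes Parsing/Token.h (the Token enum)
write_token_names()   # writes Parsing/TokenName.cpp (the token-to-name map)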