Example #1
def write_engine_header(Modes, Setup):

    QuexClassHeaderFileTemplate = (Setup.QUEX_TEMPLATE_DB_DIR 
                                   + "/template/lexical_analyzer_class").replace("//","/")
    CoreEngineDefinitionsHeader = (Setup.QUEX_TEMPLATE_DB_DIR + "/core_engine/").replace("//","/")
    QuexClassHeaderFileOutput   = Setup.output_file_stem
    LexerClassName              = Setup.output_engine_name
    VersionID                   = Setup.input_application_version_id
    QuexVersionID               = Setup.QUEX_VERSION

    # -- determine character type according to number of bytes per ucs character code point
    #    for the internal engine.
    quex_character_type_str = { 1: "uint8_t ", 2: "uint16_t", 4: "uint32_t", 
                                   "wchar_t": "wchar_t" }[Setup.bytes_per_ucs_code_point]
    quex_lexeme_type_str    = { 1: "char    ", 2: "int16_t",  4: "int32_t",  
                                   "wchar_t": "wchar_t" }[Setup.bytes_per_ucs_code_point]

    #    is the byte order of integers in Setup 'little endian' or 'big endian'?
    if Setup.byte_order == "little":
        quex_coding_name_str = { 1: "ASCII", 2: "UCS-2LE", 4: "UCS-4LE", 
                                    "wchar_t": "WCHAR_T" }[Setup.bytes_per_ucs_code_point]
    else:
        quex_coding_name_str = { 1: "ASCII", 2: "UCS-2BE", 4: "UCS-4BE", 
                                    "wchar_t": "WCHAR_T" }[Setup.bytes_per_ucs_code_point]


    # -- determine whether the lexical analyser needs indentation counting
    #    support. if one mode has an indentation handler, then indentation
    #    support must be provided.
    indentation_support_f = False
    for mode in Modes.values():
        if mode.on_indentation.get_code() != "":
            indentation_support_f = True
            break

    lex_id_definitions_str = "" 
    # NOTE: First mode-id needs to be '1' for compatibility with flex generated engines
    i = 0
    for name in Modes.keys():
        i += 1
        lex_id_definitions_str += "const int LEX_ID_%s = %i;\n" % (name, i)

    include_guard_extension = get_include_guard_extension(Setup.output_file_stem)

    # -- instances of mode classes as members of the lexer
    mode_object_members_txt,     \
    constructor_txt,             \
    mode_specific_functions_txt, \
    friend_txt =                 \
         get_mode_class_related_code_fragments(Modes.values(), LexerClassName)

    # -- define a pointer that directly has the type of the derived class
    if Setup.input_derived_class_name == "":
        Setup.input_derived_class_name = LexerClassName
        derived_class_type_declaration = ""
    else:
        derived_class_type_declaration = "class %s;" % Setup.input_derived_class_name

    # -- the friends of the class
    friends_str = ""
    for friend in Setup.input_lexer_class_friends:
        friends_str += "    friend class %s;\n" % friend

    # -- the class body extension
    class_body_extension_str = lexer_mode.class_body.get_code()

    # -- the class constructor extension
    class_constructor_extension_str = lexer_mode.class_init.get_code()

    fh = open_file_or_die(QuexClassHeaderFileTemplate)
    template_code_txt = fh.read()
    fh.close()

    # -- check if exit/entry handlers have to be active
    entry_handler_active_f = False
    exit_handler_active_f = False
    for mode in Modes.values():
        if mode.on_entry_code_fragments() != []: entry_handler_active_f = True
        if mode.on_exit_code_fragments() != []:  exit_handler_active_f = True

    txt = template_code_txt
    def set_switch(txt, SwitchF, Name):
        if SwitchF: txt = txt.replace("$$SWITCH$$ %s" % Name, "#define    %s" % Name)
        else:       txt = txt.replace("$$SWITCH$$ %s" % Name, "// #define %s" % Name)
        return txt
    
    txt = set_switch(txt, entry_handler_active_f,  "__QUEX_OPTION_ON_ENTRY_HANDLER_PRESENT")
    txt = set_switch(txt, exit_handler_active_f,   "__QUEX_OPTION_ON_EXIT_HANDLER_PRESENT")
    txt = set_switch(txt, indentation_support_f,   "__QUEX_OPTION_INDENTATION_TRIGGER_SUPPORT")     
    txt = set_switch(txt, True,                    "__QUEX_OPTION_SUPPORT_BEGIN_OF_LINE_PRE_CONDITION")
    txt = set_switch(txt, Setup.enable_iconv_f,    "QUEX_OPTION_ENABLE_ICONV")
    txt = set_switch(txt, not Setup.disable_token_queue_f,        "QUEX_OPTION_TOKEN_SENDING_VIA_QUEUE")
    txt = set_switch(txt, not Setup.disable_string_accumulator_f, "QUEX_OPTION_STRING_ACCUMULATOR")
    txt = set_switch(txt, Setup.post_categorizer_f,               "QUEX_OPTION_POST_CATEGORIZER")
    txt = set_switch(txt, True,                    "QUEX_OPTION_VIRTUAL_FUNCTION_ON_ACTION_ENTRY")      
    txt = set_switch(txt, True,                    "QUEX_OPTION_LINE_NUMBER_COUNTING")      
    txt = set_switch(txt, True,                    "QUEX_OPTION_COLUMN_NUMBER_COUNTING")        
    txt = set_switch(txt, Setup.output_debug_f,    "QUEX_OPTION_DEBUG_TOKEN_SENDING")
    txt = set_switch(txt, Setup.output_debug_f,    "QUEX_OPTION_DEBUG_MODE_TRANSITIONS")
    txt = set_switch(txt, Setup.output_debug_f,    "QUEX_OPTION_DEBUG_QUEX_PATTERN_MATCHES")
    txt = set_switch(txt, True,                    "QUEX_OPTION_INCLUDE_STACK_SUPPORT")
    txt = set_switch(txt, not Setup.no_mode_transition_check_f,           
                               "QUEX_OPTION_RUNTIME_MODE_TRANSITION_CHECK")

    txt = blue_print(txt,
            [
                ["$$BUFFER_LIMIT_CODE$$",            "0x%X" % Setup.buffer_limit_code],
                ["$$CONSTRUCTOR_EXTENSTION$$",                  class_constructor_extension_str],
                ["$$CONSTRUCTOR_MODE_DB_INITIALIZATION_CODE$$", constructor_txt],
                ["$$CORE_ENGINE_DEFINITIONS_HEADER$$",          CoreEngineDefinitionsHeader],
                ["$$CLASS_BODY_EXTENSION$$",         class_body_extension_str],
                ["$$INCLUDE_GUARD_EXTENSION$$",      include_guard_extension],
                ["$$INITIAL_LEXER_MODE_ID$$",        "LEX_ID_" + lexer_mode.initial_mode.get_code()],
                ["$$LEXER_BUILD_DATE$$",             time.asctime()],
                ["$$LEXER_BUILD_VERSION$$",          VersionID],
                ["$$LEXER_CLASS_FRIENDS$$",          friends_str],
                ["$$LEXER_CLASS_NAME$$",             LexerClassName],
                ["$$LEXER_DERIVED_CLASS_DECL$$",     derived_class_type_declaration],
                ["$$LEXER_DERIVED_CLASS_NAME$$",     Setup.input_derived_class_name],
                ["$$LEX_ID_DEFINITIONS$$",           lex_id_definitions_str],
                ["$$MAX_MODE_CLASS_N$$",             repr(len(Modes))],
                ["$$MODE_CLASS_FRIENDS$$",           friend_txt],
                ["$$MODE_OBJECT_MEMBERS$$",              mode_object_members_txt],
                ["$$MODE_SPECIFIC_ANALYSER_FUNCTIONS$$", mode_specific_functions_txt],
                ["$$PRETTY_INDENTATION$$",               "     " + " " * (len(LexerClassName)*2 + 2)],
                ["$$QUEX_TEMPLATE_DIR$$",                Setup.QUEX_TEMPLATE_DB_DIR],
                ["$$QUEX_VERSION$$",                     QuexVersionID],
                ["$$TOKEN_CLASS$$",                      Setup.input_token_class_name],
                ["$$TOKEN_CLASS_DEFINITION_FILE$$",      Setup.input_token_class_file.replace("//","/")],
                ["$$TOKEN_ID_DEFINITION_FILE$$",         Setup.output_token_id_file.replace("//","/")],
                ["$$QUEX_CHARACTER_TYPE$$",              quex_character_type_str],
                ["$$QUEX_LEXEME_TYPE$$",                 quex_lexeme_type_str],
                ["$$CORE_ENGINE_CHARACTER_CODING$$",     quex_coding_name_str],
                ["$$USER_DEFINED_HEADER$$",              lexer_mode.header.get_code() + "\n"],
             ])

    fh_out = open(QuexClassHeaderFileOutput, "wb")
    if os.linesep != "\n": txt = txt.replace("\n", os.linesep)
    fh_out.write(txt)
    fh_out.close()
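
The blue_print() helper used throughout these examples is not shown on this page. Below is a minimal stand-in, assuming it does nothing more than substitute every [placeholder, replacement] pair into the template text; the real quex implementation may add ordering rules or error checks.

def blue_print(template_text, replacement_pairs):
    # Substitute each [placeholder, replacement] pair into the template text.
    result = template_text
    for placeholder, replacement in replacement_pairs:
        result = result.replace(placeholder, replacement)
    return result

# Illustrative call with made-up values:
print(blue_print("class $$LEXER_CLASS_NAME$$;  /* quex $$QUEX_VERSION$$ */",
                 [["$$LEXER_CLASS_NAME$$", "MyLexer"],
                  ["$$QUEX_VERSION$$",     "x.y.z"]]))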
Example #2
def _do(Descr):
    # The following things must be ensured before the function is called
    assert Descr != None
    assert Descr.__class__.__name__ == "TokenTypeDescriptor"
    ## ALLOW: Descr.get_member_db().keys() == []

    TemplateFile = QUEX_PATH \
                   + Setup.language_db["$code_base"] \
                   + Setup.language_db["$token_template_file"]

    TemplateIFile = QUEX_PATH \
                   + Setup.language_db["$code_base"] \
                   + Setup.language_db["$token_template_i_file"]

    template_str   = open_file_or_die(TemplateFile, Mode="rb").read()
    template_i_str = open_file_or_die(TemplateIFile, Mode="rb").read()
    
    virtual_destructor_str = ""
    if Descr.open_for_derivation_f: virtual_destructor_str = "virtual "

    if Descr.copy.get_pure_code() == "":
        # Default copy operation: Plain Copy of token memory
        copy_str = "__QUEX_STD_memcpy((void*)__this, (void*)__That, sizeof(QUEX_TYPE_TOKEN));\n"
    else:
        copy_str = Descr.copy.get_code()

    take_text_str = Descr.take_text.get_code()
    if take_text_str == "": take_text_str = "return true;\n" 

    include_guard_extension_str = get_include_guard_extension(
                                        Setup.language_db["$namespace-ref"](Descr.name_space) 
                                        + "__" + Descr.class_name)

    # In case of plain 'C' the class name must incorporate the namespace (list)
    token_class_name = Descr.class_name
    if Setup.language == "C":
        token_class_name = Setup.token_class_name_safe

    txt = blue_print(template_str,
             [
              ["$$BODY$$",                    Descr.body.get_code()],
              ["$$CONSTRUCTOR$$",             Descr.constructor.get_code()],
              ["$$COPY$$",                    copy_str],
              ["$$DESTRUCTOR$$",              Descr.destructor.get_code()],
              ["$$DISTINCT_MEMBERS$$",        get_distinct_members(Descr)],
              ["$$FOOTER$$",                  Descr.footer.get_code()],
              ["$$FUNC_TAKE_TEXT$$",          take_text_str],
              ["$$HEADER$$",                  Descr.header.get_code()],
              ["$$INCLUDE_GUARD_EXTENSION$$", include_guard_extension_str],
              ["$$NAMESPACE_CLOSE$$",         Setup.language_db["$namespace-close"](Descr.name_space)],
              ["$$NAMESPACE_OPEN$$",          Setup.language_db["$namespace-open"](Descr.name_space)],
              ["$$QUICK_SETTERS$$",           get_quick_setters(Descr)],
              ["$$SETTERS_GETTERS$$",         get_setter_getter(Descr)],
              ["$$TOKEN_CLASS$$",             token_class_name],
              ["$$TOKEN_REPETITION_N_GET$$",  Descr.repetition_get.get_code()],
              ["$$TOKEN_REPETITION_N_SET$$",  Descr.repetition_set.get_code()],
              ["$$UNION_MEMBERS$$",           get_union_members(Descr)],
              ["$$VIRTUAL_DESTRUCTOR$$",      virtual_destructor_str],
             ])

    txt_i = blue_print(template_i_str, 
                       [
                        ["$$CONSTRUCTOR$$",             Descr.constructor.get_code()],
                        ["$$COPY$$",                    copy_str],
                        ["$$DESTRUCTOR$$",              Descr.destructor.get_code()],
                        ["$$FOOTER$$",                  Descr.footer.get_code()],
                        ["$$FUNC_TAKE_TEXT$$",          take_text_str],
                        ["$$INCLUDE_GUARD_EXTENSION$$", include_guard_extension_str],
                        ["$$NAMESPACE_CLOSE$$",         Setup.language_db["$namespace-close"](Descr.name_space)],
                        ["$$NAMESPACE_OPEN$$",          Setup.language_db["$namespace-open"](Descr.name_space)],
                        ["$$TOKEN_CLASS$$",             token_class_name],
                        ["$$TOKEN_REPETITION_N_GET$$",  Descr.repetition_get.get_code()],
                        ["$$TOKEN_REPETITION_N_SET$$",  Descr.repetition_set.get_code()],
                       ])

    # Return declaration and implementation as two strings
    return txt, txt_i
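
open_file_or_die() is another quex utility that these examples call but do not define. A plausible minimal sketch, assuming it aborts code generation with a diagnostic when the file cannot be opened:

import sys

def open_file_or_die(file_name, Mode="rb"):
    # Open the file, or abort with an error message on the standard error stream.
    try:
        return open(file_name, Mode)
    except IOError:
        sys.stderr.write("error: cannot open file '%s'\n" % file_name)
        sys.exit(-1)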
Example #3
def write_engine_header(Modes, Setup):

    QuexClassHeaderFileTemplate = (Setup.QUEX_TEMPLATE_DB_DIR +
                                   "/template/lexical_analyzer_class").replace(
                                       "//", "/")
    CoreEngineDefinitionsHeader = (Setup.QUEX_TEMPLATE_DB_DIR +
                                   "/core_engine/").replace("//", "/")
    QuexClassHeaderFileOutput = Setup.output_file_stem
    LexerClassName = Setup.output_engine_name
    VersionID = Setup.input_application_version_id
    QuexVersionID = Setup.QUEX_VERSION

    # -- determine character type according to number of bytes per ucs character code point
    #    for the internal engine.
    quex_character_type_str = {
        1: "uint8_t ",
        2: "uint16_t",
        4: "uint32_t",
        "wchar_t": "wchar_t"
    }[Setup.bytes_per_ucs_code_point]
    quex_lexeme_type_str = {
        1: "char    ",
        2: "int16_t",
        4: "int32_t",
        "wchar_t": "wchar_t"
    }[Setup.bytes_per_ucs_code_point]

    #    is the byte order of integers in Setup 'little endian' or 'big endian'?
    if Setup.byte_order == "little":
        quex_coding_name_str = {
            1: "ASCII",
            2: "UCS-2LE",
            4: "UCS-4LE",
            "wchar_t": "WCHAR_T"
        }[Setup.bytes_per_ucs_code_point]
    else:
        quex_coding_name_str = {
            1: "ASCII",
            2: "UCS-2BE",
            4: "UCS-4BE",
            "wchar_t": "WCHAR_T"
        }[Setup.bytes_per_ucs_code_point]

    # -- determine whether the lexical analyser needs indentation counting
    #    support. if one mode has an indentation handler, then indentation
    #    support must be provided.
    indentation_support_f = False
    for mode in Modes.values():
        if mode.on_indentation.get_code() != "":
            indentation_support_f = True
            break

    lex_id_definitions_str = ""
    # NOTE: First mode-id needs to be '1' for compatibility with flex generated engines
    i = 0
    for name in Modes.keys():
        i += 1
        lex_id_definitions_str += "const int LEX_ID_%s = %i;\n" % (name, i)

    include_guard_extension = get_include_guard_extension(
        Setup.output_file_stem)

    # -- instances of mode classes as members of the lexer
    mode_object_members_txt,     \
    constructor_txt,             \
    mode_specific_functions_txt, \
    friend_txt =                 \
         get_mode_class_related_code_fragments(Modes.values(), LexerClassName)

    # -- define a pointer that directly has the type of the derived class
    if Setup.input_derived_class_name == "":
        Setup.input_derived_class_name = LexerClassName
        derived_class_type_declaration = ""
    else:
        derived_class_type_declaration = "class %s;" % Setup.input_derived_class_name

    # -- the friends of the class
    friends_str = ""
    for friend in Setup.input_lexer_class_friends:
        friends_str += "    friend class %s;\n" % friend

    # -- the class body extension
    class_body_extension_str = lexer_mode.class_body.get_code()

    # -- the class constructor extension
    class_constructor_extension_str = lexer_mode.class_init.get_code()

    fh = open_file_or_die(QuexClassHeaderFileTemplate)
    template_code_txt = fh.read()
    fh.close()

    # -- check if exit/entry handlers have to be active
    entry_handler_active_f = False
    exit_handler_active_f = False
    for mode in Modes.values():
        if mode.on_entry_code_fragments() != []: entry_handler_active_f = True
        if mode.on_exit_code_fragments() != []: exit_handler_active_f = True

    txt = template_code_txt

    def set_switch(txt, SwitchF, Name):
        if SwitchF:
            txt = txt.replace("$$SWITCH$$ %s" % Name, "#define    %s" % Name)
        else:
            txt = txt.replace("$$SWITCH$$ %s" % Name, "// #define %s" % Name)
        return txt

    txt = set_switch(txt, entry_handler_active_f,
                     "__QUEX_OPTION_ON_ENTRY_HANDLER_PRESENT")
    txt = set_switch(txt, exit_handler_active_f,
                     "__QUEX_OPTION_ON_EXIT_HANDLER_PRESENT")
    txt = set_switch(txt, indentation_support_f,
                     "__QUEX_OPTION_INDENTATION_TRIGGER_SUPPORT")
    txt = set_switch(txt, True,
                     "__QUEX_OPTION_SUPPORT_BEGIN_OF_LINE_PRE_CONDITION")
    txt = set_switch(txt, Setup.enable_iconv_f, "QUEX_OPTION_ENABLE_ICONV")
    txt = set_switch(txt, not Setup.disable_token_queue_f,
                     "QUEX_OPTION_TOKEN_SENDING_VIA_QUEUE")
    txt = set_switch(txt, not Setup.disable_string_accumulator_f,
                     "QUEX_OPTION_STRING_ACCUMULATOR")
    txt = set_switch(txt, Setup.post_categorizer_f,
                     "QUEX_OPTION_POST_CATEGORIZER")
    txt = set_switch(txt, True, "QUEX_OPTION_VIRTUAL_FUNCTION_ON_ACTION_ENTRY")
    txt = set_switch(txt, True, "QUEX_OPTION_LINE_NUMBER_COUNTING")
    txt = set_switch(txt, True, "QUEX_OPTION_COLUMN_NUMBER_COUNTING")
    txt = set_switch(txt, Setup.output_debug_f,
                     "QUEX_OPTION_DEBUG_TOKEN_SENDING")
    txt = set_switch(txt, Setup.output_debug_f,
                     "QUEX_OPTION_DEBUG_MODE_TRANSITIONS")
    txt = set_switch(txt, Setup.output_debug_f,
                     "QUEX_OPTION_DEBUG_QUEX_PATTERN_MATCHES")
    txt = set_switch(txt, True, "QUEX_OPTION_INCLUDE_STACK_SUPPORT")
    txt = set_switch(txt, not Setup.no_mode_transition_check_f,
                     "QUEX_OPTION_RUNTIME_MODE_TRANSITION_CHECK")

    txt = blue_print(txt, [
        ["$$BUFFER_LIMIT_CODE$$",
         "0x%X" % Setup.buffer_limit_code],
        ["$$CONSTRUCTOR_EXTENSTION$$", class_constructor_extension_str],
        ["$$CONSTRUCTOR_MODE_DB_INITIALIZATION_CODE$$", constructor_txt],
        ["$$CORE_ENGINE_DEFINITIONS_HEADER$$", CoreEngineDefinitionsHeader],
        ["$$CLASS_BODY_EXTENSION$$", class_body_extension_str],
        ["$$INCLUDE_GUARD_EXTENSION$$", include_guard_extension],
        [
            "$$INITIAL_LEXER_MODE_ID$$",
            "LEX_ID_" + lexer_mode.initial_mode.get_code()
        ],
        ["$$LEXER_BUILD_DATE$$", time.asctime()],
        ["$$LEXER_BUILD_VERSION$$", VersionID],
        ["$$LEXER_CLASS_FRIENDS$$", friends_str],
        ["$$LEXER_CLASS_NAME$$", LexerClassName],
        ["$$LEXER_DERIVED_CLASS_DECL$$", derived_class_type_declaration],
        ["$$LEXER_DERIVED_CLASS_NAME$$", Setup.input_derived_class_name],
        ["$$LEX_ID_DEFINITIONS$$", lex_id_definitions_str],
        ["$$MAX_MODE_CLASS_N$$", repr(len(Modes))],
        ["$$MODE_CLASS_FRIENDS$$", friend_txt],
        ["$$MODE_OBJECT_MEMBERS$$", mode_object_members_txt],
        ["$$MODE_SPECIFIC_ANALYSER_FUNCTIONS$$", mode_specific_functions_txt],
        [
            "$$PRETTY_INDENTATION$$", "     " + " " *
            (len(LexerClassName) * 2 + 2)
        ],
        ["$$QUEX_TEMPLATE_DIR$$", Setup.QUEX_TEMPLATE_DB_DIR],
        ["$$QUEX_VERSION$$", QuexVersionID],
        ["$$TOKEN_CLASS$$", Setup.input_token_class_name],
        [
            "$$TOKEN_CLASS_DEFINITION_FILE$$",
            Setup.input_token_class_file.replace("//", "/")
        ],
        [
            "$$TOKEN_ID_DEFINITION_FILE$$",
            Setup.output_token_id_file.replace("//", "/")
        ],
        ["$$QUEX_CHARACTER_TYPE$$", quex_character_type_str],
        ["$$QUEX_LEXEME_TYPE$$", quex_lexeme_type_str],
        ["$$CORE_ENGINE_CHARACTER_CODING$$", quex_coding_name_str],
        ["$$USER_DEFINED_HEADER$$",
         lexer_mode.header.get_code() + "\n"],
    ])

    fh_out = open(QuexClassHeaderFileOutput, "wb")
    if os.linesep != "\n": txt = txt.replace("\n", os.linesep)
    fh_out.write(txt)
    fh_out.close()
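
A quick demonstration of the "$$SWITCH$$" convention handled by set_switch() above: depending on the flag, a marker line of the class template becomes an active #define or a commented-out one. The helper is copied from the example; the template line is illustrative.

def set_switch(txt, SwitchF, Name):
    if SwitchF: txt = txt.replace("$$SWITCH$$ %s" % Name, "#define    %s" % Name)
    else:       txt = txt.replace("$$SWITCH$$ %s" % Name, "// #define %s" % Name)
    return txt

template_line = "$$SWITCH$$ QUEX_OPTION_LINE_NUMBER_COUNTING"
print(set_switch(template_line, True,  "QUEX_OPTION_LINE_NUMBER_COUNTING"))
# -> #define    QUEX_OPTION_LINE_NUMBER_COUNTING
print(set_switch(template_line, False, "QUEX_OPTION_LINE_NUMBER_COUNTING"))
# -> // #define QUEX_OPTION_LINE_NUMBER_COUNTING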
Example #4
def _do(Descr):
    # The following things must be ensured before the function is called
    assert Descr != None
    assert Descr.__class__.__name__ == "TokenTypeDescriptor"
    ## ALLOW: Descr.get_member_db().keys() == []

    TemplateFile = QUEX_PATH \
                   + Setup.language_db["$code_base"] \
                   + Setup.language_db["$token_template_file"]

    TemplateIFile = QUEX_PATH \
                   + Setup.language_db["$code_base"] \
                   + Setup.language_db["$token_template_i_file"]

    template_str = open_file_or_die(TemplateFile, Mode="rb").read()
    template_i_str = open_file_or_die(TemplateIFile, Mode="rb").read()

    virtual_destructor_str = ""
    if Descr.open_for_derivation_f: virtual_destructor_str = "virtual "

    if Descr.copy.get_pure_code() == "":
        # Default copy operation: Plain Copy of token memory
        copy_str = "__QUEX_STD_memcpy((void*)__this, (void*)__That, sizeof(QUEX_TYPE_TOKEN));\n"
    else:
        copy_str = Descr.copy.get_code()

    take_text_str = Descr.take_text.get_code()
    if take_text_str == "": take_text_str = "return true;\n"

    include_guard_extension_str = get_include_guard_extension(
        Setup.language_db["$namespace-ref"](Descr.name_space) + "__" +
        Descr.class_name)

    # In case of plain 'C' the class name must incorporate the namespace (list)
    token_class_name = Descr.class_name
    if Setup.language == "C":
        token_class_name = Setup.token_class_name_safe

    txt = blue_print(template_str, [
        ["$$BODY$$", Descr.body.get_code()],
        ["$$CONSTRUCTOR$$", Descr.constructor.get_code()],
        ["$$COPY$$", copy_str],
        ["$$DESTRUCTOR$$", Descr.destructor.get_code()],
        ["$$DISTINCT_MEMBERS$$",
         get_distinct_members(Descr)],
        ["$$FOOTER$$", Descr.footer.get_code()],
        ["$$FUNC_TAKE_TEXT$$", take_text_str],
        ["$$HEADER$$", Descr.header.get_code()],
        ["$$INCLUDE_GUARD_EXTENSION$$", include_guard_extension_str],
        [
            "$$NAMESPACE_CLOSE$$", Setup.language_db["$namespace-close"](
                Descr.name_space)
        ],
        [
            "$$NAMESPACE_OPEN$$", Setup.language_db["$namespace-open"](
                Descr.name_space)
        ],
        ["$$QUICK_SETTERS$$", get_quick_setters(Descr)],
        ["$$SETTERS_GETTERS$$",
         get_setter_getter(Descr)],
        ["$$TOKEN_CLASS$$", token_class_name],
        ["$$TOKEN_REPETITION_N_GET$$",
         Descr.repetition_get.get_code()],
        ["$$TOKEN_REPETITION_N_SET$$",
         Descr.repetition_set.get_code()],
        ["$$UNION_MEMBERS$$", get_union_members(Descr)],
        ["$$VIRTUAL_DESTRUCTOR$$", virtual_destructor_str],
    ])

    txt_i = blue_print(template_i_str, [
        ["$$CONSTRUCTOR$$", Descr.constructor.get_code()],
        ["$$COPY$$", copy_str],
        ["$$DESTRUCTOR$$", Descr.destructor.get_code()],
        ["$$FOOTER$$", Descr.footer.get_code()],
        ["$$FUNC_TAKE_TEXT$$", take_text_str],
        ["$$INCLUDE_GUARD_EXTENSION$$", include_guard_extension_str],
        [
            "$$NAMESPACE_CLOSE$$", Setup.language_db["$namespace-close"](
                Descr.name_space)
        ],
        [
            "$$NAMESPACE_OPEN$$", Setup.language_db["$namespace-open"](
                Descr.name_space)
        ],
        ["$$TOKEN_CLASS$$", token_class_name],
        ["$$TOKEN_REPETITION_N_GET$$",
         Descr.repetition_get.get_code()],
        ["$$TOKEN_REPETITION_N_SET$$",
         Descr.repetition_set.get_code()],
    ])

    # Return declaration and implementation as two strings
    return txt, txt_i
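
get_include_guard_extension() also comes from the quex utilities. A minimal sketch under the assumption that it only has to map a qualified name (namespace reference plus class name) onto characters that are legal inside a preprocessor include guard:

import re

def get_include_guard_extension(name):
    # Keep only identifier characters so the result can be embedded into a guard macro.
    return re.sub(r"[^a-zA-Z0-9_]", "_", name).upper()

print(get_include_guard_extension("quex::Token"))   # -> QUEX__TOKEN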
Example #5
def do(setup, IndentationSupportF):
    """Creates a file of token-ids from a given set of names.
       It also creates the function:

       const string& $$token$$::map_id_to_name().
    """
    global file_str
    LanguageDB = Setup.language_db

    __propose_implicit_token_definitions()

    for standard_token_id in standard_token_id_list:
        assert token_id_db.has_key(standard_token_id)

    assert lexer_mode.token_type_definition != None, \
           "Token type has not been defined yet, see $QUEX_PATH/quex/core.py how to\n" + \
           "handle this."

    # (*) Token ID File ________________________________________________________________
    #
    #     The token id file can either be specified as database of
    #     token-id names, or as a file that directly assigns the token-ids
    #     to variables. If the flag '--user-token-id-file' is defined, then
    #     then the token-id file is provided by the user. Otherwise, the
    #     token id file is created by the token-id maker.
    #
    #     The token id maker considers the file passed by the option '-t'
    #     as the database file and creates a C++ file with the output filestem
    #     plus the suffix "--token-ids". Note, that the token id file is a
    #     header file.
    #
    if len(token_id_db.keys()) == len(standard_token_id_list):
        token_id_str = "%sTERMINATION and %sUNINITIALIZED" % \
                       (setup.token_id_prefix_plain, setup.token_id_prefix_plain) 
        # TERMINATION + UNINITIALIZED = 2 token ids. If they are the only ones nothing can be done.
        error_msg("Only token ids %s are defined.\n" % token_id_str + \
                  "Quex refuses to proceed. Please, use the 'token { ... }' section to\n" + \
                  "specify at least one other token id.")

    #______________________________________________________________________________________
    L = max(map(lambda name: len(name), token_id_db.keys()))
    def space(Name):
        return " " * (L - len(Name))

    # -- define values for the token ids
    def define_this(txt, token):
        if setup.language == "C":
            txt.append("#define %s%s %s((QUEX_TYPE_TOKEN_ID)%i)\n" \
                       % (setup.token_id_prefix_plain, token.name, space(token.name), token.number))
        else:
            txt.append("const QUEX_TYPE_TOKEN_ID %s%s%s = ((QUEX_TYPE_TOKEN_ID)%i);\n" \
                       % (setup.token_id_prefix_plain, token.name, space(token.name), token.number))

    if setup.token_id_foreign_definition_file != "":
        token_id_txt = ["#include \"%s\"\n" % get_file_reference(setup.token_id_foreign_definition_file)]

    else:
        if setup.language == "C": 
            prolog = ""
            epilog = ""
        else:
            prolog = LanguageDB["$namespace-open"](setup.token_id_prefix_name_space)
            epilog = LanguageDB["$namespace-close"](setup.token_id_prefix_name_space)

        token_id_txt = [prolog]

        # Assign values to tokens with no numeric identifier
        # NOTE: This does not have to happen if the tokens are defined in a user-provided file.
        i = setup.token_id_counter_offset
        # Take the 'dummy_name' only to have the list sorted by name. The key 'dummy_name' 
        # may contain '--' to indicate a unicode value, so do not use it as name.
        for dummy_name, token in sorted(token_id_db.items()):
            if token.number == None: 
                while __is_token_id_occupied(i):
                    i += 1
                token.number = i; 

            define_this(token_id_txt, token)

        # Double-check that no token id appears twice.
        # Again, this can only happen if quex itself produced the numeric values for the tokens.
        token_list = token_id_db.values()
        for i, x in enumerate(token_list):
            for y in token_list[i+1:]:
                if x.number != y.number: continue
                error_msg("Token id '%s'" % x.name, x.file_name, x.line_n, DontExitF=True)
                error_msg("and token id '%s' have same numeric value '%s'." \
                          % (y.name, x.number), y.file_name, y.line_n, DontExitF=True)
                          
        token_id_txt.append(epilog)

    tc_descr   = lexer_mode.token_type_definition

    content = blue_print(file_str,
                         [["$$TOKEN_ID_DEFINITIONS$$",        "".join(token_id_txt)],
                          ["$$DATE$$",                        time.asctime()],
                          ["$$TOKEN_CLASS_DEFINITION_FILE$$", get_file_reference(lexer_mode.token_type_definition.get_file_name())],
                          ["$$INCLUDE_GUARD_EXT$$",           get_include_guard_extension(
                                                                  LanguageDB["$namespace-ref"](tc_descr.name_space) 
                                                                  + "__" + tc_descr.class_name)],
                          ["$$TOKEN_PREFIX$$",                setup.token_id_prefix]])

    write_safely_and_close(setup.output_token_id_file, content)
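
To illustrate the space()/define_this() pair above: the padding aligns the generated token-id definitions on the longest token name. The prefix, names, and numbers below are made up for the demonstration.

names_and_numbers = [("TERMINATION", 0), ("UNINITIALIZED", 1), ("NUMBER", 10000)]
L = max(len(name) for name, number in names_and_numbers)

def space(Name):
    return " " * (L - len(Name))

for name, number in names_and_numbers:
    print("#define TKN_%s %s((QUEX_TYPE_TOKEN_ID)%i)" % (name, space(name), number))
# #define TKN_TERMINATION   ((QUEX_TYPE_TOKEN_ID)0)
# #define TKN_UNINITIALIZED ((QUEX_TYPE_TOKEN_ID)1)
# #define TKN_NUMBER        ((QUEX_TYPE_TOKEN_ID)10000)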
Example #6
def do(Modes, setup):

    QuexClassHeaderFileTemplate = (setup.QUEX_TEMPLATE_DB_DIR 
                               + "/template/lexical_analyzer_class-C").replace("//","/")
    CoreEngineDefinitionsHeader = (setup.QUEX_TEMPLATE_DB_DIR + "/core_engine/").replace("//","/")
    if setup.plain_memory_f: CoreEngineDefinitionsHeader += "definitions-quex-buffer.h"
    else:                    CoreEngineDefinitionsHeader += "definitions-plain-memory.h"
    QuexClassHeaderFileOutput   = setup.output_file_stem
    LexerClassName              = setup.output_engine_name
    VersionID                   = setup.input_application_version_id
    QuexVersionID               = setup.QUEX_VERSION
    DerivedClassHeaderFileName  = setup.input_derived_class_file
    ModeClassImplementationFile = setup.output_code_file


    # -- determine whether the lexical analyser needs indentation counting
    #    support. if one mode has an indentation handler, then indentation
    #    support must be provided.
    indentation_support_f = False
    for mode in Modes.values():
        if mode.on_indentation.line_n != -1:
            indentation_support_f = True
            break

    lex_id_definitions_str = "" 
    # NOTE: First mode-id needs to be '1' for compatibility with flex generated engines
    i = 0
    for name in Modes.keys():
        i += 1
        lex_id_definitions_str += "const int LEX_ID_%s = %i;\n" % (name, i)

    include_guard_extension = get_include_guard_extension(setup.output_file_stem)

    # -- mode class member function definitions (on_entry, on_exit, has_base, ...)
    mode_class_member_functions_txt = mode_classes.do(Modes.values())

    # -- instances of mode classes as members of the lexer
    mode_object_members_txt,     \
    constructor_txt,             \
    mode_specific_functions_txt, \
    friend_txt =                 \
         get_mode_class_related_code_fragments(Modes.values())

    # -- get the code for the user defined all-match actions
    try:
        fh_aux = open(setup.output.user_match_action)
        user_match_action_str = fh_aux.read()
        fh_aux.close()
    except:
        user_match_action_str = "/* no extra class content */"

    # -- define a pointer that directly has the type of the derived class
    if setup.input_derived_class_name == "":
        setup.input_derived_class_name = LexerClassName
        derived_class_type_declaration = ""
    else:
        derived_class_type_declaration = "class %s;" % setup.input_derived_class_name

    # -- the friends of the class
    friends_str = ""
    for friend in setup.input_lexer_class_friends:
        friends_str += "    friend class %s;\n" % friend

    # -- the class body extension
    class_body_extension_str = lexer_mode.class_body.get_code()

    # -- the class constructor extension
    class_constructor_extension_str = lexer_mode.class_init.get_code()

    fh = open_file_or_die(QuexClassHeaderFileTemplate)
    template_code_txt = fh.read()
    fh.close()

    # -- check if exit/entry handlers have to be active
    entry_handler_active_f = False
    exit_handler_active_f = False
    for mode in Modes.values():
        if mode.on_entry_code_fragments() != []: entry_handler_active_f = True
        if mode.on_exit_code_fragments() != []:  exit_handler_active_f = True

    txt = template_code_txt
    def set_switch(txt, SwitchF, Name):
        if SwitchF: txt = txt.replace("%%%%SWITCH%%%% %s" % Name, "#define    %s" % Name)
        else:       txt = txt.replace("%%%%SWITCH%%%% %s" % Name, "// #define %s" % Name)
        return txt
    
    txt = set_switch(txt, entry_handler_active_f,  "__QUEX_OPTION_ON_ENTRY_HANDLER_PRESENT")
    txt = set_switch(txt, exit_handler_active_f,   "__QUEX_OPTION_ON_EXIT_HANDLER_PRESENT")
    txt = set_switch(txt, indentation_support_f,   "__QUEX_OPTION_INDENTATION_TRIGGER_SUPPORT")     
    txt = set_switch(txt, setup.plain_memory_f,    "__QUEX_CORE_OPTION_PLAIN_MEMORY_BASED")     
    txt = set_switch(txt, True,                    "__QUEX_CORE_OPTION_SUPPORT_BEGIN_OF_LINE_PRE_CONDITION")
    txt = set_switch(txt, True,                    "QUEX_OPTION_VIRTUAL_FUNCTION_ON_ACTION_ENTRY")      
    txt = set_switch(txt, False,                   "QUEX_OPTION_NO_LINE_NUMBER_COUNTING")       
    txt = set_switch(txt, False,                   "QUEX_OPTION_NO_COLUMN_NUMBER_COUNTING")     
    
    txt = blue_print(txt,
                     [
                      ["%%CONSTRUCTOR_EXTENSTION%%",                  class_constructor_extension_str],
                      ["%%CONSTRUCTOR_MODE_DB_INITIALIZATION_CODE%%", constructor_txt],
                      ["%%CORE_ENGINE_DEFINITIONS_HEADER%%",          CoreEngineDefinitionsHeader],
                      ["%%CLASS_BODY_EXTENSION%%",         class_body_extension_str],
                      ["%%INCLUDE_GUARD_EXTENSION%%",      include_guard_extension],
                      ["%%INITIAL_LEXER_MODE_ID%%",        "LEX_ID_" + lexer_mode.initial_mode.get_code()],
                      ["%%LEXER_BUILD_DATE%%",             time.asctime()],
                      ["%%LEXER_BUILD_VERSION%%",          VersionID],
                      ["%%LEXER_CLASS_FRIENDS%%",          friends_str],
                      ["$$LEXER_CLASS_NAME$$",             LexerClassName],
                      ["%%LEXER_DERIVED_CLASS_DECL%%",     derived_class_type_declaration],
                      ["%%LEXER_DERIVED_CLASS_NAME%%",     setup.input_derived_class_name],
                      ["%%LEX_ID_DEFINITIONS%%",           lex_id_definitions_str],
                      ["%%MAX_MODE_CLASS_N%%",             repr(len(Modes))],
                      ["%%MODE_CLASS_FRIENDS%%",           friend_txt],
                      ["%%MODE_OBJECT_MEMBERS%%",          mode_object_members_txt],
                      ["%%MODE_SPECIFIC_ANALYSER_FUNCTIONS%%", mode_specific_functions_txt],
                      ["%%PRETTY_INDENTATION%%",           "     " + " " * (len(LexerClassName)*2 + 2)],
                      ["%%QUEX_TEMPLATE_DIR%%",            setup.QUEX_TEMPLATE_DB_DIR],
                      ["%%QUEX_VERSION%%",                 QuexVersionID],
                      ["%%TOKEN_CLASS%%",                  setup.input_token_class_name],
                      ["%%TOKEN_CLASS_DEFINITION_FILE%%",  setup.input_token_class_file.replace("//","/")],
                      ["%%TOKEN_ID_DEFINITION_FILE%%",     setup.output_token_id_file.replace("//","/")],
                      ["%%QUEX_OUTPUT_FILESTEM%%",         setup.output_file_stem],
             ])

    fh_out = open(QuexClassHeaderFileOutput, "w")
    fh_out.write(txt)
    fh_out.close()

    fh_out = open(ModeClassImplementationFile, "w")
    fh_out.write(lexer_mode.header.get() + "\n")
    
    if DerivedClassHeaderFileName != "":
        fh_out.write("#include<" + DerivedClassHeaderFileName + ">\n")
    else:
        fh_out.write("#include<" + setup.output_file_stem + ">\n")
    
    fh_out.write("namespace quex {\n")

    mode_class_member_functions_txt = \
         blue_print(mode_class_member_functions_txt,
                    [["$$LEXER_CLASS_NAME$$",         LexerClassName],
                     ["%%TOKEN_CLASS%%",              setup.input_token_class_name],
                     ["%%LEXER_DERIVED_CLASS_NAME%%", setup.input_derived_class_name]])
    
    fh_out.write(mode_class_member_functions_txt)
    fh_out.write("} // END: namespace quex\n")
    fh_out.close()


quex_mode_init_call_str = """
        quex_mode_init(&%%MN%%, this, 
                   LEX_ID_%%MN%%, "%%MN%%",
               $analyser_function,
#ifdef __QUEX_OPTION_INDENTATION_TRIGGER_SUPPORT    
               $on_indentation,
#endif
               $on_entry,
               $on_exit
#ifdef __QUEX_OPTION_RUNTIME_MODE_TRANSITION_CHECK
               ,
               $has_base,
               $has_entry_from,
               $has_exit_to
#endif
                      );
"""

def __get_mode_init_call(mode):

    analyser_function = "$$LEXER_CLASS_NAME$$__%s_analyser_function" % mode.name
    on_indentation    = "$$LEXER_CLASS_NAME$$__%s_on_indentation"    % mode.name
    on_entry          = "$$LEXER_CLASS_NAME$$__%s_on_entry"          % mode.name
    on_exit           = "$$LEXER_CLASS_NAME$$__%s_on_exit"           % mode.name
    has_base          = "$$LEXER_CLASS_NAME$$__%s_has_base"          % mode.name
    has_entry_from    = "$$LEXER_CLASS_NAME$$__%s_has_entry_from"    % mode.name
    has_exit_to       = "$$LEXER_CLASS_NAME$$__%s_has_exit_to"       % mode.name

    if mode.options["inheritable"] == "only":
        analyser_function = "/* %s = */ 0x0" % analyser_function

    if mode.on_entry_code_fragments() == []:
        on_entry = "/* %s = */ $$LEXER_CLASS_NAME$$_on_entry_exit_null_function" % on_entry

    if mode.on_exit_code_fragments() == []:
        on_exit = "/* %s = */ $$LEXER_CLASS_NAME$$_on_entry_exit_null_function" % on_exit

    if mode.on_indentation_code_fragments() == []:
        on_indentation = "/* %s = */ 0x0" % on_indentation

    txt = blue_print(quex_mode_init_call_str,
                     [["%%MN%%",             mode.name],
                      ["$analyser_function", analyser_function],
                      ["$on_indentation",    on_indentation],
                      ["$on_entry",          on_entry],
                      ["$on_exit",           on_exit],
                      ["$has_base",          has_base],
                      ["$has_entry_from",    has_entry_from],
                      ["$has_exit_to",       has_exit_to]])
    return txt
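
For reference, a tiny mock that shows what __get_mode_init_call() above produces for a mode without entry, exit, or indentation handlers. The real Mode objects come from quex's mode handling; this stand-in only carries the attributes the function reads, and it assumes blue_print() performs plain placeholder substitution. Placeholders such as $$LEXER_CLASS_NAME$$ remain unexpanded at this stage.

class MockMode:
    # Only the attributes that __get_mode_init_call() actually reads.
    name    = "PROGRAM"
    options = {"inheritable": "no"}
    def on_entry_code_fragments(self):       return []
    def on_exit_code_fragments(self):        return []
    def on_indentation_code_fragments(self): return []

print(__get_mode_init_call(MockMode()))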



def __get_mode_function_declaration(Modes, FriendF=False):
    if FriendF: prolog = "        friend "
    else:       prolog = "    extern "

    def __mode_functions(Prolog, ReturnType, NameList, ArgList):
        txt = ""
        for name in NameList:
            function_signature = "%s $$LEXER_CLASS_NAME$$__%s_%s(%s);" % \
                                 (ReturnType, mode.name, name, ArgList)
            txt += "%s" % Prolog + "    " + function_signature + "\n"
        return txt

    txt = ""
    for mode in Modes:
        if mode.options["inheritable"] != "only": 
            txt += __mode_functions(prolog, 
                                    "__QUEX_SETTING_ANALYSER_FUNCTION_RETURN_TYPE", ["analyser_function"], 
                                    "$$LEXER_CLASS_NAME$$*")

    for mode in Modes:
        if mode.on_indentation_code_fragments() != []:
            txt += __mode_functions(prolog, "void", ["on_indentation"], 
                            "$$LEXER_CLASS_NAME$$*, const quex_mode*")

    for mode in Modes:
        if mode.on_entry_code_fragments() != []:
            txt += __mode_functions(prolog, "void", ["on_entry"],
                                    "$$LEXER_CLASS_NAME$$*, const quex_mode*")
        if mode.on_exit_code_fragments() != []:
            txt += __mode_functions(prolog, "void", ["on_exit"],
                                    "$$LEXER_CLASS_NAME$$*, const quex_mode*")

    txt += "#ifdef __QUEX_OPTION_RUNTIME_MODE_TRANSITION_CHECK\n"
    for mode in Modes:
        txt += __mode_functions(prolog, "bool", ["has_base", "has_entry_from", "has_exit_to"],
                                "const quex_mode*")
        
    txt += "#endif\n"
    txt += "\n"

    return txt


def get_mode_class_related_code_fragments(Modes):
    """
       RETURNS: -- members of the lexical analyzer class for the mode classes
                -- static member functions declaring the analyzer functions for the mode classes
                -- constructor init expressions (before '{')
                -- constructor text to be executed at construction time
                -- friend declarations for the mode classes/functions
    """

    L = max(map(lambda m: len(m.name), Modes))

    members_txt = ""    
    for mode in Modes:
        members_txt += "        quex_mode  %s;\n" % mode.name


    # constructor code
    txt = ""
    for mode in Modes:
        txt += "        assert(LEX_ID_%s %s<= %i);\n" % (mode.name, " " * (L-len(mode.name)), len(Modes))

    for mode in Modes:
        txt += __get_mode_init_call(mode)

    for mode in Modes:
        txt += "        mode_db[LEX_ID_%s]%s = &%s;\n" % (mode.name, " " * (L-len(mode.name)), mode.name)
    constructor_txt = txt

    mode_functions_txt = __get_mode_function_declaration(Modes, FriendF=False)
    friends_txt        = __get_mode_function_declaration(Modes, FriendF=True)

    return members_txt,        \
           constructor_txt,    \
           mode_functions_txt, \
           friends_txt
Example #7
def write_configuration_header(ModeDB, IndentationSupportF, BeginOfLineSupportF):
    LexerClassName = Setup.analyzer_class_name
    LanguageDB     = Setup.language_db

    ConfigurationTemplateFile =(  QUEX_PATH \
                                + Setup.language_db["$code_base"] \
                                + "/analyzer/configuration/TXT").replace("//","/")

    txt = get_file_content_or_die(ConfigurationTemplateFile)

    # -- check if exit/entry handlers have to be active
    entry_handler_active_f = False
    exit_handler_active_f = False
    for mode in ModeDB.values():
        if mode.get_code_fragment_list("on_entry") != []: entry_handler_active_f = True
        if mode.get_code_fragment_list("on_exit") != []:  exit_handler_active_f = True

    # Buffer filler converter (0x0 means: no buffer filler converter)
    converter_f = False
    converter_new_str = "#   define QUEX_SETTING_BUFFER_FILLERS_CONVERTER_NEW " 
    if Setup.converter_user_new_func != "": 
        converter_new_str += Setup.converter_user_new_func + "()"
        user_defined_converter_f = True
    else: 
        converter_new_str = "/* " + converter_new_str + " */"
        user_defined_converter_f = False

    # Token repetition support
    token_repeat_test_txt = ""
    for token_id_str in lexer_mode.token_repetition_token_id_list:
        token_repeat_test_txt += "TokenID == %s || " % token_id_str
    if token_repeat_test_txt != "":
        token_repeat_test_txt = token_repeat_test_txt[:-3]
    else:
        token_repeat_test_txt = "false"

    txt = __switch(txt, "QUEX_OPTION_COLUMN_NUMBER_COUNTING",        Setup.count_column_number_f)        
    txt = __switch(txt, "QUEX_OPTION_COMPUTED_GOTOS",                False)
    txt = __switch(txt, "QUEX_OPTION_CONVERTER_ICONV",               Setup.converter_iconv_f)
    txt = __switch(txt, "QUEX_OPTION_CONVERTER_ICU",                 Setup.converter_icu_f)
    txt = __switch(txt, "QUEX_OPTION_INCLUDE_STACK",                 Setup.include_stack_support_f)
    txt = __switch(txt, "QUEX_OPTION_LINE_NUMBER_COUNTING",          Setup.count_line_number_f)      
    txt = __switch(txt, "QUEX_OPTION_POST_CATEGORIZER",              Setup.post_categorizer_f)
    txt = __switch(txt, "QUEX_OPTION_RUNTIME_MODE_TRANSITION_CHECK", Setup.mode_transition_check_f)
    txt = __switch(txt, "QUEX_OPTION_STRING_ACCUMULATOR",            Setup.string_accumulator_f)
    txt = __switch(txt, "QUEX_OPTION_TOKEN_POLICY_QUEUE",            Setup.token_policy == "queue")
    txt = __switch(txt, "QUEX_OPTION_TOKEN_POLICY_SINGLE",           Setup.token_policy == "single")
    txt = __switch(txt, "QUEX_OPTION_TOKEN_REPETITION_SUPPORT",      token_repeat_test_txt != "false")
    txt = __switch(txt, "QUEX_OPTION_USER_MANAGED_TOKEN_MEMORY",     Setup.token_memory_management_by_user_f)
    txt = __switch(txt, "__QUEX_OPTION_BIG_ENDIAN",                  Setup.buffer_byte_order == "big")
    txt = __switch(txt, "__QUEX_OPTION_CONVERTER_HELPER",            Setup.converter_helper_required_f)
    txt = __switch(txt, "__QUEX_OPTION_CONVERTER",                   Setup.converter_f)
    txt = __switch(txt, "QUEX_OPTION_INDENTATION_TRIGGER",           IndentationSupportF)     
    txt = __switch(txt, "__QUEX_OPTION_LITTLE_ENDIAN",               Setup.buffer_byte_order == "little")
    txt = __switch(txt, "__QUEX_OPTION_ON_ENTRY_HANDLER_PRESENT",    entry_handler_active_f)
    txt = __switch(txt, "__QUEX_OPTION_ON_EXIT_HANDLER_PRESENT",     exit_handler_active_f)
    txt = __switch(txt, "__QUEX_OPTION_PLAIN_C",                     Setup.language.upper() == "C")
    txt = __switch(txt, "__QUEX_OPTION_SUPPORT_BEGIN_OF_LINE_PRE_CONDITION", BeginOfLineSupportF)
    txt = __switch(txt, "__QUEX_OPTION_SYSTEM_ENDIAN",               Setup.byte_order_is_that_of_current_system_f)
    txt = __switch(txt, "QUEX_OPTION_BUFFER_BASED_ANALYZIS",         Setup.buffer_based_analyzis_f)

    # -- token class related definitions
    token_descr = lexer_mode.token_type_definition
    namespace_token_str = make_safe_identifier(Setup.language_db["$namespace-ref"](token_descr.name_space))

    # -- name of the character codec
    codec_name = "unicode"
    if Setup.buffer_codec != "": 
        codec_name = make_safe_identifier(Setup.buffer_codec).lower()

    def namespace(NameSpaceList):
        result = Setup.language_db["$namespace-ref"](NameSpaceList)

        if result == "::": return ""

        assert len(result) > 2, \
               "Error while generating namespace reference '%s'" % result

        return result[:-2]

    txt = blue_print(txt, 
            [
             ["$$BUFFER_LIMIT_CODE$$",          "0x%X" % Setup.buffer_limit_code],
             ["$$CODEC_NAME$$",                 codec_name],
             ["$$INCLUDE_GUARD_EXTENSION$$",    get_include_guard_extension( Setup.language_db["$namespace-ref"](Setup.analyzer_name_space) + "__" + Setup.analyzer_class_name)],
             ["$$INITIAL_LEXER_MODE_ID$$",      "QUEX_NAME(ModeID_%s)" % lexer_mode.initial_mode.get_pure_code()],
             ["$$LEXER_BUILD_DATE$$",           time.asctime()],
             ["$$LEXER_CLASS_NAME$$",           LexerClassName],
             ["$$LEXER_CLASS_NAME_SAFE$$",      Setup.analyzer_name_safe],
             ["$$LEXER_DERIVED_CLASS_NAME$$",   Setup.analyzer_derived_class_name],
             ["$$MAX_MODE_CLASS_N$$",           repr(len(ModeDB))],
             ["$$NAMESPACE_MAIN$$",             namespace(Setup.analyzer_name_space)],
             ["$$NAMESPACE_MAIN_CLOSE$$",       Setup.language_db["$namespace-close"](Setup.analyzer_name_space).replace("\n", "\\\n")],
             ["$$NAMESPACE_MAIN_OPEN$$",        Setup.language_db["$namespace-open"](Setup.analyzer_name_space).replace("\n", "\\\n")],
             ["$$NAMESPACE_TOKEN$$",            namespace(token_descr.name_space)],
             ["$$NAMESPACE_TOKEN_CLOSE$$",      Setup.language_db["$namespace-close"](token_descr.name_space).replace("\n", "\\\n")],
             ["$$NAMESPACE_TOKEN_OPEN$$",       Setup.language_db["$namespace-open"](token_descr.name_space).replace("\n", "\\\n")],
             ["$$PATH_TERMINATION_CODE$$",      "0x%X" % Setup.path_limit_code],
             ["$$QUEX_SETTING_BUFFER_FILLERS_CONVERTER_NEW$$", converter_new_str],
             ["$$QUEX_TYPE_CHARACTER$$",        Setup.buffer_element_type],
             ["$$QUEX_VERSION$$",               QUEX_VERSION],
             ["$$TOKEN_CLASS$$",                token_descr.class_name],
             ["$$TOKEN_CLASS_NAME_SAFE$$",      token_descr.class_name_safe],
             ["$$TOKEN_COLUMN_N_TYPE$$",        token_descr.column_number_type.get_pure_code()],
             ["$$TOKEN_ID_TYPE$$",              token_descr.token_id_type.get_pure_code()],
             ["$$TOKEN_LINE_N_TYPE$$",          token_descr.line_number_type.get_pure_code()],
             ["$$TOKEN_PREFIX$$",               Setup.token_id_prefix],
             ["$$TOKEN_QUEUE_SAFETY_BORDER$$",  repr(Setup.token_queue_safety_border)],
             ["$$TOKEN_QUEUE_SIZE$$",           repr(Setup.token_queue_size)],
             ["$$TOKEN_REPEAT_TEST$$",          token_repeat_test_txt],
             ["$$USER_LEXER_VERSION$$",         Setup.user_application_version_id],
             ])

    return txt
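
__switch() is not defined in this example. A minimal stand-in, assuming it follows the same pattern as set_switch() in the earlier examples but with the (macro name, flag) argument order used here; the exact marker text inside the configuration template is an assumption.

def __switch(txt, Name, SwitchF):
    # Turn the template marker for 'Name' into an active or a disabled #define.
    if SwitchF: return txt.replace("$$SWITCH$$ %s" % Name, "#define    %s" % Name)
    else:       return txt.replace("$$SWITCH$$ %s" % Name, "/* #define %s */" % Name)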
Example #8
def write_engine_header(ModeDB):

    QuexClassHeaderFileTemplate = os.path.normpath(  QUEX_PATH
                                                   + Setup.language_db["$code_base"] 
                                                   + Setup.language_db["$analyzer_template_file"]).replace("//","/")
    LexerFileStem  = Setup.output_header_file
    LexerClassName = Setup.analyzer_class_name

    quex_converter_coding_name_str = Setup.converter_ucs_coding_name

    mode_id_definition_str = "" 
    # NOTE: First mode-id needs to be '1' for compatibility with flex generated engines
    for i, info in enumerate(ModeDB.items()):
        name = info[0]
        mode = info[1]
        if mode.options["inheritable"] == "only": continue
        mode_id_definition_str += "    QUEX_NAME(ModeID_%s) = %i,\n" % (name, i)

    if mode_id_definition_str != "":
        mode_id_definition_str = mode_id_definition_str[:-2]

    # -- instances of mode classes as members of the lexer
    mode_object_members_txt,     \
    mode_specific_functions_txt, \
    friend_txt                   = get_mode_class_related_code_fragments(ModeDB.values())

    # -- define a pointer that directly has the type of the derived class
    if Setup.analyzer_derived_class_name == "":
        Setup.analyzer_derived_class_name = LexerClassName
        derived_class_type_declaration = ""
    else:
        derived_class_type_declaration = "class %s;" % Setup.analyzer_derived_class_name

    token_class_file_name = lexer_mode.token_type_definition.get_file_name()
    token_class_name      = lexer_mode.token_type_definition.class_name
    token_class_name_safe = lexer_mode.token_type_definition.class_name_safe

    template_code_txt = get_file_content_or_die(QuexClassHeaderFileTemplate)

    include_guard_ext = get_include_guard_extension(
            Setup.language_db["$namespace-ref"](Setup.analyzer_name_space) 
            + "__" + Setup.analyzer_class_name)

    function_code_txt = write_constructor_and_memento_functions(ModeDB)

    txt = blue_print(template_code_txt,
            [
                ["$$___SPACE___$$",                      " " * (len(LexerClassName) + 1)],
                ["$$CLASS_BODY_EXTENSION$$",             lexer_mode.class_body_extension.get_code()],
                ["$$CONVERTER_HELPER$$",                 get_file_reference(Setup.output_buffer_codec_header)],
                ["$$INCLUDE_GUARD_EXTENSION$$",          include_guard_ext],
                ["$$LEXER_CLASS_NAME$$",                 LexerClassName],
                ["$$LEXER_CLASS_NAME_SAFE$$",            Setup.analyzer_name_safe],
                ["$$LEXER_CONFIG_FILE$$",                get_file_reference(Setup.output_configuration_file)],
                ["$$LEXER_DERIVED_CLASS_DECL$$",         derived_class_type_declaration],
                ["$$LEXER_DERIVED_CLASS_NAME$$",         Setup.analyzer_derived_class_name],
                ["$$QUEX_MODE_ID_DEFINITIONS$$",         mode_id_definition_str],
                ["$$MEMENTO_EXTENSIONS$$",               lexer_mode.memento_class_extension.get_code()],
                ["$$MODE_CLASS_FRIENDS$$",               friend_txt],
                ["$$MODE_OBJECTS$$",                     mode_object_members_txt],
                ["$$MODE_SPECIFIC_ANALYSER_FUNCTIONS$$", mode_specific_functions_txt],
                ["$$PRETTY_INDENTATION$$",               "     " + " " * (len(LexerClassName)*2 + 2)],
                ["$$QUEX_TEMPLATE_DIR$$",                QUEX_PATH + Setup.language_db["$code_base"]],
                ["$$QUEX_VERSION$$",                     QUEX_VERSION],
                ["$$TOKEN_CLASS_DEFINITION_FILE$$",      get_file_reference(token_class_file_name)],
                ["$$TOKEN_CLASS$$",                      token_class_name],
                ["$$TOKEN_CLASS_NAME_SAFE$$",            token_class_name_safe],
                ["$$TOKEN_ID_DEFINITION_FILE$$",         get_file_reference(Setup.output_token_id_file)],
                ["$$CORE_ENGINE_CHARACTER_CODING$$",     quex_converter_coding_name_str],
                ["$$USER_DEFINED_HEADER$$",              lexer_mode.header.get_code() + "\n"],
             ])

    return txt, function_code_txt
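
Example #8 only returns the header and implementation text; writing them out is left to the caller. A sketch of a write_safely_and_close() style helper, modelled on the call in Example #5 and on the line-ending handling in Examples #1 and #3 (the real quex utility may differ):

import os

def write_safely_and_close(file_name, txt):
    # Convert to the platform's line separator and write the generated code.
    if os.linesep != "\n": txt = txt.replace("\n", os.linesep)
    fh = open(file_name, "wb")
    fh.write(txt)
    fh.close()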