Example #1
0
def do():
    """Generates state machines for all modes. Each mode results into 
       a separate state machine that is stuck into a virtual function
       of a class derived from class 'quex_mode'.
    """
    # NOTE: The generated header file that contains the lexical analyser class includes
    #       the file "code_base/code_base/definitions-quex-buffer.h". The analyser
    #       functions, however, also need access to the lexer class, so the header
    #       of the generated lexical analyser is included here as well.
    lexer_mode.token_id_db["TERMINATION"]   = TokenInfo("TERMINATION",   ID=Setup.token_id_termination)
    lexer_mode.token_id_db["UNINITIALIZED"] = TokenInfo("UNINITIALIZED", ID=Setup.token_id_uninitialized)

    mode_db = __get_mode_db(Setup)

    # (*) get list of modes that are actually implemented
    #     (abstract modes only serve as common base)
    mode_list      = filter(lambda mode: mode.options["inheritable"] != "only", mode_db.values())
    mode_name_list = map(lambda mode: mode.name, mode_list)

    # (*) Implement the 'quex' core class from a template
    #
    # -- do the coding of the class framework
    quex_class_out.do(mode_db, Setup)

    # (*) Create the token ids
    token_id_maker.do(Setup) 

    # (*) Implement the lexer mode-specific analyser functions
    inheritance_info_str  = "Information about what pattern 'comes' from what mode in the inheritance tree.\n\n"
    inheritance_info_str += "[1] pattern, [2] dominating mode, [3] dominating inheritance level, [4] pattern index\n\n"
    analyzer_code = ""
    for mode in mode_list:        
        # accumulate inheritance information for comment
        x, y = get_code_for_mode(mode, mode_name_list) 
        analyzer_code        += x
        inheritance_info_str += "(%s)\n" % mode.name + y + "\n\n"
        
    # delete labels that are never referenced (a sketch follows this example)
    analyzer_code = generator.delete_unused_labels(analyzer_code)

    # generate frame for analyser code
    analyzer_code = generator.frame_this(analyzer_code)

    # Prepend the documentation about the patterns. Break up '*/' and '/*' so
    # that the documentation cannot terminate (or re-open) the multi-line
    # comment it is embedded in.
    inheritance_info_str = inheritance_info_str.replace("*/", "* /")
    inheritance_info_str = inheritance_info_str.replace("/*", "/ *")
    analyzer_code = Setup.language_db["$ml-comment"](inheritance_info_str) + "\n" + analyzer_code

    # write the analyser code to the core engine output file
    fh = open(Setup.output_core_engine_file, "wb")
    if os.linesep != "\n": analyzer_code = analyzer_code.replace("\n", os.linesep)
    fh.write(analyzer_code)
    fh.close()

    UserCodeFragment_straighten_open_line_pragmas(Setup.output_file_stem, "C")
    UserCodeFragment_straighten_open_line_pragmas(Setup.output_core_engine_file, "C")
    UserCodeFragment_straighten_open_line_pragmas(Setup.output_code_file, "C")
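
The generator.delete_unused_labels() call above drops labels that no goto statement targets. A minimal, self-contained sketch of that idea is shown below; the label format '_<number>:', the goto syntax, and the helper name are assumptions made for illustration, not the actual quex implementation.

import re

def delete_unused_labels_sketch(code):
    # Hypothetical sketch: assumes labels stand alone on a line as '_<number>:'
    # and are referenced exclusively via 'goto _<number>;'.
    defined    = set(re.findall(r"^(_\d+):[ \t]*$", code, re.MULTILINE))
    referenced = set(re.findall(r"goto\s+(_\d+)\s*;", code))
    for label in defined - referenced:
        # Remove the definition line of every label that nothing jumps to.
        code = re.sub(r"^%s:[ \t]*\n" % label, "", code, flags=re.MULTILINE)
    return code
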
Example #2
0
def analyzer_functions_get(ModeDB):
    code = []

    # (*) Collect the names of all modes. Whether a mode is actually
    #     implemented (abstract modes only serve as common base) is
    #     checked per mode below via 'is_implemented()'.
    mode_name_list = ModeDB.keys()

    for name, mode_descr in ModeDB.iteritems():
        dial_db.clear()

        # -- Generate 'Mode' from 'ModeDescriptions'
        mode = Mode(mode_descr)
        blackboard.mode_db[name] = mode

        if not mode.is_implemented(): continue

        txt_analyzer = cpp_generator.do(mode, mode_name_list)
        txt_counter = cpp_generator.do_default_counter(mode)

        code.extend(txt_counter)
        code.extend(txt_analyzer)

    code.append(
        do_comment_pattern_action_pairs(blackboard.mode_db.itervalues()))

    if not Setup.token_class_only_f:
        determine_start_mode(blackboard.mode_db)

    # (*) perform consistency check on newly generated mode_db
    consistency_check.do(blackboard.mode_db)

    # generate frame for analyser code
    return cpp_generator.frame_this("".join(code)), blackboard.mode_db
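
do_comment_pattern_action_pairs() condenses the per-mode pattern documentation into a single comment that is appended to the generated code. The sketch below shows one plausible shape for such a helper; the .name and .get_documentation() attributes are assumed interfaces, and the delimiter escaping mirrors the manual '*/' handling from Example #1 rather than the real quex code.

def do_comment_pattern_action_pairs_sketch(mode_iterable):
    """Hypothetical sketch: one C block comment listing each mode's patterns."""
    lines = ["BEGIN: MODE PATTERNS"]
    for mode in mode_iterable:
        # '.name' and '.get_documentation()' are assumed interfaces.
        lines.append("(%s)" % mode.name)
        lines.append(mode.get_documentation())
    lines.append("END: MODE PATTERNS")
    text = "\n".join(lines)
    # Break up comment delimiters so the documentation cannot close (or
    # re-open) the surrounding C comment, as done by hand in Example #1.
    text = text.replace("*/", "* /").replace("/*", "/ *")
    return "/* " + text.replace("\n", "\n * ") + "\n */\n"
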
Example #3
0
def analyzer_functions_get(ModeDB):
    code = []

    # (*) Collect the names of all modes. Whether a mode is actually
    #     implemented (abstract modes only serve as common base) is
    #     checked per mode below via 'is_implemented()'.
    mode_name_list = ModeDB.keys()

    for name, mode_descr in ModeDB.iteritems():        
        dial_db.clear()

        # -- Generate 'Mode' from 'ModeDescriptions'
        mode = Mode(mode_descr)
        blackboard.mode_db[name] = mode

        if not mode.is_implemented(): continue

        txt_analyzer = cpp_generator.do(mode, mode_name_list)
        txt_counter  = cpp_generator.do_default_counter(mode)

        code.extend(txt_counter)
        code.extend(txt_analyzer)

    code.append(do_comment_pattern_action_pairs(blackboard.mode_db.itervalues()))

    if not Setup.token_class_only_f:
        determine_start_mode(blackboard.mode_db)

    # (*) perform consistency check on newly generated mode_db
    consistency_check.do(blackboard.mode_db)

    # generate frame for analyser code
    return cpp_generator.frame_this("".join(code)), blackboard.mode_db
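
Where Examples #1 and #4 filter modes with mode.options["inheritable"] != "only", Examples #2 and #3 ask mode.is_implemented(). The stand-in class below illustrates how the two formulations can express the same rule; the attribute layout is an assumption for illustration only, not the real quex Mode class.

class ModeSketch(object):
    """Hypothetical stand-in; the real Mode class carries far more state."""
    def __init__(self, name, inheritable="yes"):
        self.name    = name
        self.options = {"inheritable": inheritable}   # assumed option layout

    def is_implemented(self):
        # A mode declared 'inheritable: only' serves purely as a common
        # base and gets no analyzer function of its own.
        return self.options["inheritable"] != "only"

Under this reading, the filter(...) expression in Examples #1 and #4 and the "if not mode.is_implemented(): continue" guard above skip exactly the same modes.
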
Example #4
0
def do():
    """Generates state machines for all modes. Each mode results into 
       a separate state machine that is stuck into a virtual function
       of a class derived from class 'quex_mode'.
    """
    token_id_maker.prepare_default_standard_token_ids()

    mode_db = __get_mode_db(Setup)

    IndentationSupportF = lexer_mode.requires_indentation_count(mode_db)
    BeginOfLineSupportF = lexer_mode.requires_begin_of_line_condition_support(mode_db)

    # (*) Implement the 'quex' core class from a template
    # -- do the coding of the class framework
    header_engine_txt,           \
    constructor_and_memento_txt, \
    header_configuration_txt     = quex_class_out.do(mode_db, IndentationSupportF, 
                                                     BeginOfLineSupportF)

    mode_implementation_txt  = mode_classes.do(mode_db)

    # (*) Generate the token ids
    #     (This needs to happen after the parsing of mode_db, since during
    #      that parsing the token_id_db is developed.)
    token_id_maker.do(Setup, IndentationSupportF) 
    map_id_to_name_function_implementation_txt = token_id_maker.do_map_id_to_name_function()

    # (*) [Optional] Make a customized token class
    token_class_h, token_class_txt = token_class_maker.do()
    
    # (*) [Optional] Generate a converter helper
    codec_converter_helper.do()

    # (*) Implement the lexer mode-specific analyser functions
    inheritance_info_str = ""
    analyzer_code        = ""

    # (*) Get list of modes that are actually implemented
    #     (abstract modes only serve as common base)
    mode_list      = filter(lambda mode: mode.options["inheritable"] != "only", mode_db.values())
    mode_name_list = map(lambda mode: mode.name, mode_list)

    for mode in mode_list:        

        # accumulate inheritance information for comment
        code = get_code_for_mode(mode, mode_name_list, IndentationSupportF, BeginOfLineSupportF) 
        analyzer_code += code

        if Setup.comment_mode_patterns_f:
            inheritance_info_str += mode.get_documentation()

    # Append the documentation about the mode patterns as a comment
    if Setup.comment_mode_patterns_f:
        analyzer_code += Setup.language_db["$ml-comment"]("BEGIN: MODE PATTERNS\n" + \
                                                          inheritance_info_str     + \
                                                          "\nEND: MODE PATTERNS")
        analyzer_code += "\n" # For safety: New content may have to start in a newline, e.g. "#ifdef ..."

    # generate frame for analyser code
    analyzer_code = generator.frame_this(analyzer_code)

    # Implementation (Potential Inline Functions)
    implementation_txt =   constructor_and_memento_txt + "\n" \
                         + token_class_txt             + "\n"

    # Engine (Source Code)
    source_txt =   mode_implementation_txt                    + "\n" \
                 + analyzer_code                              + "\n" \
                 + map_id_to_name_function_implementation_txt + "\n" 

    # (*) Write Files
    write_safely_and_close(Setup.output_configuration_file, header_configuration_txt)
    if Setup.language == "C":
        write_safely_and_close(Setup.output_header_file, header_engine_txt)
        write_safely_and_close(Setup.output_code_file, 
                               source_txt + implementation_txt)
    else:
        header_txt = header_engine_txt.replace("$$ADDITIONAL_HEADER_CONTENT$$", implementation_txt)
        write_safely_and_close(Setup.output_header_file, header_txt)
        write_safely_and_close(Setup.output_code_file, source_txt)

    if token_class_h != "":
        write_safely_and_close(lexer_mode.token_type_definition.get_file_name(), 
                               token_class_h)

    UserCodeFragment_straighten_open_line_pragmas(Setup.output_header_file, "C")
    UserCodeFragment_straighten_open_line_pragmas(Setup.output_code_file, "C")

    # assert lexer_mode.token_type_definition != None
    UserCodeFragment_straighten_open_line_pragmas(lexer_mode.token_type_definition.get_file_name(), "C")

    if Setup.source_package_directory != "":
        source_package.do()
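
Example #4 delegates file output to write_safely_and_close(), where Example #1 converted line separators and closed the file handle by hand. Below is a minimal sketch of what such a helper might do, assuming it only normalizes newlines and guarantees the handle is closed; the real quex helper may do more (error reporting, for instance).

import os

def write_safely_and_close_sketch(file_name, text):
    # Hypothetical sketch: normalize '\n' to the platform separator, as
    # Example #1 does by hand, and close the file even if write() raises.
    if os.linesep != "\n":
        text = text.replace("\n", os.linesep)
    fh = open(file_name, "wb")
    try:
        fh.write(text)
    finally:
        fh.close()
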