def do():
    """Drive the full code generation pass.

    One state machine is generated per mode; each ends up as a virtual
    function of a class derived from 'quex_mode'.  Depending on 'Setup'
    this either plots, emits only the token class, or produces the whole
    analyzer (header, code file, configuration, optional codec helper).
    """
    # "DOT" language means: plot the state machines instead of coding them.
    if Setup.language == "DOT":
        return do_plot()

    described_modes_db = quex_file_parser.do(Setup.input_mode_files)

    # Token ids must be generated AFTER parsing, because parsing is what
    # populates the token_id_db.
    if Setup.external_lexeme_null_object != "":
        # An external implementation is assumed; emit nothing ourselves.
        token_id_txt = None
        map_id_to_name_txt = ""
    else:
        token_id_txt = token_id_maker.do(Setup)
        map_id_to_name_txt = token_id_maker.do_map_id_to_name_function()

    # Optionally, a customized token class.
    token_header_txt, token_impl_txt = token_class_maker.do(map_id_to_name_txt)

    if Setup.token_class_only_f:
        # Stand-alone token class generation: write the token files and stop.
        token_file = blackboard.token_type_definition.get_file_name()
        write_safely_and_close(token_file,
                               do_token_class_info() + token_header_txt)
        write_safely_and_close(Setup.output_token_class_file_implementation,
                               token_impl_txt)
        write_safely_and_close(Setup.output_token_id_file, token_id_txt)
        for path in (Setup.output_token_id_file,
                     Setup.output_token_class_file_implementation,
                     token_file):
            Lng.straighten_open_line_pragmas(path)
        return

    # Mode-specific analyser functions; this step turns the parsed mode
    # descriptions into the final mode database.
    analyzers_txt, mode_db = analyzer_functions_get(described_modes_db)

    # Core lexer class framework, generated from templates.
    configuration_txt  = configuration.do(mode_db)
    header_txt         = analyzer_class.do(mode_db)
    analyzer_impl_txt  = analyzer_class.do_implementation(mode_db) + "\n"
    mode_impl_txt      = mode_classes.do(mode_db)

    # Optionally, a codec converter helper.
    converter_header_txt, converter_impl_txt = codec_converter_helper.do()

    # Potential inline functions of the token class join the implementation.
    if token_impl_txt is not None:
        analyzer_impl_txt += token_impl_txt + "\n"

    # Engine source code.
    engine_txt = "\n".join([Lng.ENGINE_TEXT_EPILOG(),
                            mode_impl_txt,
                            analyzers_txt,
                            map_id_to_name_txt]) + "\n"

    # -- Write files ---------------------------------------------------------
    if converter_header_txt is not None:
        write_safely_and_close(Setup.output_buffer_codec_header,
                               converter_header_txt)
        write_safely_and_close(Setup.output_buffer_codec_header_i,
                               converter_impl_txt)

    if token_id_txt is not None:
        write_safely_and_close(Setup.output_token_id_file, token_id_txt)

    write_safely_and_close(Setup.output_configuration_file, configuration_txt)

    if Setup.language == "C":
        # Plain C: the implementation goes straight into the code file.
        engine_txt += analyzer_impl_txt
    else:
        # Otherwise (C++): implementation is spliced into the header.
        header_txt = header_txt.replace("$$ADDITIONAL_HEADER_CONTENT$$",
                                        analyzer_impl_txt)

    write_safely_and_close(Setup.output_header_file, header_txt)
    write_safely_and_close(Setup.output_code_file, engine_txt)

    if token_header_txt is not None:
        write_safely_and_close(blackboard.token_type_definition.get_file_name(),
                               token_header_txt)

    # Clean up any still-open '#line' pragmas in the emitted files.
    Lng.straighten_open_line_pragmas(Setup.output_header_file)
    Lng.straighten_open_line_pragmas(Setup.output_code_file)
    if not blackboard.token_type_definition.manually_written():
        Lng.straighten_open_line_pragmas(
            blackboard.token_type_definition.get_file_name())

    if Setup.source_package_directory != "":
        source_package.do()
def do():
    """Generates state machines for all modes. Each mode results into a separate
    state machine that is stuck into a virtual function of a class derived from
    class 'quex_mode'.

    Reads all configuration from the global 'Setup' object and writes the
    generated sources to the output files named there.  Returns early for
    plot-only ("DOT") and token-class-only runs.
    """
    if Setup.language == "DOT":
        return do_plot()

    mode_description_db = quex_file_parser.do(Setup.input_mode_files)

    # (*) Generate the token ids
    #     (This needs to happen after the parsing of mode_db, since during that
    #      the token_id_db is developed.)
    if Setup.external_lexeme_null_object != "":
        # Assume external implementation
        token_id_header = None
        function_map_id_to_name_implementation = ""
    else:
        token_id_header = token_id_maker.do(Setup)
        function_map_id_to_name_implementation = token_id_maker.do_map_id_to_name_function(
        )

    # (*) [Optional] Make a customized token class
    class_token_header, \
    class_token_implementation = token_class_maker.do(function_map_id_to_name_implementation)

    if Setup.token_class_only_f:
        # Token-class-only run: emit the token class files, fix up their
        # line pragmas, and stop -- no analyzer is generated.
        write_safely_and_close(blackboard.token_type_definition.get_file_name(),
                               do_token_class_info() \
                               + class_token_header)
        write_safely_and_close(Setup.output_token_class_file_implementation,
                               class_token_implementation)
        write_safely_and_close(Setup.output_token_id_file, token_id_header)
        Lng.straighten_open_line_pragmas(Setup.output_token_id_file)
        Lng.straighten_open_line_pragmas(
            Setup.output_token_class_file_implementation)
        Lng.straighten_open_line_pragmas(
            blackboard.token_type_definition.get_file_name())
        return

    # (*) implement the lexer mode-specific analyser functions
    #     During this process: mode_description_db --> mode_db
    function_analyzers_implementation, \
    mode_db = analyzer_functions_get(mode_description_db)

    # (*) Implement the 'quex' core class from a template
    # -- do the coding of the class framework
    configuration_header = configuration.do(mode_db)
    analyzer_header = analyzer_class.do(mode_db)
    analyzer_implementation = analyzer_class.do_implementation(mode_db) + "\n"
    mode_implementation = mode_classes.do(mode_db)

    # (*) [Optional] Generate a converter helper
    codec_converter_helper_header, \
    codec_converter_helper_implementation = codec_converter_helper.do()

    # Implementation (Potential Inline Functions)
    # Token class inline functions are appended to the analyzer implementation.
    if class_token_implementation is not None:
        analyzer_implementation += class_token_implementation + "\n"

    # Engine (Source Code)
    engine_txt = Lng.ENGINE_TEXT_EPILOG() + "\n" \
                 + mode_implementation + "\n" \
                 + function_analyzers_implementation + "\n" \
                 + function_map_id_to_name_implementation + "\n"

    # (*) Write Files ___________________________________________________________________
    if codec_converter_helper_header is not None:
        write_safely_and_close(Setup.output_buffer_codec_header,
                               codec_converter_helper_header)
        write_safely_and_close(Setup.output_buffer_codec_header_i,
                               codec_converter_helper_implementation)

    if token_id_header is not None:
        write_safely_and_close(Setup.output_token_id_file, token_id_header)

    write_safely_and_close(Setup.output_configuration_file,
                           configuration_header)

    if Setup.language == "C":
        # Plain C: the inline implementation goes into the code file.
        engine_txt += analyzer_implementation
    else:
        # Otherwise (C++): it is spliced into the header at the placeholder.
        analyzer_header = analyzer_header.replace(
            "$$ADDITIONAL_HEADER_CONTENT$$", analyzer_implementation)

    write_safely_and_close(Setup.output_header_file, analyzer_header)
    write_safely_and_close(Setup.output_code_file, engine_txt)

    if class_token_header is not None:
        write_safely_and_close(
            blackboard.token_type_definition.get_file_name(),
            class_token_header)

    # Close any still-open '#line' pragmas in the emitted files.
    Lng.straighten_open_line_pragmas(Setup.output_header_file)
    Lng.straighten_open_line_pragmas(Setup.output_code_file)
    if not blackboard.token_type_definition.manually_written():
        Lng.straighten_open_line_pragmas(
            blackboard.token_type_definition.get_file_name())

    if Setup.source_package_directory != "":
        source_package.do()
def do():
    """Generates state machines for all modes. Each mode results into a separate
    state machine that is stuck into a virtual function of a class derived from
    class 'quex_mode'.

    Reads all configuration from the global 'Setup' object and writes the
    generated analyzer sources to the output files named there.
    """
    token_id_maker.prepare_default_standard_token_ids()

    mode_db = __get_mode_db(Setup)

    IndentationSupportF = lexer_mode.requires_indentation_count(mode_db)
    BeginOfLineSupportF = lexer_mode.requires_begin_of_line_condition_support(mode_db)

    # (*) Implement the 'quex' core class from a template
    # -- do the coding of the class framework
    header_engine_txt,           \
    constructor_and_memento_txt, \
    header_configuration_txt = quex_class_out.do(mode_db, IndentationSupportF,
                                                 BeginOfLineSupportF)

    mode_implementation_txt = mode_classes.do(mode_db)

    # (*) Generate the token ids
    #     (This needs to happen after the parsing of mode_db, since during that
    #      the token_id_db is developed.)
    token_id_maker.do(Setup, IndentationSupportF)
    map_id_to_name_function_implementation_txt = token_id_maker.do_map_id_to_name_function()

    # (*) [Optional] Make a customized token class
    token_class_h, token_class_txt = token_class_maker.do()

    # (*) [Optional] Generate a converter helper
    codec_converter_helper.do()

    # (*) implement the lexer mode-specific analyser functions
    inheritance_info_str = ""
    analyzer_code        = ""

    # (*) Get list of modes that are actually implemented
    #     (abstract modes only serve as common base)
    # NOTE: list comprehensions instead of filter()/map() -- under Python 3
    #       a 'map' object would be exhausted after the first call to
    #       get_code_for_mode() below, and a 'filter' object could not be
    #       iterated after building mode_name_list from it.
    mode_list      = [mode for mode in mode_db.values()
                      if mode.options["inheritable"] != "only"]
    mode_name_list = [mode.name for mode in mode_list]

    for mode in mode_list:
        # accumulate inheritance information for comment
        code = get_code_for_mode(mode, mode_name_list,
                                 IndentationSupportF, BeginOfLineSupportF)
        analyzer_code += code
        if Setup.comment_mode_patterns_f:
            inheritance_info_str += mode.get_documentation()

    # Bring the info about the patterns first
    if Setup.comment_mode_patterns_f:
        analyzer_code += Setup.language_db["$ml-comment"]("BEGIN: MODE PATTERNS\n" + \
                                                          inheritance_info_str + \
                                                          "\nEND: MODE PATTERNS")
    # For safety: New content may have to start in a newline, e.g. "#ifdef ..."
    analyzer_code += "\n"

    # generate frame for analyser code
    analyzer_code = generator.frame_this(analyzer_code)

    # Implementation (Potential Inline Functions)
    implementation_txt = constructor_and_memento_txt + "\n" \
                         + token_class_txt + "\n"

    # Engine (Source Code)
    source_txt = mode_implementation_txt + "\n" \
                 + analyzer_code + "\n" \
                 + map_id_to_name_function_implementation_txt + "\n"

    # (*) Write Files
    write_safely_and_close(Setup.output_configuration_file,
                           header_configuration_txt)

    if Setup.language == "C":
        # Plain C: inline functions go into the code file.
        write_safely_and_close(Setup.output_header_file, header_engine_txt)
        write_safely_and_close(Setup.output_code_file,
                               source_txt + implementation_txt)
    else:
        # Otherwise (C++): inline functions are spliced into the header.
        header_txt = header_engine_txt.replace("$$ADDITIONAL_HEADER_CONTENT$$",
                                               implementation_txt)
        write_safely_and_close(Setup.output_header_file, header_txt)
        write_safely_and_close(Setup.output_code_file, source_txt)

    if token_class_h != "":
        write_safely_and_close(lexer_mode.token_type_definition.get_file_name(),
                               token_class_h)

    # Close any still-open '#line' pragmas in the emitted files.
    UserCodeFragment_straighten_open_line_pragmas(Setup.output_header_file, "C")
    UserCodeFragment_straighten_open_line_pragmas(Setup.output_code_file, "C")
    # assert lexer_mode.token_type_definition is not None
    UserCodeFragment_straighten_open_line_pragmas(
        lexer_mode.token_type_definition.get_file_name(), "C")

    if Setup.source_package_directory != "":
        source_package.do()