def _get_analyzers(mode_db):
    """Produce the analyzer output chunks for the given mode database.

    Returns a list of (content, file_name) pairs ready to be written:
    the configuration header, the analyzer class header, and the engine
    source text (mode classes + analyzer functions + implementation).
    """
    # Text fragments generated from the mode database.
    config_txt = configuration.do(mode_db)
    header_txt, \
    signature_list = analyzer_class.do(mode_db, Epilog="")
    mode_txt = mode_classes.do(mode_db)
    functions_txt = _analyzer_functions_get(mode_db)
    implementation_txt = analyzer_class.do_implementation(mode_db,
                                                          signature_list)

    # Engine source: epilog first, then the generated bodies, with a
    # trailing blank line.
    engine_txt = "\n".join([
        Lng.ENGINE_TEXT_EPILOG(),
        mode_txt,
        functions_txt,
        implementation_txt,
        "\n",
    ])

    # CMake-driven builds write the configuration to a dedicated file.
    if Setup.configuration_by_cmake_f:
        config_file = Setup.output_configuration_file_cmake
    else:
        config_file = Setup.output_configuration_file

    return [
        (config_txt, config_file),
        (header_txt, Setup.output_header_file),
        (engine_txt, Setup.output_code_file),
    ]
def do():
    """Generates state machines for all modes. Each mode results into
    a separate state machine that is stuck into a virtual function of
    a class derived from class 'quex_mode'.

    Side effects: writes the generated token-id, token-class, header,
    configuration, and engine source files via write_safely_and_close(),
    and straightens '#line' pragmas in the written files.
    """
    # "DOT" output means: plot the state machines instead of generating code.
    if Setup.language == "DOT":
        return do_plot()

    mode_description_db = quex_file_parser.do(Setup.input_mode_files)

    # (*) Generate the token ids
    #     (This needs to happen after the parsing of mode_db, since during
    #      that the token_id_db is developed.)
    if Setup.external_lexeme_null_object != "":
        # Assume external implementation: no token-id header and no
        # id-to-name mapping function are generated here.
        token_id_header = None
        function_map_id_to_name_implementation = ""
    else:
        token_id_header = token_id_maker.do(Setup)
        function_map_id_to_name_implementation = \
            token_id_maker.do_map_id_to_name_function()

    # (*) [Optional] Make a customized token class
    class_token_header, \
    class_token_implementation = \
        token_class_maker.do(function_map_id_to_name_implementation)

    if Setup.token_class_only_f:
        # Token-class-only mode: emit just the token class and token ids,
        # then stop -- no analyzer/engine code is generated.
        write_safely_and_close(blackboard.token_type_definition.get_file_name(),
                               do_token_class_info() \
                               + class_token_header)

        write_safely_and_close(Setup.output_token_class_file_implementation,
                               class_token_implementation)

        write_safely_and_close(Setup.output_token_id_file, token_id_header)

        Lng.straighten_open_line_pragmas(Setup.output_token_id_file)
        Lng.straighten_open_line_pragmas(
            Setup.output_token_class_file_implementation)
        Lng.straighten_open_line_pragmas(
            blackboard.token_type_definition.get_file_name())
        return

    # (*) implement the lexer mode-specific analyser functions
    #     During this process: mode_description_db --> mode_db
    function_analyzers_implementation, \
    mode_db = analyzer_functions_get(mode_description_db)

    # (*) Implement the 'quex' core class from a template
    # -- do the coding of the class framework
    configuration_header = configuration.do(mode_db)
    analyzer_header = analyzer_class.do(mode_db)
    analyzer_implementation = analyzer_class.do_implementation(mode_db) + "\n"
    mode_implementation = mode_classes.do(mode_db)

    # (*) [Optional] Generate a converter helper
    codec_converter_helper_header, \
    codec_converter_helper_implementation = codec_converter_helper.do()

    # Implementation (Potential Inline Functions)
    # A generated token-class implementation is inlined into the analyzer
    # implementation text.
    if class_token_implementation is not None:
        analyzer_implementation += class_token_implementation + "\n"

    # Engine (Source Code)
    engine_txt = Lng.ENGINE_TEXT_EPILOG() + "\n" \
                 + mode_implementation + "\n" \
                 + function_analyzers_implementation + "\n" \
                 + function_map_id_to_name_implementation + "\n"

    # (*) Write Files ___________________________________________________________________
    if codec_converter_helper_header is not None:
        write_safely_and_close(Setup.output_buffer_codec_header,
                               codec_converter_helper_header)
        write_safely_and_close(Setup.output_buffer_codec_header_i,
                               codec_converter_helper_implementation)

    if token_id_header is not None:
        write_safely_and_close(Setup.output_token_id_file, token_id_header)

    write_safely_and_close(Setup.output_configuration_file,
                           configuration_header)

    # For C the analyzer implementation goes into the source file; for
    # other target languages it is substituted into the header via the
    # placeholder below.
    if Setup.language == "C":
        engine_txt += analyzer_implementation
    else:
        analyzer_header = analyzer_header.replace(
            "$$ADDITIONAL_HEADER_CONTENT$$", analyzer_implementation)

    write_safely_and_close(Setup.output_header_file, analyzer_header)
    write_safely_and_close(Setup.output_code_file, engine_txt)

    if class_token_header is not None:
        write_safely_and_close(
            blackboard.token_type_definition.get_file_name(),
            class_token_header)

    Lng.straighten_open_line_pragmas(Setup.output_header_file)
    Lng.straighten_open_line_pragmas(Setup.output_code_file)
    # A manually written token type definition must not be touched.
    if not blackboard.token_type_definition.manually_written():
        Lng.straighten_open_line_pragmas(
            blackboard.token_type_definition.get_file_name())

    if Setup.source_package_directory != "":
        source_package.do()