def _parse_modes_and_more(InputFileList):
    mode_prep_prep_db = quex_file_parser.do(InputFileList)
    if not mode_prep_prep_db:
        error.log("Missing mode definition in input files.")

    # Finalization of Mode_PrepPrep --> Mode
    # requires consideration of inheritance and transition rules.
    return mode.finalize_modes(mode_prep_prep_db)
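# Illustrative use of _parse_modes_and_more() (a sketch, not part of the
# generator itself). The .qx file names below are hypothetical, and the
# returned dictionary is presumably keyed by mode name, as suggested by the
# way the mode database is iterated in do_plot() below.
#
#   mode_db = _parse_modes_and_more(["simple.qx", "derived.qx"])
#   for name, m in mode_db.items():
#       print(name, m.name)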
def do_plot():
    mode_description_db = quex_file_parser.do(Setup.input_mode_files)

    for mode_descr in mode_description_db.itervalues():
        mode = Mode(mode_descr)
        # -- some modes only define event handlers that are inherited
        if len(mode.pattern_list) == 0:
            continue

        plotter = grapviz_generator.Generator(mode.pattern_list,
                                              StateMachineName=mode.name)
        plotter.do(Option=Setup.character_display)
def do_plot():
    mode_db = quex_file_parser.do(Setup.input_mode_files)

    for mode in mode_db.values():
        # -- some modes only define event handlers that are inherited
        pattern_action_pair_list = mode.get_pattern_action_pair_list()
        if len(pattern_action_pair_list) == 0:
            continue

        plotter = grapviz_generator.Generator(pattern_action_pair_list,
                                              StateMachineName=mode.name)
        plotter.do(Option=Setup.character_display)
def code(Language):
    global output_dir
    global tail_str

    command_line.do([
        "-i",         "nothing.qx",
        "-o",         "TestAnalyzer",
        "--odir",     output_dir,
        "--language", Language,
        "--debug-QUEX_TYPE_LEXATOM_EXT",
        "--config-by-macros"
    ])
    mode_prep_prep_db = quex_file_parser.do(Setup.input_mode_files)
    mode_db           = mode.finalize_modes(mode_prep_prep_db)
    core._generate(mode_db)
    return mode_db
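# Hypothetical use of the code() helper above (assumes the enclosing test
# module has set the 'output_dir' global and ships a 'nothing.qx' input file
# next to it; neither is defined in this snippet):
#
#   output_dir = "./out"
#   mode_db    = code("C++")
#   print(sorted(mode_db.keys()))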
def do(): """Generates state machines for all modes. Each mode results into a separate state machine that is stuck into a virtual function of a class derived from class 'quex_mode'. """ if Setup.language == "DOT": return do_plot() if Setup.converter_only_f: mode_db = None elif Setup.token_class_only_f: mode_prep_prep_db = quex_file_parser.do(Setup.input_mode_files) if mode_prep_prep_db: error.log("Mode definition found in input files, while in token class \n" "generation mode.") mode_db = None else: mode_db = _parse_modes_and_more(Setup.input_mode_files) blackboard.mode_db = mode_db # Announce! _generate(mode_db)
def do(): """Generates state machines for all modes. Each mode results into a separate state machine that is stuck into a virtual function of a class derived from class 'quex_mode'. """ if Setup.language == "DOT": return do_plot() mode_description_db = quex_file_parser.do(Setup.input_mode_files) # (*) Generate the token ids # (This needs to happen after the parsing of mode_db, since during that # the token_id_db is developed.) if Setup.external_lexeme_null_object != "": # Assume external implementation token_id_header = None function_map_id_to_name_implementation = "" else: token_id_header = token_id_maker.do(Setup) function_map_id_to_name_implementation = token_id_maker.do_map_id_to_name_function( ) # (*) [Optional] Make a customized token class class_token_header, \ class_token_implementation = token_class_maker.do(function_map_id_to_name_implementation) if Setup.token_class_only_f: write_safely_and_close(blackboard.token_type_definition.get_file_name(), do_token_class_info() \ + class_token_header) write_safely_and_close(Setup.output_token_class_file_implementation, class_token_implementation) write_safely_and_close(Setup.output_token_id_file, token_id_header) Lng.straighten_open_line_pragmas(Setup.output_token_id_file) Lng.straighten_open_line_pragmas( Setup.output_token_class_file_implementation) Lng.straighten_open_line_pragmas( blackboard.token_type_definition.get_file_name()) return # (*) implement the lexer mode-specific analyser functions # During this process: mode_description_db --> mode_db function_analyzers_implementation, \ mode_db = analyzer_functions_get(mode_description_db) # (*) Implement the 'quex' core class from a template # -- do the coding of the class framework configuration_header = configuration.do(mode_db) analyzer_header = analyzer_class.do(mode_db) analyzer_implementation = analyzer_class.do_implementation(mode_db) + "\n" mode_implementation = mode_classes.do(mode_db) # (*) [Optional] Generate a converter helper codec_converter_helper_header, \ codec_converter_helper_implementation = codec_converter_helper.do() # Implementation (Potential Inline Functions) if class_token_implementation is not None: analyzer_implementation += class_token_implementation + "\n" # Engine (Source Code) engine_txt = Lng.ENGINE_TEXT_EPILOG() + "\n" \ + mode_implementation + "\n" \ + function_analyzers_implementation + "\n" \ + function_map_id_to_name_implementation + "\n" # (*) Write Files ___________________________________________________________________ if codec_converter_helper_header is not None: write_safely_and_close(Setup.output_buffer_codec_header, codec_converter_helper_header) write_safely_and_close(Setup.output_buffer_codec_header_i, codec_converter_helper_implementation) if token_id_header is not None: write_safely_and_close(Setup.output_token_id_file, token_id_header) write_safely_and_close(Setup.output_configuration_file, configuration_header) if Setup.language == "C": engine_txt += analyzer_implementation else: analyzer_header = analyzer_header.replace( "$$ADDITIONAL_HEADER_CONTENT$$", analyzer_implementation) write_safely_and_close(Setup.output_header_file, analyzer_header) write_safely_and_close(Setup.output_code_file, engine_txt) if class_token_header is not None: write_safely_and_close( blackboard.token_type_definition.get_file_name(), class_token_header) Lng.straighten_open_line_pragmas(Setup.output_header_file) Lng.straighten_open_line_pragmas(Setup.output_code_file) if not blackboard.token_type_definition.manually_written(): Lng.straighten_open_line_pragmas( 
blackboard.token_type_definition.get_file_name()) if Setup.source_package_directory != "": source_package.do()
def do(): """Generates state machines for all modes. Each mode results into a separate state machine that is stuck into a virtual function of a class derived from class 'quex_mode'. """ if Setup.language == "DOT": return do_plot() mode_description_db = quex_file_parser.do(Setup.input_mode_files) # (*) Generate the token ids # (This needs to happen after the parsing of mode_db, since during that # the token_id_db is developed.) if Setup.external_lexeme_null_object != "": # Assume external implementation token_id_header = None function_map_id_to_name_implementation = "" else: token_id_header = token_id_maker.do(Setup) function_map_id_to_name_implementation = token_id_maker.do_map_id_to_name_function() # (*) [Optional] Make a customized token class class_token_header, \ class_token_implementation = token_class_maker.do(function_map_id_to_name_implementation) if Setup.token_class_only_f: write_safely_and_close(blackboard.token_type_definition.get_file_name(), do_token_class_info() \ + class_token_header) write_safely_and_close(Setup.output_token_class_file_implementation, class_token_implementation) write_safely_and_close(Setup.output_token_id_file, token_id_header) Lng.straighten_open_line_pragmas(Setup.output_token_id_file) Lng.straighten_open_line_pragmas(Setup.output_token_class_file_implementation) Lng.straighten_open_line_pragmas(blackboard.token_type_definition.get_file_name()) return # (*) implement the lexer mode-specific analyser functions # During this process: mode_description_db --> mode_db function_analyzers_implementation, \ mode_db = analyzer_functions_get(mode_description_db) # (*) Implement the 'quex' core class from a template # -- do the coding of the class framework configuration_header = configuration.do(mode_db) analyzer_header = analyzer_class.do(mode_db) analyzer_implementation = analyzer_class.do_implementation(mode_db) + "\n" mode_implementation = mode_classes.do(mode_db) # (*) [Optional] Generate a converter helper codec_converter_helper_header, \ codec_converter_helper_implementation = codec_converter_helper.do() # Implementation (Potential Inline Functions) if class_token_implementation is not None: analyzer_implementation += class_token_implementation + "\n" # Engine (Source Code) engine_txt = Lng.ENGINE_TEXT_EPILOG() + "\n" \ + mode_implementation + "\n" \ + function_analyzers_implementation + "\n" \ + function_map_id_to_name_implementation + "\n" # (*) Write Files ___________________________________________________________________ if codec_converter_helper_header is not None: write_safely_and_close(Setup.output_buffer_codec_header, codec_converter_helper_header) write_safely_and_close(Setup.output_buffer_codec_header_i, codec_converter_helper_implementation) if token_id_header is not None: write_safely_and_close(Setup.output_token_id_file, token_id_header) write_safely_and_close(Setup.output_configuration_file, configuration_header) if Setup.language == "C": engine_txt += analyzer_implementation else: analyzer_header = analyzer_header.replace("$$ADDITIONAL_HEADER_CONTENT$$", analyzer_implementation) write_safely_and_close(Setup.output_header_file, analyzer_header) write_safely_and_close(Setup.output_code_file, engine_txt) if class_token_header is not None: write_safely_and_close(blackboard.token_type_definition.get_file_name(), class_token_header) Lng.straighten_open_line_pragmas(Setup.output_header_file) Lng.straighten_open_line_pragmas(Setup.output_code_file) if not blackboard.token_type_definition.manually_written(): 
Lng.straighten_open_line_pragmas(blackboard.token_type_definition.get_file_name()) if Setup.source_package_directory != "": source_package.do()