def UserCodeFragment_straighten_open_line_pragmas(filename, Language):
    """Rewrite the 'open' line pragmas in 'filename' so that each pragma's
    line number and file name reflect the pragma's actual position in the
    generated file.

    filename -- file to be post-processed in place.
    Language -- key into 'UserCodeFragment_OpenLinePragma'; if no pragma
                templates are registered for it, the file is left untouched.
    """
    if Language not in UserCodeFragment_OpenLinePragma:
        return

    fh            = open_file_or_die(filename)
    norm_filename = Setup.get_file_reference(filename)

    LinePragmaInfoList = UserCodeFragment_OpenLinePragma[Language]
    new_content        = []
    line_n             = 0
    for line in fh.readlines():
        line_n += 1
        if Language == "C":
            for marker, template in LinePragmaInfoList:
                if marker not in line: continue
                line = template
                # By definition, line number pragmas must be < 32768. Clamp to
                # 32767 (the largest admissible value) to avoid compiler
                # warnings on long files.
                line = line.replace("NUMBER", str(min(line_n + 1, 32767)))
                # Even under Windows (tm), the '/' is accepted. Thus do not
                # rely on 'normpath'.
                line = line.replace("FILENAME", norm_filename)
        # Ensure every emitted line is newline-terminated.
        if not line or line[-1] != "\n":
            line += "\n"
        new_content.append(line)
    fh.close()

    write_safely_and_close(filename, "".join(new_content))
def __copy_files(FileTxt):
    """Copy the files listed in 'FileTxt' (whitespace-separated paths,
    relative to the code base) from QUEX_PATH into Setup.output_directory,
    creating any missing target directories first.
    """
    input_directory  = QUEX_PATH
    output_directory = Setup.output_directory

    # NOTE: must be a real list -- a 'map' iterator would be exhausted by the
    # directory-collection pass below, leaving the copy loop with nothing.
    file_list = [Setup.language_db["$code_base"] + x.strip()
                 for x in FileTxt.split()]

    # Ensure that all target directories exist.
    directory_list = []
    for file_name in file_list:
        directory = path.dirname(output_directory + file_name)
        if directory in directory_list: continue
        directory_list.append(directory)

    # Sort directories according to length
    # --> create parent directories before child directories.
    for directory in sorted(directory_list, key=len):
        if os.access(directory, os.F_OK): continue
        # 'makedirs' also creates missing parent directories.
        os.makedirs(directory)

    for file_name in file_list:
        input_file  = input_directory  + file_name
        output_file = output_directory + file_name
        # Copy the file content verbatim (binary mode).
        content = open_file_or_die(input_file, "rb").read()
        write_safely_and_close(output_file, content)
def __copy_files(FileTxt):
    """Copy the files listed in 'FileTxt' (whitespace-separated paths,
    relative to the code base) from QUEX_PATH into Setup.output_directory,
    creating any missing target directories first.
    """
    input_directory  = QUEX_PATH
    output_directory = Setup.output_directory

    # NOTE: must be a real list -- a 'map' iterator would be exhausted by the
    # directory-collection pass below, leaving the copy loop with nothing.
    file_list = [Lng["$code_base"] + x.strip() for x in FileTxt.split()]

    # Ensure that all target directories exist.
    directory_list = []
    for file_name in file_list:
        directory = path.dirname(output_directory + file_name)
        if directory in directory_list: continue
        directory_list.append(directory)

    # Sort directories according to length
    # --> create parent directories before child directories.
    for directory in sorted(directory_list, key=len):
        if os.access(directory, os.F_OK): continue
        # 'makedirs' also creates missing parent directories.
        os.makedirs(directory)

    for file_name in file_list:
        input_file  = input_directory  + file_name
        output_file = output_directory + file_name
        # Copy the file content verbatim (binary mode).
        content = open_file_or_die(input_file, "rb").read()
        write_safely_and_close(output_file, content)
def straighten_open_line_pragmas(self, FileName):
    """Replace every 'source reference END' pragma found in 'FileName' by a
    'source reference BEGIN' pragma pointing to the pragma's actual position
    inside the file, then write the file back.
    """
    norm_filename   = Setup.get_file_reference(FileName)
    line_pragma_txt = self._SOURCE_REFERENCE_END().strip()

    result = []
    line_n = 1  # NOT: 0!
    fh     = open_file_or_die(FileName)
    # 'iter(fh.readline, "")' yields lines until EOF (empty string).
    for line in iter(fh.readline, ""):
        line_n += 1
        if line.strip() != line_pragma_txt:
            result.append(line)
        else:
            # The new pragma refers to the line following itself.
            line_n += 1
            result.append(self._SOURCE_REFERENCE_BEGIN(SourceRef(norm_filename, line_n)))
    fh.close()

    write_safely_and_close(FileName, "".join(result))
def straighten_open_line_pragmas(self, FileName):
    """Scan 'FileName' for 'source reference END' pragmas and substitute each
    one with a 'source reference BEGIN' pragma carrying the pragma's true
    location; the modified content replaces the file.
    """
    norm_filename = Setup.get_file_reference(FileName)
    end_marker    = self._SOURCE_REFERENCE_END().strip()

    collected = []
    line_n    = 1  # NOT: 0!
    fh        = open_file_or_die(FileName)
    while True:
        line = fh.readline()
        line_n += 1
        if not line:
            break
        if line.strip() == end_marker:
            # The inserted pragma refers to the line that follows it.
            line_n += 1
            collected.append(
                self._SOURCE_REFERENCE_BEGIN(
                    SourceRef(norm_filename, line_n)))
        else:
            collected.append(line)
    fh.close()

    write_safely_and_close(FileName, "".join(collected))
def __do(self, state_machine, FileName, Option="utf8"):
    """Render 'state_machine' as a graphviz 'dot' description and store the
    result in 'FileName'.
    """
    graph_txt = state_machine.get_graphviz_string(NormalizeF=Setup.normalize_f,
                                                  Option=Option)
    write_safely_and_close(FileName, graph_txt)
def __do(self, state_machine, FileName, Option="utf8"):
    """Write the graphviz 'dot' representation of 'state_machine' to
    'FileName'.
    """
    write_safely_and_close(
        FileName,
        state_machine.get_graphviz_string(NormalizeF=Setup.normalize_f,
                                          Option=Option))
def do():
    """Generates state machines for all modes. Each mode results into a
    separate state machine that is stuck into a virtual function of a class
    derived from class 'quex_mode'.
    """
    # 'DOT' output means: plot the state machines instead of generating code.
    if Setup.language == "DOT":
        return do_plot()

    mode_db = quex_file_parser.do(Setup.input_mode_files)

    # (*) [Optional] Generate a converter helper.
    codec_converter_helper_header, \
    codec_converter_helper_implementation = codec_converter_helper.do()

    # (*) Generate the token ids.
    #     (This needs to happen after the parsing of mode_db, since during
    #      that the token_id_db is developed.)
    if Setup.external_lexeme_null_object != "":
        # Assume external implementation.
        token_id_header                        = None
        function_map_id_to_name_implementation = ""
    else:
        token_id_header                        = token_id_maker.do(Setup)
        function_map_id_to_name_implementation = token_id_maker.do_map_id_to_name_function()

    # (*) [Optional] Make a customized token class.
    class_token_header, \
    class_token_implementation = token_class_maker.do(function_map_id_to_name_implementation)

    if Setup.token_class_only_f:
        # Token-class-only mode: emit the token class files and stop.
        write_safely_and_close(blackboard.token_type_definition.get_file_name(),
                               do_token_class_info() \
                               + class_token_header)
        write_safely_and_close(Setup.output_token_class_file_implementation,
                               class_token_implementation)
        write_safely_and_close(Setup.output_token_id_file, token_id_header)
        return

    # (*) Implement the 'quex' core class from a template.
    #     -- do the coding of the class framework.
    configuration_header          = configuration.do(mode_db)
    class_analyzer_header         = analyzer_class.do(mode_db)
    class_analyzer_implementation = analyzer_class.do_implementation(mode_db)
    mode_implementation           = mode_classes.do(mode_db)

    # (*) Implement the lexer mode-specific analyser functions.
    function_analyzers_implementation = analyzer_functions_get(mode_db)

    # Implementation (Potential Inline Functions)
    class_implementation = class_analyzer_implementation + "\n"
    if class_token_implementation is not None:
        class_implementation += class_token_implementation + "\n"

    # Engine (Source Code)
    engine_txt = mode_implementation                     + "\n" \
               + function_analyzers_implementation       + "\n" \
               + function_map_id_to_name_implementation  + "\n"

    # (*) Write Files _______________________________________________________
    if codec_converter_helper_header is not None:
        write_safely_and_close(Setup.output_buffer_codec_header,
                               codec_converter_helper_header)
        write_safely_and_close(Setup.output_buffer_codec_header_i,
                               codec_converter_helper_implementation)

    if token_id_header is not None:
        write_safely_and_close(Setup.output_token_id_file, token_id_header)

    write_safely_and_close(Setup.output_configuration_file, configuration_header)

    if Setup.language == "C":
        # Plain C: the class implementation goes into the engine source file.
        engine_txt += class_implementation
    else:
        # C++: the class implementation is inlined into the header.
        class_analyzer_header = class_analyzer_header.replace(
            "$$ADDITIONAL_HEADER_CONTENT$$", class_implementation)

    write_safely_and_close(Setup.output_header_file, class_analyzer_header)
    write_safely_and_close(Setup.output_code_file, engine_txt)

    if class_token_header is not None:
        write_safely_and_close(blackboard.token_type_definition.get_file_name(),
                               class_token_header)

    # Fix up the '#line' pragmas of all emitted files so they point to their
    # true positions.
    for file_name in [Setup.output_header_file,
                      Setup.output_code_file,
                      blackboard.token_type_definition.get_file_name()]:
        UserCodeFragment_straighten_open_line_pragmas(file_name, "C")

    if Setup.source_package_directory != "":
        source_package.do()