Example #1
    def get_pure_text(self):
        """RETURNS: Text

        The returned text is free from any annotations.
        """
        assert all_isinstance(self.__code, (str, unicode))
        return "".join(self.__code)
Example #2
    def get_text(self):
        """RETURNS: Text

        May contain annotations to the code made by the derived class.
        """
        code = self.get_code()
        assert all_isinstance(code, (str, unicode))
        return "".join(code)
Example #3
def wrap_up(ModeName, FunctionBody, VariableDefs, ModeNameList, dial_db):
    txt_function = Lng.ANALYZER_FUNCTION(ModeName, Setup, VariableDefs, 
                                         FunctionBody, dial_db, ModeNameList) 
    txt_header   = Lng.HEADER_DEFINITIONS(dial_db) 
    assert isinstance(txt_header, (str, unicode))

    txt_analyzer = get_plain_strings(txt_function, dial_db)
    assert all_isinstance(txt_analyzer, (str, unicode))

    return [ txt_header ] + txt_analyzer
Example #4
def wrap_up(ModeName, FunctionBody, VariableDefs, ModeNameList):
    txt_function = Lng.ANALYZER_FUNCTION(ModeName, Setup, VariableDefs, 
                                         FunctionBody, ModeNameList) 
    txt_header   = Lng.HEADER_DEFINITIONS() 
    assert isinstance(txt_header, (str, unicode))

    txt_analyzer = get_plain_strings(txt_function)
    assert all_isinstance(txt_analyzer, (str, unicode))

    return [ txt_header ] + txt_analyzer
Example #5
    def __get_setting_list(self, Name):
        """RETURNS: [ OptionSetting ] for a given option's Name.

        This function does additional checks for consistency.
        """
        setting_list = dict.get(self, Name)
        if setting_list is None: return None

        assert isinstance(setting_list, list) 
        assert all_isinstance(setting_list, OptionSetting)
        assert (not mode_option_info_db[Name].single_setting_f()) or len(setting_list) == 1
        return setting_list
Example #6
    def __init__(self, PatternList):
        assert isinstance(PatternList, list)
        assert len(PatternList) > 0
        assert all_isinstance(PatternList, Pattern)
        assert all_true(PatternList, lambda p: p.incidence_id() is not None)

        # (*) Core SM, Pre-Context SM, ...
        #     ... and sometimes backward input position SMs.
        self.sm,                    \
        self.pre_context_sm,        \
        self.bipd_sm_db,            \
        self.pre_context_sm_id_list = self.__prepare(PatternList)
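
The assertion on incidence ids in the example above also relies on an all_true() helper that is not shown anywhere in this listing. A minimal sketch, assuming it simply checks that a predicate holds for every element:

def all_true(iterable, Condition):
    # Assumed behaviour: True if Condition(x) evaluates to true for every
    # x in 'iterable'; used above to ensure each Pattern carries an
    # incidence id.
    return all(Condition(x) for x in iterable)
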
Example #7
    def __get_setting_list(self, Name):
        """RETURNS: [ OptionSetting ] for a given option's Name.

        This function does additional checks for consistency.
        """
        setting_list = dict.get(self, Name)
        if setting_list is None: return None

        assert isinstance(setting_list, list)
        assert all_isinstance(setting_list, OptionSetting)
        assert (not mode_option_info_db[Name].single_setting_f()
                ) or len(setting_list) == 1
        return setting_list
Example #8
    def get_documentation(self):
        L = max(map(lambda mode: len(mode.name), self.__base_mode_sequence))
        txt = "\nMODE: %s\n" % self.name

        txt += "\n"
        if len(self.__base_mode_sequence) != 1:
            txt += "    BASE MODE SEQUENCE:\n"
            base_mode_name_list = map(lambda mode: mode.name,
                                      self.__base_mode_sequence[:-1])
            base_mode_name_list.reverse()
            for name in base_mode_name_list:
                txt += "      %s\n" % name
            txt += "\n"

        if len(self.__doc_history_deletion) != 0:
            txt += "    DELETION ACTIONS:\n"
            for entry in self.__doc_history_deletion:
                txt += "      %s:  %s%s  (from mode %s)\n" % \
                       (entry[0], " " * (L - len(self.name)), entry[1], entry[2])
            txt += "\n"

        if len(self.__doc_history_reprioritization) != 0:
            txt += "    PRIORITY-MARK ACTIONS:\n"
            self.__doc_history_reprioritization.sort(
                lambda x, y: cmp(x[4], y[4]))
            for entry in self.__doc_history_reprioritization:
                txt += "      %s: %s%s  (from mode %s)  (%i) --> (%i)\n" % \
                       (entry[0], " " * (L - len(self.name)), entry[1], entry[2], entry[3], entry[4])
            txt += "\n"

        assert all_isinstance(self.__pattern_list, Pattern)
        if len(self.__pattern_list) != 0:
            txt += "    PATTERN LIST:\n"
            for x in self.__pattern_list:
                space = " " * (L - len(x.sr.mode_name))
                txt   += "      (%3i) %s: %s%s\n" % \
                         (x.incidence_id(), x.sr.mode_name, space, x.pattern_string())
            txt += "\n"

        return txt
Example #9
    def get_documentation(self):
        L = max(map(lambda mode: len(mode.name), self.__base_mode_sequence))
        txt  = "\nMODE: %s\n" % self.name

        txt += "\n"
        if len(self.__base_mode_sequence) != 1:
            txt += "    BASE MODE SEQUENCE:\n"
            base_mode_name_list = map(lambda mode: mode.name, self.__base_mode_sequence[:-1])
            base_mode_name_list.reverse()
            for name in base_mode_name_list:
                txt += "      %s\n" % name
            txt += "\n"

        if len(self.__doc_history_deletion) != 0:
            txt += "    DELETION ACTIONS:\n"
            for entry in self.__doc_history_deletion:
                txt += "      %s:  %s%s  (from mode %s)\n" % \
                       (entry[0], " " * (L - len(self.name)), entry[1], entry[2])
            txt += "\n"

        if len(self.__doc_history_reprioritization) != 0:
            txt += "    PRIORITY-MARK ACTIONS:\n"
            self.__doc_history_reprioritization.sort(lambda x, y: cmp(x[4], y[4]))
            for entry in self.__doc_history_reprioritization:
                txt += "      %s: %s%s  (from mode %s)  (%i) --> (%i)\n" % \
                       (entry[0], " " * (L - len(self.name)), entry[1], entry[2], entry[3], entry[4])
            txt += "\n"

        assert all_isinstance(self.__pattern_list, Pattern)
        if len(self.__pattern_list) != 0:
            txt += "    PATTERN LIST:\n"
            for x in self.__pattern_list:
                space  = " " * (L - len(x.sr.mode_name)) 
                txt   += "      (%3i) %s: %s%s\n" % \
                         (x.incidence_id(), x.sr.mode_name, space, x.pattern_string())
            txt += "\n"

        return txt
Example #10
def create_state_machine_function(PatternActionPairList,
                                  PatternDictionary,
                                  BufferLimitCode,
                                  SecondModeF=False):

    # (*) Initialize address handling
    dial_db.clear()  # BEFORE constructor of generator;
    variable_db.variable_db.init(
    )  # because constructor creates some addresses.
    blackboard.required_support_begin_of_line_set()

    def action(ThePattern, PatternName):
        txt = []
        if ThePattern.bipd_sm is not None:
            TerminalFactory.do_bipd_entry_and_return(txt, pattern)

        txt.append("%s\n" % Lng.STORE_LAST_CHARACTER(
            blackboard.required_support_begin_of_line()))
        txt.append("%s\n" % Lng.LEXEME_TERMINATING_ZERO_SET(True))
        txt.append('printf("%19s  \'%%s\'\\n", Lexeme); fflush(stdout);\n' %
                   PatternName)

        if "->1" in PatternName:
            txt.append(
                "me->current_analyzer_function = QUEX_NAME(Mr_analyzer_function);\n"
            )
        elif "->2" in PatternName:
            txt.append(
                "me->current_analyzer_function = QUEX_NAME(Mrs_analyzer_function);\n"
            )

        if "CONTINUE" in PatternName: txt.append("")
        elif "STOP" in PatternName: txt.append("return false;\n")
        else: txt.append("return true;\n")

        txt.append("%s\n" % Lng.GOTO(DoorID.continue_with_on_after_match()))
        ## print "#", txt
        return CodeTerminal(txt)

    # -- Display Setup: Patterns and the related Actions
    print "(*) Lexical Analyser Patterns:"
    for pair in PatternActionPairList:
        print "%20s --> %s" % (pair[0], pair[1])

    if not SecondModeF: sm_name = "Mr"
    else: sm_name = "Mrs"

    Setup.analyzer_class_name = sm_name

    pattern_action_list = [
        (regex.do(pattern_str, PatternDictionary), action_str)
        for pattern_str, action_str in PatternActionPairList
    ]

    support_begin_of_line_f = False
    for pattern, action_str in pattern_action_list:
        support_begin_of_line_f |= pattern.pre_context_trivial_begin_of_line_f

    for pattern, action_str in pattern_action_list:
        pattern.prepare_count_info(LineColumnCount_Default(),
                                   CodecTrafoInfo=None)
        pattern.mount_post_context_sm()
        pattern.mount_pre_context_sm()
        pattern.cut_character_list(signal_character_list(Setup))

    # -- PatternList/TerminalDb
    #    (Terminals can only be generated after the 'mount procedure', because,
    #     the bipd_sm is generated through mounting.)
    on_failure = CodeTerminal(["return false;\n"])
    support_begin_of_line_f = False
    terminal_db = {
        E_IncidenceIDs.MATCH_FAILURE:
        Terminal(on_failure, "FAILURE", E_IncidenceIDs.MATCH_FAILURE),
        E_IncidenceIDs.END_OF_STREAM:
        Terminal(on_failure, "END_OF_STREAM", E_IncidenceIDs.END_OF_STREAM),
        E_IncidenceIDs.BAD_LEXATOM:
        Terminal(on_failure, "BAD_LEXATOM", E_IncidenceIDs.BAD_LEXATOM),
        E_IncidenceIDs.OVERFLOW:
        Terminal(on_failure, "NO_SPACE_TO_LOAD", E_IncidenceIDs.OVERFLOW),
        E_IncidenceIDs.LOAD_FAILURE:
        Terminal(on_failure, "LOAD_FAILURE", E_IncidenceIDs.LOAD_FAILURE),
    }
    for pattern, action_str in pattern_action_list:
        name = safe_string(pattern.pattern_string())
        terminal = Terminal(action(pattern, action_str), name)
        terminal.set_incidence_id(pattern.incidence_id())
        terminal_db[pattern.incidence_id()] = terminal

    # -- create default action that prints the name and the content of the token
    #    store_last_character_str = ""
    #    if support_begin_of_line_f:
    #        store_last_character_str  = "    %s = %s;\n" % \
    #                                    ("me->buffer._lexatom_before_lexeme_start",
    #                                     "*(me->buffer._read_p - 1)")
    #    set_terminating_zero_str  = "    QUEX_LEXEME_TERMINATING_ZERO_SET(&me->buffer);\n"
    #    prefix = store_last_character_str + set_terminating_zero_str

    print "## (1) code generation"

    pattern_list = [pattern for pattern, action_str in pattern_action_list]
    function_body, variable_definitions = cpp_generator.do_core(
        pattern_list, terminal_db)
    function_body += "if(0) { __QUEX_COUNT_VOID((QUEX_TYPE_ANALYZER*)0, (QUEX_TYPE_LEXATOM*)0, (QUEX_TYPE_LEXATOM*)0); }\n"
    function_txt = cpp_generator.wrap_up(sm_name,
                                         function_body,
                                         variable_definitions,
                                         ModeNameList=[])

    assert all_isinstance(function_txt, str)

    return   "#define  __QUEX_OPTION_UNIT_TEST\n" \
           + nonsense_default_counter(not SecondModeF) \
           + "".join(function_txt)
Example #11
    def __init__(self, Code, DropOutF=False):
        assert type(DropOutF) == bool
        self.__drop_out_f = DropOutF
        if isinstance(Code, list): self.__code = Code
        else: self.__code = [Code]
        assert all_isinstance(self.__code, (int, str, unicode))
Example #12
    def __init__(self, Code, DropOutF=False):
        assert type(DropOutF) == bool
        self.__drop_out_f = DropOutF
        if isinstance(Code, list): self.__code = Code
        else:                      self.__code = [ Code ]
        assert all_isinstance(self.__code, (int, str, unicode))
Example #13
def create_state_machine_function(PatternActionPairList,
                                  PatternDictionary,
                                  BufferLimitCode,
                                  SecondModeF=False):
    global dial_db
    incidence_db = IncidenceDB()

    if not SecondModeF: sm_name = "M"
    else: sm_name = "M2"

    Setup.analyzer_class_name = "TestAnalyzer"
    Setup.analyzer_name_safe = "TestAnalyzer"

    # (*) Initialize address handling
    dial_db = DialDB()  # BEFORE constructor of generator;
    variable_db.variable_db.init(
    )  # because constructor creates some addresses.
    blackboard.required_support_begin_of_line_set()
    terminal_factory = TerminalFactory(sm_name, incidence_db, dial_db)

    # -- Display Setup: Patterns and the related Actions
    print "(*) Lexical Analyser Patterns:"
    for pair in PatternActionPairList:
        print "%20s --> %s" % (pair[0], pair[1])

    def action(ThePattern, PatternName):
        txt = []
        if ThePattern.sm_bipd_to_be_reversed is not None:
            terminal_factory.do_bipd_entry_and_return(txt, pattern)

        txt.append("%s\n" % Lng.STORE_LAST_CHARACTER(
            blackboard.required_support_begin_of_line()))
        txt.append("%s\n" % Lng.LEXEME_TERMINATING_ZERO_SET(True))
        txt.append('printf("%19s  \'%%s\'\\n", Lexeme); fflush(stdout);\n' %
                   PatternName)

        if "->1" in PatternName:
            txt.append(
                "me->current_analyzer_function = QUEX_NAME(M_analyzer_function);\n"
            )
        elif "->2" in PatternName:
            txt.append(
                "me->current_analyzer_function = QUEX_NAME(M2_analyzer_function);\n"
            )

        if "CONTINUE" in PatternName: txt.append("")
        elif "STOP" in PatternName:
            txt.append(
                "QUEX_NAME(MF_error_code_set_if_first)(me, E_Error_UnitTest_Termination); return;\n"
            )
        else:
            txt.append("return;\n")

        txt.append(
            "%s\n" %
            Lng.GOTO(DoorID.continue_with_on_after_match(dial_db), dial_db))
        ## print "#", txt
        return CodeTerminal(txt)

    pattern_action_list = [
        (regex.do(pattern_str, PatternDictionary), action_str)
        for pattern_str, action_str in PatternActionPairList
    ]

    ca_map = LineColumnCount_Default()
    pattern_list = []
    terminal_db = {}
    for pattern, action_str in pattern_action_list:
        pattern = pattern.finalize(ca_map)
        name = Lng.SAFE_STRING(pattern.pattern_string())
        terminal = Terminal(action(pattern, action_str), name, dial_db=dial_db)
        terminal.set_incidence_id(pattern.incidence_id)

        pattern_list.append(pattern)
        terminal_db[pattern.incidence_id] = terminal

    # -- PatternList/TerminalDb
    #    (Terminals can only be generated after the 'mount procedure', because,
    #     the bipd_sm is generated through mounting.)
    on_failure = CodeTerminal([
        "QUEX_NAME(MF_error_code_set_if_first)(me, E_Error_UnitTest_Termination); return;\n"
    ])
    terminal_db.update({
        E_IncidenceIDs.MATCH_FAILURE:
        Terminal(on_failure,
                 "FAILURE",
                 E_IncidenceIDs.MATCH_FAILURE,
                 dial_db=dial_db),
        E_IncidenceIDs.END_OF_STREAM:
        Terminal(on_failure,
                 "END_OF_STREAM",
                 E_IncidenceIDs.END_OF_STREAM,
                 dial_db=dial_db),
        E_IncidenceIDs.BAD_LEXATOM:
        Terminal(on_failure,
                 "BAD_LEXATOM",
                 E_IncidenceIDs.BAD_LEXATOM,
                 dial_db=dial_db),
        E_IncidenceIDs.LOAD_FAILURE:
        Terminal(on_failure,
                 "LOAD_FAILURE",
                 E_IncidenceIDs.LOAD_FAILURE,
                 dial_db=dial_db),
    })

    mode = Mode(sm_name,
                SourceRef_VOID,
                pattern_list,
                terminal_db, [],
                incidence_db,
                RunTimeCounterDb=None,
                ReloadStateForward=None,
                RequiredRegisterSet=set(),
                dial_db=dial_db,
                Documentation=ModeDocumentation([], [], [], [], []))

    print "## (1) code generation"

    txt = engine_generator.do_with_counter(mode, ["M", "M2"])
    assert all_isinstance(txt, str)

    return "#define  QUEX_OPTION_UNIT_TEST_EXT\n" + "".join(txt)
Example #14
def create_state_machine_function(PatternActionPairList, PatternDictionary, 
                                  BufferLimitCode, SecondModeF=False):

    # (*) Initialize address handling
    dial_db.clear()     # BEFORE constructor of generator; 
    variable_db.variable_db.init()  # because constructor creates some addresses.
    blackboard.required_support_begin_of_line_set()

    def action(ThePattern, PatternName): 
        txt = []
        if ThePattern.bipd_sm is not None:
            TerminalFactory.do_bipd_entry_and_return(txt, pattern)

        txt.append("%s\n" % Lng.STORE_LAST_CHARACTER(blackboard.required_support_begin_of_line()))
        txt.append("%s\n" % Lng.LEXEME_TERMINATING_ZERO_SET(True))
        txt.append('printf("%19s  \'%%s\'\\n", Lexeme); fflush(stdout);\n' % PatternName)

        if   "->1" in PatternName: txt.append("me->current_analyzer_function = QUEX_NAME(Mr_analyzer_function);\n")
        elif "->2" in PatternName: txt.append("me->current_analyzer_function = QUEX_NAME(Mrs_analyzer_function);\n")

        if "CONTINUE" in PatternName: txt.append("")
        elif "STOP" in PatternName:   txt.append("return false;\n")
        else:                         txt.append("return true;\n")


        txt.append("%s\n" % Lng.GOTO(DoorID.continue_with_on_after_match()))
        ## print "#", txt
        return CodeTerminal(txt)
    
    # -- Display Setup: Patterns and the related Actions
    print "(*) Lexical Analyser Patterns:"
    for pair in PatternActionPairList:
        print "%20s --> %s" % (pair[0], pair[1])

    if not SecondModeF:  sm_name = "Mr"
    else:                sm_name = "Mrs"

    Setup.analyzer_class_name = sm_name
    
    pattern_action_list = [
        (regex.do(pattern_str, PatternDictionary), action_str)
        for pattern_str, action_str in PatternActionPairList
    ]
    
    support_begin_of_line_f = False
    for pattern, action_str in pattern_action_list:
        support_begin_of_line_f |= pattern.pre_context_trivial_begin_of_line_f

    for pattern, action_str in pattern_action_list:
        pattern.prepare_count_info(LineColumnCount_Default(), CodecTrafoInfo=None)
        pattern.mount_post_context_sm()
        pattern.mount_pre_context_sm()
        pattern.cut_character_list(signal_character_list(Setup))

    # -- PatternList/TerminalDb
    #    (Terminals can only be generated after the 'mount procedure', because, 
    #     the bipd_sm is generated through mounting.)
    on_failure              = CodeTerminal(["return false;\n"])
    support_begin_of_line_f = False
    terminal_db             = {
        E_IncidenceIDs.MATCH_FAILURE: Terminal(on_failure, "FAILURE", 
                                               E_IncidenceIDs.MATCH_FAILURE),
        E_IncidenceIDs.END_OF_STREAM: Terminal(on_failure, "END_OF_STREAM", 
                                               E_IncidenceIDs.END_OF_STREAM),
        E_IncidenceIDs.BAD_LEXATOM:   Terminal(on_failure, "BAD_LEXATOM", 
                                               E_IncidenceIDs.BAD_LEXATOM),
        E_IncidenceIDs.OVERFLOW:      Terminal(on_failure, "NO_SPACE_TO_LOAD", 
                                               E_IncidenceIDs.OVERFLOW),
        E_IncidenceIDs.LOAD_FAILURE:  Terminal(on_failure, "LOAD_FAILURE", 
                                               E_IncidenceIDs.LOAD_FAILURE),
    }
    for pattern, action_str in pattern_action_list:
        name     = safe_string(pattern.pattern_string())
        terminal = Terminal(action(pattern, action_str), name)
        terminal.set_incidence_id(pattern.incidence_id())
        terminal_db[pattern.incidence_id()] = terminal

    # -- create default action that prints the name and the content of the token
    #    store_last_character_str = ""
    #    if support_begin_of_line_f:
    #        store_last_character_str  = "    %s = %s;\n" % \
    #                                    ("me->buffer._lexatom_before_lexeme_start", 
    #                                     "*(me->buffer._read_p - 1)")
    #    set_terminating_zero_str  = "    QUEX_LEXEME_TERMINATING_ZERO_SET(&me->buffer);\n"
    #    prefix = store_last_character_str + set_terminating_zero_str

    print "## (1) code generation"    

    pattern_list = [ pattern for pattern, action_str in pattern_action_list ]
    function_body, variable_definitions = cpp_generator.do_core(pattern_list, terminal_db)
    function_body += "if(0) { __QUEX_COUNT_VOID((QUEX_TYPE_ANALYZER*)0, (QUEX_TYPE_LEXATOM*)0, (QUEX_TYPE_LEXATOM*)0); }\n"
    function_txt                        = cpp_generator.wrap_up(sm_name, function_body, 
                                                                variable_definitions, 
                                                                ModeNameList=[])

    assert all_isinstance(function_txt, str)

    return   "#define  __QUEX_OPTION_UNIT_TEST\n" \
           + nonsense_default_counter(not SecondModeF) \
           + "".join(function_txt)