Example #1
def configure_States(TriggerMapA, StateN_A, TriggerMapB, StateN_B):
    state_setup = []
    StateListA = [long(i) for i in xrange(StateN_A)]
    StateListB = [long(i) for i in xrange(StateN_A, StateN_A + StateN_B)]

    def extract_transition_map(XTM, Index):
        result = []
        for interval, specification in XTM:
            result.append((interval, specification[Index]))
        return result

    state_setup.extend([(index.get(), extract_transition_map(TriggerMapA, i))
                        for i, state_index in enumerate(StateListA)])
    state_setup.extend([(index.get(), extract_transition_map(TriggerMapB, i))
                        for i, state_index in enumerate(StateListB)])

    analyzer = get_Analyzer(state_setup)
    if StateN_A == 1:
        state_a = analyzer.state_db[StateListA[0]]  # Normal FSM_State
    else:
        state_a = setup_TemplateState(analyzer, StateListA)
    if StateN_B == 1:
        state_b = analyzer.state_db[StateListB[0]]  # Normal FSM_State
    else:
        state_b = setup_TemplateState(analyzer, StateListB)

    return analyzer, state_a, state_b
Example #2
def configure_States(TriggerMapA, StateN_A, TriggerMapB, StateN_B):
    state_setup = []
    StateListA = [ long(i) for i in xrange(StateN_A) ]
    StateListB = [ long(i) for i in xrange(StateN_A, StateN_A + StateN_B) ]
    def extract_transition_map(XTM, Index):
        result = []
        for interval, specification in XTM:
            result.append((interval, specification[Index]))
        return result

    state_setup.extend([ 
        (index.get(), extract_transition_map(TriggerMapA, i)) 
        for i, state_index in enumerate(StateListA)
    ])
    state_setup.extend([ 
        (index.get(), extract_transition_map(TriggerMapB, i)) 
        for i, state_index in enumerate(StateListB)
    ])

    analyzer = get_Analyzer(state_setup)
    if StateN_A == 1: state_a = analyzer.state_db[StateListA[0]] # Normal AnalyzerState
    else:             state_a = setup_TemplateState(analyzer, StateListA)
    if StateN_B == 1: state_b = analyzer.state_db[StateListB[0]] # Normal AnalyzerState
    else:             state_b = setup_TemplateState(analyzer, StateListB)

    return analyzer, state_a, state_b
Example #3
    def __init__(self, Candidate):
        StateA = Candidate.state_a
        StateB = Candidate.state_b
        my_index = index.get()
        self.__state_a = StateA
        self.__state_b = StateB
        self.__state_index_sequence = StateA.state_index_sequence(
        ) + StateB.state_index_sequence()
        self.__state_index_to_state_key_db = dict(
            (state_index, i)
            for i, state_index in enumerate(self.__state_index_sequence))

        # Combined DropOut and Entry schemes are generated by the same function
        entry = TemplateState_Entry(my_index,
                                    self.__state_index_to_state_key_db,
                                    StateA.entry, StateB.entry)
        drop_out = MegaState_DropOut(StateA, StateB)
        MegaState.__init__(self, entry, drop_out, my_index)

        self.__transition_map, \
        self.__target_scheme_n = combine_maps(self.__state_a, self.__state_b)

        # Compatible with AnalyzerState
        # (A template state can never mimic an init state)
        self.__engine_type = None  # StateA.engine_type
        # self.input         = None # StateA.input # get_input_action(StateA.engine_type, InitStateF=False)

        MegaState.bad_company_set(
            self,
            self.__state_a.bad_company().union(self.__state_b.bad_company()))
Example #4
    def add_transition(self,
                       StartStateIdx,
                       TriggerSet,
                       TargetStateIdx=None,
                       AcceptanceF=False):
        """Adds a transition from Start to Target based on a given Trigger.

           TriggerSet can be of different types: ... see add_transition()
           
           (see comment on 'DFA_State::add_transition')

           RETURNS: The target state index.
        """
        # NOTE: The Transition Constructor is very tolerant, so no tests on TriggerSet()
        #       assert TriggerSet.__class__.__name__ == "NumberSet"
        assert type(
            TargetStateIdx
        ) == long or TargetStateIdx is None or TargetStateIdx in E_StateIndices

        # If target state is undefined (None) then a new one has to be created
        if TargetStateIdx is None: TargetStateIdx = state_machine_index.get()
        if self.states.has_key(StartStateIdx) == False:
            self.states[StartStateIdx] = DFA_State()
        if self.states.has_key(TargetStateIdx) == False:
            self.states[TargetStateIdx] = DFA_State()
        if AcceptanceF: self.states[TargetStateIdx].set_acceptance(True)

        self.states[StartStateIdx].add_transition(TriggerSet, TargetStateIdx)

        return TargetStateIdx
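
The behavior documented above, i.e. allocating a fresh target index when none is given and creating missing start and target states on demand, can be sketched with a self-contained toy (hypothetical ToyState/ToyStateMachine classes, not quex's actual API; a plain counter stands in for state_machine_index.get()):

import itertools

class ToyState:
    def __init__(self, acceptance=False):
        self.acceptance  = acceptance
        self.transitions = {}                    # trigger -> target state index

class ToyStateMachine:
    def __init__(self):
        self._index_counter   = itertools.count(1)
        self.init_state_index = 0
        self.states           = {0: ToyState()}

    def add_transition(self, start_si, trigger, target_si=None, acceptance=False):
        if target_si is None:                    # no target given => allocate one
            target_si = next(self._index_counter)
        self.states.setdefault(start_si, ToyState())
        self.states.setdefault(target_si, ToyState())
        if acceptance:
            self.states[target_si].acceptance = True
        self.states[start_si].transitions[trigger] = target_si
        return target_si

sm = ToyStateMachine()
si = sm.add_transition(sm.init_state_index, "a")     # fresh target index allocated
sm.add_transition(si, "b", acceptance=True)          # chain onto the returned index
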
Example #5
def __clone_until_acceptance(Dfa, StartSi):
    """Make a new DFA from the graph between the given 'StartSi' to the 
    until an acceptance state is reached. Walks from a given 'StartSi'
    along all paths until an acceptance state is reached.

    RETURNS: DFA containing the graph.
    """
    correspondance_db = {si: state_index.get() for si in Dfa.states}
    result = DFA(InitStateIndex=correspondance_db[StartSi],
                 AcceptanceF=Dfa.states[StartSi].is_acceptance())

    work_set = set([StartSi])
    done_set = set([StartSi])
    while work_set:
        si = work_set.pop()
        state = Dfa.states[si]

        if si == Dfa.init_state_index:
            result_state = result.get_init_state()
            target_si_iterable = state.target_map.get_target_state_index_list()
        elif not state.is_acceptance():
            result_state = state.clone(correspondance_db)
            target_si_iterable = state.target_map.get_target_state_index_list()
        else:
            result_state = DFA_State()
            result_state.set_acceptance()
            target_si_iterable = []

        work_set.update(target_si for target_si in target_si_iterable
                        if target_si not in done_set)
        result.states[correspondance_db[si]] = result_state

    return result
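
A minimal standalone sketch of the same worklist traversal, assuming a hypothetical dict-based DFA representation instead of quex's DFA/DFA_State classes; the index remapping via 'correspondance_db' is left out so that the cut at acceptance states stays visible:

def clone_until_acceptance(states, acceptance, start_si):
    """states:     {si: {trigger: target_si}}
       acceptance: set of accepting state indices
       RETURNS:    sub-graph reachable from 'start_si', cut at acceptance states.
    """
    result   = {}
    work_set = {start_si}
    done_set = {start_si}
    while work_set:
        si = work_set.pop()
        if si in acceptance:
            result[si] = {}                      # acceptance state => no successors
            continue
        result[si] = dict(states[si])
        new_si_set = set(states[si].values()) - done_set
        done_set  |= new_si_set
        work_set  |= new_si_set
    return result

states     = {0: {"a": 1, "b": 2}, 1: {"c": 3}, 2: {}, 3: {"d": 0}}
acceptance = {1}
# States 0, 1 and 2 are kept; state 3 is cut off behind the acceptance state 1.
print(clone_until_acceptance(states, acceptance, 0))
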
Example #6
def _prepare_entry_and_reentry(analyzer, OnBegin, OnStep):
    """Prepare the entry and re-entry doors into the initial state
    of the loop-implementing initial state.

                   .----------.
                   | on_entry |
                   '----------'
                        |         .------------.
                        |<--------| on_reentry |<-----.
                        |         '------------'      |
                .----------------.                    |
                |                +-----> Terminal ----+----> Exit
                |      ...       |
                |                +-----> Terminal - - 
                '----------------'

       RETURNS: DoorID of the re-entry door which is used to iterate in 
                the loop.
    """
    # Entry into state machine
    entry            = analyzer.init_state().entry
    init_state_index = analyzer.init_state_index
        
    # OnEntry
    ta_on_entry              = entry.get_action(init_state_index, 
                                                E_StateIndices.BEFORE_ENTRY)
    ta_on_entry.command_list = OpList.concatinate(ta_on_entry.command_list, 
                                                       OnBegin)

    # OnReEntry
    tid_reentry = entry.enter_OpList(init_state_index, index.get(), 
                                          OpList.from_iterable(OnStep))
    entry.categorize(init_state_index)

    return entry.get(tid_reentry).door_id
Example #7
    def __init__(self, Candidate):
        # The 'index' remains None, as long as the TemplateState is not an 
        # accepted element of a state machine. This makes sense, in particular
        # for TemplateStateCandidates (derived from TemplateState). 
        StateA = Candidate.state_a
        StateB = Candidate.state_b
        my_index                    = index.get()
        self.__state_a              = StateA
        self.__state_b              = StateB
        self.__state_index_sequence = StateA.state_index_sequence() + StateB.state_index_sequence()
        self.__state_index_to_state_key_db = dict((state_index, i) for i, state_index in enumerate(self.__state_index_sequence))

        # Combined DropOut and Entry schemes are generated by the same function
        entry    = TemplateState_Entry(my_index, self.__state_index_to_state_key_db, StateA.entry, StateB.entry)
        drop_out = MegaState_DropOut(StateA, StateB)
        MegaState.__init__(self, entry, drop_out, my_index)

        self.__transition_map, \
        self.__target_scheme_n = combine_maps(self.__state_a, self.__state_b)

        # Compatible with AnalyzerState
        # (A template state can never mimic an init state)
        self.__engine_type = None # StateA.engine_type
        # self.input         = None # StateA.input # get_input_action(StateA.engine_type, InitStateF=False)

        MegaState.bad_company_set(self, self.__state_a.bad_company().union(self.__state_b.bad_company()))
Example #8
def do(SM):
    """RETURNS: A state machines that matches anything which is 
               not matched by SM.

       Idea: The paths along SM do not guide to acceptance states,
             but to normal states.

             Any drop-out is translated into a transition into 
             the 'accept all state'.

       NOTE: This function produces a finite state automaton which
             is not applicable by itself. It would eat ANYTHING
             from a certain state on.
    """
    result = deepcopy(SM) # Not clone

    accept_all_state_index = index.get()
    state = State(AcceptanceF=True)
    state.add_transition(NumberSet_All(), accept_all_state_index)
    result.states[accept_all_state_index] = state

    def is_accept_all_state(sm, StateIndex):
        state = sm.states[StateIndex]
        if not state.is_acceptance():                return False
        tm    = state.target_map.get_map()
        if len(tm) != 1:                             return False
        elif tm.iterkeys().next() != StateIndex:     return False
        elif not tm.itervalues().next().is_all():    return False

        # Target is an 'Accept-All' state. Delete the transition.
        return True

    for state_index, state in SM.states.iteritems():
        # deepcopy --> use same state indices in SM and result
        result_state = result.states[state_index]
        assert state.target_map.is_DFA_compliant(), \
               "State machine must be transformed to DFA first: nfa_to_dfa.do()"

        # -- Every transition to 'Accept-All' state becomes a drop-out.
        for target_index in (i for i in state.target_map.get_target_state_index_list()
                               if is_accept_all_state(SM, i)):
            result_state.target_map.delete_transitions_to_target(target_index)

        # -- Every drop-out becomes a transition to 'Accept-All' state.
        trigger_set         = state.target_map.get_trigger_set_union()
        inverse_trigger_set = trigger_set.get_complement(Setup.buffer_codec.source_set)
        if not inverse_trigger_set.is_empty():
            result_state.add_transition(inverse_trigger_set, accept_all_state_index)

    # Every acceptance state becomes a non-acceptance state.
    # Every non-acceptance state becomes an acceptance state.
    for state_index, state in SM.states.iteritems():
        if state.is_acceptance(): 
            result.states[state_index].set_acceptance(False)
        elif state_index != SM.init_state_index:
            result.states[state_index].set_acceptance(True)

    result.clean_up()

    return result.clone()
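
The docstring above describes the complement construction in two moves: former drop-outs become transitions into a self-looping 'accept-all' state, and acceptance is inverted everywhere except in the init state. A hedged standalone sketch of just these two moves on a hypothetical dict-based DFA over the alphabet {'a', 'b'} (the original's additional step, deleting transitions to pre-existing accept-all states, is omitted):

ALPHABET = "ab"

def complement(states, acceptance, init_si):
    all_si     = max(states) + 1                 # fresh self-looping 'accept-all' state
    new_states = {si: dict(tm) for si, tm in states.items()}
    new_states[all_si] = {c: all_si for c in ALPHABET}
    for si, tm in states.items():
        for c in ALPHABET:
            if c not in tm:                      # former drop-out => go to 'accept-all'
                new_states[si][c] = all_si
    new_acceptance = {all_si} | {
        si for si in states
        if si not in acceptance and si != init_si
    }
    return new_states, new_acceptance

# DFA accepting exactly "a": init state 0, acceptance {1}.
# Its complement accepts "b", "aa", "ab", ... but neither "" nor "a".
print(complement({0: {"a": 1}, 1: {}}, {1}, 0))
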
Example #9
    def __init__(self, SM_A, SM_B, StartingSM=None):
        self.original = SM_A
        self.admissible = SM_B

        if StartingSM is None:
            self.result = StateMachine(
                InitStateIndex=index.map_state_combination_to_index(
                    (SM_A.init_state_index, SM_B.init_state_index)),
                InitState=self.get_state_core(SM_A.init_state_index,
                                              SM_B.init_state_index))
        else:
            self.result = StartingSM

        # TODO: Think if 'state_db' cannot be replaced by 'result'
        self.state_db = {}

        self.path = []

        # Use 'operation_index' to get a unique index that allows one to indicate
        # that 'SM_B' is no longer involved. Also, it ensures that the
        # generated state indices from (a_state_index, operation_index) are
        # unique.
        self.operation_index = index.get()

        TreeWalker.__init__(self)
Example #10
def _prepare_entry_and_reentry(analyzer, OnBegin, OnStep):
    """Prepare the entry and re-entry doors into the initial state
    of the loop-implementing initial state.

                   .----------.
                   | on_entry |
                   '----------'
                        |         .------------.
                        |<--------| on_reentry |<-----.
                        |         '------------'      |
                .----------------.                    |
                |                +-----> Terminal ----+----> Exit
                |      ...       |
                |                +-----> Terminal - - 
                '----------------'

       RETURNS: DoorID of the re-entry door which is used to iterate in 
                the loop.
    """
    # Entry into state machine
    entry = analyzer.init_state().entry
    init_state_index = analyzer.init_state_index

    # OnEntry
    ta_on_entry = entry.get_action(init_state_index, E_StateIndices.NONE)
    ta_on_entry.command_list = CommandList.concatinate(
        ta_on_entry.command_list, OnBegin)

    # OnReEntry
    tid_reentry = entry.enter_CommandList(init_state_index, index.get(),
                                          CommandList.from_iterable(OnStep))
    entry.categorize(init_state_index)

    return entry.get(tid_reentry).door_id
Example #11
def get_transition_map(TM, StateIndex, DropOutCatcher=None):
    if DropOutCatcher is None:
        DropOutCatcher = AnalyzerState(sm_index.get(), TransitionMap())

    def get_door_id(Target):
        return DoorID(Target, 0)
    tm = TransitionMap.from_iterable(TM, get_door_id)
    return tm.relate_to_TargetByStateKeys(StateIndex, DropOutCatcher)
Example #12
 def get_result_si(self, A_si, B_si, BridgeSet):
     if BridgeSet is None: key = (A_si, B_si)
     else: key = (A_si, B_si, tuple(sorted(BridgeSet)))
     result_si = self.state_setup_db.get(key)
     if result_si is None:
         result_si = state_index.get()
         self.state_setup_db[key] = result_si
     return result_si
Example #13
def get_transition_map(TM, StateIndex, DropOutCatcher=None):
    if DropOutCatcher is None:
        DropOutCatcher = AnalyzerState(sm_index.get(), TransitionMap())

    def get_door_id(Target):
        return DoorID(Target, 0)

    tm = TransitionMap.from_iterable(TM, get_door_id)
    return tm.relate_to_TargetByStateKeys(StateIndex, DropOutCatcher)
Example #14
def get_transition_map(TM, StateIndex, DropOutCatcher=None):
    global dial_db
    if DropOutCatcher is None:
        DropOutCatcher = FSM_State(sm_index.get(), TransitionMap(), dial_db=dial_db)

    def get_door_id(Target):
        return DoorID(Target, 0, dial_db)
    tm = TransitionMap.from_iterable(TM, get_door_id)
    return tm.relate_to_TargetByStateKeys(StateIndex, DropOutCatcher)
Example #15
def state_index_for_combination(state_setup_db, StateSetup):
    """RETURNS: [0] target index to represent 'StateSetup'
                [1] True, if the combination was new; False, else.
    """
    target_index = state_setup_db.get(StateSetup)
    if target_index is not None:
        return target_index, False
    target_index               = index.get()
    state_setup_db[StateSetup] = target_index
    return target_index, True
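
The same memoization pattern in self-contained form (illustrative names only; a plain itertools counter stands in for index.get()):

import itertools

allocate_index = itertools.count(1000).__next__    # stand-in for index.get()

def state_index_for_combination(state_setup_db, state_setup):
    if state_setup in state_setup_db:
        return state_setup_db[state_setup], False  # combination already registered
    new_index                   = allocate_index()
    state_setup_db[state_setup] = new_index
    return new_index, True                         # newly registered combination

db = {}
print(state_index_for_combination(db, (3, 7)))     # (1000, True)
print(state_index_for_combination(db, (3, 7)))     # (1000, False)
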
Example #16
    def map_incidence_id_to_state_index(self, IncidenceId):
        assert isinstance(IncidenceId, (int, long)) or IncidenceId in E_IncidenceIDs, \
               "Found <%s>" % IncidenceId

        index = self.__map_incidence_id_to_state_index.get(IncidenceId)
        if index is None:
            index = sm_index.get()
            self.__map_incidence_id_to_state_index[IncidenceId] = index

        return index
Example #17
    def map_incidence_id_to_state_index(self, IncidenceId):
        assert isinstance(IncidenceId, (int, long)) or IncidenceId in E_IncidenceIDs, \
               "Found <%s>" % IncidenceId

        index = self.__map_incidence_id_to_state_index.get(IncidenceId)
        if index is None:
            index = sm_index.get()
            self.__map_incidence_id_to_state_index[IncidenceId] = index

        return index
Example #18
 def specify(StateIndex, DoorSubIndex):
     if StateIndex is None:
         state_index = sm_index.get()  # generate a new StateIndex
     else:
         state_index = StateIndex
     if DoorSubIndex is None:
         door_sub_index = self.max_door_sub_index(state_index) + 1
     else:
         door_sub_index = DoorSubIndex
     return state_index, door_sub_index
Example #19
def generate_sm_for_boarders(Boarders, Trafo):
    sm = StateMachine()
    for ucs_char in Boarders:
        target_idx = index.get() 
        sms.line(sm, sm.init_state_index, 
                 (ucs_char, target_idx), (ucs_char, target_idx))
        sm.states[target_idx].set_acceptance()

    Trafo.adapt_source_and_drain_range(-1)
    verdict_f, result = Trafo.do_state_machine(sm, beautifier)
    assert verdict_f
    return result
Example #20
def generate_sm_for_boarders(Boarders, Trafo):
    sm = StateMachine()
    for ucs_char in Boarders:
        target_idx = index.get()
        sms.line(sm, sm.init_state_index, (ucs_char, target_idx),
                 (ucs_char, target_idx))
        sm.states[target_idx].set_acceptance()

    Trafo.adapt_source_and_drain_range(-1)
    verdict_f, result = Trafo.do_state_machine(sm, beautifier)
    assert verdict_f
    return result
Example #21
    def __init__(self, InitStateIndex=None, AcceptanceF=False, InitState=None, DoNothingF=False):
        # Get a unique state machine id 
        self.set_id(state_machine_index.get_state_machine_id())

        if DoNothingF: return

        if InitStateIndex is None: InitStateIndex = state_machine_index.get()
        self.init_state_index = InitStateIndex
            
        # State Index => State (information about what triggers transition to what target state).
        if InitState is None: InitState = State(AcceptanceF=AcceptanceF)
        self.states = { self.init_state_index: InitState }        
Example #22
def generate_sm_for_boarders(Boarders, Trafo):
    sm = DFA()
    for ucs_char in Boarders:
        target_idx = index.get()
        sms.line(sm, sm.init_state_index, (ucs_char, target_idx),
                 (ucs_char, target_idx))
        sm.states[target_idx].set_acceptance()

    Trafo.adapt_ranges_to_lexatom_type_range(Setup.lexatom.type_range)
    verdict_f, result = Trafo.do_state_machine(sm)
    assert verdict_f
    return result
Example #23
def create_state_machine(SM, Result, Class_StateMachine, Class_State):
    # If all states are of size one, this means that there were no states that
    # could have been combined. In this case a simple copy of the original
    # state machine will do.
    if len(filter(lambda state_set: len(state_set) != 1,
                  Result.state_set_list)) == 0:
        return SM.clone()

    # Define a mapping from the state set to a new target state index
    #
    # map:  state_set_index  --->  index of the state that represents it
    #
    map_new_state_index = dict([(i, state_machine_index.get())
                                for i in xrange(len(Result.state_set_list))])

    # The state set that contains the initial state becomes the initial state of
    # the new state machine.
    state_set_containing_initial_state_i = Result.map[SM.init_state_index]
    new_init_state_index = map_new_state_index[
        state_set_containing_initial_state_i]

    result = StateMachine(new_init_state_index)

    # Ensure that each target state index has a state inside the state machine
    # Build up the state machine out of the state sets
    for state_set_idx, state_set in enumerate(Result.state_set_list):

        new_state_index = map_new_state_index[state_set_idx]

        # Merge all core information of the states inside the state set.
        # If one state set contains an acceptance state, then the result is 'acceptance'.
        # (Note: The initial split separates acceptance states from those that are not
        #  acceptance states. There can be no state set containing acceptance and
        #  non-acceptance states)
        # (Note, that the prototype's info has not been included yet, consider whole set)
        result.states[new_state_index] = Class_State.new_merged_core_state(
            SM.states[i] for i in state_set)

    for state_set_idx, state_set in enumerate(Result.state_set_list):
        # The prototype: States in one set behave all equivalent with respect to target state sets
        # thus only one state from the start set has to be considered.
        prototype = SM.states[state_set[0]]
        representative = result.states[map_new_state_index[state_set_idx]]

        # The representative must have all transitions that the prototype has
        for target_state_index, trigger_set in prototype.target_map.get_map(
        ).iteritems():
            target_state_set_index = Result.map[target_state_index]
            target_index = map_new_state_index[target_state_set_index]
            representative.add_transition(trigger_set, target_index)

    return result
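
The rebuild step performed above, one fresh index per equivalence set plus set-wise redirection of a prototype's transitions, can be sketched on a hypothetical dict-based DFA (acceptance handling and the Class_State merge are left out):

import itertools

def rebuild_from_partition(states, init_si, state_set_list):
    """states:         {si: {trigger: target_si}}
       state_set_list: partition of the state indices into equivalence sets
       RETURNS: (new_states, new_init_si)
    """
    fresh        = itertools.count()
    set_index_of = {si: i for i, s in enumerate(state_set_list) for si in s}
    new_index_of = {i: next(fresh) for i in range(len(state_set_list))}

    new_states = {}
    for i, state_set in enumerate(state_set_list):
        prototype = states[sorted(state_set)[0]]   # any member of the set will do
        new_states[new_index_of[i]] = {
            trigger: new_index_of[set_index_of[target_si]]
            for trigger, target_si in prototype.items()
        }
    return new_states, new_index_of[set_index_of[init_si]]

# States 1 and 2 behave identically; they collapse into a single state.
states = {0: {"a": 1, "b": 2}, 1: {"a": 0}, 2: {"a": 0}}
print(rebuild_from_partition(states, 0, [{0}, {1, 2}]))
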
Example #24
    def __init__(self, FirstPath, TheAnalyzer):
        my_index       = index.get()
        ski_db         = StateKeyIndexDB([x.state_index for x in FirstPath.step_list],
                                         IgnoredListIndex=len(FirstPath.step_list)-1)
        MegaState.__init__(self, my_index, FirstPath.transition_map, ski_db, TheAnalyzer.dial_db)

        # Uniform OpList along entries on the path (optional)
        self.uniform_entry_OpList = FirstPath.uniform_entry_OpList.clone()

        self.__path_list = [ FirstPath.step_list ]

        # Following is set by 'finalize()'.
        self.__finalized = None # <-- ._finalize_content()
Example #25
    def __init__(self, FirstPath, TheAnalyzer):
        my_index       = index.get()
        ski_db         = StateKeyIndexDB([x.state_index for x in FirstPath.step_list],
                                         IgnoredListIndex=len(FirstPath.step_list)-1)
        MegaState.__init__(self, my_index, FirstPath.transition_map, ski_db)

        # Uniform OpList along entries on the path (optional)
        self.uniform_entry_OpList = FirstPath.uniform_entry_OpList.clone()

        self.__path_list = [ FirstPath.step_list ]

        # Following is set by 'finalize()'.
        self.__finalized = None # <-- ._finalize_content()
Example #26
    def clone(self,
              ReplDbStateIndex=None,
              ReplDbPreContext=None,
              ReplDbAcceptance=None,
              StateMachineId=None):
        """Clone state machine, i.e. create a new one with the same behavior,
        i.e. transitions, but with new unused state indices. This is used when
        state machines are to be created that combine the behavior of more
        than one state machine. E.g. see the function 'sequentialize'. Note:
        the state ids SUCCESS and TERMINATION are not replaced by new ones.

        RETURNS: cloned object if cloning successful
                 None          if cloning not possible due to external state references

        """
        def assert_transitivity(db):
            """Ids and their replacement remain in order, i.e. if x > y then db[x] > dv[y]."""
            if db is None: return
            prev_new = -1
            for old, new in sorted(db.iteritems(),
                                   key=itemgetter(0)):  # x[0] = old value
                assert new > prev_new
                prev_new = new

        def assert_uniqueness(db):
            if db is None: return
            reference_set = set()
            for value in db.itervalues():
                assert value not in reference_set
                reference_set.add(value)

        assert_uniqueness(ReplDbStateIndex)
        assert_uniqueness(ReplDbPreContext)
        assert_uniqueness(ReplDbAcceptance)
        assert_transitivity(ReplDbAcceptance)

        if ReplDbStateIndex is None:
            ReplDbStateIndex = dict((si, state_machine_index.get())
                                    for si in sorted(self.states.iterkeys()))

        iterable = ((ReplDbStateIndex[si],
                     state.clone(ReplDbStateIndex,
                                 ReplDbPreContext=ReplDbPreContext,
                                 ReplDbAcceptance=ReplDbAcceptance))
                    for si, state in self.states.iteritems())

        result = DFA.from_iterable(ReplDbStateIndex[self.init_state_index],
                                   iterable)
        if StateMachineId is not None: result.set_id(StateMachineId)

        return result
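
The two consistency checks used above can be illustrated in isolation (plain Python on hypothetical replacement dictionaries): a replacement db must be injective, and the acceptance-id replacement must additionally preserve ordering:

def is_unique(db):
    return len(set(db.values())) == len(db)        # injective mapping

def is_order_preserving(db):
    new_ids = [new for _, new in sorted(db.items())]
    return all(a < b for a, b in zip(new_ids, new_ids[1:]))

repl_acceptance = {10: 100, 20: 105, 30: 220}
assert is_unique(repl_acceptance)
assert is_order_preserving(repl_acceptance)
assert not is_order_preserving({10: 300, 20: 105})  # ordering violated
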
Example #27
def _get_loop_analyzer(LoopMap, EventHandler):
    """Construct a state machine that triggers only on one character. Actions
    according to the triggered character are implemented using terminals which
    are entered upon acceptance.

            .------.
       ---->| Loop |
            |      |----> accept A                 (normal loop terminals)
            |      |----> accept B
            |      |----> accept C
            :      :         :
            |      |----> accept CoupleIncidenceA  (couple terminals towards
            |      |----> accept CoupleIncidenceB   appendix state machines)
            |      |----> accept CoupleIncidenceC    
            :______:         :
            | else |----> accept iid_loop_exit
            '------'

    RETURNS: [0] Loop analyzer (prepared state machine)
             [1] DoorID of loop entry
    """
    # Loop StateMachine
    sm = StateMachine.from_IncidenceIdMap(
        (lei.character_set, lei.incidence_id) for lei in LoopMap)

    # Code Transformation
    verdict_f, sm = Setup.buffer_codec.do_state_machine(sm, beautifier)

    # Loop Analyzer
    analyzer = analyzer_generator.do(
        sm,
        EventHandler.engine_type,
        EventHandler.reload_state_extern,
        OnBeforeReload=EventHandler.on_before_reload,
        OnAfterReload=EventHandler.on_after_reload,
        OnBeforeEntry=EventHandler.on_loop_entry)

    # If reload state is generated
    # => All other analyzers MUST use the same generated reload state.
    if EventHandler.reload_state_extern is None:
        EventHandler.reload_state_extern = analyzer.reload_state

    # Set the 'Re-Entry' Operations.
    entry = analyzer.init_state().entry
    tid_reentry = entry.enter_OpList(analyzer.init_state_index, index.get(),
                                     EventHandler.on_loop_reentry)
    entry.categorize(analyzer.init_state_index)

    return analyzer, entry.get(tid_reentry).door_id
Example #28
def _get_loop_analyzer(LoopMap, EventHandler):
    """Construct a state machine that triggers only on one character. Actions
    according to the triggered character are implemented using terminals which
    are entered upon acceptance.

            .------.
       ---->| Loop |
            |      |----> accept A                 (normal loop terminals)
            |      |----> accept B
            |      |----> accept C
            :      :         :
            |      |----> accept CoupleIncidenceA  (couple terminals towards
            |      |----> accept CoupleIncidenceB   appendix state machines)
            |      |----> accept CoupleIncidenceC    
            :______:         :
            | else |----> accept iid_loop_exit
            '------'

    RETURNS: [0] Loop analyzer (prepared state machine)
             [1] DoorID of loop entry
    """
    # Loop StateMachine
    sm            = StateMachine.from_IncidenceIdMap(
                        (lei.character_set, lei.incidence_id) for lei in LoopMap
                    )

    # Code Transformation
    verdict_f, sm = Setup.buffer_codec.do_state_machine(sm, beautifier)

    # Loop Analyzer
    analyzer = analyzer_generator.do(sm, 
                                     EventHandler.engine_type, 
                                     EventHandler.reload_state_extern, 
                                     OnBeforeReload = EventHandler.on_before_reload, 
                                     OnAfterReload  = EventHandler.on_after_reload,
                                     OnBeforeEntry  = EventHandler.on_loop_entry)

    # If reload state is generated 
    # => All other analyzers MUST use the same generated reload state.
    if EventHandler.reload_state_extern is None:
        EventHandler.reload_state_extern = analyzer.reload_state

    # Set the 'Re-Entry' Operations.
    entry       = analyzer.init_state().entry
    tid_reentry = entry.enter_OpList(analyzer.init_state_index, index.get(), 
                                     EventHandler.on_loop_reentry)
    entry.categorize(analyzer.init_state_index)

    return analyzer, entry.get(tid_reentry).door_id
Example #29
def basic_setup(Triggers):
    state_index_list = [ index.get() for i in xrange(2) ]

    # AnalyzerState-s: The base.
    setup_list = [   
        # StateIndex, TransitionMap
        (long(state_index), recursive_tm(Triggers[i], long(state_index)))
        for i, state_index in enumerate(state_index_list)
    ] 

    analyzer = get_Analyzer(setup_list)

    return analyzer, \
           analyzer.state_db[state_index_list[0]], \
           analyzer.state_db[state_index_list[1]]
Example #30
def basic_setup(Triggers):
    state_index_list = [ index.get() for i in xrange(2) ]

    # AnalyzerState-s: The base.
    setup_list = [   
        # StateIndex, TransitionMap
        (long(state_index), recursive_tm(Triggers[i], long(state_index)))
        for i, state_index in enumerate(state_index_list)
    ] 

    analyzer = get_Analyzer(setup_list)

    return analyzer, \
           analyzer.state_db[state_index_list[0]], \
           analyzer.state_db[state_index_list[1]]
Example #31
    def __init__(self,
                 InitStateIndex=None,
                 AcceptanceF=False,
                 InitState=None,
                 DoNothingF=False):
        # Get a unique state machine id
        self.set_id(state_machine_index.get_state_machine_id())

        if DoNothingF: return

        if InitStateIndex is None: InitStateIndex = state_machine_index.get()
        self.init_state_index = InitStateIndex

        # State Index => State (information about what triggers transition to what target state).
        if InitState is None: InitState = State(AcceptanceF=AcceptanceF)
        self.states = {self.init_state_index: InitState}
Example #32
    def __init__(self, StartState, StartCharacter, AdaptedTransitionMap):
        assert StartState is None     or isinstance(StartState, AnalyzerState)
        assert StartCharacter is None or isinstance(StartCharacter, (int, long))
        assert AdaptedTransitionMap is None or isinstance(AdaptedTransitionMap, list)

        if StartState is None: return # Only for Clone

        self.index    = index.get()
        self.entry    = PathWalkerState_Entry(self.index, StartState.entry)
        self.drop_out = MegaState_DropOut(StartState) 

        self.__sequence         = [ CharacterPathElement(StartState.index, StartCharacter) ]
        self.__transition_map   = AdaptedTransitionMap
        # Set the 'void' target to indicate wildcard.
        transition_map_tools.set(self.__transition_map, StartCharacter, E_StateIndices.VOID)
        self.__wildcard_char  = StartCharacter
Example #33
def create_state_machine(SM, Result, Class_StateMachine, Class_State):
    # If all states are of size one, this means that there were no states that
    # could have been combined. In this case a simple copy of the original
    # state machine will do.
    if len(filter(lambda state_set: len(state_set) != 1, Result.state_set_list)) == 0:
        return SM.clone()
    
    # Define a mapping from the state set to a new target state index
    #
    # map:  state_set_index  --->  index of the state that represents it
    #
    map_new_state_index = dict([(i, state_machine_index.get()) for i in xrange(len(Result.state_set_list))])
                
    # The state set that contains the initial state becomes the initial state of 
    # the new state machine.   
    state_set_containing_initial_state_i = Result.map[SM.init_state_index]
    new_init_state_index                 = map_new_state_index[state_set_containing_initial_state_i]

    result = StateMachine(new_init_state_index)

    # Ensure that each target state index has a state inside the state machine
    # Build up the state machine out of the state sets
    for state_set_idx, state_set in enumerate(Result.state_set_list):

        new_state_index = map_new_state_index[state_set_idx]

        # Merge all core information of the states inside the state set.
        # If one state set contains an acceptance state, then the result is 'acceptance'.
        # (Note: The initial split separates acceptance states from those that are not
        #  acceptance states. There can be no state set containing acceptance and 
        #  non-acceptance states) 
        # (Note, that the prototype's info has not been included yet, consider whole set)
        result.states[new_state_index] = Class_State.new_merged_core_state(SM.states[i] for i in state_set)

    for state_set_idx, state_set in enumerate(Result.state_set_list):
        # The prototype: States in one set behave all equivalent with respect to target state sets
        # thus only one state from the start set has to be considered.      
        prototype    = SM.states[state_set[0]]
        representative = result.states[map_new_state_index[state_set_idx]]

        # The representative must have all transitions that the prototype has
        for target_state_index, trigger_set in prototype.target_map.get_map().iteritems():
            target_state_set_index = Result.map[target_state_index]
            target_index           = map_new_state_index[target_state_set_index]
            representative.add_transition(trigger_set, target_index)

    return result    
Example #34
    def __init__(self, StartState, StartCharacter, AdaptedTransitionMap):
        if StartState is None: return # Only for Clone

        assert StartState is None           or isinstance(StartState, AnalyzerState)
        assert StartCharacter is None       or isinstance(StartCharacter, (int, long))
        assert AdaptedTransitionMap is None or isinstance(AdaptedTransitionMap, list)

        self.index    = index.get()
        self.entry    = PathWalkerState_Entry(self.index, StartState.entry)
        self.drop_out = MegaState_DropOut(StartState) 

        self.__sequence         = [ CharacterPathElement(StartState.index, StartCharacter) ]
        self.__transition_map   = AdaptedTransitionMap

        # Set the 'void' target to indicate wildcard.
        self.__wildcard_char    = StartCharacter
        transition_map_tools.set(self.__transition_map, StartCharacter, E_StateIndices.VOID)
Example #35
    def clone(self, ReplDbStateIndex=None, ReplDbPreContext=None, ReplDbAcceptance=None):
        """Clone state machine, i.e. create a new one with the same behavior,
        i.e. transitions, but with new unused state indices. This is used when
        state machines are to be created that combine the behavior of more
        than one state machine. E.g. see the function 'sequentialize'. Note:
        the state ids SUCCESS and TERMINATION are not replaced by new ones.

        RETURNS: cloned object if cloning successful
                 None          if cloning not possible due to external state references

        """
        def assert_transitivity(db):
            """Ids and their replacement remain in order, i.e. if x > y then db[x] > dv[y]."""
            if db is None: return
            prev_new = -1
            for old, new in sorted(db.iteritems(), key=itemgetter(0)): # x[0] = old value
                assert new > prev_new
                prev_new = new

        def assert_uniqueness(db):
            if db is None: return
            reference_set = set()
            for value in db.itervalues():
                assert value not in reference_set
                reference_set.add(value)

        assert_uniqueness(ReplDbStateIndex)
        assert_uniqueness(ReplDbPreContext)
        assert_uniqueness(ReplDbAcceptance)
        assert_transitivity(ReplDbAcceptance)

        if ReplDbStateIndex is None: 
            ReplDbStateIndex = dict(
                (si, state_machine_index.get())
                for si in sorted(self.states.iterkeys())
            )

        iterable = (
            (ReplDbStateIndex[si], state.clone(ReplDbStateIndex, 
                                               ReplDbPreContext=ReplDbPreContext,
                                               ReplDbAcceptance=ReplDbAcceptance))
            for si, state in self.states.iteritems()
        )
        
        return StateMachine.from_iterable(ReplDbStateIndex[self.init_state_index], 
                                          iterable)
Example #36
def _get_analyzer_for_loop(loop_sm, loop_config, CutSignalLexatomsF):
    """Construct a state machine that triggers only on one character. Actions
    according to the triggered character are implemented using terminals which
    are entered upon acceptance.

            .------.
       ---->| Loop |
            |      |----> accept A                 (normal loop terminals)
            |      |----> accept B
            |      |----> accept C
            :      :         :
            |      |----> accept CoupleIncidenceA  (couple terminals towards
            |      |----> accept CoupleIncidenceB   appendix state machines)
            |      |----> accept CoupleIncidenceC    
            :______:         :
            | else |----> accept iid_loop_exit
            '------'

    RETURNS: [0] Loop analyzer (prepared state machine)
             [1] DoorID of loop entry
    """

    # Loop FSM
    analyzer = analyzer_generator.do(
        loop_sm,
        loop_config.engine_type,
        loop_config.reload_state_extern,
        OnBeforeReload=loop_config.events.on_before_reload,
        OnAfterReload=loop_config.events.on_after_reload,
        OnBeforeEntry=loop_config.events.on_loop_entry,
        dial_db=loop_config.dial_db,
        OnReloadFailureDoorId=loop_config.door_id_on_reload_failure,
        CutF=CutSignalLexatomsF)

    # If reload state is generated
    # => All other analyzers MUST use the same generated reload state.
    if loop_config.reload_state_extern is None:
        loop_config.reload_state_extern = analyzer.reload_state

    # Set the 'Re-Entry' Operations.
    entry = analyzer.init_state().entry
    tid_reentry = entry.enter_OpList(analyzer.init_state_index, index.get(),
                                     loop_config.events.on_loop_reentry)
    entry.categorize(analyzer.init_state_index)

    return analyzer, entry.get(tid_reentry).door_id
Example #37
def create_state_machine(SM, Result, Class_StateMachine, Class_State):
    # If all states are of size one, this means that there were no states that
    # could have been combined. In this case a simple copy of the original
    # state machine will do.
    if len(filter(lambda state_set: len(state_set) != 1,
                  Result.state_set_list)) == 0:
        return SM.clone()

    # Define a mapping from the state set to a new target state index
    #
    # map:  state_set_index  --->  index of the state that represents it
    #
    map_new_state_index = dict([(i, state_machine_index.get())
                                for i in xrange(len(Result.state_set_list))])

    # The state set that contains the initial state becomes the initial state of
    # the new state machine.
    state_set_containing_initial_state_i = Result.map[SM.init_state_index]
    new_init_state_index = map_new_state_index[
        state_set_containing_initial_state_i]

    result = Class_StateMachine(new_init_state_index)

    # Ensure that each target state index has a state inside the state machine
    # Build up the state machine out of the state sets
    for state_set_idx, state_set in enumerate(Result.state_set_list):

        new_state_index = map_new_state_index[state_set_idx]

        # Merge all operations of states which are combined into one.
        result.states[new_state_index] = Class_State.from_state_iterable(
            SM.states[i] for i in state_set)

    for state_set_idx, state_set in enumerate(Result.state_set_list):
        # The prototype: States in one set behave all equivalent with respect to target state sets
        # thus only one state from the start set has to be considered.
        prototype = SM.states[state_set[0]]
        representative = result.states[map_new_state_index[state_set_idx]]

        # The representative must have all transitions that the prototype has
        for target_state_index, trigger_set in prototype.target_map:
            target_state_set_index = Result.map[target_state_index]
            target_index = map_new_state_index[target_state_set_index]
            representative.add_transition(trigger_set, target_index)

    return result
Example #38
    def __init__(self, Candidate):
        StateA = Candidate.state_a
        StateB = Candidate.state_b

        # Combined DropOut and Entry schemes are generated by the same function
        transition_map, target_scheme_n = combine_maps(StateA.transition_map, StateB.transition_map)

        ski_db = StateKeyIndexDB(StateA.state_index_sequence() + StateB.state_index_sequence())
        MegaState.__init__(self, index.get(), transition_map, ski_db)

        self.uniform_entry_OpList = UniformObject.from_iterable((
                                                       StateA.uniform_entry_OpList,
                                                       StateB.uniform_entry_OpList))

        self.__target_scheme_n = target_scheme_n
        self.__engine_type     = None # StateA.engine_type

        MegaState.bad_company_set(self, StateA.bad_company().union(StateB.bad_company()))
Example #39
    def clone_subset(self, StartSi, StateSiSet, DfaId=None):
        """Should do the same as 'clone_from_state_subset()', replacement 
        can be made after unit tests.
        """
        correspondance_db = {
            si: state_machine_index.get()
            for si in StateSiSet
        }
        result = DFA(InitStateIndex=correspondance_db[StartSi], DfaId=DfaId)

        result.states = {
            # '.clone(correspondance_db)' only clones transitions to target states
            # which are mentioned in 'correspondance_db'.
            correspondance_db[si]: self.states[si].clone(correspondance_db)
            for si in StateSiSet
        }

        return result
Example #40
    def create_new_state(self,
                         AcceptanceF=False,
                         StateIdx=None,
                         RestoreInputPositionF=False,
                         MarkAcceptanceId=None):
        """RETURNS: DFA_State index of the new state.
        """
        if StateIdx is None: new_si = state_machine_index.get()
        else: new_si = StateIdx

        new_state = DFA_State(AcceptanceF or MarkAcceptanceId is not None)
        if MarkAcceptanceId is not None:
            new_state.mark_acceptance_id(MarkAcceptanceId)
            if RestoreInputPositionF:
                new_state.set_read_position_restore_f()

        self.states[new_si] = new_state
        return new_si
Example #41
    def __init__(self, Candidate):
        StateA = Candidate.state_a
        StateB = Candidate.state_b

        # Combined DropOut and Entry schemes are generated by the same function
        transition_map, target_scheme_n = combine_maps(StateA.transition_map, StateB.transition_map)

        ski_db = StateKeyIndexDB(StateA.state_index_sequence() + StateB.state_index_sequence())
        MegaState.__init__(self, index.get(), transition_map, ski_db, 
                           dial_db=StateA.entry.dial_db)

        self.uniform_entry_OpList = UniformObject.from_iterable((
                                                       StateA.uniform_entry_OpList,
                                                       StateB.uniform_entry_OpList))

        self.__target_scheme_n = target_scheme_n
        self.__engine_type     = None # StateA.engine_type

        MegaState.bad_company_set(self, StateA.bad_company().union(StateB.bad_company()))
Example #42
def get_all():
    """RETURNS:

       A state machine that 'eats' absolutely everything, i.e. 


                              .--- \Any ---.
                              |            |
           (0)--- \Any --->(( 0 ))<--------'
    """
    result = StateMachine()

    i = index.get()
    state = State(AcceptanceF=True)
    state.add_transition(NumberSet_All(), i)
    result.states[i] = state

    result.get_init_state().add_transition(NumberSet_All(), i)

    return result
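
A tiny standalone check of the 'eats everything' idea on a hypothetical dict-based automaton: any character leads from the init state into an accepting state that loops on any character, so every non-empty input is accepted:

ANY = object()                                    # stands in for NumberSet_All()

states     = {0: {ANY: 1}, 1: {ANY: 1}}
acceptance = {1}

def accepts(word, si=0):
    for _ in word:
        si = states[si][ANY]
    return si in acceptance

assert accepts("x") and accepts("anything at all")
assert not accepts("")                            # the init state is not accepting
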
Example #43
def get_all():
    """RETURNS:

       A state machine that 'eats' absolutely everything, i.e. 


                              .--- \Any ---.
                              |            |
           (0)--- \Any --->(( 0 ))<--------'
    """
    result = StateMachine()

    i      = index.get()
    state  = State(AcceptanceF=True)
    state.add_transition(NumberSet(Interval(-sys.maxint, sys.maxint)), i)
    result.states[i] = state

    result.get_init_state().add_transition(NumberSet(Interval(-sys.maxint, sys.maxint)), i)

    return result
Example #44
    def get(self, NameOrTerminalID):
        """NameOrTerminalID is something that identifies a position/address 
                            in the code. This function returns a numeric id
                            for this address. 

           Exceptions are labels that are 'unique' inside a state machine 
           as defined by '__address_db_special'. For those the string itself
           is returned.
        """
        # Special addresses are not treated, but returned as string
        if NameOrTerminalID in self.__special:
            return NameOrTerminalID

        # If the thing is known, return its id immediately
        entry = self.__db.get(NameOrTerminalID)
        if entry is not None: return entry

        # Generate unique id for the label: Use unique state index
        entry = index.get()
        self.__db[NameOrTerminalID] = entry
        return entry
Example #45
    def __init__(self,
                 InitStateIndex=None,
                 AcceptanceF=False,
                 InitState=None,
                 DoNothingF=False,
                 DfaId=None):
        if DfaId is None:
            self.set_id(state_machine_index.get_state_machine_id())
        else:
            self.set_id(DfaId)

        self.sr = SourceRef_VOID

        if DoNothingF: return

        if InitStateIndex is None: InitStateIndex = state_machine_index.get()
        self.init_state_index = InitStateIndex

        # DFA_State Index => DFA_State (information about what triggers transition to what target state).
        if InitState is None: InitState = DFA_State(AcceptanceF=AcceptanceF)
        self.states = {self.init_state_index: InitState}
Example #46
    def __init__(self, SM_A, SM_B, result=None):
        self.original = SM_A
        self.admissible = SM_B

        if result is None:
            init_state_index = index.map_state_combination_to_index(
                (SM_A.init_state_index, SM_B.init_state_index))
            state = self.get_state_core(SM_A.init_state_index)
            self.result = StateMachine(InitStateIndex=init_state_index,
                                       InitState=state)
        else:
            self.result = result
        self.path = []

        # Use 'operation_index' to get a unique index that allows one to indicate
        # that 'SM_B' is no longer involved. Also, it ensures that the
        # generated state indices from (a_state_index, operation_index) are
        # unique.
        self.operation_index = index.get()

        TreeWalker.__init__(self)
Example #47
    def __init__(self, SM_A, SM_B, result=None):
        self.original    = SM_A
        self.admissible  = SM_B

        if result is None:
            init_state_index = index.map_state_combination_to_index((SM_A.init_state_index, 
                                                                     SM_B.init_state_index))
            state            = self.get_state_core(SM_A.init_state_index)
            self.result      = StateMachine(InitStateIndex = init_state_index,
                                            InitState      = state)
        else:
            self.result      = result
        self.path        = []

        # Use 'operation_index' to get a unique index that allows one to indicate
        # that 'SM_B' is no longer involved. Also, it ensures that the
        # generated state indices from (a_state_index, operation_index) are
        # unique.
        self.operation_index = index.get()

        TreeWalker.__init__(self)
Example #48
    def get(self, NameOrTerminalID):
        """NameOrTerminalID is something that identifies a position/address 
                            in the code. This function returns a numeric id
                            for this address. 

           Exceptions are labels that are 'unique' inside a state machine 
           as defined by '__address_db_special'. For those the string itself
           is returned.
        """
        # Special addresses are not treated, but returned as string
        if NameOrTerminalID in self.__special:
            return NameOrTerminalID

        # If the thing is known, return its id immediately
        entry = self.__db.get(NameOrTerminalID)
        if entry is not None: return entry

        # Generate unique id for the label: Use unique state index
        entry = index.get()
        self.__db[NameOrTerminalID] = entry
        return entry
Example #49
    def add_transition(self, StartStateIdx, TriggerSet, TargetStateIdx = None, AcceptanceF = False):
        """Adds a transition from Start to Target based on a given Trigger.

           TriggerSet can be of different types: ... see add_transition()
           
           (see comment on 'State::add_transition')

           RETURNS: The target state index.
        """
        # NOTE: The Transition Constructor is very tolerant, so no tests on TriggerSet()
        #       assert TriggerSet.__class__.__name__ == "NumberSet"
        assert type(TargetStateIdx) == long or TargetStateIdx is None

        # If target state is undefined (None) then a new one has to be created
        if TargetStateIdx is None:                       TargetStateIdx = state_machine_index.get()
        if self.states.has_key(StartStateIdx) == False:  self.states[StartStateIdx]  = State()        
        if self.states.has_key(TargetStateIdx) == False: self.states[TargetStateIdx] = State()
        if AcceptanceF:                                  self.states[TargetStateIdx].set_acceptance(True)

        self.states[StartStateIdx].add_transition(TriggerSet, TargetStateIdx)

        return TargetStateIdx
Example #50
    def __init__(self, Candidate):
        StateA = Candidate.state_a
        StateB = Candidate.state_b
        my_index                    = index.get()
        self.__state_a              = StateA
        self.__state_b              = StateB
        self.__state_index_sequence = StateA.state_index_sequence() + StateB.state_index_sequence()
        self.__state_index_to_state_key_db = dict((state_index, i) for i, state_index in enumerate(self.__state_index_sequence))

        # Combined DropOut and Entry schemes are generated by the same function
        entry    = TemplateState_Entry(my_index, self.__state_index_to_state_key_db, StateA.entry, StateB.entry)
        drop_out = MegaState_DropOut(StateA, StateB)
        MegaState.__init__(self, entry, drop_out, my_index)

        self.__transition_map, \
        self.__target_scheme_n = combine_maps(self.__state_a, self.__state_b)

        # Compatible with AnalyzerState
        # (A template state can never mimic an init state)
        self.__engine_type = None # StateA.engine_type
        # self.input         = None # StateA.input # get_input_action(StateA.engine_type, InitStateF=False)

        MegaState.bad_company_set(self, self.__state_a.bad_company().union(self.__state_b.bad_company()))
Example #51
    def __init__(self, SM_A, SM_B, StartingSM=None):
        self.original   = SM_A
        self.admissible = SM_B

        if StartingSM is None:
            self.result = StateMachine(InitStateIndex = index.map_state_combination_to_index((SM_A.init_state_index, 
                                                                                              SM_B.init_state_index)), 
                                       InitState      = self.get_state_core(SM_A.init_state_index, 
                                                                            SM_B.init_state_index))
        else:
            self.result = StartingSM

        # TODO: Think if 'state_db' cannot be replaced by 'result'
        self.state_db   = {}

        self.path       = []

        # Use 'operation_index' to get a unique index that allows one to indicate
        # that 'SM_B' is no longer involved. Also, it ensures that the
        # generated state indices from (a_state_index, operation_index) are
        # unique.
        self.operation_index = index.get()

        TreeWalker.__init__(self)
Example #52
def get_skipper(EndSequence, Mode=None, IndentationCounterTerminalID=None, OnSkipRangeOpenStr=""):
    assert type(EndSequence) == list
    assert len(EndSequence) >= 1
    assert map(type, EndSequence) == [int] * len(EndSequence)

    local_variable_db = {}

    global template_str

    LanguageDB = Setup.language_db

    # Name the $$SKIPPER$$
    skipper_index = sm_index.get()

    # Determine the $$DELIMITER$$
    delimiter_str, delimiter_comment_str = get_character_sequence(EndSequence)
    delimiter_length = len(EndSequence)

    tmp = []
    LanguageDB.COMMENT(tmp, "                         Delimiter: %s" % delimiter_comment_str)
    delimiter_comment_str = "".join(tmp)
    # Determine the check for the tail of the delimiter
    delimiter_remainder_test_str = ""
    if len(EndSequence) != 1:
        txt = ""
        i = 0
        for letter in EndSequence[1:]:
            i += 1
            txt += "    %s\n" % LanguageDB.ASSIGN("input", LanguageDB.INPUT_P_DEREFERENCE(i - 1))
            txt += "    %s" % LanguageDB.IF_INPUT("!=", "Skipper$$SKIPPER_INDEX$$[%i]" % i)
            txt += "         %s" % LanguageDB.GOTO(skipper_index)
            txt += "    %s" % LanguageDB.END_IF()
        delimiter_remainder_test_str = txt

    if not end_delimiter_is_subset_of_indentation_counter_newline(Mode, EndSequence):
        goto_after_end_of_skipping_str = LanguageDB.GOTO(E_StateIndices.ANALYZER_REENTRY)
    else:
        # If there is indentation counting involved, then the counter's terminal id must
        # be determined at this place.
        assert IndentationCounterTerminalID is not None
        # If the ending delimiter is a subset of what the 'newline' pattern triggers
        # in indentation counting => move on to the indentation counter.
        goto_after_end_of_skipping_str = LanguageDB.GOTO_TERMINAL(IndentationCounterTerminalID)

    if OnSkipRangeOpenStr != "":
        on_skip_range_open_str = OnSkipRangeOpenStr
    else:
        on_skip_range_open_str = get_on_skip_range_open(Mode, EndSequence)

    # The main part
    code_str = blue_print(
        template_str,
        [
            ["$$DELIMITER_COMMENT$$", delimiter_comment_str],
            ["$$INPUT_P_INCREMENT$$", LanguageDB.INPUT_P_INCREMENT()],
            ["$$INPUT_P_DECREMENT$$", LanguageDB.INPUT_P_DECREMENT()],
            ["$$INPUT_GET$$", LanguageDB.ACCESS_INPUT()],
            ["$$IF_INPUT_EQUAL_DELIMITER_0$$", LanguageDB.IF_INPUT("==", "Skipper$$SKIPPER_INDEX$$[0]")],
            ["$$ENDIF$$", LanguageDB.END_IF()],
            ["$$ENTRY$$", LanguageDB.LABEL(skipper_index)],
            ["$$RELOAD$$", get_label("$reload", skipper_index)],
            ["$$GOTO_ENTRY$$", LanguageDB.GOTO(skipper_index)],
            ["$$INPUT_P_TO_LEXEME_START$$", LanguageDB.INPUT_P_TO_LEXEME_START()],
            # When things were skipped, no change to acceptance flags or modes has
            # happened. One can jump immediately to the start without re-entry preparation.
            ["$$GOTO_AFTER_END_OF_SKIPPING$$", goto_after_end_of_skipping_str],
            ["$$MARK_LEXEME_START$$", LanguageDB.LEXEME_START_SET()],
            ["$$DELIMITER_REMAINDER_TEST$$", delimiter_remainder_test_str],
            ["$$ON_SKIP_RANGE_OPEN$$", on_skip_range_open_str],
        ],
    )

    # Line and column number counting
    code_str, reference_p_f = __lc_counting_replacements(code_str, EndSequence)

    # The finishing touch
    code_str = blue_print(
        code_str,
        [["$$SKIPPER_INDEX$$", __nice(skipper_index)], ["$$GOTO_RELOAD$$", get_label("$reload", skipper_index)]],
    )

    if reference_p_f:
        variable_db.enter(local_variable_db, "reference_p", Condition="QUEX_OPTION_COLUMN_NUMBER_COUNTING")

    variable_db.enter(local_variable_db, "Skipper%i", "{ %s }" % delimiter_str, delimiter_length, Index=skipper_index)
    variable_db.enter(local_variable_db, "Skipper%iL", "%i" % delimiter_length, Index=skipper_index)
    variable_db.enter(local_variable_db, "text_end")
    return code_str, local_variable_db
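
The skipper generators in this and the following examples fill a code template via blue_print(). The helper itself is not shown; the sketch below illustrates the placeholder substitution it is assumed to perform, judging only from how it is called above:

# Hedged sketch of a blue_print()-style helper: replace each placeholder in
# the template with its substitute. The real quex function may differ in
# detail; this only mirrors the interface used in the surrounding examples.
def blue_print(template, replacement_pairs):
    result = template
    for placeholder, substitute in replacement_pairs:
        result = result.replace(placeholder, substitute)
    return result

# Usage, mirroring the calls above (values are made up):
code = blue_print("case $$SKIPPER_INDEX$$: goto $$GOTO_ENTRY$$;",
                  [["$$SKIPPER_INDEX$$", "4711"],
                   ["$$GOTO_ENTRY$$",    "_SKIPPER_4711_ENTRY"]])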
Example #53
0
def get_skipper(EndSequence, CloserPattern, ModeName, OnSkipRangeOpen, DoorIdAfter):
    assert len(EndSequence) >= 1

    global template_str

    # Name the $$SKIPPER$$
    skipper_index   = sm_index.get()
    skipper_door_id = dial_db.new_door_id(skipper_index)

    delimiter_str, delimiter_comment_str = get_character_sequence(EndSequence)

    end_sequence_transformed = transformation.do_sequence(EndSequence)

    # Determine the $$DELIMITER$$
    delimiter_length = len(end_sequence_transformed)

    delimiter_comment_str = Lng.COMMENT("                         Delimiter: %s" % delimiter_comment_str)

    # Determine the check for the tail of the delimiter
    delimiter_remainder_test_str = ""
    if len(EndSequence) != 1: 
        txt = "".join(
            "    %s" % Lng.IF_GOTO(Lng.INPUT_P_DEREFERENCE(i-1), "!=", 
                                   "Skipper$$SKIPPER_INDEX$$[%i]" % i,
                                   skipper_door_id, i == 1)
            for i, letter in enumerate(EndSequence[1:], start=1)
        )
        delimiter_remainder_test_str = txt

    door_id_reload = dial_db.new_door_id()
    on_skip_range_open = get_on_skip_range_open(OnSkipRangeOpen, CloserPattern)

    # The main part
    code_str = blue_print(template_str,
                          [
                           ["$$DELIMITER_COMMENT$$",              delimiter_comment_str],
                           ["$$INPUT_P_INCREMENT$$",              Lng.INPUT_P_INCREMENT()],
                           ["$$INPUT_P_DECREMENT$$",              Lng.INPUT_P_DECREMENT()],
                           ["$$INPUT_GET$$",                      Lng.ACCESS_INPUT()],
                           ["$$IF_INPUT_EQUAL_DELIMITER_0$$",     Lng.IF_INPUT("==", "Skipper$$SKIPPER_INDEX$$[0]")],
                           ["$$ENDIF$$",                          Lng.END_IF()],
                           ["$$ENTRY$$",                          dial_db.get_label_by_door_id(skipper_door_id)],
                           ["$$RELOAD$$",                         dial_db.get_label_by_door_id(door_id_reload)],
                           ["$$GOTO_ENTRY$$",                     Lng.GOTO(skipper_door_id)],
                           ["$$INPUT_P_TO_LEXEME_START$$",        Lng.INPUT_P_TO_LEXEME_START()],
                           # When things were skipped, no change to acceptance flags or modes has
                           # happened. One can jump immediately to the start without re-entry preparation.
                           ["$$GOTO_AFTER_END_OF_SKIPPING$$",     Lng.GOTO(DoorIdAfter)], 
                           ["$$MARK_LEXEME_START$$",              Lng.LEXEME_START_SET()],
                           ["$$DELIMITER_REMAINDER_TEST$$",       delimiter_remainder_test_str],
                           ["$$ON_SKIP_RANGE_OPEN$$",             on_skip_range_open],
                          ])

    # Line and column number counting
    code_str, reference_p_f = __lc_counting_replacements(code_str, EndSequence)

    # The finishing touch
    code_str = blue_print(code_str,
                          [["$$SKIPPER_INDEX$$", __nice(skipper_index)],
                           ["$$GOTO_RELOAD$$",   Lng.GOTO(door_id_reload)]])

    if reference_p_f:
        variable_db.require("reference_p", Condition="QUEX_OPTION_COLUMN_NUMBER_COUNTING")

    variable_db.require_array("Skipper%i", Initial="{ %s }" % delimiter_str, ElementN=delimiter_length, Index=skipper_index)
    variable_db.require("Skipper%iL", "%i" % delimiter_length, Index=skipper_index)
    variable_db.require("text_end")

    variable_db.require("input") 

    return [ code_str ]
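
The 'delimiter remainder test' assembled above checks the tail of a multi-character end delimiter: once the first character has matched, the remaining characters are compared one by one, and any mismatch sends control back to the skipper entry. A hedged plain-Python restatement of that logic (buffer and position names are illustrative only):

# Hedged restatement of the generated remainder test in plain Python.
def matches_end_sequence(buffer, pos, end_sequence):
    """'buffer' is a sequence of code points; 'end_sequence' is assumed to
    have its first element already matched at 'pos', as in the skipper."""
    for i in range(1, len(end_sequence)):
        if pos + i >= len(buffer) or buffer[pos + i] != end_sequence[i]:
            return False   # mismatch: generated code jumps back to the skipper entry
    return True

# e.g. end of a C-style comment: end_sequence = [ord('*'), ord('/')]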
Example #54
0
def get_skipper(OpenerSequence, CloserSequence, CloserPattern, ModeName, OnSkipRangeOpen, DoorIdAfter):
    assert len(OpenerSequence) >= 1
    assert len(CloserSequence) >= 1
    assert OpenerSequence != CloserSequence

    skipper_index   = sm_index.get()
    skipper_door_id = dial_db.new_door_id(skipper_index)

    opener_str, opener_comment_str = get_character_sequence(OpenerSequence)
    opener_length = len(OpenerSequence)
    closer_str, closer_comment_str = get_character_sequence(CloserSequence)
    closer_length = len(CloserSequence)

    variable_db.require("reference_p", Condition="QUEX_OPTION_COLUMN_NUMBER_COUNTING")
    variable_db.require("counter")
    variable_db.require_array("Skipper%i_Opener", Initial="{ %s }" % opener_str, ElementN=opener_length, Index = skipper_index)
    variable_db.require("Skipper%i_OpenerEnd", "Skipper%i_Opener + (ptrdiff_t)%i" % (skipper_index, opener_length), Index = skipper_index) 
    variable_db.require("Skipper%i_Opener_it", "0x0", Index = skipper_index) 
    variable_db.require_array("Skipper%i_Closer", Initial="{ %s }" % closer_str, ElementN=closer_length, Index = skipper_index) 
    variable_db.require("Skipper%i_CloserEnd", "Skipper%i_Closer + (ptrdiff_t)%i" % (skipper_index, closer_length), Index = skipper_index) 
    variable_db.require("Skipper%i_Closer_it", "0x0", Index = skipper_index) 

    reference_p_def = "    __QUEX_IF_COUNT_COLUMNS(reference_p = QUEX_NAME(Buffer_tell_memory_adr)(&me->buffer));\n"
    before_reload   = "    __QUEX_IF_COUNT_COLUMNS_ADD((size_t)(QUEX_NAME(Buffer_tell_memory_adr)(&me->buffer)\n" + \
                      "                                - reference_p));\n" 
    after_reload    = "        __QUEX_IF_COUNT_COLUMNS(reference_p = QUEX_NAME(Buffer_tell_memory_adr)(&me->buffer));\n"

    if CloserSequence[-1] == ord('\n'):
        end_procedure  = "       __QUEX_IF_COUNT_LINES_ADD((size_t)1);\n"
        end_procedure += "       __QUEX_IF_COUNT_COLUMNS_SET((size_t)1);\n"
    else:
        end_procedure = "        __QUEX_IF_COUNT_COLUMNS_ADD((size_t)(QUEX_NAME(Buffer_tell_memory_adr)(&me->buffer)\n" + \
                        "                                    - reference_p));\n" 

    reload_door_id     = dial_db.new_door_id()
    on_skip_range_open = get_on_skip_range_open(OnSkipRangeOpen, CloserPattern, NestedF=True)

    code_str = blue_print(template_str, [
                   ["$$SKIPPER_INDEX$$",   __nice(skipper_index)],
                   #
                   ["$$OPENER_LENGTH$$",                  "%i" % opener_length],
                   ["$$INPUT_P_INCREMENT$$",              Lng.INPUT_P_INCREMENT()],
                   ["$$INPUT_P_DECREMENT$$",              Lng.INPUT_P_DECREMENT()],
                   ["$$INPUT_GET$$",                      Lng.ACCESS_INPUT()],
                   ["$$IF_INPUT_EQUAL_DELIMITER_0$$",     Lng.IF_INPUT("==", "Skipper$$SKIPPER_INDEX$$[0]")],
                   ["$$ENDIF$$",                          Lng.END_IF()],
                   ["$$ENTRY$$",                          Lng.LABEL(skipper_door_id)],
                   ["$$RELOAD$$",                         dial_db.get_label_by_door_id(reload_door_id)],
                   ["$$GOTO_AFTER_END_OF_SKIPPING$$",     Lng.GOTO(DoorIdAfter)], 
                   ["$$GOTO_RELOAD$$",                    Lng.GOTO(reload_door_id)],
                   ["$$INPUT_P_TO_LEXEME_START$$",        Lng.INPUT_P_TO_LEXEME_START()],
                   # When things were skipped, no change to acceptance flags or modes has
                   # happened. One can jump immediately to the start without re-entry preparation.
                   ["$$GOTO_ENTRY$$",                     Lng.GOTO(skipper_door_id)],
                   ["$$MARK_LEXEME_START$$",              Lng.LEXEME_START_SET()],
                   ["$$ON_SKIP_RANGE_OPEN$$",             on_skip_range_open],
                   #
                   ["$$LC_COUNT_COLUMN_N_POINTER_DEFINITION$$", reference_p_def],
                   ["$$LC_COUNT_IN_LOOP$$",                     line_column_counter_in_loop()],
                   ["$$LC_COUNT_END_PROCEDURE$$",               end_procedure],
                   ["$$LC_COUNT_BEFORE_RELOAD$$",               before_reload],
                   ["$$LC_COUNT_AFTER_RELOAD$$",                after_reload],
               ])

    return [ code_str ]
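
Example #54 generates a skipper for nested ranges: the 'counter' variable tracks the nesting depth, each opener sequence increments it, each closer sequence decrements it, and skipping ends when the depth returns to zero. The sketch below restates that loop in plain Python; the generated C code works on the lexer buffer instead and triggers a reload when it runs out of content:

# Hedged sketch of the nesting logic behind the generated nested-range skipper.
def skip_nested_range(text, pos, opener, closer):
    depth = 1                          # the initial opener has already been consumed
    while pos < len(text):
        if text.startswith(closer, pos):
            depth -= 1
            pos   += len(closer)
            if depth == 0:
                return pos             # skipping done; analysis continues here
        elif text.startswith(opener, pos):
            depth += 1
            pos   += len(opener)
        else:
            pos += 1
    raise EOFError("skip range still open at end of input")   # cf. $$ON_SKIP_RANGE_OPEN$$

# skip_nested_range("a /* x /* y */ z */ b", 4, "/*", "*/") --> 19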
Example #55
0
    def create_new_state(self, AcceptanceF=False, StateIdx=None):
        if StateIdx is None: new_state_index = state_machine_index.get()
        else:                new_state_index = StateIdx

        self.states[new_state_index] = State(AcceptanceF)
        return new_state_index
Example #56
0
    def __init__(self, Code, Name=""):
        Processor.__init__(self, index.get(), Entry())
        self.__incidence_id = None
        self.__code         = Code
        self.__name         = Name
Example #57
0
def do(SM_List):
    """Intersection: 

       Only match on patterns which are matched by all state machines
       in 'SM_List'.

       (C) 2013 Frank-Rene Schaefer
       ________________________________________________________________________

       A lexeme which matches all patterns must reach an acceptance state in
       each given state machine. That is,

          for each state machine there is a path from the init
          state to an acceptance state which is triggered by the
          characters of the lexeme.

       Going forward does not work, since a path cannot be discarded as soon
       as one state machine fails to fit.

       Now, consider the super-state consisting of all acceptance states
       of all state machines. There must be a way backward from this
       super-acceptance-state to the init states. As soon as a path is
       interrupted, it can be thrown away. This can be achieved by reversing
       the state machines and combining them into a single one.

       Reverse all state machines; the epsilon closure of the init state
       corresponds to the super acceptance state. The transitions in the
       super-state machine correspond to the way backwards through the
       original state machines. For each feasible state in the super-state
       machine, create a new state.

       The acceptance states of the reversed state machines correspond to
       the init states of the original state machines. If a super state
       contains such an acceptance state from every reversed state machine,
       it can become an acceptance state of the intersection, because a
       complete path has been found. The resulting state machine must be
       reversed at the end.

    """
    for sm in SM_List:
        if special.is_none(sm):         # If one state machine is '\None'
            return special.get_none()   # then, the intersection is '\None'

    reverse_sm_list          = [ reverse.do(sm)                            for sm in SM_List ]
    state_id_set_list        = [ set(sm.states.iterkeys())                 for sm in reverse_sm_list ]
    acceptance_state_id_list = [ set(sm.get_acceptance_state_index_list()) for sm in reverse_sm_list ]

    def has_one_from_each(StateIDSet_List, StateIDSet):
        """StateIDSet_List[i] is the set of state indices from state 
        machine 'i' in 'reverse_sm_list'. 

        RETURNS: True -- If the StateIDSet has at least one state 
                         from every state machine.
                 False -- If there is at least one state machine 
                          that has no state in 'StateIDSet'.
        """
        for state_id_set in StateIDSet_List:
            if state_id_set.isdisjoint(StateIDSet): 
                return False
        return True

    def get_merged_state(AcceptanceStateIndexList, EpsilonClosure):
        """Create the new target state in the state machine
           Accept only if all accept.
        """
        acceptance_f = has_one_from_each(AcceptanceStateIndexList, 
                                         EpsilonClosure)
        return State(AcceptanceF=acceptance_f)

    # Plain merge of all states of all state machines with an 
    # epsilon transition from the init state to all init states
    # of the reverse_sm
    sm = StateMachine()
    for rsm in reverse_sm_list:
        sm.states.update(rsm.states)
        sm.add_epsilon_transition(sm.init_state_index, rsm.init_state_index) 

    initial_state_epsilon_closure = sm.get_epsilon_closure(sm.init_state_index) 

    InitState = get_merged_state(acceptance_state_id_list, 
                                 initial_state_epsilon_closure)

    result    = StateMachine(InitStateIndex=index.get(), InitState=InitState)

    # (*) prepare the initial worklist
    worklist = [ ( result.init_state_index, initial_state_epsilon_closure) ]

    epsilon_closure_db = sm.get_epsilon_closure_db()

    while len(worklist) != 0:
        # 'start_state_index' is the index of an **existing** state in the state machine.
        # It was either created above, in StateMachine's constructor, or as a target
        # state index.
        start_state_index, start_state_combination = worklist.pop()
 
        # (*) compute the elementary trigger sets together with the 
        #     epsilon closure of target state combinations that they trigger to.
        #     In other words: find the ranges of characters where the state triggers to
        #     a unique state combination. E.g:
        #                Range        Target State Combination 
        #                [0:23]   --> [ State1, State2, State10 ]
        #                [24:60]  --> [ State1 ]
        #                [61:123] --> [ State2, State10 ]
        #
        elementary_trigger_set_infos = sm.get_elementary_trigger_sets(start_state_combination,
                                                                      epsilon_closure_db)
        ## DEBUG_print(start_state_combination, elementary_trigger_set_infos)

        # (*) loop over all elementary trigger sets
        for epsilon_closure_of_target_state_combination, trigger_set in elementary_trigger_set_infos.iteritems():
            #  -- if there is no trigger to the given target state combination, then drop it
            if trigger_set.is_empty(): 
                continue
            elif not has_one_from_each(state_id_set_list, epsilon_closure_of_target_state_combination):
                continue

            # -- add a new target state representing the state combination
            #    (if this did not happen yet)
            target_state_index = \
                 map_state_combination_to_index(epsilon_closure_of_target_state_combination)

            # -- if target state combination was not considered yet, then create 
            #    a new state in the state machine
            if not result.states.has_key(target_state_index):
                result.states[target_state_index] = get_merged_state(acceptance_state_id_list, 
                                                                     epsilon_closure_of_target_state_combination)

                worklist.append((target_state_index, epsilon_closure_of_target_state_combination))  

            # -- add the transition 'start state to target state'
            result.add_transition(start_state_index, trigger_set, target_state_index)

    if not result.has_acceptance_states():
        return StateMachine()
    else:
        return beautifier.do(reverse.do(result))
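
The acceptance rule described in the docstring above can be checked in isolation: a super state becomes accepting only if its epsilon closure contains at least one acceptance state of every reversed state machine. A small self-contained illustration with made-up state ids:

# Hedged illustration of the 'has_one_from_each' acceptance criterion.
def has_one_from_each(state_id_set_list, state_id_set):
    return all(not s.isdisjoint(state_id_set) for s in state_id_set_list)

acceptance_ids = [ {10, 11},   # acceptance states of reversed sm 0
                   {20} ]      # acceptance states of reversed sm 1

assert     has_one_from_each(acceptance_ids, {10, 20, 42})   # one from each  => accepting
assert not has_one_from_each(acceptance_ids, {11, 42})       # sm 1 missing   => not accepting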
Example #58
0
    def specify(StateIndex, DoorSubIndex):
        if StateIndex is None:   state_index = sm_index.get() # generate a new StateIndex
        else:                    state_index = StateIndex
        if DoorSubIndex is None: door_sub_index = self.max_door_sub_index(state_index) + 1
        else:                    door_sub_index = DoorSubIndex
        return state_index, door_sub_index
Example #59
0
def get_skipper(OpenerSequence, CloserSequence, Mode=None, IndentationCounterTerminalID=None, OnSkipRangeOpenStr=""):
    assert OpenerSequence.__class__  == list
    assert len(OpenerSequence)       >= 1
    assert map(type, OpenerSequence) == [int] * len(OpenerSequence)
    assert CloserSequence.__class__  == list
    assert len(CloserSequence)       >= 1
    assert map(type, CloserSequence) == [int] * len(CloserSequence)
    assert OpenerSequence != CloserSequence

    LanguageDB    = Setup.language_db

    skipper_index = sm_index.get()

    opener_str, opener_comment_str = get_character_sequence(OpenerSequence)
    opener_length = len(OpenerSequence)
    closer_str, closer_comment_str = get_character_sequence(CloserSequence)
    closer_length = len(CloserSequence)

    if not end_delimiter_is_subset_of_indentation_counter_newline(Mode, CloserSequence):
        goto_after_end_of_skipping_str = LanguageDB.GOTO(E_StateIndices.ANALYZER_REENTRY)
    else:
        # If there is indentation counting involved, then the counter's terminal id must
        # be determined at this place.
        assert IndentationCounterTerminalID is not None
        # If the ending delimiter is a subset of what the 'newline' pattern triggers 
        # in indentation counting => move on to the indentation counter.
        goto_after_end_of_skipping_str = LanguageDB.GOTO_TERMINAL(IndentationCounterTerminalID)

    if OnSkipRangeOpenStr != "": on_skip_range_open_str = OnSkipRangeOpenStr
    else:                        on_skip_range_open_str = get_on_skip_range_open(Mode, CloserSequence)

    local_variable_db = {}
    variable_db.enter(local_variable_db, "reference_p", Condition="QUEX_OPTION_COLUMN_NUMBER_COUNTING")
    # variable_db.enter(local_variable_db, "text_end")
    variable_db.enter(local_variable_db, "counter")
    variable_db.enter(local_variable_db, "Skipper%i_Opener",    "{ %s }" % opener_str, ElementN=opener_length, 
                                         Index = skipper_index)
    variable_db.enter(local_variable_db, "Skipper%i_OpenerEnd", 
                                         "Skipper%i_Opener + (ptrdiff_t)%i" % (skipper_index, opener_length),
                                         Index = skipper_index) 
    variable_db.enter(local_variable_db, "Skipper%i_Opener_it", "0x0", 
                                         Index = skipper_index) 
    variable_db.enter(local_variable_db, "Skipper%i_Closer",    "{ %s }" % closer_str, ElementN=closer_length, 
                                         Index = skipper_index) 
    variable_db.enter(local_variable_db, "Skipper%i_CloserEnd", 
                                         "Skipper%i_Closer + (ptrdiff_t)%i" % (skipper_index, closer_length),
                                         Index = skipper_index) 
    variable_db.enter(local_variable_db, "Skipper%i_Closer_it", "0x0", 
                                         Index = skipper_index) 

   
    reference_p_def = "    __QUEX_IF_COUNT_COLUMNS(reference_p = QUEX_NAME(Buffer_tell_memory_adr)(&me->buffer));\n"

    reference_p_def = "    __QUEX_IF_COUNT_COLUMNS(reference_p = QUEX_NAME(Buffer_tell_memory_adr)(&me->buffer));\n"
    before_reload   = "    __QUEX_IF_COUNT_COLUMNS_ADD((size_t)(QUEX_NAME(Buffer_tell_memory_adr)(&me->buffer)\n" + \
                      "                                - reference_p));\n" 
    after_reload    = "        __QUEX_IF_COUNT_COLUMNS(reference_p = QUEX_NAME(Buffer_tell_memory_adr)(&me->buffer));\n"

    if CloserSequence[-1] == ord('\n'):
        end_procedure  = "       __QUEX_IF_COUNT_LINES_ADD((size_t)1);\n"
        end_procedure += "       __QUEX_IF_COUNT_COLUMNS_SET((size_t)1);\n"
    else:
        end_procedure = "        __QUEX_IF_COUNT_COLUMNS_ADD((size_t)(QUEX_NAME(Buffer_tell_memory_adr)(&me->buffer)\n" + \
                        "                                    - reference_p));\n" 

    code_str = blue_print(template_str,
                          [
                           ["$$SKIPPER_INDEX$$",   __nice(skipper_index)],
                           #
                           ["$$OPENER_LENGTH$$",                  "%i" % opener_length],
                           ["$$INPUT_P_INCREMENT$$",              LanguageDB.INPUT_P_INCREMENT()],
                           ["$$INPUT_P_DECREMENT$$",              LanguageDB.INPUT_P_DECREMENT()],
                           ["$$INPUT_GET$$",                      LanguageDB.ACCESS_INPUT()],
                           ["$$IF_INPUT_EQUAL_DELIMITER_0$$",     LanguageDB.IF_INPUT("==", "Skipper$$SKIPPER_INDEX$$[0]")],
                           ["$$ENDIF$$",                          LanguageDB.END_IF()],
                           ["$$ENTRY$$",                          LanguageDB.LABEL(skipper_index)],
                           ["$$RELOAD$$",                         get_label("$reload", skipper_index)],
                           ["$$GOTO_AFTER_END_OF_SKIPPING$$",     goto_after_end_of_skipping_str], 
                           ["$$GOTO_RELOAD$$",                    get_label("$reload", skipper_index)],
                           ["$$INPUT_P_TO_LEXEME_START$$",        LanguageDB.INPUT_P_TO_LEXEME_START()],
                           # When things were skipped, no change to acceptance flags or modes has
                           # happened. One can jump immediately to the start without re-entry preparation.
                           ["$$GOTO_ENTRY$$",                     LanguageDB.GOTO(skipper_index)],
                           ["$$MARK_LEXEME_START$$",              LanguageDB.LEXEME_START_SET()],
                           ["$$ON_SKIP_RANGE_OPEN$$",             on_skip_range_open_str],
                           #
                           ["$$LC_COUNT_COLUMN_N_POINTER_DEFINITION$$", reference_p_def],
                           ["$$LC_COUNT_IN_LOOP$$",                     line_column_counter_in_loop()],
                           ["$$LC_COUNT_END_PROCEDURE$$",               end_procedure],
                           ["$$LC_COUNT_BEFORE_RELOAD$$",               before_reload],
                           ["$$LC_COUNT_AFTER_RELOAD$$",                after_reload],
                          ])

    return code_str, local_variable_db
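
The $$LC_COUNT_END_PROCEDURE$$ replacement above encodes a small counting decision: if the closing delimiter ends in a newline, the skipped region ends a line, so one line is added and the column counter restarts at one; otherwise, the number of characters skipped since 'reference_p' is added to the column count. A hedged restatement in plain Python (counter and pointer names are illustrative):

# Hedged restatement of the end-of-skipping line/column bookkeeping.
def count_at_end_of_skipping(closer_sequence, line_n, column_n, reference_p, input_p):
    if closer_sequence[-1] == ord('\n'):
        return line_n + 1, 1                                  # closer ends the line; column restarts
    else:
        return line_n, column_n + (input_p - reference_p)     # add columns skipped since reference_p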
Example #60
0
def do(StateMachineList, CommonTerminalStateF=True, CloneF=True):
    """Connect state machines paralell.

       CommonTerminalStateF tells whether the state machines shall trigger 
                            to a common terminal. This may help nfa-to-dfa
                            or hopcroft minimization for ISOLATED patterns.

                            A state machine that consists of the COMBINATION
                            of patterns MUST set this flag to 'False'.

       CloneF               Controls if state machine list is cloned or not.
                            If the single state machines are no longer required after
                            construction, the CloneF can be set to False.

                            If Cloning is disabled the state machines themselves
                            will be altered--which brings some advantage in speed.
    """
    assert type(StateMachineList) == list
    assert len(StateMachineList) != 0
    for x in StateMachineList:
        assert isinstance(x, StateMachine), x.__class__.__name__
              
    # filter out empty state machines from consideration
    state_machine_list       = [ sm for sm in StateMachineList if not (sm.is_empty() or special.is_none(sm))]
    empty_state_machine_list = [ sm for sm in StateMachineList if     (sm.is_empty() or special.is_none(sm))]

    if len(state_machine_list) < 2:
        if len(state_machine_list) < 1: result = StateMachine()
        elif CloneF:                    result = state_machine_list[0].clone()
        else:                           result = state_machine_list[0]

        return __consider_empty_state_machines(result, empty_state_machine_list)

    # (*) need to clone the state machines, i.e. provide their internal
    #     states with new ids, but the 'behavior' remains. This allows
    #     state machines to appear twice, or to be used in 'larger'
    #     conglomerates.
    if CloneF: clone_list = map(lambda sm: sm.clone(), state_machine_list)
    else:      clone_list = state_machine_list

    # (*) collect all transitions from both state machines into a single one
    #     (clone to ensure unique identifiers of states)
    new_init_state = State.new_merged_core_state((clone.get_init_state() for clone in clone_list), 
                                                 ClearF=True)
    result         = StateMachine(InitState=new_init_state)

    for clone in clone_list:
        result.states.update(clone.states)

    # (*) add additional **init** and **end** state
    #     NOTE: when the result state machine was created, it already contains a
    #           new initial state index. Thus, at this point only the new terminal
    #           state has to be created.
    #     NOTE: it is essential that the acceptance flag stays False, at this
    #           point in time, so that the mounting operations only happen on
    #           the old acceptance states. Later the acceptance state is raised
    #           to 'accepted' (see below)
    new_terminal_state_index = -1L
    if CommonTerminalStateF:
        new_terminal_state_index = index.get()
        result.states[new_terminal_state_index] = \
                    State.new_merged_core_state(result.get_acceptance_state_list(), \
                                                ClearF=True)
    
    # (*) Connect from the new initial state to the initial states of the
    #     clones via epsilon transition. 
    #     Connect from each success state of the clones to the new end state
    #     via epsilon transition.
    for clone in clone_list:
        result.mount_to_initial_state(clone.init_state_index)

    if CommonTerminalStateF:
        result.mount_to_acceptance_states(new_terminal_state_index,
                                          CancelStartAcceptanceStateF=False)

    return __consider_empty_state_machines(result, empty_state_machine_list)
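
The parallel connection described in the docstring boils down to a fresh init state that reaches every sub-machine's init state via an epsilon transition (optionally followed by mounting a common terminal state). The toy sketch below shows the idea on a throw-away NFA representation rather than on the quex StateMachine API:

# Hedged toy sketch of "connect state machines in parallel" via epsilon
# transitions; states are assumed to carry unique ids across the list
# (cf. the cloning step above).
def connect_parallel(nfa_list):
    """nfa_list: list of (init_state, transitions, acceptance_set), where
    'transitions' maps (state, character) -> set of target states and the
    character None stands for an epsilon transition."""
    new_init    = object()                         # fresh, unique init state
    transitions = {(new_init, None): set()}
    acceptance  = set()
    for init, trans, accepts in nfa_list:
        transitions[(new_init, None)].add(init)    # epsilon: new init -> sub-machine init
        for key, targets in trans.items():
            transitions.setdefault(key, set()).update(targets)
        acceptance |= accepts
    return new_init, transitions, acceptance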