Code Example #1
File: core.py Project: dkopecek/amplify
    def _branch_table_core(self, Selector, CaseList, get_case, DefaultConsequence=None):
        def get_content(C):
            if type(C) == list: return "".join(C)
            else:               return C

        def iterable(CaseList, DefaultConsequence):
            item, effect = CaseList[0]
            for item_ahead, effect_ahead in CaseList[1:]:
                if effect_ahead == effect: 
                    yield item, ""
                else:
                    yield item, effect
                item   = item_ahead
                effect = effect_ahead
            yield item, effect
            if DefaultConsequence is not None:
                yield None, DefaultConsequence

        txt = [ "switch( %s ) {\n" % Selector ]
        txt.extend(
            flatten_list_of_lists(
                get_case(item, text, get_content)
                for item, text in iterable(CaseList, DefaultConsequence)
            )
        )
        txt.append("}\n")
        return txt
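
Note: every example on this page relies on the same helper, flatten_list_of_lists, whose definition is not shown here. A minimal sketch that is consistent with how it is called (an iterable of lists or generator expressions in, one flat list out) could look as follows; this is an assumption, not the project's actual implementation:

from itertools import chain

def flatten_list_of_lists(iterable_of_lists):
    # Assumed behavior: concatenate an iterable of lists (or generators)
    # into a single flat list, preserving order.
    return list(chain.from_iterable(iterable_of_lists))
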
Code Example #2
File: find.py Project: dkopecek/amplify
def do(TheAnalyzer, CompressionType, AvailableStateIndexSet):
    """Starting point of the search for single character traces in the 
    state machine (TheAnalyzer). For each state in the state machine
    try to find branches of paths. 
    
    States which are closer to the init state are searched first. This
    way a set of the longest paths can be built quickly, which makes
    searches from follower states unnecessary.
    """
    # depth_db: state_index ---> distance from init state.
    # We first search for the longest paths, so that searches for sub paths
    # become unnecessary. This way computation time is reduced.
    depth_db = TheAnalyzer.get_depth_db()

    iterable_state_indices = (                                                \
        i for i in TheAnalyzer.state_db.iterkeys()                            \
        if i in AvailableStateIndexSet and i != TheAnalyzer.init_state_index \
    )

    path_list = flatten_list_of_lists(
        CharacterPathList_find(TheAnalyzer, state_index, CompressionType, AvailableStateIndexSet)
        for state_index in sorted(iterable_state_indices, key=lambda i: depth_db[i])
    )

    return path_list
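
As an illustration of the ordering used above, here is a minimal sketch with made-up data; depth_db, the available set, and the init state index below are hypothetical stand-ins for the analyzer's real data:

# state index -> distance from the init state (hypothetical values)
depth_db         = {0: 0, 1: 1, 2: 1, 3: 2}
available        = {1, 2, 3}
init_state_index = 0

ordered = sorted((i for i in depth_db if i in available and i != init_state_index),
                 key=lambda i: depth_db[i])
print(ordered)   # [1, 2, 3] -- states closest to the init state are searched first
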
Code Example #3
def do_from_leaf_to_root(TheState,
                         CmdTree,
                         LeafDoorId,
                         done_set,
                         GlobalEntryF=False):
    """Code the sequence from a leaf of the command tree to its root. This
    avoids unnecessary gotos from outer nodes to their parents. It stops
    whenever a parent is already implemented. Then, the function 'code()'
    automatically inserts a 'goto parent' at the end of the node.

    RETURNS: list of strings

    The list of strings implements the nodes from a command tree leaf over
    all of its parents to the root, or to the first already implemented
    parent.
    """
    txt = []
    if not GlobalEntryF:
        # A global entry into the analyzer is entered directly at function begin,
        # so no 'assert unreachable' is required there. For any other entry it is.
        txt.append("\n\n    %s\n" % Lng.UNREACHABLE)

    txt.extend(
        flatten_list_of_lists(
            __code(node, TheState, done_set, GlobalEntryF)
            for node in CmdTree.iterable_to_root(LeafDoorId, done_set)))
    return txt
Code Example #4
def do(TheAnalyzer, CompressionType, AvailableStateIndexSet):
    """Starting point of the search for single character traces in the 
    state machine (TheAnalyzer). For each state in the state machine
    try to find branches of paths. 
    
    States which are closer to the init state are searched first. This
    way a set of the longest paths can be built quickly, which makes
    searches from follower states unnecessary.
    """
    # depth_db: state_index ---> distance from init state.
    # We first search for the longest paths, so that searches for sub paths
    # become unnecessary. This way computation time is reduced.
    depth_db = TheAnalyzer.get_depth_db()

    iterable_state_indices = (                                                \
        i for i in TheAnalyzer.state_db.iterkeys()                            \
        if i in AvailableStateIndexSet and i != TheAnalyzer.init_state_index \
    )

    path_list = flatten_list_of_lists(
        CharacterPathList_find(TheAnalyzer, state_index, CompressionType,
                               AvailableStateIndexSet)
        for state_index in sorted(iterable_state_indices,
                                  key=lambda i: depth_db[i]))

    return path_list
Code Example #5
File: interval_handling.py Project: yifsun/amplify
    def get_number_list(self):
        """RETURNS: -- List of all numbers which are contained in the number set.
                    -- None, if one border is 'sys.maxint'. The list would be too big.
        """
        return flatten_list_of_lists(
            xrange(interval.begin, interval.end)
            for interval in self.__intervals)
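
A small self-contained illustration of what the method computes, using a hypothetical Interval stand-in; the half-open [begin, end) convention is inferred from the xrange(interval.begin, interval.end) call above:

from itertools import chain

class Interval(object):
    def __init__(self, begin, end):
        self.begin = begin   # inclusive
        self.end   = end     # exclusive

intervals = [Interval(0, 3), Interval(10, 12)]
print(list(chain.from_iterable(range(iv.begin, iv.end) for iv in intervals)))
# [0, 1, 2, 10, 11]
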
Code Example #6
File: core.py Project: yifsun/amplify
    def _branch_table_core(self,
                           Selector,
                           CaseList,
                           get_case,
                           DefaultConsequence=None):
        def get_content(C):
            if type(C) == list: return "".join(C)
            else: return C

        def iterable(CaseList, DefaultConsequence):
            item, effect = CaseList[0]
            for item_ahead, effect_ahead in CaseList[1:]:
                if effect_ahead == effect:
                    yield item, ""
                else:
                    yield item, effect
                item = item_ahead
                effect = effect_ahead
            yield item, effect
            if DefaultConsequence is not None:
                yield None, DefaultConsequence

        txt = ["switch( %s ) {\n" % Selector]
        txt.extend(
            flatten_list_of_lists(
                get_case(item, text, get_content)
                for item, text in iterable(CaseList, DefaultConsequence)))
        txt.append("}\n")
        return txt
Code Example #7
File: interval_handling.py Project: dkopecek/amplify
    def get_number_list(self):
        """RETURNS: -- List of all numbers which are contained in the number set.
                    -- None, if one border is 'sys.maxint'. The list would be too big.
        """
        return flatten_list_of_lists(
            xrange(interval.begin, interval.end)
            for interval in self.__intervals
        )
Code Example #8
File: transformation.py Project: dkopecek/amplify
def do_sequence(Sequence, TrafoInfo=None, fh=-1):
    if TrafoInfo is None:
        TrafoInfo = Setup.buffer_codec

    return flatten_list_of_lists(
        do_character(x, TrafoInfo, fh)
        for x in Sequence
    )
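
The per-character transformation do_character is not shown on this page and depends on TrafoInfo. As a rough, hypothetical stand-in that illustrates the same flatten-per-element pattern, each code point could be mapped to its UTF-8 code units:

def do_character_utf8(code_point):
    # Hypothetical stand-in (Python 3): one code point -> list of UTF-8 code units.
    return list(chr(code_point).encode("utf-8"))

sequence = [0x41, 0xE9, 0x20AC]   # 'A', 'é', '€'
encoded  = [unit for cp in sequence for unit in do_character_utf8(cp)]
print(encoded)   # [65, 195, 169, 226, 130, 172]
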
Code Example #9
File: mode_option.py Project: yifsun/amplify
    def value_list(self, Name):
        """The content of a value is a sequence, and the return value of this
        function is a concatenated list of all listed option setting values.
        """
        setting_list = self.__get_setting_list(Name)
        if setting_list is None: return None

        info = mode_option_info_db[Name]
        if info.content_is_list():
            result = flatten_list_of_lists(x.value for x in setting_list)
        else:
            result = [x.value for x in setting_list]

        return result
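
A minimal illustration of the two branches above with made-up data; the _Setting class and the values are hypothetical, and mode_option_info_db is not reproduced:

from itertools import chain

class _Setting(object):
    def __init__(self, value):
        self.value = value

# content_is_list() is True: every setting's value is itself a list -> concatenate.
list_valued = [_Setting(["a", "b"]), _Setting(["c"])]
print(list(chain.from_iterable(x.value for x in list_valued)))   # ['a', 'b', 'c']

# content_is_list() is False: collect the scalar values as they are.
scalar_valued = [_Setting("a"), _Setting("b")]
print([x.value for x in scalar_valued])                          # ['a', 'b']
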
Code Example #10
File: mode_option.py Project: dkopecek/amplify
    def value_list(self, Name):
        """The content of a value is a sequence, and the return value of this
        function is a concatenated list of all listed option setting values.
        """
        setting_list = self.__get_setting_list(Name)
        if setting_list is None: return None

        info = mode_option_info_db[Name]
        if info.content_is_list():
            result = flatten_list_of_lists(
                x.value for x in setting_list
            )
        else:
            result = [ x.value for x in setting_list ]

        return result
Code Example #11
def do_leafs(TheState, CmdTree, done_set):
    """Create code starting from the 'leafs' of the command tree. The leafs are 
    the entry points from other states, i.e. the 'doors'.

    RETURNS: List of strings.
    """
    outer_door_id_set = TheState.entry.door_id_set()

    txt_list = []
    for door_id in outer_door_id_set:
        if door_id in done_set: continue
        branch_txt = do_from_leaf_to_root(TheState, CmdTree, door_id, done_set)
        txt_list.append(branch_txt)

    # Flatten the list of lists, where the longest list has to come last.
    result = flatten_list_of_lists(sorted(txt_list, key=lambda x: len(x)))
    return result
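
The comment above requires the longest branch to come last. A tiny illustration of that ordering with hypothetical branch texts:

txt_list = [["a;\n", "b;\n", "c;\n"], ["x;\n"], ["p;\n", "q;\n"]]
flat = [line for branch in sorted(txt_list, key=len) for line in branch]
print("".join(flat))   # x; then p; q; then a; b; c; -- shortest branch first, longest last
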
Code Example #12
File: entry.py Project: dkopecek/amplify
def do_leafs(TheState, CmdTree, done_set):
    """Create code starting from the 'leafs' of the command tree. The leafs are 
    the entry points from other states, i.e. the 'doors'.

    RETURNS: List of strings.
    """
    outer_door_id_set = TheState.entry.door_id_set()

    txt_list = []
    for door_id in outer_door_id_set:
        if door_id in done_set: continue
        branch_txt = do_from_leaf_to_root(TheState, CmdTree, door_id, done_set)
        txt_list.append(branch_txt)

    # Flatten the list of lists, where the longest list has to come last.
    result = flatten_list_of_lists(sorted(txt_list, key=lambda x: len(x)))
    return result
Code Example #13
    def get_tree_text(self, CommandAliasDb, Node=None, Depth=0):
        """__dive: indicate recursion. May be solved by 'TreeWalker'.
        """
        if Node is None:
            Node = self.root

        txt = flatten_list_of_lists(
            self.get_tree_text(CommandAliasDb, self.door_db[door_id], Depth + 1)
            for door_id in sorted(Node.child_set))

        txt.extend([
            "    " * (Depth + 1), ".--",
            str(Node.door_id),
            " [%s]\n" % ("".join("%s " % CommandAliasDb[cmd]
                                 for cmd in Node.command_list)).strip()
        ])
        return txt
Code Example #14
File: tree.py Project: dkopecek/amplify
    def get_tree_text(self, CommandAliasDb, Node=None, Depth=0):
        """__dive: indicate recursion. May be solved by 'TreeWalker'.
        """
        if Node is None: 
            Node = self.root

        txt = flatten_list_of_lists(
            self.get_tree_text(CommandAliasDb, self.door_db[door_id], Depth+1)
            for door_id in sorted(Node.child_set)
        )

        txt.extend([
            "    " * (Depth + 1), 
            ".--", 
            str(Node.door_id), 
            " [%s]\n" % ("".join("%s " % CommandAliasDb[cmd] for cmd in Node.command_list)).strip()
        ])
        return txt
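
For orientation, the extend() call above emits one line per node of the form '<indent>.--<door id> [<aliases>]'. A hypothetical example of such a line, with all values made up:

CommandAliasDb = {"cmd_a": "A", "cmd_b": "B"}
Depth          = 1
door_id        = "Door(42)"
command_list   = ["cmd_a", "cmd_b"]

line = ["    " * (Depth + 1), ".--", door_id,
        " [%s]\n" % ("".join("%s " % CommandAliasDb[c] for c in command_list)).strip()]
print(repr("".join(line)))   # '        .--Door(42) [A B]\n'
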
Code Example #15
File: entry.py Project: dkopecek/amplify
def do_from_leaf_to_root(TheState, CmdTree, LeafDoorId, done_set, GlobalEntryF=False):
    """Code the sequence from a leaf of the command tree to its root. This
    avoids unnecessary gotos from outer nodes to their parents. It stops
    whenever a parent is already implemented. Then, the function 'code()'
    automatically inserts a 'goto parent' at the end of the node.

    RETURNS: list of strings

    The list of strings implements the nodes from a command tree leaf over
    all of its parents to the root, or to the first already implemented
    parent.
    """
    txt = []
    if not GlobalEntryF:
        # A global entry into the analyzer is entered directly at function begin,
        # so no 'assert unreachable' is required there. For any other entry it is.
        txt.append("\n\n    %s\n" % Lng.UNREACHABLE)

    txt.extend( 
        flatten_list_of_lists(
            __code(node, TheState, done_set, GlobalEntryF)
            for node in CmdTree.iterable_to_root(LeafDoorId, done_set)
        )
    )
    return txt
Code Example #16
File: transformation.py Project: yifsun/amplify
def do_sequence(Sequence, TrafoInfo=None, fh=-1):
    if TrafoInfo is None:
        TrafoInfo = Setup.buffer_codec

    return flatten_list_of_lists(
        do_character(x, TrafoInfo, fh) for x in Sequence)
Code Example #17
File: paths_to_state.py Project: dkopecek/amplify
    def accepting_state_index_list(self):
        return flatten_list_of_lists(
            (x.accepting_state_index for x in acceptance_sequence)
            for acceptance_sequence in self.__list
        )
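
The same flattening also works when the inner elements are generator expressions, as in the method above. A small illustration with hypothetical acceptance sequences:

from itertools import chain

class _Acceptance(object):
    def __init__(self, idx):
        self.accepting_state_index = idx

sequences = [[_Acceptance(1), _Acceptance(2)], [_Acceptance(3)]]
print(list(chain.from_iterable(
    (x.accepting_state_index for x in seq) for seq in sequences)))   # [1, 2, 3]
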
Code Example #18
File: range.py Project: yifsun/amplify
def TRY_terminal_delimiter_sequence(Mode, UnicodeSequence, UnicodeEndSequencePattern, UponReloadDoneAdr):
    UnicodeEndSequencePattern.prepare_count_info(Mode.counter_db, 
                                                 Setup.buffer_codec)

    # Transform letter by letter.
    sequence = flatten_list_of_lists(
        transformation.do_character(x, Setup.buffer_codec)
        for x in UnicodeSequence
    )

    EndSequenceChunkN = len(sequence)

    # Column and line number count for closing delimiter
    run_time_counting_required_f, counter_txt = \
            counter_for_pattern.get(UnicodeEndSequencePattern, ShiftF=False)
    # The closing delimiter must be a string. As such, it has a pre-determined size.
    assert not run_time_counting_required_f 

    # Column and line number count for 'normal' character.
    tm, column_counter_per_chunk = \
            counter.get_XXX_counter_map(Mode.counter_db, "me->buffer._input_p", 
                                    Trafo=Setup.buffer_codec)

    dummy, character_count_txt, dummy = \
            counter.get_core_step(tm, "me->buffer._input_p")


    txt = []
    for i, x in enumerate(sequence):
        txt.append(i)
        txt.append(Lng.IF_INPUT("==", "0x%X" % x, FirstF=True)) # Opening the 'if'
        txt.append(i+1)
        txt.append("%s\n" % Lng.INPUT_P_INCREMENT())

    Lng.INDENT(counter_txt, i+1)
    if column_counter_per_chunk:
        txt.append(i+1)
        if column_counter_per_chunk == UnicodeEndSequencePattern.count_info().column_n_increment_by_lexeme_length:
            txt += Lng.REEFERENCE_P_COLUMN_ADD("me->buffer._input_p", 
                                              column_counter_per_chunk) 
        else:
            txt += Lng.REEFERENCE_P_COLUMN_ADD("(me->buffer._input_p - %i)" % EndSequenceChunkN, 
                                              column_counter_per_chunk) 
            txt.append(i+1)
            txt.extend(counter_txt)
    txt.append(i+1)
    txt.append("break;\n")

    for i, x in r_enumerate(sequence):
        txt.append(i)
        txt.append("%s"   % Lng.IF_INPUT("==", "0x%X" % Setup.buffer_limit_code, FirstF=False)) # Check BLC
        txt.append(i+1)
        txt.append("%s\n" % Lng.LEXEME_START_SET("me->buffer._input_p - %i" % i))
        txt.append(i+1)
        txt.append("%s\n" % Lng.GOTO_RELOAD(UponReloadDoneAdr, True, engine.FORWARD))  # Reload
        if i == 0: break
        txt.append(i)
        txt.append("%s"   % Lng.ELSE)
        txt.append(i+1)
        txt.append("%s\n" % Lng.INPUT_P_ADD(- i))
        txt.append(i)
        txt.append("%s\n" % Lng.END_IF())

    txt.append(i)
    txt.append("%s\n" % Lng.END_IF())

    txt.extend(character_count_txt)

    # print "##DEBUG:\n%s" % "".join(Lng.GET_PLAIN_STRINGS(txt))
    return txt
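
Example #18 also uses a helper r_enumerate that is not shown on this page. Judging from how it is used (indices run from the end of 'sequence' down to 0, where the loop breaks), it presumably enumerates in reverse order; a sketch of such a helper, stated as an assumption:

def r_enumerate(sequence):
    # Presumed behavior: like enumerate(), but from the last element down to the first.
    for i in range(len(sequence) - 1, -1, -1):
        yield i, sequence[i]
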
Code Example #19
File: paths_to_state.py Project: yifsun/amplify
    def accepting_state_index_list(self):
        return flatten_list_of_lists(
            (x.accepting_state_index for x in acceptance_sequence)
            for acceptance_sequence in self.__list)