Example #1
0
    def expand_first(self, seq):
        """Resolve *seq* against the current mode's mappings.

        Returns a COMPLETE ``Mapping`` when *seq* (or a leading token
        prefix of it) is a full match, an INCOMPLETE ``Mapping`` when
        *seq* is only a prefix of some mapping, or ``None`` when
        nothing matches at all.
        """
        # Try the whole sequence first: the matched keys are consumed
        # and whatever follows them becomes the mapping's tail.
        keys, mapped_to = self._find_full_match(self.state.mode, seq)
        if keys:
            self.state.logger.info(
                "[Mappings] found full command: {0} -> {1}".format(
                    keys, mapped_to))
            return Mapping(seq, mapped_to['name'], seq[len(keys):],
                           mapping_status.COMPLETE)

        # Otherwise grow a prefix one token at a time, giving up at the
        # first prefix that is not itself a full match.
        prefix = ''
        for token in KeySequenceTokenizer(seq).iter_tokenize():
            prefix += token
            keys, mapped_to = self._find_full_match(self.state.mode, prefix)
            if not keys:
                break
            self.state.logger.info(
                "[Mappings] found full command: {0} -> {1}".format(
                    keys, mapped_to))
            return Mapping(prefix, mapped_to['name'], seq[len(prefix):],
                           mapping_status.COMPLETE)

        # A partial match means further keystrokes could still complete
        # a mapping, so report the sequence as incomplete.
        if self._find_partial_match(self.state.mode, seq):
            self.state.logger.info(
                "[Mappings] found partial command: {0}".format(seq))
            return Mapping(seq, '', '', mapping_status.INCOMPLETE)

        return None
Example #2
0
 def parse(self, input_):
     """Tokenize *input_* and return every key token as a list."""
     return list(KeySequenceTokenizer(input_).iter_tokenize())
Example #3
0
 def parse(self, input_):
     """Parse and return a single key token from *input_*."""
     return KeySequenceTokenizer(input_).tokenize_one()
Example #4
0
 def parse(self, input_):
     """Collect all key tokens produced for *input_* into a list."""
     scanner = KeySequenceTokenizer(input_)
     return [token for token in scanner.iter_tokenize()]
Example #5
0
 def parse(self, input_):
     """Return exactly one key token parsed out of *input_*."""
     scanner = KeySequenceTokenizer(input_)
     return scanner.tokenize_one()