Example #1
    # Requires `from collections import defaultdict` at module level.
    def _get_highlight_events(self, text):
        """Build a mapping of character offset -> highlight state changes."""
        events = defaultdict(dict)

        # Record where each token's format starts and where it stops applying.
        for token in tokenize_rules(text):
            format = self._FORMAT_DICT.get(token.type)
            if format is not None:
                events[token.source_span.start_pos]['format'] = format
                events[token.source_span.end_pos + 1]['format'] = None

        # Overlay the error range, if any, adjusted to the current block.
        error_span = self._get_block_level_adjusted_error_span()
        if error_span is not None:
            events[error_span.start_pos]['error'] = True
            events[error_span.end_pos + 1]['error'] = False

        # Ensure both states are reset at the end of the text.
        events[len(text)]['format'] = None
        events[len(text)]['error'] = False

        return events
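
The mapping returned above keys state changes by character offset. The sketch below shows one way a caller might walk the text and apply those changes; the helper name apply_highlight_events and its (char, format, error) output are assumptions for illustration, not taken from the source.

def apply_highlight_events(text, events):
    # Hypothetical consumer: walk the text once, switching the active
    # format and error flag at each recorded offset.
    current_format = None
    in_error = False
    for pos, char in enumerate(text):
        changes = events.get(pos, {})
        if 'format' in changes:
            current_format = changes['format']
        if 'error' in changes:
            in_error = changes['error']
        yield char, current_format, in_error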
Example #2
    def input(self, rules_text):
        # Tokenize the rules text and keep the token stream for later parsing.
        self._token_stream = tokenize_rules(rules_text)
Example #3
    def parse_result(self, text_input):
        # Tokenize the input and return each token's test representation.
        return [token.test_repr() for token in tokenize_rules(text_input)]