def _run(self, text):
    """Tokenize *text*, yielding lexer tokens and a trailing TOKEN_DONE.

    Drives the lexer state machine one character at a time.  Each character
    is classified via self._char_type(); unknown characters raise
    RuntimeError.  The current state's handle_char() may emit zero or more
    tokens per character.  A final TOKEN_DONE token is always yielded.

    :param text: the string to tokenize; must not contain self.EOS.
    :raises RuntimeError: on a character of unknown type.
    """
    self.log_d('_run() text=\"%s\")' % (text))
    assert self.EOS not in text
    self.position = point(1, 1)
    for c in self._chars_plus_eos(text):
        cr = self._char_type(c)
        if cr.ctype == self._char_types.UNKNOWN:
            raise RuntimeError('unknown character: \"%s\"' % (c))
        tokens = self.state.handle_char(cr)
        for token in tokens:
            self.log_d('tokenize: new token: %s' % (str(token)))
            yield token
        # BUG FIX: the column previously never advanced ("x + 0"), so every
        # token carried column 1.  Advance one column per consumed character.
        # NOTE(review): unlike the sibling _run variant, this one does not
        # reset the column on '\n' — confirm multiline input is not expected.
        self.position = point(self.position.x + 1, self.position.y)
    assert self.state == self.STATE_DONE
    yield lexer_token(self.TOKEN_DONE, None, self.position)
def _run(self, text):
    """Tokenize *text*, yielding lexer tokens and a trailing TOKEN_DONE.

    Handles backslash escaping: a backslash marks the next character as
    escaped and is itself swallowed (only an escaped backslash, or any
    non-backslash character, is passed to the state machine).  Tracks a
    (column, line) position, resetting the column and bumping the line on
    each newline.

    :param text: the string to tokenize; must not contain self.EOS.
    """
    self.log_d('_run() text=\"%s\" options=%s)' % (text, str(string_lexer_options(self._options))))
    assert self.EOS not in text
    self.position = point(1, 1)
    for c in self._chars_plus_eos(text):
        # A bare backslash is consumed here and only flags escaping for
        # the following character.
        self._is_escaping = self._last_char == '\\'
        should_handle_char = (self._is_escaping and c == '\\') or (c != '\\')
        if should_handle_char:
            tokens = self.state.handle_char(c)
            for token in tokens:
                self.log_d('tokenize: new token: %s' % (str(token)))
                yield token
        self._last_char = c
        if c == '\n':
            self.position = point(1, self.position.y + 1)
        else:
            # BUG FIX: the column previously never advanced ("x + 0"), so
            # every token carried column 1.  Advance one column per char.
            self.position = point(self.position.x + 1, self.position.y)
    assert self.state == self.STATE_DONE
    yield lexer_token(self.TOKEN_DONE, None, self.position)
def __new__(clazz, token_type=None, value=None, position=point(1, 1)):
    """Construct a token tuple by delegating to the immediate base class."""
    base_class = clazz.__bases__[0]
    return base_class.__new__(clazz, token_type, value, position)
def TNUM(s, x=1, y=1):
    """Build a TOKEN_NUMBER lexer token with value *s* at position (x, y)."""
    pos = point(x, y)
    return lexer_token(upstream_version_lexer.TOKEN_NUMBER, s, pos)
def TPUNCT(s, x=1, y=1):
    """Build a TOKEN_PUNCTUATION lexer token with value *s* at (x, y)."""
    pos = point(x, y)
    return lexer_token(upstream_version_lexer.TOKEN_PUNCTUATION, s, pos)
def TTEXT(s, x=1, y=1):
    """Build a TOKEN_TEXT lexer token with value *s* at (x, y)."""
    pos = point(x, y)
    return lexer_token(upstream_version_lexer.TOKEN_TEXT, s, pos)
def TDONE(x=1, y=1):
    """Build a valueless TOKEN_DONE lexer token at (x, y)."""
    pos = point(x, y)
    return lexer_token(upstream_version_lexer.TOKEN_DONE, None, pos)
# Build a TOKEN_PART_DELIMITER token for the semantic version lexer tests.
# NOTE(review): the trailing `class test_semantic_version_lexer(unit_test):`
# header fused onto this line has no visible body in this chunk — the line
# looks truncated/mangled; confirm against the original file before editing.
def TPD(value, x = 0, y = 0): return lexer_token(semantic_version_lexer.TOKEN_PART_DELIMITER, value, point(x, y)) class test_semantic_version_lexer(unit_test):
def TPART(value, x=0, y=0):
    """Build a TOKEN_PART semantic-version lexer token at (x, y)."""
    return lexer_token(semantic_version_lexer.TOKEN_PART, value, point(x, y))

def TPD(value, x=0, y=0):
    """Build a TOKEN_PART_DELIMITER semantic-version lexer token at (x, y)."""
    return lexer_token(semantic_version_lexer.TOKEN_PART_DELIMITER, value, point(x, y))
def TPUN(value, x=0, y=0):
    """Build a TOKEN_PUNCTUATION semantic-version lexer token at (x, y)."""
    return lexer_token(semantic_version_lexer.TOKEN_PUNCTUATION, value, point(x, y))

def TPART(value, x=0, y=0):
    """Build a TOKEN_PART semantic-version lexer token at (x, y)."""
    return lexer_token(semantic_version_lexer.TOKEN_PART, value, point(x, y))
def TDONE(x=0, y=0):
    """Build a valueless TOKEN_DONE semantic-version lexer token at (x, y)."""
    return lexer_token(semantic_version_lexer.TOKEN_DONE, None, point(x, y))

def TTEXT(value, x=0, y=0):
    """Build a TOKEN_TEXT semantic-version lexer token at (x, y)."""
    return lexer_token(semantic_version_lexer.TOKEN_TEXT, value, point(x, y))
def TCOMMENT(s, x=1, y=1):
    """Build a TOKEN_COMMENT string-lexer token with value *s* at (x, y)."""
    pos = point(x, y)
    return lexer_token(string_lexer.TOKEN_COMMENT, s, pos)
def TSTRING(s, x=1, y=1):
    """Build a TOKEN_STRING string-lexer token with value *s* at (x, y)."""
    pos = point(x, y)
    return lexer_token(string_lexer.TOKEN_STRING, s, pos)
def TSPACE(s=' ', x=1, y=1):
    """Build a TOKEN_SPACE string-lexer token (default value: one space)."""
    pos = point(x, y)
    return lexer_token(string_lexer.TOKEN_SPACE, s, pos)
def TDONE(x=1, y=1):
    """Build a valueless TOKEN_DONE string-lexer token at (x, y)."""
    pos = point(x, y)
    return lexer_token(string_lexer.TOKEN_DONE, None, pos)