Example #1
def _run(self, text):
    self.log_d('_run(text="%s")' % (text))
    assert self.EOS not in text
    self.position = point(1, 1)
    for c in self._chars_plus_eos(text):
        cr = self._char_type(c)
        if cr.ctype == self._char_types.UNKNOWN:
            raise RuntimeError('unknown character: "%s"' % (c))
        tokens = self.state.handle_char(cr)
        for token in tokens:
            self.log_d('tokenize: new token: %s' % (str(token)))
            yield token
        # Advance one column per consumed character.
        self.position = point(self.position.x + 1, self.position.y)
    assert self.state == self.STATE_DONE
    yield lexer_token(self.TOKEN_DONE, None, self.position)
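The generator above relies on a few supporting pieces the snippet does not show. Below is a minimal sketch of plausible shapes for them, assuming `point` and `lexer_token` are simple namedtuples and that `_chars_plus_eos` merely appends an end-of-stream sentinel; the field names are inferred from how `_run` uses them, not taken from the library's actual definitions.

from collections import namedtuple

# Assumed shapes only; the real library may define these differently.
point = namedtuple('point', ['x', 'y'])
lexer_token = namedtuple('lexer_token', ['token_type', 'value', 'position'])

def chars_plus_eos(text, eos='\0'):
    # Yield every character of the input, then an explicit EOS sentinel,
    # so the state machine always sees an unambiguous end marker.
    for c in text:
        yield c
    yield eos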
Example #2
def _run(self, text):
    self.log_d('_run(text="%s" options=%s)' %
               (text, str(string_lexer_options(self._options))))
    assert self.EOS not in text
    self.position = point(1, 1)
    for c in self._chars_plus_eos(text):
        # A character is escaped when the previous character was a backslash.
        self._is_escaping = self._last_char == '\\'
        should_handle_char = (self._is_escaping
                              and c == '\\') or (c != '\\')
        if should_handle_char:
            tokens = self.state.handle_char(c)
            for token in tokens:
                self.log_d('tokenize: new token: %s' % (str(token)))
                yield token
        self._last_char = c

        if c == '\n':
            # A newline resets the column and advances the row.
            self.position = point(1, self.position.y + 1)
        else:
            self.position = point(self.position.x + 1, self.position.y)

    assert self.state == self.STATE_DONE
    yield lexer_token(self.TOKEN_DONE, None, self.position)
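Note how the loop swallows a lone backslash and forwards it only when it is itself escaped. The standalone function below reproduces just that filtering step so it can be exercised in isolation; it is illustrative only, not part of the lexer.

def visible_chars(text):
    # Mirror the escape filtering in _run(): a bare backslash is consumed
    # silently and marks the next character as escaped; an escaped
    # backslash passes through as a literal character.
    last_char = None
    for c in text:
        is_escaping = last_char == '\\'
        if (is_escaping and c == '\\') or c != '\\':
            yield c
        last_char = c

print(list(visible_chars('a\\\\b')))  # ['a', '\\', 'b']
print(list(visible_chars('a\\"b')))   # ['a', '"', 'b']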
Example #3
def __new__(clazz, token_type=None, value=None, position=point(1, 1)):
    # Delegate construction to the first base class so the keyword
    # defaults apply and the resulting token stays immutable.
    return clazz.__bases__[0].__new__(clazz, token_type, value, position)
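The `clazz.__bases__[0].__new__` call forwards construction to the first base class. A minimal reproduction of the pattern, assuming the base is a namedtuple (the snippet does not show the actual class declaration, so this layout is an assumption):

from collections import namedtuple

point = namedtuple('point', ['x', 'y'])
_token_base = namedtuple('_token_base', ['token_type', 'value', 'position'])

class lexer_token(_token_base):
    def __new__(clazz, token_type=None, value=None, position=point(1, 1)):
        # Delegate to the namedtuple base; the result is an ordinary
        # immutable tuple with named fields and sensible defaults.
        return clazz.__bases__[0].__new__(clazz, token_type, value, position)

print(lexer_token('TEXT', 'foo'))
# lexer_token(token_type='TEXT', value='foo', position=point(x=1, y=1))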
Example #4
def TNUM(s, x=1, y=1):
    return lexer_token(upstream_version_lexer.TOKEN_NUMBER, s, point(x, y))
Example #5
def TPUNCT(s, x=1, y=1):
    return lexer_token(upstream_version_lexer.TOKEN_PUNCTUATION, s,
                       point(x, y))
Example #6
def TTEXT(s, x=1, y=1):
    return lexer_token(upstream_version_lexer.TOKEN_TEXT, s, point(x, y))
Example #7
def TDONE(x=1, y=1):
    return lexer_token(upstream_version_lexer.TOKEN_DONE, None, point(x, y))
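Factory helpers like `TNUM`, `TPUNCT`, `TTEXT`, and `TDONE` keep expected-token lists in unit tests compact. A sketch of the kind of assertion they enable; the test method and the `tokenize` call are illustrative assumptions, not the library's actual test code or API.

def test_tokenize_simple_version(self):
    # Hypothetical: positions advance one column per input character,
    # and the lexer terminates the stream with a TOKEN_DONE marker.
    expected = [
        TNUM('1', 1, 1),
        TPUNCT('.', 2, 1),
        TNUM('2', 3, 1),
        TDONE(4, 1),
    ]
    actual = list(upstream_version_lexer.tokenize('1.2'))
    self.assertEqual(expected, actual)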
Example #8
def TPD(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_PART_DELIMITER, value,
                       point(x, y))


class test_semantic_version_lexer(unit_test):
Example #9
def TPART(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_PART, value, point(x, y))


def TPD(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_PART_DELIMITER, value,
                       point(x, y))
Example #10
def TPUN(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_PUNCTUATION, value,
                       point(x, y))


def TPART(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_PART, value, point(x, y))
Example #11
def TDONE(x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_DONE, None, point(x, y))


def TTEXT(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_TEXT, value, point(x, y))
Example #12
def TCOMMENT(s, x=1, y=1):
    return lexer_token(string_lexer.TOKEN_COMMENT, s, point(x, y))
Example #13
def TSTRING(s, x=1, y=1):
    return lexer_token(string_lexer.TOKEN_STRING, s, point(x, y))
Example #14
def TSPACE(s=' ', x=1, y=1):
    return lexer_token(string_lexer.TOKEN_SPACE, s, point(x, y))
Example #15
def TDONE(x=1, y=1):
    return lexer_token(string_lexer.TOKEN_DONE, None, point(x, y))
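The `string_lexer` helpers follow the same pattern, with `TSPACE` additionally defaulting its value to a single space. A hypothetical expected-token list for the input foo "a b"; the token boundaries and positions here are assumptions for illustration, not output from the actual lexer.

# Hypothetical: 'foo' starts at column 1, the space at column 4, and
# the quoted string at column 5; TOKEN_DONE lands one past the input.
expected = [
    TSTRING('foo', 1, 1),
    TSPACE(' ', 4, 1),
    TSTRING('a b', 5, 1),
    TDONE(10, 1),
]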