Example #1
 def _run(self, text):
     self.log_d('_run() text="%s"' % (text))
     assert self.EOS not in text
     self.position = point(1, 1)
     for c in self._chars_plus_eos(text):
         cr = self._char_type(c)
         if cr.ctype == self._char_types.UNKNOWN:
             raise RuntimeError('unknown character: "%s"' % (c))
         tokens = self.state.handle_char(cr)
         for token in tokens:
             self.log_d('tokenize: new token: %s' % (str(token)))
             yield token
         # Advance the cursor one column per consumed character.
         self.position = point(self.position.x + 1, self.position.y)
     assert self.state == self.STATE_DONE
     yield lexer_token(self.TOKEN_DONE, None, self.position)
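All of these examples lean on two small value types, point and lexer_token. A minimal sketch of plausible stand-ins, assuming both are plain namedtuples (the real library may define richer classes, e.g. with a clone() method):

from collections import namedtuple

# Hypothetical minimal stand-ins for the types these examples assume:
# point is a 1-based (x, y) text coordinate, and lexer_token bundles a
# token type constant, the token's value, and its starting position.
point = namedtuple('point', ['x', 'y'])
lexer_token = namedtuple('lexer_token', ['token_type', 'value', 'position'])

Note that _run() above is a generator, so a caller would typically drain it, e.g. tokens = list(lexer._run('foo bar')).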
Example #2
def TPUNCT(s, x=1, y=1):
    return lexer_token(upstream_version_lexer.TOKEN_PUNCTUATION, s,
                       point(x, y))
Example #3
def TTEXT(s, x=1, y=1):
    return lexer_token(upstream_version_lexer.TOKEN_TEXT, s, point(x, y))
Example #4
def TDONE(x=1, y=1):
    return lexer_token(upstream_version_lexer.TOKEN_DONE, None, point(x, y))
Example #5
def TSPACE(s=' ', x=1, y=1):
    return lexer_token(string_lexer.TOKEN_SPACE, s, point(x, y))
Example #6
 def make_token_number(self):
     return lexer_token(self.TOKEN_NUMBER, int(self.buffer_value()),
                        self.position)
Example #7
def TDONE(x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_DONE, None, point(x, y))

def TTEXT(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_TEXT, value, point(x, y))
Example #8
 def assertEqual(self, expected, actual):
   assert isinstance(expected, list)
   expected = [lexer_token(*t) for t in expected]
   super(test_sentence_lexer, self).assertEqual(expected, actual)
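This assertEqual override lets a test spell each expected token as a bare tuple of lexer_token constructor arguments; lexer_token(*t) expands the tuple before delegating to the base class comparison. A minimal illustration of the expansion, assuming the namedtuple sketch above:

# Each (type, value, position) tuple is expanded into a lexer_token,
# so both sides of the comparison end up as the same type.
expected = [('TOKEN_TEXT', 'foo', point(1, 1))]
expected = [lexer_token(*t) for t in expected]
assert expected == [lexer_token('TOKEN_TEXT', 'foo', point(1, 1))]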
Example #9
 def make_token_part(self):
   value = self.buffer_value()
   offset = len(value) - 1
   position = self.position.clone(mutations = { 'x': self.position.x - offset })
   return lexer_token(self.TOKEN_PART, int(value), position)
Example #10
 def make_token_punctuation(self):
   value = self.buffer_value()
   offset = len(value) - 1
   position = self.position.clone(mutations = { 'x': self.position.x - offset })
   return lexer_token(self.TOKEN_PUNCTUATION, value, position)
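make_token_part and make_token_punctuation stamp each token with the position of its first character: when the buffer is flushed, the cursor sits on the last buffered character, so x is rewound by len(value) - 1. A quick worked check, assuming the namedtuple sketch above, where _replace stands in for the real point.clone():

# A 3-character buffer ending at x=5 must have started at x=3.
position = point(5, 1)
value = 'abc'
offset = len(value) - 1  # 2
start = position._replace(x=position.x - offset)
assert start == point(3, 1)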
Example #11
 def make_token_part_delimiter(self):
   return lexer_token(self.TOKEN_PART_DELIMITER, self.buffer_value(),
                      self.position)
Example #12
def TCOMMENT(s, x=1, y=1):
    return lexer_token(string_lexer.TOKEN_COMMENT, s, point(x, y))
Example #13
def TSTRING(s, x=1, y=1):
    return lexer_token(string_lexer.TOKEN_STRING, s, point(x, y))
Example #14
def TNUM(s, x=1, y=1):
    return lexer_token(upstream_version_lexer.TOKEN_NUMBER, s, point(x, y))
Example #15
def TPUN(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_PUNCTUATION, value,
                       point(x, y))

def TPART(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_PART, value, point(x, y))
Example #16
 def assertEqual(self, expected, actual):
     assert isinstance(expected, list)
     expected = [lexer_token(*t) for t in expected]
     super(test_upstream_version_lexer, self).assertEqual(expected, actual)
Example #17
def TPART(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_PART, value, point(x, y))

def TPD(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_PART_DELIMITER, value,
                       point(x, y))
Example #18
def TPUNCT(s, x=1, y=1):
    return lexer_token(L.TOKEN_PUNCTUATION, s, (x, y))

class test_sentence_lexer(unit_test):
Example #19
def TPD(value, x=0, y=0):
    return lexer_token(semantic_version_lexer.TOKEN_PART_DELIMITER, value,
                       point(x, y))

class test_semantic_version_lexer(unit_test):
Example #20
 def make_token_text(self):
     return lexer_token(self.TOKEN_TEXT, self.buffer_value(), self.position)
Example #21
 def make_token_delimiter(self):
     return lexer_token(self.TOKEN_DELIMITER, self.delimiter, self.position)
Example #22
 def make_token_punctuation(self):
     return lexer_token(self.TOKEN_PUNCTUATION, self.buffer_value(),
                        self.position)
Example #23
def TDONE(x=1, y=1):
    return lexer_token(string_lexer.TOKEN_DONE, None, point(x, y))