Example #1
    def reflect(self):
        # Register one scanning rule per operator symbol, longest symbols
        # first so multi-character operators win over their one-char prefixes.
        symbols = sorted(operators.keys(), key=len, reverse=True)
        index = 1
        for symbol in symbols:
            # No 'self' parameter: the function is attached to the instance
            # via setattr(), so it receives only the matched text 's'.  'self'
            # comes from the enclosing scope, and the 'symbol=symbol' default
            # freezes the current operator for this particular rule.
            def t_op(s, symbol=symbol):
                self.tokens.append(Token(type=symbol))
            # The scanner reads each rule's regex from its docstring, so the
            # escaped operator literal becomes this rule's pattern.
            t_op.__doc__ = escape(symbol)
            setattr(self, 't_zzzz_op_%.4d' % index, t_op)
            index += 1

        return GenericScanner.reflect(self)
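The reflect() override above relies on three names that the snippet does not define: an operators mapping whose keys are the literal operator strings, an escape() helper that quotes regex metacharacters (symbols such as '*' or '+' are not valid patterns on their own), and a Token class. Those are project specific; the sketch below is only a hypothetical stand-in for them, assuming escape is re.escape and that Token just records a type and an optional value.

import re

# Hypothetical operator table: only the keys (the literal symbols) matter to
# reflect(), which sorts them longest-first before generating rules.
operators = {'+': 'add', '-': 'sub', '*': 'mul', '<': 'lt', '<=': 'le'}

# Assumed to be re.escape, so '*' becomes '\*' before being used as a pattern.
escape = re.escape


class Token:
    # Minimal stand-in for the project's Token class.
    def __init__(self, type, value=None):
        self.type = type
        self.value = value

    def __repr__(self):
        return 'Token(%r, %r)' % (self.type, self.value)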
Example #2
    def tokenize(self, input):
        # Reset per-run state, let the base class drive the t_* rules over
        # the input, then return the token list those rules filled in.
        self.tokens = []
        self.open_square_parenthesizes = []
        GenericScanner.tokenize(self, input)
        return self.tokens
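The generated t_zzzz_op_* rules in Example #1 and the tokenize() wrapper in Example #2 follow one reflective-scanner pattern: each t_* method carries its regex in its docstring, the scanner discovers those methods by name, and tokenize() runs the patterns over the input while the rule bodies append Token objects. The toy class below is a self-contained sketch of that pattern under those assumptions; it does not use the GenericScanner base class from the examples, and every name in it is illustrative.

import re


class Token:
    # Same minimal Token stand-in as above.
    def __init__(self, type, value=None):
        self.type = type
        self.value = value

    def __repr__(self):
        return 'Token(%r, %r)' % (self.type, self.value)


class MiniScanner:
    # Toy scanner: every t_* method stores its regex in its docstring, the
    # constructor reflects over those methods, and tokenize() drives them.
    def __init__(self):
        self.tokens = []
        self.rules = []
        for name in sorted(dir(self)):
            if name.startswith('t_'):
                func = getattr(self, name)
                self.rules.append((re.compile(func.__doc__), func))

    def tokenize(self, input):
        # Reset state, scan left to right, and return the collected tokens,
        # the same shape as the tokenize() shown in Example #2.
        self.tokens = []
        pos = 0
        while pos < len(input):
            for pattern, action in self.rules:
                m = pattern.match(input, pos)
                if m:
                    action(m.group(0))
                    pos = m.end()
                    break
            else:
                pos += 1  # silently skip anything no rule matches
        return self.tokens

    def t_number(self, s):
        r'\d+'
        self.tokens.append(Token(type='number', value=s))

    def t_op(self, s):
        r'[+*/-]'
        self.tokens.append(Token(type=s))


print(MiniScanner().tokenize('1+2*30'))
# [Token('number', '1'), Token('+', None), Token('number', '2'),
#  Token('*', None), Token('number', '30')]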