Example #1
# Assumed context (not shown in the snippet): this is a method of a
# tokenizer class, and the module presumably imports
#   import shlex
#   from supybot import minisix   # Limnoria's Python 2/3 compat shim
def tokenize(self, s):
    # Wrap the string in a file-like object for shlex.
    lexer = shlex.shlex(minisix.io.StringIO(s))
    # Disable comment, quote, and whitespace handling so shlex returns
    # every character, including spaces, as (part of) a token.
    lexer.commenters = ''
    lexer.quotes = ''
    lexer.whitespace = ''
    # Note: 'separators' is not an attribute of the stdlib shlex.shlex;
    # this presumably relies on a customized lexer or on a subclass.
    lexer.separators += self.separators
    ret = []
    while True:
        token = lexer.get_token()
        if not token:
            # An empty token signals end of input.
            break
        elif token == '(':
            # Delegate the parenthesized group to a helper defined
            # elsewhere on the class.
            ret.append(self._insideParens(lexer))
        else:
            ret.append(token)
    return ''.join(ret)
Example #2
# Identical to Example #1 except that StringIO is imported directly,
# presumably via 'from io import StringIO' (or cStringIO on Python 2).
def tokenize(self, s):
    lexer = shlex.shlex(StringIO(s))
    # Disable comment, quote, and whitespace handling so shlex returns
    # every character as (part of) a token.
    lexer.commenters = ''
    lexer.quotes = ''
    lexer.whitespace = ''
    # 'separators' is not a stdlib shlex.shlex attribute; see Example #1.
    lexer.separators += self.separators
    ret = []
    while True:
        token = lexer.get_token()
        if not token:
            break
        elif token == '(':
            ret.append(self._insideParens(lexer))
        else:
            ret.append(token)
    return ''.join(ret)
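
Both examples share the same shlex configuration: clearing commenters, quotes, and whitespace makes the lexer emit every run of word characters as one token, and every other character (including spaces and parentheses) as a single-character token. The following self-contained sketch illustrates just that behavior with standard-library imports only; the function name char_tokens is hypothetical, and the separators line is omitted because it depends on the class's customized lexer:

import shlex
from io import StringIO

def char_tokens(s):
    """Yield shlex tokens with comment, quote, and whitespace handling off."""
    lexer = shlex.shlex(StringIO(s))
    lexer.commenters = ''
    lexer.quotes = ''
    lexer.whitespace = ''
    while True:
        token = lexer.get_token()
        if not token:   # empty token means end of input
            break
        yield token

print(list(char_tokens('echo (upper hi)')))
# ['echo', ' ', '(', 'upper', ' ', 'hi', ')']

With this setup, the '(' token surfaces on its own, which is what lets the tokenize methods above detect it and hand the rest of the group off to _insideParens.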