Example No. 1
 def test_tokensStr_01(self):
     """global function tokensStr() test multiple tokens."""
     myTokS = [
         PpToken.PpToken('f', 'identifier'),
         PpToken.PpToken('g', 'identifier'),
     ]
     self.assertEqual('fg', PpToken.tokensStr(myTokS))
Example No. 2
 def test_tokensStr_02(self):
     """global function tokensStr() test single token, long form."""
     myTokS = [
         PpToken.PpToken('f', 'identifier'),
     ]
     self.assertEqual(
         #'"f", identifier, True, False, False',
         'PpToken(t="f", tt=identifier, line=True, prev=False, ?=False)',
         PpToken.tokensStr(myTokS, shortForm=False))
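Taken together, Examples No. 1 and No. 2 show both output modes of tokensStr(). Below is a minimal standalone sketch of the same calls, assuming the cpip package layout (cpip.core.PpToken) that these tests imply; the import path is an assumption, and the expected strings come from the assertions above.

 # Sketch only: assumes PpToken is importable as cpip.core.PpToken.
 from cpip.core import PpToken

 toks = [
     PpToken.PpToken('f', 'identifier'),
     PpToken.PpToken('g', 'identifier'),
 ]
 # Short form concatenates the token text, e.g. 'fg'.
 print(PpToken.tokensStr(toks))
 # Long form renders each token's attributes, as asserted in test_tokensStr_02.
 print(PpToken.tokensStr(toks, shortForm=False))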
Example No. 3
 def pprintReplacementList(self, theList):
     """Pretty prints the replacement list."""
     for i, aTtt in enumerate(theList):
         print('%2d: %s,' % (i, self.__stringiseToken(aTtt)))
     # TODO: This is horrible: we expand the list with Nones and then pass
     # them to something that thinks they are PpTokens.
     print('As string:')
     print(PpToken.tokensStr(theList))
Example No. 4
 def _debugTokenStream(self, thePrefix, theArg=''):
     """Writes to logging.debug() an interpretation of the token stream
     provided by theArg. It will be preceded by the debugMarker value
     (if set) and that will always be cleared."""
     assert self._enableTrace
     if isinstance(theArg, list):
         # Assume a list of PpToken objects
         debugStr = '[%d] %s' \
             % (len(theArg), PpToken.tokensStr(theArg, shortForm=True))
     elif isinstance(theArg, str):
         debugStr = theArg
     elif theArg is None:
         debugStr = 'None'
     else:
         raise ExceptionMacroEnv(
             'Unknown argument type %s, %s passed to _debugTokenStream()'
             % (type(theArg), theArg))
     if self.debugMarker is not None:
         logging.debug(self.debugMarker)
     self.debugMarker = None
     stackPrefix = ' ' * len(traceback.extract_stack())
     logging.debug('[%2d]%s%s: %s'
                   % (len(stackPrefix), stackPrefix, thePrefix, debugStr))
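_debugTokenStream() above indents its log output by the current call depth via len(traceback.extract_stack()). That indentation trick can be isolated as a small helper; this is a sketch only, and debug_with_depth is a hypothetical name, not part of the original module.

 import logging
 import traceback

 def debug_with_depth(prefix, message):
     # Hypothetical helper: pad the message by the current call-stack depth,
     # mirroring the stackPrefix logic in _debugTokenStream() above.
     pad = ' ' * len(traceback.extract_stack())
     logging.debug('[%2d]%s%s: %s' % (len(pad), pad, prefix, message))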
Example No. 5
 def test_tokensStr_00(self):
     """global function tokensStr() test single token."""
     myTokS = [
         PpToken.PpToken('f', 'identifier'),
     ]
     self.assertEqual('f', PpToken.tokensStr(myTokS))