Example #1
 def retPpTokeniser(self, theContent):
     """Returns a PpTokeniser object with the supplied content."""
     # io.StringIO expects Unicode
     if sys.version_info.major == 2:
         return PpTokeniser.PpTokeniser(
             theFileObj=io.StringIO(theContent.decode('ascii')))
     else:
         return PpTokeniser.PpTokeniser(theFileObj=io.StringIO(theContent))
Example #2
 def stringToTokens(self, theString):
     """Returns a list of preprocessing tokens from a string. This can be
     used to test against expected values."""
     # io.StringIO expects Unicode
     if sys.version_info.major == 2:
         myCpp = PpTokeniser.PpTokeniser(
             theFileObj=io.StringIO(theString.decode('ascii')))
     else:
         myCpp = PpTokeniser.PpTokeniser(theFileObj=io.StringIO(theString))
     return [t_tt for t_tt in myCpp.next()]
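
The two helper methods above feed strings into the same tokenise-then-evaluate pipeline that the test cases below exercise. A minimal standalone sketch of that pipeline follows; the "from cpip.core import ..." path is an assumption, everything else uses only the calls already shown in these excerpts.

    from cpip.core import PpTokeniser, ConstantExpression  # assumed import path

    # Tokenise a small expression exactly as the test cases below do.
    myCpp = PpTokeniser.PpTokeniser()
    myToks = [t for t in myCpp.genLexPptokenAndSeqWs('1 < 2\n')]

    # Hand the token list to ConstantExpression and evaluate it.
    myObj = ConstantExpression.ConstantExpression(myToks)
    print(myObj.evaluate())  # prints 1 (true), matching testEval_00 below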
Example #3
 def testEval_Fail_00(self):
     """ConstantExpression - evaluation raises for '"x" < ==' as eval fails."""
     myCpp = PpTokeniser.PpTokeniser()
     myToksTypes = [t for t in myCpp.genLexPptokenAndSeqWs('"x" < ==')]
     myObj = ConstantExpression.ConstantExpression(myToksTypes)
     self.assertRaises(ConstantExpression.ExceptionEvaluateExpression,
                       myObj.evaluate)
Example #4
 def test_00(self):
     """TestConstantExpressionLinux.test_00(): 1000000UL * 1000"""
     myCpp = PpTokeniser.PpTokeniser()
     myToksTypes = [
         t for t in myCpp.genLexPptokenAndSeqWs('1000000UL * 1000\n')
     ]
     myObj = ConstantExpression.ConstantExpression(myToksTypes)
     self.assertEqual(1000000000, myObj.evaluate())
Example #5
 def testConditionalExpression_01(self):
     """ConstantExpression - Conditional expression evaluation: ((1)>(2)) ? (1) : (2)\\n"""
     myCpp = PpTokeniser.PpTokeniser()
     myToksTypes = [
         t for t in myCpp.genLexPptokenAndSeqWs('((1)>(2)) ? (1) : (2)\n')
     ]
     myObj = ConstantExpression.ConstantExpression(myToksTypes)
     self.assertEqual(2, myObj.evaluate())
Example #6
 def testEval_Word_06(self):
     """ConstantExpression - evaluation of "(1 && 1) && ((0 > 0 ) || 1) && (true || false)"."""
     myCpp = PpTokeniser.PpTokeniser()
     myToksTypes = [
         t for t in myCpp.genLexPptokenAndSeqWs(
             '(1 && 1) && ((0 > 0 ) || 1) && (true || false)')
     ]
     myObj = ConstantExpression.ConstantExpression(myToksTypes)
     self.assertEqual(1, myObj.evaluate())
Example #7
 def __setString(self, theStr):
     """Takes a string 'identifier replacement\n' and sets the macro map.
     This uses __defien(...) so only a redefinition exception is raised."""
     myCpp = PpTokeniser.PpTokeniser(
         theFileObj=io.StringIO(theStr)
         )
     myGen = myCpp.next()
     # Set file to '' and line to 1 as these are builtin macros
     myDef = PpDefine.PpDefine(myGen, '', 1)
     self.__define(myDef)
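
Outside the test class the same macro-definition pattern can be reproduced directly: tokenise an 'identifier replacement\n' string and pass the token generator to PpDefine with an empty file name and line 1, exactly as __setString() does before handing the result to __define(). A minimal sketch, assuming the "from cpip.core import ..." path and the illustrative macro 'SPAM 42':

    import io
    from cpip.core import PpTokeniser, PpDefine  # assumed import path

    # Tokenise a macro definition of the form 'identifier replacement\n'.
    myCpp = PpTokeniser.PpTokeniser(theFileObj=io.StringIO('SPAM 42\n'))
    myGen = myCpp.next()

    # File '' and line 1 mark this as a builtin-style macro, as in __setString().
    myDef = PpDefine.PpDefine(myGen, '', 1)
    # A MacroEnv would now register myDef via its (private) __define() method.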
Example #8
 def testConditionalExpression_51(self):
     """ConstantExpression - Conditional expression evaluation: raises on ((&&)==(||)) ? (1) : (2)\\n"""
     myCpp = PpTokeniser.PpTokeniser()
     expression = '((&&)==(||)) ? (1) : (2)\n'
     myCe = ConstantExpression.ConstantExpression([])
     m = myCe.RE_CONDITIONAL_EXPRESSION.match(expression)
     self.assertNotEqual(None, m)
     myToksTypes = [t for t in myCpp.genLexPptokenAndSeqWs(expression)]
     # print()
     # print(myToksTypes)
     myObj = ConstantExpression.ConstantExpression(myToksTypes)
     self.assertRaises(ConstantExpression.ExceptionConditionalExpression, myObj.evaluate)
Example #9
 def __init__(self, theFpo, theDiag):
     """Constructor
     theFpo     - A FilePathOrigin object that identifies the file.
     theDiag    - A CppDiagnostic object to give to the PpTokeniser."""
     self.fileName = theFpo.filePath
     # Create a new PpTokeniser
     self.ppt = PpTokeniser.PpTokeniser(
         theFileObj=theFpo.fileObj,
         theFileId=theFpo.filePath,
         theDiagnostic=theDiag,
     )
     self.tokenCounter = PpTokenCount.PpTokenCount()
     self.origin = theFpo.origin
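
The constructor above consumes three attributes of the FilePathOrigin it is given (fileObj, filePath, origin). A minimal sketch of building such an object for an in-memory file and handing it to a PpTokeniser follows; the "from cpip.core import ..." path, the FilePathOrigin field order (fileObj, filePath, currentPath, origin) inferred from the type annotation in Example #11 below, the no-argument PreprocessDiagnosticStd() constructor, and the file name and origin values are all assumptions.

    import io
    from cpip.core import PpTokeniser, IncludeHandler, CppDiagnostic  # assumed import path

    # Field order (fileObj, filePath, currentPath, origin) is inferred, not confirmed.
    fpo = IncludeHandler.FilePathOrigin(
        io.StringIO('#define SPAM 42\n'),  # fileObj: the in-memory "file"
        'spam.h',                          # filePath (illustrative)
        None,                              # currentPath
        'unittest',                        # origin (illustrative)
    )

    # Mirror the constructor body above: hand the file object, its id and a
    # diagnostic object to a new PpTokeniser.
    ppt = PpTokeniser.PpTokeniser(
        theFileObj=fpo.fileObj,
        theFileId=fpo.filePath,
        theDiagnostic=CppDiagnostic.PreprocessDiagnosticStd(),  # assumed no-arg ctor
    )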
Example #10
File: MacroEnv.py Project: cybort/cpip
    def __setString(self, theStr):
        """Takes a string ``'identifier replacement\\n'`` and sets the macro map.
        This uses :py:meth:`__define()` so only a redefinition exception is raised.

        :param theStr: Replacement string.
        :type theStr: ``str``

        :returns: ``NoneType``
        """
        myCpp = PpTokeniser.PpTokeniser(theFileObj=io.StringIO(theStr))
        myGen = myCpp.next()
        # Set file to '' and line to 1 as these are builtin macros
        myDef = PpDefine.PpDefine(myGen, '', 1)
        self.__define(myDef)
Example #11
    def __init__(self, theFpo, theDiag):
        """Constructor.

        :param theFpo: A FilePathOrigin object that identifies the file.
        :type theFpo: ``cpip.core.IncludeHandler.FilePathOrigin([_io.StringIO, str, NoneType, str]), cpip.core.IncludeHandler.FilePathOrigin([_io.TextIOWrapper, str, str, str])``

        :param theDiag: A CppDiagnostic object to give to the PpTokeniser.
        :type theDiag: ``cpip.core.CppDiagnostic.PreprocessDiagnosticStd``

        :returns: ``NoneType``
        """
        self.fileName = theFpo.filePath
        # Create a new PpTokeniser
        self.ppt = PpTokeniser.PpTokeniser(
            theFileObj=theFpo.fileObj,
            theFileId=theFpo.filePath,
            theDiagnostic=theDiag,
        )
        self.tokenCounter = PpTokenCount.PpTokenCount()
Example #12
 def testEval_02(self):
     """ISO/IEC 9899:1999 (E) 6.10.1-3 - evaluation of "A == 0" is true when A not defined."""
     myCpp = PpTokeniser.PpTokeniser()
     myToksTypes = [t for t in myCpp.genLexPptokenAndSeqWs('A == 0')]
     myObj = ConstantExpression.ConstantExpression(myToksTypes)
     self.assertEqual(1, myObj.evaluate())
Example #13
 def testEval_00(self):
     """ConstantExpression - evaluation of "1 < 2"."""
     myCpp = PpTokeniser.PpTokeniser()
     myToksTypes = [t for t in myCpp.genLexPptokenAndSeqWs('1 < 2')]
     myObj = ConstantExpression.ConstantExpression(myToksTypes)
     self.assertEqual(1, myObj.evaluate())
Example #14
 def testCtor_01(self):
     """ConstantExpression - construction with "1 < 2"."""
     myCpp = PpTokeniser.PpTokeniser()
     myToksTypes = [t for t in myCpp.genLexPptokenAndSeqWs('1 < 2')]
     myObj = ConstantExpression.ConstantExpression(myToksTypes)
     self.assertEqual("1 < 2", str(myObj))
Example #15
 def retPpTokeniser(self, theContent):
     """Returns a PpTokeniser object with the supplied content."""
     return PpTokeniser.PpTokeniser(theFileObj=io.StringIO(theContent))
Example #16
 def testEval_Word_05(self):
     """ConstantExpression - evaluation of "false"."""
     myCpp = PpTokeniser.PpTokeniser()
     myToksTypes = [t for t in myCpp.genLexPptokenAndSeqWs('false')]
     myObj = ConstantExpression.ConstantExpression(myToksTypes)
     self.assertEqual(0, myObj.evaluate())
Example #17
 def testEval_11(self):
     """Evaluation of long character-literal comparison "L'A' == L'B'" is False."""
     myCpp = PpTokeniser.PpTokeniser()
     myToksTypes = [t for t in myCpp.genLexPptokenAndSeqWs("L'A' == L'B'")]
     myObj = ConstantExpression.ConstantExpression(myToksTypes)
     self.assertEqual(0, myObj.evaluate())
Example #18
 def testEval_04(self):
     """Evaluation of character-literal comparison "'A' == 'A'" is True."""
     myCpp = PpTokeniser.PpTokeniser()
     myToksTypes = [t for t in myCpp.genLexPptokenAndSeqWs("'A' == 'A'")]
     myObj = ConstantExpression.ConstantExpression(myToksTypes)
     self.assertEqual(1, myObj.evaluate())
Example #19
 def stringToTokens(self, theString):
     """Returns a list of preprocessing tokens from a string. This can be
     used to test against expected values."""
     myCpp = PpTokeniser.PpTokeniser(theFileObj=io.StringIO(theString))
     return [t_tt for t_tt in myCpp.next()]