def testConsumeIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    # Token stream: a negative value, a value just past uint32 range, and a
    # value just past int64 range.
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format._Tokenizer(text.splitlines())
    # '-1' is rejected by both unsigned readers; a failed Consume* apparently
    # leaves the current token in place so it can be retried — TODO confirm
    # against _Tokenizer.  It then parses fine as a signed int32.
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint32)
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint64)
    self.assertEqual(-1, tokenizer.ConsumeInt32())

    # uint32_max + 1: overflows uint32 and int32, but fits in int64.
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint32)
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeInt32)
    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInt64())

    # int64_max + 1: overflows int64, but fits in uint64.
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeInt64)
    self.assertEqual(int64_max + 1, tokenizer.ConsumeUint64())
    self.assertTrue(tokenizer.AtEnd())

    # '-0' and '0' must both be accepted by the unsigned readers as zero.
    text = '-0 -0 0 0'
    tokenizer = text_format._Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeUint32())
    self.assertEqual(0, tokenizer.ConsumeUint64())
    self.assertEqual(0, tokenizer.ConsumeUint32())
    self.assertEqual(0, tokenizer.ConsumeUint64())
    self.assertTrue(tokenizer.AtEnd())
# Example #2 (score: 0)
  def testConsumeIntegers(self):
    """Exercise the failure paths of the integer Consume* methods and the
    '-0'/'0' special cases."""
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    tok = text_format._Tokenizer(
        '-1 %d %d' % (uint32_max + 1, int64_max + 1))

    # '-1' is negative, so both unsigned readers must reject it; it is then
    # consumed as a signed int32.
    for rejected in (tok.ConsumeUint32, tok.ConsumeUint64):
      self.assertRaises(text_format.ParseError, rejected)
    self.assertEqual(-1, tok.ConsumeInt32())

    # uint32_max + 1 overflows both uint32 and int32, but fits in int64.
    for rejected in (tok.ConsumeUint32, tok.ConsumeInt32):
      self.assertRaises(text_format.ParseError, rejected)
    self.assertEqual(uint32_max + 1, tok.ConsumeInt64())

    # int64_max + 1 overflows int64, but fits in uint64.
    self.assertRaises(text_format.ParseError, tok.ConsumeInt64)
    self.assertEqual(int64_max + 1, tok.ConsumeUint64())
    self.assertTrue(tok.AtEnd())

    # '-0' and '0' must both parse as zero for the unsigned readers.
    tok = text_format._Tokenizer('-0 -0 0 0')
    for consume in (tok.ConsumeUint32, tok.ConsumeUint64,
                    tok.ConsumeUint32, tok.ConsumeUint64):
      self.assertEqual(0, consume())
    self.assertTrue(tok.AtEnd())
# Example #3 (score: 0)
    def testSimpleTokenCases(self):
        """Walk a mixed token stream, verifying each Consume* result in order.

        `methods` pairs each bound Consume* method with the value it should
        return; bare strings stand for punctuation tokens that are compared
        against tokenizer.token directly.  Each entry consumes exactly one
        token, so the list order must match the token order in `text`.
        """
        text = ('identifier1:"string1"\n     \n\n'
                'identifier2 : \n \n123  \n  identifier3 :\'string\'\n'
                'identifiER_4 : 1.1e+2 ID5:-0.23 ID6:\'aaaa\\\'bbbb\'\n'
                'ID7 : "aa\\"bb"\n\n\n\n ID8: {A:inf B:-inf C:true D:false}\n'
                'ID9: 22 ID10: -111111111111111111 ID11: -22\n'
                'ID12: 2222222222222222222 '
                'false_bool:  0 true_BOOL:t \n true_bool1:  1 false_BOOL1:f ')
        tokenizer = text_format._Tokenizer(text)
        methods = [(tokenizer.ConsumeIdentifier, 'identifier1'), ':',
                   (tokenizer.ConsumeString, 'string1'),
                   (tokenizer.ConsumeIdentifier, 'identifier2'), ':',
                   (tokenizer.ConsumeInt32, 123),
                   (tokenizer.ConsumeIdentifier, 'identifier3'), ':',
                   (tokenizer.ConsumeString, 'string'),
                   (tokenizer.ConsumeIdentifier, 'identifiER_4'), ':',
                   (tokenizer.ConsumeFloat, 1.1e+2),
                   (tokenizer.ConsumeIdentifier, 'ID5'), ':',
                   (tokenizer.ConsumeFloat, -0.23),
                   (tokenizer.ConsumeIdentifier, 'ID6'), ':',
                   (tokenizer.ConsumeString, 'aaaa\'bbbb'),
                   (tokenizer.ConsumeIdentifier, 'ID7'), ':',
                   (tokenizer.ConsumeString, 'aa\"bb'),
                   (tokenizer.ConsumeIdentifier, 'ID8'), ':', '{',
                   (tokenizer.ConsumeIdentifier, 'A'), ':',
                   (tokenizer.ConsumeFloat, text_format._INFINITY),
                   (tokenizer.ConsumeIdentifier, 'B'), ':',
                   (tokenizer.ConsumeFloat, -text_format._INFINITY),
                   (tokenizer.ConsumeIdentifier, 'C'), ':',
                   (tokenizer.ConsumeBool, True),
                   (tokenizer.ConsumeIdentifier, 'D'), ':',
                   (tokenizer.ConsumeBool, False), '}',
                   (tokenizer.ConsumeIdentifier, 'ID9'), ':',
                   (tokenizer.ConsumeUint32, 22),
                   (tokenizer.ConsumeIdentifier, 'ID10'), ':',
                   (tokenizer.ConsumeInt64, -111111111111111111),
                   (tokenizer.ConsumeIdentifier, 'ID11'), ':',
                   (tokenizer.ConsumeInt32, -22),
                   (tokenizer.ConsumeIdentifier, 'ID12'), ':',
                   (tokenizer.ConsumeUint64, 2222222222222222222),
                   (tokenizer.ConsumeIdentifier, 'false_bool'), ':',
                   (tokenizer.ConsumeBool, False),
                   (tokenizer.ConsumeIdentifier, 'true_BOOL'), ':',
                   (tokenizer.ConsumeBool, True),
                   (tokenizer.ConsumeIdentifier, 'true_bool1'), ':',
                   (tokenizer.ConsumeBool, True),
                   (tokenizer.ConsumeIdentifier, 'false_BOOL1'), ':',
                   (tokenizer.ConsumeBool, False)]

        # Drive the tokenizer to exhaustion; string entries are raw-token
        # comparisons, tuple entries are (bound method, expected result).
        i = 0
        while not tokenizer.AtEnd():
            m = methods[i]
            if type(m) == str:
                token = tokenizer.token
                self.assertEqual(token, m)
                tokenizer.NextToken()
            else:
                self.assertEqual(m[1], m[0]())
            i += 1
# Example #4 (score: 0)
  def testConsumeByteString(self):
    """Each malformed quoted-string token must make ConsumeByteString raise."""
    for bad_text in ('"string1\'',   # quote style mismatch
                     'string1"',     # no opening quote
                     '\n"\\xt"',     # \x escape followed by a non-hex digit
                     '\n"\\"',       # backslash escapes the closing quote
                     '\n"\\x"'):     # truncated \x escape
      tokenizer = text_format._Tokenizer(bad_text)
      self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
  def testConsumeByteString(self):
    """ConsumeByteString rejects a variety of malformed string literals."""
    malformed = [
        '"string1\'',  # opens with " but closes with '
        'string1"',    # missing opening quote
        '\n"\\xt"',    # hex escape with a non-hex character
        '\n"\\"',      # the backslash eats the terminating quote
        '\n"\\x"',     # hex escape with no digits at all
    ]
    for snippet in malformed:
      tokenizer = text_format._Tokenizer(snippet.splitlines())
      self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
    def testConsumeByteString(self):
        """Verify ParseError is raised for each broken byte-string literal."""
        cases = (
            "\"string1'",   # mismatched quote characters
            'string1"',     # string never opened
            '\n"\\xt"',     # \x followed by a non-hex character
            '\n"\\"',       # closing quote consumed by the escape
            '\n"\\x"',      # \x with nothing after it
        )
        for case in cases:
            tokenizer = text_format._Tokenizer(case.splitlines())
            self.assertRaises(text_format.ParseError,
                              tokenizer.ConsumeByteString)
    def testConsumeByteString(self):
        """All of these byte-string tokens are malformed and must raise."""
        # b() converts each literal for the tokenizer, as in the original.
        for raw in ('"string1\'',  # quote mismatch
                    'string1"',    # no opening quote
                    '\n"\\xt"',    # bad hex digit in \x escape
                    '\n"\\"',      # escaped closing quote
                    '\n"\\x"'):    # empty \x escape
            tokenizer = text_format._Tokenizer(b(raw))
            self.assertRaises(text_format.ParseError,
                              tokenizer.ConsumeByteString)
# Example #8 (score: 0)
 def testConsumeBool(self):
   """A token that is not a recognized boolean must raise ParseError."""
   tokenizer = text_format._Tokenizer('not-a-bool')
   self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool)
# Example #9 (score: 0)
  def testSimpleTokenCases(self):
    """Walk a mixed token stream, verifying each Consume* result in order.

    `methods` pairs each bound Consume* method with the value it should
    return; bare strings stand for punctuation tokens that are compared
    against tokenizer.token directly.  Each entry consumes exactly one
    token, so the list order must match the token order in `text`.
    """
    text = ('identifier1:"string1"\n     \n\n'
            'identifier2 : \n \n123  \n  identifier3 :\'string\'\n'
            'identifiER_4 : 1.1e+2 ID5:-0.23 ID6:\'aaaa\\\'bbbb\'\n'
            'ID7 : "aa\\"bb"\n\n\n\n ID8: {A:inf B:-inf C:true D:false}\n'
            'ID9: 22 ID10: -111111111111111111 ID11: -22\n'
            'ID12: 2222222222222222222 ID13: 1.23456f ID14: 1.2e+2f '
            'false_bool:  0 true_BOOL:t \n true_bool1:  1 false_BOOL1:f ' )
    tokenizer = text_format._Tokenizer(text)
    methods = [(tokenizer.ConsumeIdentifier, 'identifier1'),
               ':',
               (tokenizer.ConsumeString, 'string1'),
               (tokenizer.ConsumeIdentifier, 'identifier2'),
               ':',
               (tokenizer.ConsumeInt32, 123),
               (tokenizer.ConsumeIdentifier, 'identifier3'),
               ':',
               (tokenizer.ConsumeString, 'string'),
               (tokenizer.ConsumeIdentifier, 'identifiER_4'),
               ':',
               (tokenizer.ConsumeFloat, 1.1e+2),
               (tokenizer.ConsumeIdentifier, 'ID5'),
               ':',
               (tokenizer.ConsumeFloat, -0.23),
               (tokenizer.ConsumeIdentifier, 'ID6'),
               ':',
               (tokenizer.ConsumeString, 'aaaa\'bbbb'),
               (tokenizer.ConsumeIdentifier, 'ID7'),
               ':',
               (tokenizer.ConsumeString, 'aa\"bb'),
               (tokenizer.ConsumeIdentifier, 'ID8'),
               ':',
               '{',
               (tokenizer.ConsumeIdentifier, 'A'),
               ':',
               (tokenizer.ConsumeFloat, float('inf')),
               (tokenizer.ConsumeIdentifier, 'B'),
               ':',
               (tokenizer.ConsumeFloat, -float('inf')),
               (tokenizer.ConsumeIdentifier, 'C'),
               ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'D'),
               ':',
               (tokenizer.ConsumeBool, False),
               '}',
               (tokenizer.ConsumeIdentifier, 'ID9'),
               ':',
               (tokenizer.ConsumeUint32, 22),
               (tokenizer.ConsumeIdentifier, 'ID10'),
               ':',
               (tokenizer.ConsumeInt64, -111111111111111111),
               (tokenizer.ConsumeIdentifier, 'ID11'),
               ':',
               (tokenizer.ConsumeInt32, -22),
               (tokenizer.ConsumeIdentifier, 'ID12'),
               ':',
               (tokenizer.ConsumeUint64, 2222222222222222222),
               (tokenizer.ConsumeIdentifier, 'ID13'),
               ':',
               (tokenizer.ConsumeFloat, 1.23456),
               (tokenizer.ConsumeIdentifier, 'ID14'),
               ':',
               (tokenizer.ConsumeFloat, 1.2e+2),
               (tokenizer.ConsumeIdentifier, 'false_bool'),
               ':',
               (tokenizer.ConsumeBool, False),
               (tokenizer.ConsumeIdentifier, 'true_BOOL'),
               ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'true_bool1'),
               ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'false_BOOL1'),
               ':',
               (tokenizer.ConsumeBool, False)]

    # Drive the tokenizer to exhaustion; string entries are raw-token
    # comparisons, tuple entries are (bound method, expected result).
    i = 0
    while not tokenizer.AtEnd():
      m = methods[i]
      if type(m) == str:
        token = tokenizer.token
        self.assertEqual(token, m)
        tokenizer.NextToken()
      else:
        self.assertEqual(m[1], m[0]())
      i += 1
 def testConsumeBool(self):
   """ConsumeBool rejects tokens outside the accepted boolean spellings."""
   bogus = 'not-a-bool'
   tok = text_format._Tokenizer(bogus.splitlines())
   self.assertRaises(text_format.ParseError, tok.ConsumeBool)
    def testSimpleTokenCases(self):
        """Walk a mixed token stream, verifying each Consume* result in order.

        `methods` pairs each bound Consume* method with the value it should
        return; bare strings stand for punctuation tokens that are compared
        against tokenizer.token directly.  Each entry consumes exactly one
        token, so the list order must match the token order in `text`.
        """
        text = (
            'identifier1:"string1"\n     \n\n'
            "identifier2 : \n \n123  \n  identifier3 :'string'\n"
            "identifiER_4 : 1.1e+2 ID5:-0.23 ID6:'aaaa\\'bbbb'\n"
            'ID7 : "aa\\"bb"\n\n\n\n ID8: {A:inf B:-inf C:true D:false}\n'
            "ID9: 22 ID10: -111111111111111111 ID11: -22\n"
            "ID12: 2222222222222222222 ID13: 1.23456f ID14: 1.2e+2f "
            "false_bool:  0 true_BOOL:t \n true_bool1:  1 false_BOOL1:f "
        )
        tokenizer = text_format._Tokenizer(text)
        methods = [
            (tokenizer.ConsumeIdentifier, "identifier1"),
            ":",
            (tokenizer.ConsumeString, "string1"),
            (tokenizer.ConsumeIdentifier, "identifier2"),
            ":",
            (tokenizer.ConsumeInt32, 123),
            (tokenizer.ConsumeIdentifier, "identifier3"),
            ":",
            (tokenizer.ConsumeString, "string"),
            (tokenizer.ConsumeIdentifier, "identifiER_4"),
            ":",
            (tokenizer.ConsumeFloat, 1.1e2),
            (tokenizer.ConsumeIdentifier, "ID5"),
            ":",
            (tokenizer.ConsumeFloat, -0.23),
            (tokenizer.ConsumeIdentifier, "ID6"),
            ":",
            (tokenizer.ConsumeString, "aaaa'bbbb"),
            (tokenizer.ConsumeIdentifier, "ID7"),
            ":",
            (tokenizer.ConsumeString, 'aa"bb'),
            (tokenizer.ConsumeIdentifier, "ID8"),
            ":",
            "{",
            (tokenizer.ConsumeIdentifier, "A"),
            ":",
            (tokenizer.ConsumeFloat, float("inf")),
            (tokenizer.ConsumeIdentifier, "B"),
            ":",
            (tokenizer.ConsumeFloat, -float("inf")),
            (tokenizer.ConsumeIdentifier, "C"),
            ":",
            (tokenizer.ConsumeBool, True),
            (tokenizer.ConsumeIdentifier, "D"),
            ":",
            (tokenizer.ConsumeBool, False),
            "}",
            (tokenizer.ConsumeIdentifier, "ID9"),
            ":",
            (tokenizer.ConsumeUint32, 22),
            (tokenizer.ConsumeIdentifier, "ID10"),
            ":",
            (tokenizer.ConsumeInt64, -111111111111111111),
            (tokenizer.ConsumeIdentifier, "ID11"),
            ":",
            (tokenizer.ConsumeInt32, -22),
            (tokenizer.ConsumeIdentifier, "ID12"),
            ":",
            (tokenizer.ConsumeUint64, 2222222222222222222),
            (tokenizer.ConsumeIdentifier, "ID13"),
            ":",
            (tokenizer.ConsumeFloat, 1.23456),
            (tokenizer.ConsumeIdentifier, "ID14"),
            ":",
            (tokenizer.ConsumeFloat, 1.2e2),
            (tokenizer.ConsumeIdentifier, "false_bool"),
            ":",
            (tokenizer.ConsumeBool, False),
            (tokenizer.ConsumeIdentifier, "true_BOOL"),
            ":",
            (tokenizer.ConsumeBool, True),
            (tokenizer.ConsumeIdentifier, "true_bool1"),
            ":",
            (tokenizer.ConsumeBool, True),
            (tokenizer.ConsumeIdentifier, "false_BOOL1"),
            ":",
            (tokenizer.ConsumeBool, False),
        ]

        # Drive the tokenizer to exhaustion; string entries are raw-token
        # comparisons, tuple entries are (bound method, expected result).
        i = 0
        while not tokenizer.AtEnd():
            m = methods[i]
            if type(m) == str:
                token = tokenizer.token
                self.assertEqual(token, m)
                tokenizer.NextToken()
            else:
                self.assertEqual(m[1], m[0]())
            i += 1
 def testConsumeBool(self):
     """Non-boolean input makes ConsumeBool raise a ParseError."""
     lines = "not-a-bool".splitlines()
     tokenizer = text_format._Tokenizer(lines)
     self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool)
 def testConsumeBool(self):
     """ConsumeBool must raise on input that does not spell a boolean."""
     # b() converts the literal for the tokenizer, as in the original.
     data = b('not-a-bool')
     self.assertRaises(text_format.ParseError,
                       text_format._Tokenizer(data).ConsumeBool)