示例#1
0
 def testTokenizeStringWithWhitespace(self):
     """Whitespace inside a quoted string is kept as part of the token."""
     expected = ['s1 s2']
     self.assertEqual(list(vdsmapi.tokenize("'s1 s2'")), expected)
示例#2
0
 def testTokenizeString(self):
     """A quoted string yields a single token without the quotes."""
     expected = ['string']
     self.assertEqual(list(vdsmapi.tokenize("'string'")), expected)
示例#3
0
 def testTokenizeString(self):
     """A quoted string yields a single token without the quotes."""
     expected = ['string']
     self.assertEqual(list(vdsmapi.tokenize("'string'")), expected)
示例#4
0
 def testTokenizeSkipWhitespaceBetweenTokens(self):
     """Spaces and newlines between tokens are discarded by the tokenizer."""
     data = " { 'a': \n 'b' , 'c'\n\n : 'd' } \n"
     expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
     self.assertEqual(list(vdsmapi.tokenize(data)), expected)
示例#5
0
 def testTokenizeRaiseOnNull(self):
     """Consuming a tokenizer fed the bare word null raises ValueError."""
     self.assertRaises(ValueError, list, vdsmapi.tokenize("null"))
示例#6
0
 def testTokenizeArrayEmpty(self):
     """An empty array produces only its two bracket tokens."""
     self.assertEqual(list(vdsmapi.tokenize("[]")), ['[', ']'])
示例#7
0
 def testTokenizeObjectEmpty(self):
     """An empty object produces only its two brace tokens."""
     self.assertEqual(list(vdsmapi.tokenize("{}")), ['{', '}'])
示例#8
0
 def testTokenizeObjectEmpty(self):
     """An empty object produces only its two brace tokens."""
     self.assertEqual(list(vdsmapi.tokenize("{}")), ['{', '}'])
示例#9
0
 def testTokenizeMixed(self):
     """Nested objects and arrays tokenize in document order."""
     expected = ['{', 'a', ':', '{', 'b', ':', '[', 'c', ']', '}', '}']
     self.assertEqual(list(vdsmapi.tokenize("{'a': {'b': ['c']}}")), expected)
示例#10
0
 def testTokenizeArrayEmpty(self):
     """An empty array produces only its two bracket tokens."""
     self.assertEqual(list(vdsmapi.tokenize("[]")), ['[', ']'])
示例#11
0
 def testTokenizeObject(self):
     """Braces, keys, values, colons and commas each become one token."""
     expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
     self.assertEqual(list(vdsmapi.tokenize("{'a': 'b', 'c': 'd'}")), expected)
示例#12
0
 def testTokenizeArray(self):
     """Brackets, items and the separating comma each become one token."""
     expected = ['[', 'i1', ',', 'i2', ']']
     self.assertEqual(list(vdsmapi.tokenize("['i1', 'i2']")), expected)
示例#13
0
 def testTokenizeStringEmpty(self):
     """An empty quoted string yields one empty-string token."""
     self.assertEqual(list(vdsmapi.tokenize("''")), [''])
示例#14
0
 def testTokenizeStringWithWhitespace(self):
     """Whitespace inside a quoted string is kept as part of the token."""
     expected = ['s1 s2']
     self.assertEqual(list(vdsmapi.tokenize("'s1 s2'")), expected)
示例#15
0
 def testTokenizeStringEmpty(self):
     """An empty quoted string yields one empty-string token."""
     self.assertEqual(list(vdsmapi.tokenize("''")), [''])
示例#16
0
 def testTokenizeSkipWhitespaceBetweenTokens(self):
     """Spaces and newlines between tokens are discarded by the tokenizer."""
     data = " { 'a': \n 'b' , 'c'\n\n : 'd' } \n"
     expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
     self.assertEqual(list(vdsmapi.tokenize(data)), expected)
示例#17
0
 def testTokenizeArray(self):
     """Brackets, items and the separating comma each become one token."""
     expected = ['[', 'i1', ',', 'i2', ']']
     self.assertEqual(list(vdsmapi.tokenize("['i1', 'i2']")), expected)
示例#18
0
 def testTokenizeRaiseOnFalse(self):
     """Consuming a tokenizer fed the bare word false raises ValueError."""
     self.assertRaises(ValueError, list, vdsmapi.tokenize("false"))
示例#19
0
 def testTokenizeObject(self):
     """Braces, keys, values, colons and commas each become one token."""
     expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
     self.assertEqual(list(vdsmapi.tokenize("{'a': 'b', 'c': 'd'}")), expected)
示例#20
0
 def testTokenizeRaiseOnNull(self):
     """Consuming a tokenizer fed the bare word null raises ValueError."""
     self.assertRaises(ValueError, list, vdsmapi.tokenize("null"))
示例#21
0
 def testTokenizeMixed(self):
     """Nested objects and arrays tokenize in document order."""
     expected = ['{', 'a', ':', '{', 'b', ':', '[', 'c', ']', '}', '}']
     self.assertEqual(list(vdsmapi.tokenize("{'a': {'b': ['c']}}")), expected)
示例#22
0
 def testTokenizeRaiseOnInvalidData(self):
     """An unquoted bare word inside an object raises ValueError."""
     bad = vdsmapi.tokenize("{'a': invalid, 'b': 'c'}")
     self.assertRaises(ValueError, list, bad)
示例#23
0
 def testTokenizeRaiseOnFalse(self):
     """Consuming a tokenizer fed the bare word false raises ValueError."""
     self.assertRaises(ValueError, list, vdsmapi.tokenize("false"))
示例#24
0
 def testTokenizeEmpty(self):
     """Empty input yields no tokens at all."""
     self.assertEqual(list(vdsmapi.tokenize('')), [])
示例#25
0
 def testTokenizeRaiseOnInvalidData(self):
     """An unquoted bare word inside an object raises ValueError."""
     bad = vdsmapi.tokenize("{'a': invalid, 'b': 'c'}")
     self.assertRaises(ValueError, list, bad)
示例#26
0
 def testTokenizeEmpty(self):
     """Empty input yields no tokens at all."""
     self.assertEqual(list(vdsmapi.tokenize('')), [])