Пример #1
0
 def testTokenizeStringWithWhitespace(self):
     """Whitespace inside a quoted string stays part of one token."""
     expected = ['s1 s2']
     self.assertEqual(list(vdsmapi.tokenize("'s1 s2'")), expected)
Пример #2
0
 def testTokenizeString(self):
     """A single quoted word yields exactly one unquoted token."""
     expected = ['string']
     self.assertEqual(list(vdsmapi.tokenize("'string'")), expected)
Пример #3
0
 def testTokenizeString(self):
     """A single quoted word yields exactly one unquoted token."""
     result = list(vdsmapi.tokenize("'string'"))
     self.assertEqual(result, ['string'])
Пример #4
0
 def testTokenizeSkipWhitespaceBetweenTokens(self):
     """Spaces and newlines between tokens are skipped, not emitted."""
     result = list(vdsmapi.tokenize(" { 'a': \n 'b' , 'c'\n\n : 'd' } \n"))
     expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
     self.assertEqual(result, expected)
Пример #5
0
 def testTokenizeRaiseOnNull(self):
     """The bare word ``null`` is rejected when the generator is consumed."""
     self.assertRaises(ValueError, list, vdsmapi.tokenize("null"))
Пример #6
0
 def testTokenizeArrayEmpty(self):
     """An empty array produces just the bracket tokens."""
     expected = ['[', ']']
     self.assertEqual(list(vdsmapi.tokenize("[]")), expected)
Пример #7
0
 def testTokenizeObjectEmpty(self):
     """An empty object produces just the brace tokens."""
     expected = ['{', '}']
     self.assertEqual(list(vdsmapi.tokenize("{}")), expected)
Пример #8
0
 def testTokenizeObjectEmpty(self):
     """An empty object produces just the brace tokens."""
     result = list(vdsmapi.tokenize("{}"))
     self.assertEqual(result, ['{', '}'])
Пример #9
0
 def testTokenizeMixed(self):
     """Nested objects and arrays tokenize in document order."""
     result = list(vdsmapi.tokenize("{'a': {'b': ['c']}}"))
     expected = ['{', 'a', ':', '{', 'b', ':', '[', 'c', ']', '}', '}']
     self.assertEqual(result, expected)
Пример #10
0
 def testTokenizeArrayEmpty(self):
     """An empty array produces just the bracket tokens."""
     result = list(vdsmapi.tokenize("[]"))
     self.assertEqual(result, ['[', ']'])
Пример #11
0
 def testTokenizeObject(self):
     """A two-entry object yields braces, keys, colons, values and a comma."""
     expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
     self.assertEqual(list(vdsmapi.tokenize("{'a': 'b', 'c': 'd'}")), expected)
Пример #12
0
 def testTokenizeArray(self):
     """A two-item array yields brackets, items and a separating comma."""
     expected = ['[', 'i1', ',', 'i2', ']']
     self.assertEqual(list(vdsmapi.tokenize("['i1', 'i2']")), expected)
Пример #13
0
 def testTokenizeStringEmpty(self):
     """An empty quoted string yields one empty-string token."""
     expected = ['']
     self.assertEqual(list(vdsmapi.tokenize("''")), expected)
Пример #14
0
 def testTokenizeStringWithWhitespace(self):
     """Whitespace inside a quoted string stays part of one token."""
     result = list(vdsmapi.tokenize("'s1 s2'"))
     self.assertEqual(result, ['s1 s2'])
Пример #15
0
 def testTokenizeStringEmpty(self):
     """An empty quoted string yields one empty-string token."""
     result = list(vdsmapi.tokenize("''"))
     self.assertEqual(result, [''])
Пример #16
0
 def testTokenizeSkipWhitespaceBetweenTokens(self):
     """Spaces and newlines between tokens are skipped, not emitted."""
     expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
     result = list(vdsmapi.tokenize(" { 'a': \n 'b' , 'c'\n\n : 'd' } \n"))
     self.assertEqual(result, expected)
Пример #17
0
 def testTokenizeArray(self):
     """A two-item array yields brackets, items and a separating comma."""
     result = list(vdsmapi.tokenize("['i1', 'i2']"))
     self.assertEqual(result, ['[', 'i1', ',', 'i2', ']'])
Пример #18
0
 def testTokenizeRaiseOnFalse(self):
     """The bare word ``false`` is rejected when the generator is consumed."""
     self.assertRaises(ValueError, list, vdsmapi.tokenize("false"))
Пример #19
0
 def testTokenizeObject(self):
     """A two-entry object yields braces, keys, colons, values and a comma."""
     result = list(vdsmapi.tokenize("{'a': 'b', 'c': 'd'}"))
     expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
     self.assertEqual(result, expected)
Пример #20
0
 def testTokenizeRaiseOnNull(self):
     """The bare word ``null`` is rejected when the generator is consumed."""
     gen = vdsmapi.tokenize("null")
     self.assertRaises(ValueError, list, gen)
Пример #21
0
 def testTokenizeMixed(self):
     """Nested objects and arrays tokenize in document order."""
     expected = ['{', 'a', ':', '{', 'b', ':', '[', 'c', ']', '}', '}']
     self.assertEqual(list(vdsmapi.tokenize("{'a': {'b': ['c']}}")), expected)
Пример #22
0
 def testTokenizeRaiseOnInvalidData(self):
     """An unquoted bareword inside an object is rejected on consumption."""
     self.assertRaises(ValueError, list,
                       vdsmapi.tokenize("{'a': invalid, 'b': 'c'}"))
Пример #23
0
 def testTokenizeRaiseOnFalse(self):
     """The bare word ``false`` is rejected when the generator is consumed."""
     gen = vdsmapi.tokenize("false")
     self.assertRaises(ValueError, list, gen)
Пример #24
0
 def testTokenizeEmpty(self):
     """Empty input produces no tokens at all."""
     self.assertEqual(list(vdsmapi.tokenize('')), [])
Пример #25
0
 def testTokenizeRaiseOnInvalidData(self):
     """An unquoted bareword inside an object is rejected on consumption."""
     gen = vdsmapi.tokenize("{'a': invalid, 'b': 'c'}")
     self.assertRaises(ValueError, list, gen)
Пример #26
0
 def testTokenizeEmpty(self):
     """Empty input produces no tokens at all."""
     result = list(vdsmapi.tokenize(''))
     self.assertEqual(result, [])