Beispiel #1
0
 def testTokenizeStringWithWhitespace(self):
     """A quoted string keeps its interior whitespace as one token."""
     result = list(vdsmapi.tokenize("'s1 s2'"))
     self.assertEqual(result, ['s1 s2'])
Beispiel #2
0
 def testTokenizeString(self):
     """A single quoted word yields exactly one string token."""
     result = list(vdsmapi.tokenize("'string'"))
     self.assertEqual(result, ['string'])
Beispiel #3
0
 def testTokenizeString(self):
     """A single quoted word yields exactly one string token."""
     self.assertEqual(list(vdsmapi.tokenize("'string'")), ['string'])
Beispiel #4
0
 def testTokenizeSkipWhitespaceBetweenTokens(self):
     """Whitespace and newlines between tokens are discarded."""
     source = " { 'a': \n 'b' , 'c'\n\n : 'd' } \n"
     expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
     self.assertEqual(list(vdsmapi.tokenize(source)), expected)
Beispiel #5
0
 def testTokenizeRaiseOnNull(self):
     """The bare literal "null" is rejected with ValueError."""
     gen = vdsmapi.tokenize("null")
     self.assertRaises(ValueError, list, gen)
Beispiel #6
0
 def testTokenizeArrayEmpty(self):
     """An empty array produces only its two bracket tokens."""
     result = list(vdsmapi.tokenize("[]"))
     self.assertEqual(result, ['[', ']'])
Beispiel #7
0
 def testTokenizeObjectEmpty(self):
     """An empty object produces only its two brace tokens."""
     result = list(vdsmapi.tokenize("{}"))
     self.assertEqual(result, ['{', '}'])
Beispiel #8
0
 def testTokenizeObjectEmpty(self):
     """An empty object produces only its two brace tokens."""
     self.assertEqual(list(vdsmapi.tokenize("{}")), ['{', '}'])
Beispiel #9
0
 def testTokenizeMixed(self):
     """Nested objects and arrays flatten into one token stream."""
     expected = ['{', 'a', ':', '{', 'b', ':', '[', 'c', ']', '}', '}']
     self.assertEqual(list(vdsmapi.tokenize("{'a': {'b': ['c']}}")), expected)
Beispiel #10
0
 def testTokenizeArrayEmpty(self):
     """An empty array produces only its two bracket tokens."""
     self.assertEqual(list(vdsmapi.tokenize("[]")), ['[', ']'])
Beispiel #11
0
 def testTokenizeObject(self):
     """A flat object yields braces, keys, colons, values and commas."""
     expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
     result = list(vdsmapi.tokenize("{'a': 'b', 'c': 'd'}"))
     self.assertEqual(result, expected)
Beispiel #12
0
 def testTokenizeArray(self):
     """An array yields brackets, items and the separating comma."""
     result = list(vdsmapi.tokenize("['i1', 'i2']"))
     self.assertEqual(result, ['[', 'i1', ',', 'i2', ']'])
Beispiel #13
0
 def testTokenizeStringEmpty(self):
     """An empty quoted string yields one empty-string token."""
     result = list(vdsmapi.tokenize("''"))
     self.assertEqual(result, [''])
Beispiel #14
0
 def testTokenizeStringWithWhitespace(self):
     """A quoted string keeps its interior whitespace as one token."""
     self.assertEqual(list(vdsmapi.tokenize("'s1 s2'")), ['s1 s2'])
Beispiel #15
0
 def testTokenizeStringEmpty(self):
     """An empty quoted string yields one empty-string token."""
     self.assertEqual(list(vdsmapi.tokenize("''")), [''])
Beispiel #16
0
 def testTokenizeSkipWhitespaceBetweenTokens(self):
     """Whitespace and newlines between tokens are discarded."""
     result = list(vdsmapi.tokenize(" { 'a': \n 'b' , 'c'\n\n : 'd' } \n"))
     self.assertEqual(result, ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}'])
Beispiel #17
0
 def testTokenizeArray(self):
     """An array yields brackets, items and the separating comma."""
     expected = ['[', 'i1', ',', 'i2', ']']
     self.assertEqual(list(vdsmapi.tokenize("['i1', 'i2']")), expected)
Beispiel #18
0
 def testTokenizeRaiseOnFalse(self):
     """The bare literal "false" is rejected with ValueError."""
     gen = vdsmapi.tokenize("false")
     self.assertRaises(ValueError, list, gen)
Beispiel #19
0
 def testTokenizeObject(self):
     """A flat object yields braces, keys, colons, values and commas."""
     result = list(vdsmapi.tokenize("{'a': 'b', 'c': 'd'}"))
     self.assertEqual(result,
                      ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}'])
Beispiel #20
0
 def testTokenizeRaiseOnNull(self):
     """The bare literal "null" is rejected with ValueError."""
     with self.assertRaises(ValueError):
         list(vdsmapi.tokenize("null"))
Beispiel #21
0
 def testTokenizeMixed(self):
     """Nested objects and arrays flatten into one token stream."""
     result = list(vdsmapi.tokenize("{'a': {'b': ['c']}}"))
     self.assertEqual(
         result, ['{', 'a', ':', '{', 'b', ':', '[', 'c', ']', '}', '}'])
Beispiel #22
0
 def testTokenizeRaiseOnInvalidData(self):
     """An unquoted bare word inside an object raises ValueError."""
     gen = vdsmapi.tokenize("{'a': invalid, 'b': 'c'}")
     self.assertRaises(ValueError, list, gen)
Beispiel #23
0
 def testTokenizeRaiseOnFalse(self):
     """The bare literal "false" is rejected with ValueError."""
     with self.assertRaises(ValueError):
         list(vdsmapi.tokenize("false"))
Beispiel #24
0
 def testTokenizeEmpty(self):
     """Empty input produces no tokens at all."""
     result = list(vdsmapi.tokenize(''))
     self.assertEqual(result, [])
Beispiel #25
0
 def testTokenizeRaiseOnInvalidData(self):
     """An unquoted bare word inside an object raises ValueError."""
     with self.assertRaises(ValueError):
         list(vdsmapi.tokenize("{'a': invalid, 'b': 'c'}"))
Beispiel #26
0
 def testTokenizeEmpty(self):
     """Empty input produces no tokens at all."""
     self.assertEqual(list(vdsmapi.tokenize('')), [])