def testTokenizeStringWithWhitespace(self):
    # A quoted string containing internal spaces must come back as a
    # single token, spaces preserved.
    result = list(vdsmapi.tokenize("'s1 s2'"))
    self.assertEqual(result, ['s1 s2'])
def testTokenizeString(self):
    # A simple quoted string yields exactly one token, quotes stripped.
    result = list(vdsmapi.tokenize("'string'"))
    self.assertEqual(result, ['string'])
def testTokenizeSkipWhitespaceBetweenTokens(self):
    # Spaces and newlines between tokens are separators only; the
    # resulting token stream carries no whitespace tokens.
    result = list(vdsmapi.tokenize(" { 'a': \n 'b' , 'c'\n\n : 'd' } \n"))
    expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
    self.assertEqual(result, expected)
def testTokenizeRaiseOnNull(self):
    # "null" is not accepted by the tokenizer. tokenize() is lazy, so
    # the ValueError only surfaces when the generator is consumed —
    # hence list is the callable handed to assertRaises.
    gen = vdsmapi.tokenize("null")
    self.assertRaises(ValueError, list, gen)
def testTokenizeArrayEmpty(self):
    # An empty array tokenizes to just its two delimiters.
    result = list(vdsmapi.tokenize("[]"))
    self.assertEqual(result, ['[', ']'])
def testTokenizeObjectEmpty(self):
    # An empty object tokenizes to just its two delimiters.
    result = list(vdsmapi.tokenize("{}"))
    self.assertEqual(result, ['{', '}'])
def testTokenizeMixed(self):
    # Nested objects and arrays are flattened into one token stream,
    # depth-first, left to right.
    result = list(vdsmapi.tokenize("{'a': {'b': ['c']}}"))
    expected = ['{', 'a', ':', '{', 'b', ':', '[', 'c', ']', '}', '}']
    self.assertEqual(result, expected)
def testTokenizeObject(self):
    # A flat object yields delimiter, key, colon, value tokens with a
    # comma token between pairs.
    result = list(vdsmapi.tokenize("{'a': 'b', 'c': 'd'}"))
    expected = ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}']
    self.assertEqual(result, expected)
def testTokenizeArray(self):
    # Array items are separated by an explicit comma token.
    result = list(vdsmapi.tokenize("['i1', 'i2']"))
    self.assertEqual(result, ['[', 'i1', ',', 'i2', ']'])
def testTokenizeStringEmpty(self):
    # An empty quoted string is still a token: the empty string.
    result = list(vdsmapi.tokenize("''"))
    self.assertEqual(result, [''])
def testTokenizeRaiseOnFalse(self):
    # "false" is rejected. tokenize() is lazy, so the ValueError is
    # raised while consuming the generator via list().
    gen = vdsmapi.tokenize("false")
    self.assertRaises(ValueError, list, gen)
def testTokenizeNested(self):
    """Nested objects/arrays flatten into one depth-first token stream.

    NOTE(review): renamed from testTokenizeMixed — the class defined
    two methods with that name, so this later definition silently
    shadowed the earlier one and only one of the two duplicate tests
    was ever collected by the unittest loader. A distinct name makes
    both run.
    """
    tokens = list(vdsmapi.tokenize("{'a': {'b': ['c']}}"))
    self.assertEqual(
        tokens, ['{', 'a', ':', '{', 'b', ':', '[', 'c', ']', '}', '}'])
def testTokenizeRaiseOnInvalidData(self):
    # An unquoted bareword inside an otherwise valid object makes the
    # tokenizer fail once the generator reaches it.
    gen = vdsmapi.tokenize("{'a': invalid, 'b': 'c'}")
    self.assertRaises(ValueError, list, gen)
def testTokenizeEmpty(self):
    # Empty input produces an empty token stream, not an error.
    result = list(vdsmapi.tokenize(''))
    self.assertEqual(result, [])