def test_sequence(self):
    result = [token for token in tokenize(StringIO('123 "abc":{}'))]
    self.assertEqual(result, [(2, 123), (1, 'abc'), (0, ':'), (0, '{'), (0, '}')])

    # Borrowed from http://en.wikipedia.org/wiki/JSON
    big_file = """{
        "firstName": "John",
        "lastName": "Smith",
        "isAlive": true,
        "isDead": false,
        "age": 25,
        "height_cm": 167.6,
        "address": {
            "streetAddress": "21 2nd Street",
            "city": "New York",
            "state": "NY",
            "postalCode": "10021-3100"
        },
        "phoneNumbers": [
            {
                "type": "home",
                "number": "212 555-1234"
            },
            {
                "type": "office",
                "number": "646 555-4567"
            }
        ],
        "children": [],
        "spouse": null
    }"""
    result = [token for token in tokenize(StringIO(big_file))]
    # Each token is a (token type, value) pair: 0 = operator, 1 = string,
    # 2 = number, 3 = boolean, 4 = null.
    expected = [(0, '{'), (1, 'firstName'), (0, ':'), (1, 'John'), (0, ','), (1, 'lastName'), (0, ':'),
                (1, 'Smith'), (0, ','), (1, 'isAlive'), (0, ':'), (3, True), (0, ','), (1, 'isDead'),
                (0, ':'), (3, False), (0, ','), (1, 'age'), (0, ':'), (2, 25), (0, ','), (1, 'height_cm'),
                (0, ':'), (2, 167.6), (0, ','), (1, 'address'), (0, ':'), (0, '{'), (1, 'streetAddress'),
                (0, ':'), (1, '21 2nd Street'), (0, ','), (1, 'city'), (0, ':'), (1, 'New York'), (0, ','),
                (1, 'state'), (0, ':'), (1, 'NY'), (0, ','), (1, 'postalCode'), (0, ':'), (1, '10021-3100'),
                (0, '}'), (0, ','), (1, 'phoneNumbers'), (0, ':'), (0, '['), (0, '{'), (1, 'type'),
                (0, ':'), (1, 'home'), (0, ','), (1, 'number'), (0, ':'), (1, '212 555-1234'), (0, '}'),
                (0, ','), (0, '{'), (1, 'type'), (0, ':'), (1, 'office'), (0, ','), (1, 'number'),
                (0, ':'), (1, '646 555-4567'), (0, '}'), (0, ']'), (0, ','), (1, 'children'), (0, ':'),
                (0, '['), (0, ']'), (0, ','), (1, 'spouse'), (0, ':'), (4, None), (0, '}')]
    self.assertListEqual(result, expected)

    # The same document with all insignificant whitespace removed must
    # tokenize identically.
    big_file_no_space = '{"firstName":"John","lastName":"Smith","isAlive":true,"isDead":false,"age":25,"height_cm' \
                        '":167.6,"address":{"streetAddress":"21 2nd Street","city":"New York","state":"NY","posta' \
                        'lCode":"10021-3100"},"phoneNumbers":[{"type":"home","number":"212 555-1234"},{"type":"of' \
                        'fice","number":"646 555-4567"}],"children":[],"spouse":null}'
    result = [token for token in tokenize(StringIO(big_file_no_space))]
    self.assertListEqual(result, expected)

    result = [token for token in tokenize(StringIO("854.6,123"))]
    self.assertEqual(result, [(2, 854.6), (0, ','), (2, 123)])

    # Tokens that run together with no separating structure are malformed JSON.
    self.assertRaises(ValueError, self.tokenize_sequence, '123"text"')
    self.assertRaises(ValueError, self.tokenize_sequence, '23.9e10true')
    self.assertRaises(ValueError, self.tokenize_sequence, '"test"56')
def load(fp, persistent=False):
    # Read the first token to determine the type of the top-level JSON
    # value, then let the factory build the appropriate streaming object
    # around the remaining token stream.
    token_stream = tokenize(fp)
    _, token = next(token_stream)
    return StreamingJSONBase.factory(token, token_stream, persistent)
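# A minimal usage sketch, not part of the library: load() accepts any
# file-like object, so StringIO works for in-memory JSON. persistent=True
# is assumed to keep parsed values around so keys can be read in any
# order; the demo function name is illustrative.
def _example_load_usage():
    from io import StringIO
    data = load(StringIO('{"firstName": "John", "age": 25}'), persistent=True)
    assert data["firstName"] == "John"  # parsed lazily from the token stream
    assert data["age"] == 25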
def visit(fp, visitor):
    token_stream = tokenize(fp)
    _, token = next(token_stream)
    # Transient mode is sufficient here: the document is walked exactly
    # once, with the visitor invoked as each value is reached.
    obj = StreamingJSONBase.factory(token, token_stream, persistent=False)
    _visit(obj, visitor, ())
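# A minimal usage sketch, not part of the library: visit() seeds _visit()
# with an empty tuple, which suggests the visitor is called as
# visitor(value, path), where path is the tuple of keys/indices leading
# to each leaf value. That callback signature is an assumption.
def _example_visit_usage():
    from io import StringIO

    def visitor(value, path):  # assumed signature, see note above
        print(path, "->", value)

    visit(StringIO('{"a": [1, 2], "b": null}'), visitor)
    # Expected output under the assumed signature:
    #   ('a', 0) -> 1
    #   ('a', 1) -> 2
    #   ('b',) -> None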
def assertStringEquals(self, expected, actual):
    token_list = [token for token in tokenize(StringIO('"{}"'.format(actual)))]
    self.assertEqual(1, len(token_list))
    ttype, token = token_list[0]
    self.assertEqual(expected, token)
    self.assertEqual(ttype, TokenType.STRING)
def tokenize_sequence(self, string):
    return [token for token in tokenize(StringIO(string))]