def decode_hash(self, hash, cache, as_map_key):
    """Decode a map-shaped node into a transit frozendict.

    A single-entry map may be the encoding of a tagged value: if its lone
    key decodes to a Tag, dispatch to the matching tag handler instead of
    building a dict.
    """
    if len(hash) == 1:
        # Possible tagged value -- decode the lone key first, as a map key.
        raw_key = next(iter(hash))
        raw_value = hash[raw_key]
        decoded_key = self._decode(raw_key, cache, True)
        if isinstance(decoded_key, Tag):
            return self.decode_tag(decoded_key.tag,
                                   self._decode(raw_value, cache, as_map_key))
        return transit_types.frozendict(
            {decoded_key: self._decode(raw_value, cache, False)})
    # Multi-entry map. Each key is decoded strictly before its value so the
    # rolling cache sees entries in writer order -- implicit ordering (e.g.
    # inside a dict display/comprehension) has broken in corner cases for
    # both the json and msgpack readers, hence the explicit statements.
    decoded = {}
    for raw_k, raw_v in hash.items():
        decoded_k = self._decode(raw_k, cache, True)
        decoded_v = self._decode(raw_v, cache, False)
        decoded[decoded_k] = decoded_v
    return transit_types.frozendict(decoded)
def decode_hash(self, hash, cache, as_map_key):
    """Decode a map-shaped node into a transit frozendict.

    Single-entry maps get special treatment: their lone key may decode to
    a Tag, in which case the value is handed to the tag's handler.
    """
    if len(hash) != 1:
        # Keys must be decoded before their values -- explicit statement
        # order keeps the read cache consistent for both the json and
        # msgpack wire formats (implicit ordering has broken corner cases).
        out = {}
        for enc_key, enc_val in hash.items():
            dec_key = self._decode(enc_key, cache, True)
            dec_val = self._decode(enc_val, cache, False)
            out[dec_key] = dec_val
        return transit_types.frozendict(out)
    # Exactly one entry: unpack it and check for a tagged value.
    (enc_key,) = hash
    enc_val = hash[enc_key]
    dec_key = self._decode(enc_key, cache, True)
    if isinstance(dec_key, Tag):
        return self.decode_tag(dec_key.tag,
                               self._decode(enc_val, cache, as_map_key))
    return transit_types.frozendict(
        {dec_key: self._decode(enc_val, cache, False)})
def decode_list(self, node, cache, as_map_key):
    """Special case decodes map-as-array; otherwise decode element-wise.

    Arrays whose first element is the MAP_AS_ARR marker are really maps
    (["^ ", k1, v1, ...]); arrays whose first element decodes to a Tag are
    tagged values. Anything else becomes a tuple of decoded elements.
    Arguments follow the same convention as the top-level 'decode'
    function.
    """
    if node:
        if node[0] == MAP_AS_ARR:
            # Each key must be decoded before its value for the read
            # cache to stay in sync with the writer.
            result = {}
            for enc_k, enc_v in pairs(node[1:]):
                dec_k = self._decode(enc_k, cache, True)
                dec_v = self._decode(enc_v, cache, as_map_key)
                result[dec_k] = dec_v
            return transit_types.frozendict(result)
        head = self._decode(node[0], cache, as_map_key)
        if isinstance(head, Tag):
            return self.decode_tag(head.tag,
                                   self._decode(node[1], cache, as_map_key))
    # NOTE: on this path node[0] is decoded a second time, matching the
    # original behavior (cache state depends on decode calls).
    return tuple(self._decode(item, cache, as_map_key) for item in node)
def decode_list(self, node, cache, as_map_key):
    """Special case decodes map-as-array; otherwise lists decode to tuples.

    Arguments follow the same convention as the top-level 'decode'
    function.
    """
    if node:
        if node[0] == MAP_AS_ARR:
            # ["^ ", k1, v1, ...]: the cache requires every key to be
            # decoded before the value that follows it.
            acc = {}
            for raw_key, raw_val in pairs(node[1:]):
                key = self._decode(raw_key, cache, True)
                val = self._decode(raw_val, cache, as_map_key)
                acc[key] = val
            return transit_types.frozendict(acc)
        first = self._decode(node[0], cache, as_map_key)
        if isinstance(first, Tag):
            return self.decode_tag(first.tag,
                                   self._decode(node[1], cache, as_map_key))
    # Plain array (node[0] is deliberately decoded again here, exactly as
    # the original did -- decode calls feed the cache).
    return tuple(self._decode(element, cache, as_map_key)
                 for element in node)
def hash_of_size(n):
    """Build a frozendict pairing generated symbol keys with ascending ints.

    Uses ``array_of_symbools(n)`` for the keys; values count up from 0.
    """
    keys = array_of_symbools(n)
    indices = range(n + 1)  # izip stops at the shorter sequence
    return frozendict(izip(keys, indices))
def test_roundtrip(self):
    # Write the value as transit-json, read it back, and require equality.
    in_data = value
    io = StringIO()
    w = Writer(io, "json")
    w.write(in_data)
    r = Reader("json")
    out_data = r.read(StringIO(io.getvalue()))
    self.assertEqual(in_data, out_data)

# Register the generated TestCase under a discoverable name.
globals()["test_" + name + "_json"] = RegressionTest

regression("cache_consistency",
           ({"Problem?": true}, Symbol("Here"), Symbol("Here")))
regression("one_pair_frozendict", frozendict({"a": 1}))
regression("json_int_max", (2 ** 53 + 100, 2 ** 63 + 100))
regression("newline_in_string", "a\nb")
regression("big_decimal", Decimal("190234710272.2394720347203642836434"))

def json_int_boundary(value, expected_type):
    class JsonIntBoundaryTest(unittest.TestCase):
        def test_max_is_number(self):
            # Ints at the JSON boundary must serialize to the expected
            # native JSON number type under both json protocols.
            for protocol in ("json", "json_verbose"):
                io = StringIO()
                w = Writer(io, protocol)
                w.write([value])
                actual_type = type(json.loads(io.getvalue())[0])
                self.assertEqual(expected_type, actual_type)
UUID('d1dc64fa-da79-444b-9fa4-d4412f427289'), UUID('501a978e-3a3e-4060-b3be-1cf2bd4b1a38'), UUID('b3ba141a-a776-48e4-9fae-a28ea8571f58')) URIS = (URI(u'http://example.com'), URI(u'ftp://example.com'), URI(u'file:///path/to/file.txt'), URI(u'http://www.詹姆斯.com/')) DATES = tuple( map(lambda x: datetime.fromtimestamp(x / 1000.0, tz=dateutil.tz.tzutc()), [-6106017600000, 0, 946728000000, 1396909037000])) SET_SIMPLE = frozenset(ARRAY_SIMPLE) SET_MIXED = frozenset(ARRAY_MIXED) SET_NESTED = frozenset([SET_SIMPLE, SET_MIXED]) MAP_SIMPLE = frozendict({Keyword("a"): 1, Keyword("b"): 2, Keyword("c"): 3}) MAP_MIXED = frozendict({ Keyword("a"): 1, Keyword("b"): u"a string", Keyword("c"): true }) MAP_NESTED = frozendict({ Keyword("simple"): MAP_SIMPLE, Keyword("mixed"): MAP_MIXED }) exemplar("nil", None) exemplar("true", true) exemplar("false", false)
def from_rep(cmap):
    """Build a frozendict from a cmap rep (a flat k, v, k, v... sequence)."""
    entries = pairs(cmap)
    return transit_types.frozendict(entries)
def hash_of_size(n):
    """Return a frozendict mapping generated symbol keys to ascending ints.

    Keys come from ``array_of_symbools(n)``; values count up from 0
    (izip stops at the shorter of the two sequences).
    """
    symbols = array_of_symbools(n)
    return frozendict(izip(symbols, range(0, n + 1)))
def test_roundtrip(self):
    # Write the value as transit-json, read it back, and require equality.
    in_data = value
    io = StringIO()
    w = Writer(io, "json")
    w.write(in_data)
    r = Reader("json")
    out_data = r.read(StringIO(io.getvalue()))
    self.assertEqual(in_data, out_data)

# Register the generated TestCase under a discoverable name.
globals()["test_" + name + "_json"] = RegressionTest

regression("cache_consistency",
           ({"Problem?": true}, Symbol("Here"), Symbol("Here")))
regression("one_pair_frozendict", frozendict({"a": 1}))
regression("json_int_max", (2 ** 53 + 100, 2 ** 63 + 100))
regression("newline_in_string", "a\nb")
regression("big_decimal", Decimal("190234710272.2394720347203642836434"))
# BUGFIX: frozenset(frozendict(...)) iterates the dict and yields only its
# KEYS, producing frozenset({"test"}) -- no dict in any set.  Wrap the
# frozendict in a list so the set actually contains the dict, as the test
# name "dict_in_set" requires.
regression("dict_in_set", frozenset([frozendict({"test": "case"})]))

def json_verbose_cache_bug():
    class JsonVerboseCacheBug(RegressionBaseTest):
        """Can't rely on roundtrip behavior to test this bug, have to
        actually verify that both keys are written for json_verbose
        behavior to be correct."""
        def test_key_not_cached(self):
            io = StringIO()
            w = Writer(io, "json_verbose")
            w.write([{'myKey1': 42}, {'myKey1': 42}])
URIS = (URI(u'http://example.com'),
        URI(u'ftp://example.com'),
        URI(u'file:///path/to/file.txt'),
        URI(u'http://www.詹姆斯.com/'))

# Millisecond timestamps converted to timezone-aware UTC datetimes.
DATES = tuple(
    map(lambda x: datetime.fromtimestamp(x / 1000.0, tz=dateutil.tz.tzutc()),
        [-6106017600000, 0, 946728000000, 1396909037000]))

SET_SIMPLE = frozenset(ARRAY_SIMPLE)
SET_MIXED = frozenset(ARRAY_MIXED)
SET_NESTED = frozenset([SET_SIMPLE, SET_MIXED])

MAP_SIMPLE = frozendict({Keyword("a"): 1, Keyword("b"): 2, Keyword("c"): 3})
MAP_MIXED = frozendict({Keyword("a"): 1,
                        Keyword("b"): u"a string",
                        Keyword("c"): true})
MAP_NESTED = frozendict({Keyword("simple"): MAP_SIMPLE,
                         Keyword("mixed"): MAP_MIXED})

exemplar("nil", None)
exemplar("true", true)
exemplar("false", false)
exemplar("zero", 0)
exemplar("one", 1)
exemplar("one_string", "hello")