def test_string_compression():
    json = dumps(ordered_map, compression=3)
    assert json[:2] == b'\x1f\x8b'
    data2 = loads(json, decompression=True)
    assert ordered_map == data2
    data3 = loads(json, decompression=None)
    assert ordered_map == data3

def test_order():
    json = dumps(ordered_map)
    data2 = loads(json, preserve_order=True)
    assert tuple(ordered_map.keys()) == tuple(data2.keys())
    reverse = OrderedDict(reversed(tuple(ordered_map.items())))
    json = dumps(reverse)
    data3 = loads(json, preserve_order=True)
    assert tuple(reverse.keys()) == tuple(data3.keys())

def with_nondict_hook():
    """ Add a custom hook, to test that all future hooks handle non-dicts. """
    # Prevent issue 26 from coming back.
    def test_hook(dct):
        if not isinstance(dct, dict):
            return
        return ValueError()
    loads('{"key": 42}', extra_obj_pairs_hooks=(test_hook,))

def test_cls_instance_default():
    json = dumps(cls_instance)
    back = loads(json)
    assert (cls_instance.s == back.s)
    assert (cls_instance.dct == dict(back.dct))
    json = dumps(cls_instance, primitives=True)
    back = loads(json)
    assert tuple(sorted(back.keys())) == ('dct', 's',)
    assert '7' in back['dct']

def test_set():
    setdata = [{'set': set((3, exp(1), (-5, +7), False))}]
    json = dumps(setdata)
    back = loads(json)
    assert isinstance(back[0]['set'], set)
    assert setdata == back
    json = dumps(setdata, primitives=True)
    back = loads(json)
    assert isinstance(back[0]['set'], list)
    assert setdata[0]['set'] == set(tuple(q) if isinstance(q, list) else q for q in back[0]['set'])

def test_str_unicode_bytes():
    text, pyrepr = u'{"mykey": "你好"}', {"mykey": u"你好"}
    assert loads(text) == pyrepr
    if is_py3:
        with raises(TypeError) as err:
            loads(text.encode('utf-8'))
        assert 'Cannot automatically encode' in str(err)
        assert loads(text.encode('utf-8'), conv_str_byte=True) == pyrepr
    else:
        assert loads('{"mykey": "nihao"}') == {'mykey': 'nihao'}

def test_cls_instance_inheritance():
    inst = SubClass()
    json = dumps(inst)
    assert '42' not in json
    back = loads(json)
    assert inst == back
    inst.set_attr()
    json = dumps(inst)
    assert '42' in json
    back = loads(json)
    assert inst == back

def test_special_nr_parsing():
    nr_li_json = '[1, 3.14]'
    res = loads(nr_li_json,
        parse_int=lambda s: int('7' + s),
        parse_float=lambda s: float('5' + s))
    assert res == [71, 53.14], 'Special integer and/or float parsing not working'
    nr_li_json = '[1, 3.14]'
    res = loads(nr_li_json, parse_int=Decimal, parse_float=Decimal)
    assert isinstance(res[0], Decimal)
    assert isinstance(res[1], Decimal)

def test_order():
    json = dumps(ordered_map)
    data2 = loads(json, preserve_order=True)
    assert tuple(ordered_map.keys()) == tuple(data2.keys())
    reverse = OrderedDict(reversed(tuple(ordered_map.items())))
    json = dumps(reverse)
    data3 = loads(json, preserve_order=True)
    assert tuple(reverse.keys()) == tuple(data3.keys())
    json = dumps(ordered_map)
    data4 = loads(json, preserve_order=False)
    assert not isinstance(data4, OrderedDict)

def test_compression_with_comments():
    if is_py3:
        test_json = bytes(test_json_with_comments, encoding=ENCODING)
    else:
        test_json = test_json_with_comments
    json = gzip_compress(test_json, compresslevel=9)
    ref = loads(test_json_without_comments)
    data2 = loads(json, decompression=True)
    assert ref == data2
    data3 = loads(json, decompression=None)
    assert ref == data3

def test_cls_instance_custom():
    json = dumps(cls_instance_custom)
    back = loads(json)
    assert (cls_instance_custom.relevant == back.relevant)
    assert (cls_instance_custom.irrelevant == 37)
    assert (back.irrelevant == 12)
    json = dumps(cls_instance_custom, primitives=True)
    back = loads(json)
    assert (cls_instance_custom.relevant == back['relevant'])
    assert (cls_instance_custom.irrelevant == 37)
    assert 'irrelevant' not in back

def test_naive_date_time():
    json = dumps(DTOBJ)
    back = loads(json)
    assert DTOBJ == back
    for orig, bck in zip(DTOBJ, back):
        assert orig == bck
        assert type(orig) == type(bck)
    txt = '{"__datetime__": null, "year": 1988, "month": 3, "day": 15, "hour": 8, "minute": 3, ' \
          '"second": 59, "microsecond": 7}'
    obj = loads(txt)
    assert obj == datetime(year=1988, month=3, day=15, hour=8, minute=3, second=59, microsecond=7)

def test_lambda_partial():
    """
    Test that a custom encoder/decoder works when wrapped in functools.partial,
    which caused problems before because inspect.getargspec does not support it.
    """
    obj = dict(alpha=37.42, beta=[1, 2, 4, 8, 16, 32])
    enc_dec_lambda = partial(lambda x, y: x, y=0)
    txt = dumps(obj, extra_obj_encoders=(enc_dec_lambda,))
    back = loads(txt, extra_obj_pairs_hooks=(enc_dec_lambda,))
    assert obj == back

    def enc_dec_fun(obj, primitives=False, another=True):
        return obj
    txt = dumps(obj, extra_obj_encoders=(partial(enc_dec_fun, another=True),))
    back = loads(txt, extra_obj_pairs_hooks=(partial(enc_dec_fun, another=True),))
    assert obj == back

def test_fraction():
    fractions = [Fraction(0), Fraction(1, 3), Fraction(-pi), Fraction('1/3'),
        Fraction('1/3') / Fraction('1/6'),
        Fraction('9999999999999999999999999999999999999999999999999999'),
        Fraction('1/12345678901234567890123456789'),]
    txt = dumps(fractions)
    res = loads(txt)
    for x, y in zip(fractions, res):
        assert isinstance(y, Fraction)
        assert x == y
        assert str(x) == str(y)
    txt = dumps(fractions, primitives=True)
    res = loads(txt)
    for x, y in zip(fractions, res):
        assert isinstance(y, float)
        assert abs(x - y) < 1e-10

def test_cls_lookup_map_success():
    class LocalCls(object):
        def __init__(self, val):
            self.value = val
    original = [LocalCls(37), LocalCls(42)]
    txt = dumps(original)
    back = loads(txt, cls_lookup_map=dict(LocalCls=LocalCls))
    assert len(original) == len(back) == 2
    assert original[0].value == back[0].value
    assert original[1].value == back[1].value
    back = loads(txt, properties=dict(cls_lookup_map=dict(LocalCls=LocalCls)))
    assert len(original) == len(back) == 2
    assert original[0].value == back[0].value
    assert original[1].value == back[1].value

def test_cls_lookup_map_fail():
    class LocalCls(object):
        def __init__(self, val):
            self.value = val
    original = [LocalCls(37), LocalCls(42)]
    txt = dumps(original)
    with raises(ImportError) as err:
        loads(txt)
    assert 'LocalCls' in str(err.value)
    assert 'cls_lookup_map' in str(err.value)
    with raises(ImportError) as err:
        loads(txt, cls_lookup_map=globals())
    assert 'LocalCls' in str(err.value)
    assert 'cls_lookup_map' in str(err.value)

def test_str_unicode_bytes():
    text, pyrepr = u'{"mykey": "你好"}', {"mykey": u"你好"}
    assert loads(text) == pyrepr
    if is_py3:
        with raises(TypeError) as err:
            loads(text.encode('utf-8'))
        if 'ExceptionInfo' in str(type(err)):
            # This check is needed because the type of err varies between versions.
            # For some reason, isinstance(..., py.code.ExceptionInfo) does not work.
            err = err.value
        assert 'Cannot automatically encode' in str(err)
        assert loads(text.encode('utf-8'), conv_str_byte=True) == pyrepr
    else:
        assert loads('{"mykey": "nihao"}') == {'mykey': 'nihao'}

def test_compression_with_comments():
    sh = BytesIO()
    if is_py3:
        test_json = bytes(test_json_with_comments, encoding=ENCODING)
    else:
        test_json = test_json_with_comments
    with GzipFile(mode='wb', fileobj=sh, compresslevel=9) as zh:
        zh.write(test_json)
    json = sh.getvalue()
    ref = loads(test_json_without_comments)
    data2 = loads(json, decompression=True)
    assert ref == data2
    data3 = loads(json, decompression=None)
    assert ref == data3

def test_complex_number():
    objs = (
        4.2 + 3.7j,
        1j,
        1 + 0j,
        -999999.9999999 - 999999.9999999j,
    )
    for obj in objs:
        json = dumps(obj)
        back = loads(json)
        assert obj == back, 'json en/decoding failed for complex number {0:}'.format(obj)
    txt = '{"__complex__": [4.2, 3.7]}'
    obj = loads(txt)
    assert obj == 4.2 + 3.7j

def load(self, video_id=None, filename=None):
    subs_for_video = None
    bare_fn = self.get_storage_filename(video_id) if video_id else filename
    json_path = self.get_json_data_path(bare_fn)
    basename = os.path.basename(json_path)
    #logger.debug("Loading subtitle storage data file: %s", basename)

    if os.path.exists(json_path):
        # new style data
        subs_for_video = JSONStoredVideoSubtitles()
        try:
            with self.threadkit.Lock(key="sub_storage_%s" % basename):
                if sys.platform == "win32":
                    try:
                        with open(json_path, 'rb') as f:
                            s = zlib.decompress(f.read())
                    except zlib.error:
                        # fall back to the old gzip-based win32 implementation
                        with gzip.open(json_path, 'rb', compresslevel=6) as f:
                            s = f.read()
                else:
                    with gzip.open(json_path, 'rb', compresslevel=6) as f:
                        s = f.read()
            data = loads(s)
        except:
            logger.error("Couldn't load JSON data for %s: %s", bare_fn, traceback.format_exc())
            return

        subs_for_video.deserialize(data)
        data = None

    if not subs_for_video:
        return

    # apply possible migrations
    cur_ver = old_ver = subs_for_video.version

    if cur_ver < self.version:
        success = False
        while cur_ver < self.version:
            cur_ver += 1
            mig_func = "migrate_v%s" % cur_ver
            if hasattr(self, mig_func):
                logger.info("Migrating subtitle storage for %s %s>%s" % (subs_for_video.video_id, old_ver, cur_ver))
                success = getattr(self, mig_func)(subs_for_video)

                if success is False:
                    logger.error("Couldn't migrate %s, removing data", subs_for_video.video_id)
                    self.delete(json_path)
                    break

        if cur_ver > old_ver and success:
            logger.info("Storing migrated subtitle storage for %s" % subs_for_video.video_id)
            self.save(subs_for_video)
        elif not success:
            logger.info("Migration of %s %s>%s failed" % (subs_for_video.video_id, old_ver, cur_ver))

    return subs_for_video

def test_decimal_primitives():
    decimals = [Decimal(0), Decimal(-pi), Decimal('9999999999999')]
    txt = dumps(decimals, primitives=True)
    res = loads(txt)
    for x, y in zip(decimals, res):
        assert isinstance(y, float)
        assert x == y or x.is_nan()

def test_decimal():
    decimals = [Decimal(0), Decimal(-pi), Decimal('9999999999999999999999999999999999999999999999999999'),
        Decimal('NaN'), Decimal('Infinity'), -Decimal('Infinity'), Decimal('+0'), Decimal('-0')]
    txt = dumps(decimals)
    res = loads(txt)
    for x, y in zip(decimals, res):
        assert isinstance(y, Decimal)
        assert x == y or x.is_nan()
        assert str(x) == str(y)

def test_complex_number():
    objs = (
        4.2 + 3.7j,
        1j,
        1 + 0j,
        -999999.9999999 - 999999.9999999j,
    )
    for obj in objs:
        json = dumps(obj)
        back = loads(json)
        assert back == obj, 'json en/decoding failed for complex number {0:}'.format(obj)
        json = dumps(obj, primitives=True)
        back = loads(json)
        assert back == [obj.real, obj.imag]
        assert complex(*back) == obj
    txt = '{"__complex__": [4.2, 3.7]}'
    obj = loads(txt)
    assert obj == 4.2 + 3.7j

def test_cls_slots():
    slots = [SlotsBase(), SlotsDictABC(), SlotsStr(), SlotsABCDict(), SlotsABC()]
    txt = dumps(slots)
    res = loads(txt)
    for inputobj, outputobj in zip(slots, res):
        assert isinstance(outputobj, SlotsBase)
        assert inputobj == outputobj
    referenceobj = SlotsBase()
    for outputobj in res[1:]:
        assert outputobj != referenceobj

def test_date_time():
    objs = (
        datetime(year=1988, month=3, day=15, hour=8, minute=3, second=59, microsecond=7),
        datetime(year=1988, month=3, day=15, minute=3, second=59, microsecond=7, tzinfo=pytz.UTC),
        datetime(year=1988, month=3, day=15, microsecond=7, tzinfo=pytz.timezone('Europe/Amsterdam')),
        date(year=1988, month=3, day=15),
        time(hour=8, minute=3, second=59, microsecond=123),
        time(hour=8, second=59, microsecond=123, tzinfo=pytz.timezone('Europe/Amsterdam')),
        timedelta(days=2, seconds=3599),
        timedelta(days=0, seconds=-42, microseconds=123),
        [{'obj': [datetime(year=1988, month=3, day=15, microsecond=7, tzinfo=pytz.timezone('Europe/Amsterdam'))]}],
    )
    for obj in objs:
        json = dumps(obj)
        back = loads(json)
        assert obj == back, 'json en/decoding failed for date/time object {0:}'.format(obj)
    txt = '{"__datetime__": null, "year": 1988, "month": 3, "day": 15, "hour": 8, "minute": 3, ' \
          '"second": 59, "microsecond": 7, "tzinfo": "Europe/Amsterdam"}'
    obj = loads(txt)
    assert obj == datetime(year=1988, month=3, day=15, hour=8, minute=3, second=59, microsecond=7,
        tzinfo=pytz.timezone('Europe/Amsterdam'))

def load(self, video_id=None, filename=None):
    subs_for_video = None
    bare_fn = self.get_storage_filename(video_id) if video_id else filename
    json_path = self.get_json_data_path(bare_fn)

    if os.path.exists(json_path):
        # new style data
        subs_for_video = JSONStoredVideoSubtitles()
        try:
            with gzip.open(json_path, 'rb') as f:
                s = f.read()
            data = loads(s)
        except:
            logger.error("Couldn't load JSON data for %s: %s", bare_fn, traceback.format_exc())
            return

        subs_for_video.deserialize(data)
        data = None

    elif not bare_fn.endswith(".json.gz") and os.path.exists(os.path.join(self.dataitems_path, bare_fn)):
        subs_for_video = self.migrate_legacy_data(bare_fn, json_path)

    if not subs_for_video:
        return

    # apply possible migrations
    cur_ver = old_ver = subs_for_video.version

    if cur_ver < self.version:
        success = False
        while cur_ver < self.version:
            cur_ver += 1
            mig_func = "migrate_v%s" % cur_ver
            if hasattr(self, mig_func):
                logger.info("Migrating subtitle storage for %s %s>%s" % (subs_for_video.video_id, old_ver, cur_ver))
                success = getattr(self, mig_func)(subs_for_video)

                if success is False:
                    logger.error("Couldn't migrate %s, removing data", subs_for_video.video_id)
                    self.delete(json_path)
                    break

        if cur_ver > old_ver and success:
            logger.info("Storing migrated subtitle storage for %s" % subs_for_video.video_id)
            self.save(subs_for_video)
        elif not success:
            logger.info("Migration of %s %s>%s failed" % (subs_for_video.video_id, old_ver, cur_ver))

    return subs_for_video

def test_primitive_naive_date_time():
    json = dumps(DTOBJ, primitives=True)
    back = loads(json)
    for orig, bck in zip(DTOBJ, back):
        # Check the type of the original object; with primitives=True the
        # decoded value is a plain string or float, not a date/time object.
        if isinstance(orig, (date, time, datetime,)):
            assert isinstance(bck, str if is_py3 else (str, unicode))
            assert bck == orig.isoformat()
        elif isinstance(orig, (timedelta,)):
            assert isinstance(bck, float)
            assert bck == orig.total_seconds()
    dt = datetime(year=1988, month=3, day=15, hour=8, minute=3, second=59, microsecond=7)
    assert dumps(dt, primitives=True).strip('"') == '1988-03-15T08:03:59.000007'

def test_custom_enc_dec():
    """ Test using a custom encoder/decoder. """
    def silly_enc(obj):
        return {"val": 42}

    def silly_dec(dct):
        if not isinstance(dct, dict):
            return dct
        return [37]

    txt = dumps(lambda x: x * 2, extra_obj_encoders=(silly_enc,))
    assert txt == '{"val": 42}'
    back = loads(txt, extra_obj_pairs_hooks=(silly_dec,))
    assert back == [37]

def test_fallback_hooks():
    from threading import RLock
    json = dumps(OrderedDict((
        ('li', [1, 2, 3]),
        ('lock', RLock()),
    )), fallback_encoders=[fallback_ignore_unknown])
    bck = loads(json)
    assert bck == OrderedDict((
        ('li', [1, 2, 3]),
        ('lock', None),
    ))

def test_cls_attributes_unchanged():
    """
    Test that class attributes are not restored. This would be undesirable,
    because deserializing one instance could impact all other existing ones.
    """
    SuperClass.cls_attr = 37
    inst = SuperClass()
    json = dumps(inst)
    assert '37' not in json
    SuperClass.cls_attr = 42
    back = loads(json)
    assert inst == back
    assert inst.cls_attr == back.cls_attr == 42
    SuperClass.cls_attr = 37

def test_special_floats():
    """
    The official json standard doesn't support infinity or NaN, but the Python implementation does.
    """
    special_floats = [float('NaN'), float('Infinity'), -float('Infinity'), float('+0'), float('-0')]
    txt = dumps(special_floats, allow_nan=True)
    assert txt == "[NaN, Infinity, -Infinity, 0.0, -0.0]"
    res = loads(txt)
    for x, y in zip(special_floats, res):
        # Compare as strings, since `+0 == -0` and `NaN != NaN`.
        assert str(x) == str(y)
    with raises(ValueError):
        dumps(special_floats, allow_nan=False)
    with raises(ValueError):
        dumps(special_floats)

def test_empty_string_with_url():
    """ Originally for https://github.com/mverleg/pyjson_tricks/issues/51 """
    txt = '{"foo": "", "bar": "http://google.com"}'
    assert txt == strip_comments(txt), strip_comments(txt)
    txt = '{"foo": "", "bar": "http://google.com"}'
    assert txt == dumps(loads(txt, ignore_comments=False))
    assert txt == dumps(loads(txt, ignore_comments=True))
    txt = '{"a": "", "b": "//", "c": ""}'
    assert txt == dumps(loads(txt))
    txt = '{"a": "", "b": "/*", "c": ""}'
    assert txt == dumps(loads(txt))
    txt = '{"//": "//"}'
    assert txt == dumps(loads(txt))
    txt = '{"///": "////*/*"}'
    assert txt == dumps(loads(txt))

def test_ignore_comments_deprecation():
    # https://github.com/mverleg/pyjson_tricks/issues/74

    # First time should have deprecation warning
    loads._ignore_comments_warned_ = False
    with warns(JsonTricksDeprecation):
        loads(test_json_with_comments)

    # Second time there should be no warning
    # noinspection PyTypeChecker
    with warns(None) as captured:
        loaded = loads(test_json_with_comments)
    assert len(captured) == 0
    assert loaded == test_object_for_comment_strings

    # Passing a string without comments should not have a warning
    loads._ignore_comments_warned_ = False
    # noinspection PyTypeChecker
    with warns(None) as captured:
        loaded = loads(test_json_without_comments)
    assert len(captured) == 0

    # Passing True for argument explicitly should not have a warning
    loads._ignore_comments_warned_ = False
    # noinspection PyTypeChecker
    with warns(None) as captured:
        loaded = loads(test_json_with_comments, ignore_comments=True)
    assert len(captured) == 0
    assert loaded == test_object_for_comment_strings

    # Passing False for argument explicitly should not have a warning
    loads._ignore_comments_warned_ = False
    # noinspection PyTypeChecker
    with warns(None) as captured:
        loaded = loads(test_json_without_comments, ignore_comments=False)
    assert len(captured) == 0
    assert loaded == test_object_for_comment_strings

def test_float_precision():
    json = dumps([pi])
    back = loads(json)
    assert back[0] - pi == 0, 'Precision lost while encoding and decoding float.'

def test_set():
    data = [{'set': {3, exp(1), (-5, +7), False}}]
    json = dumps(data)
    back = loads(json)
    assert isinstance(back[0]['set'], set)
    assert data == back

def test_dumps_loads():
    json = dumps(nonpdata)
    data2 = loads(json)
    assert nonpdata == data2

def test_cls_instance_default():
    json = dumps(cls_instance)
    back = loads(json)
    assert (cls_instance.s == back.s)
    assert (cls_instance.dct == dict(back.dct))

def test_cls_instance_custom():
    json = dumps(cls_instance_custom)
    back = loads(json)
    assert (cls_instance_custom.relevant == back.relevant)
    assert (cls_instance_custom.irrelevant == 37)
    assert (back.irrelevant == 12)

def test_duplicates():
    loads(test_json_duplicates, allow_duplicates=True)
    with raises(DuplicateJsonKeyException):
        loads(test_json_duplicates, allow_duplicates=False)

def test_cls_instance_local():
    json = '{"__instance_type__": [null, "CustomEncodeCls"], "attributes": {"relevant": 137}}'
    loads(json, cls_lookup_map=globals())