def test_version_check_during_deserialization(s):
    """Loading JSON whose version stamp is unrecognized must raise."""
    serialized = jsonify.dumps(s)
    # Tamper with the embedded version number before reloading.
    payload = json.loads(serialized)
    payload[jsonify.VERSION_KEY] = '0.1.bogus'
    tampered = json.dumps(payload)
    with pytest.raises(exceptions.JSONVersionError):
        jsonify.loads(tampered)
def test_json_deserialization_non_pyphi_clasess():
    """Objects of unknown (non-PyPhi) classes come back as plain dicts."""
    class OtherObject:
        def __init__(self, x):
            self.x = x

    roundtripped = jsonify.loads(jsonify.dumps(OtherObject(1)))
    assert roundtripped == {'x': 1}
def test_json_deserialization(s, transition):
    """A representative sample of PyPhi objects survives a JSON round-trip."""
    cases = [
        Direction.CAUSE,
        s.network,  # Network
        s,  # Subsystem
        models.Bipartition(models.Part((0,), ()),
                           models.Part((1,), (2, 3))),
        models.KPartition(models.Part((0,), ()),
                          models.Part((1,), (2, 3))),
        models.Tripartition(models.Part((0,), ()),
                            models.Part((1,), (2, 3)),
                            models.Part((3,), (4,))),
        models.Cut((0,), (2,)),
        models.NullCut((0, 1)),
        models.KCut(Direction.CAUSE,
                    models.KPartition(models.Part((0,), ()),
                                      models.Part((1,), (2, 3)))),
        s.concept((1, 2)),
        s.concept((1,)),
        compute.ces(s),
        compute.sia(s),
        transition,
        transition.find_actual_cause((0,), (0,)),
        actual.account(transition),
        actual.sia(transition),
    ]
    for case in cases:
        assert jsonify.loads(jsonify.dumps(case)) == case
def test_deserialization_memoizes_ces_subsystem(s):
    """The SIA's subsystem and its CES's subsystem deserialize to one object.

    NOTE(review): this test was previously named
    ``test_deserialization_memoizes_duplicate_objects``, identical to a later
    test in this module; the later definition shadowed this one, so pytest
    never collected it. Renamed so both tests actually run.
    """
    with config.override(PARALLEL_CUT_EVALUATION=True):
        sia = compute.sia(s)

    loaded = jsonify.loads(jsonify.dumps(sia))
    l1 = loaded.subsystem
    l2 = loaded.ces.subsystem
    # Equal subsystems should be deserialized to the very same object.
    assert l1 == l2
    assert hash(l1) == hash(l2)
    assert l1 is l2
def test_jsonify_numpy():
    """NumPy arrays and scalars serialize to their native Python forms."""
    data = {
        'ndarray': np.array([1, 2]),
        'np.int32': np.int32(1),
        'np.int64': np.int64(2),
        'np.float64': np.float64(3),
    }
    expected = {
        'ndarray': [1, 2],
        'np.int32': 1,
        'np.int64': 2,
        'np.float64': 3.0,
    }
    assert jsonify.loads(jsonify.dumps(data)) == expected
def test_jsonify_native():
    """Native Python values round-trip; tuples come back as lists (JSON)."""
    data = {
        'list': [1, 2.0, 3],
        'tuple': (1, 2, 3),
        'bool': [True, False],
        'null': None,
    }
    expected = {
        'list': [1, 2.0, 3],
        'tuple': [1, 2, 3],
        'bool': [True, False],
        'null': None,
    }
    assert jsonify.loads(jsonify.dumps(data)) == expected
def test_deserialization_memoizes_duplicate_objects(s):
    """Equal-but-distinct subsystems collapse to one object when loaded."""
    with config.override(PARALLEL_CUT_EVALUATION=True):
        sia = compute.sia(s)

    before_a = sia.subsystem
    # Computed in a parallel process, so has a different id
    before_b = sia.ces[0].subsystem
    assert before_a is not before_b
    assert before_a == before_b
    assert hash(before_a) == hash(before_b)

    loaded = jsonify.loads(jsonify.dumps(sia))
    after_a = loaded.subsystem
    after_b = loaded.ces[0].subsystem
    assert after_a == after_b
    assert hash(after_a) == hash(after_b)
    # Deserialization memoizes: equal objects become the same object.
    assert after_a is after_b
def test_jsonify_complexes(s, flushcache, restore_fs_cache):
    """The list of complexes survives a JSON round-trip without error."""
    flushcache()
    all_complexes = compute.complexes(s.network, s.state)
    jsonify.loads(jsonify.dumps(all_complexes))
def test_jsonify_sia(s, flushcache, restore_fs_cache):
    """A system-irreducibility analysis survives a JSON round-trip.

    NOTE(review): this called ``compute.big_mip``, the pre-1.0 PyPhi name
    for ``compute.sia``. The rest of this module already uses the renamed
    API (``compute.sia(s)``), under which ``big_mip`` no longer exists, so
    the call and the test name are updated accordingly.
    """
    flushcache()
    jsonify.loads(jsonify.dumps(compute.sia(s)))
def test_jsonify_network(s):
    """A serialized Network exposes its tpm, cm, and size as a dict."""
    network = s.network
    loaded = jsonify.loads(jsonify.dumps(network))
    assert np.array_equal(loaded['tpm'], network.tpm)
    assert np.array_equal(loaded['cm'], network.connectivity_matrix)
    assert loaded['size'] == network.size