def test_Serialized():
    """Serialized wrappers compare equal iff their payloads match."""
    ints = Serialized(*serialize(123))
    pair = Serialized(*serialize((1, 2)))
    ints_again = Serialized(*serialize(123))
    assert ints == ints_again
    assert not (ints != ints_again)
    assert ints != pair
    assert not (ints == pair)
def test_raise_error_on_serialize_write_permissions():
    """Serializing writable h5py objects must raise TypeError."""
    with tmpfile() as fn:
        with h5py.File(fn, mode='a') as f:
            dset = f.create_dataset('/x', shape=(2, 2), dtype='i4')
            f.flush()
            # Both the dataset and the file itself are writable here.
            with pytest.raises(TypeError):
                deserialize(*serialize(dset))
            with pytest.raises(TypeError):
                deserialize(*serialize(f))
def test_errors():
    """Unserializable payloads fall back to pickle, mark 'error', or raise."""
    msg = {'data': {'foo': to_serialize(inc)}}

    # msgpack cannot handle a function; pickle picks up the slack.
    header, frames = serialize(msg, serializers=['msgpack', 'pickle'])
    assert header['serializer'] == 'pickle'

    # With msgpack alone the failure is recorded in the header.
    header, frames = serialize(msg, serializers=['msgpack'])
    assert header['serializer'] == 'error'

    # on_error='raise' surfaces the failure instead.
    with pytest.raises(TypeError):
        serialize(msg, serializers=['msgpack'], on_error='raise')
def test_serialize_deserialize_sparse():
    """A sparse.COO array survives serialize/deserialize intact."""
    dense = np.random.random((2, 3, 4, 5))
    dense[dense < 0.8] = 0
    original = sparse.COO(dense)
    header, frames = serialize(original)
    assert 'sparse' in header['type']
    restored = deserialize(*serialize(original))
    assert_allclose(original.data, restored.data)
    assert_allclose(original.coords, restored.coords)
    assert_allclose(original.todense(), restored.todense())
def test_compression_takes_advantage_of_itemsize():
    """blosc compresses itemsize-8 data better than the same bytes typed as 1."""
    pytest.importorskip('lz4')
    blosc = pytest.importorskip('blosc')
    arr = np.arange(1000000, dtype='i8')

    # Sanity check on blosc itself: typesize matters.
    wide = len(blosc.compress(arr.data, typesize=8))
    narrow = len(blosc.compress(arr.data, typesize=1))
    assert wide < narrow

    # Serializing as i8 should therefore compress better than as u1.
    _, frames_i8 = serialize(arr)
    compressed_i8 = [maybe_compress(frame)[1] for frame in frames_i8]
    _, frames_u1 = serialize(arr.view('u1'))
    compressed_u1 = [maybe_compress(frame)[1] for frame in frames_u1]
    assert sum(map(nbytes, compressed_i8)) < sum(map(nbytes, compressed_u1))
def test_dumps_serialize_numpy(x):
    """Round-trip an arbitrary numpy array through serialize/deserialize."""
    header, frames = serialize(x)
    if 'compression' in header:
        frames = decompress(header, frames)
    restored = deserialize(header, frames)
    np.testing.assert_equal(x, restored)
def test_dumps_serialize_numpy(df):
    """Round-trip a dataframe through serialize/deserialize."""
    header, frames = serialize(df)
    if 'compression' in header:
        frames = decompress(header, frames)
    restored = deserialize(header, frames)
    assert_eq(df, restored)
def check_deserialize_roundtrip(addr):
    """
    Sanity check round-tripping with "deserialize" on and off.
    """
    # Use a long, incompressible bytestring so it is transferred
    # as a separate payload.
    _uncompressible = os.urandom(1024 ** 2) * 4  # end size: 4 MB
    msg = {'op': 'update',
           'x': _uncompressible,
           'to_ser': [to_serialize(_uncompressible)],
           'ser': Serialized(*serialize(_uncompressible)),
           }

    for should_deserialize in (True, False):
        comm_a, comm_b = yield get_comm_pair(addr,
                                             deserialize=should_deserialize)
        # Bounce the message there and back again.
        yield comm_a.write(msg)
        echoed = yield comm_b.read()
        yield comm_b.write(echoed)
        echoed = yield comm_a.read()

        assert sorted(echoed) == sorted(msg)
        for key in ('op', 'x'):
            assert echoed[key] == msg[key]

        if should_deserialize:
            # Wrappers are unwrapped to raw bytes on the way in.
            assert isinstance(echoed['to_ser'][0], (bytes, bytearray))
            assert isinstance(echoed['ser'], (bytes, bytearray))
        else:
            # Wrappers are passed through untouched.
            assert isinstance(echoed['to_ser'][0], (to_serialize, Serialized))
            assert isinstance(echoed['ser'], Serialized)
def test_dumps_serialize():
    """Plain picklable values and registered custom types round-trip."""
    for value in [123, [1, 2, 3]]:
        header, frames = serialize(value)
        assert not header
        assert len(frames) == 1
        assert deserialize(header, frames) == value

    obj = MyObj(123)
    header, frames = serialize(obj)
    assert header['type']
    assert len(frames) == 1
    restored = deserialize(header, frames)
    assert restored.data == obj.data
def test_dont_compress_uncompressable_data():
    """Random bytes stay uncompressed; only large regular data gets blosc."""
    blosc = pytest.importorskip('blosc')

    # Incompressible noise: no compression header, data passed through.
    noise = np.random.randint(0, 255, size=100000).astype('uint8')
    header, [payload] = serialize(noise)
    assert 'compression' not in header
    assert payload == noise.data

    # Large, highly regular data does get blosc-compressed.
    regular = np.ones(1000000)
    header, [payload] = serialize(regular)
    assert header['compression'] == ['blosc']
    assert payload != regular.data

    # Small data is never worth compressing; memory is shared when possible.
    small = np.ones(100)
    header, [payload] = serialize(small)
    assert 'compression' not in header
    if isinstance(payload, memoryview):
        assert payload.obj.ctypes.data == small.ctypes.data
def test_serialize_deserialize_sparse_large():
    """Round-trip a large 1-D sparse.COO through serialize/deserialize.

    Fix: the original test never inspected ``s2``, so it only checked
    that the round trip did not crash.  It now also verifies that the
    payload survives intact.
    """
    n = 100000000
    coords = np.arange(n)
    data = np.ones(n, dtype=np.int16)
    s = sparse.COO([coords], data)
    header, frames = serialize(s)
    s2 = deserialize(header, frames)
    # The round trip must preserve shape, dtype and contents.
    assert s2.shape == s.shape
    assert s2.dtype == s.dtype
    assert (s2.coords == s.coords).all()
    assert (s2.data == s.data).all()
def test_serialize_deserialize_variable():
    """A netCDF4 Variable round-trips with dimensions, dtype and data."""
    with tmpfile() as fn:
        create_test_dataset(fn)
        with netCDF4.Dataset(fn, mode='r') as f:
            original = f.variables['x']
            restored = deserialize(*serialize(original))
            assert isinstance(restored, netCDF4.Variable)
            assert restored.dimensions == ('x',)
            assert original.dtype == restored.dtype
            assert (original[:] == restored[:]).all()
def test_serialize():
    """A numpy array serializes into a typed header plus one frame."""
    arr = np.ones((5, 5))
    header, frames = serialize(arr)
    assert header['type']
    assert len(frames) == 1
    if 'compression' in header:
        frames = decompress(header, frames)
    restored = deserialize(header, frames)
    assert (restored == arr).all()
def test_serialize_deserialize_file():
    """A read-only h5py File round-trips with filename, mode and contents."""
    with tmpfile() as fn:
        with h5py.File(fn, mode='a') as f:
            f.create_dataset('/x', shape=(2, 2), dtype='i4')
        with h5py.File(fn, mode='r') as f:
            restored = deserialize(*serialize(f))
            assert f.filename == restored.filename
            assert isinstance(restored, h5py.File)
            assert f.mode == restored.mode
            assert restored['x'].shape == (2, 2)
def test_serialize_deserialize_dataset():
    """A nested h5py Dataset round-trips with name, file and data."""
    with tmpfile() as fn:
        with h5py.File(fn, mode='a') as f:
            f.create_dataset('/group1/group2/x', shape=(2, 2), dtype='i4')
        with h5py.File(fn, mode='r') as f:
            dataset = f['group1/group2/x']
            restored = deserialize(*serialize(dataset))
            assert isinstance(restored, h5py.Dataset)
            assert dataset.name == restored.name
            assert dataset.file.filename == restored.file.filename
            assert (dataset[:] == restored[:]).all()
def test_serialize_deserialize_dataset():
    """A whole netCDF4 Dataset round-trips with its variables."""
    with tmpfile() as fn:
        create_test_dataset(fn)
        with netCDF4.Dataset(fn, mode='r') as f:
            restored = deserialize(*serialize(f))
            assert f.filepath() == restored.filepath()
            assert isinstance(restored, netCDF4.Dataset)
            assert restored.variables['x'].dimensions == ('x',)
            assert restored.variables['x'].dtype == np.int32
            assert (restored.variables['x'][:] == np.arange(3)).all()
def test_basic():
    """A fitted sklearn estimator round-trips via the 'dask' serializer."""
    est = sklearn.linear_model.LinearRegression()
    est.fit([[0, 0], [1, 1], [2, 2]], [0, 1, 2])
    header, frames = serialize(est)
    assert header['serializer'] == 'dask'
    roundtripped = deserialize(header, frames)
    samples = [[2, 3], [-1, 3]]
    assert (est.predict(samples) == roundtripped.predict(samples)).all()
def test_serialize_raises():
    """Errors raised inside a registered serializer propagate to the caller."""
    class Foo(object):
        pass

    @dask_serialize.register(Foo)
    def dumps(f):
        raise Exception("Hello-123")

    with pytest.raises(Exception) as info:
        deserialize(*serialize(Foo()))

    assert 'Hello-123' in str(info.value)
def _(obj):
    """Serialize a multi-part object: one sub-header and frame range per part."""
    headers = []
    all_frames = []
    for part in obj.parts:
        part_header, part_frames = serialize(part)
        # Record where this part's frames live in the flattened frame list.
        part_header["frame-start-stop"] = [
            len(all_frames),
            len(all_frames) + len(part_frames),
        ]
        headers.append(part_header)
        all_frames.extend(part_frames)
    header = {
        "sub-headers": headers,
        "is-cuda": obj.is_cuda,
        "main-header": obj.header,
    }
    return header, all_frames
def test_serialize_raises():
    """Errors raised inside a registered serializer propagate to the caller."""
    class Foo(object):
        pass

    @dask_serialize.register(Foo)
    def dumps(f):
        raise Exception("Hello-123")

    with pytest.raises(Exception) as info:
        deserialize(*serialize(Foo()))

    assert "Hello-123" in str(info.value)
def test_serialize_deserialize_group():
    """An h5py Group round-trips with its file identity and members."""
    with tmpfile() as fn:
        with h5py.File(fn, mode="a") as f:
            f.create_dataset("/group1/group2/x", shape=(2, 2), dtype="i4")
        with h5py.File(fn, mode="r") as f:
            group = f["/group1/group2"]
            restored = deserialize(*serialize(group))
            assert isinstance(restored, h5py.Group)
            assert group.file.filename == restored.file.filename
            assert restored["x"].shape == (2, 2)
def test_compression_numpy_list():
    """Per-item compression flags are concatenated when serializing a list."""
    class MyObj:
        pass

    @dask_serialize.register(MyObj)
    def _(x):
        # Each MyObj serializes to a single uncompressed empty frame.
        return {"compression": [False]}, [b""]

    header, frames = serialize([MyObj(), MyObj()])
    assert header["compression"] == [False, False]
def test_dumps_serialize_numpy(x):
    """Numpy arrays round-trip; frames are bytes-like; strides survive."""
    header, frames = serialize(x)
    if "compression" in header:
        frames = decompress(header, frames)
    for frame in frames:
        assert isinstance(frame, (bytes, memoryview))
    restored = deserialize(header, frames)
    np.testing.assert_equal(x, restored)
    # Contiguous arrays must keep their exact memory layout.
    if x.flags.c_contiguous or x.flags.f_contiguous:
        assert x.strides == restored.strides
def test_serialize_cupy(shape, dtype, order, serializers):
    """Round-trip a cupy ndarray through the requested serializers.

    Fix: ``numpy.product`` is deprecated (and removed in NumPy 2.0);
    use the equivalent ``numpy.prod`` instead.
    """
    x = cupy.arange(numpy.prod(shape), dtype=dtype)
    x = cupy.ndarray(shape, dtype=x.dtype, memptr=x.data, order=order)
    header, frames = serialize(x, serializers=serializers)
    y = deserialize(header, frames, deserializers=serializers)
    if serializers[0] == "cuda":
        # The CUDA serializer keeps frames on the device.
        assert all(hasattr(f, "__cuda_array_interface__") for f in frames)
    elif serializers[0] == "dask":
        # The dask serializer produces host memoryviews.
        assert all(isinstance(f, memoryview) for f in frames)
    assert (x == y).all()
def test_serialize_deserialize_group():
    """An h5py Group round-trips with its file identity and members."""
    with tmpfile() as fn:
        with h5py.File(fn, mode='a') as f:
            f.create_dataset('/group1/group2/x', shape=(2, 2), dtype='i4')
        with h5py.File(fn, mode='r') as f:
            group = f['/group1/group2']
            restored = deserialize(*serialize(group))
            assert isinstance(restored, h5py.Group)
            assert group.file.filename == restored.file.filename
            assert restored['x'].shape == (2, 2)
def test_dumps_serialize_numpy(x):
    """Numpy arrays round-trip; frames are buffer-like; strides survive."""
    header, frames = serialize(x)
    if 'compression' in header:
        frames = decompress(header, frames)
    # On Python 2 frames may be `buffer` objects instead of memoryviews.
    buffer_interface = buffer if PY2 else memoryview  # noqa: F821
    for frame in frames:
        assert isinstance(frame, (bytes, buffer_interface))
    restored = deserialize(header, frames)
    np.testing.assert_equal(x, restored)
    # Contiguous arrays must keep their exact memory layout.
    if x.flags.c_contiguous or x.flags.f_contiguous:
        assert x.strides == restored.strides
def test_dumps_serialize_numpy_custom_dtype():
    """Arrays with a custom (rational) dtype survive a round trip."""
    from six.moves import builtins
    test_rational = pytest.importorskip('numpy.core.test_rational')
    rational = test_rational.rational
    try:
        # Work around https://github.com/numpy/numpy/issues/9160
        builtins.rational = rational
        original = np.array([1], dtype=rational)
        header, frames = serialize(original)
        restored = deserialize(header, frames)
        np.testing.assert_equal(original, restored)
    finally:
        del builtins.rational
def test_serialize_cupy(dtype):
    """A numba device array round-trips through cuda/dask/pickle serializers."""
    host = np.arange(100, dtype=dtype)
    device = cuda.to_device(host)
    header, frames = serialize(device, serializers=("cuda", "dask", "pickle"))
    restored = deserialize(header, frames,
                           deserializers=("cuda", "dask", "pickle", "error"))
    # Copy both device arrays back to host and compare.
    host_original = np.empty_like(host)
    host_restored = np.empty_like(host)
    device.copy_to_host(host_original)
    restored.copy_to_host(host_restored)
    assert (host_original == host_restored).all()
def test_memmap():
    """A numpy memmap round-trips like a regular array."""
    with tmpfile('npy') as fn:
        with open(fn, 'wb') as f:  # touch file
            pass
        mapped = np.memmap(fn, shape=(5, 5), dtype='i4', mode='readwrite')
        mapped[:] = 5
        header, frames = serialize(mapped)
        if 'compression' in header:
            frames = decompress(header, frames)
        restored = deserialize(header, frames)
        np.testing.assert_equal(mapped, restored)
def test_serialize_deserialize_group():
    """netCDF4 Groups and their Variables round-trip at every nesting level."""
    with tmpfile() as fn:
        create_test_dataset(fn)
        with netCDF4.Dataset(fn, mode='r') as f:
            # Groups keep their name and member listings.
            for path in ['group', 'group/group1']:
                g = f[path]
                h = deserialize(*serialize(g))
                assert isinstance(h, netCDF4.Group)
                assert h.name == g.name
                assert list(g.groups) == list(h.groups)
                assert list(g.variables) == list(h.variables)

            # Variables at each depth keep dimensions, dtype and data.
            vars = [f.variables['x'],
                    f['group'].variables['y'],
                    f['group/group1'].variables['z']]
            for original in vars:
                restored = deserialize(*serialize(original))
                assert isinstance(restored, netCDF4.Variable)
                assert restored.dimensions == ('x',)
                assert original.dtype == restored.dtype
                assert (original[:] == restored[:]).all()
def test_serialize_scipy_sparse(sparse_type, dtype):
    """scipy sparse matrices round-trip via the 'dask' serializer."""
    dense = numpy.array([[0, 1, 0], [2, 0, 3], [0, 4, 0]], dtype=dtype)
    nonzero = dense.nonzero()
    coo = scipy_sparse.coo_matrix((dense[nonzero], nonzero))
    matrix = sparse_type(coo)
    header, frames = serialize(matrix, serializers=["dask"])
    restored = deserialize(header, frames)
    assert (dense == restored.todense()).all()
def test_serialize_bytestrings():
    """bytes and bytearray round-trip, including via memoryview frames."""
    for payload in (b"123", bytearray(b"4567")):
        header, frames = serialize(payload)
        # Bytestrings are passed through as-is, not copied.
        assert frames[0] is payload
        # Deserialization tolerates raw frames, memoryviews,
        # and a leading empty frame.
        for candidate in (frames,
                          list(map(memoryview, frames)),
                          [b"", *frames]):
            restored = deserialize(header, candidate)
            assert type(restored) == type(payload)
            assert restored == payload
def test_serialize_cupy_from_numba(dtype):
    """Frames produced from a numba device array deserialize as cupy."""
    numba = pytest.importorskip("numba")
    np = pytest.importorskip("numpy")
    size = 10
    host = np.arange(size, dtype=dtype)
    device = numba.cuda.to_device(host)
    header, frames = serialize(device, serializers=("cuda", "dask", "pickle"))
    # Rewrite the header so deserialization targets cupy instead of numba.
    header["type-serialized"] = pickle.dumps(cupy.ndarray)
    restored = deserialize(header, frames,
                           deserializers=("cuda", "dask", "pickle", "error"))
    assert (host == cupy.asnumpy(restored)).all()
def test_memmap():
    """A numpy memmap round-trips like a regular array."""
    with tmpfile("npy") as fn:
        with open(fn, "wb") as f:  # touch file
            pass
        mapped = np.memmap(fn, shape=(5, 5), dtype="i4", mode="readwrite")
        mapped[:] = 5
        header, frames = serialize(mapped)
        if "compression" in header:
            frames = decompress(header, frames)
        restored = deserialize(header, frames)
        np.testing.assert_equal(mapped, restored)
def test_serialize_datetime():
    """A cudf DataFrame with a datetime column round-trips losslessly."""
    # Build a pandas frame with a millisecond-resolution timestamp column.
    df = pd.DataFrame({
        'x': np.random.randint(0, 5, size=20),
        'y': np.random.normal(size=20),
    })
    df['timestamp'] = np.arange(0, len(df), dtype=np.dtype('datetime64[ms]'))
    gdf = cudf.DataFrame.from_pandas(df)

    # (De)serialize roundtrip
    recreated = deserialize(*serialize(gdf))

    # Check
    pd.util.testing.assert_frame_equal(recreated.to_pandas(), df)
def test_serialize_numba(dtype):
    """A numba device array round-trips through cuda/dask/pickle."""
    if not cuda.is_available():
        pytest.skip("CUDA is not available")
    host = np.arange(100, dtype=dtype)
    device = cuda.to_device(host)
    header, frames = serialize(device, serializers=("cuda", "dask", "pickle"))
    restored = deserialize(header, frames,
                           deserializers=("cuda", "dask", "pickle", "error"))
    # Copy both device arrays back to host and compare.
    host_original = np.empty_like(host)
    host_restored = np.empty_like(host)
    device.copy_to_host(host_original)
    restored.copy_to_host(host_restored)
    assert (host_original == host_restored).all()
def test_nested_deserialize():
    """nested_deserialize resolves wrappers without mutating its input."""
    x = {'op': 'update',
         'x': [to_serialize(123), to_serialize(456), 789],
         'y': {'a': ['abc', Serialized(*serialize('def'))], 'b': b'ghi'}}
    snapshot = copy.deepcopy(x)

    expected = {'op': 'update',
                'x': [123, 456, 789],
                'y': {'a': ['abc', 'def'], 'b': b'ghi'}}
    assert nested_deserialize(x) == expected
    assert x == snapshot  # x wasn't mutated
def test_nested_deserialize():
    """nested_deserialize resolves wrappers without mutating its input."""
    x = {
        "op": "update",
        "x": [to_serialize(123), to_serialize(456), 789],
        "y": {"a": ["abc", Serialized(*serialize("def"))], "b": b"ghi"},
    }
    snapshot = copy.deepcopy(x)

    expected = {
        "op": "update",
        "x": [123, 456, 789],
        "y": {"a": ["abc", "def"], "b": b"ghi"},
    }
    assert nested_deserialize(x) == expected
    assert x == snapshot  # x wasn't mutated
def test_malicious_exception():
    """Deserialization failures must not run attacker-controlled __setstate__."""
    class BadException(Exception):
        def __setstate__(self):
            return Exception("Sneaky deserialization code")

    class MyClass(object):
        def __getstate__(self):
            raise BadException()

    obj = MyClass()

    # With no serializers the error names the offending class only.
    header, frames = serialize(obj, serializers=[])
    with pytest.raises(Exception) as info:
        deserialize(header, frames)
    assert "Sneaky" not in str(info.value)
    assert "MyClass" in str(info.value)

    # With pickle the original exception type surfaces, still sanitized.
    header, frames = serialize(obj, serializers=['pickle'])
    with pytest.raises(Exception) as info:
        deserialize(header, frames)
    assert "Sneaky" not in str(info.value)
    assert "BadException" in str(info.value)
def test_serialize_numba(shape, dtype, order):
    """Round-trip a shaped/ordered numba device array through serializers.

    Fix: ``np.product`` is deprecated (and removed in NumPy 2.0);
    use the equivalent ``np.prod`` instead.
    """
    if not cuda.is_available():
        pytest.skip("CUDA is not available")
    ary = np.arange(np.prod(shape), dtype=dtype)
    ary = np.ndarray(shape, dtype=ary.dtype, buffer=ary.data, order=order)
    x = cuda.to_device(ary)
    header, frames = serialize(x, serializers=("cuda", "dask", "pickle"))
    y = deserialize(header, frames,
                    deserializers=("cuda", "dask", "pickle", "error"))
    # Copy both device arrays back to host and compare.
    hx = np.empty_like(ary)
    hy = np.empty_like(ary)
    x.copy_to_host(hx)
    y.copy_to_host(hy)
    assert (hx == hy).all()
def test_serialize_deserialize_group():
    """netCDF4 Groups and Variables round-trip at every nesting level."""
    with tmpfile() as fn:
        create_test_dataset(fn)
        with netCDF4.Dataset(fn, mode="r") as f:
            # Groups keep their name and member listings.
            for path in ["group", "group/group1"]:
                g = f[path]
                h = deserialize(*serialize(g))
                assert isinstance(h, netCDF4.Group)
                assert h.name == g.name
                assert list(g.groups) == list(h.groups)
                assert list(g.variables) == list(h.variables)

            # Variables at each depth keep dimensions, dtype and data.
            variables = [
                f.variables["x"],
                f["group"].variables["y"],
                f["group/group1"].variables["z"],
            ]
            for original in variables:
                restored = deserialize(*serialize(original))
                assert isinstance(restored, netCDF4.Variable)
                assert restored.dimensions == ("x", )
                assert original.dtype == restored.dtype
                assert (original[:] == restored[:]).all()
def test_grad(requires_grad):
    """Tensor round trips preserve requires_grad and gradient values."""
    data = np.arange(10)
    tensor = torch.tensor(data, dtype=torch.float,
                          requires_grad=requires_grad)
    if requires_grad:
        tensor.grad = torch.zeros_like(tensor) + 1

    restored = deserialize(*serialize(tensor))

    assert restored.requires_grad is requires_grad
    assert tensor.requires_grad is requires_grad
    assert np.allclose(restored.detach().numpy(), data)
    if requires_grad:
        assert np.allclose(restored.grad.numpy(), 1)
def test_malicious_exception():
    """Deserialization failures must not run attacker-controlled __setstate__."""
    class BadException(Exception):
        def __setstate__(self):
            return Exception("Sneaky deserialization code")

    class MyClass:
        def __getstate__(self):
            raise BadException()

    obj = MyClass()

    # With no serializers the error names the offending class only.
    header, frames = serialize(obj, serializers=[])
    with pytest.raises(Exception) as info:
        deserialize(header, frames)
    assert "Sneaky" not in str(info.value)
    assert "MyClass" in str(info.value)

    # With pickle the original exception type surfaces, still sanitized.
    header, frames = serialize(obj, serializers=["pickle"])
    with pytest.raises(Exception) as info:
        deserialize(header, frames)
    assert "Sneaky" not in str(info.value)
    assert "BadException" in str(info.value)
def test_serialize_deserialize_model():
    """A compiled keras model round-trips directly and inside a message."""
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(5, input_dim=3))
    model.add(keras.layers.Dense(2))
    model.compile(optimizer='sgd', loss='mse')
    x = np.random.random((1, 3))
    y = np.random.random((1, 2))
    model.train_on_batch(x, y)

    # Direct serialize/deserialize round trip.
    loaded = deserialize(*serialize(model))
    assert_allclose(loaded.predict(x), model.predict(x))

    # Round trip embedded in a dumps/loads message.
    result = loads(dumps({'model': to_serialize(model)}))
    assert_allclose(result['model'].predict(x), model.predict(x))
def test_serialize_writeable_array_readonly_base_object():
    """Writable views over read-only bases keep their flags after a round trip.

    Regression test for https://github.com/dask/distributed/issues/3252
    """
    base = np.arange(3)
    # A broadcast view does not own its memory.
    view = np.broadcast_to(base, (3, 3))
    # Make the view writeable while its base stays read-only.
    view.setflags(write=True)
    base.setflags(write=False)

    restored = deserialize(*serialize(view))
    np.testing.assert_equal(restored, view)
    # All flags (including WRITEABLE) must survive the round trip.
    assert restored.flags == view.flags
def test_raw_file_distributed_serializable(self):
    """RawFile instances survive distributed serialize/deserialize."""
    from distributed.protocol import deserialize
    from distributed.protocol import serialize

    # Arrange
    fs = fsspec.filesystem("file")
    path = TEST_DATA_DIR + "/betfair/1.166811431.bz2"
    raw = RawFile(open_file=fs.open(path=path, compression="bz2"))

    # Act
    result1: RawFile = deserialize(*serialize(raw))

    # Assert: file identity, sizing and compression are preserved.
    assert result1.open_file.fs == raw.open_file.fs
    assert result1.open_file.path == raw.open_file.path
    assert result1.block_size == raw.block_size
    assert result1.open_file.compression == "bz2"
def test_serialize_cupy_from_rmm(size):
    """An rmm DeviceBuffer's frames deserialize as a cupy ndarray."""
    np = pytest.importorskip("numpy")
    rmm = pytest.importorskip("rmm")
    host = np.arange(size, dtype="u1")
    host_desc = host.__array_interface__
    (host_ptr, _) = host_desc["data"]
    (host_size,) = host_desc["shape"]
    buf = rmm.DeviceBuffer(ptr=host_ptr, size=host_size)
    header, frames = serialize(buf, serializers=("cuda", "dask", "pickle"))
    # Rewrite the header so deserialization targets cupy instead.
    header["type-serialized"] = pickle.dumps(cupy.ndarray)
    restored = deserialize(header, frames,
                           deserializers=("cuda", "dask", "pickle", "error"))
    assert (host == cupy.asnumpy(restored)).all()
def test_serialize_deserialize_model():
    """A compiled keras model round-trips directly and inside a message."""
    from numpy.testing import assert_allclose

    model = keras.models.Sequential()
    model.add(keras.layers.Dense(5, input_dim=3))
    model.add(keras.layers.Dense(2))
    model.compile(optimizer="sgd", loss="mse")
    x = np.random.random((1, 3))
    y = np.random.random((1, 2))
    model.train_on_batch(x, y)

    # Direct serialize/deserialize round trip.
    loaded = deserialize(*serialize(model))
    assert_allclose(loaded.predict(x), model.predict(x))

    # Round trip embedded in a dumps/loads message.
    result = loads(dumps({"model": to_serialize(model)}))
    assert_allclose(result["model"].predict(x), model.predict(x))
def test_serialize_rmm_device_buffer(size, serializers):
    """rmm DeviceBuffers round-trip through the requested serializers."""
    if not hasattr(rmm, "DeviceBuffer"):
        pytest.skip("RMM pre-0.11.0 does not have DeviceBuffer")
    host = numpy.arange(size, dtype="u1")
    buf = rmm.DeviceBuffer(size=size)
    cuda.to_device(host, to=cuda.as_cuda_array(buf))
    header, frames = serialize(buf, serializers=serializers)
    restored = deserialize(header, frames, deserializers=serializers)
    if serializers[0] == "cuda":
        # The CUDA serializer keeps frames on the device.
        assert all(hasattr(f, "__cuda_array_interface__") for f in frames)
    elif serializers[0] == "dask":
        # The dask serializer produces host memoryviews.
        assert all(isinstance(f, memoryview) for f in frames)
    assert (host == restored.copy_to_host()).all()
async def _test_round_trip_tcp(model: TreeliteModel):
    """Round-trip a Treelite model over a TCP comm pair byte-exactly.

    Fix: replaced an f-string that had no placeholders (ruff F541)
    with a plain string literal.
    """
    client, server = await get_comm_pair('tcp://localhost')
    header, frames = serialize(model)
    print('Serialized model to Python buffer frames')
    msg = (header, frames)
    await client.write(msg)
    received_msg = await server.read()
    header, frames = received_msg
    received_model = deserialize(header, frames)
    print('Deserialized model from Python buffer frames')
    # The model must survive the transfer byte-for-byte.
    assert treelite2bytes(model) == treelite2bytes(received_model)
    print('Round trip preserved all bytes\n')
    await client.close()
    await server.close()
def test_empty():
    """An Empty instance deserializes back to an Empty instance."""
    roundtripped = deserialize(*serialize(Empty()))
    assert isinstance(roundtripped, Empty)
def test_serialize_bytes():
    """Serializing bytes passes the original object through as a frame."""
    payload = b'123'
    header, frames = serialize(payload)
    assert frames[0] is payload
def test_roundtrip(obj):
    """serialize/deserialize work standalone, independent of distributed."""
    header, frames = serialize(obj)
    restored = deserialize(header, frames)
    assert obj.equals(restored)
def check_deserialize(addr):
    """
    Check the "deserialize" flag on connect() and listen().

    Fixes: the size comment on the large payload claimed 8 MB, but
    ``os.urandom(1024 ** 2) * 4`` is 4 MB; the shared ``check_out``
    helper's comment claimed deserialize=False although it validates
    both flag values.
    """
    # Test with Serialize and Serialized objects
    msg = {'op': 'update',
           'x': b'abc',
           'to_ser': [to_serialize(123)],
           'ser': Serialized(*serialize(456)),
           }
    msg_orig = msg.copy()

    def check_out_false(out_value):
        # Check output with deserialize=False
        out_value = out_value.copy()  # in case transport passed the object as-is
        to_ser = out_value.pop('to_ser')
        ser = out_value.pop('ser')
        expected_msg = msg_orig.copy()
        del expected_msg['ser']
        del expected_msg['to_ser']
        assert out_value == expected_msg
        assert isinstance(ser, Serialized)
        assert deserialize(ser.header, ser.frames) == 456
        assert isinstance(to_ser, list)
        to_ser, = to_ser
        # The to_serialize() value could have been actually serialized
        # or not (it's a transport-specific optimization)
        if isinstance(to_ser, Serialized):
            assert deserialize(to_ser.header, to_ser.frames) == 123
        else:
            assert to_ser == to_serialize(123)

    def check_out_true(out_value):
        # Check output with deserialize=True
        expected_msg = msg.copy()
        expected_msg['ser'] = 456
        expected_msg['to_ser'] = [123]
        assert out_value == expected_msg

    yield check_listener_deserialize(addr, False, msg, check_out_false)
    yield check_connector_deserialize(addr, False, msg, check_out_false)
    yield check_listener_deserialize(addr, True, msg, check_out_true)
    yield check_connector_deserialize(addr, True, msg, check_out_true)

    # Test with long bytestrings, large enough to be transferred
    # as a separate payload
    _uncompressible = os.urandom(1024 ** 2) * 4  # end size: 4 MB
    msg = {'op': 'update',
           'x': _uncompressible,
           'to_ser': [to_serialize(_uncompressible)],
           'ser': Serialized(*serialize(_uncompressible)),
           }
    msg_orig = msg.copy()

    def check_out(deserialize_flag, out_value):
        # Check output for either deserialize setting
        assert sorted(out_value) == sorted(msg_orig)
        out_value = out_value.copy()  # in case transport passed the object as-is
        to_ser = out_value.pop('to_ser')
        ser = out_value.pop('ser')
        expected_msg = msg_orig.copy()
        del expected_msg['ser']
        del expected_msg['to_ser']
        assert out_value == expected_msg
        if deserialize_flag:
            assert isinstance(ser, (bytes, bytearray))
            assert bytes(ser) == _uncompressible
        else:
            assert isinstance(ser, Serialized)
            assert deserialize(ser.header, ser.frames) == _uncompressible
        assert isinstance(to_ser, list)
        to_ser, = to_ser
        # The to_serialize() value could have been actually serialized
        # or not (it's a transport-specific optimization)
        if isinstance(to_ser, Serialized):
            assert deserialize(to_ser.header, to_ser.frames) == _uncompressible
        else:
            assert to_ser == to_serialize(_uncompressible)

    yield check_listener_deserialize(addr, False, msg, partial(check_out, False))
    yield check_connector_deserialize(addr, False, msg, partial(check_out, False))
    yield check_listener_deserialize(addr, True, msg, partial(check_out, True))
    yield check_connector_deserialize(addr, True, msg, partial(check_out, True))