def test_serializer_loads_bad_data():
    """Feeding malformed payloads to ``loads`` must raise ``SerdeError``.

    Covers wrong top-level types (int, list, empty dict missing the
    required field) as well as bytes that are not valid msgpack at all.
    """

    class BirdSchema(BaseSchema):
        flying = fields.Boolean(required=True)

    serializer = MsgpackSerializer(BirdSchema)
    bad_payloads = [packb(0), packb([]), packb({}), b"dummy"]
    for bad in bad_payloads:
        with pytest.raises(SerdeError):
            serializer.loads(bad)
async def _client_handler(stream):
    """Serve one client connection: read msgpack-framed commands off the
    stream, dispatch each to ``cmd_handler`` and send back the serialized
    reply. The stream is always closed before returning.
    """
    # General exception handling
    try:
        # Stream handling
        try:
            unpacker = Unpacker()
            async for raw in stream:
                # Feed raw bytes in; the unpacker yields each complete
                # msgpack object as soon as enough data has arrived.
                unpacker.feed(raw)
                for cmd in unpacker:
                    # load() validates the request; may raise SerdeError
                    cmd = cmd_req_serializer.load(cmd)
                    rep = await cmd_handler(cmd)
                    raw_rep = cmd_rep_serializer.dumps(rep)
                    logger.info(
                        "Command processed", cmd=cmd["cmd"], rep_status=rep["status"]
                    )
                    await stream.send_all(raw_rep)
        except SerdeError as exc:
            # Malformed request: report the parse error to the peer
            await stream.send_all(
                packb({"status": "invalid_format", "reason": str(exc)})
            )
        finally:
            # Close the stream whether we finished cleanly or errored out
            await stream.aclose()
    except trio.BrokenResourceError:
        pass  # Peer has closed the connection while we were sending a response
    except Exception:
        # Last-resort boundary: never let one client crash the server loop
        logger.exception("Unexpected crash")
def test_supports_legacy_is_admin_field(alice):
    """A legacy-serialized local device (``is_admin`` field, no profile or
    labels) still loads, and the current dump format is a strict superset
    of the legacy one.
    """
    # Hand-build a local user payload in the legacy on-disk format
    legacy_fields = {
        "organization_addr": alice.organization_addr.to_url(),
        "device_id": str(alice.device_id),
        "signing_key": alice.signing_key.encode(),
        "private_key": alice.private_key.encode(),
        "is_admin": True,
        "user_manifest_id": UUID(alice.user_manifest_id.hex),
        "user_manifest_key": bytes(alice.user_manifest_key),
        "local_symkey": bytes(alice.local_symkey),
    }
    serialized_legacy = packb(legacy_fields)

    # The legacy payload must deserialize back to the expected device
    loaded = LocalDevice.load(serialized_legacy)
    assert loaded == alice

    # The current dump format must be the legacy fields plus the new ones
    current_fields = unpackb(alice.dump())
    expected = dict(legacy_fields)
    expected.update(
        {
            "profile": alice.profile.value,
            "human_handle": None,
            "device_label": None,
        }
    )
    assert current_fields == expected
async def send_signal(conn, signal, **kwargs):
    """Broadcast ``signal`` (plus ``kwargs``) through PostgreSQL NOTIFY.

    PostgreSQL's NOTIFY only accept string as payload, hence we must
    use base64 on our payload...
    Add UUID to ensure the payload is unique given it seems Postgresql can
    drop duplicated NOTIFY (same channel/payload)
    see: https://github.com/Scille/parsec-cloud/issues/199
    """
    payload = {"__id__": uuid4().hex, "__signal__": signal.value}
    payload.update(kwargs)
    raw_data = b64encode(packb(payload)).decode("ascii")
    await conn.execute("SELECT pg_notify($1, $2)", "app_notification", raw_data)
    logger.debug("notif sent", signal=signal, kwargs=kwargs)
def test_list_devices_support_legacy_file_with_meaningful_name(config_dir):
    """A legacy key file whose ids live only in the slug is still listed,
    with org/device/rvk-hash recovered from the slug itself.
    """
    # Legacy path might exceed the 256 characters limit in some cases (see issue #1356)
    # So we use the `\\?\` workaround: https://stackoverflow.com/a/57502760/2846140
    if os.name == "nt":
        config_dir = Path("\\\\?\\" + str(config_dir.resolve()))

    # Device information, randomized where the actual values don't matter
    user_id, device_name = uuid4().hex, uuid4().hex
    organization_id = "Org"
    rvk_hash = uuid4().hex[:10]
    device_id = f"{user_id}@{device_name}"
    slug = f"{rvk_hash}#{organization_id}#{device_id}"
    human_label = "Billy Mc BillFace"
    human_email = "*****@*****.**"
    device_label = "My device"

    # Craft file data without the user_id, organization_id and
    # root_verify_key_hash fields
    legacy_payload = {
        "type": "password",
        "salt": b"12345",
        "ciphertext": b"whatever",
        "human_handle": (human_email.encode(), human_label.encode()),
        "device_label": device_label.encode(),
    }
    key_file_path = get_devices_dir(config_dir) / slug / f"{slug}.keys"
    key_file_path.parent.mkdir(parents=True)
    key_file_path.write_bytes(packb(legacy_payload))

    expected_device = AvailableDevice(
        key_file_path=key_file_path,
        organization_id=OrganizationID(organization_id),
        device_id=DeviceID(device_id),
        human_handle=HumanHandle(human_email, human_label),
        device_label=device_label,
        root_verify_key_hash=rvk_hash,
    )
    assert list_available_devices(config_dir) == [expected_device]
def test_list_devices_support_legacy_file_without_labels(config_dir):
    """Legacy key files lacking human/device label fields are still listed,
    with both labels reported as ``None``.
    """
    # Craft file data without the labels fields
    legacy_payload = {"type": "password", "salt": b"12345", "ciphertext": b"whatever"}
    slug = "9d84fbd57a#Org#Zack@PC1"
    key_file_path = fix_dir(get_devices_dir(config_dir) / slug / f"{slug}.keys")
    key_file_path.parent.mkdir(parents=True)
    key_file_path.write_bytes(packb(legacy_payload))

    expected_device = AvailableDevice(
        key_file_path=key_file_path,
        organization_id=OrganizationID("Org"),
        device_id=DeviceID("Zack@PC1"),
        human_handle=None,
        device_label=None,
        root_verify_key_hash="9d84fbd57a",
    )
    assert list_available_devices(config_dir) == [expected_device]
def test_user_certificate_supports_legacy_is_admin_field(alice, bob):
    """Legacy user certificates (``is_admin`` field) must still verify and
    load, and the current serialization must extend the legacy one.
    """
    now = pendulum_now()
    certif = UserCertificateContent(
        author=bob.device_id,
        timestamp=now,
        user_id=alice.user_id,
        human_handle=None,
        public_key=alice.public_key,
        profile=alice.profile,
    )

    # Manually craft a certificate in legacy format
    legacy_fields = {
        "type": "user_certificate",
        "author": bob.device_id,
        "timestamp": now,
        "user_id": alice.user_id,
        "public_key": alice.public_key.encode(),
        "is_admin": True,
    }
    signed_legacy = bob.signing_key.sign(zlib.compress(packb(legacy_fields)))

    # Make sure the legacy format can be loaded
    loaded_legacy = UserCertificateContent.verify_and_load(
        signed_legacy,
        author_verify_key=bob.verify_key,
        expected_author=bob.device_id,
        expected_user=alice.user_id,
        expected_human_handle=None,
    )
    assert loaded_legacy == certif

    # Manually decode new format to check it is compatible with legacy
    signed_current = certif.dump_and_sign(bob.signing_key)
    decoded = unpackb(zlib.decompress(bob.verify_key.verify(signed_current)))
    expected = dict(legacy_fields, profile=alice.profile.value, human_handle=None)
    assert decoded == expected
def test_pack_uuid():
    """UUID values survive a msgpack round-trip with their type intact."""
    original = {"uuid": uuid.uuid4()}
    restored = unpackb(packb(original))
    assert restored == original
    assert isinstance(restored["uuid"], uuid.UUID)
def test_pack_datetime():
    """Pendulum datetimes survive a msgpack round-trip with type intact."""
    original = {"date": pendulum.now()}
    restored = unpackb(packb(original))
    assert restored == original
    assert isinstance(restored["date"], pendulum.DateTime)