def test_local_device():
    from parsec.core.types.local_device import _RsLocalDevice, LocalDevice, _PyLocalDevice

    assert LocalDevice is _RsLocalDevice

    def _assert_local_device_eq(py, rs):
        assert isinstance(py, _PyLocalDevice)
        assert isinstance(rs, _RsLocalDevice)
        assert py.organization_addr == rs.organization_addr
        assert py.device_id == rs.device_id
        assert py.device_label == rs.device_label
        assert py.human_handle == rs.human_handle
        assert py.signing_key == rs.signing_key
        assert py.private_key == rs.private_key
        assert py.profile == rs.profile
        assert py.user_manifest_id == rs.user_manifest_id
        assert py.user_manifest_key == rs.user_manifest_key
        assert py.local_symkey == rs.local_symkey
        assert py.is_admin == rs.is_admin
        assert py.is_outsider == rs.is_outsider
        assert py.slug == rs.slug
        assert py.slughash == rs.slughash
        assert py.root_verify_key == rs.root_verify_key
        assert py.organization_id == rs.organization_id
        assert py.device_name == rs.device_name
        assert py.user_id == rs.user_id
        assert py.verify_key == rs.verify_key
        assert py.public_key == rs.public_key
        assert py.user_display == rs.user_display
        assert py.short_user_display == rs.short_user_display
        assert py.device_display == rs.device_display

    signing_key = SigningKey.generate()
    kwargs = {
        "organization_addr": BackendOrganizationAddr.build(
            BackendAddr.from_url("parsec://foo"),
            organization_id=OrganizationID("org"),
            root_verify_key=signing_key.verify_key,
        ),
        "device_id": DeviceID.new(),
        "device_label": None,
        "human_handle": None,
        "signing_key": signing_key,
        "private_key": PrivateKey.generate(),
        "profile": UserProfile.ADMIN,
        "user_manifest_id": EntryID.new(),
        "user_manifest_key": SecretKey.generate(),
        "local_symkey": SecretKey.generate(),
    }

    py_ba = _PyLocalDevice(**kwargs)
    rs_ba = LocalDevice(**kwargs)
    _assert_local_device_eq(py_ba, rs_ba)
def generate_new_device(
    device_id: DeviceID,
    organization_addr: BackendOrganizationAddr,
    is_admin: bool = False,
) -> LocalDevice:
    return LocalDevice(
        organization_addr=organization_addr,
        device_id=device_id,
        signing_key=SigningKey.generate(),
        private_key=PrivateKey.generate(),
        is_admin=is_admin,
        user_manifest_id=EntryID(uuid4().hex),
        user_manifest_key=SecretKey.generate(),
        local_symkey=SecretKey.generate(),
    )
def test_workspace_entry():
    from parsec.api.data.manifest import _RsWorkspaceEntry, WorkspaceEntry, _PyWorkspaceEntry
    from parsec.api.data import EntryName

    assert WorkspaceEntry is _RsWorkspaceEntry

    def _assert_workspace_entry_eq(py, rs):
        assert isinstance(py, _PyWorkspaceEntry)
        assert isinstance(rs, _RsWorkspaceEntry)
        assert py.is_revoked() == rs.is_revoked()
        assert py.name == rs.name
        assert py.id == rs.id
        assert py.key == rs.key
        assert py.encryption_revision == rs.encryption_revision
        assert py.encrypted_on == rs.encrypted_on
        assert py.role_cached_on == rs.role_cached_on
        assert py.role == rs.role

    kwargs = {
        "name": EntryName("name"),
        "id": EntryID.new(),
        "key": SecretKey.generate(),
        "encryption_revision": 1,
        "encrypted_on": pendulum.now(),
        "role_cached_on": pendulum.now(),
        "role": RealmRole.OWNER,
    }

    py_we = _PyWorkspaceEntry(**kwargs)
    rs_we = WorkspaceEntry(**kwargs)
    _assert_workspace_entry_eq(py_we, rs_we)

    kwargs = {
        "name": EntryName("new_name"),
        "id": EntryID.new(),
        "key": SecretKey.generate(),
        "encryption_revision": 42,
        "encrypted_on": pendulum.now(),
        "role_cached_on": pendulum.now(),
        "role": None,
    }

    py_we = py_we.evolve(**kwargs)
    rs_we = rs_we.evolve(**kwargs)
    _assert_workspace_entry_eq(py_we, rs_we)
async def do_claim_device(
    self, requested_device_label: Optional[DeviceLabel]
) -> LocalDevice:
    # The device key is generated here and kept in memory until the end of
    # the enrollment process. This means we can lose it if something goes wrong.
    # This has no impact until step 4 (somewhere between the data exchange and
    # confirmation exchange steps) where the greeter uploads our certificate to
    # the server.
    # This is considered acceptable given 1) the error window is small and
    # 2) if this occurs the inviter can revoke the device and retry the
    # enrollment process to fix this.
    signing_key = SigningKey.generate()

    try:
        payload = InviteDeviceData(
            requested_device_label=requested_device_label,
            verify_key=signing_key.verify_key,
        ).dump_and_encrypt(key=self._shared_secret_key)
    except DataError as exc:
        raise InviteError("Cannot generate InviteDeviceData payload") from exc

    rep = await self._cmds.invite_4_claimer_communicate(payload=payload)
    _check_rep(rep, step_name="step 4 (data exchange)")

    rep = await self._cmds.invite_4_claimer_communicate(payload=b"")
    _check_rep(rep, step_name="step 4 (confirmation exchange)")

    try:
        confirmation = InviteDeviceConfirmation.decrypt_and_load(
            rep["payload"], key=self._shared_secret_key
        )
    except DataError as exc:
        raise InviteError(
            "Invalid InviteDeviceConfirmation payload provided by peer"
        ) from exc

    organization_addr = BackendOrganizationAddr.build(
        backend_addr=self._cmds.addr.get_backend_addr(),
        organization_id=self._cmds.addr.organization_id,
        root_verify_key=confirmation.root_verify_key,
    )

    return LocalDevice(
        organization_addr=organization_addr,
        device_id=confirmation.device_id,
        device_label=confirmation.device_label,
        human_handle=confirmation.human_handle,
        profile=confirmation.profile,
        private_key=confirmation.private_key,
        signing_key=signing_key,
        user_manifest_id=confirmation.user_manifest_id,
        user_manifest_key=confirmation.user_manifest_key,
        local_symkey=SecretKey.generate(),
    )
def new(cls: Type[WorkspaceEntryTypeVar], name: str) -> "WorkspaceEntry":
    now = pendulum_now()
    return WorkspaceEntry(
        name=name,
        id=EntryID(),
        key=SecretKey.generate(),
        encryption_revision=1,
        encrypted_on=now,
        role_cached_on=now,
        role=RealmRole.OWNER,
    )
def test_block_access():
    from parsec.api.data.manifest import _RsBlockAccess, BlockAccess, _PyBlockAccess

    assert BlockAccess is _RsBlockAccess

    def _assert_block_access_eq(py, rs):
        assert isinstance(py, _PyBlockAccess)
        assert isinstance(rs, _RsBlockAccess)
        assert py.id == rs.id
        assert py.key == rs.key
        assert py.offset == rs.offset
        assert py.size == rs.size
        assert py.digest == rs.digest

    kwargs = {
        "id": BlockID.new(),
        "key": SecretKey.generate(),
        "offset": 0,
        "size": 1024,
        "digest": HashDigest.from_data(b"a"),
    }

    py_ba = _PyBlockAccess(**kwargs)
    rs_ba = BlockAccess(**kwargs)
    _assert_block_access_eq(py_ba, rs_ba)

    kwargs = {
        "id": BlockID.new(),
        "key": SecretKey.generate(),
        "offset": 64,
        "size": 2048,
        "digest": HashDigest.from_data(b"b"),
    }

    py_ba = py_ba.evolve(**kwargs)
    rs_ba = rs_ba.evolve(**kwargs)
    _assert_block_access_eq(py_ba, rs_ba)

    kwargs["size"] = 0
    with pytest.raises(ValueError):
        BlockAccess(**kwargs)
def new(cls: Type[WorkspaceEntryTypeVar], name: EntryName, timestamp: DateTime) -> "WorkspaceEntry":
    assert isinstance(name, EntryName)
    return _PyWorkspaceEntry(
        name=name,
        id=EntryID.new(),
        key=SecretKey.generate(),
        encryption_revision=1,
        encrypted_on=timestamp,
        role_cached_on=timestamp,
        role=RealmRole.OWNER,
    )
async def do_claim_device(
    self, requested_device_label: Optional[str]
) -> LocalDevice:
    signing_key = SigningKey.generate()

    try:
        payload = InviteDeviceData(
            requested_device_label=requested_device_label,
            verify_key=signing_key.verify_key,
        ).dump_and_encrypt(key=self._shared_secret_key)
    except DataError as exc:
        raise InviteError("Cannot generate InviteDeviceData payload") from exc

    rep = await self._cmds.invite_4_claimer_communicate(payload=payload)
    if rep["status"] == "invalid_state":
        raise InvitePeerResetError()
    elif rep["status"] != "ok":
        raise InviteError(f"Backend error during step 4 (data exchange): {rep}")

    rep = await self._cmds.invite_4_claimer_communicate(payload=b"")
    if rep["status"] == "invalid_state":
        raise InvitePeerResetError()
    elif rep["status"] != "ok":
        raise InviteError(f"Backend error during step 4 (confirmation exchange): {rep}")

    try:
        confirmation = InviteDeviceConfirmation.decrypt_and_load(
            rep["payload"], key=self._shared_secret_key
        )
    except DataError as exc:
        raise InviteError(
            "Invalid InviteDeviceConfirmation payload provided by peer"
        ) from exc

    organization_addr = BackendOrganizationAddr.build(
        backend_addr=self._cmds.addr,
        organization_id=self._cmds.addr.organization_id,
        root_verify_key=confirmation.root_verify_key,
    )

    return LocalDevice(
        organization_addr=organization_addr,
        device_id=confirmation.device_id,
        device_label=confirmation.device_label,
        human_handle=confirmation.human_handle,
        profile=confirmation.profile,
        private_key=confirmation.private_key,
        signing_key=signing_key,
        user_manifest_id=confirmation.user_manifest_id,
        user_manifest_key=confirmation.user_manifest_key,
        local_symkey=SecretKey.generate(),
    )
def generate_new_device(
    organization_addr: BackendOrganizationAddr,
    device_id: Optional[DeviceID] = None,
    profile: UserProfile = UserProfile.STANDARD,
    human_handle: Optional[HumanHandle] = None,
    device_label: Optional[DeviceLabel] = None,
    signing_key: Optional[SigningKey] = None,
    private_key: Optional[PrivateKey] = None,
) -> LocalDevice:
    return LocalDevice(
        organization_addr=organization_addr,
        device_id=device_id or DeviceID.new(),
        device_label=device_label,
        human_handle=human_handle,
        signing_key=signing_key or SigningKey.generate(),
        private_key=private_key or PrivateKey.generate(),
        profile=profile,
        user_manifest_id=EntryID.new(),
        user_manifest_key=SecretKey.generate(),
        local_symkey=SecretKey.generate(),
    )
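# Illustrative sketch (not taken from the codebase above): how generate_new_device
# might be called with only an organization address, letting every optional argument
# fall back to its default. The address values ("parsec://foo", "org") mirror the
# placeholders used in the tests above and are assumptions, not real endpoints.
root_signing_key = SigningKey.generate()
example_addr = BackendOrganizationAddr.build(
    BackendAddr.from_url("parsec://foo"),
    organization_id=OrganizationID("org"),
    root_verify_key=root_signing_key.verify_key,
)
example_device = generate_new_device(organization_addr=example_addr)
# A fresh DeviceID, signing key, private key and secret keys are generated on the fly
assert example_device.profile is UserProfile.STANDARD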
async def workspace_start_reencryption(self, workspace_id: EntryID) -> ReencryptionJob:
    """
    Raises:
        FSError
        FSBackendOfflineError
        FSWorkspaceNoAccess
        FSWorkspaceNotFoundError
    """
    user_manifest = self.get_user_manifest()
    workspace_entry = user_manifest.get_workspace_entry(workspace_id)
    if not workspace_entry:
        raise FSWorkspaceNotFoundError(f"Unknown workspace `{workspace_id}`")

    now = pendulum_now()
    new_workspace_entry = workspace_entry.evolve(
        encryption_revision=workspace_entry.encryption_revision + 1,
        encrypted_on=now,
        key=SecretKey.generate(),
    )

    while True:
        # In order to provide the new key to each participant, we must
        # encrypt a message for each of them
        participants = await self._retrieve_participants(workspace_entry.id)
        reencryption_msgs = self._generate_reencryption_messages(
            new_workspace_entry, participants, now
        )

        # Actually ask the backend to start the reencryption
        ok = await self._send_start_reencryption_cmd(
            workspace_entry.id,
            new_workspace_entry.encryption_revision,
            now,
            reencryption_msgs,
        )
        if not ok:
            # Participant list has changed concurrently
            logger.info(
                "Realm participants list has changed during start reencryption tentative, retrying",
                workspace_id=workspace_id,
            )
            continue
        else:
            break

    # Note we don't update the user manifest here, this will be done when
    # processing the `realm.updated` message from the backend

    return ReencryptionJob(self.backend_cmds, new_workspace_entry, workspace_entry)
async def _create_new_device_for_self(
    original_device: LocalDevice, new_device_label: DeviceLabel
) -> LocalDevice:
    """
    Raises:
        BackendConnectionError
    """
    new_device = LocalDevice(
        organization_addr=original_device.organization_addr,
        device_id=DeviceID(f"{original_device.user_id}@{DeviceName.new()}"),
        device_label=new_device_label,
        human_handle=original_device.human_handle,
        profile=original_device.profile,
        private_key=original_device.private_key,
        signing_key=SigningKey.generate(),
        user_manifest_id=original_device.user_manifest_id,
        user_manifest_key=original_device.user_manifest_key,
        local_symkey=SecretKey.generate(),
    )
    now = pendulum_now()

    device_certificate = DeviceCertificateContent(
        author=original_device.device_id,
        timestamp=now,
        device_id=new_device.device_id,
        device_label=new_device.device_label,
        verify_key=new_device.verify_key,
    )
    redacted_device_certificate = device_certificate.evolve(device_label=None)

    device_certificate = device_certificate.dump_and_sign(original_device.signing_key)
    redacted_device_certificate = redacted_device_certificate.dump_and_sign(
        original_device.signing_key
    )

    async with backend_authenticated_cmds_factory(
        addr=original_device.organization_addr,
        device_id=original_device.device_id,
        signing_key=original_device.signing_key,
    ) as cmds:
        rep = await cmds.device_create(
            device_certificate=device_certificate,
            redacted_device_certificate=redacted_device_certificate,
        )
        if rep["status"] != "ok":
            raise BackendConnectionError(f"Cannot create recovery device: {rep}")

    return new_device
async def test_new_workspace(running_backend, alice, alice_user_fs, alice2_user_fs):
    with freeze_time("2000-01-02"):
        wid = await alice_user_fs.workspace_create(EntryName("w"))
    workspace = alice_user_fs.get_workspace(wid)

    with alice_user_fs.event_bus.listen() as spy:
        with freeze_time("2000-01-03"):
            await workspace.sync()
    spy.assert_events_occured(
        [(CoreEvent.FS_ENTRY_SYNCED, {"workspace_id": wid, "id": wid}, datetime(2000, 1, 3))]
    )

    workspace2 = alice_user_fs.get_workspace(wid)
    await alice_user_fs.sync()
    await workspace2.sync()

    workspace_entry = workspace.get_workspace_entry()
    path_info = await workspace.path_info("/")
    assert path_info == {
        "type": "folder",
        "id": wid,
        "is_placeholder": False,
        "need_sync": False,
        "base_version": 1,
        "children": [],
        "created": datetime(2000, 1, 2),
        "updated": datetime(2000, 1, 2),
        "confinement_point": None,
    }

    KEY = SecretKey.generate()
    workspace_entry = workspace_entry.evolve(key=KEY)
    assert workspace_entry == WorkspaceEntry(
        name=EntryName("w"),
        id=wid,
        key=KEY,
        encryption_revision=1,
        encrypted_on=datetime(2000, 1, 2),
        role_cached_on=datetime(2000, 1, 2),
        role=WorkspaceRole.OWNER,
    )

    workspace_entry2 = workspace.get_workspace_entry()
    workspace_entry2 = workspace_entry2.evolve(key=KEY)
    path_info2 = await workspace.path_info("/")
    assert workspace_entry == workspace_entry2
    assert path_info == path_info2
async def test_revoke_sharing_trigger_event(alice_core, bob_core, running_backend):
    KEY = SecretKey.generate()

    def _update_event(event):
        if event.event == CoreEvent.SHARING_UPDATED:
            event.kwargs["new_entry"] = event.kwargs["new_entry"].evolve(
                key=KEY, role_cached_on=datetime(2000, 1, 2)
            )
            event.kwargs["previous_entry"] = event.kwargs["previous_entry"].evolve(
                key=KEY, role_cached_on=datetime(2000, 1, 2)
            )
        return event

    with freeze_time("2000-01-02"):
        wid = await create_shared_workspace(EntryName("w"), alice_core, bob_core)

    with bob_core.event_bus.listen() as spy:
        with freeze_time("2000-01-03"):
            await alice_core.user_fs.workspace_share(wid, recipient=UserID("bob"), role=None)

        # Each workspace participant should get the message
        await spy.wait_with_timeout(
            CoreEvent.SHARING_UPDATED,
            {
                "new_entry": WorkspaceEntry(
                    name=EntryName("w"),
                    id=wid,
                    key=KEY,
                    encryption_revision=1,
                    encrypted_on=datetime(2000, 1, 2),
                    role_cached_on=datetime(2000, 1, 2),
                    role=None,
                ),
                "previous_entry": WorkspaceEntry(
                    name=EntryName("w"),
                    id=wid,
                    key=KEY,
                    encryption_revision=1,
                    encrypted_on=datetime(2000, 1, 2),
                    role_cached_on=datetime(2000, 1, 2),
                    role=WorkspaceRole.MANAGER,
                ),
            },
            update_event_func=_update_event,
        )
def evolve_as_block(self, data: bytes) -> "Chunk":
    # No-op
    if self.is_block:
        return self

    # Check alignment
    if self.raw_offset != self.start:
        raise TypeError("This chunk is not aligned")

    # Craft access
    access = BlockAccess(
        id=BlockID(self.id),
        key=SecretKey.generate(),
        offset=self.start,
        size=self.stop - self.start,
        digest=HashDigest.from_data(data),
    )

    # Evolve
    return self.evolve(access=access)
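# Illustrative sketch (an assumption, not taken from the codebase above): promoting an
# aligned, fully written chunk into a block. The Chunk(...) constructor arguments mirror
# the ones used in the local file manifest tests further down; the data buffer is made up.
data = b"x" * 512
chunk = Chunk(
    id=ChunkID.new(),
    start=0,
    stop=len(data),
    raw_offset=0,
    raw_size=len(data),
    access=None,  # not yet backed by an uploaded block
)
# evolve_as_block crafts a BlockAccess with a fresh SecretKey and the data's digest
block_chunk = chunk.evolve_as_block(data)
assert block_chunk.is_block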
async def test_new_sharing_trigger_event(alice_core, bob_core, running_backend):
    KEY = SecretKey.generate()

    # First, create a folder and sync it on backend
    with freeze_time("2000-01-01"):
        wid = await alice_core.user_fs.workspace_create(EntryName("foo"))
    workspace = alice_core.user_fs.get_workspace(wid)
    with freeze_time("2000-01-02"):
        await workspace.sync()

    # Now we can share this workspace with Bob
    with bob_core.event_bus.listen() as spy:
        with freeze_time("2000-01-03"):
            await alice_core.user_fs.workspace_share(
                wid, recipient=UserID("bob"), role=WorkspaceRole.MANAGER
            )

        def _update_event(event):
            if event.event == CoreEvent.SHARING_UPDATED:
                event.kwargs["new_entry"] = event.kwargs["new_entry"].evolve(
                    key=KEY, role_cached_on=datetime(2000, 1, 1)
                )
            return event

        # Bob should get a notification
        await spy.wait_with_timeout(
            CoreEvent.SHARING_UPDATED,
            {
                "new_entry": WorkspaceEntry(
                    name=EntryName("foo"),
                    id=wid,
                    key=KEY,
                    encryption_revision=1,
                    encrypted_on=datetime(2000, 1, 1),
                    role_cached_on=datetime(2000, 1, 1),
                    role=WorkspaceRole.MANAGER,
                ),
                "previous_entry": None,
            },
            update_event_func=_update_event,
        )
def test_invite_device_data():
    from parsec.api.data.invite import _RsInviteDeviceData, InviteDeviceData, _PyInviteDeviceData

    assert InviteDeviceData is _RsInviteDeviceData

    dl = DeviceLabel("label")
    sk = SigningKey.generate()
    vk = sk.verify_key
    sek = SecretKey.generate()

    py_idd = _PyInviteDeviceData(requested_device_label=dl, verify_key=vk)
    rs_idd = InviteDeviceData(requested_device_label=dl, verify_key=vk)

    assert rs_idd.requested_device_label.str == py_idd.requested_device_label.str

    rs_encrypted = rs_idd.dump_and_encrypt(key=sek)
    py_encrypted = py_idd.dump_and_encrypt(key=sek)

    # Decrypt Rust-encrypted with Rust
    rs_idd2 = InviteDeviceData.decrypt_and_load(rs_encrypted, sek)
    assert rs_idd.requested_device_label.str == rs_idd2.requested_device_label.str

    # Decrypt Python-encrypted with Rust
    rs_idd3 = InviteDeviceData.decrypt_and_load(py_encrypted, sek)
    assert rs_idd.requested_device_label.str == rs_idd3.requested_device_label.str

    # Decrypt Rust-encrypted with Python
    py_idd2 = _PyInviteDeviceData.decrypt_and_load(rs_encrypted, sek)
    assert rs_idd.requested_device_label.str == py_idd2.requested_device_label.str

    # With requested_device_label as None
    py_idd = _PyInviteDeviceData(requested_device_label=None, verify_key=vk)
    rs_idd = InviteDeviceData(requested_device_label=None, verify_key=vk)
    assert py_idd.requested_device_label is None
    assert rs_idd.requested_device_label is None
def test_local_file_manifest():
    from parsec.api.data.manifest import BlockAccess
    from parsec.core.types.manifest import (
        _RsLocalFileManifest,
        LocalFileManifest,
        _PyLocalFileManifest,
        Chunk,
    )

    assert LocalFileManifest is _RsLocalFileManifest

    def _assert_local_file_manifest_eq(py, rs, exclude_base=False, exclude_id=False):
        assert isinstance(py, _PyLocalFileManifest)
        assert isinstance(rs, _RsLocalFileManifest)
        if not exclude_base:
            assert py.base == rs.base
        assert py.need_sync == rs.need_sync
        assert py.updated == rs.updated
        assert py.size == rs.size
        assert py.blocksize == rs.blocksize
        assert len(py.blocks) == len(rs.blocks)
        assert isinstance(rs.blocks, type(py.blocks))
        if len(py.blocks):
            assert isinstance(rs.blocks[0], type(py.blocks[0]))
        if not exclude_id:
            assert py.id == rs.id
        assert py.created == rs.created
        assert py.base_version == rs.base_version
        assert py.is_placeholder == rs.is_placeholder
        assert py.is_reshaped() == rs.is_reshaped()
        for (b1, b2) in zip(sorted(py.blocks), sorted(rs.blocks)):
            assert len(b1) == len(b2)
            assert all(
                isinstance(c2, Chunk)
                and c1.id == c2.id
                and c1.start == c2.start
                and c1.stop == c2.stop
                and c1.raw_offset == c2.raw_offset
                and c1.raw_size == c2.raw_size
                and c1.access == c2.access
                for (c1, c2) in zip(b1, b2)
            )

    def _assert_file_manifest_eq(py, rs):
        assert py.author == rs.author
        assert py.parent == rs.parent
        assert py.version == rs.version
        assert py.size == rs.size
        assert py.blocksize == rs.blocksize
        assert py.timestamp == rs.timestamp
        assert py.created == rs.created
        assert py.updated == rs.updated
        assert len(py.blocks) == len(rs.blocks)
        assert isinstance(rs.blocks, type(py.blocks))
        assert all(
            isinstance(b2, BlockAccess)
            and b1.id == b2.id
            and b1.offset == b2.offset
            and b1.size == b2.size
            for (b1, b2) in zip(sorted(py.blocks), sorted(rs.blocks))
        )

    kwargs = {
        "base": FileManifest(
            author=DeviceID("user@device"),
            id=EntryID.new(),
            parent=EntryID.new(),
            version=42,
            size=1337,
            blocksize=85,
            timestamp=pendulum.now(),
            created=pendulum.now(),
            updated=pendulum.now(),
            blocks=(
                BlockAccess(
                    id=BlockID.new(), key=SecretKey.generate(), offset=0, size=1024,
                    digest=HashDigest.from_data(b"a"),
                ),
            ),
        ),
        "need_sync": True,
        "updated": pendulum.now(),
        "size": 42,
        "blocksize": 64,
        "blocks": (
            (
                Chunk(
                    id=ChunkID.new(), start=0, stop=250, raw_offset=0, raw_size=512,
                    access=BlockAccess(
                        id=BlockID.new(), key=SecretKey.generate(), offset=0, size=512,
                        digest=HashDigest.from_data(b"aa"),
                    ),
                ),
                Chunk(id=ChunkID.new(), start=0, stop=250, raw_offset=250, raw_size=250, access=None),
            ),
        ),
    }

    py_lfm = _PyLocalFileManifest(**kwargs)
    rs_lfm = LocalFileManifest(**kwargs)
    _assert_local_file_manifest_eq(py_lfm, rs_lfm)

    kwargs = {
        "base": kwargs["base"].evolve(
            **{
                "author": DeviceID("a@b"),
                "id": EntryID.new(),
                "parent": EntryID.new(),
                "version": 1337,
                "size": 4096,
                "blocksize": 512,
                "timestamp": pendulum.now(),
                "created": pendulum.now(),
                "updated": pendulum.now(),
                "blocks": (
                    BlockAccess(
                        id=BlockID.new(), key=SecretKey.generate(), offset=64, size=2048,
                        digest=HashDigest.from_data(b"b"),
                    ),
                ),
            }
        ),
        "need_sync": False,
        "updated": pendulum.now(),
        "size": 2048,
        "blocksize": 1024,
        "blocks": (
            (
                Chunk(
                    id=ChunkID.new(), start=0, stop=1024, raw_offset=0, raw_size=1024,
                    access=BlockAccess(
                        id=BlockID.new(), key=SecretKey.generate(), offset=0, size=1024,
                        digest=HashDigest.from_data(b"bb"),
                    ),
                ),
                Chunk(id=ChunkID.new(), start=1024, stop=2048, raw_offset=1024, raw_size=1024, access=None),
            ),
        ),
    }

    py_lfm = py_lfm.evolve(**kwargs)
    rs_lfm = rs_lfm.evolve(**kwargs)
    _assert_local_file_manifest_eq(py_lfm, rs_lfm)

    sk = SecretKey.generate()
    py_enc = py_lfm.dump_and_encrypt(sk)
    rs_enc = rs_lfm.dump_and_encrypt(sk)

    # Decrypt rust encrypted with Python and vice versa
    lfm1 = _PyLocalFileManifest.decrypt_and_load(rs_enc, sk)
    lfm2 = LocalFileManifest.decrypt_and_load(py_enc, sk)
    assert isinstance(lfm1, LocalFileManifest)
    assert isinstance(lfm2, LocalFileManifest)
    assert lfm1 == lfm2

    py_lfm = py_lfm.evolve(**{"size": 1337})
    rs_lfm = rs_lfm.evolve(**{"size": 1337})
    _assert_local_file_manifest_eq(py_lfm, rs_lfm)

    with pytest.raises(AssertionError):
        py_lfm.assert_integrity()
    with pytest.raises(AssertionError):
        rs_lfm.assert_integrity()

    assert py_lfm.to_stats() == rs_lfm.to_stats()
    assert py_lfm.parent == rs_lfm.parent
    assert py_lfm.get_chunks(0) == rs_lfm.get_chunks(0)
    assert py_lfm.get_chunks(1000) == rs_lfm.get_chunks(1000)
    assert py_lfm.asdict() == rs_lfm.asdict()

    di = DeviceID("a@b")
    ts = pendulum.now()

    kwargs = {
        "size": 1024,
        "blocksize": 1024,
        "blocks": (
            (
                Chunk(
                    id=ChunkID.new(), start=0, stop=1024, raw_offset=0, raw_size=1024,
                    access=BlockAccess(
                        id=BlockID.new(), key=SecretKey.generate(), offset=0, size=1024,
                        digest=HashDigest.from_data(b"bb"),
                    ),
                ),
            ),
        ),
    }

    py_lfm = py_lfm.evolve(**kwargs)
    rs_lfm = rs_lfm.evolve(**kwargs)
    _assert_local_file_manifest_eq(py_lfm, rs_lfm)

    py_rfm = py_lfm.to_remote(author=di, timestamp=ts)
    rs_rfm = rs_lfm.to_remote(author=di, timestamp=ts)
    _assert_file_manifest_eq(py_rfm, rs_rfm)

    py_lfm2 = _PyLocalFileManifest.from_remote(py_rfm)
    rs_lfm2 = LocalFileManifest.from_remote(rs_rfm)
    _assert_local_file_manifest_eq(py_lfm2, rs_lfm2, exclude_base=True, exclude_id=True)

    py_lfm2 = _PyLocalFileManifest.from_remote_with_local_context(
        remote=py_rfm, prevent_sync_pattern=r".+", local_manifest=py_lfm2, timestamp=ts
    )
    rs_lfm2 = LocalFileManifest.from_remote_with_local_context(
        remote=rs_rfm, prevent_sync_pattern=r".+", local_manifest=rs_lfm2, timestamp=ts
    )

    assert py_lfm.match_remote(py_rfm) == rs_lfm.match_remote(rs_rfm)

    py_lfm = py_lfm.evolve_and_mark_updated(timestamp=ts, size=4096)
    rs_lfm = rs_lfm.evolve_and_mark_updated(timestamp=ts, size=4096)
    _assert_local_file_manifest_eq(py_lfm, rs_lfm, exclude_base=True, exclude_id=True)

    with pytest.raises(TypeError) as excinfo:
        py_lfm.evolve_and_mark_updated(timestamp=ts, need_sync=True)
    assert str(excinfo.value) == "Unexpected keyword argument `need_sync`"
    with pytest.raises(TypeError) as excinfo:
        rs_lfm.evolve_and_mark_updated(timestamp=ts, need_sync=True)
    assert str(excinfo.value) == "Unexpected keyword argument `need_sync`"

    ei = EntryID.new()

    # Without blocksize
    py_lfm = _PyLocalFileManifest.new_placeholder(author=di, parent=ei, timestamp=ts)
    rs_lfm = LocalFileManifest.new_placeholder(author=di, parent=ei, timestamp=ts)
    _assert_local_file_manifest_eq(py_lfm, rs_lfm, exclude_base=True, exclude_id=True)

    # With blocksize
    py_lfm = _PyLocalFileManifest.new_placeholder(author=di, parent=ei, timestamp=ts, blocksize=1024)
    rs_lfm = LocalFileManifest.new_placeholder(author=di, parent=ei, timestamp=ts, blocksize=1024)
    _assert_local_file_manifest_eq(py_lfm, rs_lfm, exclude_base=True, exclude_id=True)
async def test_concurrent_devices_agree_on_user_manifest(
    running_backend,
    backend_data_binder,
    data_base_dir,
    user_fs_factory,
    coolorg,
    alice,
    alice2,
    with_speculative,
):
    KEY = SecretKey.generate()

    async def _switch_running_backend_offline(task_status):
        should_switch_online = trio.Event()
        backend_online = trio.Event()

        async def _switch_backend_online():
            should_switch_online.set()
            await backend_online.wait()

        with running_backend.offline():
            task_status.started(_switch_backend_online)
            await should_switch_online.wait()
        backend_online.set()

    # I call this "diagonal programming"...
    async with trio.open_nursery() as nursery:
        switch_back_online = await nursery.start(_switch_running_backend_offline)

        with freeze_time("2000-01-01"):
            if with_speculative != "both":
                await user_storage_non_speculative_init(data_base_dir=data_base_dir, device=alice)
            async with user_fs_factory(alice, data_base_dir=data_base_dir) as user_fs1:
                wksp1_id = await user_fs1.workspace_create(EntryName("wksp1"))

                with freeze_time("2000-01-02"):
                    if with_speculative not in ("both", "alice2"):
                        await user_storage_non_speculative_init(
                            data_base_dir=data_base_dir, device=alice2
                        )
                    async with user_fs_factory(alice2, data_base_dir=data_base_dir) as user_fs2:
                        wksp2_id = await user_fs2.workspace_create(EntryName("wksp2"))

                        with freeze_time("2000-01-03"):
                            # Only now does the backend appear offline, this is to ensure each
                            # userfs has created a user manifest in isolation
                            await backend_data_binder.bind_organization(
                                coolorg, alice, initial_user_manifest="not_synced"
                            )
                            await backend_data_binder.bind_device(alice2, certifier=alice)

                        await switch_back_online()

                        # Sync user_fs2 first to ensure created_on field is
                        # kept even if further syncs have an earlier value
                        with freeze_time("2000-01-04"):
                            await user_fs2.sync()
                        with freeze_time("2000-01-05"):
                            await user_fs1.sync()
                        with freeze_time("2000-01-06"):
                            await user_fs2.sync()

                        # Now, both user fs should have the same view on data
                        expected_workspaces_entries = (
                            WorkspaceEntry(
                                name=EntryName("wksp1"), id=wksp1_id, key=KEY, encryption_revision=1,
                                encrypted_on=datetime(2000, 1, 1), role_cached_on=datetime(2000, 1, 1),
                                role=WorkspaceRole.OWNER,
                            ),
                            WorkspaceEntry(
                                name=EntryName("wksp2"), id=wksp2_id, key=KEY, encryption_revision=1,
                                encrypted_on=datetime(2000, 1, 2), role_cached_on=datetime(2000, 1, 2),
                                role=WorkspaceRole.OWNER,
                            ),
                        )
                        expected_user_manifest = LocalUserManifest(
                            base=UserManifest(
                                id=alice.user_manifest_id,
                                version=2,
                                timestamp=datetime(2000, 1, 5),
                                author=alice.device_id,
                                created=datetime(2000, 1, 2),
                                updated=datetime(2000, 1, 2),
                                last_processed_message=0,
                                workspaces=expected_workspaces_entries,
                            ),
                            need_sync=False,
                            updated=datetime(2000, 1, 2),
                            last_processed_message=0,
                            workspaces=expected_workspaces_entries,
                            speculative=False,
                        )

                        user_fs1_manifest = user_fs1.get_user_manifest()
                        user_fs2_manifest = user_fs2.get_user_manifest()

                        # We used to use ANY for the "key" argument in expected_user_manifest,
                        # so that we could compare the two instances safely. Sadly, ANY doesn't
                        # play nicely with the Rust bindings, so we instead update the instances
                        # to change the key.
                        user_fs1_manifest = user_fs1_manifest.evolve(
                            workspaces=tuple(
                                w.evolve(key=KEY) for w in user_fs1_manifest.workspaces
                            ),
                            base=user_fs1_manifest.base.evolve(
                                workspaces=tuple(
                                    w.evolve(key=KEY) for w in user_fs1_manifest.base.workspaces
                                )
                            ),
                        )
                        user_fs2_manifest = user_fs2_manifest.evolve(
                            workspaces=tuple(
                                w.evolve(key=KEY) for w in user_fs2_manifest.workspaces
                            ),
                            base=user_fs2_manifest.base.evolve(
                                workspaces=tuple(
                                    w.evolve(key=KEY) for w in user_fs2_manifest.base.workspaces
                                )
                            ),
                        )

                        assert user_fs1_manifest == expected_user_manifest
                        assert user_fs2_manifest == expected_user_manifest
async def claim_device(
    organization_addr: BackendOrganizationAddr,
    new_device_id: DeviceID,
    token: str,
    keepalive: Optional[int] = None,
) -> LocalDevice:
    """
    Raises:
        InviteClaimError
        InviteClaimBackendOfflineError
        InviteClaimValidationError
        InviteClaimPackingError
        InviteClaimCryptoError
    """
    device_signing_key = SigningKey.generate()
    answer_private_key = PrivateKey.generate()

    try:
        async with backend_anonymous_cmds_factory(organization_addr, keepalive=keepalive) as cmds:
            # 1) Retrieve invitation creator
            try:
                invitation_creator_user, invitation_creator_device = await get_device_invitation_creator(
                    cmds, organization_addr.root_verify_key, new_device_id
                )
            except RemoteDevicesManagerBackendOfflineError as exc:
                raise InviteClaimBackendOfflineError(str(exc)) from exc
            except RemoteDevicesManagerError as exc:
                raise InviteClaimError(f"Cannot retrieve invitation creator: {exc}") from exc

            # 2) Generate claim info for invitation creator
            try:
                encrypted_claim = DeviceClaimContent(
                    token=token,
                    device_id=new_device_id,
                    verify_key=device_signing_key.verify_key,
                    answer_public_key=answer_private_key.public_key,
                ).dump_and_encrypt_for(recipient_pubkey=invitation_creator_user.public_key)
            except DataError as exc:
                raise InviteClaimError(f"Cannot generate device claim message: {exc}") from exc

            # 3) Send claim
            rep = await cmds.device_claim(new_device_id, encrypted_claim)
            if rep["status"] != "ok":
                raise InviteClaimError(f"Claim request error: {rep}")

            # 4) Verify device certificate
            try:
                DeviceCertificateContent.verify_and_load(
                    rep["device_certificate"],
                    author_verify_key=invitation_creator_device.verify_key,
                    expected_author=invitation_creator_device.device_id,
                    expected_device=new_device_id,
                )
            except DataError as exc:
                raise InviteClaimCryptoError(str(exc)) from exc

            try:
                answer = DeviceClaimAnswerContent.decrypt_and_load_for(
                    rep["encrypted_answer"], recipient_privkey=answer_private_key
                )
            except DataError as exc:
                raise InviteClaimCryptoError(f"Cannot decrypt device claim answer: {exc}") from exc

    except BackendNotAvailable as exc:
        raise InviteClaimBackendOfflineError(str(exc)) from exc
    except BackendConnectionError as exc:
        raise InviteClaimError(f"Cannot claim device: {exc}") from exc

    return LocalDevice(
        organization_addr=organization_addr,
        device_id=new_device_id,
        signing_key=device_signing_key,
        private_key=answer.private_key,
        is_admin=invitation_creator_user.is_admin,
        user_manifest_id=answer.user_manifest_id,
        user_manifest_key=answer.user_manifest_key,
        local_symkey=SecretKey.generate(),
    )
def test_invite_user_data():
    from parsec.api.data.invite import _RsInviteUserData, InviteUserData, _PyInviteUserData

    assert InviteUserData is _RsInviteUserData

    dl = DeviceLabel("label")
    hh = HumanHandle("*****@*****.**", "Hubert Farnsworth")
    pk = PrivateKey.generate()
    sik = SigningKey.generate()
    sek = SecretKey.generate()

    py_iud = _PyInviteUserData(
        requested_device_label=dl,
        requested_human_handle=hh,
        public_key=pk.public_key,
        verify_key=sik.verify_key,
    )
    rs_iud = InviteUserData(
        requested_device_label=dl,
        requested_human_handle=hh,
        public_key=pk.public_key,
        verify_key=sik.verify_key,
    )

    assert rs_iud.requested_device_label.str == py_iud.requested_device_label.str
    assert str(rs_iud.requested_human_handle) == str(py_iud.requested_human_handle)

    rs_encrypted = rs_iud.dump_and_encrypt(key=sek)
    py_encrypted = py_iud.dump_and_encrypt(key=sek)

    # Decrypt Rust-encrypted with Rust
    rs_iud2 = InviteUserData.decrypt_and_load(rs_encrypted, sek)
    assert rs_iud.requested_device_label.str == rs_iud2.requested_device_label.str
    assert str(rs_iud.requested_human_handle) == str(rs_iud2.requested_human_handle)

    # Decrypt Python-encrypted with Rust
    rs_iud3 = InviteUserData.decrypt_and_load(py_encrypted, sek)
    assert rs_iud.requested_device_label.str == rs_iud3.requested_device_label.str
    assert str(rs_iud.requested_human_handle) == str(rs_iud3.requested_human_handle)

    # Decrypt Rust-encrypted with Python
    py_iud2 = _PyInviteUserData.decrypt_and_load(rs_encrypted, sek)
    assert rs_iud.requested_device_label.str == py_iud2.requested_device_label.str
    assert str(rs_iud.requested_human_handle) == str(py_iud2.requested_human_handle)

    # With requested_human_handle and requested_device_label as None
    py_iud = _PyInviteUserData(
        requested_device_label=None,
        requested_human_handle=None,
        public_key=pk.public_key,
        verify_key=sik.verify_key,
    )
    rs_iud = InviteUserData(
        requested_device_label=None,
        requested_human_handle=None,
        public_key=pk.public_key,
        verify_key=sik.verify_key,
    )
    assert py_iud.requested_device_label is None
    assert rs_iud.requested_device_label is None
    assert py_iud.requested_human_handle is None
    assert rs_iud.requested_human_handle is None
def test_invite_device_confirmation():
    from parsec.api.data.invite import (
        _RsInviteDeviceConfirmation,
        InviteDeviceConfirmation,
        _PyInviteDeviceConfirmation,
    )

    assert InviteDeviceConfirmation is _RsInviteDeviceConfirmation

    di = DeviceID("a@b")
    dl = DeviceLabel("label")
    hh = HumanHandle("*****@*****.**", "Hubert Farnsworth")
    profile = UserProfile.STANDARD
    pk = PrivateKey.generate()
    umi = EntryID.new()
    umk = SecretKey.generate()
    sk = SigningKey.generate()
    vk = sk.verify_key
    sek = SecretKey.generate()

    py_idc = _PyInviteDeviceConfirmation(
        device_id=di,
        device_label=dl,
        human_handle=hh,
        profile=profile,
        private_key=pk,
        user_manifest_id=umi,
        user_manifest_key=umk,
        root_verify_key=vk,
    )
    rs_idc = InviteDeviceConfirmation(
        device_id=di,
        device_label=dl,
        human_handle=hh,
        profile=profile,
        private_key=pk,
        user_manifest_id=umi,
        user_manifest_key=umk,
        root_verify_key=vk,
    )

    assert rs_idc.device_label.str == py_idc.device_label.str
    assert str(rs_idc.human_handle) == str(py_idc.human_handle)
    assert rs_idc.device_id.str == py_idc.device_id.str
    assert rs_idc.profile == py_idc.profile
    assert rs_idc.user_manifest_id.hex == py_idc.user_manifest_id.hex

    rs_encrypted = rs_idc.dump_and_encrypt(key=sek)
    py_encrypted = py_idc.dump_and_encrypt(key=sek)

    # Decrypt Rust-encrypted with Rust
    rs_idc2 = InviteDeviceConfirmation.decrypt_and_load(rs_encrypted, sek)
    assert rs_idc.device_label.str == rs_idc2.device_label.str
    assert str(rs_idc.human_handle) == str(rs_idc2.human_handle)
    assert rs_idc.device_id.str == rs_idc2.device_id.str
    assert rs_idc.profile == rs_idc2.profile
    assert rs_idc.user_manifest_id.hex == rs_idc2.user_manifest_id.hex

    # Decrypt Python-encrypted with Rust
    rs_idc3 = InviteDeviceConfirmation.decrypt_and_load(py_encrypted, sek)
    assert rs_idc.device_label.str == rs_idc3.device_label.str
    assert str(rs_idc.human_handle) == str(rs_idc3.human_handle)
    assert rs_idc.device_id.str == rs_idc3.device_id.str
    assert rs_idc.profile == rs_idc3.profile
    assert rs_idc.user_manifest_id.hex == rs_idc3.user_manifest_id.hex

    # Decrypt Rust-encrypted with Python
    py_idc2 = _PyInviteDeviceConfirmation.decrypt_and_load(rs_encrypted, sek)
    assert rs_idc.device_label.str == py_idc2.device_label.str
    assert str(rs_idc.human_handle) == str(py_idc2.human_handle)
    assert rs_idc.device_id.str == py_idc2.device_id.str
    assert rs_idc.profile == py_idc2.profile
    assert rs_idc.user_manifest_id.hex == py_idc2.user_manifest_id.hex

    # With human_handle and device_label as None
    py_idc = _PyInviteDeviceConfirmation(
        device_id=di,
        device_label=None,
        human_handle=None,
        profile=profile,
        private_key=pk,
        user_manifest_id=umi,
        user_manifest_key=umk,
        root_verify_key=vk,
    )
    rs_idc = InviteDeviceConfirmation(
        device_id=di,
        device_label=None,
        human_handle=None,
        profile=profile,
        private_key=pk,
        user_manifest_id=umi,
        user_manifest_key=umk,
        root_verify_key=vk,
    )
    assert py_idc.device_label is None
    assert rs_idc.device_label is None
    assert py_idc.human_handle is None
    assert rs_idc.human_handle is None
def test_local_workspace_manifest():
    from parsec.core.types.manifest import (
        _RsLocalWorkspaceManifest,
        LocalWorkspaceManifest,
        _PyLocalWorkspaceManifest,
    )

    assert LocalWorkspaceManifest is _RsLocalWorkspaceManifest

    def _assert_local_workspace_manifest_eq(py, rs, exclude_base=False, exclude_id=False):
        assert isinstance(py, _PyLocalWorkspaceManifest)
        assert isinstance(rs, _RsLocalWorkspaceManifest)
        if not exclude_base:
            assert py.base == rs.base
        if not exclude_id:
            assert py.id == rs.id
        assert py.need_sync == rs.need_sync
        assert py.updated == rs.updated
        assert py.speculative == rs.speculative
        assert len(py.children) == len(rs.children)
        assert isinstance(rs.children, type(py.children)), "Rust type is {}, should be {}".format(
            type(rs.children), type(py.children)
        )
        assert all(
            isinstance(name1, EntryName)
            and isinstance(id1, EntryID)
            and name1 == name2
            and id1 == id2
            for ((name1, id1), (name2, id2)) in zip(
                sorted(py.children.items()), sorted(rs.children.items())
            )
        )
        assert len(py.local_confinement_points) == len(rs.local_confinement_points)
        assert py.local_confinement_points == rs.local_confinement_points
        assert len(py.remote_confinement_points) == len(rs.remote_confinement_points)
        assert py.remote_confinement_points == rs.remote_confinement_points

    def _assert_workspace_manifest_eq(py, rs):
        assert py.author == rs.author
        assert py.version == rs.version
        assert py.timestamp == rs.timestamp
        assert py.created == rs.created
        assert py.updated == rs.updated
        assert py.children == rs.children

    kwargs = {
        "base": WorkspaceManifest(
            author=DeviceID("user@device"),
            id=EntryID.new(),
            version=42,
            timestamp=pendulum.now(),
            created=pendulum.now(),
            updated=pendulum.now(),
            children={EntryName("file1.txt"): EntryID.new()},
        ),
        "need_sync": True,
        "updated": pendulum.now(),
        "children": {EntryName("wksp2"): EntryID.new()},
        "local_confinement_points": frozenset({EntryID.new()}),
        "remote_confinement_points": frozenset({EntryID.new()}),
        "speculative": True,
    }

    py_lwm = _PyLocalWorkspaceManifest(**kwargs)
    rs_lwm = LocalWorkspaceManifest(**kwargs)
    _assert_local_workspace_manifest_eq(py_lwm, rs_lwm)

    kwargs = {
        "base": kwargs["base"].evolve(
            **{
                "author": DeviceID("a@b"),
                "id": EntryID.new(),
                "version": 1337,
                "timestamp": pendulum.now(),
                "created": pendulum.now(),
                "updated": pendulum.now(),
                "children": {EntryName("file2.mp4"): EntryID.new()},
            }
        ),
        "need_sync": False,
        "updated": pendulum.now(),
        "children": {EntryName("wksp1"): EntryID.new()},
        "local_confinement_points": frozenset({EntryID.new()}),
        "remote_confinement_points": frozenset({EntryID.new()}),
        "speculative": False,
    }

    py_lwm = py_lwm.evolve(**kwargs)
    rs_lwm = rs_lwm.evolve(**kwargs)
    _assert_local_workspace_manifest_eq(py_lwm, rs_lwm)

    sk = SecretKey.generate()
    py_enc = py_lwm.dump_and_encrypt(sk)
    rs_enc = rs_lwm.dump_and_encrypt(sk)

    # Decrypt rust encrypted with Python and vice versa
    lwm1 = _PyLocalWorkspaceManifest.decrypt_and_load(rs_enc, sk)
    lwm2 = LocalWorkspaceManifest.decrypt_and_load(py_enc, sk)
    assert isinstance(lwm1, LocalWorkspaceManifest)
    assert isinstance(lwm2, LocalWorkspaceManifest)
    assert lwm1 == lwm2

    assert py_lwm.to_stats() == rs_lwm.to_stats()
    assert py_lwm.asdict() == rs_lwm.asdict()

    ts = pendulum.now()
    ei = EntryID.new()
    di = DeviceID("a@b")

    # With optional parameters
    py_lwm = _PyLocalWorkspaceManifest.new_placeholder(author=di, id=ei, timestamp=ts, speculative=True)
    rs_lwm = LocalWorkspaceManifest.new_placeholder(author=di, id=ei, timestamp=ts, speculative=True)
    _assert_local_workspace_manifest_eq(py_lwm, rs_lwm, exclude_base=True, exclude_id=True)

    # Without optional parameters
    py_lwm = _PyLocalWorkspaceManifest.new_placeholder(author=di, timestamp=ts)
    rs_lwm = LocalWorkspaceManifest.new_placeholder(author=di, timestamp=ts)
    _assert_local_workspace_manifest_eq(py_lwm, rs_lwm, exclude_base=True, exclude_id=True)

    py_rwm = py_lwm.to_remote(author=di, timestamp=ts)
    rs_rwm = rs_lwm.to_remote(author=di, timestamp=ts)
    _assert_workspace_manifest_eq(py_rwm, rs_rwm)

    children = {EntryName("wksp1"): EntryID.new()}

    py_lwm = py_lwm.evolve_and_mark_updated(timestamp=ts, children=children)
    rs_lwm = rs_lwm.evolve_and_mark_updated(timestamp=ts, children=children)
    _assert_local_workspace_manifest_eq(py_lwm, rs_lwm, exclude_base=True, exclude_id=True)

    with pytest.raises(TypeError) as excinfo:
        py_lwm.evolve_and_mark_updated(timestamp=ts, need_sync=True)
    assert str(excinfo.value) == "Unexpected keyword argument `need_sync`"
    with pytest.raises(TypeError) as excinfo:
        rs_lwm.evolve_and_mark_updated(timestamp=ts, need_sync=True)
    assert str(excinfo.value) == "Unexpected keyword argument `need_sync`"

    py_lwm2 = _PyLocalWorkspaceManifest.from_remote(py_rwm, r".+")
    rs_lwm2 = LocalWorkspaceManifest.from_remote(rs_rwm, r".+")
    _assert_local_workspace_manifest_eq(py_lwm2, rs_lwm2, exclude_base=True, exclude_id=True)

    py_lwm2 = _PyLocalWorkspaceManifest.from_remote_with_local_context(
        remote=py_rwm, prevent_sync_pattern=r".+", local_manifest=py_lwm2, timestamp=ts
    )
    rs_lwm2 = LocalWorkspaceManifest.from_remote_with_local_context(
        remote=rs_rwm, prevent_sync_pattern=r".+", local_manifest=rs_lwm2, timestamp=ts
    )

    assert py_lwm.match_remote(py_rwm) == rs_lwm.match_remote(rs_rwm)
import pytest

from pendulum import datetime

from parsec.api.data import UserManifest, EntryID, EntryName
from parsec.crypto import SecretKey
from parsec.core.fs.remote_loader import MANIFEST_STAMP_AHEAD_US
from parsec.core.types import (
    WorkspaceEntry,
    WorkspaceRole,
    LocalUserManifest,
    LocalWorkspaceManifest,
)
from parsec.core.fs import FSWorkspaceNotFoundError, FSBackendOfflineError

from tests.common import freeze_time


KEY = SecretKey.generate()


def _update_user_manifest_key(um):
    return um.evolve(
        base=um.base.evolve(workspaces=tuple(w.evolve(key=KEY) for w in um.base.workspaces)),
        workspaces=tuple(w.evolve(key=KEY) for w in um.workspaces),
    )


@pytest.mark.trio
async def test_get_manifest(alice_user_fs):
    um = alice_user_fs.get_user_manifest()
    assert um.base_version == 1
    assert not um.need_sync
def test_local_user_manifest():
    from parsec.core.types.manifest import (
        _RsLocalUserManifest,
        LocalUserManifest,
        _PyLocalUserManifest,
    )

    assert LocalUserManifest is _RsLocalUserManifest

    def _assert_local_user_manifest_eq(py, rs, exclude_base=False, exclude_id=False):
        assert isinstance(py, _PyLocalUserManifest)
        assert isinstance(rs, _RsLocalUserManifest)
        if not exclude_base:
            assert py.base == rs.base
        if not exclude_id:
            assert py.id == rs.id
        assert py.need_sync == rs.need_sync
        assert py.updated == rs.updated
        assert py.last_processed_message == rs.last_processed_message
        assert len(py.workspaces) == len(rs.workspaces)
        assert isinstance(rs.workspaces, type(py.workspaces))
        assert py.workspaces == rs.workspaces
        assert py.speculative == rs.speculative

    def _assert_user_manifest_eq(py, rs):
        assert py.author == rs.author
        assert py.version == rs.version
        assert py.timestamp == rs.timestamp
        assert py.created == rs.created
        assert py.updated == rs.updated
        assert py.last_processed_message == rs.last_processed_message
        assert py.workspaces == rs.workspaces
        assert isinstance(rs.workspaces, type(py.workspaces))

    kwargs = {
        "base": UserManifest(
            author=DeviceID("user@device"),
            id=EntryID.new(),
            version=42,
            timestamp=pendulum.now(),
            created=pendulum.now(),
            updated=pendulum.now(),
            last_processed_message=0,
            workspaces=(WorkspaceEntry.new(EntryName("user"), pendulum.now()),),
        ),
        "need_sync": True,
        "updated": pendulum.now(),
        "last_processed_message": 0,
        "workspaces": (),
        "speculative": True,
    }

    py_lum = _PyLocalUserManifest(**kwargs)
    rs_lum = LocalUserManifest(**kwargs)
    _assert_local_user_manifest_eq(py_lum, rs_lum)

    kwargs = {
        "base": kwargs["base"].evolve(
            **{
                "author": DeviceID("a@b"),
                "id": EntryID.new(),
                "version": 1337,
                "timestamp": pendulum.now(),
                "created": pendulum.now(),
                "updated": pendulum.now(),
                "last_processed_message": 1,
                "workspaces": (WorkspaceEntry.new(EntryName("user"), pendulum.now()),),
            }
        ),
        "need_sync": False,
        "updated": pendulum.now(),
        "last_processed_message": 1,
        "workspaces": (WorkspaceEntry.new(EntryName("wk"), pendulum.now()),),
        "speculative": False,
    }

    py_lum = py_lum.evolve(**kwargs)
    rs_lum = rs_lum.evolve(**kwargs)
    _assert_local_user_manifest_eq(py_lum, rs_lum)

    sk = SecretKey.generate()
    py_enc = py_lum.dump_and_encrypt(sk)
    rs_enc = rs_lum.dump_and_encrypt(sk)

    # Decrypt rust encrypted with Python and vice versa
    lum1 = _PyLocalUserManifest.decrypt_and_load(rs_enc, sk)
    lum2 = LocalUserManifest.decrypt_and_load(py_enc, sk)
    assert isinstance(lum1, LocalUserManifest)
    assert isinstance(lum2, LocalUserManifest)
    assert lum1 == lum2

    assert py_lum.to_stats() == rs_lum.to_stats()
    assert py_lum.asdict() == rs_lum.asdict()

    ts = pendulum.now()
    ei = EntryID.new()
    di = DeviceID("a@b")

    # With optional parameters
    py_lum = _PyLocalUserManifest.new_placeholder(author=di, id=ei, timestamp=ts, speculative=True)
    rs_lum = LocalUserManifest.new_placeholder(author=di, id=ei, timestamp=ts, speculative=True)
    _assert_local_user_manifest_eq(py_lum, rs_lum, exclude_base=True, exclude_id=True)

    # Without optional parameters
    py_lum = _PyLocalUserManifest.new_placeholder(author=di, timestamp=ts)
    rs_lum = LocalUserManifest.new_placeholder(author=di, timestamp=ts)
    _assert_local_user_manifest_eq(py_lum, rs_lum, exclude_base=True, exclude_id=True)

    py_rum = py_lum.to_remote(author=di, timestamp=ts)
    rs_rum = rs_lum.to_remote(author=di, timestamp=ts)
    _assert_user_manifest_eq(py_rum, rs_rum)

    assert py_lum.match_remote(py_rum) == rs_lum.match_remote(rs_rum)

    py_lum2 = _PyLocalUserManifest.from_remote(py_rum)
    rs_lum2 = LocalUserManifest.from_remote(rs_rum)
    _assert_local_user_manifest_eq(py_lum2, rs_lum2, exclude_base=True, exclude_id=True)

    _PyLocalUserManifest.from_remote_with_local_context(
        remote=py_rum, prevent_sync_pattern=r".+", local_manifest=py_lum2, timestamp=ts
    )
    LocalUserManifest.from_remote_with_local_context(
        remote=rs_rum, prevent_sync_pattern=r".+", local_manifest=rs_lum2, timestamp=ts
    )
async def test_share_workspace_then_conflict_on_rights(
    running_backend, alice_user_fs, alice2_user_fs, bob_user_fs, alice, alice2, bob, first_to_sync
):
    # Bob shares a workspace with Alice...
    with freeze_time("2000-01-01"):
        wid = await bob_user_fs.workspace_create(EntryName("w"))
    with freeze_time("2000-01-02"):
        await bob_user_fs.workspace_share(wid, alice.user_id, WorkspaceRole.MANAGER)

    # ...but only Alice's first device gets the information
    with freeze_time("2000-01-03"):
        await alice_user_fs.process_last_messages()

    # Now Bob changes the sharing rights...
    with freeze_time("2000-01-04"):
        await bob_user_fs.workspace_share(wid, alice.user_id, WorkspaceRole.CONTRIBUTOR)

    # ...this time it's Alice's second device which gets the info
    with freeze_time("2000-01-05"):
        # Note we will process the 2 sharing messages bob sent us, this
        # will attribute role_cached_on to the first message timestamp even
        # if we cache the second message role...
        await alice2_user_fs.process_last_messages()

    if first_to_sync == "alice":
        first = alice_user_fs
        second = alice2_user_fs
        synced_timestamp = datetime(2000, 1, 7)
        synced_version = 3
    else:
        first = alice2_user_fs
        second = alice_user_fs
        synced_timestamp = datetime(2000, 1, 6)
        synced_version = 2

    # Finally Alice's devices try to reconcile
    with freeze_time("2000-01-06"):
        await first.sync()
    with freeze_time("2000-01-07"):
        await second.sync()
    # Resync first device to get changes from the 2nd
    with freeze_time("2000-01-08"):
        await first.sync()

    KEY = SecretKey.generate()

    am = alice_user_fs.get_user_manifest()
    a2m = alice2_user_fs.get_user_manifest()

    expected_remote = UserManifest(
        author=alice2.device_id,
        timestamp=synced_timestamp,
        id=alice2.user_manifest_id,
        version=synced_version,
        created=datetime(2000, 1, 1),
        updated=datetime(2000, 1, 5),
        last_processed_message=2,
        workspaces=(
            WorkspaceEntry(
                name=EntryName("w"), id=wid, key=KEY, encryption_revision=1,
                encrypted_on=datetime(2000, 1, 1), role_cached_on=datetime(2000, 1, 5),
                role=WorkspaceRole.CONTRIBUTOR,
            ),
        ),
    )
    expected = LocalUserManifest(
        base=expected_remote,
        need_sync=False,
        updated=expected_remote.updated,
        last_processed_message=expected_remote.last_processed_message,
        workspaces=expected_remote.workspaces,
        speculative=False,
    )

    am = am.evolve(
        base=am.base.evolve(workspaces=tuple(w.evolve(key=KEY) for w in am.base.workspaces)),
        workspaces=tuple(w.evolve(key=KEY) for w in am.workspaces),
    )
    a2m = a2m.evolve(
        base=a2m.base.evolve(workspaces=tuple(w.evolve(key=KEY) for w in a2m.base.workspaces)),
        workspaces=tuple(w.evolve(key=KEY) for w in a2m.workspaces),
    )
    assert am == expected
    assert a2m == expected

    a_w = alice_user_fs.get_workspace(wid)
    a2_w = alice2_user_fs.get_workspace(wid)

    a_w_stat = await a_w.path_info("/")
    a2_w_stat = await a2_w.path_info("/")

    a_w_entry = a_w.get_workspace_entry()
    a2_w_entry = a2_w.get_workspace_entry()

    assert a_w_stat == {
        "type": "folder",
        "is_placeholder": False,
        "id": wid,
        "created": ANY,
        "updated": ANY,
        "base_version": 1,
        "need_sync": False,
        "children": [],
        "confinement_point": None,
    }
    assert a_w_stat == a2_w_stat

    a_w_entry = a_w_entry.evolve(key=KEY)
    assert a_w_entry == WorkspaceEntry(
        name=EntryName("w"), id=wid, key=KEY, encryption_revision=1,
        encrypted_on=datetime(2000, 1, 1), role_cached_on=datetime(2000, 1, 5),
        role=WorkspaceRole.CONTRIBUTOR,
    )
    a2_w_entry = a2_w_entry.evolve(key=KEY)
    assert a2_w_entry == a_w_entry
def test_file_manifest():
    from parsec.api.data.manifest import _RsFileManifest, FileManifest, _PyFileManifest, BlockAccess

    assert FileManifest is _RsFileManifest

    def _assert_file_manifest_eq(py, rs):
        assert isinstance(py, _PyFileManifest)
        assert isinstance(rs, _RsFileManifest)
        assert py.author == rs.author
        assert py.id == rs.id
        assert py.parent == rs.parent
        assert py.version == rs.version
        assert py.size == rs.size
        assert py.blocksize == rs.blocksize
        assert py.timestamp == rs.timestamp
        assert py.created == rs.created
        assert py.updated == rs.updated
        assert len(py.blocks) == len(rs.blocks)
        assert all(
            isinstance(b2, BlockAccess)
            and b1.id == b2.id
            and b1.offset == b2.offset
            and b1.size == b2.size
            for (b1, b2) in zip(py.blocks, rs.blocks)
        )

    kwargs = {
        "author": DeviceID("user@device"),
        "id": EntryID.new(),
        "parent": EntryID.new(),
        "version": 42,
        "size": 1337,
        "blocksize": 64,
        "timestamp": pendulum.now(),
        "created": pendulum.now(),
        "updated": pendulum.now(),
        "blocks": (
            BlockAccess(
                id=BlockID.new(), key=SecretKey.generate(), offset=0, size=1024,
                digest=HashDigest.from_data(b"a"),
            ),
        ),
    }

    py_fm = _PyFileManifest(**kwargs)
    rs_fm = FileManifest(**kwargs)
    _assert_file_manifest_eq(py_fm, rs_fm)

    kwargs = {
        "author": DeviceID("a@b"),
        "id": EntryID.new(),
        "parent": EntryID.new(),
        "version": 1337,
        "timestamp": pendulum.now(),
        "created": pendulum.now(),
        "updated": pendulum.now(),
        "blocks": (
            BlockAccess(
                id=BlockID.new(), key=SecretKey.generate(), offset=64, size=2048,
                digest=HashDigest.from_data(b"b"),
            ),
        ),
    }

    py_fm = py_fm.evolve(**kwargs)
    rs_fm = rs_fm.evolve(**kwargs)
    _assert_file_manifest_eq(py_fm, rs_fm)
def test_workspace_manifest():
    from parsec.api.data.manifest import (
        _RsWorkspaceManifest,
        WorkspaceManifest,
        _PyWorkspaceManifest,
    )

    assert WorkspaceManifest is _RsWorkspaceManifest

    def _assert_workspace_manifest_eq(py, rs):
        assert isinstance(py, _PyWorkspaceManifest)
        assert isinstance(rs, _RsWorkspaceManifest)
        assert py.author == rs.author
        assert py.id == rs.id
        assert py.version == rs.version
        assert py.timestamp == rs.timestamp
        assert py.created == rs.created
        assert py.updated == rs.updated
        assert py.children == rs.children

    kwargs = {
        "author": DeviceID("user@device"),
        "id": EntryID.new(),
        "version": 42,
        "timestamp": pendulum.now(),
        "created": pendulum.now(),
        "updated": pendulum.now(),
        "children": {EntryName("file1.txt"): EntryID.new()},
    }

    py_wm = _PyWorkspaceManifest(**kwargs)
    rs_wm = WorkspaceManifest(**kwargs)
    _assert_workspace_manifest_eq(py_wm, rs_wm)

    kwargs = {
        "author": DeviceID("a@b"),
        "id": EntryID.new(),
        "version": 1337,
        "timestamp": pendulum.now(),
        "created": pendulum.now(),
        "updated": pendulum.now(),
        "children": {EntryName("file2.mp4"): EntryID.new()},
    }

    py_wm = py_wm.evolve(**kwargs)
    rs_wm = rs_wm.evolve(**kwargs)
    _assert_workspace_manifest_eq(py_wm, rs_wm)

    signing_key = SigningKey(b"a" * 32)
    secret_key = SecretKey.generate()

    py_signed_and_encrypted = py_wm.dump_sign_and_encrypt(signing_key, secret_key)
    rs_signed_and_encrypted = rs_wm.dump_sign_and_encrypt(signing_key, secret_key)

    wm1 = WorkspaceManifest.decrypt_verify_and_load(
        py_signed_and_encrypted, secret_key, signing_key.verify_key, py_wm.author, py_wm.timestamp
    )
    wm2 = _PyWorkspaceManifest.decrypt_verify_and_load(
        rs_signed_and_encrypted, secret_key, signing_key.verify_key, py_wm.author, py_wm.timestamp
    )
    assert isinstance(wm1, WorkspaceManifest)
    assert isinstance(wm2, WorkspaceManifest)
    assert wm1 == wm2
def test_user_manifest():
    from parsec.api.data.manifest import (
        _RsUserManifest,
        UserManifest,
        _PyUserManifest,
        WorkspaceEntry,
    )

    assert UserManifest is _RsUserManifest

    def _assert_user_manifest_eq(py, rs):
        assert isinstance(py, _PyUserManifest)
        assert isinstance(rs, _RsUserManifest)
        assert py.author == rs.author
        assert py.version == rs.version
        assert py.id == rs.id
        assert py.timestamp == rs.timestamp
        assert py.created == rs.created
        assert py.updated == rs.updated
        assert py.last_processed_message == rs.last_processed_message
        assert py.workspaces == rs.workspaces

    kwargs = {
        "author": DeviceID("user@device"),
        "id": EntryID.new(),
        "version": 42,
        "timestamp": pendulum.now(),
        "created": pendulum.now(),
        "updated": pendulum.now(),
        "last_processed_message": 4,
        "workspaces": [
            WorkspaceEntry(
                name=EntryName("name"), id=EntryID.new(), key=SecretKey.generate(),
                encryption_revision=1, encrypted_on=pendulum.now(), role_cached_on=pendulum.now(),
                role=RealmRole.OWNER,
            )
        ],
    }

    py_um = _PyUserManifest(**kwargs)
    rs_um = UserManifest(**kwargs)
    _assert_user_manifest_eq(py_um, rs_um)

    kwargs = {
        "author": DeviceID("a@b"),
        "id": EntryID.new(),
        "version": 1337,
        "timestamp": pendulum.now(),
        "created": pendulum.now(),
        "updated": pendulum.now(),
        "last_processed_message": 7,
        "workspaces": [
            WorkspaceEntry(
                name=EntryName("name"), id=EntryID.new(), key=SecretKey.generate(),
                encryption_revision=1, encrypted_on=pendulum.now(), role_cached_on=pendulum.now(),
                role=RealmRole.OWNER,
            ),
            WorkspaceEntry(
                name=EntryName("other_name"), id=EntryID.new(), key=SecretKey.generate(),
                encryption_revision=2, encrypted_on=pendulum.now(), role_cached_on=pendulum.now(),
                role=RealmRole.CONTRIBUTOR,
            ),
        ],
    }

    py_wm = py_um.evolve(**kwargs)
    rs_wm = rs_um.evolve(**kwargs)
    _assert_user_manifest_eq(py_wm, rs_wm)