async def _decrypt_metadata(self, encrypted_metadata, user_vault_key):
    import zipfile
    from io import BytesIO

    import umsgpack

    from syncrypt.pipes import SnappyDecompress

    # decrypt package containing the vault keys
    export_pipe = Once(user_vault_key) \
        >> DecryptRSA_PKCS1_OAEP(self.identity.private_key)
    package_info = await export_pipe.readall()

    zipf = zipfile.ZipFile(BytesIO(package_info), 'r')
    vault_public_key = zipf.read('.vault/id_rsa.pub')
    vault_key = zipf.read('.vault/id_rsa')
    vault_identity = Identity.from_key(vault_public_key, self.config,
                                       private_key=vault_key)

    # decrypt the metadata with the vault identity, then decompress it
    sink = Once(encrypted_metadata) \
        >> DecryptRSA_PKCS1_OAEP(vault_identity.private_key) \
        >> SnappyDecompress()
    serialized_metadata = await sink.readall()
    return umsgpack.unpackb(serialized_metadata)

async def test_rsa_pipe_pkcs1_oaep(local_vault, local_app):
    vault = local_vault
    bundle = BundleManager(local_app).get_bundle_for_relpath(
        'hello.txt', local_vault)
    for i in (2, 10, 1242):
        input = b'a' * i + b'b' * int(i / 2)
        pipe = Once(input) \
            >> EncryptRSA_PKCS1_OAEP(bundle.vault.identity.public_key)
        intermediate = await pipe.readall()
        pipe = Once(intermediate) \
            >> DecryptRSA_PKCS1_OAEP(bundle.vault.identity.private_key)
        output = await pipe.readall()
        assert input == output

async def test_url_put_chunked(asyncio_loop):
    data = b'Ifooqu1oong3phie2iHeefohb0eej1oo'\
           b'x2iJei7aijawae9jah7Xa7ai7aaFa7za'\
           b'e4ieVu9kooY3Ohngavae0hie6ahkee1a'\
           b'cej6koofeiwaeWahmoo9ogh0aeshaeme'
    times = 10  # repeat test data this many times
    chunksize = 112
    chunks = math.ceil((len(data) * times * 1.0) / chunksize)
    urls = ['https://httpbin.org/put?chunk={0}'.format(c) for c in range(chunks)]
    data_pipe = Once(data) >> Repeat(times)
    writer = data_pipe >> ChunkedURLWriter(urls, chunksize=chunksize,
                                           total_size=len(data) * times)
    complete_data = ''
    while True:
        returned_content = (await writer.read())
        if len(returned_content) == 0:
            break
        # The httpbin API will return a JSON object with the data.
        obj = json.loads(returned_content.decode('utf-8'))
        complete_data += obj['data']
    await writer.close()
    assert writer.bytes_written == len(data) * times
    assert complete_data.encode('utf-8') == data * times

async def download(self, bundle):
    vault = self.vault
    assert vault is not None
    assert self.stream is not None

    self.logger.info('Downloading %s', bundle)

    # download key and file
    await self.write_term('download', bundle.store_hash)

    response = await self.read_term()

    if len(response) == 3:
        server_info = rewrite_atoms_dict(response[1])
        server_info.update(**rewrite_atoms_dict(response[2]))
        url = None
    elif len(response) == 2 and isinstance(response[1], tuple) \
            and response[1][0] == Atom('url'):
        response_obj = response[1]
        url = response_obj[1].decode()
        server_info = rewrite_atoms_dict(response_obj[2])
        server_info.update(**rewrite_atoms_dict(response_obj[3]))
    else:
        raise Exception(
            'Unexpected server response to download: {0}'.format(response))

    content_hash = server_info['content_hash'].decode()
    metadata = server_info['metadata']
    file_size = server_info['size']
    assert type(file_size) == int

    if url:
        self.logger.debug(
            'Downloading content ({} bytes) from URL: {}'.format(
                file_size, url))
    else:
        self.logger.debug(
            'Downloading content ({} bytes) from stream.'.format(file_size))

    # read content hash
    self.logger.debug('Content hash: %s', content_hash)

    await bundle.write_encrypted_metadata(Once(metadata))
    await bundle.load_key()

    if url:
        stream_source = URLReader(url)
    else:
        stream_source = TrioStreamReader(self.stream) >> Limit(file_size)

    hash_ok = await vault.crypt_engine.write_encrypted_stream(
        bundle, stream_source, assert_hash=content_hash)

    if not hash_ok:
        # alert server of hash mismatch
        await self.write_term('invalid_content_hash', bundle.store_hash,
                              vault.revision)

async def clone(self, vault_id, local_directory, async_init: bool = False):
    backend = await self.open_backend()

    logger.info('Retrieving encrypted key for vault %s (Fingerprint: %s)',
                vault_id, format_fingerprint(self.identity.get_fingerprint()))

    auth_token, package_info = await backend.get_user_vault_key(
        self.identity.get_fingerprint(), vault_id)

    # decrypt package
    export_pipe = Once(package_info) \
        >> DecryptRSA_PKCS1_OAEP(self.identity.private_key)
    decrypted_package_info = await export_pipe.readall()

    original_vault = self.get_vault_by_path(local_directory)
    if original_vault:
        if original_vault.config.id == vault_id:
            logger.warning('Same vault already exists in the given location, '
                           'continuing...')
            vault = original_vault
        else:
            raise VaultAlreadyExists(original_vault.folder)
    else:
        # There is no vault present, but we want to make sure that this folder
        # is nonexistent or empty:
        if os.path.exists(local_directory) and not is_empty(local_directory):
            raise FolderExistsAndIsNotEmpty(local_directory)

        vault = Vault.from_package_info(decrypted_package_info,
                                        local_directory, auth_token)

    if not async_init:
        await self.pull_vault(vault, full=True)

    await self.add_vault(vault, async_init=async_init)

    return vault

async def test_compression():
    compressed = FileReader("tests/testbinaryvault/README.md") >> SnappyCompress()
    contents = await compressed.read()
    await compressed.close()
    assert len(contents) < 640

    decompressed = Once(contents) >> SnappyDecompress()
    contents = await decompressed.read()
    await decompressed.close()
    assert len(contents) == 640

async def generate_key(self):
    logger.debug('Generating key for %s', self)
    self.key = os.urandom(self.key_size)
    if not os.path.exists(os.path.dirname(self.path_metadata)):
        os.makedirs(os.path.dirname(self.path_metadata))
    assert len(self.key) == self.key_size
    sink = Once(self.serialized_metadata) >> FileWriter(self.path_metadata)
    await sink.consume()

async def test_buffered_2():
    stream = Once(b"ab")
    buffered = stream >> Repeat(24) >> Buffered(6)
    for i in range(8):
        contents = await buffered.read()
        assert contents == b"ab" * 3
    contents = await buffered.read()
    assert contents == b""

async def test_plain():
    stream = Once(b"0123456789abcdef")
    repeated = stream >> Repeat(3)
    contents = b""
    while True:
        buf = await repeated.read()
        if len(buf) == 0:
            break
        contents += buf
    assert contents == b"0123456789abcdef0123456789abcdef0123456789abcdef"

def package_info(self):
    """
    Return a pipe that will contain the identity info such as the private
    and public key.
    """
    memview = BytesIO()
    zipf = zipfile.ZipFile(memview, "w", zipfile.ZIP_DEFLATED)

    # include private and public key
    def include(f):
        zipf.write(f, arcname=os.path.basename(f))

    include(self.id_rsa_path)
    include(self.id_rsa_pub_path)

    zipf.close()
    memview.seek(0)
    return Once(memview.read())

async def test_url_put(asyncio_loop):
    times = 3  # repeat test data this many times
    url = 'https://httpbin.org/put'
    data = b'Ifooqu1oong3phie2iHeefohb0eej1oo'\
           b'x2iJei7aijawae9jah7Xa7ai7aaFa7za'\
           b'e4ieVu9kooY3Ohngavae0hie6ahkee1a'\
           b'cej6koofeiwaeWahmoo9ogh0aeshaeme'
    data_pipe = Once(data) >> Repeat(times) >> Buffered(50)
    writer = data_pipe >> URLWriter(url, len(data) * times)
    returned_content = await writer.readall()
    assert writer.bytes_written == len(data) * times
    # The httpbin API will return a JSON object with the data.
    obj = json.loads(returned_content.decode('utf-8'))
    assert obj['data'].encode('utf-8') == data * times

def package_info(self):
    """
    Return a pipe that will contain vault info such as the id and the
    private and public key.
    """
    memview = BytesIO()
    zipf = zipfile.ZipFile(memview, "w", zipfile.ZIP_DEFLATED)

    cloned_config = configparser.ConfigParser()
    cloned_config.read_dict(self.config._config)

    # include config but strip auth information
    if "remote" in cloned_config:
        for key in ("auth", "username", "password"):
            if key in cloned_config["remote"]:
                del cloned_config["remote"][key]

    # also strip local vault info such as revision
    if "vault" in cloned_config:
        for key in ("revision",):
            if key in cloned_config["vault"]:
                del cloned_config["vault"][key]

    temp_config = StringIO()
    cloned_config.write(temp_config)
    temp_config.seek(0)
    zipf.writestr(".vault/config",
                  temp_config.read().encode(self.config.encoding))

    # include private and public key
    def include(f):
        zipf.write(f, arcname=os.path.relpath(f, self.folder))

    include(self.identity.id_rsa_path)
    include(self.identity.id_rsa_pub_path)

    zipf.close()
    memview.seek(0)
    return Once(memview.read())

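# A minimal usage sketch, not part of the original code: the package pipe
# returned above can be encrypted for a specific recipient with the same RSA
# pipe used elsewhere in this codebase, which matches the shape of data that
# _decrypt_metadata later unpacks. `recipient_identity` is an assumed
# Identity object for the recipient.
async def export_package_for(vault, recipient_identity):
    package = await vault.package_info().readall()
    pipe = Once(package) >> EncryptRSA_PKCS1_OAEP(recipient_identity.public_key)
    return await pipe.readall()
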
async def test_buffered():
    stream = Once(b"ac")
    buffered = stream >> Repeat(24) >> Buffered(1024)
    contents = await buffered.read()
    assert contents == b"ac" * 24

async def apply(self, revision: Revision, vault: Vault):
    if inspect(vault).session:
        raise ValueError('Vault object is bound to a session')

    revision.assert_valid()

    # 1. Check preconditions for this to be a valid revision (current revision must be parent)
    if vault.revision != revision.parent_id:
        raise UnexpectedParentInRevision(
            "Expected parent to be {0}, but is {1}".format(
                revision.parent_id, vault.revision))

    smokesignal.emit('pre_apply_revision', vault=vault, revision=revision)

    with store.session() as session:
        # 2. Check if signing user's key is in the user vault key list
        if revision.operation != RevisionOp.CreateVault:
            signer_key = self.app.user_vault_keys.find_key(
                vault, revision.user_fingerprint)
            if not signer_key:
                raise InvalidRevision(
                    "Key {0} is not allowed to generate revisions for vault {1}"
                    .format(revision.user_fingerprint, vault))
        else:
            # CreateVault is the only operation that is allowed to provide its own key
            signer_key = UserVaultKey(
                vault_id=vault.id,
                user_id=revision.user_id,
                fingerprint=revision.user_fingerprint,
                public_key=revision.user_public_key,
            )

        # 3. Verify revision signature
        revision.verify(signer_key.get_identity(self.app.config))

        # 4. Based on the revision type, perform an action on our state of the vault
        logger.debug(
            "Applying %s (%s) to %s",
            revision.operation,
            revision.revision_id,
            vault.id,
        )

        if revision.operation == RevisionOp.CreateVault:
            session.add(vault)
            session.add(signer_key)
            session.add(VaultUser(vault_id=vault.id, user_id=revision.user_id))
            session.commit()
        elif revision.operation == RevisionOp.Upload:
            try:
                bundle = await self.app.bundles.get_bundle_by_hash(
                    vault, revision.file_hash)
                session.delete(bundle)
            except FileNotFoundError:
                pass
            bundle = await self.create_bundle_from_revision(revision, vault)
            session.add(bundle)
            session.commit()
            revision.path = bundle.relpath
        elif revision.operation == RevisionOp.SetMetadata:
            await vault.write_encrypted_metadata(
                Once(revision.revision_metadata))
        elif revision.operation == RevisionOp.RemoveFile:
            bundle = await self.app.bundles.get_bundle_by_hash(
                vault, revision.file_hash)
            session.delete(bundle)
            session.commit()
            revision.path = bundle.relpath
        elif revision.operation == RevisionOp.AddUser:
            self.app.vault_users.add(vault, revision.user_id)
        elif revision.operation == RevisionOp.RemoveUser:
            self.app.vault_users.remove(vault, revision.user_id)
        elif revision.operation == RevisionOp.AddUserKey:
            new_identity = Identity.from_key(revision.user_public_key,
                                             self.app.config)
            self.app.user_vault_keys.add(vault, revision.user_id, new_identity)
        elif revision.operation == RevisionOp.RemoveUserKey:
            new_identity = Identity.from_key(revision.user_public_key,
                                             self.app.config)
            self.app.user_vault_keys.remove(vault, revision.user_id,
                                            new_identity)
        else:
            raise NotImplementedError(revision.operation)

        # 5. Store the revision in config and db
        revision.local_vault_id = vault.id
        revision.creator_id = signer_key.user_id
        session.add(revision)
        session.commit()

        vault.revision_count = (session.query(Revision).filter(
            Revision.local_vault_id == vault.id).count())

        if revision.operation in (RevisionOp.Upload, RevisionOp.RemoveFile):
            vault.file_count = (session.query(Bundle).filter(
                Bundle.vault_id == vault.id).count())

        if revision.operation in (RevisionOp.CreateVault, RevisionOp.AddUser,
                                  RevisionOp.RemoveUser):
            vault.user_count = (session.query(VaultUser).filter(
                VaultUser.vault_id == vault.id).count())

        vault.modification_date = revision.created_at

        logger.debug(
            "Vault state revision_count=%s file_count=%s user_count=%s",
            vault.revision_count, vault.file_count, vault.user_count)

        # vault.revision = revision.id
        session.add(vault)
        vault.update_revision(revision)
        session.commit()

    smokesignal.emit('post_apply_revision', vault=vault, revision=revision)

async def decrypt_metadata(self, metadata):
    sink = self.encrypted_metadata_decoder(Once(metadata))
    serialized_metadata = await sink.read()
    return umsgpack.unpackb(serialized_metadata)

def encrypted_metadata_reader(self):
    return (
        Once(self.serialized_metadata)
        >> SnappyCompress()
        >> EncryptRSA_PKCS1_OAEP(self.identity.public_key)
    )

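# A minimal round-trip sketch, assuming the object exposing
# encrypted_metadata_reader() also provides the decrypt_metadata() coroutine
# shown above: metadata is compressed and RSA-encrypted on the way out, and
# both steps are reversed on the way back in.
async def metadata_roundtrip(bundle):
    encrypted = await bundle.encrypted_metadata_reader().readall()
    return await bundle.decrypt_metadata(encrypted)
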
async def test_once():
    stream = Once(b"0123456789abcdef")
    contents = await stream.read()
    assert contents == b"0123456789abcdef"
    contents = await stream.read()
    assert contents == b""

async def add_bundle_by_metadata(self, store_hash, metadata):
    bundle = Bundle(None, vault=self, store_hash=store_hash)
    await bundle.write_encrypted_metadata(Once(metadata))
    return bundle

async def test_count_2():
    stream = Once(b"abcdefgh")
    counter = stream >> Repeat(241) >> Buffered(100) >> Count()
    await counter.consume()
    assert counter.count == 241 * 8