def test_discard(self, file):
    """Discarding a file deletes its blocks and vlob.

    First run: the backend no longer holds the block/vlob (synchronized
    file) -> discard reports False.  Second run (dirty, version 0): every
    delete succeeds -> discard reports True and clears the dirty flag.
    """
    content = b'This is a test content.'
    block_ids = ['4567', '5678', '6789']
    # Original content
    chunk_1 = content[:5]
    chunk_2 = content[5:14]
    chunk_3 = content[14:]
    # Two blob atoms: chunks 1+2 under one key, chunk 3 under another.
    blob = [{'blocks': [{'block': block_ids[0],
                         'digest': digest(chunk_1),
                         'size': len(chunk_1)},
                        {'block': block_ids[1],
                         'digest': digest(chunk_2),
                         'size': len(chunk_2)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000003>')},
            {'blocks': [{'block': block_ids[2],
                         'digest': digest(chunk_3),
                         'size': len(chunk_3)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000004>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    # Already synchronized
    sequence = [
        (EVlobRead('1234', '42', 1),
         const({'id': '1234', 'blob': blob, 'version': 1})),
        (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
        (EBlockDelete('5678'), noop),
        (EBlockDelete('6789'), noop),
        # NOTE(review): message says 'Block not found.' for a VlobNotFound —
        # looks like a copy-paste slip; confirm the message is irrelevant here.
        (EVlobDelete('1234'), conste(VlobNotFound('Block not found.')))
        # TODO vlob OR block exception
    ]
    ret = perform_sequence(sequence, file.discard())
    assert ret is False
    # Not already synchronized
    file.dirty = True
    file.version = 0
    sequence = [(EVlobRead('1234', '42', 1),
                 const({'id': '1234', 'blob': blob, 'version': 1})),
                (EBlockDelete('4567'), noop),
                (EBlockDelete('5678'), noop),
                (EBlockDelete('6789'), noop),
                (EVlobDelete('1234'), noop)]
    ret = perform_sequence(sequence, file.discard())
    assert ret is True
    assert file.dirty is False
def test_reencrypt(self, file):
    """Re-encryption rewrites the blob under a fresh vlob.

    After re-encrypting, every property of the file's vlob (id, seeds, ...)
    must differ from the original one.
    """
    old_vlob = file.get_vlob()
    content = b'This is a test content.'
    block_ids = ['4567', '5678', '6789']
    # Original content
    chunk_1 = content[:5]
    chunk_2 = content[5:14]
    chunk_3 = content[14:]
    blob = [{'blocks': [{'block': block_ids[0],
                         'digest': digest(chunk_1),
                         'size': len(chunk_1)},
                        {'block': block_ids[1],
                         'digest': digest(chunk_2),
                         'size': len(chunk_2)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000001>')},
            {'blocks': [{'block': block_ids[2],
                         'digest': digest(chunk_3),
                         'size': len(chunk_3)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000002>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    sequence = [
        (EVlobRead('1234', '42', 1),
         const({'id': '1234', 'blob': blob, 'version': 1})),
        (EVlobCreate(blob),  # TODO check re-encryption
         const({'id': '2345',
                'read_trust_seed': '21',
                'write_trust_seed': '22'}))
    ]
    ret = perform_sequence(sequence, file.reencrypt())
    assert ret is None
    # Removed a stray second `file.reencrypt()` call: it only built an
    # effect/generator that was never performed, so it had no effect.
    new_vlob = file.get_vlob()
    # `prop` instead of `property` to avoid shadowing the builtin.
    for prop in old_vlob.keys():
        assert old_vlob[prop] != new_vlob[prop]
def test_perform_file_create(app, alice_identity, file):
    """Creating a file at an already-occupied path rolls back and raises.

    The `file` fixture already created '/foo'; the freshly created block and
    vlob are deleted again and ManifestError is raised.
    """
    vlob = {'id': '2345', 'read_trust_seed': '42', 'write_trust_seed': '43'}
    block_id = '4567'
    # Already exist
    blob = [{'blocks': [{'block': block_id,
                         'digest': digest(b''),
                         'size': 0}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000003>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_file_create(EFileCreate('/foo'))
    sequence = [
        (EBlockCreate(''), const(block_id)),
        (EVlobCreate(blob), const(vlob)),
        (EIdentityGet(), const(alice_identity)),
        # Rollback path: re-read then delete the block and the vlob.
        (EVlobRead(vlob['id'], vlob['read_trust_seed'], 1),
         const({'id': vlob['id'], 'blob': blob, 'version': 1})),
        (EBlockDelete(block_id), noop),
        (EVlobDelete(vlob['id']), noop),
    ]
    with pytest.raises(ManifestError):
        perform_sequence(sequence, eff)
def test_perform_file_history(app, file, alice_identity):
    """History of '/foo' between versions 1 and 1 performs without error."""
    vlob = {'id': '2345', 'read_trust_seed': '42', 'write_trust_seed': '43'}
    blob = [{'blocks': [{'block': '4567',
                         'digest': digest(b''),
                         'size': 0}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000001>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_file_history(EFileHistory('/foo', 1, 1))
    sequence = [
        (EIdentityGet(), const(alice_identity)),
        (EIdentityGet(), const(alice_identity)),
        (EVlobRead(vlob['id'], vlob['read_trust_seed']),
         const({'id': vlob['id'], 'blob': blob, 'version': 1})),
        (EVlobList(), const([])),
    ]
    perform_sequence(sequence, eff)
def test_perform_undelete(app, alice_identity, file):
    """A deleted file can be undeleted from the dustbin by vlob id.

    Delete first (block/vlob already gone on the backend is tolerated),
    then undelete returns None on success.
    """
    vlob = {'id': '2345', 'read_trust_seed': '42', 'write_trust_seed': '43'}
    blob = [{'blocks': [{'block': '4567',
                         'digest': digest(b''),
                         'size': 0}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000001>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_delete(EDelete('/foo'))
    sequence = [
        (EIdentityGet(), const(alice_identity)),
        (EVlobRead(vlob['id'], vlob['read_trust_seed']),
         const({'id': vlob['id'], 'blob': blob, 'version': 1})),
        (EVlobList(), const([])),
        (EVlobRead(vlob['id'], vlob['read_trust_seed'], 1),
         const({'id': vlob['id'], 'blob': blob, 'version': 1})),
        # Backend-side deletions may already have happened; errors tolerated.
        (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
        (EVlobDelete('2345'), conste(VlobNotFound('Vlob not found.')))
    ]
    ret = perform_sequence(sequence, eff)
    eff = app.perform_undelete(EUndelete('2345'))
    sequence = [(EIdentityGet(), const(alice_identity))]
    ret = perform_sequence(sequence, eff)
    assert ret is None
def test_build_file_blocks(self, file, length):
    """_build_file_blocks splits content into 4096-byte blocks.

    Checks the returned blob shape ('blocks' + 'key'), the expected block
    count (at least one even for empty content), and per-block metadata
    (id, size, digest of the matching content slice).
    """
    file.dirty = False
    block_size = 4096
    content = b''.join(
        [str(random.randint(1, 9)).encode() for i in range(0, length)])
    chunks = [
        content[i:i + block_size] for i in range(0, len(content), block_size)
    ]
    if not chunks:
        chunks = [b'']
    sequence = []
    for chunk in chunks:
        sequence.append((EBlockCreate(to_jsonb64(chunk)), const('4567')))
    blocks = perform_sequence(sequence, file._build_file_blocks(content))
    assert sorted(blocks.keys()) == ['blocks', 'key']
    assert isinstance(blocks['blocks'], list)
    # Floor division instead of int(a / b): same result, no float round-trip.
    required_blocks = len(content) // block_size
    if not len(content) or len(content) % block_size:
        required_blocks += 1
    assert len(blocks['blocks']) == required_blocks
    for index, block in enumerate(blocks['blocks']):
        assert sorted(block.keys()) == ['block', 'digest', 'size']
        assert block['block']
        length = len(content) - index * block_size
        length = block_size if length > block_size else length
        assert block['size'] == length
        # BUGFIX: was `index + 1 * block_size`, which slices
        # [i*4096 : i + 4096] instead of the intended chunk boundaries
        # [i*4096 : (i+1)*4096] — wrong digest for every block but the first.
        assert block['digest'] == digest(
            content[index * block_size:(index + 1) * block_size])
    assert file.dirty is True
def test_perform_dustbin_show(app, alice_identity, file):
    """After deleting '/foo', the dustbin lists it with path/date/key."""
    with freeze_time('2012-01-01') as frozen_datetime:
        vlob = {
            'id': '2345',
            'read_trust_seed': '42',
            'write_trust_seed': '43'
        }
        blob = [{'blocks': [{'block': '4567',
                             'digest': digest(b''),
                             'size': 0}],
                 'key': to_jsonb64(b'<dummy-key-00000000000000000001>')}]
        blob = ejson_dumps(blob).encode()
        blob = to_jsonb64(blob)
        eff = app.perform_delete(EDelete('/foo'))
        sequence = [
            (EIdentityGet(), const(alice_identity)),
            (EVlobRead(vlob['id'], vlob['read_trust_seed']),
             const({'id': vlob['id'], 'blob': blob, 'version': 1})),
            (EVlobList(), const([])),
            (EVlobRead(vlob['id'], vlob['read_trust_seed'], 1),
             const({'id': vlob['id'], 'blob': blob, 'version': 1})),
            # Backend already dropped block/vlob; deletion errors tolerated.
            (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
            (EVlobDelete('2345'), conste(VlobNotFound('Vlob not found.'))),
        ]
        perform_sequence(sequence, eff)
        eff = app.perform_dustbin_show(EDustbinShow())
        sequence = [(EIdentityGet(), const(alice_identity))]
        dustbin = perform_sequence(sequence, eff)
        # The dustbin entry is the vlob enriched with path, removal date
        # (frozen clock) and the file's key.
        vlob['path'] = '/foo'
        vlob['removed_date'] = frozen_datetime().isoformat()
        vlob['key'] = to_jsonb64(b'<dummy-key-00000000000000000002>')
        assert dustbin == [vlob]
def file(app, alice_identity, mock_crypto_passthrough):
    """Fixture: create '/foo' through the app's effect pipeline.

    NOTE(review): this fixture returns None and only resets the
    ``File.files`` cache at the end — tests presumably rely on the side
    effect of the creation, not on a returned object; confirm.
    """
    vlob = {'id': '2345', 'read_trust_seed': '42', 'write_trust_seed': '43'}
    block_id = '4567'
    blob = [{'blocks': [{'block': block_id,
                         'digest': digest(b''),
                         'size': 0}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000001>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_file_create(EFileCreate('/foo'))
    sequence = [(EBlockCreate(''), const(block_id)),
                (EVlobCreate(blob), const(vlob)),
                (EIdentityGet(), const(alice_identity))]
    ret = perform_sequence(sequence, eff)
    assert ret is None
    # Drop any cached File instances so each test starts clean.
    File.files = {}
def test_get_blocks(self, file):
    """get_blocks returns every block id across all blob atoms, in order,
    without touching the dirty flag or the version."""
    file.dirty = False
    file.version = 1
    vlob_id = '1234'
    block_ids = ['4567', '5678', '6789']
    chunk_digest = digest(b'')
    # Two atoms: blocks 0+1 under one key, block 2 under another.
    blob = [{'blocks': [{'block': block_ids[0],
                         'digest': chunk_digest,
                         'size': 0},
                        {'block': block_ids[1],
                         'digest': chunk_digest,
                         'size': 0}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000001>')},
            {'blocks': [{'block': block_ids[2],
                         'digest': chunk_digest,
                         'size': 0}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000002>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    sequence = [
        (EVlobRead(vlob_id, '42', 1),
         const({'id': vlob_id, 'blob': blob, 'version': 1})),
    ]
    ret = perform_sequence(sequence, file.get_blocks())
    assert ret == block_ids
    assert file.dirty is False
    assert file.version == 1
def _build_file_blocks(self, data):
    """Cut *data* into fixed-size chunks, encrypt and store each chunk as a
    block, and return the blob atom ({'blocks': [...], 'key': ...}).

    Marks the file dirty. Generator: block creation is yielded as effects.
    """
    block_size = 4096  # TODO modify size
    # Slice the payload; an empty payload still produces one empty chunk
    # so the file always owns at least one block.
    pieces = [data[offset:offset + block_size]
              for offset in range(0, len(data), block_size)] or [b'']
    key_cipher = generate_sym_key()
    entries = []
    for piece in pieces:
        encrypted = to_jsonb64(key_cipher.encrypt(piece))
        created_id = yield Effect(EBlockCreate(encrypted))
        entries.append({
            'block': created_id,
            'digest': digest(piece),
            'size': len(piece)
        })
    # New vlob atom: block list plus the symmetric key protecting them.
    self.dirty = True
    return {'blocks': entries, 'key': to_jsonb64(key_cipher.key)}
def file(mock_crypto_passthrough):
    """Fixture: a freshly created File ('1234'/'42'/'43') with one empty
    block, built by driving File.create() through a stubbed sequence."""
    block_id = '4567'
    blob = [{'blocks': [{'block': block_id,
                         'digest': digest(b''),
                         'size': 0}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000001>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    sequence = [
        (EBlockCreate(''), const(block_id)),
        (EVlobCreate(blob), const({
            'id': '1234',
            'read_trust_seed': '42',
            'write_trust_seed': '43'
        })),
    ]
    return perform_sequence(sequence, File.create())
def read(self, size=None, offset=0):
    """Return the file content, optionally limited to *size* bytes starting
    at *offset*.

    Flushes pending writes first, then fetches and decrypts every block in
    the matching range, verifying each chunk's digest and size against the
    manifest before appending it. Generator: I/O is yielded as effects.
    """
    yield self.flush()
    located = yield self._find_matching_blocks(size, offset)
    result = located['pre_included_data']
    for group in located['included_blocks']:
        cipher = load_sym_key(from_jsonb64(group['key']))
        for meta in group['blocks']:
            fetched = yield Effect(EBlockRead(meta['block']))
            raw = fetched['content']
            # TODO: clean this hack — content may arrive as str or bytes.
            payload = from_jsonb64(raw if isinstance(raw, str) else raw.decode())
            clear = cipher.decrypt(payload)
            # Integrity: digest and size must match the manifest entry.
            assert digest(clear) == meta['digest']
            assert len(clear) == meta['size']
            result += clear
    result += located['post_included_data']
    return result
def test_read(self, file):
    """read() covers: empty file, full content, offset, and offset+size.

    Only the blocks overlapping the requested range are fetched; dirty
    flag and version stay untouched.
    """
    file.dirty = False
    file.version = 1
    # Empty file
    vlob_id = '1234'
    chunk_digest = digest(b'')
    blob = [{'blocks': [{'block': '4567',
                         'digest': chunk_digest,
                         'size': 0}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000001>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    sequence = [
        (EVlobRead(vlob_id, '42', 1),
         const({'id': vlob_id, 'blob': blob, 'version': 1})),
    ]
    read_content = perform_sequence(sequence, file.read())
    assert read_content == b''
    # Not empty file
    content = b'This is a test content.'
    block_ids = ['4567', '5678', '6789']
    chunk_1 = content[:5]
    chunk_2 = content[5:14]
    chunk_3 = content[14:]
    blob = [{'blocks': [{'block': block_ids[0],
                         'digest': digest(chunk_1),
                         'size': len(chunk_1)},
                        {'block': block_ids[1],
                         'digest': digest(chunk_2),
                         'size': len(chunk_2)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000001>')},
            {'blocks': [{'block': block_ids[2],
                         'digest': digest(chunk_3),
                         'size': len(chunk_3)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000002>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    sequence = [(EVlobRead(vlob_id, '42', 1),
                 const({'id': vlob_id, 'blob': blob, 'version': 1})),
                (EBlockRead(block_ids[0]),
                 const({'content': to_jsonb64(chunk_1),
                        'creation_date': '2012-01-01T00:00:00'})),
                (EBlockRead(block_ids[1]),
                 const({'content': to_jsonb64(chunk_2),
                        'creation_date': '2012-01-01T00:00:00'})),
                (EBlockRead(block_ids[2]),
                 const({'content': to_jsonb64(chunk_3),
                        'creation_date': '2012-01-01T00:00:00'}))]
    read_content = perform_sequence(sequence, file.read())
    assert read_content == content
    # Offset: reading from byte 5 skips the first block entirely.
    offset = 5
    sequence = [(EVlobRead(vlob_id, '42', 1),
                 const({'id': vlob_id, 'blob': blob, 'version': 1})),
                (EBlockRead(block_ids[1]),
                 const({'content': to_jsonb64(chunk_2),
                        'creation_date': '2012-01-01T00:00:00'})),
                (EBlockRead(block_ids[2]),
                 const({'content': to_jsonb64(chunk_3),
                        'creation_date': '2012-01-01T00:00:00'}))]
    read_content = perform_sequence(sequence, file.read(offset=offset))
    assert read_content == content[offset:]
    # Size: 9 bytes from offset 5 touches only the middle block.
    size = 9
    sequence = [(EVlobRead(vlob_id, '42', 1),
                 const({'id': vlob_id, 'blob': blob, 'version': 1})),
                (EBlockRead(block_ids[1]),
                 const({'content': to_jsonb64(chunk_2),
                        'creation_date': '2012-01-01T00:00:00'}))]
    read_content = perform_sequence(sequence,
                                    file.read(offset=offset, size=size))
    assert read_content == content[offset:][:size]
    assert file.dirty is False
    assert file.version == 1
def test_stat(self, file):
    """stat() reports size including un-flushed buffered modifications.

    Base content is 23 bytes; an in-buffer truncate(20) shrinks the
    reported size to 20, then a write of 3 bytes at offset 30 grows it
    to 33 — all without any extra backend effects beyond the vlob read.
    """
    vlob_id = '1234'
    content = b'This is a test content.'
    block_ids = ['4567', '5678', '6789']
    # Original content
    chunk_1 = content[:5]
    chunk_2 = content[5:14]
    chunk_3 = content[14:]
    blob = [{'blocks': [{'block': block_ids[0],
                         'digest': digest(chunk_1),
                         'size': len(chunk_1)},
                        {'block': block_ids[1],
                         'digest': digest(chunk_2),
                         'size': len(chunk_2)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000001>')},
            {'blocks': [{'block': block_ids[2],
                         'digest': digest(chunk_3),
                         'size': len(chunk_3)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000002>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    sequence = [(EVlobRead(vlob_id, '42', 1),
                 const({'id': vlob_id, 'blob': blob, 'version': 1}))]
    ret = perform_sequence(sequence, file.stat())
    assert ret == {
        'type': 'file',
        'id': vlob_id,
        'created': '2012-01-01T00:00:00',
        'updated': '2012-01-01T00:00:00',
        'size': 23,
        'version': 1
    }
    # TODO check created and updated time are different
    # Truncate in buffer
    file.truncate(20)
    ret = perform_sequence(sequence, file.stat())
    assert ret == {
        'type': 'file',
        'id': vlob_id,
        'created': '2012-01-01T00:00:00',
        'updated': '2012-01-01T00:00:00',
        'size': 20,
        'version': 1
    }
    # Write in buffer
    file.write(b'foo', 30)
    ret = perform_sequence(sequence, file.stat())
    assert ret == {
        'type': 'file',
        'id': vlob_id,
        'created': '2012-01-01T00:00:00',
        'updated': '2012-01-01T00:00:00',
        'size': 33,
        'version': 1
    }
def test_restore(self, file):
    """restore() rejects invalid targets, then restores a prior version.

    FileError cases: version 1 (nothing before it, dirty or not) and a
    target equal to the current version. Valid restore discards the
    current blocks/vlob, re-reads the target version and re-uploads it as
    a new version (current + 1).
    """
    vlob_id = '1234'
    content = b'This is a test content.'
    block_ids = ['4567', '5678', '6789']
    # Original content
    chunk_1 = content[:5]
    chunk_2 = content[5:14]
    chunk_3 = content[14:]
    blob = [{'blocks': [{'block': block_ids[0],
                         'digest': digest(chunk_1),
                         'size': len(chunk_1)},
                        {'block': block_ids[1],
                         'digest': digest(chunk_2),
                         'size': len(chunk_2)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000001>')},
            {'blocks': [{'block': block_ids[2],
                         'digest': digest(chunk_3),
                         'size': len(chunk_3)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000002>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    # New content: middle chunk replaced by block '7654' under a new key.
    new_chuck_2 = b'is A test'
    new_block_id = '7654'
    new_blob = [{'blocks': [{'block': block_ids[0],
                             'digest': digest(chunk_1),
                             'size': len(chunk_1)}],
                 'key': to_jsonb64(b'<dummy-key-00000000000000000001>')},
                {'blocks': [{'block': new_block_id,
                             'digest': digest(new_chuck_2),
                             'size': len(new_chuck_2)}],
                 'key': to_jsonb64(b'<dummy-key-00000000000000000003>')},
                {'blocks': [{'block': block_ids[2],
                             'digest': digest(chunk_3),
                             'size': len(chunk_3)}],
                 'key': to_jsonb64(b'<dummy-key-00000000000000000002>')}]
    new_blob = ejson_dumps(new_blob).encode()
    new_blob = to_jsonb64(new_blob)
    # Restore not commited file with version = 1
    file.dirty = False
    with pytest.raises(FileError):
        perform_sequence([], file.restore())
    assert file.dirty is False
    file.dirty = True
    # Restore commited file with version = 1
    file.dirty = False
    file.version = 1
    with pytest.raises(FileError):
        perform_sequence([], file.restore())
    assert file.dirty is False
    # Restore not commited file with version = current version
    file.dirty = True
    file.version = 5
    with pytest.raises(FileError):
        perform_sequence([], file.restore(6))
    assert file.dirty is True
    # Restore commited file with version = current version
    file.dirty = False
    file.version = 6
    with pytest.raises(FileError):
        perform_sequence([], file.restore(6))
    assert file.dirty is False
    # Restore previous version
    sequence = [
        (EVlobRead(vlob_id, '42', 6),  # Discard
         const({'id': vlob_id, 'blob': blob, 'version': 6})),
        (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
        (EBlockDelete('5678'), noop),
        (EBlockDelete('6789'), noop),
        (EVlobDelete('1234'), noop),
        (EVlobRead('1234', '42', 5),
         const({'id': vlob_id, 'blob': new_blob, 'version': 5})),
        (EVlobUpdate(vlob_id, '43', 7, new_blob), noop)
    ]
    ret = perform_sequence(sequence, file.restore())
    assert ret is None
    assert file.dirty is True
    assert file.version == 6
    # Restore specific version
    sequence = [
        (EVlobRead(vlob_id, '42', 7),
         const({'id': vlob_id, 'blob': new_blob, 'version': 7})),
        (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
        (EBlockDelete('7654'), noop),
        (EBlockDelete('6789'), noop),
        (EVlobDelete('1234'), noop),
        (EVlobRead('1234', '42', 2),
         const({'id': vlob_id, 'blob': blob, 'version': 2})),
        (EVlobUpdate(vlob_id, '43', 7, blob), noop)
    ]
    ret = perform_sequence(sequence, file.restore(2))
    assert ret is None
    assert file.dirty is True
    assert file.version == 6
def test_flush(self, file):
    """flush() applies buffered truncate and writes, then cleans up.

    Buffered ops: truncate(9) then two writes at offset 5 (the second
    extends past the truncated end). Flush rewrites the blob twice (once
    per buffered state), deletes the now-unreferenced original blocks,
    bumps nothing: version stays 2, file stays dirty.
    """
    file.truncate(9)
    file.write(b'IS', 5)
    file.write(b'IS a nice test content.', 5)
    file.dirty = False
    file.version = 2
    vlob_id = '1234'
    content = b'This is a test content.'
    block_ids = ['4567', '5678', '6789']
    # Original content
    chunk_1 = content[:5]
    chunk_2 = content[5:14]
    chunk_3 = content[14:]
    blob = [{'blocks': [{'block': block_ids[0],
                         'digest': digest(chunk_1),
                         'size': len(chunk_1)},
                        {'block': block_ids[1],
                         'digest': digest(chunk_2),
                         'size': len(chunk_2)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000001>')},
            {'blocks': [{'block': block_ids[2],
                         'digest': digest(chunk_3),
                         'size': len(chunk_3)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000002>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    # New content after truncate
    new_chuck_2 = b'is a'
    new_block_id = '7654'
    new_blob = [{'blocks': [{'block': block_ids[0],
                             'digest': digest(chunk_1),
                             'size': len(chunk_1)}],
                 'key': to_jsonb64(b'<dummy-key-00000000000000000001>')},
                {'blocks': [{'block': new_block_id,
                             'digest': digest(new_chuck_2),
                             'size': len(new_chuck_2)}],
                 'key': to_jsonb64(b'<dummy-key-00000000000000000003>')}]
    new_blob = ejson_dumps(new_blob).encode()
    new_blob = to_jsonb64(new_blob)
    # New content after write
    new_block_2_id = '6543'
    new_chunk_4 = b'IS a nice test content.'
    new_blob_2 = [{'blocks': [{'block': block_ids[0],
                               'digest': digest(chunk_1),
                               'size': len(chunk_1)}],
                   'key': to_jsonb64(b'<dummy-key-00000000000000000001>')},
                  {'blocks': [{'block': new_block_2_id,
                               'digest': digest(new_chunk_4),
                               'size': len(new_chunk_4)}],
                   'key': to_jsonb64(b'<dummy-key-00000000000000000004>')}]
    new_blob_2 = ejson_dumps(new_blob_2).encode()
    new_blob_2 = to_jsonb64(new_blob_2)
    sequence = [
        (EVlobRead(vlob_id, '42', 2),  # Get blocks
         const({'id': vlob_id, 'blob': blob, 'version': 2})),
        (EVlobRead(vlob_id, '42', 2),  # Matching blocks
         const({'id': vlob_id, 'blob': blob, 'version': 2})),
        (EBlockRead(block_ids[1]),
         const({'content': to_jsonb64(chunk_2),
                'creation_date': '2012-01-01T00:00:00'})),
        (EBlockCreate(to_jsonb64(new_chuck_2)), const(new_block_id)),
        (EVlobUpdate(vlob_id, '43', 3, new_blob), noop),
        (EVlobRead(vlob_id, '42', 3),  # Matching blocks
         const({'id': vlob_id, 'blob': new_blob, 'version': 3})),
        (EBlockCreate(to_jsonb64(new_chunk_4)), const(new_block_2_id)),
        (EVlobUpdate(vlob_id, '43', 3, new_blob_2), noop),
        (EVlobRead(vlob_id, '42', 3),
         const({'id': vlob_id, 'blob': new_blob_2, 'version': 3})),
        # Old blocks no longer referenced are deleted (errors tolerated).
        (EBlockDelete('5678'), conste(BlockNotFound('Block not found.'))),
        (EBlockDelete('6789'), noop),
    ]
    ret = perform_sequence(sequence, file.flush())
    assert ret is None
    assert file.dirty is True
    assert file.version == 2
def test_commit(self, file):
    """commit() flushes buffered changes then synchronizes blocks and vlob.

    After a buffered truncate(9), commit rewrites the blob, drops the
    stale blocks, synchronizes the remaining blocks and the vlob, and
    returns the new vlob (with the file's key attached). File ends up
    clean with version unchanged (1).
    """
    vlob_id = '1234'
    content = b'This is a test content.'
    block_ids = ['4567', '5678', '6789']
    new_vlob = {
        'id': '2345',
        'read_trust_seed': 'ABC',
        'write_trust_seed': 'DEF'
    }
    # Original content
    chunk_1 = content[:5]
    chunk_2 = content[5:14]
    chunk_3 = content[14:]
    blob = [{'blocks': [{'block': block_ids[0],
                         'digest': digest(chunk_1),
                         'size': len(chunk_1)},
                        {'block': block_ids[1],
                         'digest': digest(chunk_2),
                         'size': len(chunk_2)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000003>')},
            {'blocks': [{'block': block_ids[2],
                         'digest': digest(chunk_3),
                         'size': len(chunk_3)}],
             'key': to_jsonb64(b'<dummy-key-00000000000000000004>')}]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    # New content after truncate
    new_chuck_2 = b'is a'
    new_block_id = '7654'
    new_blob = [{'blocks': [{'block': block_ids[0],
                             'digest': digest(chunk_1),
                             'size': len(chunk_1)}],
                 'key': to_jsonb64(b'<dummy-key-00000000000000000003>')},
                {'blocks': [{'block': new_block_id,
                             'digest': digest(new_chuck_2),
                             'size': len(new_chuck_2)}],
                 'key': to_jsonb64(b'<dummy-key-00000000000000000003>')}]
    new_blob = ejson_dumps(new_blob).encode()
    new_blob = to_jsonb64(new_blob)
    file.truncate(9)
    sequence = [
        (EVlobRead('1234', '42', 1),
         const({'id': '1234', 'blob': blob, 'version': 1})),
        (EVlobRead('1234', '42', 1),
         const({'id': '1234', 'blob': blob, 'version': 1})),
        (EBlockRead(block_ids[1]),
         const({'content': to_jsonb64(chunk_2),
                'creation_date': '2012-01-01T00:00:00'})),
        (EBlockCreate(to_jsonb64(new_chuck_2)), const(new_block_id)),
        (EVlobUpdate(vlob_id, '43', 1, new_blob), noop),
        (EVlobRead('1234', '42', 1),
         const({'id': '1234', 'blob': new_blob, 'version': 1})),
        # Blocks dropped by the truncate are deleted (errors tolerated).
        (EBlockDelete('5678'), conste(BlockNotFound('Block not found.'))),
        (EBlockDelete('6789'), noop),
        (EVlobRead('1234', '42', 1),
         const({'id': '1234', 'blob': new_blob, 'version': 1})),
        (EBlockSynchronize('4567'), const(True)),
        (EBlockSynchronize('7654'), const(False)),
        (EVlobSynchronize('1234'), const(new_vlob))
    ]
    ret = perform_sequence(sequence, file.commit())
    # Commit hands back the synchronized vlob enriched with the file key.
    new_vlob['key'] = to_jsonb64(b'<dummy-key-00000000000000000002>')
    assert ret == new_vlob
    assert file.dirty is False
    assert file.version == 1