Example #1
 def test_discard(self, file):
     content = b'This is a test content.'
     block_ids = ['4567', '5678', '6789']
     # Original content
     chunk_1 = content[:5]
     chunk_2 = content[5:14]
     chunk_3 = content[14:]
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }, {
             'block': block_ids[1],
             'digest': digest(chunk_2),
             'size': len(chunk_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000003>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': digest(chunk_3),
             'size': len(chunk_3)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000004>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     # Already synchronized
     sequence = [
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': blob,
                        'version': 1
                    })),
         (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
         (EBlockDelete('5678'), noop),
         (EBlockDelete('6789'), noop),
         (EVlobDelete('1234'), conste(VlobNotFound('Block not found.'))
          )  # TODO vlob OR block exception
     ]
     ret = perform_sequence(sequence, file.discard())
     assert ret is False
     # Not already synchronized
     file.dirty = True
     file.version = 0
     sequence = [(EVlobRead('1234', '42', 1),
                  const({
                      'id': '1234',
                      'blob': blob,
                      'version': 1
                  })), (EBlockDelete('4567'), noop),
                 (EBlockDelete('5678'), noop), (EBlockDelete('6789'), noop),
                 (EVlobDelete('1234'), noop)]
     ret = perform_sequence(sequence, file.discard())
     assert ret is True
     assert file.dirty is False
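
Note: every example in this list drives an effect coroutine through perform_sequence, pairing each expected intent (EVlobRead, EBlockDelete, ...) with a stub handler. Below is a minimal sketch, assuming effect-style testing conventions, of what the const, conste and noop helpers used throughout might look like; the names come from the examples, but these definitions are illustrative, not the project's actual ones.

# Minimal sketch of the stub handlers used with perform_sequence (assumed
# semantics, not the real implementations).
def const(value):
    # Return a handler that ignores the intent and always returns `value`.
    return lambda intent: value

def conste(exception):
    # Return a handler that ignores the intent and always raises `exception`.
    def handler(intent):
        raise exception
    return handler

def noop(intent):
    # Handler that does nothing and returns None.
    return None
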
Example #2
 def diff_versions(self, old_version=None, new_version=None):
     empty_entries = {'/': None}
     empty_manifest = {'entries': empty_entries, 'dustbin': [], 'versions': {}}
     # Old manifest
     if old_version and old_version > 0:
         old_vlob = yield Effect(EVlobRead(self.id, self.read_trust_seed, old_version))
         old_blob = from_jsonb64(old_vlob['blob'])
         content = self.encryptor.decrypt(old_blob)
         old_manifest = ejson_loads(content.decode())
     elif old_version == 0:
         old_manifest = empty_manifest
     else:
         old_manifest = self.original_manifest
     # New manifest
     if new_version and new_version > 0:
         new_vlob = yield Effect(EVlobRead(self.id, self.read_trust_seed, new_version))
         blob = from_jsonb64(new_vlob['blob'])
         content = self.encryptor.decrypt(blob)
         new_manifest = ejson_loads(content.decode())
     elif new_version == 0:
         new_manifest = empty_manifest
     else:
         dump = yield self.dumps()
         new_manifest = ejson_loads(dump)
     return self.diff(old_manifest, new_manifest)
Example #3
def test_perform_file_read(app, file, alice_identity):
    vlob = {'id': '2345', 'read_trust_seed': '42', 'write_trust_seed': '43'}
    blob = [{
        'blocks': [{
            'block': '4567',
            'digest': digest(b''),
            'size': 0
        }],
        'key': to_jsonb64(b'<dummy-key-00000000000000000001>')
    }]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_file_read(EFileRead('/foo'))
    sequence = [(EIdentityGet(), const(alice_identity)),
                (EIdentityGet(), const(alice_identity)),
                (EVlobRead(vlob['id'], vlob['read_trust_seed']),
                 const({
                     'id': vlob['id'],
                     'blob': blob,
                     'version': 1
                 })), (EVlobList(), const([vlob['id']])),
                (EVlobRead(vlob['id'], vlob['read_trust_seed'], 1),
                 const({
                     'id': vlob['id'],
                     'blob': blob,
                     'version': 1
                 }))]
    file = perform_sequence(sequence, eff)
    assert file == b''
Example #4
def test_perform_undelete(app, alice_identity, file):
    vlob = {'id': '2345', 'read_trust_seed': '42', 'write_trust_seed': '43'}
    blob = [{
        'blocks': [{
            'block': '4567',
            'digest': digest(b''),
            'size': 0
        }],
        'key': to_jsonb64(b'<dummy-key-00000000000000000001>')
    }]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_delete(EDelete('/foo'))
    sequence = [
        (EIdentityGet(), const(alice_identity)),
        (EVlobRead(vlob['id'], vlob['read_trust_seed']),
         const({
             'id': vlob['id'],
             'blob': blob,
             'version': 1
         })), (EVlobList(), const([])),
        (EVlobRead(vlob['id'], vlob['read_trust_seed'],
                   1), const({
                       'id': vlob['id'],
                       'blob': blob,
                       'version': 1
                   })),
        (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
        (EVlobDelete('2345'), conste(VlobNotFound('Vlob not found.')))
    ]
    ret = perform_sequence(sequence, eff)
    eff = app.perform_undelete(EUndelete('2345'))
    sequence = [(EIdentityGet(), const(alice_identity))]
    ret = perform_sequence(sequence, eff)
    assert ret is None
Example #5
 def test_load_file(self, file):
     vlob_id = '1234'
     other_vlob_id = '5678'
     read_trust_seed = '42'
     version = 1
     # Load from open files
     file2 = perform_sequence(
         [],
         File.load(vlob_id, to_jsonb64(b'<dummy-key-00000000000000000001>'),
                   read_trust_seed, '43'))
     assert file == file2
     File.files = {}
     # Test reloading committed and not committed files
     for synchronizer_vlob_list in [[vlob_id, other_vlob_id],
                                    [other_vlob_id]]:
         key = to_jsonb64(b'<dummy-key-00000000000000000001>')
         sequence = [
             (EVlobRead(vlob_id, read_trust_seed, None),
              const({
                  'id': vlob_id,
                  'blob': 'foo',
                  'version': version
              })),
             (EVlobList(), const(synchronizer_vlob_list)),
         ]
         file = perform_sequence(
             sequence, File.load(vlob_id, key, read_trust_seed, '43'))
         assert file.dirty is (vlob_id in synchronizer_vlob_list)
         assert file.version == (version - 1 if file.dirty else version)
         File.files = {}
Example #6
def test_perform_file_create(app, alice_identity, file):
    vlob = {'id': '2345', 'read_trust_seed': '42', 'write_trust_seed': '43'}
    block_id = '4567'
    # Already exists
    blob = [{
        'blocks': [{
            'block': block_id,
            'digest': digest(b''),
            'size': 0
        }],
        'key': to_jsonb64(b'<dummy-key-00000000000000000003>')
    }]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_file_create(EFileCreate('/foo'))
    sequence = [
        (EBlockCreate(''), const(block_id)),
        (EVlobCreate(blob), const(vlob)),
        (EIdentityGet(), const(alice_identity)),
        (EVlobRead(vlob['id'], vlob['read_trust_seed'],
                   1), const({
                       'id': vlob['id'],
                       'blob': blob,
                       'version': 1
                   })),
        (EBlockDelete(block_id), noop),
        (EVlobDelete(vlob['id']), noop),
    ]
    with pytest.raises(ManifestError):
        perform_sequence(sequence, eff)
Example #7
 def stat(self):
     version = self.get_version()
     vlob = yield Effect(EVlobRead(self.id, self.read_trust_seed, version))
     encrypted_blob = vlob['blob']
     encrypted_blob = from_jsonb64(encrypted_blob)
     blob = self.encryptor.decrypt(encrypted_blob)
     blob = ejson_loads(blob.decode())
     size = 0
     for blocks_and_key in blob:
         for block in blocks_and_key['blocks']:
             size += block['size']
     for modification in self.modifications:
         if modification[0] == self.write:
             end_offset = modification[2] + len(modification[1])
             if size < end_offset:
                 size = end_offset
         elif modification[0] == self.truncate:
             if size > modification[1]:
                 size = modification[1]
         else:
             raise NotImplementedError()
     # TODO don't provide atime field if we don't know it?
     # TODO real date
     return {
         'id': self.id,
         'type': 'file',
         'created': '2012-01-01T00:00:00',
         'updated': '2012-01-01T00:00:00',
         'size': size,
         'version': vlob['version']
     }
Example #8
def test_perform_vlob_read(app, app_no_cache):
    local_blob = 'foo'
    eff = app.perform_vlob_update(EVlobUpdate('123', '43', 1, local_blob))
    perform_sequence([], eff)
    # Read remote vlob
    assert app.vlob_cache.currsize == 0
    remote_blob = b'bar'
    eff = app.perform_vlob_read(EVlobRead('123', 'ABC', 2))
    sequence = [(EBackendVlobRead('123', 'ABC',
                                  2), const(VlobAtom('123', 2, remote_blob)))]
    vlob = perform_sequence(sequence, eff)
    assert sorted(list(vlob.keys())) == ['blob', 'id', 'version']
    assert vlob['id'] == '123'
    assert vlob['blob'] == remote_blob.decode()  # TODO decode?
    assert vlob['version'] == 2
    assert app.vlob_cache.currsize == 1
    # Read remote vlob with cache disabled
    assert app_no_cache.vlob_cache.currsize == 0
    remote_blob = b'bar'
    eff = app_no_cache.perform_vlob_read(EVlobRead('123', 'ABC', 2))
    sequence = [(EBackendVlobRead('123', 'ABC',
                                  2), const(VlobAtom('123', 2, remote_blob)))]
    vlob = perform_sequence(sequence, eff)
    assert sorted(list(vlob.keys())) == ['blob', 'id', 'version']
    assert vlob['id'] == '123'
    assert vlob['blob'] == remote_blob.decode()  # TODO decode?
    assert vlob['version'] == 2
    assert app_no_cache.vlob_cache.currsize == 0
    # Read vlob in cache
    remote_blob = b'bar'
    eff = app.perform_vlob_read(EVlobRead('123', 'ABC', 2))
    vlob = perform_sequence([], eff)
    assert sorted(list(vlob.keys())) == ['blob', 'id', 'version']
    assert vlob['id'] == '123'
    assert vlob['blob'] == remote_blob.decode()  # TODO decode?
    assert vlob['version'] == 2
    # Delete vlob from cache
    eff = app.perform_vlob_delete(EVlobDelete('123', 2))
    perform_sequence([], eff)
    # Read local vlob
    eff = app.perform_vlob_read(EVlobRead('123', '43', 1))
    vlob = perform_sequence([], eff)
    assert sorted(list(vlob.keys())) == ['blob', 'id', 'version']
    assert vlob['id'] == '123'
    assert vlob['blob'] == local_blob  # TODO decode?
    assert vlob['version'] == 1
Example #9
 def restore(self, version=None):
     if version is None:
         version = self.version - 1 if self.version > 1 else 1
     if version > 0 and version < self.version:
         vlob = yield Effect(EVlobRead(self.id, self.read_trust_seed, version))
         yield Effect(EVlobUpdate(self.id, self.write_trust_seed, self.version, vlob['blob']))
     elif version < 1 or version > self.version:
         raise ManifestError('bad_version', 'Bad version number.')
     yield self.reload(reset=True)
Example #10
def test_perform_dustbin_show(app, alice_identity, file):
    with freeze_time('2012-01-01') as frozen_datetime:
        vlob = {
            'id': '2345',
            'read_trust_seed': '42',
            'write_trust_seed': '43'
        }
        blob = [{
            'blocks': [{
                'block': '4567',
                'digest': digest(b''),
                'size': 0
            }],
            'key': to_jsonb64(b'<dummy-key-00000000000000000001>')
        }]
        blob = ejson_dumps(blob).encode()
        blob = to_jsonb64(blob)
        eff = app.perform_delete(EDelete('/foo'))
        sequence = [
            (EIdentityGet(), const(alice_identity)),
            (EVlobRead(vlob['id'], vlob['read_trust_seed']),
             const({
                 'id': vlob['id'],
                 'blob': blob,
                 'version': 1
             })),
            (EVlobList(), const([])),
            (EVlobRead(vlob['id'], vlob['read_trust_seed'],
                       1), const({
                           'id': vlob['id'],
                           'blob': blob,
                           'version': 1
                       })),
            (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
            (EVlobDelete('2345'), conste(VlobNotFound('Vlob not found.'))),
        ]
        perform_sequence(sequence, eff)
        eff = app.perform_dustbin_show(EDustbinShow())
        sequence = [(EIdentityGet(), const(alice_identity))]
        dustbin = perform_sequence(sequence, eff)
        vlob['path'] = '/foo'
        vlob['removed_date'] = frozen_datetime().isoformat()
        vlob['key'] = to_jsonb64(b'<dummy-key-00000000000000000002>')
        assert dustbin == [vlob]
Example #11
 def get_vlobs_versions(self):
     versions = {}
     for entry in [self.entries[entry] for entry in sorted(self.entries)] + self.dustbin:
         if entry:
             try:
                 vlob = yield Effect(EVlobRead(entry['id'], entry['read_trust_seed']))
             except VlobNotFound:
                 versions[entry['id']] = None
             else:
                 versions[entry['id']] = vlob['version']
     return versions
Example #12
 def restore(self, version=None):
     if version is None:
         version = self.get_version() - 1
     if version < 1 or version >= self.get_version():
         raise FileError('bad_version', 'Bad version number.')
     yield self.discard()
     vlob = yield Effect(EVlobRead(self.id, self.read_trust_seed, version))
     yield Effect(EVlobUpdate(self.id,
                              self.write_trust_seed,
                              self.version + 1,
                              vlob['blob']))
     self.dirty = True
Example #13
 def test_reencrypt(self, file):
     old_vlob = file.get_vlob()
     content = b'This is a test content.'
     block_ids = ['4567', '5678', '6789']
     # Original content
     chunk_1 = content[:5]
     chunk_2 = content[5:14]
     chunk_3 = content[14:]
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }, {
             'block': block_ids[1],
             'digest': digest(chunk_2),
             'size': len(chunk_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': digest(chunk_3),
             'size': len(chunk_3)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000002>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     sequence = [
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': blob,
                        'version': 1
                    })),
         (
             EVlobCreate(blob),  # TODO check re-encryption
             const({
                 'id': '2345',
                 'read_trust_seed': '21',
                 'write_trust_seed': '22'
             }))
     ]
     ret = perform_sequence(sequence, file.reencrypt())
     assert ret is None
     file.reencrypt()
     new_vlob = file.get_vlob()
     for property in old_vlob.keys():
         assert old_vlob[property] != new_vlob[property]
Example #14
def test_perform_vlob_create(app):
    blob = 'foo'
    with freeze_time('2012-01-01') as frozen_datetime:
        eff = app.perform_vlob_create(EVlobCreate(blob))
        vlob = perform_sequence([], eff)
        assert app.last_modified == Arrow.fromdatetime(frozen_datetime())
    vlob_id = vlob['id']
    read_trust_seed = vlob['read_trust_seed']
    assert sorted(list(
        vlob.keys())) == ['id', 'read_trust_seed', 'write_trust_seed']
    eff = app.perform_vlob_read(EVlobRead(vlob_id, read_trust_seed))
    vlob = perform_sequence([], eff)
    assert vlob['blob'] == blob
Example #15
 def check_consistency(self, manifest):
     consistency = yield super().check_consistency(manifest)
     if consistency is False:
         return False
     for entry in manifest['groups'].values():
         try:
             vlob = yield Effect(EVlobRead(entry['id'], entry['read_trust_seed']))
             encrypted_blob = vlob['blob']
             key = from_jsonb64(entry['key']) if entry['key'] else None
             encryptor = load_sym_key(key)
             encryptor.decrypt(encrypted_blob)
         except VlobNotFound:
             return False
     return True
Example #16
def test_perform_vlob_update(app):
    with freeze_time('2012-01-01') as frozen_datetime:
        eff = app.perform_vlob_update(EVlobUpdate('123', 'ABC', 1, 'foo'))
        perform_sequence([], eff)
        assert app.last_modified == Arrow.fromdatetime(frozen_datetime())
    blob = 'bar'
    eff = app.perform_vlob_update(EVlobUpdate('123', 'ABC', 1, blob))
    perform_sequence([], eff)
    eff = app.perform_vlob_read(EVlobRead('123', 'ABC'))
    vlob = perform_sequence([], eff)
    assert sorted(list(vlob.keys())) == ['blob', 'id', 'version']
    assert vlob['id'] == '123'
    assert vlob['blob'] == blob
    assert vlob['version'] == 1
Example #17
 def check_consistency(self, manifest):
     entries = [entry for entry in list(manifest['entries'].values()) if entry]
     entries += manifest['dustbin']
     for entry in entries:
         try:
             vlob = yield Effect(EVlobRead(entry['id'],
                                           entry['read_trust_seed'],
                                           manifest['versions'][entry['id']]))
             encrypted_blob = vlob['blob']
             encrypted_blob = from_jsonb64(encrypted_blob)
             key = from_jsonb64(entry['key']) if entry['key'] else None
             encryptor = load_sym_key(key)
             encryptor.decrypt(encrypted_blob)  # TODO check exception
         except VlobNotFound:
             return False
     return True
Example #18
 def reencrypt(self):
     yield self.flush()
     version = self.get_version()
     old_vlob = yield Effect(EVlobRead(self.id, self.read_trust_seed, version))
     old_blob = old_vlob['blob']
     old_encrypted_blob = from_jsonb64(old_blob)
     new_blob = self.encryptor.decrypt(old_encrypted_blob)
     self.encryptor = generate_sym_key()
     new_encrypted_blob = self.encryptor.encrypt(new_blob)
     new_encrypted_blob = to_jsonb64(new_encrypted_blob)
     new_vlob = yield Effect(EVlobCreate(new_encrypted_blob))
     del File.files[self.id]
     self.id = new_vlob['id']
     self.read_trust_seed = new_vlob['read_trust_seed']
     self.write_trust_seed = new_vlob['write_trust_seed']
     File.files[self.id] = self
     self.dirty = True
Example #19
 def reload(self, reset=False):
     # Subscribe to events
     # yield Effect(EConnectEvent('on_vlob_updated', self.id, self.handler)) # TODO call
     vlob = yield Effect(EVlobRead(self.id, self.read_trust_seed))
     blob = from_jsonb64(vlob['blob'])
     content = self.encryptor.decrypt(blob)
     if not reset and vlob['version'] <= self.version:
         return
     new_manifest = ejson_loads(content.decode())
     backup_new_manifest = deepcopy(new_manifest)
     consistency = yield self.check_consistency(new_manifest)
     if not consistency:
         raise ManifestError('not_consistent', 'Group manifest not consistent.')
     if not reset:
         diff = yield self.diff_versions()
         new_manifest = self.patch(new_manifest, diff)
     self.entries = new_manifest['entries']
     self.dustbin = new_manifest['dustbin']
     self.version = vlob['version']
     self.original_manifest = backup_new_manifest
     versions = new_manifest['versions']
     file_vlob = None
     for vlob_id, version in sorted(versions.items()):
         for entry in self.entries.values():
             if entry and entry['id'] == vlob_id:
                 file_vlob = entry
                 break
         if not file_vlob:
             for entry in self.dustbin:
                 if entry['id'] == vlob_id:
                     file_vlob = {'id': entry['id'],
                                  'read_trust_seed': entry['read_trust_seed'],
                                  'write_trust_seed': entry['write_trust_seed'],
                                  'key': entry['key']}
                     break
         if file_vlob:
             file_vlob = None
             file = yield File.load(entry['id'],
                                    entry['key'],
                                    entry['read_trust_seed'],
                                    entry['write_trust_seed'])
             try:
                 yield file.restore(version)
             except FileError:
                 pass
Example #20
 def load(cls, id, key, read_trust_seed, write_trust_seed, version=None):
     if id in File.files:
         return File.files[id]
     self = File()
     self.id = id
     self.read_trust_seed = read_trust_seed
     self.write_trust_seed = write_trust_seed
     self.encryptor = load_sym_key(from_jsonb64(key))
     vlob = yield Effect(EVlobRead(self.id, self.read_trust_seed, version))
     self.version = vlob['version']
     self.dirty = False
     vlob_list = yield Effect(EVlobList())
     if vlob['id'] in vlob_list:
         self.dirty = True
         self.version -= 1
     self.modifications = []
     File.files[self.id] = self
     return self
Example #21
 def test_get_blocks(self, file):
     file.dirty = False
     file.version = 1
     vlob_id = '1234'
     block_ids = ['4567', '5678', '6789']
     chunk_digest = digest(b'')
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': chunk_digest,
             'size': 0
         }, {
             'block': block_ids[1],
             'digest': chunk_digest,
             'size': 0
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': chunk_digest,
             'size': 0
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000002>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     sequence = [
         (EVlobRead(vlob_id, '42',
                    1), const({
                        'id': vlob_id,
                        'blob': blob,
                        'version': 1
                    })),
     ]
     ret = perform_sequence(sequence, file.get_blocks())
     assert ret == block_ids
     assert file.dirty is False
     assert file.version == 1
Example #22
 def test_restore(self, file):
     vlob_id = '1234'
     content = b'This is a test content.'
     block_ids = ['4567', '5678', '6789']
     # Original content
     chunk_1 = content[:5]
     chunk_2 = content[5:14]
     chunk_3 = content[14:]
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }, {
             'block': block_ids[1],
             'digest': digest(chunk_2),
             'size': len(chunk_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': digest(chunk_3),
             'size': len(chunk_3)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000002>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     # New content
     new_chuck_2 = b'is A test'
     new_block_id = '7654'
     new_blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }, {
         'blocks': [{
             'block': new_block_id,
             'digest': digest(new_chuck_2),
             'size': len(new_chuck_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000003>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': digest(chunk_3),
             'size': len(chunk_3)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000002>')
     }]
     new_blob = ejson_dumps(new_blob).encode()
     new_blob = to_jsonb64(new_blob)
     # Restore not committed file with version = 1
     file.dirty = False
     with pytest.raises(FileError):
         perform_sequence([], file.restore())
     assert file.dirty is False
     file.dirty = True
     # Restore committed file with version = 1
     file.dirty = False
     file.version = 1
     with pytest.raises(FileError):
         perform_sequence([], file.restore())
     assert file.dirty is False
     # Restore not committed file with version = current version
     file.dirty = True
     file.version = 5
     with pytest.raises(FileError):
         perform_sequence([], file.restore(6))
     assert file.dirty is True
     # Restore committed file with version = current version
     file.dirty = False
     file.version = 6
     with pytest.raises(FileError):
         perform_sequence([], file.restore(6))
     assert file.dirty is False
     # Restore previous version
     sequence = [
         (
             EVlobRead(vlob_id, '42', 6),  # Discard
             const({
                 'id': vlob_id,
                 'blob': blob,
                 'version': 6
             })),
         (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
         (EBlockDelete('5678'), noop),
         (EBlockDelete('6789'), noop),
         (EVlobDelete('1234'), noop),
         (EVlobRead('1234', '42', 5),
          const({
              'id': vlob_id,
              'blob': new_blob,
              'version': 5
          })),
         (EVlobUpdate(vlob_id, '43', 7, new_blob), noop)
     ]
     ret = perform_sequence(sequence, file.restore())
     assert ret is None
     assert file.dirty is True
     assert file.version == 6
     # Restore specific version
     sequence = [
         (EVlobRead(vlob_id, '42', 7),
          const({
              'id': vlob_id,
              'blob': new_blob,
              'version': 7
          })),
         (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
         (EBlockDelete('7654'), noop), (EBlockDelete('6789'), noop),
         (EVlobDelete('1234'), noop),
         (EVlobRead('1234', '42',
                    2), const({
                        'id': vlob_id,
                        'blob': blob,
                        'version': 2
                    })), (EVlobUpdate(vlob_id, '43', 7, blob), noop)
     ]
     ret = perform_sequence(sequence, file.restore(2))
     assert ret is None
     assert file.dirty is True
     assert file.version == 6
Example #23
 def test_stat(self, file):
     vlob_id = '1234'
     content = b'This is a test content.'
     block_ids = ['4567', '5678', '6789']
     # Original content
     chunk_1 = content[:5]
     chunk_2 = content[5:14]
     chunk_3 = content[14:]
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }, {
             'block': block_ids[1],
             'digest': digest(chunk_2),
             'size': len(chunk_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': digest(chunk_3),
             'size': len(chunk_3)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000002>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     sequence = [(EVlobRead(vlob_id, '42', 1),
                  const({
                      'id': vlob_id,
                      'blob': blob,
                      'version': 1
                  }))]
     ret = perform_sequence(sequence, file.stat())
     assert ret == {
         'type': 'file',
         'id': vlob_id,
         'created': '2012-01-01T00:00:00',
         'updated': '2012-01-01T00:00:00',
         'size': 23,
         'version': 1
     }
     # TODO check created and updated time are different
     # Truncate in buffer
     file.truncate(20)
     ret = perform_sequence(sequence, file.stat())
     assert ret == {
         'type': 'file',
         'id': vlob_id,
         'created': '2012-01-01T00:00:00',
         'updated': '2012-01-01T00:00:00',
         'size': 20,
         'version': 1
     }
     # Write in buffer
     file.write(b'foo', 30)
     ret = perform_sequence(sequence, file.stat())
     assert ret == {
         'type': 'file',
         'id': vlob_id,
         'created': '2012-01-01T00:00:00',
         'updated': '2012-01-01T00:00:00',
         'size': 33,
         'version': 1
     }
Example #24
 def test_flush(self, file):
     file.truncate(9)
     file.write(b'IS', 5)
     file.write(b'IS a nice test content.', 5)
     file.dirty = False
     file.version = 2
     vlob_id = '1234'
     content = b'This is a test content.'
     block_ids = ['4567', '5678', '6789']
     # Original content
     chunk_1 = content[:5]
     chunk_2 = content[5:14]
     chunk_3 = content[14:]
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }, {
             'block': block_ids[1],
             'digest': digest(chunk_2),
             'size': len(chunk_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': digest(chunk_3),
             'size': len(chunk_3)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000002>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     # New content after truncate
     new_chuck_2 = b'is a'
     new_block_id = '7654'
     new_blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }, {
         'blocks': [{
             'block': new_block_id,
             'digest': digest(new_chuck_2),
             'size': len(new_chuck_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000003>')
     }]
     new_blob = ejson_dumps(new_blob).encode()
     new_blob = to_jsonb64(new_blob)
     # New content after write
     new_block_2_id = '6543'
     new_chunk_4 = b'IS a nice test content.'
     new_blob_2 = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }, {
         'blocks': [{
             'block': new_block_2_id,
             'digest': digest(new_chunk_4),
             'size': len(new_chunk_4)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000004>')
     }]
     new_blob_2 = ejson_dumps(new_blob_2).encode()
     new_blob_2 = to_jsonb64(new_blob_2)
     sequence = [
         (
             EVlobRead(vlob_id, '42', 2),  # Get blocks
             const({
                 'id': vlob_id,
                 'blob': blob,
                 'version': 2
             })),
         (
             EVlobRead(vlob_id, '42', 2),  # Matching blocks
             const({
                 'id': vlob_id,
                 'blob': blob,
                 'version': 2
             })),
         (EBlockRead(block_ids[1]),
          const({
              'content': to_jsonb64(chunk_2),
              'creation_date': '2012-01-01T00:00:00'
          })),
         (EBlockCreate(to_jsonb64(new_chuck_2)), const(new_block_id)),
         (EVlobUpdate(vlob_id, '43', 3, new_blob), noop),
         (
             EVlobRead(vlob_id, '42', 3),  # Matching blocks
             const({
                 'id': vlob_id,
                 'blob': new_blob,
                 'version': 3
             })),
         (EBlockCreate(to_jsonb64(new_chunk_4)), const(new_block_2_id)),
         (EVlobUpdate(vlob_id, '43', 3, new_blob_2), noop),
         (EVlobRead(vlob_id, '42', 3),
          const({
              'id': vlob_id,
              'blob': new_blob_2,
              'version': 3
          })),
         (EBlockDelete('5678'), conste(BlockNotFound('Block not found.'))),
         (EBlockDelete('6789'), noop),
     ]
     ret = perform_sequence(sequence, file.flush())
     assert ret is None
     assert file.dirty is True
     assert file.version == 2
Example #25
 def test_commit(self, file):
     vlob_id = '1234'
     content = b'This is a test content.'
     block_ids = ['4567', '5678', '6789']
     new_vlob = {
         'id': '2345',
         'read_trust_seed': 'ABC',
         'write_trust_seed': 'DEF'
     }
     # Original content
     chunk_1 = content[:5]
     chunk_2 = content[5:14]
     chunk_3 = content[14:]
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }, {
             'block': block_ids[1],
             'digest': digest(chunk_2),
             'size': len(chunk_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000003>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': digest(chunk_3),
             'size': len(chunk_3)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000004>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     # New content after truncate
     new_chuck_2 = b'is a'
     new_block_id = '7654'
     new_blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000003>')
     }, {
         'blocks': [{
             'block': new_block_id,
             'digest': digest(new_chuck_2),
             'size': len(new_chuck_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000003>')
     }]
     new_blob = ejson_dumps(new_blob).encode()
     new_blob = to_jsonb64(new_blob)
     file.truncate(9)
     sequence = [
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': blob,
                        'version': 1
                    })),
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': blob,
                        'version': 1
                    })),
         (EBlockRead(block_ids[1]),
          const({
              'content': to_jsonb64(chunk_2),
              'creation_date': '2012-01-01T00:00:00'
          })), (EBlockCreate(to_jsonb64(new_chuck_2)), const(new_block_id)),
         (EVlobUpdate(vlob_id, '43', 1, new_blob), noop),
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': new_blob,
                        'version': 1
                    })),
         (EBlockDelete('5678'), conste(BlockNotFound('Block not found.'))),
         (EBlockDelete('6789'), noop),
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': new_blob,
                        'version': 1
                    })), (EBlockSynchronize('4567'), const(True)),
         (EBlockSynchronize('7654'), const(False)),
         (EVlobSynchronize('1234'), const(new_vlob))
     ]
     ret = perform_sequence(sequence, file.commit())
     new_vlob['key'] = to_jsonb64(b'<dummy-key-00000000000000000002>')
     assert ret == new_vlob
     assert file.dirty is False
     assert file.version == 1
Example #26
    def test_find_matching_blocks(self, file):
        vlob_id = '1234'
        block_size = 4096
        # Contents
        contents = {}
        total_length = 0
        for index, length in enumerate([
                block_size + 1, block_size - 1, block_size, 2 * block_size + 2,
                2 * block_size - 2, 2 * block_size
        ]):
            content = b''.join(
                [str(random.randint(1, 9)).encode() for i in range(0, length)])
            contents[index] = content
            total_length += length
        # Blocks

        def generator():
            i = 2000
            while True:
                yield str(i)
                i += 1

        gen = generator()

        blocks = {}
        block_contents = {}
        block_id = 2000
        for index, content in contents.items():
            chunks = [
                content[i:i + block_size]
                for i in range(0, len(content), block_size)
            ]
            if not chunks:
                chunks = [b'']
            sequence = []
            for chunk in chunks:
                encoded_chunk = to_jsonb64(chunk)
                sequence.append((EBlockCreate(encoded_chunk),
                                 lambda id=id: next(gen)))  # TODO dirty
                block_contents[str(block_id)] = encoded_chunk
                block_id += 1
            blocks[index] = perform_sequence(sequence,
                                             file._build_file_blocks(content))
        # Create file
        blob = ejson_dumps([blocks[i] for i in range(0, len(blocks))]).encode()
        blob = to_jsonb64(blob)
        # All matching blocks
        sequence = [(EVlobRead(vlob_id, '42', 1),
                     const({
                         'id': vlob_id,
                         'blob': blob,
                         'version': 1
                     }))]
        matching_blocks = perform_sequence(sequence,
                                           file._find_matching_blocks())
        assert matching_blocks == {
            'pre_excluded_blocks': [],
            'pre_excluded_data': b'',
            'pre_included_data': b'',
            'included_blocks': [blocks[i] for i in range(0, len(blocks))],
            'post_included_data': b'',
            'post_excluded_data': b'',
            'post_excluded_blocks': []
        }
        # With offset
        delta = 10
        offset = (blocks[0]['blocks'][0]['size'] +
                  blocks[0]['blocks'][1]['size'] +
                  blocks[1]['blocks'][0]['size'] +
                  blocks[2]['blocks'][0]['size'] - delta)
        sequence = [(EVlobRead(vlob_id, '42', 1),
                     const({
                         'id': vlob_id,
                         'blob': blob,
                         'version': 1
                     })),
                    (EBlockRead('2003'),
                     const({
                         'content': block_contents['2003'],
                         'creation_date': '2012-01-01T00:00:00'
                     }))]
        matching_blocks = perform_sequence(
            sequence, file._find_matching_blocks(None, offset))
        pre_excluded_data = contents[2][:blocks[2]['blocks'][0]['size'] -
                                        delta]
        pre_included_data = contents[2][-delta:]
        assert matching_blocks == {
            'pre_excluded_blocks': [blocks[0], blocks[1]],
            'pre_excluded_data': pre_excluded_data,
            'pre_included_data': pre_included_data,
            'included_blocks': [blocks[i] for i in range(3, 6)],
            'post_included_data': b'',
            'post_excluded_data': b'',
            'post_excluded_blocks': []
        }
        # With small size
        delta = 10
        size = 5
        offset = (blocks[0]['blocks'][0]['size'] +
                  blocks[0]['blocks'][1]['size'] +
                  blocks[1]['blocks'][0]['size'] +
                  blocks[2]['blocks'][0]['size'] - delta)
        sequence = [(EVlobRead(vlob_id, '42', 1),
                     const({
                         'id': vlob_id,
                         'blob': blob,
                         'version': 1
                     })),
                    (EBlockRead(id='2003'),
                     const({
                         'content': block_contents['2003'],
                         'creation_date': '2012-01-01T00:00:00'
                     }))]
        matching_blocks = perform_sequence(
            sequence, file._find_matching_blocks(size, offset))
        pre_excluded_data = contents[2][:blocks[2]['blocks'][0]['size'] -
                                        delta]
        pre_included_data = contents[2][-delta:][:size]
        post_excluded_data = contents[2][-delta:][size:]
        assert matching_blocks == {
            'pre_excluded_blocks': [blocks[0], blocks[1]],
            'pre_excluded_data': pre_excluded_data,
            'pre_included_data': pre_included_data,
            'included_blocks': [],
            'post_included_data': b'',
            'post_excluded_data': post_excluded_data,
            'post_excluded_blocks': [blocks[i] for i in range(3, 6)]
        }
        # With big size
        delta = 10
        size = delta
        size += blocks[3]['blocks'][0]['size']
        size += blocks[3]['blocks'][1]['size']
        size += blocks[3]['blocks'][2]['size']
        size += 2 * delta
        offset = (blocks[0]['blocks'][0]['size'] +
                  blocks[0]['blocks'][1]['size'] +
                  blocks[1]['blocks'][0]['size'] +
                  blocks[2]['blocks'][0]['size'] - delta)
        sequence = [(EVlobRead(vlob_id, '42', 1),
                     const({
                         'id': vlob_id,
                         'blob': blob,
                         'version': 1
                     })),
                    (EBlockRead('2003'),
                     const({
                         'content': block_contents['2003'],
                         'creation_date': '2012-01-01T00:00:00'
                     })),
                    (EBlockRead('2007'),
                     const({
                         'content': block_contents['2007'],
                         'creation_date': '2012-01-01T00:00:00'
                     }))]
        matching_blocks = perform_sequence(
            sequence, file._find_matching_blocks(size, offset))
        pre_excluded_data = contents[2][:-delta]
        pre_included_data = contents[2][-delta:]
        post_included_data = contents[4][:2 * delta]
        post_excluded_data = contents[4][:block_size][2 * delta:]
        partial_block_4 = deepcopy(blocks[4])
        del partial_block_4['blocks'][0]
        assert matching_blocks == {
            'pre_excluded_blocks': [blocks[0], blocks[1]],
            'pre_excluded_data': pre_excluded_data,
            'pre_included_data': pre_included_data,
            'included_blocks': [blocks[3]],
            'post_included_data': post_included_data,
            'post_excluded_data': post_excluded_data,
            'post_excluded_blocks': [partial_block_4, blocks[5]]
        }
        # With big size and no delta
        size = blocks[3]['blocks'][0]['size']
        size += blocks[3]['blocks'][1]['size']
        size += blocks[3]['blocks'][2]['size']
        offset = (blocks[0]['blocks'][0]['size'] +
                  blocks[0]['blocks'][1]['size'] +
                  blocks[1]['blocks'][0]['size'] +
                  blocks[2]['blocks'][0]['size'])
        sequence = [
            (EVlobRead(vlob_id, '42',
                       1), const({
                           'id': vlob_id,
                           'blob': blob,
                           'version': 1
                       })),
        ]
        matching_blocks = perform_sequence(
            sequence, file._find_matching_blocks(size, offset))
        assert matching_blocks == {
            'pre_excluded_blocks': [blocks[0], blocks[1], blocks[2]],
            'pre_excluded_data': b'',
            'pre_included_data': b'',
            'included_blocks': [blocks[3]],
            'post_included_data': b'',
            'post_excluded_data': b'',
            'post_excluded_blocks': [blocks[4], blocks[5]]
        }
        # With total size
        sequence = [
            (EVlobRead(vlob_id, '42',
                       1), const({
                           'id': vlob_id,
                           'blob': blob,
                           'version': 1
                       })),
        ]
        matching_blocks = perform_sequence(
            sequence, file._find_matching_blocks(total_length, 0))
        assert matching_blocks == {
            'pre_excluded_blocks': [],
            'pre_excluded_data': b'',
            'pre_included_data': b'',
            'included_blocks': [blocks[i] for i in range(0, 6)],
            'post_included_data': b'',
            'post_excluded_data': b'',
            'post_excluded_blocks': []
        }
Example #27
 def _find_matching_blocks(self, size=None, offset=0):
     if size is None:
         size = sys.maxsize
     pre_excluded_blocks = []
     post_excluded_blocks = []
     version = self.get_version()
     vlob = yield Effect(EVlobRead(self.id, self.read_trust_seed, version))
     blob = vlob['blob']
     encrypted_blob = from_jsonb64(blob)
     blob = self.encryptor.decrypt(encrypted_blob)
     blob = ejson_loads(blob.decode())
     pre_excluded_blocks = []
     included_blocks = []
     post_excluded_blocks = []
     cursor = 0
     pre_excluded_data = b''
     pre_included_data = b''
     post_included_data = b''
     post_excluded_data = b''
     for blocks_and_key in blob:
         block_key = blocks_and_key['key']
         decoded_block_key = from_jsonb64(block_key)
         encryptor = load_sym_key(decoded_block_key)
         for block_properties in blocks_and_key['blocks']:
             cursor += block_properties['size']
             if cursor <= offset:
                 if len(pre_excluded_blocks) and pre_excluded_blocks[-1]['key'] == block_key:
                     pre_excluded_blocks[-1]['blocks'].append(block_properties)
                 else:
                     pre_excluded_blocks.append({'blocks': [block_properties], 'key': block_key})
             elif cursor > offset and cursor - block_properties['size'] < offset:
                 delta = cursor - offset
                 block = yield Effect(EBlockRead(block_properties['block']))
                 content = from_jsonb64(block['content'])
                 block_data = encryptor.decrypt(content)
                 pre_excluded_data = block_data[:-delta]
                 pre_included_data = block_data[-delta:][:size]
                 if size < len(block_data[-delta:]):
                     post_excluded_data = block_data[-delta:][size:]
             elif cursor > offset and cursor <= offset + size:
                 if len(included_blocks) and included_blocks[-1]['key'] == block_key:
                     included_blocks[-1]['blocks'].append(block_properties)
                 else:
                     included_blocks.append({'blocks': [block_properties], 'key': block_key})
             elif cursor > offset + size and cursor - block_properties['size'] < offset + size:
                 delta = offset + size - (cursor - block_properties['size'])
                 block = yield Effect(EBlockRead(block_properties['block']))
                 content = from_jsonb64(block['content'])
                 block_data = encryptor.decrypt(content)
                 post_included_data = block_data[:delta]
                 post_excluded_data = block_data[delta:]
             else:
                 if len(post_excluded_blocks) and post_excluded_blocks[-1]['key'] == block_key:
                     post_excluded_blocks[-1]['blocks'].append(block_properties)
                 else:
                     post_excluded_blocks.append({'blocks': [block_properties],
                                                  'key': block_key})
     return {
         'pre_excluded_blocks': pre_excluded_blocks,
         'pre_excluded_data': pre_excluded_data,
         'pre_included_data': pre_included_data,
         'included_blocks': included_blocks,
         'post_included_data': post_included_data,
         'post_excluded_data': post_excluded_data,
         'post_excluded_blocks': post_excluded_blocks
     }
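
The cursor arithmetic above partitions the existing blocks around the [offset, offset + size) window: blocks that end at or before the offset are pre-excluded, blocks that fit inside the window are included, blocks that start after it are post-excluded, and blocks straddling a window edge are read and split into pre/post data. A standalone sketch of that classification with toy sizes (assumed values, unrelated to the fixtures above):

# Toy illustration of the cursor/offset/size classification used by
# _find_matching_blocks (simplified: it only labels blocks, with no block
# reads or decryption).
def classify(sizes, offset, size):
    cursor, labels = 0, []
    for s in sizes:
        cursor += s
        if cursor <= offset:
            labels.append('pre_excluded')
        elif cursor - s < offset:
            labels.append('straddles offset: pre_excluded/pre_included data')
        elif cursor <= offset + size:
            labels.append('included')
        elif cursor - s < offset + size:
            labels.append('straddles end: post_included/post_excluded data')
        else:
            labels.append('post_excluded')
    return labels

# With block sizes 5, 9 and 9 (the chunk sizes used in the tests), offset=7
# and size=9: the first block is pre-excluded, the second straddles the
# window start and the third straddles the window end.
print(classify([5, 9, 9], offset=7, size=9))
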
Example #28
 def test_read(self, file):
     file.dirty = False
     file.version = 1
     # Empty file
     vlob_id = '1234'
     chunk_digest = digest(b'')
     blob = [{
         'blocks': [{
             'block': '4567',
             'digest': chunk_digest,
             'size': 0
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     sequence = [
         (EVlobRead(vlob_id, '42',
                    1), const({
                        'id': vlob_id,
                        'blob': blob,
                        'version': 1
                    })),
     ]
     read_content = perform_sequence(sequence, file.read())
     assert read_content == b''
     # Not empty file
     content = b'This is a test content.'
     block_ids = ['4567', '5678', '6789']
     chunk_1 = content[:5]
     chunk_2 = content[5:14]
     chunk_3 = content[14:]
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }, {
             'block': block_ids[1],
             'digest': digest(chunk_2),
             'size': len(chunk_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': digest(chunk_3),
             'size': len(chunk_3)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000002>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     sequence = [(EVlobRead(vlob_id, '42', 1),
                  const({
                      'id': vlob_id,
                      'blob': blob,
                      'version': 1
                  })),
                 (EBlockRead(block_ids[0]),
                  const({
                      'content': to_jsonb64(chunk_1),
                      'creation_date': '2012-01-01T00:00:00'
                  })),
                 (EBlockRead(block_ids[1]),
                  const({
                      'content': to_jsonb64(chunk_2),
                      'creation_date': '2012-01-01T00:00:00'
                  })),
                 (EBlockRead(block_ids[2]),
                  const({
                      'content': to_jsonb64(chunk_3),
                      'creation_date': '2012-01-01T00:00:00'
                  }))]
     read_content = perform_sequence(sequence, file.read())
     assert read_content == content
     # Offset
     offset = 5
     sequence = [(EVlobRead(vlob_id, '42', 1),
                  const({
                      'id': vlob_id,
                      'blob': blob,
                      'version': 1
                  })),
                 (EBlockRead(block_ids[1]),
                  const({
                      'content': to_jsonb64(chunk_2),
                      'creation_date': '2012-01-01T00:00:00'
                  })),
                 (EBlockRead(block_ids[2]),
                  const({
                      'content': to_jsonb64(chunk_3),
                      'creation_date': '2012-01-01T00:00:00'
                  }))]
     read_content = perform_sequence(sequence, file.read(offset=offset))
     assert read_content == content[offset:]
     # Size
     size = 9
     sequence = [(EVlobRead(vlob_id, '42', 1),
                  const({
                      'id': vlob_id,
                      'blob': blob,
                      'version': 1
                  })),
                 (EBlockRead(block_ids[1]),
                  const({
                      'content': to_jsonb64(chunk_2),
                      'creation_date': '2012-01-01T00:00:00'
                  }))]
     read_content = perform_sequence(sequence,
                                     file.read(offset=offset, size=size))
     assert read_content == content[offset:][:size]
     assert file.dirty is False
     assert file.version == 1