Example #1
 def dumps(self, original_manifest=False):
     if original_manifest:
         return ejson_dumps(self.original_manifest)
     else:
         versions = yield self.get_vlobs_versions()
         return ejson_dumps({'entries': self.entries,
                             'dustbin': self.dustbin,
                             'versions': versions})
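All of these examples call ejson_dumps (and its counterpart ejson_loads) without showing a definition. Judging from the calls, it serializes to a str that callers then .encode(), and the loads side raises json.decoder.JSONDecodeError on invalid input (as the execute_raw_cmd examples below assume). A minimal stand-in could look like the sketch below; the extended handling of bytes and datetime values is an assumption for illustration, not the library's confirmed behavior.

import json
from base64 import b64encode
from datetime import datetime

def ejson_dumps(obj):
    # Hypothetical "extended JSON" encoder: serialize to str, with assumed
    # support for a couple of non JSON-native types.
    def _default(o):
        if isinstance(o, bytes):
            return b64encode(o).decode('ascii')
        if isinstance(o, datetime):
            return o.isoformat()
        raise TypeError('%r is not ejson serializable' % o)
    return json.dumps(obj, default=_default, sort_keys=True)

def ejson_loads(raw):
    # Counterpart decoder; plain json.loads is enough for this sketch.
    return json.loads(raw)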
Example #2
 def dumps(self, original_manifest=False):
     if original_manifest:
         manifest = deepcopy(self.original_manifest)
         manifest['groups'] = self.get_group_vlobs()
         return ejson_dumps(manifest)
     else:
         versions = yield self.get_vlobs_versions()
         return ejson_dumps({'entries': self.entries,
                             'dustbin': self.dustbin,
                             'groups': self.get_group_vlobs(),
                             'versions': versions})
Example #3
def test_perform_undelete(app, alice_identity, file):
    vlob = {'id': '2345', 'read_trust_seed': '42', 'write_trust_seed': '43'}
    blob = [{
        'blocks': [{
            'block': '4567',
            'digest': digest(b''),
            'size': 0
        }],
        'key': to_jsonb64(b'<dummy-key-00000000000000000001>')
    }]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_delete(EDelete('/foo'))
    sequence = [
        (EIdentityGet(), const(alice_identity)),
        (EVlobRead(vlob['id'], vlob['read_trust_seed']),
         const({
             'id': vlob['id'],
             'blob': blob,
             'version': 1
         })), (EVlobList(), const([])),
        (EVlobRead(vlob['id'], vlob['read_trust_seed'],
                   1), const({
                       'id': vlob['id'],
                       'blob': blob,
                       'version': 1
                   })),
        (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
        (EVlobDelete('2345'), conste(VlobNotFound('Vlob not found.')))
    ]
    ret = perform_sequence(sequence, eff)
    eff = app.perform_undelete(EUndelete('2345'))
    sequence = [(EIdentityGet(), const(alice_identity))]
    ret = perform_sequence(sequence, eff)
    assert ret is None
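The tests in this section drive effect-returning code through perform_sequence with small helpers named const, noop and conste, none of which are defined in the snippets. A rough sketch of what such helpers typically look like in this style of effect test (these exact definitions are an assumption, not necessarily the project's or the effect library's own):

def const(value):
    # Performer that ignores the intent and always returns `value`.
    return lambda intent: value

def noop(intent):
    # Performer that does nothing and returns None.
    return None

def conste(exc):
    # Performer that raises the given exception when its intent is performed.
    def _raise(intent):
        raise exc
    return _raise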
Example #4
def test_perform_file_create(app, alice_identity, file):
    vlob = {'id': '2345', 'read_trust_seed': '42', 'write_trust_seed': '43'}
    block_id = '4567'
    # Already exists
    blob = [{
        'blocks': [{
            'block': block_id,
            'digest': digest(b''),
            'size': 0
        }],
        'key': to_jsonb64(b'<dummy-key-00000000000000000003>')
    }]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_file_create(EFileCreate('/foo'))
    sequence = [
        (EBlockCreate(''), const(block_id)),
        (EVlobCreate(blob), const(vlob)),
        (EIdentityGet(), const(alice_identity)),
        (EVlobRead(vlob['id'], vlob['read_trust_seed'],
                   1), const({
                       'id': vlob['id'],
                       'blob': blob,
                       'version': 1
                   })),
        (EBlockDelete(block_id), noop),
        (EVlobDelete(vlob['id']), noop),
    ]
    with pytest.raises(ManifestError):
        perform_sequence(sequence, eff)
Example #5
 async def send_cmd(self, cmd, **kwargs):
     msg = {'cmd': cmd, **kwargs}
     raw_msg = ejson_dumps(msg).encode()
     self.writer.write(raw_msg)
     self.writer.write(b'\n')
     raw_resp = await self.reader.readline()
     return ejson_loads(raw_resp.decode())
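Example #5 (like Example #10 further down) frames every request and response as one JSON document terminated by a newline. A hedged sketch of the matching server side of such a line-delimited protocol, written as an asyncio stream handler (the handler name and the trivial 'ok' response are illustrative assumptions, not the project's actual server):

async def handle_client(reader, writer):
    # One JSON message per line in, one JSON message per line out
    # (ejson_dumps/ejson_loads as sketched after Example #1).
    # Could be registered with e.g. asyncio.start_server(handle_client, ...).
    while True:
        raw = await reader.readline()
        if not raw:
            break  # peer closed the connection
        msg = ejson_loads(raw.decode())
        resp = {'status': 'ok', 'cmd': msg.get('cmd')}
        writer.write(ejson_dumps(resp).encode() + b'\n')
        await writer.drain()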
Example #6
 async def open_connection(self, identity):
     logger.debug('Connection to backend opened')
     assert not self._websocket, "Connection to backend already opened"
     try:
         self._websocket = await websockets.connect(self.url)
         # Handle handshake
         raw = await self._websocket.recv()
         challenge = ejson_loads(raw)
         answer = identity.private_key.sign(challenge['challenge'].encode())
         await self._websocket.send(
             ejson_dumps({
                 'handshake': 'answer',
                 'identity': identity.id,
                 'answer': to_jsonb64(answer)
             }))
         resp = ejson_loads(await self._websocket.recv())
         if resp['status'] != 'ok':
             await self.close_connection()
             raise exception_from_status(resp['status'])(resp['label'])
         self._ws_recv_handler_task = asyncio.ensure_future(
             self._ws_recv_handler(), loop=self.loop)
         if self.watchdog_time:
             self._watchdog_task = asyncio.ensure_future(self._watchdog(),
                                                         loop=self.loop)
     except (ConnectionRefusedError,
             websockets.exceptions.ConnectionClosed) as exc:
         raise BackendConnectionError('Cannot connect to backend (%s)' %
                                      exc)
Example #7
def app(mock_crypto_passthrough, alice_identity):
    # app = FSComponent()
    # identity_component = IdentityComponent()
    fs_component = FSComponent()
    # synchronizer_component = SynchronizerComponent()
    # identity_component = IdentityComponent()
    # app = app_factory(
    #     fs_component.get_dispatcher(),
    #     synchronizer_component.get_dispatcher(),
    #     identity_component.get_dispatcher()
    # )
    blob = {
        'dustbin': [],
        'entries': {
            '/': None
        },
        'groups': {},
        'versions': {}
    }
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    sequence = [(EIdentityGet(), const(alice_identity)),
                (EUserVlobRead(), const({
                    'blob': '',
                    'version': 0
                })), (EUserVlobUpdate(1, blob), noop)]
    perform_sequence(sequence, fs_component._get_manifest())
    return fs_component
Example #8
def test_perform_file_history(app, file, alice_identity):
    vlob = {'id': '2345', 'read_trust_seed': '42', 'write_trust_seed': '43'}
    blob = [{
        'blocks': [{
            'block': '4567',
            'digest': digest(b''),
            'size': 0
        }],
        'key': to_jsonb64(b'<dummy-key-00000000000000000001>')
    }]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_file_history(EFileHistory('/foo', 1, 1))
    sequence = [
        (EIdentityGet(), const(alice_identity)),
        (EIdentityGet(), const(alice_identity)),
        (EVlobRead(vlob['id'], vlob['read_trust_seed']),
         const({
             'id': vlob['id'],
             'blob': blob,
             'version': 1
         })),
        (EVlobList(), const([])),
    ]
    perform_sequence(sequence, eff)
Example #9
 def test_discard(self, file):
     content = b'This is a test content.'
     block_ids = ['4567', '5678', '6789']
     # Original content
     chunk_1 = content[:5]
     chunk_2 = content[5:14]
     chunk_3 = content[14:]
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }, {
             'block': block_ids[1],
             'digest': digest(chunk_2),
             'size': len(chunk_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000003>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': digest(chunk_3),
             'size': len(chunk_3)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000004>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     # Already synchronized
     sequence = [
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': blob,
                        'version': 1
                    })),
         (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
         (EBlockDelete('5678'), noop),
         (EBlockDelete('6789'), noop),
         (EVlobDelete('1234'), conste(VlobNotFound('Block not found.'))
           )  # TODO vlob OR block exception
     ]
     ret = perform_sequence(sequence, file.discard())
     assert ret is False
     # Not already synchronized
     file.dirty = True
     file.version = 0
     sequence = [(EVlobRead('1234', '42', 1),
                  const({
                      'id': '1234',
                      'blob': blob,
                      'version': 1
                  })), (EBlockDelete('4567'), noop),
                 (EBlockDelete('5678'), noop), (EBlockDelete('6789'), noop),
                 (EVlobDelete('1234'), noop)]
     ret = perform_sequence(sequence, file.discard())
     assert ret is True
     assert file.dirty is False
Example #10
 def send_cmd(self, **msg):
     with self._socket_lock:
         req = ejson_dumps(msg).encode() + b'\n'
         logger.debug('Send: %r' % req)
         self.sock.send(req)
         raw_reps = self.sock.recv(4096)
         while raw_reps[-1] != ord(b'\n'):
             raw_reps += self.sock.recv(4096)
         logger.debug('Received: %r' % raw_reps)
         return ejson_loads(raw_reps[:-1].decode())
Example #11
 async def perform_group_add_identities(self, intent):
     async with self.connection.acquire() as conn:
         async with conn.cursor() as cur:
             await cur.execute('SELECT body FROM groups WHERE id=%s', (intent.name, ))
             ret = await cur.fetchone()
             if ret is None:
                 raise GroupNotFound('Group not found.')
             group = ejson_loads(ret[0])
             group_entry = 'admins' if intent.admin else 'users'
             group[group_entry] = list(set(group[group_entry]) | set(intent.identities))
             await cur.execute('UPDATE groups SET body=%s WHERE id=%s',
                 (ejson_dumps(group), intent.name))
Example #12
 def test_reencrypt(self, file):
     old_vlob = file.get_vlob()
     content = b'This is a test content.'
     block_ids = ['4567', '5678', '6789']
     # Original content
     chunk_1 = content[:5]
     chunk_2 = content[5:14]
     chunk_3 = content[14:]
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }, {
             'block': block_ids[1],
             'digest': digest(chunk_2),
             'size': len(chunk_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': digest(chunk_3),
             'size': len(chunk_3)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000002>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     sequence = [
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': blob,
                        'version': 1
                    })),
         (
             EVlobCreate(blob),  # TODO check re-encryption
             const({
                 'id': '2345',
                 'read_trust_seed': '21',
                 'write_trust_seed': '22'
             }))
     ]
     ret = perform_sequence(sequence, file.reencrypt())
     assert ret is None
     file.reencrypt()
     new_vlob = file.get_vlob()
     for property in old_vlob.keys():
         assert old_vlob[property] != new_vlob[property]
Example #13
def test_perform_group_create(app, alice_identity):
    blob = {'dustbin': [], 'entries': {'/': None}, 'versions': {}}
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_group_create(EGroupCreate('share'))
    sequence = [(EIdentityGet(), const(alice_identity)),
                (EVlobCreate(),
                 const({
                     'id': '1234',
                     'read_trust_seed': '42',
                     'write_trust_seed': '43'
                 })), (EVlobUpdate('1234', '43', 1, blob), noop)]
    ret = perform_sequence(sequence, eff)
    assert ret is None
Example #14
def execute_raw_cmd(raw_cmd):
    params = parse_cmd(raw_cmd)
    if not params:
        ret = {'status': 'bad_msg', 'label': 'Message is not a valid JSON.'}
    else:
        cmd_type = params.pop('cmd', None)
        if not isinstance(cmd_type, str):
            ret = {
                'status': 'bad_msg',
                'label': '`cmd` string field is mandatory.'
            }
        else:
            ret = yield execute_cmd(cmd_type, params)
    return ejson_dumps(ret).encode('utf-8')
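Example #14 relies on a parse_cmd helper that is not shown. Given that the execute_raw_cmd variant a bit further down inlines the same check with ejson_loads and a JSONDecodeError guard, a plausible sketch of parse_cmd (an assumption, not the project's actual code) is:

import json

def parse_cmd(raw_cmd):
    # Return the decoded command as a dict, or None if the payload is not
    # valid JSON (or not a JSON object); ejson_loads as in the other examples.
    try:
        params = ejson_loads(raw_cmd)
    except json.decoder.JSONDecodeError:
        return None
    return params if isinstance(params, dict) else None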
Example #15
 async def send_cmd(self, msg):
     if not self._websocket:
         raise BackendConnectionError(
             'BackendAPIService cannot send command in current state')
     try:
         await self._websocket.send(ejson_dumps(msg))
     except websockets.exceptions.ConnectionClosed as exc:
         raise BackendConnectionError('Cannot connect to backend (%s)' %
                                      exc)
     ret = await self._resp_queue.get()
     status = ret['status']
     if status == 'ok':
         return ret
     else:
         raise exception_from_status(status)(ret['label'])
Example #16
def execute_raw_cmd(raw_cmd: str):
    try:
        params = ejson_loads(raw_cmd)
    except json.decoder.JSONDecodeError:
        ret = {'status': 'bad_msg', 'label': 'Message is not a valid JSON.'}
    else:
        cmd_type = params.pop('cmd', None)
        if not isinstance(cmd_type, str):
            ret = {
                'status': 'bad_msg',
                'label': '`cmd` string field is mandatory.'
            }
        else:
            ret = yield execute_cmd(cmd_type, params)
    return ejson_dumps(ret)
Example #17
 def create(cls):
     self = File()
     blob = yield self._build_file_blocks(b'')
     blob = [blob]
     blob = ejson_dumps(blob).encode()
     self.encryptor = generate_sym_key()
     encrypted_blob = self.encryptor.encrypt(blob)
     encrypted_blob = to_jsonb64(encrypted_blob)
     vlob = yield Effect(EVlobCreate(encrypted_blob))
     self.id = vlob['id']
     self.read_trust_seed = vlob['read_trust_seed']
     self.write_trust_seed = vlob['write_trust_seed']
     self.dirty = True
     self.version = 0
     self.modifications = []
     File.files[self.id] = self
     return self
Example #18
def test_perform_synchronize(app, alice_identity):
    blob = {
        'dustbin': [],
        'entries': {
            '/': None
        },
        'groups': {},
        'versions': {}
    }
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_synchronize(ESynchronize())
    sequence = [(EIdentityGet(), const(alice_identity)),
                (EVlobList(), const([])), (EUserVlobUpdate(1, blob), noop),
                (EUserVlobSynchronize(), noop)]
    ret = perform_sequence(sequence, eff)
    assert ret is None
Example #19
 async def run():
     try:
         reader, writer = await asyncio.open_unix_connection(path=socket)
     except (FileNotFoundError, ConnectionRefusedError):
         raise SystemExit('ERROR: Cannot connect to parsec core at %s' %
                          socket)
     msg = {
         'cmd': 'identity_signup',
         'id': identity,
         'password': password,
         'key_size': key_size
     }
     writer.write(ejson_dumps(msg).encode())
     writer.write(b'\n')
     raw_resp = await reader.readline()
     resp = ejson_loads(raw_resp.decode())
     writer.close()
     print(resp)
Example #20
def test_perform_dustbin_show(app, alice_identity, file):
    with freeze_time('2012-01-01') as frozen_datetime:
        vlob = {
            'id': '2345',
            'read_trust_seed': '42',
            'write_trust_seed': '43'
        }
        blob = [{
            'blocks': [{
                'block': '4567',
                'digest': digest(b''),
                'size': 0
            }],
            'key': to_jsonb64(b'<dummy-key-00000000000000000001>')
        }]
        blob = ejson_dumps(blob).encode()
        blob = to_jsonb64(blob)
        eff = app.perform_delete(EDelete('/foo'))
        sequence = [
            (EIdentityGet(), const(alice_identity)),
            (EVlobRead(vlob['id'], vlob['read_trust_seed']),
             const({
                 'id': vlob['id'],
                 'blob': blob,
                 'version': 1
             })),
            (EVlobList(), const([])),
            (EVlobRead(vlob['id'], vlob['read_trust_seed'],
                       1), const({
                           'id': vlob['id'],
                           'blob': blob,
                           'version': 1
                       })),
            (EBlockDelete('4567'), conste(BlockNotFound('Block not found.'))),
            (EVlobDelete('2345'), conste(VlobNotFound('Vlob not found.'))),
        ]
        perform_sequence(sequence, eff)
        eff = app.perform_dustbin_show(EDustbinShow())
        sequence = [(EIdentityGet(), const(alice_identity))]
        dustbin = perform_sequence(sequence, eff)
        vlob['path'] = '/foo'
        vlob['removed_date'] = frozen_datetime().isoformat()
        vlob['key'] = to_jsonb64(b'<dummy-key-00000000000000000002>')
        assert dustbin == [vlob]
Example #21
def file(app, alice_identity, mock_crypto_passthrough):
    vlob = {'id': '2345', 'read_trust_seed': '42', 'write_trust_seed': '43'}
    block_id = '4567'
    blob = [{
        'blocks': [{
            'block': block_id,
            'digest': digest(b''),
            'size': 0
        }],
        'key': to_jsonb64(b'<dummy-key-00000000000000000001>')
    }]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    eff = app.perform_file_create(EFileCreate('/foo'))
    sequence = [(EBlockCreate(''), const(block_id)),
                (EVlobCreate(blob), const(vlob)),
                (EIdentityGet(), const(alice_identity))]
    ret = perform_sequence(sequence, eff)
    assert ret is None
    File.files = {}
Example #22
 def test_get_blocks(self, file):
     file.dirty = False
     file.version = 1
     vlob_id = '1234'
     block_ids = ['4567', '5678', '6789']
     chunk_digest = digest(b'')
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': chunk_digest,
             'size': 0
         }, {
             'block': block_ids[1],
             'digest': chunk_digest,
             'size': 0
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000001>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': chunk_digest,
             'size': 0
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000002>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     sequence = [
         (EVlobRead(vlob_id, '42',
                    1), const({
                        'id': vlob_id,
                        'blob': blob,
                        'version': 1
                    })),
     ]
     ret = perform_sequence(sequence, file.get_blocks())
     assert ret == block_ids
     assert file.dirty is False
     assert file.version == 1
Example #23
def file(mock_crypto_passthrough):
    block_id = '4567'
    blob = [{
        'blocks': [{
            'block': block_id,
            'digest': digest(b''),
            'size': 0
        }],
        'key': to_jsonb64(b'<dummy-key-00000000000000000001>')
    }]
    blob = ejson_dumps(blob).encode()
    blob = to_jsonb64(blob)
    sequence = [
        (EBlockCreate(''), const(block_id)),
        (EVlobCreate(blob),
         const({
             'id': '1234',
             'read_trust_seed': '42',
             'write_trust_seed': '43'
         })),
    ]
    return perform_sequence(sequence, File.create())
Example #24
 def handshake(self):
     if self.id:
         raise HandshakeError('Handshake already done.')
     challenge = _generate_challenge()
     query = {'handshake': 'challenge', 'challenge': challenge}
     yield Effect(EHandshakeSend(ejson_dumps(query)))
     raw_resp = yield Effect(EHandshakeRecv())
     try:
         resp = ejson_loads(raw_resp)
     except (TypeError, json.JSONDecodeError):
         error = HandshakeError('Invalid challenge response format')
         yield Effect(EHandshakeSend(error.to_raw()))
         raise error
     resp = HandshakeAnswerSchema().load(resp)
     claimed_identity = resp['identity']
     try:
         pubkey = yield Effect(EPubKeyGet(claimed_identity))
         pubkey.verify(resp['answer'], challenge.encode())
         yield Effect(EHandshakeSend('{"status": "ok", "handshake": "done"}'))
         self.id = claimed_identity
     except (TypeError, PubKeyNotFound, InvalidSignature):
         error = HandshakeError('Invalid signature, challenge or identity')
         yield Effect(EHandshakeSend(error.to_raw()))
         raise error
Example #25
 def on_event(sender):
     payload = ejson_dumps({'event': intent.event, 'sender': sender})
     context.queued_pushed_events.put_nowait(payload)
Example #26
 def test_commit(self, file):
     vlob_id = '1234'
     content = b'This is a test content.'
     block_ids = ['4567', '5678', '6789']
     new_vlob = {
         'id': '2345',
         'read_trust_seed': 'ABC',
         'write_trust_seed': 'DEF'
     }
     # Original content
     chunk_1 = content[:5]
     chunk_2 = content[5:14]
     chunk_3 = content[14:]
     blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }, {
             'block': block_ids[1],
             'digest': digest(chunk_2),
             'size': len(chunk_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000003>')
     }, {
         'blocks': [{
             'block': block_ids[2],
             'digest': digest(chunk_3),
             'size': len(chunk_3)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000004>')
     }]
     blob = ejson_dumps(blob).encode()
     blob = to_jsonb64(blob)
     # New content after truncate
     new_chunk_2 = b'is a'
     new_block_id = '7654'
     new_blob = [{
         'blocks': [{
             'block': block_ids[0],
             'digest': digest(chunk_1),
             'size': len(chunk_1)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000003>')
     }, {
         'blocks': [{
             'block': new_block_id,
             'digest': digest(new_chunk_2),
             'size': len(new_chunk_2)
         }],
         'key':
         to_jsonb64(b'<dummy-key-00000000000000000003>')
     }]
     new_blob = ejson_dumps(new_blob).encode()
     new_blob = to_jsonb64(new_blob)
     file.truncate(9)
     sequence = [
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': blob,
                        'version': 1
                    })),
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': blob,
                        'version': 1
                    })),
         (EBlockRead(block_ids[1]),
          const({
              'content': to_jsonb64(chunk_2),
              'creation_date': '2012-01-01T00:00:00'
          })), (EBlockCreate(to_jsonb64(new_chunk_2)), const(new_block_id)),
         (EVlobUpdate(vlob_id, '43', 1, new_blob), noop),
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': new_blob,
                        'version': 1
                    })),
         (EBlockDelete('5678'), conste(BlockNotFound('Block not found.'))),
         (EBlockDelete('6789'), noop),
         (EVlobRead('1234', '42',
                    1), const({
                        'id': '1234',
                        'blob': new_blob,
                        'version': 1
                    })), (EBlockSynchronize('4567'), const(True)),
         (EBlockSynchronize('7654'), const(False)),
         (EVlobSynchronize('1234'), const(new_vlob))
     ]
     ret = perform_sequence(sequence, file.commit())
     new_vlob['key'] = to_jsonb64(b'<dummy-key-00000000000000000002>')
     assert ret == new_vlob
     assert file.dirty is False
     assert file.version == 1
Example #27
    def test_find_matching_blocks(self, file):
        vlob_id = '1234'
        block_size = 4096
        # Contents
        contents = {}
        total_length = 0
        for index, length in enumerate([
                block_size + 1, block_size - 1, block_size, 2 * block_size + 2,
                2 * block_size - 2, 2 * block_size
        ]):
            content = b''.join(
                [str(random.randint(1, 9)).encode() for i in range(0, length)])
            contents[index] = content
            total_length += length
        # Blocks

        def generator():
            i = 2000
            while True:
                yield str(i)
                i += 1

        gen = generator()

        blocks = {}
        block_contents = {}
        block_id = 2000
        for index, content in contents.items():
            chunks = [
                content[i:i + block_size]
                for i in range(0, len(content), block_size)
            ]
            if not chunks:
                chunks = [b'']
            sequence = []
            for chunk in chunks:
                encoded_chunk = to_jsonb64(chunk)
                sequence.append((EBlockCreate(encoded_chunk),
                                 lambda id=id: next(gen)))  # TODO dirty
                block_contents[str(block_id)] = encoded_chunk
                block_id += 1
            blocks[index] = perform_sequence(sequence,
                                             file._build_file_blocks(content))
        # Create file
        blob = ejson_dumps([blocks[i] for i in range(0, len(blocks))]).encode()
        blob = to_jsonb64(blob)
        # All matching blocks
        sequence = [(EVlobRead(vlob_id, '42', 1),
                     const({
                         'id': vlob_id,
                         'blob': blob,
                         'version': 1
                     }))]
        matching_blocks = perform_sequence(sequence,
                                           file._find_matching_blocks())
        assert matching_blocks == {
            'pre_excluded_blocks': [],
            'pre_excluded_data': b'',
            'pre_included_data': b'',
            'included_blocks': [blocks[i] for i in range(0, len(blocks))],
            'post_included_data': b'',
            'post_excluded_data': b'',
            'post_excluded_blocks': []
        }
        # With offset
        delta = 10
        offset = (blocks[0]['blocks'][0]['size'] +
                  blocks[0]['blocks'][1]['size'] +
                  blocks[1]['blocks'][0]['size'] +
                  blocks[2]['blocks'][0]['size'] - delta)
        sequence = [(EVlobRead(vlob_id, '42', 1),
                     const({
                         'id': vlob_id,
                         'blob': blob,
                         'version': 1
                     })),
                    (EBlockRead('2003'),
                     const({
                         'content': block_contents['2003'],
                         'creation_date': '2012-01-01T00:00:00'
                     }))]
        matching_blocks = perform_sequence(
            sequence, file._find_matching_blocks(None, offset))
        pre_excluded_data = contents[2][:blocks[2]['blocks'][0]['size'] -
                                        delta]
        pre_included_data = contents[2][-delta:]
        assert matching_blocks == {
            'pre_excluded_blocks': [blocks[0], blocks[1]],
            'pre_excluded_data': pre_excluded_data,
            'pre_included_data': pre_included_data,
            'included_blocks': [blocks[i] for i in range(3, 6)],
            'post_included_data': b'',
            'post_excluded_data': b'',
            'post_excluded_blocks': []
        }
        # With small size
        delta = 10
        size = 5
        offset = (blocks[0]['blocks'][0]['size'] +
                  blocks[0]['blocks'][1]['size'] +
                  blocks[1]['blocks'][0]['size'] +
                  blocks[2]['blocks'][0]['size'] - delta)
        sequence = [(EVlobRead(vlob_id, '42', 1),
                     const({
                         'id': vlob_id,
                         'blob': blob,
                         'version': 1
                     })),
                    (EBlockRead(id='2003'),
                     const({
                         'content': block_contents['2003'],
                         'creation_date': '2012-01-01T00:00:00'
                     }))]
        matching_blocks = perform_sequence(
            sequence, file._find_matching_blocks(size, offset))
        pre_excluded_data = contents[2][:blocks[2]['blocks'][0]['size'] -
                                        delta]
        pre_included_data = contents[2][-delta:][:size]
        post_excluded_data = contents[2][-delta:][size:]
        assert matching_blocks == {
            'pre_excluded_blocks': [blocks[0], blocks[1]],
            'pre_excluded_data': pre_excluded_data,
            'pre_included_data': pre_included_data,
            'included_blocks': [],
            'post_included_data': b'',
            'post_excluded_data': post_excluded_data,
            'post_excluded_blocks': [blocks[i] for i in range(3, 6)]
        }
        # With big size
        delta = 10
        size = delta
        size += blocks[3]['blocks'][0]['size']
        size += blocks[3]['blocks'][1]['size']
        size += blocks[3]['blocks'][2]['size']
        size += 2 * delta
        offset = (blocks[0]['blocks'][0]['size'] +
                  blocks[0]['blocks'][1]['size'] +
                  blocks[1]['blocks'][0]['size'] +
                  blocks[2]['blocks'][0]['size'] - delta)
        sequence = [(EVlobRead(vlob_id, '42', 1),
                     const({
                         'id': vlob_id,
                         'blob': blob,
                         'version': 1
                     })),
                    (EBlockRead('2003'),
                     const({
                         'content': block_contents['2003'],
                         'creation_date': '2012-01-01T00:00:00'
                     })),
                    (EBlockRead('2007'),
                     const({
                         'content': block_contents['2007'],
                         'creation_date': '2012-01-01T00:00:00'
                     }))]
        matching_blocks = perform_sequence(
            sequence, file._find_matching_blocks(size, offset))
        pre_excluded_data = contents[2][:-delta]
        pre_included_data = contents[2][-delta:]
        post_included_data = contents[4][:2 * delta]
        post_excluded_data = contents[4][:block_size][2 * delta:]
        partial_block_4 = deepcopy(blocks[4])
        del partial_block_4['blocks'][0]
        assert matching_blocks == {
            'pre_excluded_blocks': [blocks[0], blocks[1]],
            'pre_excluded_data': pre_excluded_data,
            'pre_included_data': pre_included_data,
            'included_blocks': [blocks[3]],
            'post_included_data': post_included_data,
            'post_excluded_data': post_excluded_data,
            'post_excluded_blocks': [partial_block_4, blocks[5]]
        }
        # With big size and no delta
        size = blocks[3]['blocks'][0]['size']
        size += blocks[3]['blocks'][1]['size']
        size += blocks[3]['blocks'][2]['size']
        offset = (blocks[0]['blocks'][0]['size'] +
                  blocks[0]['blocks'][1]['size'] +
                  blocks[1]['blocks'][0]['size'] +
                  blocks[2]['blocks'][0]['size'])
        sequence = [
            (EVlobRead(vlob_id, '42',
                       1), const({
                           'id': vlob_id,
                           'blob': blob,
                           'version': 1
                       })),
        ]
        matching_blocks = perform_sequence(
            sequence, file._find_matching_blocks(size, offset))
        assert matching_blocks == {
            'pre_excluded_blocks': [blocks[0], blocks[1], blocks[2]],
            'pre_excluded_data': b'',
            'pre_included_data': b'',
            'included_blocks': [blocks[3]],
            'post_included_data': b'',
            'post_excluded_data': b'',
            'post_excluded_blocks': [blocks[4], blocks[5]]
        }
        # With total size
        sequence = [
            (EVlobRead(vlob_id, '42',
                       1), const({
                           'id': vlob_id,
                           'blob': blob,
                           'version': 1
                       })),
        ]
        matching_blocks = perform_sequence(
            sequence, file._find_matching_blocks(total_length, 0))
        assert matching_blocks == {
            'pre_excluded_blocks': [],
            'pre_excluded_data': b'',
            'pre_included_data': b'',
            'included_blocks': [blocks[i] for i in range(0, 6)],
            'post_included_data': b'',
            'post_excluded_data': b'',
            'post_excluded_blocks': []
        }
Example #28
 def to_raw(self):
     return tools.ejson_dumps(self.to_dict())
Example #29
 def perform_client_event(intent):
     payload = ejson_dumps({'event': intent.event, 'sender': intent.sender})
     client_context.queued_pushed_events.put_nowait(payload)
Example #30
 def flush(self):
     if not self.modifications:
         return
     # Merge all modifications to build final content
     builder = ContentBuilder()
     shortest_truncate = None
     for modification in self.modifications:
         if modification[0] == self.write:
             builder.write(*modification[1:])
         elif modification[0] == self.truncate:
             builder.truncate(modification[1])
             if not shortest_truncate or shortest_truncate > modification[1]:
                 shortest_truncate = modification[1]
         else:
             raise NotImplementedError()
     self.modifications = []
     # Truncate file
     previous_block_ids = yield self.get_blocks()
     if shortest_truncate is not None:
         matching_blocks = yield self._find_matching_blocks(shortest_truncate, 0)
         blob = []
         blob += matching_blocks['included_blocks']
         new_blocks = yield self._build_file_blocks(matching_blocks['post_included_data'])
         blob.append(new_blocks)
         blob = ejson_dumps(blob)
         blob = blob.encode()
         encrypted_blob = self.encryptor.encrypt(blob)
         encrypted_blob = to_jsonb64(encrypted_blob)
         yield Effect(EVlobUpdate(self.id,
                                  self.write_trust_seed,
                                  self.version + 1,
                                  encrypted_blob))
         self.dirty = True
     # Write new contents
     for offset, content in builder.contents.items():
         matching_blocks = yield self._find_matching_blocks(len(content), offset)
         new_data = matching_blocks['pre_excluded_data']
         new_data += content
         new_data += matching_blocks['post_excluded_data']
         blob = []
         blob += matching_blocks['pre_excluded_blocks']
         new_blocks = yield self._build_file_blocks(new_data)
         blob.append(new_blocks)
         blob += matching_blocks['post_excluded_blocks']
         blob = ejson_dumps(blob)
         blob = blob.encode()
         encrypted_blob = self.encryptor.encrypt(blob)
         encrypted_blob = to_jsonb64(encrypted_blob)
         yield Effect(EVlobUpdate(self.id,
                                  self.write_trust_seed,
                                  self.version + 1,
                                  encrypted_blob))
         self.dirty = True
     # Clean blocks
     current_block_ids = yield self.get_blocks()
     for block_id in previous_block_ids:
         if block_id not in current_block_ids:
             try:
                 yield Effect(EBlockDelete(block_id))
             except BlockNotFound:
                 pass