Example #1
    def on_sync_success(self, result) -> None:
        """
        Called when synchronisation of data via the API succeeds.
        """
        remote_sources, remote_submissions, remote_replies = result

        storage.update_local_storage(self.session, remote_sources,
                                     remote_submissions, remote_replies,
                                     self.data_dir)

        # Set last sync flag.
        with open(self.sync_flag, 'w') as f:
            f.write(arrow.now().format())

        # import keys into keyring
        for source in remote_sources:
            if source.key and source.key.get('type', None) == 'PGP':
                pub_key = source.key.get('public', None)
                if not pub_key:
                    continue
                try:
                    self.gpg.import_key(source.uuid, pub_key)
                except CryptoError:
                    logger.warning('Failed to import key for source {}'.format(
                        source.uuid))

        self.update_sources()
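A note on the sync flag: the snippet stores arrow.now().format(), so the timestamp can be parsed back with arrow.get(). A minimal sketch of such a reader (the helper name and error handling are assumptions, not part of the example above):

import arrow
from arrow.parser import ParserError

def read_last_sync(sync_flag_path):
    # Hypothetical helper: parse the timestamp written by on_sync_success.
    # Returns an Arrow object, or None if the flag is missing or unparseable.
    try:
        with open(sync_flag_path) as f:
            return arrow.get(f.read().strip())
    except (FileNotFoundError, ParserError):
        return None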
Example #2
def test_update_local_storage(homedir, mocker):
    """
    Assuming no errors getting data, check the expected functions to update
    the state of the local database are called with the necessary data.
    """
    remote_source = make_remote_source()
    remote_message = mocker.Mock(filename='1-foo.msg.gpg')
    remote_file = mocker.Mock(filename='2-foo.gpg')
    remote_submissions = [remote_message, remote_file]
    remote_reply = mocker.MagicMock()
    # Some local source, submission and reply objects from the local database.
    mock_session = mocker.MagicMock()
    local_source = mocker.MagicMock()
    local_file = mocker.MagicMock()
    local_message = mocker.MagicMock()
    local_reply = mocker.MagicMock()
    mock_session.query().all = mocker.Mock()
    mock_session.query().all.side_effect = [[local_source], [local_file],
                                            [local_message], [local_reply]]
    src_fn = mocker.patch('securedrop_client.storage.update_sources')
    rpl_fn = mocker.patch('securedrop_client.storage.update_replies')
    file_fn = mocker.patch('securedrop_client.storage.update_files')
    msg_fn = mocker.patch('securedrop_client.storage.update_messages')

    update_local_storage(mock_session, [remote_source], remote_submissions,
                         [remote_reply], homedir)
    src_fn.assert_called_once_with([remote_source], [local_source],
                                   mock_session, homedir)
    rpl_fn.assert_called_once_with([remote_reply], [local_reply], mock_session,
                                   homedir)
    file_fn.assert_called_once_with([remote_file], [local_file], mock_session,
                                    homedir)
    msg_fn.assert_called_once_with([remote_message], [local_message],
                                   mock_session, homedir)
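The assertions above pin down the shape of update_local_storage: submissions are split into messages (filenames ending in 'msg.gpg') and files, the four local tables are queried in the order matching the side_effect list, and each update_* helper receives (remote, local, session, data_dir). A minimal sketch consistent with that, assuming the model names Source, File, Message and Reply (the real implementation in securedrop_client.storage may differ in detail):

def update_local_storage(session, remote_sources, remote_submissions,
                         remote_replies, data_dir):
    # Split submissions on filename suffix, as the test's fixtures imply.
    remote_messages = [s for s in remote_submissions
                       if s.filename.endswith('msg.gpg')]
    remote_files = [s for s in remote_submissions
                    if not s.filename.endswith('msg.gpg')]

    # Query order matches the test's side_effect list.
    local_sources = session.query(Source).all()
    local_files = session.query(File).all()
    local_messages = session.query(Message).all()
    local_replies = session.query(Reply).all()

    update_sources(remote_sources, local_sources, session, data_dir)
    update_files(remote_files, local_files, session, data_dir)
    update_messages(remote_messages, local_messages, session, data_dir)
    update_replies(remote_replies, local_replies, session, data_dir)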
Example #3
    def on_sync_success(self, result) -> None:
        """
        Called when synchronisation of data via the API succeeds.
        """
        # Update db with new metadata
        remote_sources, remote_submissions, remote_replies = result
        storage.update_local_storage(self.session, remote_sources,
                                     remote_submissions, remote_replies,
                                     self.data_dir)

        # Set last sync flag
        with open(self.sync_flag, 'w') as f:
            f.write(arrow.now().format())

        # Import keys into keyring
        for source in remote_sources:
            if source.key and source.key.get('type', None) == 'PGP':
                pub_key = source.key.get('public', None)
                fingerprint = source.key.get('fingerprint', None)
                if not pub_key or not fingerprint:
                    continue
                try:
                    self.gpg.import_key(source.uuid, pub_key, fingerprint)
                except CryptoError:
                    logger.warning('Failed to import key for source {}'.format(
                        source.uuid))

        self.update_sources()
        self.download_new_messages()
        self.download_new_replies()
        self.sync_events.emit('synced')
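This revision also queues downloads and then emits a 'synced' event. Hypothetical wiring for a listener (the widget and slot names here are assumptions; sync_events is presumably a Qt signal carrying a string):

def on_sync_event(data: str) -> None:
    # React to the 'synced' string emitted at the end of on_sync_success.
    if data == 'synced':
        status_bar.showMessage('Synced just now')

controller.sync_events.connect(on_sync_event)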
Example #4
def test_update_local_storage(homedir, mocker):
    """
    Assuming no errors getting data, check the expected functions to update
    the state of the local database are called with the necessary data.
    """
    source = make_remote_source()
    submission = mocker.MagicMock()
    reply = mocker.MagicMock()
    sources = [source]
    submissions = [submission]
    replies = [reply]
    # Some local source, submission and reply objects from the local database.
    mock_session = mocker.MagicMock()
    local_source = mocker.MagicMock()
    local_submission = mocker.MagicMock()
    local_replies = mocker.MagicMock()
    mock_session.query.side_effect = [
        [local_source], [local_submission], [local_replies]]
    src_fn = mocker.patch('securedrop_client.storage.update_sources')
    rpl_fn = mocker.patch('securedrop_client.storage.update_replies')
    sub_fn = mocker.patch('securedrop_client.storage.update_submissions')

    update_local_storage(mock_session, sources, submissions, replies, homedir)
    src_fn.assert_called_once_with([source], [local_source], mock_session,
                                   homedir)
    rpl_fn.assert_called_once_with([reply], [local_replies], mock_session,
                                   homedir)
    sub_fn.assert_called_once_with([submission], [local_submission],
                                   mock_session, homedir)
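The patched names show this is the older three-table variant, before submissions were split into files and messages (compare Example #2). A sketch of the shape this test implies, with Source, Submission and Reply as assumed model names; note that here query itself is mocked rather than query().all, so the helpers receive the query results directly:

def update_local_storage(session, remote_sources, remote_submissions,
                         remote_replies, data_dir):
    # Query order matches the test's side_effect list.
    local_sources = session.query(Source)
    local_submissions = session.query(Submission)
    local_replies = session.query(Reply)
    update_sources(remote_sources, local_sources, session, data_dir)
    update_replies(remote_replies, local_replies, session, data_dir)
    update_submissions(remote_submissions, local_submissions, session,
                       data_dir)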
Example #5
    def call_api(self, api_client: API, session: Session) -> Any:
        '''
        Override ApiJob.

        Download new metadata, update the local database and import new keys;
        the success signal then lets the controller add any new download jobs.
        '''

        # TODO: Once https://github.com/freedomofpress/securedrop-client/issues/648 is resolved,
        # we will want to pass the default request timeout to API calls instead of setting it
        # on the API object directly.
        api_client.default_request_timeout = 40
        remote_sources, remote_submissions, remote_replies = get_remote_data(
            api_client)

        update_local_storage(session, self.gpg, remote_sources,
                             remote_submissions, remote_replies, self.data_dir)
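get_remote_data is not shown here, but the unpacking above suggests it fans out to the SDK. A plausible sketch (assuming the sdclientapi methods get_sources, get_all_submissions and get_all_replies; the real helper may handle errors or batching differently):

def get_remote_data(api):
    # Fetch sources, submissions and replies in one pass so callers can
    # unpack a single tuple, as call_api does above.
    return api.get_sources(), api.get_all_submissions(), api.get_all_replies()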
Example #6
    def on_synced(self, result):
        """
        Called when synchronisation of data via the API is complete.
        """
        self.sync_events.emit('synced')
        if isinstance(result, tuple):
            remote_sources, remote_submissions, remote_replies = result

            storage.update_local_storage(self.session, remote_sources,
                                         remote_submissions,
                                         remote_replies, self.data_dir)

            # clean up locally cached conversation views
            remote_source_uuids = [s.uuid for s in remote_sources]
            cached_sources = list(self.gui.conversations.keys())
            for cached_source in cached_sources:
                if cached_source not in remote_source_uuids:
                    self.gui.conversations.pop(cached_source, None)

            # Set last sync flag.
            with open(self.sync_flag, 'w') as f:
                f.write(arrow.now().format())

            # import keys into keyring
            for source in remote_sources:
                if source.key and source.key.get('type', None) == 'PGP':
                    pub_key = source.key.get('public', None)
                    if not pub_key:
                        continue
                    try:
                        self.gpg.import_key(source.uuid, pub_key)
                    except CryptoError:
                        logger.warning('Failed to import key for source {}'.format(source.uuid))

            self.update_conversation_views()
        else:
            # How to handle a failure? Exceptions are already logged. Perhaps
            # a message in the UI?
            pass

        self.update_sources()
Example #7
    def on_synced(self, result):
        """
        Called when synchronisation of data via the API is complete.
        """
        if result and isinstance(self.api_runner.result, tuple):
            remote_sources, remote_submissions, remote_replies = \
                self.api_runner.result
            self.call_reset()
            storage.update_local_storage(self.session, remote_sources,
                                         remote_submissions, remote_replies)
            # Set last sync flag.
            with open(self.sync_flag, 'w') as f:
                f.write(arrow.now().format())
            # TODO: show something in the conversation view?
            # self.gui.show_conversation_for()
        else:
            # How to handle a failure? Exceptions are already logged. Perhaps
            # a message in the UI?
            pass
        self.update_sources()
Example #8
def test_sync_delete_race(homedir, mocker, session_maker, session):
    """
    Test a race between sync and source deletion (#797).

    The original failure scenario:
      0. New source submits message 1.
      1. Sync occurs in client. Journalist sees message 1.
      2. Source submits message 2.
      3. Journalist simultaneously deletes the source while the sync
         begins. Deletion completes as it occurs independently of the
         sync, but by this time the sync has collected the list of new
         messages, which includes message 2.
      4. Source is gone, yet the logic in the sync will attempt to add
         message 2 which corresponds to a source that is deleted.
    """

    source = factory.RemoteSource()
    message1 = make_remote_message(source.uuid)

    sources = [source]
    submissions = [message1]
    replies = []

    update_local_storage(session, sources, submissions, replies, homedir)

    assert source_exists(session, source.uuid)
    get_message(session, message1.uuid)

    message2 = make_remote_message(source.uuid, file_counter=2)
    submissions = [message1, message2]

    class Deleter(QThread):
        def __init__(self, source_uuid):
            super().__init__()
            self.source_uuid = source_uuid

        def run(self):
            session = db.make_session_maker(homedir)()
            session.begin(subtransactions=True)
            delete_local_source_by_uuid(session, self.source_uuid, homedir)
            session.commit()
            self.exit()

    deleter = Deleter(source.uuid)

    def delayed_update_messages(remote_submissions, local_submissions, session,
                                data_dir):
        assert source_exists(session, source.uuid)
        deleter.start()
        time.sleep(1)

        # This next assert should fail if transactions are working, as
        # the source should still be visible in this session -- it's
        # only been deleted in the Deleter's session. If transactions
        # are *not* working, the deletion will be visible here.
        assert source_exists(session, source.uuid) is False
        update_messages(remote_submissions, local_submissions, session,
                        data_dir)

    mocker.patch("securedrop_client.storage.update_messages",
                 delayed_update_messages)

    # simulate update_local_storage being called as part of the sync operation
    update_local_storage(session, sources, [message1, message2], [], homedir)

    assert source_exists(session, source.uuid) is False
    with pytest.raises(NoResultFound):
        get_message(session, message1.uuid)
    with pytest.raises(NoResultFound):
        get_message(session, message2.uuid)
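The comments in delayed_update_messages point at the fix: if each sync's writes run in their own transaction, a concurrent deletion in another session lands entirely before or entirely after the update, and a failure mid-update rolls everything back. A rough sketch of that pattern (assumed, not necessarily the project's exact change):

def transactional_sync(session, remote_sources, remote_submissions,
                       remote_replies, data_dir):
    # Mirror the Deleter's session.begin(subtransactions=True) so the two
    # sessions stay isolated from each other until commit.
    session.begin(subtransactions=True)
    try:
        update_local_storage(session, remote_sources, remote_submissions,
                             remote_replies, data_dir)
        session.commit()
    except Exception:
        session.rollback()
        raise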