Example #1
0
def test_delete_local_source_by_uuid(homedir, mocker):
    """
    Delete the referenced source in the session. Ensure that both
    the database object and the corresponding source documents are deleted.
    """
    mock_session = mocker.MagicMock()
    source = factory.RemoteSource()
    source.journalist_filename = "sourcey_mcsource"

    # Make source folder
    source_directory = os.path.join(homedir, source.journalist_filename)
    os.mkdir(source_directory)

    # Make document in source disk to be deleted
    path_to_source_document = os.path.join(source_directory, "teehee")
    with open(path_to_source_document, "w") as f:
        f.write("this is a source document")

    mock_session.query().filter_by().one_or_none.return_value = source
    # Drop the query call recorded while stubbing the return value above so
    # the assert_called_once_with below only sees the call under test.
    mock_session.query.reset_mock()
    delete_local_source_by_uuid(mock_session, "uuid", homedir)
    mock_session.query.assert_called_once_with(securedrop_client.db.Source)
    mock_session.query().filter_by.assert_called_once_with(uuid="uuid")
    assert mock_session.query().filter_by().one_or_none.call_count == 1
    mock_session.delete.assert_called_once_with(source)
    mock_session.commit.assert_called_once_with()

    # Ensure both the source folder and its contained file are gone. Checking
    # existence directly (rather than opening inside pytest.raises) avoids
    # leaking a file handle on failure and does not depend on the OS error
    # raised when open() is given a directory path.
    assert not os.path.exists(path_to_source_document)
    assert not os.path.exists(source_directory)
Example #2
0
def test_MetadataSyncJob_success_with_missing_key(mocker, homedir, session,
                                                  session_maker):
    """
    Check that we can gracefully handle missing source keys.
    """
    gpg = GpgHelper(homedir, session_maker, is_qubes=False)
    job = MetadataSyncJob(homedir, gpg)

    # A remote source whose key material is entirely empty.
    keyless_source = factory.RemoteSource(
        key={'type': 'PGP', 'public': '', 'fingerprint': ''})

    key_import = mocker.patch.object(job.gpg, 'import_key')
    remote_data = mocker.patch(
        'securedrop_client.api_jobs.sync.get_remote_data',
        return_value=([keyless_source], [], []))

    api_client = mocker.MagicMock()
    api_client.default_request_timeout = mocker.MagicMock()

    job.call_api(api_client, session)

    # No key data means no import attempt, but the sync still ran once.
    assert key_import.call_count == 0
    assert remote_data.call_count == 1
Example #3
0
def test_MetadataSyncJob_success_with_key_import_fail(mocker, homedir, session,
                                                      session_maker):
    """
    Check that we can gracefully handle a key import failure.
    """
    gpg = GpgHelper(homedir, session_maker, is_qubes=False)
    job = MetadataSyncJob(homedir, gpg)

    remote_source = factory.RemoteSource(
        key={'type': 'PGP', 'public': PUB_KEY, 'fingerprint': '123456ABC'})

    # The import blows up with CryptoError; the job must swallow it.
    failing_import = mocker.patch.object(job.gpg, 'import_key',
                                         side_effect=CryptoError)
    remote_data = mocker.patch(
        'securedrop_client.api_jobs.sync.get_remote_data',
        return_value=([remote_source], [], []))

    api_client = mocker.MagicMock()
    api_client.default_request_timeout = mocker.MagicMock()

    job.call_api(api_client, session)

    # The import was attempted with the source's uuid, key and fingerprint.
    positional_args = failing_import.call_args[0]
    assert positional_args[0] == remote_source.uuid
    assert positional_args[1] == remote_source.key['public']
    assert positional_args[2] == remote_source.key['fingerprint']
    assert remote_data.call_count == 1
Example #4
0
def test_source_exists_true(homedir, mocker):
    """
    Check that the method returns True if a source is returned from the query.
    """
    session = mocker.MagicMock()
    source = factory.RemoteSource()
    source.uuid = "test-source-uuid"
    # Stub the ORM chain so the uuid lookup finds our source.
    session.query().filter_by().one.return_value = source
    assert source_exists(session, "test-source-uuid")
Example #5
0
def test_get_remote_data(mocker):
    """
    In the good case, a tuple of results is returned.
    """
    # Stub an API client whose endpoints each return one known object.
    mock_api = mocker.MagicMock()
    remote_source = factory.RemoteSource()
    remote_submission = mocker.MagicMock()
    remote_reply = mocker.MagicMock()
    mock_api.get_sources.return_value = [remote_source]
    mock_api.get_all_submissions.return_value = [remote_submission]
    mock_api.get_all_replies.return_value = [remote_reply]

    sources, submissions, replies = get_remote_data(mock_api)

    # Each element of the tuple mirrors the corresponding endpoint result.
    assert sources == [remote_source]
    assert submissions == [remote_submission]
    assert replies == [remote_reply]
Example #6
0
def test_delete_local_source_by_uuid_no_files(homedir, mocker):
    """
    Delete the referenced source in the session. If there are no files
    corresponding to this source, no exception should be raised.
    """
    mock_session = mocker.MagicMock()
    local_source = factory.RemoteSource()
    local_source.journalist_filename = "sourcey_mcsource"
    mock_session.query().filter_by().one_or_none.return_value = local_source
    # Forget the calls recorded while stubbing so the assertions below only
    # observe what delete_local_source_by_uuid itself does.
    mock_session.query.reset_mock()

    delete_local_source_by_uuid(mock_session, "uuid", homedir)

    mock_session.query.assert_called_once_with(securedrop_client.db.Source)
    mock_session.query().filter_by.assert_called_once_with(uuid="uuid")
    assert mock_session.query().filter_by().one_or_none.call_count == 1
    mock_session.delete.assert_called_once_with(local_source)
    mock_session.commit.assert_called_once_with()
Example #7
0
def test_MetadataSyncJob_success(mocker, homedir, session, session_maker):
    """
    Check that in the happy path the sync job fetches remote data exactly once.
    """
    job = MetadataSyncJob(homedir)

    mock_source = factory.RemoteSource(key={
        "type": "PGP",
        "public": PUB_KEY,
        "fingerprint": "123456ABC"
    })

    mock_get_remote_data = mocker.patch(
        "securedrop_client.api_jobs.sync.get_remote_data",
        return_value=([mock_source], [], []))

    api_client = mocker.MagicMock()
    # Fixed: this assignment was duplicated in the original.
    api_client.default_request_timeout = mocker.MagicMock()

    job.call_api(api_client, session)

    assert mock_get_remote_data.call_count == 1
Example #8
0
def test_MetadataSyncJob_only_import_new_source_keys(mocker, homedir, session,
                                                     session_maker):
    """
    Verify that we only import source keys we don't already have.
    """
    gpg = GpgHelper(homedir, session_maker, is_qubes=False)
    job = MetadataSyncJob(homedir, gpg)

    remote_source = factory.RemoteSource(key={
        'type': 'PGP',
        'public': PUB_KEY,
        'fingerprint': '123456ABC',
    })

    remote_data = mocker.patch(
        'securedrop_client.api_jobs.sync.get_remote_data',
        return_value=([remote_source], [], []))

    api_client = mocker.MagicMock()
    api_client.default_request_timeout = mocker.MagicMock()

    crypto_logger = mocker.patch('securedrop_client.crypto.logger')
    storage_logger = mocker.patch('securedrop_client.storage.logger')

    # First sync: the key is unknown locally, so it gets imported.
    job.call_api(api_client, session)

    assert remote_data.call_count == 1

    first_crypto_debug = crypto_logger.debug.call_args_list[0][0]
    assert first_crypto_debug == ('Importing key with fingerprint %s',
                                  remote_source.key['fingerprint'])

    # Second sync: the same key is already present, so it is skipped.
    job.call_api(api_client, session)

    assert remote_data.call_count == 2

    second_storage_debug = storage_logger.debug.call_args_list[1][0][0]
    assert second_storage_debug == 'Source key data is unchanged'
Example #9
0
def test_MetadataSyncJob_success_with_missing_key(mocker, homedir, session,
                                                  session_maker):
    """
    Check that we can gracefully handle missing source keys.
    """
    job = MetadataSyncJob(homedir)

    # A remote source with completely empty key material.
    keyless_source = factory.RemoteSource(
        key={"type": "PGP", "public": "", "fingerprint": ""})

    remote_data = mocker.patch(
        "securedrop_client.api_jobs.sync.get_remote_data",
        return_value=([keyless_source], [], []))

    api_client = mocker.MagicMock()
    api_client.default_request_timeout = mocker.MagicMock()

    # Must complete without raising despite the missing key.
    job.call_api(api_client, session)

    assert remote_data.call_count == 1
Example #10
0
def test_update_local_storage(homedir, mocker, session_maker):
    """
    Assuming no errors getting data, check the expected functions to update
    the state of the local database are called with the necessary data.
    """
    # Remote source, submissions (one message, one file) and reply objects
    # as they would come back from the API.
    remote_source = factory.RemoteSource()
    remote_message = mocker.Mock(filename="1-foo.msg.gpg")
    remote_file = mocker.Mock(filename="2-foo.gpg")
    remote_submissions = [remote_message, remote_file]
    remote_reply = mocker.MagicMock()

    # Local counterparts handed back by the stubbed session, in the order
    # the code under test queries them: files, messages, replies; sources
    # come through the order_by() chain.
    mock_session = mocker.MagicMock()
    local_source = mocker.MagicMock()
    local_file = mocker.MagicMock()
    local_message = mocker.MagicMock()
    local_reply = mocker.MagicMock()
    mock_session.query().all = mocker.Mock()
    mock_session.query().all.side_effect = [[local_file], [local_message],
                                            [local_reply]]
    mock_session.query().order_by().all = mocker.Mock()
    mock_session.query().order_by().all.side_effect = [[local_source]]

    update_sources_fn = mocker.patch("securedrop_client.storage.update_sources")
    update_replies_fn = mocker.patch("securedrop_client.storage.update_replies")
    update_files_fn = mocker.patch("securedrop_client.storage.update_files")
    update_messages_fn = mocker.patch(
        "securedrop_client.storage.update_messages")

    update_local_storage(mock_session, [remote_source], remote_submissions,
                         [remote_reply], homedir)

    # Each updater receives its matching remote/local pair plus the session
    # and data directory.
    update_sources_fn.assert_called_once_with([remote_source], [local_source],
                                              mock_session, homedir)
    update_replies_fn.assert_called_once_with([remote_reply], [local_reply],
                                              mock_session, homedir)
    update_files_fn.assert_called_once_with([remote_file], [local_file],
                                            mock_session, homedir)
    update_messages_fn.assert_called_once_with([remote_message],
                                               [local_message],
                                               mock_session, homedir)
Example #11
0
def test_update_sources(homedir, mocker, session_maker, session):
    """
    Check that:

    * Existing sources are updated in the local database.
    * New sources have an entry in the local database.
    * Local sources not returned by the remote server are deleted from the
      local database.
    """
    # This remote source exists locally and will be updated.
    source_update = factory.RemoteSource(
        journalist_designation="source update")

    # This remote source does not exist locally and will be created.
    source_create = factory.RemoteSource(
        journalist_designation="source create")

    remote_sources = [source_update, source_create]

    # This local source already exists in the API results and will be updated.
    # Its key fields start empty so the update must populate them from the
    # remote source's key data (asserted below).
    local_source1 = factory.Source(
        journalist_designation=source_update.journalist_designation,
        uuid=source_update.uuid,
        public_key=None,
        fingerprint=None,
    )

    # This local source does not exist in the API results and will be
    # deleted from the local database.
    local_source2 = factory.Source(journalist_designation="beep_boop")

    session.add(local_source1)
    session.add(local_source2)
    session.commit()

    local_sources = [local_source1, local_source2]

    # Stub out filesystem deletion so we only verify it was requested.
    file_delete_fcn = mocker.patch(
        "securedrop_client.storage.delete_source_collection")

    update_sources(remote_sources, local_sources, session, homedir)

    # Check the expected local source object has been updated with values from
    # the API.
    updated_source = session.query(
        db.Source).filter_by(uuid=source_update.uuid).one()
    assert updated_source.journalist_designation == source_update.journalist_designation
    assert updated_source.is_flagged == source_update.is_flagged
    assert updated_source.public_key == source_update.key["public"]
    assert updated_source.fingerprint == source_update.key["fingerprint"]
    assert updated_source.interaction_count == source_update.interaction_count
    assert updated_source.is_starred == source_update.is_starred
    assert updated_source.last_updated == parse(source_update.last_updated)

    # Check the expected local source object has been created with values from
    # the API.
    new_source = session.query(
        db.Source).filter_by(uuid=source_create.uuid).one()
    assert new_source.uuid == source_create.uuid
    assert new_source.journalist_designation == source_create.journalist_designation
    assert new_source.is_flagged == source_create.is_flagged
    assert new_source.public_key == source_create.key["public"]
    assert new_source.fingerprint == source_create.key["fingerprint"]
    assert new_source.interaction_count == source_create.interaction_count
    assert new_source.is_starred == source_create.is_starred
    assert new_source.last_updated == parse(source_create.last_updated)

    # Check that the local source not present in the API results was deleted.
    with pytest.raises(NoResultFound):
        session.query(db.Source).filter_by(uuid=local_source2.uuid).one()

    # Ensure that we called the method to delete the source collection.
    # This will delete any content in that source's data directory.
    assert file_delete_fcn.call_count == 1
Example #12
0
def test_sync_delete_race(homedir, mocker, session_maker, session):
    """
    Test a race between sync and source deletion (#797).

    The original failure scenario:
      0. New source submits message 1.
      1. Sync occurs in client. Journalist sees message 1.
      2. Source submits message 2.
      3. Journalist simultaneously deletes the source while the sync
         begins. Deletion completes as it occurs independently of the
         sync, but by this time the sync has collected the list of new
         messages, which includes message 2.
      4. Source is gone, yet the logic in the sync will attempt to add
         message 2 which corresponds to a source that is deleted.
    """

    source = factory.RemoteSource()
    message1 = make_remote_message(source.uuid)

    sources = [source]
    submissions = [message1]
    replies = []

    update_local_storage(session, sources, submissions, replies, homedir)

    # The first sync stored the source and its first message.
    assert source_exists(session, source.uuid)
    get_message(session, message1.uuid)

    message2 = make_remote_message(source.uuid, file_counter=2)
    submissions = [message1, message2]

    class Deleter(QThread):
        """Deletes the source in its own session, simulating the journalist's
        deletion running concurrently with the sync."""

        def __init__(self, source_uuid):
            super().__init__()
            self.source_uuid = source_uuid

        def run(self):
            session = db.make_session_maker(homedir)()
            session.begin(subtransactions=True)
            delete_local_source_by_uuid(session, self.source_uuid, homedir)
            session.commit()
            self.exit()

    deleter = Deleter(source.uuid)

    def delayed_update_messages(remote_submissions, local_submissions, session,
                                data_dir):
        assert source_exists(session, source.uuid)
        deleter.start()
        time.sleep(1)

        # This next assert should fail if transactions are working, as
        # the source should still be visible in this session -- it's
        # only been deleted in the Deleter's session. If transactions
        # are *not* working, the deletion will be visible here.
        assert source_exists(session, source.uuid) is False
        update_messages(remote_submissions, local_submissions, session,
                        data_dir)

    mocker.patch("securedrop_client.storage.update_messages",
                 delayed_update_messages)

    # simulate update_local_storage being called as part of the sync operation
    update_local_storage(session, sources, [message1, message2], [], homedir)

    assert source_exists(session, source.uuid) is False
    # Each lookup needs its own pytest.raises block: any statement after the
    # first raising call inside one block never executes, so the original
    # combined block left the message2 check as dead code (and duplicated the
    # message1 check).
    with pytest.raises(NoResultFound):
        get_message(session, message1.uuid)
    with pytest.raises(NoResultFound):
        get_message(session, message2.uuid)