def test_ApiJobQueue_start_if_queues_running(mocker):
    '''
    Ensure token is passed to the queues that are already started.
    '''
    mock_api = mocker.MagicMock()
    mock_client = mocker.MagicMock()
    mock_session_maker = mocker.MagicMock()
    job_queue = ApiJobQueue(mock_client, mock_session_maker)
    mock_main_queue = mocker.patch.object(job_queue, 'main_queue')
    mock_download_file_queue = mocker.patch.object(job_queue, 'download_file_queue')
    mock_main_thread = mocker.patch.object(job_queue, 'main_thread')
    mock_download_file_thread = mocker.patch.object(job_queue, 'download_file_thread')
    job_queue.main_thread.isRunning = mocker.MagicMock(return_value=True)
    job_queue.download_file_thread.isRunning = mocker.MagicMock(return_value=True)

    job_queue.start(mock_api)

    assert mock_main_queue.api_client == mock_api
    assert mock_download_file_queue.api_client == mock_api
    assert not mock_main_thread.start.called
    assert not mock_download_file_thread.start.called
def test_ApiJobQueue_stop_stops_queue_threads(mocker):
    job_queue = ApiJobQueue(mocker.MagicMock(), mocker.MagicMock())

    job_queue.stop()

    assert not job_queue.main_thread.isRunning()
    assert not job_queue.download_file_thread.isRunning()
def test_ApiJobQueue_start_if_queues_not_running(mocker):
    """
    Ensure token is passed to the queues and that they are started.
    """
    mock_api = mocker.MagicMock()
    mock_client = mocker.MagicMock()
    mock_session_maker = mocker.MagicMock()
    job_queue = ApiJobQueue(mock_client, mock_session_maker)
    mock_main_queue = mocker.patch.object(job_queue, "main_queue")
    mock_download_file_queue = mocker.patch.object(job_queue, "download_file_queue")
    mock_main_thread = mocker.patch.object(job_queue, "main_thread")
    mock_download_file_thread = mocker.patch.object(job_queue, "download_file_thread")
    job_queue.main_thread.isRunning = mocker.MagicMock(return_value=False)
    job_queue.download_file_thread.isRunning = mocker.MagicMock(return_value=False)

    job_queue.start(mock_api)

    assert mock_main_queue.api_client == mock_api
    assert mock_download_file_queue.api_client == mock_api
    mock_main_thread.start.assert_called_once_with()
    mock_download_file_thread.start.assert_called_once_with()
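# A minimal sketch of the `start` behavior the start tests above assume
# (an illustration, not the actual ApiJobQueue implementation): the token is
# handed to both queues, and the worker threads start only when they are not
# already running.
def _sketch_of_api_job_queue_start(job_queue, api_client):
    job_queue.main_queue.api_client = api_client
    job_queue.download_file_queue.api_client = api_client
    if not job_queue.main_thread.isRunning():
        job_queue.main_thread.start()
    if not job_queue.download_file_thread.isRunning():
        job_queue.download_file_thread.start()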
def test_ApiJobQueue_pause_queues(mocker):
    job_queue = ApiJobQueue(mocker.MagicMock(), mocker.MagicMock())
    mocker.patch.object(job_queue, 'paused')
    pause_job = PauseQueueJob()
    mocker.patch('securedrop_client.queue.PauseQueueJob', return_value=pause_job)

    job_queue.on_queue_paused()

    job_queue.paused.emit.assert_called_once_with()
def test_ApiJobQueue_on_file_download_queue_paused(mocker):
    job_queue = ApiJobQueue(mocker.MagicMock(), mocker.MagicMock())
    mocker.patch.object(job_queue, 'paused')
    pause_job = PauseQueueJob()
    mocker.patch('securedrop_client.queue.PauseQueueJob', return_value=pause_job)

    job_queue.on_file_download_queue_paused()

    job_queue.paused.emit.assert_called_once_with()
def test_ApiJobQueue_on_main_queue_paused(mocker):
    job_queue = ApiJobQueue(mocker.MagicMock(), mocker.MagicMock())
    mocker.patch.object(job_queue, "paused")
    pause_job = PauseQueueJob()
    mocker.patch("securedrop_client.queue.PauseQueueJob", return_value=pause_job)

    job_queue.on_main_queue_paused()

    job_queue.paused.emit.assert_called_once_with()
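# The pause tests above assert a single piece of plumbing: whichever internal
# queue pauses, ApiJobQueue surfaces it through its one `paused` signal, so the
# Controller needs only one connection. A minimal sketch of that forwarding
# (an illustration, not the actual implementation):
class _PauseForwardingSketch(QObject):
    paused = pyqtSignal()

    def on_main_queue_paused(self):
        self.paused.emit()

    def on_file_download_queue_paused(self):
        self.paused.emit()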
def test_ApiJobQueue_resume_queues_emits_resume_signal(mocker):
    job_queue = ApiJobQueue(mocker.MagicMock(), mocker.MagicMock())
    mocker.patch.object(job_queue.main_queue, 'resume')
    mocker.patch.object(job_queue.download_file_queue, 'resume')
    job_queue.start_queues = mocker.MagicMock()

    job_queue.resume_queues()

    job_queue.start_queues.assert_called_once_with()
    job_queue.main_queue.resume.emit.assert_called_once_with()
    job_queue.download_file_queue.resume.emit.assert_called_once_with()
def test_ApiJobQueue_logout_removes_api_client(mocker):
    mock_client = mocker.MagicMock()
    mock_session_maker = mocker.MagicMock()
    job_queue = ApiJobQueue(mock_client, mock_session_maker)
    job_queue.main_queue.api_client = 'my token!!!'
    job_queue.download_file_queue.api_client = 'my token!!!'

    job_queue.logout()

    assert job_queue.main_queue.api_client is None
    assert job_queue.download_file_queue.api_client is None
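# A minimal sketch of the logout behavior exercised above (inferred from the
# test, not the actual implementation): invalidating the session amounts to
# clearing the api_client reference held by each queue.
def _sketch_of_api_job_queue_logout(job_queue):
    job_queue.main_queue.api_client = None
    job_queue.download_file_queue.api_client = None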
def test_ApiJobQueue_resume_queues_does_not_emit_resume_signal_if_queues_are_not_running(mocker):
    """
    Ensure the resume signal is not emitted if the queues are not running.
    """
    job_queue = ApiJobQueue(mocker.MagicMock(), mocker.MagicMock())
    mocker.patch.object(job_queue.main_queue, 'resume')
    mocker.patch.object(job_queue.download_file_queue, 'resume')
    job_queue.main_thread.isRunning = mocker.MagicMock(return_value=False)
    job_queue.download_file_thread.isRunning = mocker.MagicMock(return_value=False)

    job_queue.resume_queues()

    job_queue.main_queue.resume.emit.assert_not_called()
    job_queue.download_file_queue.resume.emit.assert_not_called()
def test_ApiJobQueue_resume_queues_emits_resume_signal_if_queues_are_running(mocker):
    """
    Ensure resume signal is emitted if the queues are running.
    """
    job_queue = ApiJobQueue(mocker.MagicMock(), mocker.MagicMock())
    mocker.patch.object(job_queue.main_queue, "resume")
    mocker.patch.object(job_queue.download_file_queue, "resume")
    job_queue.main_thread.isRunning = mocker.MagicMock(return_value=True)
    job_queue.download_file_thread.isRunning = mocker.MagicMock(return_value=True)

    job_queue.resume_queues()

    job_queue.main_queue.resume.emit.assert_called_once_with()
    job_queue.download_file_queue.resume.emit.assert_called_once_with()
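# A sketch consistent with the three resume tests above (an illustration, not
# the actual implementation): restart the queue threads, then emit each
# queue's `resume` signal only while its thread reports it is running.
def _sketch_of_api_job_queue_resume_queues(job_queue):
    job_queue.start_queues()
    if job_queue.main_thread.isRunning():
        job_queue.main_queue.resume.emit()
    if job_queue.download_file_thread.isRunning():
        job_queue.download_file_queue.resume.emit()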
def test_ApiJobQueue_enqueue_when_queues_are_running(mocker):
    mock_client = mocker.MagicMock()
    mock_session_maker = mocker.MagicMock()
    job_queue = ApiJobQueue(mock_client, mock_session_maker)
    job_priority = 2
    dummy_job = factory.dummy_job_factory(mocker, 'mock')()
    job_queue.JOB_PRIORITIES = {FileDownloadJob: job_priority, type(dummy_job): job_priority}

    mock_download_file_queue = mocker.patch.object(job_queue, 'download_file_queue')
    mock_main_queue = mocker.patch.object(job_queue, 'main_queue')
    mock_download_file_add_job = mocker.patch.object(mock_download_file_queue, 'add_job')
    mock_main_queue_add_job = mocker.patch.object(mock_main_queue, 'add_job')
    job_queue.main_queue.api_client = 'has a value'
    job_queue.download_file_queue.api_client = 'has a value'
    job_queue.main_thread.isRunning = mocker.MagicMock(return_value=True)
    job_queue.download_file_thread.isRunning = mocker.MagicMock(return_value=True)

    dl_job = FileDownloadJob('mock', 'mock', 'mock')
    job_queue.enqueue(dl_job)

    mock_download_file_add_job.assert_called_once_with(dl_job)
    assert not mock_main_queue_add_job.called

    # reset for next test
    mock_download_file_queue.reset_mock()
    mock_download_file_add_job.reset_mock()
    mock_main_queue.reset_mock()
    mock_main_queue_add_job.reset_mock()

    job_queue.enqueue(FileDownloadJob('mock', 'mock', 'mock'))

    assert not mock_main_queue_add_job.called

    # reset for next test
    mock_download_file_queue.reset_mock()
    mock_download_file_add_job.reset_mock()
    mock_main_queue.reset_mock()
    mock_main_queue_add_job.reset_mock()

    job_queue.enqueue(dummy_job)

    mock_main_queue_add_job.assert_called_once_with(dummy_job)
    assert not mock_download_file_add_job.called
def test_ApiJobQueue_enqueue_when_queues_are_not_running(mocker):
    mock_client = mocker.MagicMock()
    mock_session_maker = mocker.MagicMock()
    job_queue = ApiJobQueue(mock_client, mock_session_maker)
    job_priority = 2
    dummy_job = factory.dummy_job_factory(mocker, 'mock')()
    job_queue.JOB_PRIORITIES = {FileDownloadJob: job_priority, type(dummy_job): job_priority}

    mock_download_file_queue = mocker.patch.object(job_queue, 'download_file_queue')
    mock_main_queue = mocker.patch.object(job_queue, 'main_queue')
    mock_download_file_add_job = mocker.patch.object(mock_download_file_queue, 'add_job')
    mock_main_queue_add_job = mocker.patch.object(mock_main_queue, 'add_job')
    job_queue.main_queue.api_client = 'has a value'
    job_queue.download_file_queue.api_client = 'has a value'

    # queues are already not running, but just in case the code changes one day
    job_queue.stop()

    dl_job = FileDownloadJob('mock', 'mock', 'mock')
    job_queue.enqueue(dl_job)

    mock_download_file_add_job.assert_not_called()
    mock_main_queue_add_job.assert_not_called()
def test_ApiJobQueue_enqueue_no_auth(mocker):
    mock_client = mocker.MagicMock()
    mock_session_maker = mocker.MagicMock()
    job_queue = ApiJobQueue(mock_client, mock_session_maker)
    mock_download_file_queue = mocker.patch.object(job_queue, 'download_file_queue')
    mock_main_queue = mocker.patch.object(job_queue, 'main_queue')
    mock_download_file_add_job = mocker.patch.object(mock_download_file_queue, 'add_job')
    mock_main_queue_add_job = mocker.patch.object(mock_main_queue, 'add_job')
    job_queue.main_queue.api_client = None
    job_queue.download_file_queue.api_client = None

    dummy_job = factory.dummy_job_factory(mocker, 'mock')()
    job_queue.JOB_PRIORITIES = {type(dummy_job): 1}
    job_queue.enqueue(dummy_job)

    assert mock_download_file_add_job.call_count == 0
    assert mock_main_queue_add_job.call_count == 0
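# A sketch of the routing the three enqueue tests above rely on (an
# illustration, not the actual implementation): drop jobs when the queues hold
# no API token or the threads are stopped; send file downloads to the download
# queue and everything else to the main queue.
def _sketch_of_api_job_queue_enqueue(job_queue, job):
    if not job_queue.main_queue.api_client or not job_queue.download_file_queue.api_client:
        return  # unauthenticated: silently drop the job
    if not (job_queue.main_thread.isRunning() and job_queue.download_file_thread.isRunning()):
        return  # queues stopped: silently drop the job
    if isinstance(job, FileDownloadJob):
        job_queue.download_file_queue.add_job(job)
    else:
        job_queue.main_queue.add_job(job)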
def test_ApiJobQueue_stop_results_in_queue_threads_not_running(mocker):
    job_queue = ApiJobQueue(mocker.MagicMock(), mocker.MagicMock())
    job_queue.main_thread = mocker.MagicMock()
    job_queue.download_file_thread = mocker.MagicMock()

    job_queue.stop()

    job_queue.main_thread.quit.assert_called_once_with()
    job_queue.download_file_thread.quit.assert_called_once_with()
def test_ApiJobQueue_login_if_queues_running(mocker):
    mock_api = mocker.MagicMock()
    mock_client = mocker.MagicMock()
    mock_session_maker = mocker.MagicMock()
    job_queue = ApiJobQueue(mock_client, mock_session_maker)
    mock_main_queue = mocker.patch.object(job_queue, 'main_queue')
    mock_download_file_queue = mocker.patch.object(job_queue, 'download_file_queue')
    mock_main_thread = mocker.patch.object(job_queue, 'main_thread')
    mock_download_file_thread = mocker.patch.object(job_queue, 'download_file_thread')
    job_queue.main_thread.isRunning = mocker.MagicMock(return_value=True)
    job_queue.download_file_thread.isRunning = mocker.MagicMock(return_value=True)

    job_queue.login(mock_api)

    assert mock_main_queue.api_client == mock_api
    assert mock_download_file_queue.api_client == mock_api
    assert not mock_main_thread.start.called
    assert not mock_download_file_thread.start.called
class Controller(QObject):
    """
    Represents the logic for the secure drop client application. In an MVC
    application, this is the controller.
    """

    sync_events = pyqtSignal(str)

    """
    Signal that notifies that a reply was accepted by the server. Emits the
    reply's UUID as a string.
    """
    reply_succeeded = pyqtSignal(str)

    """
    Signal that notifies that a reply failed to be accepted by the server.
    Emits the reply's UUID as a string.
    """
    reply_failed = pyqtSignal(str)

    """
    A signal that emits a signal when the authentication state changes.
    - `True` when the client becomes authenticated
    - `False` when the client becomes unauthenticated
    """
    authentication_state = pyqtSignal(bool)

    """
    This signal indicates that a file has been successfully downloaded by
    emitting the file's UUID as a string.
    """
    file_ready = pyqtSignal(str)

    """
    This signal indicates that a message has been successfully downloaded by
    emitting the message's UUID and content as strings.
    """
    message_ready = pyqtSignal([str, str])

    """
    This signal indicates that a reply has been successfully downloaded by
    emitting the reply's UUID and content as strings.
    """
    reply_ready = pyqtSignal([str, str])

    def __init__(self, hostname: str, gui, session_maker: sessionmaker,
                 home: str, proxy: bool = True, qubes: bool = True) -> None:
        """
        The hostname, gui and session objects are used to coordinate with the
        various other layers of the application: the location of the SecureDrop
        proxy, the user interface and SqlAlchemy local storage respectively.
        """
        check_dir_permissions(home)
        super().__init__()

        # Controller is unauthenticated by default
        self.__is_authenticated = False

        # used for finding DB in sync thread
        self.home = home

        # boolean flag for whether or not the client is operating behind a proxy
        self.proxy = proxy

        # boolean flag for whether the client is running within Qubes
        # (regardless of proxy state, to support local dev in an AppVM)
        self.qubes = qubes

        # Location of the SecureDrop server.
        self.hostname = hostname

        # Reference to the UI window.
        self.gui = gui

        # Reference to the API for secure drop proxy.
        self.api = None  # type: sdclientapi.API

        # Reference to the SqlAlchemy `sessionmaker` and `session`
        self.session_maker = session_maker
        self.session = session_maker()

        # Queue that handles running API jobs
        self.api_job_queue = ApiJobQueue(self.api, self.session_maker)
        self.api_job_queue.paused.connect(self.on_queue_paused)

        # Contains active threads calling the API.
        self.api_threads = {}  # type: Dict[str, Dict]

        self.gpg = GpgHelper(home, self.session_maker, proxy)

        self.export = Export()

        self.sync_flag = os.path.join(home, 'sync_flag')

        # File data.
        self.data_dir = os.path.join(self.home, 'data')

    @property
    def is_authenticated(self) -> bool:
        return self.__is_authenticated

    @is_authenticated.setter
    def is_authenticated(self, is_authenticated: bool) -> None:
        if self.__is_authenticated != is_authenticated:
            self.authentication_state.emit(is_authenticated)
            self.__is_authenticated = is_authenticated

    @is_authenticated.deleter
    def is_authenticated(self) -> None:
        raise AttributeError('Cannot delete is_authenticated')

    def setup(self):
        """
        Setup the application with the default state of:

        * Not logged in.
        * Show most recent state of synchronised sources.
        * Show the login screen.
        * Check the sync status every 30 seconds.
        """
        # The gui needs to reference this "controller" layer to call methods
        # triggered by UI events.
        self.gui.setup(self)

        # Create a timer to check for sync status every 30 seconds.
        self.sync_timer = QTimer()
        self.sync_timer.timeout.connect(self.update_sync)
        self.sync_timer.start(30000)

        # Automagically sync with the API every 5 minutes.
        self.sync_update = QTimer()
        self.sync_update.timeout.connect(self.sync_api)
        self.sync_update.start(1000 * 60 * 5)  # every 5 minutes.

    def call_api(self, api_call_func, success_callback, failure_callback,
                 *args, current_object=None, **kwargs):
        """
        Calls the function in a non-blocking manner. Upon completion calls the
        callback with the result. Calls timeout if the timer associated with
        the call emits a timeout signal. Any further arguments are passed to
        the function to be called.
        """
        new_thread_id = str(uuid.uuid4())  # Uniquely id the new thread.

        new_api_thread = QThread(self.gui)
        new_api_runner = APICallRunner(api_call_func, current_object, *args, **kwargs)
        new_api_runner.moveToThread(new_api_thread)

        # handle completed call: copy response data, reset the
        # client, give the user-provided callback the response
        # data
        new_api_runner.call_succeeded.connect(
            lambda: self.completed_api_call(new_thread_id, success_callback))
        new_api_runner.call_failed.connect(
            lambda: self.completed_api_call(new_thread_id, failure_callback))

        # when the thread starts, we want to run `call_api` on `api_runner`
        new_api_thread.started.connect(new_api_runner.call_api)

        # Add the thread related objects to the api_threads dictionary.
        self.api_threads[new_thread_id] = {
            'thread': new_api_thread,
            'runner': new_api_runner,
        }

        # Start the thread and related activity.
        new_api_thread.start()

    def on_queue_paused(self) -> None:
        if self.api is None:
            self.gui.update_error_status(
                _('The SecureDrop server cannot be reached.'))
        else:
            self.gui.update_error_status(
                _('The SecureDrop server cannot be reached.'),
                duration=0,
                retry=True)

    def resume_queues(self) -> None:
        self.api_job_queue.resume_queues()

    def completed_api_call(self, thread_id, user_callback):
        """
        Manage a completed API call. The actual result *may* be an exception or
        error result from the API. It's up to the handler (user_callback) to
        handle these potential states.
        """
        logger.info("Completed API call. Cleaning up and running callback.")
        thread_info = self.api_threads.pop(thread_id)
        runner = thread_info['runner']
        result_data = runner.result

        arg_spec = inspect.getfullargspec(user_callback)
        if 'current_object' in arg_spec.args:
            user_callback(result_data, current_object=runner.current_object)
        else:
            user_callback(result_data)

    def login(self, username, password, totp):
        """
        Given a username, password and time based one-time-passcode (TOTP),
        create a new instance representing the SecureDrop api and authenticate.
        """
        self.api = sdclientapi.API(self.hostname, username, password, totp, self.proxy)
        self.call_api(self.api.authenticate,
                      self.on_authenticate_success,
                      self.on_authenticate_failure)

    def on_authenticate_success(self, result):
        """
        Handles a successful authentication call against the API.
        """
        logger.info('{} successfully logged in'.format(self.api.username))
        self.gui.hide_login()
        self.sync_api()
        self.call_api(self.api.get_current_user,
                      self.on_get_current_user_success,
                      self.on_get_current_user_failure)
        self.api_job_queue.login(self.api)
        self.is_authenticated = True
        self.resume_queues()

    def on_authenticate_failure(self, result: Exception) -> None:
        # Failed to authenticate. Reset state with failure message.
        self.api = None
        error = _('There was a problem signing in. '
                  'Please verify your credentials and try again.')
        self.gui.show_login_error(error=error)

    def on_get_current_user_success(self, result) -> None:
        user = storage.update_and_get_user(result['uuid'],
                                           result['username'],
                                           result['first_name'],
                                           result['last_name'],
                                           self.session)
        self.gui.show_main_window(user)

    def on_get_current_user_failure(self, result: Exception) -> None:
        self.api = None
        self.gui.show_login_error(error=_('Could not find your account.'))

    def login_offline_mode(self):
        """
        Allow user to view in offline mode without authentication.
        """
        self.gui.hide_login()
        self.gui.show_main_window()
        self.is_authenticated = False
        self.update_sources()

    def on_action_requiring_login(self):
        """
        Indicate that a user needs to login to perform the specified action.
        """
        error = _('You must sign in to perform this action.')
        self.gui.update_error_status(error)

    def authenticated(self):
        """
        Return a boolean indication that the connection to the API is
        authenticated.
        """
        return bool(self.api and self.api.token is not None)

    def sync_api(self):
        """
        Grab data from the remote SecureDrop API in a non-blocking manner.
        """
        logger.debug("In sync_api on thread {}".format(self.thread().currentThreadId()))
        self.sync_events.emit('syncing')

        if self.authenticated():
            logger.debug("You are authenticated, going to make your call")
            self.call_api(storage.get_remote_data,
                          self.on_sync_success,
                          self.on_sync_failure,
                          self.api)
            logger.debug("In sync_api, after call to call_api, on "
                         "thread {}".format(self.thread().currentThreadId()))

    def last_sync(self):
        """
        Returns the time of the last synchronisation with the remote SD server.
        """
        try:
            with open(self.sync_flag) as f:
                return arrow.get(f.read())
        except Exception:
            return None

    def on_sync_success(self, result) -> None:
        """
        Called when synchronisation of data via the API succeeds.
        """
        # Update db with new metadata
        remote_sources, remote_submissions, remote_replies = result
        storage.update_local_storage(self.session,
                                     remote_sources,
                                     remote_submissions,
                                     remote_replies,
                                     self.data_dir)

        # Set last sync flag
        with open(self.sync_flag, 'w') as f:
            f.write(arrow.now().format())

        # Import keys into keyring
        for source in remote_sources:
            if source.key and source.key.get('type', None) == 'PGP':
                pub_key = source.key.get('public', None)
                fingerprint = source.key.get('fingerprint', None)
                if not pub_key or not fingerprint:
                    continue
                try:
                    self.gpg.import_key(source.uuid, pub_key, fingerprint)
                except CryptoError:
                    logger.warning('Failed to import key for source {}'.format(source.uuid))

        self.update_sources()
        self.download_new_messages()
        self.download_new_replies()
        self.sync_events.emit('synced')

    def on_sync_failure(self, result: Exception) -> None:
        """
        Called when synchronisation of data via the API fails.
        """
        self.gui.update_error_status(
            _('The SecureDrop server cannot be reached.'),
            duration=0,
            retry=True)

    def update_sync(self):
        """
        Updates the UI to show human time of last sync.
        """
        self.gui.show_sync(self.last_sync())

    def update_sources(self):
        """
        Display the updated list of sources with those found in local storage.
        """
        sources = list(storage.get_local_sources(self.session))
        if sources:
            sources.sort(key=lambda x: x.last_updated, reverse=True)
        self.gui.show_sources(sources)
        self.update_sync()

    def on_update_star_success(self, result) -> None:
        """
        After we star a source, we should sync the API such that the local
        database is updated.
""" self.sync_api() # Syncing the API also updates the source list UI def on_update_star_failure(self, result: UpdateStarJobException) -> None: """ After we unstar a source, we should sync the API such that the local database is updated. """ logging.info("failed to push change to server") error = _('Failed to update star.') self.gui.update_error_status(error) def update_star(self, source_db_object): """ Star or unstar. The callback here is the API sync as we first make sure that we apply the change to the server, and then update locally. """ if not self.api: # Then we should tell the user they need to login. self.on_action_requiring_login() return job = UpdateStarJob(source_db_object.uuid, source_db_object.is_starred) job.success_signal.connect(self.on_update_star_success, type=Qt.QueuedConnection) job.failure_signal.connect(self.on_update_star_failure, type=Qt.QueuedConnection) self.api_job_queue.enqueue(job) def logout(self): """ Call logout function in the API, reset the API object, and force the UI to update into a logged out state. """ self.call_api(self.api.logout, self.on_logout_success, self.on_logout_failure) self.api = None self.api_job_queue.logout() self.gui.logout() self.is_authenticated = False def set_status(self, message, duration=5000): """ Set a textual status message to be displayed to the user for a certain duration. """ self.gui.update_activity_status(message, duration) def _submit_download_job(self, object_type: Union[Type[db.Reply], Type[db.Message], Type[db.File]], uuid: str) -> None: if object_type == db.Reply: job = ReplyDownloadJob( uuid, self.data_dir, self.gpg ) # type: Union[ReplyDownloadJob, MessageDownloadJob, FileDownloadJob] job.success_signal.connect(self.on_reply_download_success, type=Qt.QueuedConnection) job.failure_signal.connect(self.on_reply_download_failure, type=Qt.QueuedConnection) elif object_type == db.Message: job = MessageDownloadJob(uuid, self.data_dir, self.gpg) job.success_signal.connect(self.on_message_download_success, type=Qt.QueuedConnection) job.failure_signal.connect(self.on_message_download_failure, type=Qt.QueuedConnection) elif object_type == db.File: job = FileDownloadJob(uuid, self.data_dir, self.gpg) job.success_signal.connect(self.on_file_download_success, type=Qt.QueuedConnection) job.failure_signal.connect(self.on_file_download_failure, type=Qt.QueuedConnection) self.api_job_queue.enqueue(job) def download_new_messages(self) -> None: messages = storage.find_new_messages(self.session) if len(messages) > 0: self.set_status(_('Downloading new messages')) for message in messages: self._submit_download_job(type(message), message.uuid) def on_message_download_success(self, uuid: str) -> None: """ Called when a message has downloaded. """ message = storage.get_message(self.session, uuid) self.message_ready.emit(message.uuid, message.content) def on_message_download_failure(self, exception: Exception) -> None: """ Called when a message fails to download. """ logger.debug('Failed to download message: {}'.format(exception)) # Keep resubmitting the job if the download is corrupted. 
        if isinstance(exception, DownloadChecksumMismatchException):
            logger.debug('Failure due to checksum mismatch, retrying {}'.format(exception.uuid))
            self._submit_download_job(exception.object_type, exception.uuid)

    def download_new_replies(self) -> None:
        replies = storage.find_new_replies(self.session)
        for reply in replies:
            self._submit_download_job(type(reply), reply.uuid)

    def on_reply_download_success(self, uuid: str) -> None:
        """
        Called when a reply has downloaded.
        """
        reply = storage.get_reply(self.session, uuid)
        self.reply_ready.emit(reply.uuid, reply.content)

    def on_reply_download_failure(self, exception: Exception) -> None:
        """
        Called when a reply fails to download.
        """
        logger.debug('Failed to download reply: {}'.format(exception))

        # Keep resubmitting the job if the download is corrupted.
        if isinstance(exception, DownloadChecksumMismatchException):
            logger.debug('Failure due to checksum mismatch, retrying {}'.format(exception.uuid))
            self._submit_download_job(exception.object_type, exception.uuid)

    def on_file_open(self, file_uuid: str) -> None:
        """
        Open the already downloaded file associated with the message (which is a `File`).
        """
        # Once downloaded, submissions are stored in the data directory
        # with the same filename as the server, except with the .gz.gpg
        # stripped off.
        file = self.get_file(file_uuid)
        fn_no_ext, _ = os.path.splitext(os.path.splitext(file.filename)[0])
        submission_filepath = os.path.join(self.data_dir, fn_no_ext)
        original_filepath = os.path.join(self.data_dir, file.original_filename)
        if os.path.exists(original_filepath):
            os.remove(original_filepath)
        os.link(submission_filepath, original_filepath)

        if self.proxy or self.qubes:
            # Running on Qubes.
            command = "qvm-open-in-vm"
            args = ['$dispvm:sd-svs-disp', original_filepath]

            # QProcess (Qt) or Python's subprocess? Who cares? They do the
            # same thing. :-)
            process = QProcess(self)
            process.start(command, args)
        else:  # pragma: no cover
            # Non Qubes OS. Just log the event for now.
            logger.info('Opening file "{}".'.format(original_filepath))

    def run_export_preflight_checks(self):
        '''
        Run preflight checks to make sure the Export VM is configured correctly.
        '''
        logger.debug('Running export preflight checks')

        if not self.qubes:
            return

        self.export.run_preflight_checks()

    def export_file_to_usb_drive(self, file_uuid: str, passphrase: str) -> None:
        file = self.get_file(file_uuid)
        logger.debug('Exporting {}'.format(file.original_filename))

        if not self.qubes:
            return

        filepath = os.path.join(self.data_dir, file.original_filename)
        self.export.send_file_to_usb_device([filepath], passphrase)
        logger.debug('Export successful')

    def on_submission_download(
        self,
        submission_type: Union[Type[db.File], Type[db.Message]],
        submission_uuid: str,
    ) -> None:
        """
        Download the file associated with the Submission (which may be a File or Message).
        """
        if self.api:
            self._submit_download_job(submission_type, submission_uuid)
            self.set_status(_('Downloading file'))
        else:
            self.on_action_requiring_login()

    def on_file_download_success(self, result: Any) -> None:
        """
        Called when a file has downloaded.
        """
        self.file_ready.emit(result)

    def on_file_download_failure(self, exception: Exception) -> None:
        """
        Called when a file fails to download.
        """
        logger.debug('Failed to download file: {}'.format(exception))

        # Keep resubmitting the job if the download is corrupted.
        if isinstance(exception, DownloadChecksumMismatchException):
            logger.debug('Failure due to checksum mismatch, retrying {}'.format(exception.uuid))
            self._submit_download_job(exception.object_type, exception.uuid)
        else:
            self.set_status(_('The file download failed. Please try again.'))

    def on_delete_source_success(self, result) -> None:
        """
        Handler for when a source deletion succeeds.
        """
        self.sync_api()

    def on_delete_source_failure(self, result: Exception) -> None:
        logging.info("failed to delete source at server")
        error = _('Failed to delete source at server')
        self.gui.update_error_status(error)

    def delete_source(self, source):
        """
        Performs a delete operation on a source record.

        This method will first ask the server to delete the source record. If
        the deletion at the server succeeds, it will sync the server records
        with the local state. On failure, it will display an error.
        """
        self.call_api(self.api.delete_source,
                      self.on_delete_source_success,
                      self.on_delete_source_failure,
                      source)

    def send_reply(self, source_uuid: str, reply_uuid: str, message: str) -> None:
        """
        Send a reply to a source.
        """
        job = SendReplyJob(
            source_uuid,
            reply_uuid,
            message,
            self.gpg,
        )
        job.success_signal.connect(self.on_reply_success, type=Qt.QueuedConnection)
        job.failure_signal.connect(self.on_reply_failure, type=Qt.QueuedConnection)

        self.api_job_queue.enqueue(job)

    def on_reply_success(self, reply_uuid: str) -> None:
        logger.debug('{} sent successfully'.format(reply_uuid))
        self.reply_succeeded.emit(reply_uuid)
        self.sync_api()

    def on_reply_failure(
        self,
        exception: Union[SendReplyJobError, SendReplyJobTimeoutError]
    ) -> None:
        logger.debug('{} failed to send'.format(exception.reply_uuid))
        self.reply_failed.emit(exception.reply_uuid)

    def get_file(self, file_uuid: str) -> db.File:
        file = storage.get_file(self.session, file_uuid)
        self.session.refresh(file)
        return file

    def on_logout_success(self, result) -> None:
        logging.info('Client logout successful')

    def on_logout_failure(self, result: Exception) -> None:
        logging.info('Client logout failure')
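# A hypothetical illustration of the `call_api` pattern defined above (an
# assumption, not part of the client): any bound API method can be dispatched
# on a worker thread with a success and a failure callback.
def _sketch_of_call_api_usage(controller):
    def on_success(result):
        logger.info('call succeeded: {}'.format(result))

    def on_failure(error):
        logger.info('call failed: {}'.format(error))

    # get_sources is used here as one example sdclientapi endpoint.
    controller.call_api(controller.api.get_sources, on_success, on_failure)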
class Controller(QObject):
    """
    Represents the logic for the secure drop client application. In an MVC
    application, this is the controller.
    """

    sync_events = pyqtSignal(str)

    """
    A signal that emits a signal when the authentication state changes.
    - `True` when the client becomes authenticated
    - `False` when the client becomes unauthenticated
    """
    authentication_state = pyqtSignal(bool)

    """
    This signal indicates that a reply was successfully sent and received by the server.

    Emits:
        str: the reply's source UUID
        str: the reply UUID
        str: the content of the reply
    """
    reply_succeeded = pyqtSignal(str, str, str)

    """
    This signal indicates that a reply was not successfully sent or received by the server.

    Emits:
        str: the reply UUID
    """
    reply_failed = pyqtSignal(str)

    """
    This signal indicates that a reply has been successfully downloaded.

    Emits:
        str: the reply's source UUID
        str: the reply UUID
        str: the content of the reply
    """
    reply_ready = pyqtSignal(str, str, str)

    """
    This signal indicates an error while downloading a reply.

    Emits:
        str: the reply's source UUID
        str: the reply UUID
        str: the content of the reply
    """
    reply_download_failed = pyqtSignal(str, str, str)

    """
    This signal indicates that a message has been successfully downloaded.

    Emits:
        str: the message's source UUID
        str: the message UUID
        str: the content of the message
    """
    message_ready = pyqtSignal(str, str, str)

    """
    This signal indicates an error while downloading a message.

    Emits:
        str: the message's source UUID
        str: the message UUID
        str: the content of the message
    """
    message_download_failed = pyqtSignal(str, str, str)

    """
    This signal indicates that a file has been successfully downloaded.

    Emits:
        str: the file's source UUID
        str: the file UUID
        str: the name of the file
    """
    file_ready = pyqtSignal(str, str, str)

    """
    This signal indicates that a file is missing.

    Emits:
        str: the file's source UUID
        str: the file UUID
        str: the name of the file
    """
    file_missing = pyqtSignal(str, str, str)

    """
    This signal indicates that a deletion request was accepted by the server.

    Emits:
        str: the source UUID
    """
    source_deleted = pyqtSignal(str)

    """
    This signal indicates that a star update request succeeded.

    Emits:
        str: the source UUID
    """
    star_update_successful = pyqtSignal(str)

    """
    This signal indicates that a star update request failed.

    Emits:
        str: the source UUID
        bool: is_starred
    """
    star_update_failed = pyqtSignal(str, bool)

    """
    This signal indicates that a deletion attempt failed at the server.

    Emits:
        str: the source UUID
    """
    source_deletion_failed = pyqtSignal(str)

    def __init__(
        self,
        hostname: str,
        gui,
        session_maker: sessionmaker,
        home: str,
        proxy: bool = True,
        qubes: bool = True
    ) -> None:
        """
        The hostname, gui and session objects are used to coordinate with the
        various other layers of the application: the location of the SecureDrop
        proxy, the user interface and SqlAlchemy local storage respectively.
        """
        check_dir_permissions(home)
        super().__init__()

        # Controller is unauthenticated by default
        self.__is_authenticated = False

        # used for finding DB in sync thread
        self.home = home

        # boolean flag for whether or not the client is operating behind a proxy
        self.proxy = proxy

        # boolean flag for whether the client is running within Qubes
        # (regardless of proxy state, to support local dev in an AppVM)
        self.qubes = qubes

        # Location of the SecureDrop server.
        self.hostname = hostname

        # Reference to the UI window.
        self.gui = gui

        # Reference to the API for secure drop proxy.
        self.api = None  # type: sdclientapi.API

        # Reference to the SqlAlchemy `sessionmaker` and `session`
        self.session_maker = session_maker
        self.session = session_maker()

        # Queue that handles running API jobs
        self.api_job_queue = ApiJobQueue(self.api, self.session_maker)
        self.api_job_queue.paused.connect(self.on_queue_paused)

        # Contains active threads calling the API.
        self.api_threads = {}  # type: Dict[str, Dict]

        self.gpg = GpgHelper(home, self.session_maker, proxy)

        self.export = Export()

        # File data.
        self.data_dir = os.path.join(self.home, 'data')

        # Background sync to keep the client up-to-date with server changes
        self.api_sync = ApiSync(self.api, self.session_maker, self.gpg, self.data_dir)
        self.api_sync.sync_started.connect(self.on_sync_started, type=Qt.QueuedConnection)
        self.api_sync.sync_success.connect(self.on_sync_success, type=Qt.QueuedConnection)
        self.api_sync.sync_failure.connect(self.on_sync_failure, type=Qt.QueuedConnection)

        # Create a timer to show the time since the last sync
        self.show_last_sync_timer = QTimer()
        self.show_last_sync_timer.timeout.connect(self.show_last_sync)

        # Path to the file containing the timestamp of the last sync with the server
        self.last_sync_filepath = os.path.join(home, 'sync_flag')

    @property
    def is_authenticated(self) -> bool:
        return self.__is_authenticated

    @is_authenticated.setter
    def is_authenticated(self, is_authenticated: bool) -> None:
        if self.__is_authenticated != is_authenticated:
            self.__is_authenticated = is_authenticated
            self.authentication_state.emit(is_authenticated)

    @is_authenticated.deleter
    def is_authenticated(self) -> None:
        raise AttributeError('Cannot delete is_authenticated')

    def setup(self):
        """
        Setup the application with the default state of:

        * Not logged in.
        * Show most recent state of synchronised sources.
        * Show the login screen.
        * Check the sync status every 30 seconds.
        """
        # The gui needs to reference this "controller" layer to call methods
        # triggered by UI events.
        self.gui.setup(self)

        # Run the export object in a separate thread context (a reference to the
        # thread is kept on self such that it does not get garbage collected
        # after this method returns) - we want to keep our export thread around
        # for later processing.
        self.export_thread = QThread()
        self.export.moveToThread(self.export_thread)
        self.export_thread.start()

        storage.clear_download_errors(self.session)

    def call_api(self, api_call_func, success_callback, failure_callback,
                 *args, current_object=None, **kwargs):
        """
        Calls the function in a non-blocking manner. Upon completion calls the
        callback with the result. Calls timeout if the timer associated with
        the call emits a timeout signal. Any further arguments are passed to
        the function to be called.
        """
        new_thread_id = str(uuid.uuid4())  # Uniquely id the new thread.

        new_api_thread = QThread(self.gui)
        new_api_runner = APICallRunner(api_call_func, current_object, *args, **kwargs)
        new_api_runner.moveToThread(new_api_thread)

        # handle completed call: copy response data, reset the
        # client, give the user-provided callback the response
        # data
        new_api_runner.call_succeeded.connect(
            lambda: self.completed_api_call(new_thread_id, success_callback))
        new_api_runner.call_failed.connect(
            lambda: self.completed_api_call(new_thread_id, failure_callback))

        # when the thread starts, we want to run `call_api` on `api_runner`
        new_api_thread.started.connect(new_api_runner.call_api)

        # Add the thread related objects to the api_threads dictionary.
        self.api_threads[new_thread_id] = {
            'thread': new_api_thread,
            'runner': new_api_runner,
        }

        # Start the thread and related activity.
        new_api_thread.start()

    def on_queue_paused(self) -> None:
        self.gui.update_error_status(
            _('The SecureDrop server cannot be reached. Trying to reconnect...'),
            duration=0)
        self.show_last_sync_timer.start(TIME_BETWEEN_SHOWING_LAST_SYNC_MS)

    def resume_queues(self) -> None:
        self.api_job_queue.resume_queues()
        self.show_last_sync_timer.stop()
        # clear error status in case the queue was paused, resulting in a permanent error message
        self.gui.clear_error_status()

    def completed_api_call(self, thread_id, user_callback):
        """
        Manage a completed API call. The actual result *may* be an exception or
        error result from the API. It's up to the handler (user_callback) to
        handle these potential states.
        """
        logger.debug("Completed API call. Cleaning up and running callback.")
        thread_info = self.api_threads.pop(thread_id)
        runner = thread_info['runner']
        result_data = runner.result

        arg_spec = inspect.getfullargspec(user_callback)
        if 'current_object' in arg_spec.args:
            user_callback(result_data, current_object=runner.current_object)
        else:
            user_callback(result_data)

    def login(self, username, password, totp):
        """
        Given a username, password and time based one-time-passcode (TOTP),
        create a new instance representing the SecureDrop api and authenticate.

        Default to 60 seconds until we implement a better request timeout
        strategy. We lower the default_request_timeout for Queue API requests
        in ApiJobQueue in order to display errors faster.
        """
        storage.mark_all_pending_drafts_as_failed(self.session)
        self.api = sdclientapi.API(
            self.hostname, username, password, totp, self.proxy, default_request_timeout=60)
        self.call_api(self.api.authenticate,
                      self.on_authenticate_success,
                      self.on_authenticate_failure)
        self.show_last_sync_timer.stop()
        self.set_status('')

    def on_authenticate_success(self, result):
        """
        Handles a successful authentication call against the API.
        """
        logger.info('{} successfully logged in'.format(self.api.username))
        self.gui.hide_login()
        user = storage.update_and_get_user(
            self.api.token_journalist_uuid,
            self.api.username,
            self.api.journalist_first_name,
            self.api.journalist_last_name,
            self.session)
        self.gui.show_main_window(user)

        self.update_sources()

        self.api_job_queue.start(self.api)
        self.api_sync.start(self.api)

        self.is_authenticated = True

    def on_authenticate_failure(self, result: Exception) -> None:
        # Failed to authenticate. Reset state with failure message.
        self.invalidate_token()
        error = _('That didn\'t work. Please check everything and try again.\n'
                  'Make sure to use a new two-factor code.')
        self.gui.show_login_error(error=error)
        self.api_sync.stop()

    def login_offline_mode(self):
        """
        Allow user to view in offline mode without authentication.
        """
        self.gui.hide_login()
        self.gui.show_main_window()
        storage.mark_all_pending_drafts_as_failed(self.session)
        self.is_authenticated = False
        self.update_sources()
        self.show_last_sync()
        self.show_last_sync_timer.start(TIME_BETWEEN_SHOWING_LAST_SYNC_MS)

    def on_action_requiring_login(self):
        """
        Indicate that a user needs to login to perform the specified action.
        """
        error = _('You must sign in to perform this action.')
        self.gui.update_error_status(error)

    def authenticated(self):
        """
        Return a boolean indication that the connection to the API is
        authenticated.
        """
        return bool(self.api and self.api.token is not None)

    def get_last_sync(self):
        """
        Returns the time of the last synchronisation with the remote SD server.
""" try: with open(self.last_sync_filepath) as f: return arrow.get(f.read()) except Exception: return None def on_sync_started(self) -> None: self.sync_events.emit('syncing') def on_sync_success(self) -> None: """ Called when syncronisation of data via the API queue succeeds. * Set last sync flag * Display the last sync time and updated list of sources in GUI * Download new messages and replies * Update missing files so that they can be re-downloaded """ with open(self.last_sync_filepath, 'w') as f: f.write(arrow.now().format()) missing_files = storage.update_missing_files(self.data_dir, self.session) for missed_file in missing_files: self.file_missing.emit(missed_file.source.uuid, missed_file.uuid, str(missed_file)) self.update_sources() self.gui.refresh_current_source_conversation() self.download_new_messages() self.download_new_replies() self.sync_events.emit('synced') self.resume_queues() def on_sync_failure(self, result: Exception) -> None: """ Called when syncronisation of data via the API fails after a background sync. If the reason a sync fails is ApiInaccessibleError then we need to log the user out for security reasons and show them the login window in order to get a new token. """ logger.warning('sync failure: {}'.format(result)) if isinstance(result, ApiInaccessibleError): # Don't show login window if the user is already logged out if not self.is_authenticated or not self.api: return self.invalidate_token() self.logout() self.gui.show_login(error=_('Your session expired. Please log in again.')) elif isinstance(result, (RequestTimeoutError, ServerConnectionError)): self.gui.update_error_status( _('The SecureDrop server cannot be reached. Trying to reconnect...'), duration=0) def show_last_sync(self): """ Updates the UI to show human time of last sync. """ self.gui.show_last_sync(self.get_last_sync()) def update_sources(self): """ Display the updated list of sources with those found in local storage. """ sources = list(storage.get_local_sources(self.session)) self.gui.show_sources(sources) def on_update_star_success(self, source_uuid: str) -> None: self.star_update_successful.emit(source_uuid) def on_update_star_failure( self, error: Union[UpdateStarJobError, UpdateStarJobTimeoutError] ) -> None: if isinstance(error, UpdateStarJobError): self.gui.update_error_status(_('Failed to update star.')) source = self.session.query(db.Source).filter_by(uuid=error.source_uuid).one() self.star_update_failed.emit(error.source_uuid, source.is_starred) @login_required def update_star(self, source_uuid: str, is_starred: bool): """ Star or unstar. """ job = UpdateStarJob(source_uuid, is_starred) job.success_signal.connect(self.on_update_star_success, type=Qt.QueuedConnection) job.failure_signal.connect(self.on_update_star_failure, type=Qt.QueuedConnection) self.api_job_queue.enqueue(job) def logout(self): """ If the token is not already invalid, make an api call to logout and invalidate the token. Then mark all pending draft replies as failed, stop the queues, and show the user as logged out in the GUI. 
""" # clear error status in case queue was paused resulting in a permanent error message self.gui.clear_error_status() if self.api is not None: self.call_api(self.api.logout, self.on_logout_success, self.on_logout_failure) self.invalidate_token() failed_replies = storage.mark_all_pending_drafts_as_failed(self.session) for failed_reply in failed_replies: self.reply_failed.emit(failed_reply.uuid) self.api_sync.stop() self.api_job_queue.stop() self.gui.logout() self.show_last_sync_timer.start(TIME_BETWEEN_SHOWING_LAST_SYNC_MS) self.show_last_sync() self.is_authenticated = False def invalidate_token(self): self.api = None def set_status(self, message, duration=5000): """ Set a textual status message to be displayed to the user for a certain duration. """ self.gui.update_activity_status(message, duration) @login_required def _submit_download_job(self, object_type: Union[Type[db.Reply], Type[db.Message], Type[db.File]], uuid: str) -> None: if object_type == db.Reply: job = ReplyDownloadJob( uuid, self.data_dir, self.gpg ) # type: Union[ReplyDownloadJob, MessageDownloadJob, FileDownloadJob] job.success_signal.connect(self.on_reply_download_success, type=Qt.QueuedConnection) job.failure_signal.connect(self.on_reply_download_failure, type=Qt.QueuedConnection) elif object_type == db.Message: job = MessageDownloadJob(uuid, self.data_dir, self.gpg) job.success_signal.connect(self.on_message_download_success, type=Qt.QueuedConnection) job.failure_signal.connect(self.on_message_download_failure, type=Qt.QueuedConnection) elif object_type == db.File: job = FileDownloadJob(uuid, self.data_dir, self.gpg) job.success_signal.connect(self.on_file_download_success, type=Qt.QueuedConnection) job.failure_signal.connect(self.on_file_download_failure, type=Qt.QueuedConnection) self.api_job_queue.enqueue(job) def download_new_messages(self) -> None: new_messages = storage.find_new_messages(self.session) new_message_count = len(new_messages) if new_message_count > 0: self.set_status(_('Retrieving new messages'), 2500) for message in new_messages: if message.download_error: logger.info( f"Download of message {message.uuid} failed since client start; not retrying." ) else: self._submit_download_job(type(message), message.uuid) def on_message_download_success(self, uuid: str) -> None: """ Called when a message has downloaded. """ self.session.commit() # Needed to flush stale data. message = storage.get_message(self.session, uuid) self.message_ready.emit(message.source.uuid, message.uuid, message.content) def on_message_download_failure(self, exception: DownloadException) -> None: """ Called when a message fails to download. """ logger.info('Failed to download message: {}'.format(exception)) if isinstance(exception, DownloadChecksumMismatchException): # Keep resubmitting the job if the download is corrupted. logger.warning('Failure due to checksum mismatch, retrying {}'.format(exception.uuid)) self._submit_download_job(exception.object_type, exception.uuid) self.session.commit() try: message = storage.get_message(self.session, exception.uuid) self.message_download_failed.emit(message.source.uuid, message.uuid, str(message)) except Exception as e: logger.error(f"Could not emit message_download_failed: {e}") def download_new_replies(self) -> None: replies = storage.find_new_replies(self.session) for reply in replies: if reply.download_error: logger.info( f"Download of reply {reply.uuid} failed since client start; not retrying." 
                )
            else:
                self._submit_download_job(type(reply), reply.uuid)

    def on_reply_download_success(self, uuid: str) -> None:
        """
        Called when a reply has downloaded.
        """
        self.session.commit()  # Needed to flush stale data.
        reply = storage.get_reply(self.session, uuid)
        self.reply_ready.emit(reply.source.uuid, reply.uuid, reply.content)

    def on_reply_download_failure(self, exception: DownloadException) -> None:
        """
        Called when a reply fails to download.
        """
        logger.info('Failed to download reply: {}'.format(exception))

        if isinstance(exception, DownloadChecksumMismatchException):
            # Keep resubmitting the job if the download is corrupted.
            logger.warning('Failure due to checksum mismatch, retrying {}'.format(exception.uuid))
            self._submit_download_job(exception.object_type, exception.uuid)

        self.session.commit()
        try:
            reply = storage.get_reply(self.session, exception.uuid)
            self.reply_download_failed.emit(reply.source.uuid, reply.uuid, str(reply))
        except Exception as e:
            logger.error(f"Could not emit reply_download_failed: {e}")

    def downloaded_file_exists(self, file: db.File) -> bool:
        '''
        Check if the file specified by file_uuid exists. If it doesn't, update
        the local db and GUI to show the file as not downloaded.
        '''
        if not os.path.exists(file.location(self.data_dir)):
            self.gui.update_error_status(_(
                'File does not exist in the data directory. Please try re-downloading.'))
            logger.warning('Cannot find file in {}. File does not exist.'.format(
                os.path.dirname(file.filename)))
            missing_files = storage.update_missing_files(self.data_dir, self.session)
            for f in missing_files:
                self.file_missing.emit(f.source.uuid, f.uuid, str(f))
            return False
        return True

    def on_file_open(self, file: db.File) -> None:
        '''
        Open the file specified by file_uuid. If the file is missing, update
        the db so that is_downloaded is set to False.
        '''
        logger.info('Opening file in "{}".'.format(os.path.dirname(file.location(self.data_dir))))

        if not self.downloaded_file_exists(file):
            return

        if not self.qubes:
            return

        command = "qvm-open-in-vm"
        args = ['--view-only', '$dispvm:sd-viewer', file.location(self.data_dir)]
        process = QProcess(self)
        process.start(command, args)

    def run_printer_preflight_checks(self):
        '''
        Run preflight checks to make sure the Export VM is configured correctly.
        '''
        logger.info('Running printer preflight check')

        if not self.qubes:
            self.export.printer_preflight_success.emit()
            return

        self.export.begin_printer_preflight.emit()

    def run_export_preflight_checks(self):
        '''
        Run preflight checks to make sure the Export VM is configured correctly.
        '''
        logger.info('Running export preflight check')

        if not self.qubes:
            self.export.preflight_check_call_success.emit()
            return

        self.export.begin_preflight_check.emit()

    def export_file_to_usb_drive(self, file_uuid: str, passphrase: str) -> None:
        '''
        Send the file specified by file_uuid to the Export VM with the
        user-provided passphrase for unlocking the attached transfer device.
        If the file is missing, update the db so that is_downloaded is set to
        False.
        '''
        file = self.get_file(file_uuid)
        file_location = file.location(self.data_dir)
        logger.info('Exporting file in: {}'.format(os.path.dirname(file_location)))

        if not self.downloaded_file_exists(file):
            return

        if not self.qubes:
            self.export.export_usb_call_success.emit()
            return

        self.export.begin_usb_export.emit([file_location], passphrase)

    def print_file(self, file_uuid: str) -> None:
        '''
        Send the file specified by file_uuid to the Export VM. If the file is
        missing, update the db so that is_downloaded is set to False.
        '''
        file = self.get_file(file_uuid)
        file_location = file.location(self.data_dir)
        logger.info('Printing file in: {}'.format(os.path.dirname(file_location)))

        if not self.downloaded_file_exists(file):
            return

        if not self.qubes:
            return

        self.export.begin_print.emit([file_location])

    @login_required
    def on_submission_download(
        self,
        submission_type: Union[Type[db.File], Type[db.Message]],
        submission_uuid: str,
    ) -> None:
        """
        Download the file associated with the Submission (which may be a File or Message).
        """
        self._submit_download_job(submission_type, submission_uuid)

    def on_file_download_success(self, uuid: Any) -> None:
        """
        Called when a file has downloaded.
        """
        self.session.commit()
        file_obj = storage.get_file(self.session, uuid)
        file_obj.download_error = None
        storage.update_file_size(uuid, self.data_dir, self.session)
        self.file_ready.emit(file_obj.source.uuid, uuid, file_obj.filename)

    def on_file_download_failure(self, exception: Exception) -> None:
        """
        Called when a file fails to download.
        """
        logger.info('Failed to download file: {}'.format(exception))

        # Keep resubmitting the job if the download is corrupted.
        if isinstance(exception, DownloadChecksumMismatchException):
            logger.warning('Failure due to checksum mismatch, retrying {}'.format(exception.uuid))
            self._submit_download_job(exception.object_type, exception.uuid)
        else:
            if isinstance(exception, DownloadDecryptionException):
                logger.error("Failed to decrypt %s", exception.uuid)
                f = self.get_file(exception.uuid)
                self.file_missing.emit(f.source.uuid, f.uuid, str(f))
            self.gui.update_error_status(_('The file download failed. Please try again.'))

    def on_delete_source_success(self, source_uuid: str) -> None:
        """
        Rely on sync to delete the source locally so we know for sure it was deleted.
        """
        pass

    def on_delete_source_failure(self, e: Exception) -> None:
        if isinstance(e, DeleteSourceJobException):
            error = _('Failed to delete source at server')
            self.gui.update_error_status(error)
            self.source_deletion_failed.emit(e.source_uuid)

    @login_required
    def delete_source(self, source: db.Source):
        """
        Performs a delete operation on a source record.

        This method will submit a job to delete the source record on the
        server. If the job succeeds, the success handler will synchronize the
        server records with the local state. If not, the failure handler will
        display an error.
        """
        job = DeleteSourceJob(source.uuid)
        job.success_signal.connect(self.on_delete_source_success, type=Qt.QueuedConnection)
        job.failure_signal.connect(self.on_delete_source_failure, type=Qt.QueuedConnection)

        self.api_job_queue.enqueue(job)
        self.source_deleted.emit(source.uuid)

    @login_required
    def send_reply(self, source_uuid: str, reply_uuid: str, message: str) -> None:
        """
        Send a reply to a source.
        """
        # Before we send the reply, add the draft to the database with a PENDING
        # reply send status.
        source = self.session.query(db.Source).filter_by(uuid=source_uuid).one()
        reply_status = self.session.query(db.ReplySendStatus).filter_by(
            name=db.ReplySendStatusCodes.PENDING.value).one()
        draft_reply = db.DraftReply(
            uuid=reply_uuid,
            timestamp=datetime.datetime.utcnow(),
            source_id=source.id,
            journalist_id=self.api.token_journalist_uuid,
            file_counter=source.interaction_count,
            content=message,
            send_status_id=reply_status.id,
        )
        self.session.add(draft_reply)
        self.session.commit()

        job = SendReplyJob(source_uuid, reply_uuid, message, self.gpg)
        job.success_signal.connect(self.on_reply_success, type=Qt.QueuedConnection)
        job.failure_signal.connect(self.on_reply_failure, type=Qt.QueuedConnection)

        self.api_job_queue.enqueue(job)

    def on_reply_success(self, reply_uuid: str) -> None:
        logger.info('{} sent successfully'.format(reply_uuid))
        self.session.commit()
        reply = storage.get_reply(self.session, reply_uuid)
        self.reply_succeeded.emit(reply.source.uuid, reply_uuid, reply.content)

    def on_reply_failure(
        self,
        exception: Union[SendReplyJobError, SendReplyJobTimeoutError]
    ) -> None:
        logger.debug('{} failed to send'.format(exception.reply_uuid))

        # only emit failure signal for non-timeout errors
        if isinstance(exception, SendReplyJobError):
            self.reply_failed.emit(exception.reply_uuid)

    def get_file(self, file_uuid: str) -> db.File:
        file = storage.get_file(self.session, file_uuid)
        self.session.refresh(file)
        return file

    def on_logout_success(self, result) -> None:
        logging.info('Client logout successful')

    def on_logout_failure(self, result: Exception) -> None:
        logging.info('Client logout failure')
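# A hypothetical wiring sketch (an assumption, not part of the client): how a
# Controller might be constructed and started. `Window` and `make_session_maker`
# stand in for the real GUI window class and SqlAlchemy session factory here.
def _sketch_of_controller_wiring(sdc_home: str):
    gui = Window()                                  # assumed GUI main window class
    session_maker = make_session_maker(sdc_home)    # assumed session factory
    controller = Controller('http://localhost:8081/', gui, session_maker,
                            sdc_home, proxy=False)
    controller.setup()
    return controller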