def signal_server_event_db(self, _, event_type, rows):
    """
    Apply a server database event to the tree view model. Deletions are
    handled locally while insertions and updates are resolved through an
    asynchronous GraphQL query.
    """
    for row in rows:
        # ignore events for rows belonging to other campaigns
        if str(row.campaign_id) != self.config['campaign_id']:
            continue
        row_id = str(row.id)
        if event_type == 'deleted':
            tree_iter = gui_utilities.gtk_list_store_search(self._tv_model, row_id)
            if tree_iter is None:
                self.logger.warning("received server db event: deleted for non-existent row {0}:{1}".format(self.table_name, row_id))
            else:
                # model mutations must run in the GUI thread
                gui_utilities.glib_idle_add_wait(self._tv_model.remove, tree_iter)
            continue
        # inserted / updated both fetch the node and differ only in the callback
        callback_map = {
            'inserted': self.__async_rpc_cb_server_event_db_inserted,
            'updated': self.__async_rpc_cb_server_event_db_updated
        }
        on_success = callback_map.get(event_type)
        if on_success is None:
            continue
        self.rpc.async_graphql(
            self.node_query,
            query_vars={'id': row_id},
            on_success=on_success,
            when_idle=True
        )
def signal_server_event_db(self, _, event_type, rows):
    """Apply a server database event (inserted / deleted / updated) to the campaign tree view model."""
    # fetch a single node's data from the server by its id
    get_node = lambda id: self.rpc.graphql(self.node_query, {'id': str(id)})['db']['node']
    for row in rows:
        # ignore events for rows belonging to other campaigns
        if str(row.campaign_id) != self.config['campaign_id']:
            continue
        model = self.gobjects['treeview_campaign'].get_model()
        for case in utilities.switch(event_type):
            if case('inserted'):
                row_data = self.format_node_data(get_node(row.id))
                row_data = list(map(self.format_cell_data, row_data))
                row_data.insert(0, str(row.id))
                # append from the GUI thread; no break here so execution falls
                # through to the iter lookup below, verifying the append worked
                gui_utilities.glib_idle_add_wait(model.append, row_data)
            ti = gui_utilities.gtk_list_store_search(model, str(row.id))
            if ti is None:
                self.logger.warning("received server db event: {0} for non-existent row {1}:{2}".format(event_type, self.table_name, str(row.id)))
                break
            if case('deleted'):
                model.remove(ti)
                break
            if case('updated'):
                row_data = self.format_node_data(get_node(row.id))
                # column 0 is the row id, so formatted cells start at index 1
                for idx, cell_data in enumerate(row_data, 1):
                    model[ti][idx] = self.format_cell_data(cell_data)
                break
def _add_catalog_to_tree(self, catalog_id, store):
    """Insert a catalog row, its repositories and their plugins into *store*."""
    def container_row(row_id, title, row_type):
        # catalog and repository rows share the same hidden-widget settings
        return self._named_model(
            id=row_id,
            installed=None,
            enabled=True,
            title=title,
            compatibility=None,
            version=None,
            visible_enabled=False,
            visible_installed=False,
            sensitive_installed=False,
            type=row_type
        )
    # store mutations are marshalled onto the GUI thread
    catalog_iter = gui_utilities.glib_idle_add_wait(self._store_append, store, None, container_row(catalog_id, catalog_id, _ROW_TYPE_CATALOG))
    for repo in self.catalog_plugins.get_repositories(catalog_id):
        repo_iter = gui_utilities.glib_idle_add_wait(self._store_append, store, catalog_iter, container_row(repo.id, repo.title, _ROW_TYPE_REPOSITORY))
        collection = self.catalog_plugins.get_collection(catalog_id, repo.id)
        if collection:
            self._add_plugins_to_tree(catalog_id, repo, store, repo_iter, collection)
def process_pause(self, set_pause=False):
    """
    Pause sending emails if a pause request has been set.

    :param bool set_pause: Whether to request a pause before processing it.
    :return: Whether or not the sending operation was cancelled during the pause.
    :rtype: bool
    """
    if set_pause:
        if isinstance(self.tab, gui_utilities.GladeGObject):
            # toggle the GUI button so its handler performs the pause and the
            # widget stays in sync with the sender state
            gui_utilities.glib_idle_add_wait(lambda: self.tab.pause_button.set_property('active', True))
        else:
            self.pause()
    if not self.paused.is_set():
        return True
    self.tab_notify_status('Paused sending emails, waiting to resume')
    # block until either resumed or cancelled
    self.running.wait()
    self.paused.clear()
    if self.should_stop.is_set():
        self.tab_notify_status('Sending emails cancelled')
        return False
    self.tab_notify_status('Resuming sending emails')
    # the throttle setting may have changed while paused, so refresh it
    self.max_messages_per_minute = float(self.config.get('smtp_max_send_rate', 0.0))
    return True
def _load_catalog_local_tsafe(self):
    """
    Load the plugins which are available into the treeview to make them
    visible to the user. Store mutations are deferred to the GUI thread.
    """
    self.logger.debug('loading the local catalog')
    pm = self.application.plugin_manager
    # reset; populated by the load-error callback passed to load_all below
    self.__load_errors = {}
    pm.load_all(on_error=self._on_plugin_load_error_tsafe)
    # the catalog row is the parent for every locally installed plugin
    node = _ModelNode(
        id=_LOCAL_REPOSITORY_ID,
        installed=None,
        enabled=True,
        title=_LOCAL_REPOSITORY_TITLE,
        compatibility=None,
        version=None,
        visible_enabled=False,
        visible_installed=False,
        sensitive_installed=False,
        type=_ROW_TYPE_CATALOG
    )
    for name, plugin in pm.loaded_plugins.items():
        if self.config['plugins.installed'].get(name):
            # already tracked as installed from a catalog, skip it
            continue
        self.config['plugins.installed'][name] = None
        node.children.append(_ModelNamedRow(
            id=plugin.name,
            installed=True,
            enabled=plugin.name in pm.enabled_plugins,
            title=plugin.title,
            compatibility='Yes' if plugin.is_compatible else 'No',
            version=plugin.version,
            visible_enabled=True,
            visible_installed=True,
            sensitive_installed=False,
            type=_ROW_TYPE_PLUGIN
        ))
    # plugins that failed to load are still listed so the user can see them
    for name in self.__load_errors.keys():
        node.children.append(_ModelNamedRow(
            id=name,
            installed=True,
            enabled=False,
            title="{0} (Load Failed)".format(name),
            compatibility='No',
            version='Unknown',
            visible_enabled=True,
            visible_installed=True,
            sensitive_installed=False,
            type=_ROW_TYPE_PLUGIN
        ))
    gui_utilities.glib_idle_add_wait(self.__store_add_node, node)
def process_pause(self, set_pause=False):
    """Process a pending pause request, blocking until sending resumes; return False when cancelled."""
    if set_pause:
        # toggling the button lets its handler perform the actual pause
        gui_utilities.glib_idle_add_wait(lambda: self.tab.pause_button.set_property('active', True))
    if not self.paused.is_set():
        return True
    GLib.idle_add(self.tab.notify_status, 'Paused Sending Emails, Waiting To Resume\n')
    # block until either resumed or told to exit
    self.running.wait()
    self.paused.clear()
    if self.should_exit.is_set():
        GLib.idle_add(self.tab.notify_status, 'Sending Emails Cancelled\n')
        return False
    GLib.idle_add(self.tab.notify_status, 'Resuming Sending Emails\n')
    # refresh the throttle in case it changed while paused
    self.max_messages_per_minute = float(self.config.get('smtp_max_send_rate', 0.0))
    return True
def _load_missing_plugins_tsafe(self):
    """
    Mark plugins that were not found in any loaded catalog or repository as
    locally installed, then rebuild the local repository entry in the model.

    :raises RuntimeError: If the local plugin repository row is not present in the model.
    """
    # removed the unused "local_model_row = None" local that was never read
    for plugin in self._installed_plugins_treeview_tracker.keys():
        self.logger.warning("plugin {} was not found in any loaded catalog or repo, moving to locally installed".format(plugin))
        self.config['plugins.installed'][plugin] = None
        self._installed_plugins_treeview_tracker[plugin] = None
    for model_row in self._model:
        if _ModelNamedRow(*model_row).id == _LOCAL_REPOSITORY_ID:
            # removal must run in the GUI thread
            gui_utilities.glib_idle_add_wait(self._model.remove, model_row.iter)
            break
    else:
        # the loop completed without finding the local repository row
        raise RuntimeError('failed to find the local plugin repository')
    # reload so the newly tracked plugins appear under the local repository
    self._load_catalog_local_tsafe()
def _load_catalog_local_tsafe(self):
    """
    Load the locally installed plugins into the treeview so they are visible
    to the user. Store mutations are deferred to the GUI thread.
    """
    self.logger.debug('loading the local catalog')
    plugin_manager = self.application.plugin_manager
    # reset; populated by the load-error callback passed to load_all below
    self.__load_errors = {}
    plugin_manager.load_all(on_error=self._on_plugin_load_error_tsafe)
    # parent row under which every locally installed plugin is listed
    node = _ModelNode(
        id=_LOCAL_REPOSITORY_ID,
        installed=None,
        enabled=True,
        title=_LOCAL_REPOSITORY_TITLE,
        compatibility=None,
        version=None,
        visible_enabled=False,
        visible_installed=False,
        sensitive_installed=False,
        type=_ROW_TYPE_CATALOG
    )
    for name, plugin in plugin_manager.loaded_plugins.items():
        if self.config['plugins.installed'].get(name):
            # already tracked as installed from a catalog
            continue
        self.config['plugins.installed'][name] = None
        node.children.append(_ModelNamedRow(
            id=plugin.name,
            installed=True,
            enabled=plugin.name in plugin_manager.enabled_plugins,
            title=plugin.title,
            compatibility='Yes' if plugin.is_compatible else 'No',
            version=plugin.version,
            visible_enabled=True,
            visible_installed=True,
            sensitive_installed=False,
            type=_ROW_TYPE_PLUGIN
        ))
    # plugins that failed to load remain visible, flagged in the title
    node.children.extend(
        _ModelNamedRow(
            id=name,
            installed=True,
            enabled=False,
            title="{0} (Load Failed)".format(name),
            compatibility='No',
            version='Unknown',
            visible_enabled=True,
            visible_installed=True,
            sensitive_installed=False,
            type=_ROW_TYPE_PLUGIN
        ) for name in self.__load_errors.keys()
    )
    gui_utilities.glib_idle_add_wait(self.__store_add_node, node)
def _load_missing_plugins_tsafe(self):
    """
    Mark plugins absent from every catalog and repository as locally
    installed, then rebuild the local repository entry in the model.
    """
    orphaned = self._installed_plugins_treeview_tracker
    for plugin in orphaned.keys():
        self.logger.warning("plugin {} was not found in any loaded catalog or repo, moving to locally installed".format(plugin))
        self.config['plugins.installed'][plugin] = None
        orphaned[plugin] = None
    local_row = next((row for row in self._model if _ModelNamedRow(*row).id == _LOCAL_REPOSITORY_ID), None)
    if local_row is None:
        raise RuntimeError('failed to find the local plugin repository')
    # removal must run in the GUI thread
    gui_utilities.glib_idle_add_wait(self._model.remove, local_row.iter)
    # reload so the newly tracked plugins appear under the local repository
    self._load_catalog_local_tsafe()
def loader_thread_routine(self):
    """The loading routine to be executed within a thread."""
    info_cache = {}
    for graph in self.graphs:
        if self.is_destroyed.is_set():
            break
        # bind the loop variables as default arguments so the callable run in
        # the GUI thread does not depend on late-binding closure semantics
        # (matches the lambda g=graph pattern used by the other loader variants)
        info_cache = gui_utilities.glib_idle_add_wait(lambda g=graph, c=info_cache: g.refresh(c, self.is_destroyed))
    self.last_load_time = time.time()
def signal_server_event_db(self, _, event_type, rows):
    """Apply a server database event (inserted / deleted / updated) to the tree view model."""
    # fetch a single node's data from the server by its id
    get_node = lambda id: self.rpc.graphql(self.node_query, {'id': str(id)})['db']['node']
    for row in rows:
        # ignore events for rows belonging to other campaigns
        if str(row.campaign_id) != self.config['campaign_id']:
            continue
        for case in utilities.switch(event_type):
            if case('inserted'):
                # the store row is the id followed by the formatted node data
                row_data = (str(row.id),) + tuple(self.format_node_data(get_node(row.id)))
                # append from the GUI thread; no break here so execution falls
                # through to the iter lookup below, verifying the append worked
                gui_utilities.glib_idle_add_wait(self._tv_model.append, row_data)
            ti = gui_utilities.gtk_list_store_search(self._tv_model, str(row.id))
            if ti is None:
                self.logger.warning("received server db event: {0} for non-existent row {1}:{2}".format(event_type, self.table_name, str(row.id)))
                break
            if case('deleted'):
                self._tv_model.remove(ti)
                break
            if case('updated'):
                row_data = self.format_node_data(get_node(row.id))
                # column 0 is the row id, so updated cells start at index 1
                for idx, cell_data in enumerate(row_data, 1):
                    self._tv_model[ti][idx] = cell_data
                break
def loader_thread_routine(self):
    """The loading routine to be executed within a thread."""
    # use the idiomatic "not in" membership test instead of "not ... in"
    if 'campaign_id' not in self.config:
        return
    # nothing to refresh if the campaign no longer exists on the server
    if not self.rpc.remote_table_row('campaigns', self.config['campaign_id']):
        return
    info_cache = {}
    for graph in self.graphs:
        if self.is_destroyed.is_set():
            break
        # each graph refreshes in the GUI thread and contributes to the shared cache
        info_cache.update(gui_utilities.glib_idle_add_wait(lambda g=graph: g.refresh(info_cache, self.is_destroyed)))
    self.last_load_time = time.time()
def loader_thread_routine(self, store):
    """
    The loading routine to be executed within a thread.

    :param store: The store object to place the new data.
    :type store: :py:class:`Gtk.ListStore`
    """
    # disable the treeview while loading to block user interaction
    gui_utilities.glib_idle_add_wait(lambda: self.gobjects['treeview_campaign'].set_property('sensitive', False))
    campaign_id = self.config['campaign_id']
    count = 500  # rows fetched per GraphQL page
    page_info = {'endCursor': None, 'hasNextPage': True}
    while page_info['hasNextPage']:
        if self.rpc is None:
            # the client disconnected, nothing more can be fetched
            break
        results = self.rpc.graphql(self.table_query, {'campaign': campaign_id, 'count': count, 'cursor': page_info['endCursor']})
        if self.loader_thread_stop.is_set():
            break
        if self.is_destroyed.is_set():
            break
        for edge in results['db']['campaign'][self.table_name]['edges']:
            row_data = self.format_node_data(edge['node'])
            row_data = list(map(self.format_cell_data, row_data))
            # column 0 is always the node id
            row_data.insert(0, str(edge['node']['id']))
            gui_utilities.glib_idle_add_wait(store.append, row_data)
        page_info = results['db']['campaign'][self.table_name]['pageInfo']
    if self.is_destroyed.is_set():
        # the window is gone; do not touch widgets or record the load time
        return
    gui_utilities.glib_idle_add_wait(lambda: self.gobjects['treeview_campaign'].set_property('sensitive', True))
    self.last_load_time = time.time()
def loader_thread_routine(self, store):
    """
    The loading routine to be executed within a thread.

    :param store: The store object to place the new data.
    :type store: :py:class:`Gtk.ListStore`
    """
    # disable the treeview while loading to block user interaction
    gui_utilities.glib_idle_add_wait(lambda: self.gobjects['treeview_campaign'].set_property('sensitive', False))
    campaign_id = self.config['campaign_id']
    count = 500  # rows fetched per GraphQL page
    page_info = {'endCursor': None, 'hasNextPage': True}
    while page_info['hasNextPage']:
        if self.rpc is None:
            # the client disconnected, nothing more can be fetched
            break
        try:
            results = self.rpc.graphql(self.table_query, {'campaign': campaign_id, 'count': count, 'cursor': page_info['endCursor']})
        except errors.KingPhisherGraphQLQueryError as error:
            # log for diagnostics but let the caller handle the failure
            self.logger.error('graphql error: ' + error.message)
            raise
        if self.loader_thread_stop.is_set():
            break
        if self.is_destroyed.is_set():
            break
        for edge in results['db']['campaign'][self.table_name]['edges']:
            node = edge['node']
            # the store row is the node id followed by the formatted columns
            row_data = (str(node['id']),) + tuple(self.format_node_data(node))
            gui_utilities.glib_idle_add_wait(store.append, row_data)
        page_info = results['db']['campaign'][self.table_name]['pageInfo']
    if self.is_destroyed.is_set():
        # the window is gone; do not touch widgets or record the load time
        return
    gui_utilities.glib_idle_add_wait(lambda: self.gobjects['treeview_campaign'].set_property('sensitive', True))
    self.last_load_time = time.time()
def loader_thread_routine(self, store):
    """
    The loading routine to be executed within a thread.

    :param store: The store object to place the new data.
    :type store: :py:class:`Gtk.ListStore`
    """
    def set_treeview_sensitive(sensitive):
        # widget properties may only be touched from the GUI thread
        gui_utilities.glib_idle_add_wait(lambda: self.gobjects['treeview_campaign'].set_property('sensitive', sensitive))
    set_treeview_sensitive(False)
    query_filter = {'campaign_id': self.config['campaign_id']}
    for row in self.rpc.remote_table(self.remote_table_name, query_filter=query_filter):
        if self.is_destroyed.is_set() or self.rpc is None:
            break
        formatted = self.format_row_data(row)
        if formatted is None:
            # rows which can not be formatted are purged server-side
            self.rpc('db/table/delete', self.remote_table_name, row.id)
            continue
        row_data = [str(row.id)] + [self.format_cell_data(cell) for cell in formatted]
        gui_utilities.glib_idle_add_wait(store.append, row_data)
    if self.is_destroyed.is_set():
        return
    set_treeview_sensitive(True)
    self.last_load_time = time.time()
def loader_thread_routine(self, store):
    """
    The loading routine to be executed within a thread.

    :param store: The store object to place the new data.
    :type store: :py:class:`Gtk.ListStore`
    """
    treeview = self.gobjects['treeview_campaign']
    # keep the treeview disabled until the data has finished loading
    gui_utilities.glib_idle_add_wait(lambda: treeview.set_property('sensitive', False))
    query_filter = {'campaign_id': self.config['campaign_id']}
    for row in self.rpc.remote_table(self.remote_table_name, query_filter=query_filter):
        if self.loader_thread_stop.is_set():
            break
        if self.is_destroyed.is_set():
            break
        if self.rpc is None:
            break
        row_data = self.format_row_data(row)
        if row_data is None:
            # rows which can not be formatted are purged server-side
            self.rpc('db/table/delete', self.remote_table_name, row.id)
            continue
        cells = [self.format_cell_data(value) for value in row_data]
        gui_utilities.glib_idle_add_wait(store.append, [str(row.id)] + cells)
    if self.is_destroyed.is_set():
        return
    gui_utilities.glib_idle_add_wait(lambda: treeview.set_property('sensitive', True))
    self.last_load_time = time.time()
def process_pause(self, set_pause=False):
    """
    Pause sending emails if a pause request has been set.

    :param bool set_pause: Whether to request a pause before processing it.
    :return: Whether or not the sending operation was cancelled during the pause.
    :rtype: bool
    """
    if set_pause:
        if isinstance(self.tab, gui_utilities.GladeGObject):
            # toggle the GUI button so its handler performs the pause and the
            # widget state stays in sync with the sender
            gui_utilities.glib_idle_add_wait(lambda: self.tab.pause_button.set_property('active', True))
        else:
            self.pause()
    if self.paused.is_set():
        self.tab_notify_status('Paused sending emails, waiting to resume')
        # block until either resumed or told to stop
        self.running.wait()
        self.paused.clear()
        if self.should_stop.is_set():
            self.tab_notify_status('Sending emails cancelled')
            return False
        self.tab_notify_status('Resuming sending emails')
        # refresh the throttle in case it changed while paused
        self.max_messages_per_minute = float(self.config.get('smtp_max_send_rate', 0.0))
    return True
def loader_thread_routine(self):
    """The loading routine to be executed within a thread."""
    # use the idiomatic "not in" membership test instead of "not ... in"
    if 'campaign_id' not in self.config:
        return
    # nothing to refresh if the campaign no longer exists on the server
    if not self.application.get_graphql_campaign():
        return
    info_cache = {}
    for graph in self.graphs:
        if self.loader_thread_stop.is_set():
            break
        if self.is_destroyed.is_set():
            break
        info_cache.update(gui_utilities.glib_idle_add_wait(lambda g=graph: g.refresh(info_cache, self.loader_thread_stop)))
    else:
        # for-else: only record the load time when every graph was refreshed
        self.last_load_time = time.time()
def loader_thread_routine(self):
    """The loading routine to be executed within a thread."""
    # use the idiomatic "not in" membership test instead of "not ... in"
    if 'campaign_id' not in self.config:
        return
    # nothing to refresh if the campaign no longer exists on the server
    if not self._get_graphql_campaign():
        return
    info_cache = {}
    for graph in self.graphs:
        if self.loader_thread_stop.is_set():
            break
        if self.is_destroyed.is_set():
            break
        info_cache.update(gui_utilities.glib_idle_add_wait(lambda g=graph: g.refresh(info_cache, self.loader_thread_stop)))
    else:
        # for-else: only record the load time when every graph was refreshed
        self.last_load_time = time.time()
def loader_thread_routine(self):
    """The loading routine to be executed within a thread."""
    # use the idiomatic "not in" membership test instead of "not ... in"
    if 'campaign_id' not in self.config:
        return
    try:
        campaign = self.application.get_graphql_campaign()
    except (ConnectionError, advancedhttpserver.RPCConnectionError):
        # the server is unreachable; skip this refresh cycle quietly
        return
    if campaign is None:
        return
    info_cache = {}
    for graph in self.graphs:
        if self.loader_thread_stop.is_set():
            break
        if self.is_destroyed.is_set():
            break
        info_cache.update(gui_utilities.glib_idle_add_wait(lambda g=graph: g.refresh(info_cache, self.loader_thread_stop)))
    else:
        # for-else: only record the load time when every graph was refreshed
        self.last_load_time = time.time()
def row_loader_thread_routine(self, store):
    """
    Load rows for the remote campaign table into *store* from within a thread.

    :param store: The store object to place the new data.
    """
    # disable the treeview while loading to block user interaction
    gui_utilities.glib_idle_add_wait(lambda: self.gobjects['treeview_campaign'].set_property('sensitive', False))
    for row_data in self.parent.rpc.remote_table('campaign/' + self.remote_table_name, self.config['campaign_id']):
        row_id = row_data['id']
        row_data = self.format_row_data(row_data)
        if row_data is None:  # "is None", not "== None"
            # rows which can not be formatted are purged server-side
            self.parent.rpc(self.remote_table_name + '/delete', row_id)
            continue
        # list() is required: on Python 3 map() returns an iterator which has
        # no insert() method, so the original code crashed here
        row_data = list(map(lambda x: '' if x is None else str(x), row_data))
        row_data.insert(0, str(row_id))
        if self.is_destroyed.is_set():
            return
        gui_utilities.glib_idle_add_wait(store.append, row_data)
    gui_utilities.glib_idle_add_wait(lambda: self.gobjects['treeview_campaign'].set_property('sensitive', True))
def _load_plugins(self):
    """
    Load the plugins which are available into the treeview to make them
    visible to the user.
    """
    self.logger.debug('loading plugins')
    self._update_status_bar('Loading plugins...', idle=True)
    store = self._model
    store.clear()
    pm = self.application.plugin_manager
    # reset; populated by the load-error callback passed to load_all below
    self._module_errors = {}
    pm.load_all(on_error=self._on_plugin_load_error)
    # parent row for every locally installed plugin
    model = (_LOCAL_REPOSITORY_ID, None, True, _LOCAL_REPOSITORY_TITLE, None, None, False, False, False, _ROW_TYPE_CATALOG)
    catalog_row = gui_utilities.glib_idle_add_wait(self._store_append, store, None, model)
    models = []
    for name, plugin in pm.loaded_plugins.items():
        if self.config['plugins.installed'].get(name):
            # already tracked as installed from a catalog
            continue
        self.config['plugins.installed'][name] = None
        models.append(self._named_model(
            id=plugin.name,
            installed=True,
            enabled=plugin.name in pm.enabled_plugins,
            title=plugin.title,
            compatibility='Yes' if plugin.is_compatible else 'No',
            version=plugin.version,
            visible_enabled=True,
            visible_installed=True,
            sensitive_installed=False,
            type=_ROW_TYPE_PLUGIN
        ))
    # batch-append the plugin rows in the GUI thread
    gui_utilities.glib_idle_add_once(self._store_extend, store, catalog_row, models)
    del models
    # plugins that failed to load remain visible, flagged in the title
    for name in self._module_errors.keys():
        model = (name, True, False, "{0} (Load Failed)".format(name), 'No', 'Unknown', True, True, False, _ROW_TYPE_PLUGIN)
        gui_utilities.glib_idle_add_once(self._store_append, store, catalog_row, model)
    self.logger.debug('loading catalog into plugin treeview')
    for catalog_id in self.catalog_plugins.catalog_ids():
        self._add_catalog_to_tree(catalog_id, store)
    # catalogs present only in the cache (unreachable) are shown offline
    catalog_cache = self.catalog_plugins.get_cache()
    for catalog_id in catalog_cache:
        if self.catalog_plugins.catalogs.get(catalog_id, None):
            # already added from the live catalog above
            continue
        named_catalog = catalog_cache[catalog_id]['value']
        model = (catalog_id, None, True, catalog_id, None, None, False, False, False, _ROW_TYPE_CATALOG)
        catalog_row = gui_utilities.glib_idle_add_wait(self._store_append, store, None, model)
        for repo in named_catalog.repositories:
            model = (repo.id, None, True, repo.title, None, None, False, False, False, _ROW_TYPE_REPOSITORY)
            repo_row = gui_utilities.glib_idle_add_wait(self._store_append, store, catalog_row, model)
            self._add_plugins_offline(catalog_id, repo.id, store, repo_row)
    gui_utilities.glib_idle_add_once(self._treeview_unselect)
    self._update_status_bar('Loading completed', idle=True)
def _import_campaign(self):
    """
    Used by the import thread to import the campaign into the database.
    Through this process after every major action, the thread will check
    to see if it has been requested to stop.
    """
    self.logger.debug("import campaign running in tid: 0x{0:x}".format(threading.current_thread().ident))
    if not self.campaign_info:
        return
    # prevent user from changing campaign info during import
    start_time = datetime.datetime.now()
    GLib.idle_add(self.button_import_campaign.set_sensitive, False)
    GLib.idle_add(self.button_select.set_sensitive, False)
    GLib.idle_add(self.spinner.start)
    batch_size = 100  # rows inserted per RPC call
    if self.thread_import_campaign.stopped():
        return
    self.preprep_xml_data()
    # create the destination campaign and record the server-assigned id
    self.campaign_info.find('id').text = self.rpc('campaign/new', self.campaign_info.find('name').text, self.campaign_info.find('description').text)
    self.logger.info("created new campaign id: {}".format(self.campaign_info.find('id').text))
    nodes_completed = 0
    # total XML node count drives the progress-bar fraction
    node_count = float(len(self.campaign_info.findall('.//*')))
    if self.thread_import_campaign.stopped():
        return
    # NOTE(review): getiterator() was deprecated and removed in Python 3.9;
    # Element.iter() is the replacement — confirm target runtime version
    for nods in self.campaign_info.getiterator():
        if nods.tag == 'campaign_id':
            nods.text = self.campaign_info.find('id').text
    self._update_text_view("Campaign created, ID set to {}".format(self.campaign_info.find('id').text), idle=True)
    keys = []
    values = []
    if self.thread_import_campaign.stopped():
        return
    # top-level scalar campaign fields; child tables are handled below
    for elements in self.campaign_info:
        if elements.tag in ('id', 'landing_pages', 'messages', 'visits', 'credentials', 'deaddrop_deployments', 'deaddrop_connections'):
            continue
        keys.append(elements.tag)
        values.append(elements.text)
    self.rpc('db/table/set', 'campaigns', int(self.campaign_info.find('id').text), tuple(keys), tuple(values))
    nodes_completed += float(len(values) + 1)
    percentage_completed = nodes_completed / node_count
    GLib.idle_add(self.import_progress.set_fraction, percentage_completed)
    if self.thread_import_campaign.stopped():
        return
    for tables in ('landing_pages', 'messages', 'visits', 'credentials', 'deaddrop_deployments', 'deaddrop_connections'):
        inserted_ids = []
        if self.thread_import_campaign.stopped():
            return
        self._update_text_view("Serializing table {} data for import".format(tables), idle=True)
        keys, rows = self._get_keys_values(self.campaign_info.find(tables))
        self._update_text_view("Working on table {} adding {} rows".format(tables, len(rows)), idle=True)
        if self.thread_import_campaign.stopped():
            return
        # make table rows easy to manage for updating new ids returned
        table_rows = []
        for row in rows:
            row = dict(zip(keys, row))
            table_rows.append(row)
        while rows and not self.thread_import_campaign.stopped():
            try:
                # deconflict_ids lets the server renumber rows that collide
                inserted_ids = inserted_ids + self.rpc('/db/table/insert/multi', tables, keys, rows[:batch_size], deconflict_ids=True)
            except advancedhttpserver.RPCError:
                # ask the user (via the GUI thread) whether to remove partial data
                response = gui_utilities.glib_idle_add_wait(self.failed_import_action)
                self._import_cleanup(remove_campaign=response)
                failed_string = 'Failed to import campaign, all partial campaign data ' + ('has been removed' if response else 'was left in place')
                self.logger.warning(failed_string.lower())
                self._update_text_view(failed_string, idle=True)
                return
            rows = rows[batch_size:]
            nodes_completed += float(batch_size * len(keys))
            percentage_completed = nodes_completed / node_count
            GLib.idle_add(self.import_progress.set_fraction, percentage_completed)
        if self.thread_import_campaign.stopped():
            return
        # update id fields to maintain relationships
        self._update_text_view("Updating dependencies for table: {}".format(tables), idle=True)
        for id_ in inserted_ids:
            if id_ != table_rows[inserted_ids.index(id_)]['id']:
                self._update_id(self.campaign_info, ['id', "{}_id".format(tables[:-1])], table_rows[inserted_ids.index(id_)]['id'], id_)
    GLib.idle_add(self.import_progress.set_fraction, 1.0)
    self._import_cleanup()
    done_string = "Done importing campaign. Importing the campaign took {}".format(datetime.datetime.now() - start_time)
    self._update_text_view(done_string, idle=True)
    self.logger.info(done_string.lower())
def _import_campaign(self):
    """
    Used by the import thread to import the campaign into the database.
    Through this process after every major action, the thread will check
    to see if it has been requested to stop.
    """
    self.logger.debug("import campaign running in tid: 0x{0:x}".format(threading.current_thread().ident))
    if not self.campaign_info:
        return
    # prevent user from changing campaign info during import
    start_time = datetime.datetime.now()
    GLib.idle_add(self.button_import_campaign.set_sensitive, False)
    GLib.idle_add(self.button_select.set_sensitive, False)
    GLib.idle_add(self.spinner.start)
    batch_size = 100  # rows inserted per RPC call
    if self.thread_import_campaign.stopped():
        return
    self.preprep_xml_data()
    # create the destination campaign and record the server-assigned id
    self.campaign_info.find('id').text = self.rpc(
        'campaign/new',
        self.campaign_info.find('name').text,
        self.campaign_info.find('description').text
    )
    self.logger.info("created new campaign id: {}".format(self.campaign_info.find('id').text))
    nodes_completed = 0
    # total XML node count drives the progress-bar fraction
    node_count = float(len(self.campaign_info.findall('.//*')))
    if self.thread_import_campaign.stopped():
        return
    # Element.iter() replaces getiterator(), which was deprecated and then
    # removed in Python 3.9 (iter() is available since Python 2.7 / 3.2)
    for nods in self.campaign_info.iter():
        if nods.tag == 'campaign_id':
            nods.text = self.campaign_info.find('id').text
    self._update_text_view("Campaign created, ID set to {}".format(self.campaign_info.find('id').text), idle=True)
    keys = []
    values = []
    if self.thread_import_campaign.stopped():
        return
    # top-level scalar campaign fields; child tables are handled below
    for elements in self.campaign_info:
        if elements.tag in ('id', 'landing_pages', 'messages', 'visits', 'credentials', 'deaddrop_deployments', 'deaddrop_connections'):
            continue
        keys.append(elements.tag)
        values.append(elements.text)
    self.rpc('db/table/set', 'campaigns', int(self.campaign_info.find('id').text), tuple(keys), tuple(values))
    nodes_completed += float(len(values) + 1)
    percentage_completed = nodes_completed / node_count
    GLib.idle_add(self.import_progress.set_fraction, percentage_completed)
    if self.thread_import_campaign.stopped():
        return
    for tables in ('landing_pages', 'messages', 'visits', 'credentials', 'deaddrop_deployments', 'deaddrop_connections'):
        inserted_ids = []
        if self.thread_import_campaign.stopped():
            return
        self._update_text_view("Serializing table {} data for import".format(tables), idle=True)
        keys, rows = self._get_keys_values(self.campaign_info.find(tables))
        self._update_text_view("Working on table {} adding {} rows".format(tables, len(rows)), idle=True)
        if self.thread_import_campaign.stopped():
            return
        # make table rows easy to manage for updating new ids returned
        table_rows = []
        for row in rows:
            row = dict(zip(keys, row))
            table_rows.append(row)
        while rows and not self.thread_import_campaign.stopped():
            try:
                # deconflict_ids lets the server renumber rows that collide
                inserted_ids = inserted_ids + self.rpc('/db/table/insert/multi', tables, keys, rows[:batch_size], deconflict_ids=True)
            except advancedhttpserver.RPCError:
                # ask the user (via the GUI thread) whether to remove partial data
                response = gui_utilities.glib_idle_add_wait(self.failed_import_action)
                self._import_cleanup(remove_campaign=response)
                failed_string = 'Failed to import campaign, all partial campaign data ' + ('has been removed' if response else 'was left in place')
                self.logger.warning(failed_string.lower())
                self._update_text_view(failed_string, idle=True)
                return
            rows = rows[batch_size:]
            nodes_completed += float(batch_size * len(keys))
            percentage_completed = nodes_completed / node_count
            GLib.idle_add(self.import_progress.set_fraction, percentage_completed)
        if self.thread_import_campaign.stopped():
            return
        # update id fields to maintain relationships
        self._update_text_view("Updating dependencies for table: {}".format(tables), idle=True)
        for id_ in inserted_ids:
            if id_ != table_rows[inserted_ids.index(id_)]['id']:
                self._update_id(
                    self.campaign_info,
                    ['id', "{}_id".format(tables[:-1])],
                    table_rows[inserted_ids.index(id_)]['id'],
                    id_
                )
    GLib.idle_add(self.import_progress.set_fraction, 1.0)
    self._import_cleanup()
    done_string = "Done importing campaign. Importing the campaign took {}".format(datetime.datetime.now() - start_time)
    self._update_text_view(done_string, idle=True)
    self.logger.info(done_string.lower())
def loader_thread_routine(self):
    """Refresh every graph from within the loader thread, chaining the shared info cache."""
    cache = {}
    for graph in self.graphs:
        # each refresh runs in the GUI thread and returns the updated cache,
        # which is fed into the next graph's refresh
        cache = gui_utilities.glib_idle_add_wait(graph.refresh, cache)