def do_exit(self):
	"""Tear down the main window and disconnect from the server before exiting."""
	# hide the window first so the teardown is not visible to the user
	self.main_window.hide()
	# destroy child widgets while the main loop can still process their
	# cleanup events, then flush pending GTK events
	gui_utilities.gtk_widget_destroy_children(self.main_window)
	gui_utilities.gtk_sync()
	self.server_disconnect()
	self.main_window.destroy()
	return
def signal_destroy(self, gobject):
	"""
	Handle the destroy signal by marking this instance as destroyed and,
	if the loader thread is currently running, waiting for it to exit.

	:param gobject: The object which emitted the destroy signal.
	"""
	self.is_destroyed.set()
	if not (isinstance(self.loader_thread, threading.Thread) and self.loader_thread.is_alive()):
		return
	self.logger.debug("waiting on thread: {0}.loader_thread (tid: 0x{1:x})".format(self.__class__.__name__, self.loader_thread.ident))
	while self.loader_thread.is_alive():
		# process pending GTK events so the GUI stays responsive while waiting
		gui_utilities.gtk_sync()
		# briefly join instead of busy-spinning at 100% CPU; this mirrors the
		# sync-then-join pattern used by _sync_loader_thread
		self.loader_thread.join(0.25)
	self.logger.debug("joined thread: {0}.loader_thread (tid: 0x{1:x})".format(self.__class__.__name__, self.loader_thread.ident))
def do_exit(self):
	"""Tear down this window and disconnect from the server before exiting."""
	# hide first so the teardown is not visible to the user
	self.hide()
	# destroy child widgets while the main loop can still process their
	# cleanup events, then flush pending GTK events
	gui_utilities.gtk_widget_destroy_children(self)
	gui_utilities.gtk_sync()
	self.server_disconnect()
	self.destroy()
	return
def signal_window_destroy(self, window):
	"""
	Handle the window destroy signal by cleaning up child widgets,
	disconnecting from the server and stopping the GTK main loop.

	:param window: The window which emitted the destroy signal.
	"""
	gui_utilities.gtk_widget_destroy_children(self)
	gui_utilities.gtk_sync()
	self.server_disconnect()
	# persist the configuration before the process exits
	self.save_config()
	Gtk.main_quit()
	return
def do_exit(self):
	"""Shut down plugins and tear down the main window before exiting."""
	# stop plugins first so they can clean up while the GUI still exists
	self.plugin_manager.shutdown()
	self.main_window.hide()
	# destroy child widgets while the main loop can still process their
	# cleanup events, then flush pending GTK events
	gui_utilities.gtk_widget_destroy_children(self.main_window)
	gui_utilities.gtk_sync()
	self.emit('server-disconnected')
	self.main_window.destroy()
	return
def _sync_loader_thread(self): """ Synchronize the loader thread by ensuring that it is stopped. If it is currently running, this will use :py:attr:`~.loader_thread_stop` to request that the loader stops early. """ if not self.loader_thread_is_running: return # it's alive so tell it to stop, wait for it, then proceed self.loader_thread_stop.set() while self.loader_thread.is_alive(): gui_utilities.gtk_sync() self.loader_thread.join(1)
def wait(self):
	"""
	Wait for the cloning operation to complete and return whether the
	operation was successful or not.

	:return: True if the operation was successful.
	:rtype: bool
	"""
	# spin the GTK main loop until the page load has actually started
	while not self.load_started:
		gui_utilities.gtk_sync()
	# keep spinning until the page itself and all outstanding tracked
	# resources have finished loading
	while self.webview.get_property('is-loading') or len(self.__web_resources):
		gui_utilities.gtk_sync()
	return not self.load_failed
def wait(self):
	"""
	Block until the clone operation has finished, then report whether it
	succeeded.

	:return: True if the operation was successful.
	:rtype: bool
	"""
	sync = gui_utilities.gtk_sync
	# first wait for the load to actually begin
	while not self.load_started:
		sync()
	# then wait for the page and every outstanding tracked resource
	while self.webview.get_property('is-loading') or len(self.__web_resources):
		sync()
	return not self.load_failed
def interact(self):
	"""Display the dialog and process user responses until it is dismissed."""
	self.dialog.show_all()
	self.set_status('Waiting')
	# WebKit2GTK+ is required for cloning; bail out early when unavailable
	if not web_cloner.has_webkit2:
		gui_utilities.show_dialog_error('WebKit2GTK+ Is Unavailable', self.dialog, 'The WebKit2GTK+ package is not available.')
		self.dialog.destroy()
		return
	# each APPLY response attempts one clone operation; validation failures
	# use continue to re-prompt without closing the dialog
	while self.dialog.run() == Gtk.ResponseType.APPLY:
		target_url = self.entry_target.get_text()
		if not target_url:
			gui_utilities.show_dialog_error('Missing Information', self.dialog, 'Please set the target URL.')
			self.set_status('Missing Information')
			continue
		dest_dir = self.entry_directory.get_text()
		if not dest_dir:
			gui_utilities.show_dialog_error('Missing Information', self.dialog, 'Please set the destination directory.')
			self.set_status('Missing Information')
			continue
		if not os.access(dest_dir, os.W_OK):
			gui_utilities.show_dialog_error('Invalid Directory', self.dialog, 'Can not write to the specified directory.')
			self.set_status('Invalid Directory')
			continue
		self.objects_save_to_config()
		self.set_status('Cloning', spinner_active=True)
		cloner = web_cloner.WebPageCloner(target_url, dest_dir)
		# temporarily repurpose the cancel button to stop the clone operation
		signal_id = self.button_cancel.connect('clicked', lambda _: cloner.stop_cloning())
		original_label = self.button_cancel.get_label()
		self.button_cancel.set_label('Cancel')
		cloner.wait()
		# restore the cancel button's original label and handler
		self.button_cancel.set_label(original_label)
		self.button_cancel.disconnect(signal_id)
		if cloner.load_failed:
			self.set_status('Failed')
			gui_utilities.show_dialog_error('Operation Failed', self.dialog, 'The web page clone operation failed.')
			continue
		for resource in cloner.cloned_resources.values():
			# skip resources which are already present in the model
			if gui_utilities.gtk_list_store_search(self.resources, resource.resource, column=0):
				continue
			self.resources.append(_ModelNamedRow(
				path=resource.resource,
				mime_type=resource.mime_type or 'N/A',
				size=resource.size
			))
		self.set_status('Done')
		gui_utilities.gtk_sync()
	# after the dialog loop, offer to upload any cloned pages via SFTP
	if len(self.resources) and gui_utilities.show_dialog_yes_no('Transfer Cloned Pages', self.dialog, 'Would you like to start the SFTP client\nto upload the cloned pages?'):
		self.application.emit('sftp-client-start')
	self.dialog.destroy()
def interact(self):
	"""Show the dialog and handle user interaction until it is closed."""
	self.dialog.show_all()
	self.set_status('Waiting')
	# cloning depends on WebKit2GTK+; abort immediately if it is missing
	if not web_cloner.has_webkit2:
		gui_utilities.show_dialog_error('WebKit2GTK+ Is Unavailable', self.dialog, 'The WebKit2GTK+ package is not available.')
		self.dialog.destroy()
		return
	# loop for as long as the user keeps pressing APPLY; input validation
	# errors re-prompt via continue instead of closing the dialog
	while self.dialog.run() == Gtk.ResponseType.APPLY:
		target_url = self.entry_target.get_text()
		if not target_url:
			gui_utilities.show_dialog_error('Missing Information', self.dialog, 'Please set the target URL.')
			self.set_status('Missing Information')
			continue
		dest_dir = self.entry_directory.get_text()
		if not dest_dir:
			gui_utilities.show_dialog_error('Missing Information', self.dialog, 'Please set the destination directory.')
			self.set_status('Missing Information')
			continue
		if not os.access(dest_dir, os.W_OK):
			gui_utilities.show_dialog_error('Invalid Directory', self.dialog, 'Can not write to the specified directory.')
			self.set_status('Invalid Directory')
			continue
		self.objects_save_to_config()
		self.set_status('Cloning', spinner_active=True)
		cloner = web_cloner.WebPageCloner(target_url, dest_dir)
		# the cancel button doubles as a stop button while cloning is active
		signal_id = self.button_cancel.connect('clicked', lambda _: cloner.stop_cloning())
		original_label = self.button_cancel.get_label()
		self.button_cancel.set_label('Cancel')
		cloner.wait()
		# restore the button's label and disconnect the temporary handler
		self.button_cancel.set_label(original_label)
		self.button_cancel.disconnect(signal_id)
		if cloner.load_failed:
			self.set_status('Failed')
			gui_utilities.show_dialog_error('Operation Failed', self.dialog, 'The web page clone operation failed.')
			continue
		for resource in cloner.cloned_resources.values():
			# avoid duplicate entries in the resource model
			if gui_utilities.gtk_list_store_search(self.resources, resource.resource, column=0):
				continue
			self.resources.append(_ModelNamedRow(
				path=resource.resource,
				mime_type=resource.mime_type or 'N/A',
				size=resource.size
			))
		self.set_status('Done')
		gui_utilities.gtk_sync()
	# once the dialog loop ends, offer to upload the cloned pages via SFTP
	if len(self.resources) and gui_utilities.show_dialog_yes_no('Transfer Cloned Pages', self.dialog, 'Would you like to start the SFTP client\nto upload the cloned pages?'):
		self.application.emit('sftp-client-start')
	self.dialog.destroy()
def campaign_to_xml(rpc, campaign_id, xml_file, encoding='utf-8'):
	"""
	Load all information for a particular campaign and dump it to an XML file.

	:param rpc: The connected RPC instance to load the information with.
	:type rpc: :py:class:`.KingPhisherRPCClient`
	:param campaign_id: The ID of the campaign to load the information for.
	:param str xml_file: The destination file for the XML data.
	:param str encoding: The encoding to use for strings.
	"""
	tzutc = dateutil.tz.tzutc()
	root = ET.Element('king_phisher')
	# generate export metadata
	metadata = ET.SubElement(root, 'metadata')
	serializers.to_elementtree_subelement(
		metadata,
		'timestamp',
		datetime.datetime.utcnow().replace(tzinfo=tzutc),
		attrib={'utc': 'true'}
	)
	serializers.to_elementtree_subelement(metadata, 'version', '1.3')
	campaign = ET.SubElement(root, 'campaign')
	logger.info('gathering campaign information for export')
	campaign_info = _get_graphql_campaignexport(rpc, campaign_id)
	gui_utilities.gtk_sync()
	# export the scalar campaign fields; the table connections are handled below
	for key, value in campaign_info.items():
		if key in ('landingPages', 'messages', 'visits', 'credentials', 'deaddropDeployments', 'deaddropConnections'):
			continue
		if isinstance(value, datetime.datetime):
			value = value.replace(tzinfo=tzutc)
		serializers.to_elementtree_subelement(campaign, key, value)
		gui_utilities.gtk_sync()

	# tables with a campaign_id field, exported page by page via cursors
	table_names = ('landing_pages', 'messages', 'visits', 'credentials', 'deaddrop_deployments', 'deaddrop_connections')
	cursor = None
	table_elements = {}
	while True:
		gui_utilities.gtk_sync()
		for table_name in table_names:
			gui_utilities.gtk_sync()
			# hoist the camel-case key conversion and connection lookup which
			# were previously repeated three times per table
			table_info = campaign_info[parse_case_snake_to_camel(table_name, upper_first=False)]
			if table_info['pageInfo']['hasNextPage']:
				cursor = table_info['pageInfo']['endCursor']
			if table_name not in table_elements:
				table_elements[table_name] = ET.SubElement(campaign, table_name)
			for node in table_info['edges']:
				gui_utilities.gtk_sync()
				row = node['node']
				# table_name[:-1] strips the plural 's' for the row element name
				table_row_element = ET.SubElement(table_elements[table_name], table_name[:-1])
				for key, value in row.items():
					gui_utilities.gtk_sync()
					if isinstance(value, datetime.datetime):
						value = value.replace(tzinfo=tzutc)
					serializers.to_elementtree_subelement(table_row_element, key, value)
		# process-then-fetch: stop once no table reported another page. The
		# previous break condition (not cursor and last_cursor) never fired
		# when the first page was also the last, causing an infinite loop
		# which duplicated every row on each pass.
		if cursor is None:
			break
		campaign_info = _get_graphql_campaignexport(rpc, campaign_id, cursor)
		cursor = None
	logger.info('completed processing campaign information for export')
	document = minidom.parseString(ET.tostring(root))
	with open(xml_file, 'wb') as file_h:
		file_h.write(document.toprettyxml(indent=' ', encoding=encoding))
	logger.info('campaign export complete')
def campaign_to_xml(rpc, campaign_id, xml_file, encoding='utf-8'):
	"""
	Load all information for a particular campaign and dump it to an XML file.

	:param rpc: The connected RPC instance to load the information with.
	:type rpc: :py:class:`.KingPhisherRPCClient`
	:param campaign_id: The ID of the campaign to load the information for.
	:param str xml_file: The destination file for the XML data.
	:param str encoding: The encoding to use for strings.
	"""
	tzutc = dateutil.tz.tzutc()
	root = ET.Element('king_phisher')
	# generate export metadata
	metadata = ET.SubElement(root, 'metadata')
	serializers.to_elementtree_subelement(
		metadata,
		'timestamp',
		datetime.datetime.utcnow().replace(tzinfo=tzutc),
		attrib={'utc': 'true'}
	)
	serializers.to_elementtree_subelement(metadata, 'version', '1.3')
	campaign = ET.SubElement(root, 'campaign')
	logger.info('gathering campaign information for export')
	try:
		campaign_info = _get_graphql_campaignexport(rpc, campaign_id)
	except errors.KingPhisherGraphQLQueryError as error:
		logger.error('graphql error: ' + error.message)
		raise
	gui_utilities.gtk_sync()
	# export the scalar campaign fields; the table connections are handled below
	for key, value in campaign_info.items():
		if key in ('landingPages', 'messages', 'visits', 'credentials', 'deaddropDeployments', 'deaddropConnections'):
			continue
		if isinstance(value, datetime.datetime):
			value = value.replace(tzinfo=tzutc)
		serializers.to_elementtree_subelement(campaign, key, value)
		gui_utilities.gtk_sync()

	# tables with a campaign_id field, exported page by page via cursors
	table_names = ('landing_pages', 'messages', 'visits', 'credentials', 'deaddrop_deployments', 'deaddrop_connections')
	cursor = None
	table_elements = {}
	while True:
		gui_utilities.gtk_sync()
		for table_name in table_names:
			gui_utilities.gtk_sync()
			# hoist the camel-case key conversion and connection lookup which
			# were previously repeated three times per table
			table_info = campaign_info[parse_case_snake_to_camel(table_name, upper_first=False)]
			if table_info['pageInfo']['hasNextPage']:
				cursor = table_info['pageInfo']['endCursor']
			if table_name not in table_elements:
				table_elements[table_name] = ET.SubElement(campaign, table_name)
			for node in table_info['edges']:
				gui_utilities.gtk_sync()
				row = node['node']
				# table_name[:-1] strips the plural 's' for the row element name
				table_row_element = ET.SubElement(table_elements[table_name], table_name[:-1])
				for key, value in row.items():
					gui_utilities.gtk_sync()
					if isinstance(value, datetime.datetime):
						value = value.replace(tzinfo=tzutc)
					serializers.to_elementtree_subelement(table_row_element, key, value)
		# process-then-fetch: stop once no table reported another page. The
		# previous break condition (not cursor and last_cursor) never fired
		# when the first page was also the last, causing an infinite loop
		# which duplicated every row on each pass.
		if cursor is None:
			break
		# log GraphQL failures on the paginated fetch the same way as the
		# initial query above
		try:
			campaign_info = _get_graphql_campaignexport(rpc, campaign_id, cursor)
		except errors.KingPhisherGraphQLQueryError as error:
			logger.error('graphql error: ' + error.message)
			raise
		cursor = None
	logger.info('completed processing campaign information for export')
	document = minidom.parseString(ET.tostring(root))
	with open(xml_file, 'wb') as file_h:
		file_h.write(document.toprettyxml(indent=' ', encoding=encoding))
	logger.info('campaign export complete')