def wrapper(*args, **kwargs):
    """Call the wrapped function; on a ServerRequestException log the
    error and hand back the pre-built emergency response instead."""
    try:
        return func(*args, **kwargs)
    except ServerRequestException as error:
        getLogger("Server-GTK IO").warning(error)
        return response_in_emergency
def startAPIs(plugin_controller, model_controller, hostname, port):
    # Build the Flask app for the REST controllers and serve it through
    # Tornado, probing upward from `port` until a free one is found.
    global _rest_controllers
    global _http_server
    _rest_controllers = [PluginControllerAPI(plugin_controller), ModelControllerAPI(model_controller)]
    app = Flask('APISController')
    _http_server = HTTPServer(WSGIContainer(app))
    while True:
        try:
            _http_server.listen(port, address=hostname)
            logger.getLogger().info(
                "REST API server configured on %s" % str(
                    CONF.getApiRestfulConInfo()))
            break
        except socket.error as exception:
            # errno 98 is EADDRINUSE on Linux -- presumably the only
            # platform targeted here; TODO confirm for other OSes.
            if exception.errno == 98:
                # Port already in use
                # Let's try the next one
                port += 1
                if port > 65535:
                    raise Exception("No ports available!")
                # Persist the next candidate port so clients can find it.
                CONF.setApiRestfulConInfoPort(port)
                CONF.saveConfig()
            else:
                raise exception
    # Register every route exposed by the REST controllers on the app.
    routes = [r for c in _rest_controllers for r in c.getRoutes()]
    for route in routes:
        app.add_url_rule(route.path, view_func=route.view_func, methods=route.methods)
    # Redirect tornado access logs into the application logger.
    logging.getLogger("tornado.access").addHandler(logger.getLogger(app))
    logging.getLogger("tornado.access").propagate = False
    # Serve in a background thread so the caller is not blocked.
    threading.Thread(target=startServer).start()
def run(self):
    """Report-synchronization loop: every `self.timer` seconds (polled in
    0.1s slices so `self._stop` is honored quickly) sync the report
    directory; exit early when polling mode is off."""
    self.online_plugins.start()
    elapsed = .0
    while not self._stop:
        time.sleep(.1)
        elapsed += .1
        if elapsed < self.timer:
            continue
        try:
            self.syncReports()
            if not self.polling:
                break
        except Exception:
            getLogger(self).error(
                "An exception was captured while saving reports\n%s"
                % traceback.format_exc())
        finally:
            # Restart the interval whether the sync succeeded or not.
            elapsed = 0
def _loadPlugins(self, plugin_repo_path):
    """
    Finds and load all the plugins that are
    available in the plugin_repo_path.
    """
    try:
        os.stat(plugin_repo_path)
    except OSError:
        pass
    sys.path.append(plugin_repo_path)
    valid_dir = re.compile(r"^[\d\w\-\_]+$")
    for name in os.listdir(plugin_repo_path):
        if not valid_dir.match(name):
            continue
        try:
            module_path = os.path.join(plugin_repo_path, name)
            sys.path.append(module_path)
            module_filename = os.path.join(module_path, "plugin.py")
            self._plugin_modules[name] = imp.load_source(name, module_filename)
        except Exception as e:
            msg = "An error ocurred while loading plugin %s.\n%s" % (module_filename, traceback.format_exc())
            getLogger(self).debug(msg)
            getLogger(self).warn(e)
def waitForDBChange(self, db_name, since=0, timeout=15000):
    """ Be warned this will return after the database has a change, if
    there was one before call it will return immediatly with the
    changes done"""
    changes = []
    # Resume from whichever is newer: the recorded sequence or the caller's.
    last_seq = max(self.getLastChangeSeq(db_name), since)
    db = self._getDb(db_name)
    # longpoll: blocks until at least one change arrives or the timeout
    # (milliseconds, per the couch _changes API) expires.
    with ChangesStream(db, feed="longpoll", since=last_seq, timeout=timeout) as stream:
        for change in stream:
            if change['seq'] > self.getLastChangeSeq(db_name):
                self.setLastChangeSeq(db_name, change['seq'])
                # Design-document updates are not model objects; skip them.
                if not change['id'].startswith('_design'):
                    #fake doc type for deleted objects
                    doc = {'type': 'unknown', '_deleted': 'False', '_rev': [0]}
                    if not change.get('deleted'):
                        # Live document: fetch the real content instead.
                        doc = self.getDocument(db_name, change['id'])
                    changes.append(change_factory.create(doc))
    if len(changes):
        getLogger(self).debug("Changes from another instance")
    return changes
def parse(self, xml_file, from_string=False): """Import .nessus file""" # Parse XML file getLogger(self).debug("Parsing report start") if from_string: xml_file = StringIO(xml_file) # Iterate through each host scanned and create objects for each for event, elem in ET.iterparse(xml_file): getLogger(self).debug("Parsing elemn %s" % elem[0:20]) # Grab the report name from the Report element if event == 'end' and elem.tag == 'Report': self.name = elem.attrib.get('name') continue # Only process ReportHost elements elif event == 'end' and elem.tag != 'ReportHost': continue rh_obj = ReportHost(elem) if rh_obj: self.targets.append(rh_obj) # Update Report dates if not self.scan_start: self.scan_start = rh_obj.host_start if not self.scan_end: self.scan_end = rh_obj.host_end if rh_obj.get('host_start'): if rh_obj.host_start < self.scan_start: self.scan_start = rh_obj.host_start if rh_obj.host_end > self.scan_end: self.scan_end = rh_obj.host_end
def saveDocument(self, aWorkspaceName, aDocument):
    """Persist aDocument into the remote workspace database, bumping the
    workspace sequence number first."""
    self.incrementSeqNumber(aWorkspaceName)
    getLogger(self).debug("Saving document in remote workspace %s" % aWorkspaceName)
    database = self._getDb(aWorkspaceName)
    return database.save_doc(aDocument, use_uuids=True, force_update=True)
def _pluginStart(self, name, command_id):
    """Mark the named plugin as started and bump the active-plugin
    counter. Always returns True.

    BUG FIX: the lock was acquired and released manually, so any
    exception in between (e.g. while logging) left it held forever;
    the `with` statement releases it on every path.
    """
    with self.active_plugins_count_lock:
        self.processing = True
        # NOTE(review): the format template only uses {0}; command_id is
        # silently ignored -- confirm the intended message.
        getLogger(self).info("Plugin Started: {0}. ".format(name, command_id))
        self.active_plugins_count += 1
    return True
def _loadDbs(self):
    """Register every user database on the server for lazy
    initialization (skipping internal dbs and 'reports')."""
    def is_user_db(name):
        return not name.startswith("_") and name != 'reports'
    for dbname in filter(is_user_db, self.__serv.all_dbs()):
        if dbname in self.dbs:
            continue
        getLogger(self).debug(
            "Asking for dbname[%s], registering for lazy initialization" % dbname)
        self.dbs[dbname] = lambda x: self._loadDb(x)
def open_file(self, file_path):
    """
    This method uses file signatures to recognize file types
    :param file_path: report file.
    Returns a (file object, type string) pair; the type is None when the
    signature is unknown, and both may be None-ish on I/O error.
    """
    """
    If you need add support to a new report type
    add the file signature here
    and add the code in self.getRootTag() for get the root tag.
    """
    f = result = None
    # Magic bytes -> report type (PK -> zip, '<?xml' -> xml).
    signatures = {"\x50\x4B": "zip", "\x3C\x3F\x78\x6D\x6C": "xml"}
    try:
        f = open(file_path, 'rb')
        file_signature = f.read(10)
        f.seek(0)
        for key in signatures:
            if file_signature.find(key) == 0:
                result = signatures[key]
                getLogger(self).debug("Report type detected: %s" % result)
                break
    except IOError as err:
        self.report_type = None
        getLogger(self).error("Error while opening file.\n%s. %s" % (err, file_path))
    # BUG FIX: the original had no return statement, so it implicitly
    # returned None and broke callers that unpack
    # `f, report_type = self.open_file(...)` (see getRootTag).
    return f, result
def loadChange(self, objid, revision, deleted):
    """Apply one database change (add/update/delete) to the local model
    and broadcast it through the notification center.

    BUG FIX: the handler was a bare `except:`, which also swallowed
    SystemExit/KeyboardInterrupt and discarded the traceback; it is
    narrowed to Exception and the traceback is logged.
    """
    try:
        obj = self.mapper_manager.find(objid)
        change = change_factory.create(obj, revision, deleted)
        if change.getChangeType() == CHANGETYPE.DELETE:
            # object deleted: detach from its parent before removal
            if isinstance(change, ChangeModelObject):
                obj_parent = obj.getParent()
                if obj_parent:
                    obj_parent.deleteChild(obj.getID())
            self.mapper_manager.remove(objid)
        elif change.getChangeType() == CHANGETYPE.UPDATE:
            # object edited
            self.mapper_manager.reload(objid)
        elif change.getChangeType() == CHANGETYPE.ADD:
            if isinstance(change, ChangeModelObject):
                # The child has a parent, but the parent doesn't
                # have the child yet...
                if obj.getParent():
                    obj.getParent().addChild(obj)
        if isinstance(change, ChangeModelObject):
            self._notify_model_object_change(change, obj)
        model.guiapi.notification_center.changeFromInstance(change)
    except Exception:
        getLogger(self).debug(
            "Change couldn't be processed\n%s" % traceback.format_exc())
def loadChange(self, objid, revision, deleted):
    """Apply one database change (add/update/delete) to the local model
    and broadcast it through the notification center.

    BUG FIX: the handler was a bare `except:`, which also swallowed
    SystemExit/KeyboardInterrupt and discarded the traceback; it is
    narrowed to Exception and the traceback is logged.
    """
    try:
        obj = self.mapper_manager.find(objid)
        change = change_factory.create(obj, revision, deleted)
        if change.getChangeType() == CHANGETYPE.DELETE:
            # object deleted: detach from its parent before removal
            if isinstance(change, ChangeModelObject):
                obj_parent = obj.getParent()
                if obj_parent:
                    obj_parent.deleteChild(obj.getID())
            self.mapper_manager.remove(objid)
        elif change.getChangeType() == CHANGETYPE.UPDATE:
            # object edited
            self.mapper_manager.reload(objid)
        elif change.getChangeType() == CHANGETYPE.ADD:
            if isinstance(change, ChangeModelObject):
                # The child has a parent, but the parent doesn't
                # have the child yet...
                if obj.getParent():
                    obj.getParent().addChild(obj)
        if isinstance(change, ChangeModelObject):
            self._notify_model_object_change(change, obj)
        model.guiapi.notification_center.changeFromInstance(change)
    except Exception:
        getLogger(self).debug(
            "Change couldn't be processed\n%s" % traceback.format_exc())
def run(self, args):
    """Open the requested workspace (falling back to the default one on
    failure), finish GUI start-up and enter the main event loop."""
    workspace = args.workspace
    try:
        ws = super(GuiApp, self).openWorkspace(workspace)
    except Exception as e:
        getLogger(self).error(
            ("Your last workspace %s is not accessible, "
             "check configuration") % workspace)
        getLogger(self).error(str(e))
        ws = self.openDefaultWorkspace()
    workspace = ws.name
    CONF.setLastWorkspace(workspace)
    CONF.saveConfig()
    self.loadWorkspaces()
    self.createLoggerWidget()
    self.stopSplashScreen()
    self._main_window.showAll()
    couch_url = CONF.getCouchURI()
    if not couch_url:
        model.api.log("Please configure Couchdb for fancy HTML5 Dashboard (https://github.com/infobyte/faraday/wiki/Couchdb)")
    else:
        dashboard_url = "%s/reports/_design/reports/index.html" % couch_url
        model.api.log("Faraday ui is ready")
        model.api.log("Make sure you have couchdb up and running if you want visualizations.")
        model.api.log("If couchdb is up, point your browser to: [%s]" % dashboard_url)
    return self.exec_loop()
def syncReports(self):
    """
    Synchronize report directory using the DataManager and Plugins online
    We first make sure that all shared reports were added to the repo
    """
    for root, dirs, files in os.walk(self._report_path, False):
        # skip processed and unprocessed directories
        if root != self._report_path:
            continue
        for name in files:
            filename = os.path.join(root, name)
            name = os.path.basename(filename)
            # When no plugin claims the report, park it in `unprocessed`;
            # PluginCommiter renames it again once the plugin finishes.
            if self.processor.processReport(filename) is False:
                getLogger(self).info('Plugin not detected. Moving {0} to unprocessed'.format(filename))
                destination = os.path.join(self._report_upath, name)
            else:
                getLogger(self).info(
                    'Detected valid report {0}'.format(filename))
                destination = os.path.join(self._report_ppath, name)
            os.rename(filename, destination)
    self.onlinePlugins()
def syncReports(self):
    """
    Synchronize report directory using the DataManager and Plugins online
    We first make sure that all shared reports were added to the repo
    """
    for root, dirs, files in os.walk(self._report_path, False):
        # Only the top-level report directory is scanned; the processed /
        # unprocessed subdirectories are left alone.
        if root == self._report_path:
            for name in files:
                filename = os.path.join(root, name)
                name = os.path.basename(filename)
                # PluginCommiter renames the file once the plugin finishes.
                recognized = self.processor.processReport(filename) is not False
                if recognized:
                    getLogger(self).info(
                        'Detected valid report {0}'.format(filename))
                    os.rename(filename, os.path.join(self._report_ppath, name))
                else:
                    getLogger(self).info(
                        'Plugin not detected. Moving {0} to unprocessed'.
                        format(filename))
                    os.rename(filename, os.path.join(self._report_upath, name))
def run(self, args):
    """Open the workspace given on the command line (default workspace on
    failure), complete GUI setup and run the event loop."""
    workspace = args.workspace
    try:
        ws = super(GuiApp, self).openWorkspace(workspace)
    except Exception as e:
        getLogger(self).error(("Your last workspace %s is not accessible, "
                               "check configuration") % workspace)
        getLogger(self).error(str(e))
        ws = self.openDefaultWorkspace()
    workspace = ws.name
    CONF.setLastWorkspace(workspace)
    CONF.saveConfig()
    # Remaining GUI start-up steps, in order.
    for setup_step in (self.loadWorkspaces, self.createLoggerWidget,
                       self.stopSplashScreen, self._main_window.showAll):
        setup_step()
    couchURL = CONF.getCouchURI()
    if couchURL:
        url = "%s/reports/_design/reports/index.html" % couchURL
        model.api.log("Faraday ui is ready")
        model.api.log("Make sure you have couchdb up and running if you want visualizations.")
        model.api.log("If couchdb is up, point your browser to: [%s]" % url)
    else:
        model.api.log("Please configure Couchdb for fancy HTML5 Dashboard (https://github.com/infobyte/faraday/wiki/Couchdb)")
    exit_code = self.exec_loop()
    return exit_code
def getRootTag(self, file_path):
    """Detect the report's root tag from the file at file_path.

    Returns a (root_tag, file object) pair, or (None, None) when the
    file could not be opened.

    BUG FIX: the original fell off the end and implicitly returned a
    single None, so callers unpacking two values raised TypeError;
    mirror the two-element error return. NOTE(review): confirm the
    second element callers expect is the file object.
    """
    report_type = result = f = None
    f, report_type = self.open_file(file_path)
    # Check error in open_file()
    if f is None and report_type is None:
        self.report_type = None
        return None, None
    # Find root tag based in report_type
    if report_type == "zip":
        # zip archives are Maltego exports; no XML root to look for.
        result = "maltego"
    else:
        try:
            # Only the first 'start' event is needed: the root element.
            for event, elem in ET.iterparse(f, ('start', )):
                result = elem.tag
                break
        except SyntaxError as err:
            self.report_type = None
            getLogger(self).error("Not an xml file.\n %s" % (err))
    return result, f
def getRootTag(self, file_path):
    """Detect the report's root tag from the file at file_path.

    Returns a (root_tag, file object) pair, or (None, None) when the
    file could not be opened.

    BUG FIX: the original fell off the end and implicitly returned a
    single None, so callers unpacking two values raised TypeError;
    mirror the two-element error return. NOTE(review): confirm the
    second element callers expect is the file object.
    """
    report_type = result = f = None
    f, report_type = self.open_file(file_path)
    # Check error in open_file()
    if f is None and report_type is None:
        self.report_type = None
        return None, None
    # Find root tag based in report_type
    if report_type == "zip":
        result = "maltego"
    elif report_type == "dat":
        result = 'lynis'
    elif report_type == 'json':
        # this will work since recon-ng is the first plugin to use json.
        # we need to add json detection here!
        result = 'reconng'
    else:
        try:
            # Only the first 'start' event is needed: the root element.
            for event, elem in ET.iterparse(f, ('start', )):
                result = elem.tag
                break
        except SyntaxError as err:
            self.report_type = None
            getLogger(self).error("Not an xml file.\n %s" % (err))
    return result, f
def sendReport(self, plugin_id, filename):
    """Sends a report to the appropiate plugin specified by plugin_id"""
    getLogger(self).debug("The file is %s, %s" % (filename, plugin_id))
    if self.plugin_controller.processReport(plugin_id, filename):
        return True
    getLogger(self).error("Faraday doesn't have a plugin for this tool..."
                          " Processing: ABORT")
    return False
def _loadPlugins(self, plugin_repo_path):
    """
    Import every `plugin.py` found in the directories under
    plugin_repo_path, keyed by directory name in self._plugin_modules.
    """
    try:
        os.stat(plugin_repo_path)
    except OSError:
        pass
    sys.path.append(plugin_repo_path)
    dir_name_regexp = re.compile(r"^[\d\w\-\_]+$")
    candidates = [entry for entry in os.listdir(plugin_repo_path)
                  if dir_name_regexp.match(entry)]
    for name in candidates:
        module_path = os.path.join(plugin_repo_path, name)
        module_filename = os.path.join(module_path, "plugin.py")
        try:
            sys.path.append(module_path)
            self._plugin_modules[name] = imp.load_source(
                name, module_filename)
        except Exception as e:
            msg = "An error ocurred while loading plugin %s.\n%s" % (
                module_filename, traceback.format_exc())
            getLogger(self).debug(msg)
            getLogger(self).warn(e)
def checkVersion():
    """Compare the on-disk version file against the stored instance
    version, warn (and optionally quit) on mismatch, then persist the
    current version to CONST_CONFIG.

    BUG FIX: the version file handle leaked when any step before
    f.close() raised; both files are now managed by `with`. Also
    replaced the `!= None` comparison with the `is not None` idiom.
    """
    try:
        with open(CONST_VERSION_FILE) as f:
            f_version = f.read().strip()
        if not args.update:
            installed_version = getInstanceConfiguration().getVersion()
            if installed_version is not None and installed_version != f_version:
                logger.warning(
                    "You have different version of Faraday since your last run.\nRun ./faraday.py --update to update configuration!"
                )
                if query_yes_no('Do you want to close Faraday?', 'yes'):
                    exit(-1)
        getInstanceConfiguration().setVersion(f_version)
        doc = {"ver": getInstanceConfiguration().getVersion()}
        # Rewrite the config marker from scratch.
        if os.path.isfile(CONST_CONFIG):
            os.remove(CONST_CONFIG)
        with open(CONST_CONFIG, "w") as doc_file:
            json.dump(doc, doc_file)
    except Exception:
        getLogger("launcher").error(
            "It seems that something's wrong with your version\nPlease contact customer support"
        )
        exit(-1)
def addWrapper(new_obj, parent_id=None, *args):
    """Add new_obj under its parent; refuse (returning False) when the
    parent cannot be found."""
    parent = self.mappers_manager.find(parent_type, parent_id)
    if not parent:
        msg = "A parent is needed for %s objects" % new_obj.class_signature
        getLogger(self).error(msg)
        return False
    add_func(new_obj, parent_id, *args)
def _processAction(self, action, parameters):
    """
    decodes and performs the action given
    It works kind of a dispatcher
    """
    getLogger(self).debug("_processAction - %s - parameters = %s" % (action, str(parameters)))
    handler = self._actionDispatcher[action]
    handler(*parameters)
def exit_faraday_without_confirm(self, widget=None):
    """Exits faraday without confirm. Used as a middle-man between
    connect callbacks (which will send the widget as an argument and
    self.window.destroy, which takes none.
    """
    getLogger(self).error("Faraday exited because you didn't connect "
                          "to a valid Faraday Server.")
    # Schedule teardown on the GTK main loop instead of calling directly.
    for deferred_call in (self.window.destroy, self.on_quit):
        GObject.idle_add(deferred_call)
def _pluginStart(self, name, command_id):
    """Mark the named plugin as started and bump the active-plugin
    counter; chatty background plugins are not logged. Always True.

    BUG FIX: the lock was acquired and released manually, so an
    exception in between (e.g. while logging) left it held forever;
    the `with` statement releases it on every path.
    """
    with self.active_plugins_count_lock:
        self.processing = True
        if name not in ["MetasploitOn", "Beef", "Sentinel"]:
            # NOTE(review): the format template only uses {0}; command_id
            # is silently ignored -- confirm the intended message.
            getLogger(self).info("Plugin Started: {0}. ".format(name, command_id))
        self.active_plugins_count += 1
    return True
def check_faraday_version():
    """Exit with status 2 when the client and server report different
    Faraday versions."""
    try:
        server.check_faraday_version()
    except RuntimeError:
        message = ("The server is running a different Faraday version "
                   "than the client you are running. "
                   "Version numbers must match!")
        getLogger("launcher").error(message)
        sys.exit(2)
def _sendReport(self, plugin_id, filename):
    """Hand the report file to the plugin identified by plugin_id;
    returns False when no plugin can process it, True otherwise."""
    getLogger(self).debug('The file is %s, %s' % (filename, plugin_id))
    processed = self.plugin_controller.processReport(plugin_id, filename)
    if not processed:
        getLogger(self).error(
            "Faraday doesn't have a plugin for this tool..."
            " Processing: ABORT")
        return False
    return True
def check_faraday_version():
    """Ensure client and server Faraday versions match; exit(2) on a
    mismatch reported by the server."""
    try:
        server.check_faraday_version()
    except RuntimeError:
        getLogger("launcher").error(
            "The server is running a different Faraday version than the client "
            "you are running. Version numbers must match!")
        sys.exit(2)
def launchSentinel(self):
    """Run the Sentinel online plugin when it is enabled in the plugin
    settings."""
    psettings = CONF.getPluginSettings()
    name, cmd = "Sentinel", "sentinel"
    enabled = (name in psettings and
               psettings[name]['settings']['Enable'] == "1")
    if enabled:
        getLogger(self).info("Plugin Started: Sentinel")
        self.processor.onlinePlugin(cmd)
        getLogger(self).info("Plugin Ended: Sentinel")
def _sendReport(self, plugin_id, filename):
    """Forward the report file to the plugin; abort (False) when no
    plugin can handle it."""
    getLogger(self).debug('The file is %s, %s' % (filename, plugin_id))
    if self.plugin_controller.processReport(plugin_id, filename):
        return True
    getLogger(self).error(
        "Faraday doesn't have a plugin for this tool..."
        " Processing: ABORT")
    return False
def __init__(self, path=None):
    """Create a filesystem-backed manager rooted at `path` (defaults to
    the configured persistence path), creating the directory if missing
    and registering its databases.

    BUG FIX: the default used to be `CONF.getPersistencePath()` evaluated
    once at import time, so configuration changes made afterwards were
    ignored; the default is now resolved at call time. Callers passing
    an explicit path are unaffected.
    """
    super(FileSystemManager, self).__init__()
    if path is None:
        path = CONF.getPersistencePath()
    getLogger(self).debug(
        "Initializing FileSystemManager for path [%s]" % path)
    self._path = path
    if not os.path.exists(self._path):
        os.mkdir(self._path)
    self._loadDbs()
    self._available = True
def _processAction(self, action, parameters):
    """
    decodes and performs the action given
    It works kind of a dispatcher
    """
    getLogger(self).debug(
        "_processAction - %s - parameters = %s" % (action, str(parameters)))
    dispatch = self._actionDispatcher[action]
    dispatch(*parameters)
def sendReport(self, plugin_id, filename):
    """Sends a report to the appropiate plugin specified by plugin_id"""
    getLogger(self).info('The file is %s, %s' % (filename, plugin_id))
    accepted = self.plugin_controller.processReport(plugin_id, filename)
    if not accepted:
        getLogger(self).error(
            "Faraday doesn't have a plugin for this tool..."
            " Processing: ABORT")
        return False
    return True
def pushReports(self):
    """Upload the couchdb report views and return the dashboard URL.

    BUG FIX: narrowed a bare `except:` (which also trapped
    SystemExit/KeyboardInterrupt) to `except Exception`; an upload
    failure remains non-fatal since it merely needs admin rights.
    """
    vmanager = ViewsManager()
    reports = os.path.join(os.getcwd(), "views", "reports")
    try:
        workspace = self.__serv.get_or_create_db("reports")
        vmanager.addView(reports, workspace)
    except Exception:
        getLogger(self).warn(
            "Reports database couldn't be uploaded. You need to be an admin to do it")
    return self.__uri + "/reports/_design/reports/index.html"
def saveDocument(self, dic):
    """Write `dic` to <path>/<_id>.json; True on success, False on any
    failure."""
    try:
        filepath = os.path.join(self.path, "%s.json" % dic.get("_id", ))
        getLogger(self).debug("Saving document in local db %s" % self.path)
        with open(filepath, "w") as outfile:
            json.dump(dic, outfile, indent=2)
    except Exception:
        #log Exception?
        return False
    return True
def getServicesCount(self):
    """Get how many services are in the workspace. If it
    can't, it will return zero.

    BUG FIX: narrowed a bare `except:` (which also trapped
    SystemExit/KeyboardInterrupt) to `except Exception`; a failed
    lookup still degrades to zero as before.
    """
    try:
        services = model.hosts.Service.class_signature
        count = self.mappers_manager.getMapper(services).getCount()
    except Exception:
        getLogger(self).debug("Couldn't get services count: assuming it is zero.")
        count = 0
    return count
def saveDocument(self, dic):
    """Serialize `dic` as pretty-printed JSON under the local db path,
    named after its _id; report success as a boolean."""
    try:
        filepath = os.path.join(self.path, "%s.json" % dic.get("_id", ))
        getLogger(self).debug(
            "Saving document in local db %s" % self.path)
        with open(filepath, "w") as outfile:
            json.dump(dic, outfile, indent=2)
        return True
    except Exception:
        #log Exception?
        return False
def sendReport(self, plugin_id, filename):
    """Sends a report to the appropiate plugin specified by plugin_id;
    returns the resulting command id, or False when no plugin applies."""
    getLogger(self).info(
        'The file is %s, %s' % (filename, plugin_id))
    command_id = self.plugin_controller.processReport(
        plugin_id, filename, ws_name=self.ws_name)
    if command_id:
        return command_id
    getLogger(self).error(
        "Faraday doesn't have a plugin for this tool..."
        " Processing: ABORT")
    return False
def getHostsCount(self):
    """Get how many hosts are in the workspace. If it
    can't, it will return zero.

    BUG FIX: narrowed a bare `except:` (which also trapped
    SystemExit/KeyboardInterrupt) to `except Exception`; a failed
    lookup still degrades to zero as before.
    """
    try:
        hosts = model.hosts.Host.class_signature
        count = self.mappers_manager.getMapper(hosts).getCount()
    except Exception:
        getLogger(self).debug(
            "Couldn't get host count: assuming it is zero.")
        count = 0
    return count
def saveDocument(self, document):
    """Save `document` into couch; every MAXIMUM_SAVES successful saves
    trigger a database compaction."""
    self.incrementSeqNumber()
    getLogger(self).debug("Saving document in couch db %s" % self.db)
    save_result = self.db.save_doc(document, use_uuids=True, force_update=True)
    if save_result:
        self.saves_counter += 1
        self.addDoc(document)
        if self.saves_counter > self.MAXIMUM_SAVES:
            # Periodic compaction keeps the couch file size bounded.
            self._compactDatabase()
            self.saves_counter = 0
    return save_result
def getServicesCount(self):
    """Get how many services are in the workspace. If it
    can't, it will return zero.

    BUG FIX: narrowed a bare `except:` (which also trapped
    SystemExit/KeyboardInterrupt) to `except Exception`; a failed
    lookup still degrades to zero as before.
    """
    try:
        services = models.Service.class_signature
        count = self.mappers_manager.getMapper(services).getCount()
    except Exception:
        getLogger(self).debug(
            "Couldn't get services count: assuming it is zero.")
        count = 0
    return count
def getHostsCount(self):
    """Get how many hosts are in the workspace. If it
    can't, it will return zero.

    BUG FIX: narrowed a bare `except:` (which also trapped
    SystemExit/KeyboardInterrupt) to `except Exception`; a failed
    lookup still degrades to zero as before.
    """
    try:
        # NOTE(review): the services counter uses `models.Service`;
        # `models.Hosts` (plural) looks like a typo for `models.Host`.
        # If the attribute is missing, the AttributeError is swallowed
        # below and this ALWAYS returns zero -- confirm against `models`.
        hosts = models.Hosts.class_signature
        count = self.mappers_manager.getMapper(hosts).getCount()
    except Exception:
        getLogger(self).debug(
            "Couldn't get host count: assuming it is zero.")
        count = 0
    return count
def __getDb(self, aWorkspaceName):
    """Return the couch database for the workspace (case-insensitive),
    caching the handle and its update_seq on first access.

    BUG FIX: the original passed `self.__serv.get_db(...)` as the
    *default* argument of dict.get(), so the server was queried on
    EVERY call even when the database was already cached; the server
    is now contacted only on a cache miss.
    """
    aWorkspaceName = aWorkspaceName.lower()
    getLogger(self).debug("Getting workspace [%s]" % aWorkspaceName)
    workspacedb = self.__dbs.get(aWorkspaceName)
    if workspacedb is None:
        getLogger(self).debug("Asking couchdb for workspace [%s]" % aWorkspaceName)
        workspacedb = self.__serv.get_db(aWorkspaceName)
        self.__dbs[aWorkspaceName] = workspacedb
        self.__seq_nums[aWorkspaceName] = workspacedb.info()['update_seq']
    return workspacedb
def getDocument(self, document_id):
    """Load the JSON document with the given id from the local db path;
    returns the parsed dict, or None when it cannot be read.

    BUG FIX: the original opened the file without ever closing it
    (handle leak), and on malformed JSON the `finally: return doc`
    returned the open file object instead of a document; the handle is
    now closed by `with` and `doc` stays None on a parse failure.
    """
    getLogger(self).debug(
        "Getting document %s for local db %s" % (document_id, self.path))
    path = os.path.join(self.path, "%s.json" % document_id)
    doc = None
    try:
        with open(path, "r") as infile:
            doc = json.loads(infile.read())
    except IOError:
        doc = None
    finally:
        # Preserved behavior: return-in-finally also swallows any other
        # error (e.g. ValueError from bad JSON), yielding None.
        return doc
def saveDocument(self, document):
    """Persist `document` into couch, tracking successful saves and
    compacting the database once the threshold is exceeded."""
    self.incrementSeqNumber()
    getLogger(self).debug(
        "Saving document in couch db %s" % self.db)
    res = self.db.save_doc(document, use_uuids=True, force_update=True)
    if not res:
        return res
    self.saves_counter += 1
    self.addDoc(document)
    if self.saves_counter > self.MAXIMUM_SAVES:
        self._compactDatabase()
        self.saves_counter = 0
    return res
def run(self, args):
    """Open the workspace named on the command line and feed the given
    report file through the plugin processor; returns -1 when the
    workspace cannot be opened."""
    workspace = args.workspace
    try:
        self.workspace_manager.openWorkspace(workspace)
    except Exception as e:
        getLogger(self).error(("The workspace %s is not accessible, "
                               "check configuration") % workspace)
        getLogger(self).error(str(e))
        return -1
    report_processor = ReportProcessor(self.plugin_controller)
    report_processor.processReport(args.filename)
def _pluginEnd(self, name, command_id):
    """Mark the named plugin as finished, decrementing the active-plugin
    counter; when it reaches zero, clear the processing flag. Returns
    True. A spurious end (counter already zero) is logged and ignored.

    BUG FIX: the lock was acquired and released manually on two separate
    paths, so an exception in between (e.g. while logging) left it held
    forever; the `with` statement releases it on every path.
    """
    with self.active_plugins_count_lock:
        getLogger(self).info("Plugin Ended: {0}".format(name))
        if self.active_plugins_count == 0:
            getLogger(self).warn("All plugins ended, but a plugin end action was received.")
            return True
        self.active_plugins_count -= 1
        if self.active_plugins_count == 0:
            self.processing = False
    return True