Example #1
    def intialise_proxy_manager(options):
        """ Proxy Manager initialization.

        :param dict options: Proxy manager configuration parameters.
        """
        proxy_manager = None
        if options['Botnet_mode'] is not None:
            proxy_manager = Proxy_manager()
            answer = "Yes"
            proxies = []
            if options['Botnet_mode'][0] == "miner":
                miner = Proxy_Miner()
                proxies = miner.start_miner()

            if options['Botnet_mode'][0] == "list":  # load proxies from list
                proxies = proxy_manager.load_proxy_list(
                    options['Botnet_mode'][1]
                )
                answer = raw_input(
                    "[#] Do you want to check the proxy list? [Yes/no] : "
                )

            if answer.upper() in ["", "YES", "Y"]:
                proxy_q = multiprocessing.Queue()
                proxy_checker = multiprocessing.Process(
                    target=Proxy_Checker.check_proxies,
                    args=(proxy_q, proxies,)
                )
                logging.info("Checking Proxies...")
                start_time = time.time()
                proxy_checker.start()
                proxies = proxy_q.get()
                proxy_checker.join()

            proxy_manager.proxies = proxies
            proxy_manager.number_of_proxies = len(proxies)

            if options['Botnet_mode'][0] == "miner":
                logging.info("Writing proxies to disk(~/.owtf/proxy_miner/proxies.txt)")
                miner.export_proxies_to_file("proxies.txt", proxies)
            if answer.upper() in ["", "YES", "Y"]:
                logging.info(
                    "Proxy Check Time: %s",
                    time.strftime(
                        '%H:%M:%S',
                        time.localtime(time.time() - start_time - 3600)
                    )
                )
                cprint("Done")

            if proxy_manager.number_of_proxies == 0:
                ServiceLocator.get_component("error_handler").FrameworkAbort("No Alive proxies.")

            proxy = proxy_manager.get_next_available_proxy()

            # check proxy var... http:// sock://
            options['OutboundProxy'] = []
            options['OutboundProxy'].append(proxy["proxy"][0])
            options['OutboundProxy'].append(proxy["proxy"][1])
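
The branches above also show what the function expects from its argument. A hedged sketch of the options dict shape (the concrete values are illustrative, not taken from the source):

    # Hypothetical options dict, inferred from the branches above.
    options = {
        # "miner" harvests proxies via Proxy_Miner(); "list" loads them from a file path.
        'Botnet_mode': ["list", "/path/to/proxies.txt"],
        # Overwritten by the function with proxy["proxy"][0] and proxy["proxy"][1].
        'OutboundProxy': None,
    }
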
Example #2
    def StartBotnetMode(self, options):
        self.Proxy_manager = None
        if options['Botnet_mode'] is not None:
            self.Proxy_manager = Proxy_manager()
            answer = "Yes"
            proxies = []
            if options['Botnet_mode'][0] == "miner":
                miner = Proxy_Miner()
                proxies = miner.start_miner()

            if options['Botnet_mode'][0] == "list":  # load proxies from list
                proxies = self.Proxy_manager.load_proxy_list(
                    options['Botnet_mode'][1])
                answer = raw_input(
                    "[#] Do you want to check the proxy list? [Yes/no] : ")

            if answer.upper() in ["", "YES", "Y"]:
                proxy_q = multiprocessing.Queue()
                proxy_checker = multiprocessing.Process(
                    target=Proxy_Checker.check_proxies,
                    args=(
                        proxy_q,
                        proxies,
                    ))
                logging.info("Checking Proxies...")
                start_time = time.time()
                proxy_checker.start()
                proxies = proxy_q.get()
                proxy_checker.join()

            self.Proxy_manager.proxies = proxies
            self.Proxy_manager.number_of_proxies = len(proxies)

            if options['Botnet_mode'][0] == "miner":
                logging.info(
                    "Writing proxies to disk(~/.owtf/proxy_miner/proxies.txt)")
                miner.export_proxies_to_file("proxies.txt", proxies)
            if answer.upper() in ["", "YES", "Y"]:
                logging.info(
                    "Proxy Check Time: %s",
                    time.strftime(
                        '%H:%M:%S',
                        time.localtime(time.time() - start_time - 3600)))
                cprint("Done")

            if self.Proxy_manager.number_of_proxies == 0:
                self.Error.FrameworkAbort("No Alive proxies.")

            proxy = self.Proxy_manager.get_next_available_proxy()

            # check proxy var... http:// sock://
            options['OutboundProxy'] = []
            options['OutboundProxy'].append(proxy["proxy"][0])
            options['OutboundProxy'].append(proxy["proxy"][1])
Example #3
    def StartBotnetMode(self, Options):
        self.Proxy_manager = None
        if Options['Botnet_mode'] != None:
            self.Proxy_manager = Proxy_manager()
            answer = "Yes"
            proxies = []
            if Options['Botnet_mode'][0] == "miner":
                miner = Proxy_Miner()
                proxies = miner.start_miner()

            if Options['Botnet_mode'][0] == "list":  # load proxies from list
                proxies = self.Proxy_manager.load_proxy_list(
                    Options['Botnet_mode'][1])
                answer = raw_input(
                    "[#] Do you want to check the proxy list? [Yes/no] : ")

            if answer.upper() in ["", "YES", "Y"]:
                proxy_q = Queue()
                proxy_checker = Process(target=Proxy_Checker.check_proxies,
                                        args=(
                                            proxy_q,
                                            proxies,
                                        ))
                cprint("Checking Proxies...")
                #cprint("Start Time: " + time.strftime('%H:%M:%S', time.localtime(time.time())))
                start_time = time.time()
                proxy_checker.start()
                proxies = proxy_q.get()
                proxy_checker.join()

            self.Proxy_manager.proxies = proxies
            self.Proxy_manager.number_of_proxies = len(proxies)

            if Options['Botnet_mode'][0] == "miner":
                print "Writing Proxies to disk(~/.owtf/proxy_miner/proxies.txt)"
                miner.export_proxies_to_file("proxies.txt", proxies)
            if answer.upper() in ["", "YES", "Y"]:
                cprint("Proxy Check Time: " + time.strftime(
                    '%H:%M:%S', time.localtime(time.time() - start_time - 3600)))
                cprint("Done")

            proxy = self.Proxy_manager.get_next_available_proxy()

            #check proxy var... http:// sock://
            Options['OutboundProxy'] = []
            Options['OutboundProxy'].append(proxy["proxy"][0])
            Options['OutboundProxy'].append(proxy["proxy"][1])
Example #4
    def StartBotnetMode(self, Options):
        self.Proxy_manager = None
        if Options['Botnet_mode'] != None:
            self.Proxy_manager = Proxy_manager()
            answer = "Yes"
            proxies = []
            if Options['Botnet_mode'][0] == "miner":
                miner = Proxy_Miner()
                proxies = miner.start_miner()

            if Options['Botnet_mode'][0] == "list":  # load proxies from list
                proxies = self.Proxy_manager.load_proxy_list(Options['Botnet_mode'][1])
                answer = raw_input("[#] Do you want to check the proxy list? [Yes/no] : ")

            if answer.upper() in ["", "YES", "Y"]:
                proxy_q = Queue()
                proxy_checker = Process(
                                        target=Proxy_Checker.check_proxies,
                                        args=(proxy_q, proxies,)
                                        )
                cprint("Checking Proxies...")
                #cprint("Start Time: " + time.strftime('%H:%M:%S', time.localtime(time.time())))
                start_time = time.time()
                proxy_checker.start()
                proxies = proxy_q.get()
                proxy_checker.join()

            self.Proxy_manager.proxies = proxies
            self.Proxy_manager.number_of_proxies = len(proxies)

            if Options['Botnet_mode'][0] == "miner":
                print "Writing Proxies to disk(~/.owtf/proxy_miner/proxies.txt)"
                miner.export_proxies_to_file("proxies.txt", proxies)
            if answer.upper() in ["", "YES", "Y"]:
                cprint("Proxy Check Time: " + time.strftime(
                    '%H:%M:%S', time.localtime(time.time() - start_time - 3600)))
                cprint("Done")

            proxy = self.Proxy_manager.get_next_available_proxy()

            #check proxy var... http:// sock://
            Options['OutboundProxy'] = []
            Options['OutboundProxy'].append(proxy["proxy"][0])
            Options['OutboundProxy'].append(proxy["proxy"][1])
Example #5
class Core:
    def __init__(self, RootDir, OwtfPid):
        self.CreateTempStorageDirs(OwtfPid)
        # Tightly coupled, cohesive framework components:
        self.Error = error_handler.ErrorHandler(self)
        self.Shell = blocking_shell.Shell(
            self
        )  # Config needs to find plugins via shell = instantiate shell first
        self.Config = config.Config(RootDir, OwtfPid, self)
        self.Config.Init(
        )  # Now that the config is hooked to the core, init config sub-components
        self.PluginHelper = plugin_helper.PluginHelper(
            self)  # Plugin Helper needs access to automate Plugin tasks
        self.Random = random.Random()
        self.Reporter = reporter.Reporter(
            self)  # Reporter needs access to Core to access Config, etc
        self.Selenium = selenium_handler.Selenium(self)
        self.InteractiveShell = interactive_shell.InteractiveShell(self)
        self.SET = set_handler.SETHandler(self)
        self.SMTP = smtp.SMTP(self)
        self.SMB = smb.SMB(self)
        self.messaging_admin = messaging_admin.message_admin(self)
        self.showOutput = True
        self.TOR_process = None
        # Create internal IPv4 regex following rfc1918
        self.re_ipv4_internal = re.compile(
            r"(^128\.\d{1,3}\.\d{1,3}\.\d{1,3}$)|"
            r"(^10\.\d{1,3}\.\d{1,3}\.\d{1,3}$)|"
            r"(^192\.168\.\d{1,3}\.\d{1,3}$)|"
            r"(^172\.(1[6-9]|2[0-9]|3[0-1])\.[0-9]{1,3}\.[0-9]{1,3})$")

    def CreateTempStorageDirs(self, OwtfPid):
        temp_storage = os.path.join("/tmp", "owtf", str(OwtfPid))
        if not os.path.exists(temp_storage):
            os.makedirs(temp_storage)

    def CleanTempStorageDirs(self, OwtfPid):
        temp_storage = os.path.join("/tmp", "owtf", str(OwtfPid))
        renamed_temp_storage = os.path.join("/tmp", "owtf",
                                            "old-" + str(OwtfPid))
        if os.path.exists(temp_storage):
            os.rename(temp_storage, renamed_temp_storage)

    #wrapper to log function
    def log(self, *args):
        log(*args)

    def IsInScopeURL(self, URL):  # To avoid following links to other domains
        ParsedURL = urlparse(URL)
        #URLHostName = URL.split("/")[2]
        for HostName in self.Config.GetAll(
                'HOST_NAME'):  # Get all known Host Names in Scope
            #if URLHostName == HostName:
            if ParsedURL.hostname == HostName:
                return True
        return False

    def CreateMissingDirs(self, Path):
        Dir = os.path.dirname(Path)
        if not os.path.exists(Dir):
            os.makedirs(Dir)  # Create any missing directories

    def DumpFile(self, Filename, Contents, Directory):
        SavePath = Directory + WipeBadCharsForFilename(Filename)
        self.CreateMissingDirs(Directory)
        with open(SavePath, 'wb') as file:
            file.write(Contents)
        return SavePath

    def get_child_pids(self, parent_pid):
        PsCommand = subprocess.Popen("ps -o pid --ppid %d --noheaders" %
                                     parent_pid,
                                     shell=True,
                                     stdout=subprocess.PIPE)
        output, error = PsCommand.communicate()
        return [int(child_pid) for child_pid in output.split("\n")[:-1]]

    def GetPartialPath(self, Path):
        #return MultipleReplace(Path, List2DictKeys(RemoveListBlanks(self.Config.GetAsList( [ 'HOST_OUTPUT', 'OUTPUT_PATH' ]))))
        #print str(self.Config.GetAsList( [ 'HOST_OUTPUT', 'OUTPUT_PATH' ] ))
        #print "Path before="+Path
        #Path = MultipleReplace(Path, List2DictKeys(RemoveListBlanks(self.Config.GetAsList( [ 'OUTPUT_PATH' ]))))
        #Need to replace URL OUTPUT first so that "View Unique as HTML" Matches on body links work
        Path = MultipleReplace(
            Path,
            List2DictKeys(
                RemoveListBlanks(
                    self.Config.GetAsList(['HOST_OUTPUT', 'OUTPUT_PATH']))))
        #print "Path after="+Path

        if '/' == Path[0]:  # Stripping out leading "/" if present
            Path = Path[1:]
        return Path

    def GetCommand(self, argv):
        # Format command to remove directory and space-separate arguments
        return " ".join(argv).replace(argv[0], os.path.basename(argv[0]))

    def AnonymiseCommand(self, Command):
        for Host in self.Config.GetAll(
                'HOST_NAME'
        ):  # Host name setting value for all targets in scope
            if Host:  # Value is not blank
                Command = Command.replace(Host, 'some.target.com')
        for ip in self.Config.GetAll('HOST_IP'):
            if ip:
                Command = Command.replace(ip, 'xxx.xxx.xxx.xxx')
        return Command

    def start_reporter(self):
        """
        This function starts the reporting process
        """
        self.reporting = reporting_process()
        self.reporting_queue = multiprocessing.Queue()
        self.reporting_process = multiprocessing.Process(
            target=self.reporting.start, args=(self, 60, self.reporting_queue))
        self.reporting_process.start()

    def Start_TOR_Mode(self, Options):
        if Options['TOR_mode'] != None:
            #if Options['TOR_mode'][0] != "help":
            if tor_manager.TOR_manager.is_tor_running():
                self.TOR_process = tor_manager.TOR_manager(
                    self, Options['TOR_mode'])
                self.TOR_process = self.TOR_process.Run()
            else:
                tor_manager.TOR_manager.msg_start_tor(self)
                tor_manager.TOR_manager.msg_configure_tor()
                self.Error.FrameworkAbort("TOR Daemon is not running")
            #else:
            #tor_manager.TOR_manager.msg_configure_tor()
            #self.Error.FrameworkAbort("Configuration help is running")

    def StartBotnetMode(self, Options):
        self.Proxy_manager = None
        if Options['Botnet_mode'] != None:
            self.Proxy_manager = Proxy_manager()
            answer = "Yes"
            proxies = []
            if Options['Botnet_mode'][0] == "miner":
                miner = Proxy_Miner()
                proxies = miner.start_miner()

            if Options['Botnet_mode'][0] == "list":  # load proxies from list
                proxies = self.Proxy_manager.load_proxy_list(
                    Options['Botnet_mode'][1])
                answer = raw_input(
                    "[#] Do you want to check the proxy list? [Yes/no] : ")

            if answer.upper() in ["", "YES", "Y"]:
                proxy_q = Queue()
                proxy_checker = Process(target=Proxy_Checker.check_proxies,
                                        args=(
                                            proxy_q,
                                            proxies,
                                        ))
                cprint("Checking Proxies...")
                #cprint("Start Time: " + time.strftime('%H:%M:%S', time.localtime(time.time())))
                start_time = time.time()
                proxy_checker.start()
                proxies = proxy_q.get()
                proxy_checker.join()

            self.Proxy_manager.proxies = proxies
            self.Proxy_manager.number_of_proxies = len(proxies)

            if Options['Botnet_mode'][0] == "miner":
                print "Writing Proxies to disk(~/.owtf/proxy_miner/proxies.txt)"
                miner.export_proxies_to_file("proxies.txt", proxies)
            if answer.upper() in ["", "YES", "Y"]:
                cprint("Proxy Check Time: " + time.strftime(
                    '%H:%M:%S', time.localtime(time.time() - start_time - 3600)))
                cprint("Done")

            proxy = self.Proxy_manager.get_next_available_proxy()

            #check proxy var... http:// sock://
            Options['OutboundProxy'] = []
            Options['OutboundProxy'].append(proxy["proxy"][0])
            Options['OutboundProxy'].append(proxy["proxy"][1])

            #start running and recheck proxies
        #OutboundProxy': ['http', '10.10.10.10', '8080']
        #Options["OutboundProxy"]=['http', '10.10.10.10', '8080']
        #print Options
        #time.sleep(21)
        #self.Error.FrameworkAbort("Testing Run")

    def StartProxy(self, Options):
        # The proxy along with supporting processes are started
        if not self.Config.Get('SIMULATION'):
            # Check if port is in use
            try:
                temp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                temp_socket.bind((self.Config.Get('INBOUND_PROXY_IP'),
                                  int(self.Config.Get('INBOUND_PROXY_PORT'))))
                temp_socket.close()
            except Exception:
                self.Error.FrameworkAbort("Inbound proxy address " +
                                          self.Config.Get('INBOUND_PROXY') +
                                          " already in use")

            # If everything is fine
            self.ProxyProcess = proxy.ProxyProcess(
                self, Options['OutboundProxy'], Options['OutboundProxyAuth'])
            self.TransactionLogger = transaction_logger.TransactionLogger(self)
            cprint("Starting Inbound proxy at " +
                   self.Config.Get('INBOUND_PROXY'))
            self.ProxyProcess.start()
            cprint("Starting Transaction logger process")
            self.TransactionLogger.start()
            self.Requester = requester.Requester(self, [
                self.Config.Get('INBOUND_PROXY_IP'),
                self.Config.Get('INBOUND_PROXY_PORT')
            ])
            cprint("Proxy transaction's log file at %s" %
                   (self.Config.Get("PROXY_LOG")))
            cprint("Visit http://" + self.Config.Get('INBOUND_PROXY') +
                   "/proxy to use Plug-n-Hack standard")
            cprint(
                "Execution of OWTF is halted. You can browse through the OWTF proxy. Press Enter to continue with OWTF"
            )
            if Options["Interactive"]:
                raw_input()
        else:
            self.Requester = requester.Requester(self,
                                                 Options['OutboundProxy'])

    def outputfunc(self, q):
        """This is the function/thread which writes to the terminal.

        It takes content from the queue and, if showOutput is true, writes it
        to the console; otherwise it appends it to a variable. If the next
        token is 'end', it simply writes to the console.

        """
        t = ""
        #flags = fcntl.fcntl(sys.stdout, fcntl.F_GETFL)
        #fcntl.fcntl(sys.stdout, fcntl.F_SETFL, flags | os.O_NONBLOCK)
        while True:
            try:
                k = q.get()
                #print k
            except:
                continue
            if k == 'end':
                try:
                    sys.stdout.write(t)
                except:
                    pass
                return
            t = t + k
            if (self.showOutput):
                try:
                    sys.stdout.write(t)
                    t = ""
                except:
                    pass

    def initlogger(self):
        """Init two loggers to output in logfile and stdout."""
        #logger for output in console
        self.outputqueue = multiprocessing.Queue()
        result_queue = logQueue(self.outputqueue)
        log = logging.getLogger('general')
        infohandler = logging.StreamHandler(result_queue)
        log.setLevel(logging.INFO)
        infoformatter = logging.Formatter("%(message)s")
        infohandler.setFormatter(infoformatter)
        log.addHandler(infohandler)
        self.outputthread = Thread(target=self.outputfunc,
                                   args=(self.outputqueue, ))
        self.outputthread.start()

        #logger for output in log file
        log = logging.getLogger('logfile')
        infohandler = logging.FileHandler(self.Config.Get("OWTF_LOG_FILE"),
                                          mode="w+")
        log.setLevel(logging.INFO)
        infoformatter = logging.Formatter(
            "%(type)s - %(asctime)s - %(processname)s - %(functionname)s - %(message)s"
        )
        infohandler.setFormatter(infoformatter)
        log.addHandler(infohandler)

    def Start(self, Options):
        if self.initialise_framework(Options):
            return self.run_plugins()

    def initialise_framework(self, Options):
        self.ProxyMode = Options["ProxyMode"]
        cprint("Loading framework please wait..")
        self.Config.ProcessOptions(Options)
        self.initlogger()

        self.Timer = timer.Timer(
            self.Config.Get('DATE_TIME_FORMAT'))  # Requires user config
        self.Timer.StartTimer('core')
        self.initialise_plugin_handler_and_params(Options)
        if Options['ListPlugins']:
            self.PluginHandler.ShowPluginList()
            self.exitOutput()
            return False  # No processing required, just list available modules
        self.DB = db.DB(
            self
        )  # DB is initialised from some Config settings, must be hooked at this point

        self.DB.Init()
        self.messaging_admin.Init()
        Command = self.GetCommand(Options['argv'])

        self.DB.Run.StartRun(Command)  # Log owtf run options, start time, etc
        if self.Config.Get('SIMULATION'):
            cprint(
                "WARNING: In Simulation mode plugins are not executed, only the plugin sequence is simulated"
            )
        else:  # Reporter process is not needed unless a real run
            self.start_reporter()
        self.StartBotnetMode(Options)  # starting only if the Options are set
        self.StartProxy(Options)  # Proxy mode is started in that function
        self.Start_TOR_Mode(
            Options)  # TOR mode will start only if the Options are set
        # Proxy Check
        ProxySuccess, Message = self.Requester.ProxyCheck()
        cprint(Message)
        if not ProxySuccess:  # Regardless of interactivity settings if the proxy check fails = no point to move on
            self.Error.FrameworkAbort(Message)  # Abort if proxy check failed
        # Each Plugin adds its own results to the report, the report is updated on the fly after each plugin completes (or before!)
        self.Error.SetCommand(self.AnonymiseCommand(
            Command))  # Set anonymised invoking command for error dump info
        return True

    def initialise_plugin_handler_and_params(self, Options):
        self.PluginHandler = plugin_handler.PluginHandler(self, Options)
        self.PluginParams = plugin_params.PluginParams(self, Options)

    def run_plugins(self):
        Status = self.PluginHandler.ProcessPlugins()
        if Status['AllSkipped']:
            self.Finish('Skipped')
        elif not Status['SomeSuccessful'] and Status['SomeAborted']:
            self.Finish('Aborted')
            return False
        elif not Status[
                'SomeSuccessful']:  # Not a single plugin completed successfully, major crash or something
            self.Finish('Crashed')
            return False
        return True  # Scan was successful

    def ReportErrorsToGithub(self):
        cprint(
            "Do you want to add any extra info to the bug report ? [Just press Enter to skip]"
        )
        info = raw_input("> ")
        cprint(
            "Do you want to add your GitHub username to the report? [Press Enter to skip]"
        )
        user = raw_input("Reported by @")
        if self.Error.AddGithubIssue(Info=info, User=user):
            cprint("Github issue added, Thanks for reporting!!")
        else:
            cprint("Unable to add github issue, but thanks for trying :D")

    def Finish(self, Status='Complete', Report=True):
        if self.TOR_process != None:
            self.TOR_process.terminate()
        if self.Config.Get('SIMULATION'):
            if hasattr(self, 'messaging_admin'):
                self.messaging_admin.finishMessaging()
            self.exitOutput()
            exit()
        else:
            try:
                self.DB.Run.EndRun(Status)
                cprint("Saving DBs")
                self.DB.SaveDBs()  # Save DBs prior to producing the report :)
                if Report:
                    cprint(
                        "Finishing iteration and assembling report again (with updated run information)"
                    )
                    #PreviousTarget = self.Config.GetTarget()
                    #for Target in self.Config.GetTargets(): # We have to finish all the reports in this run to update run information
                    #    self.Config.SetTarget(Target) # Much save the report for each target
                    #self.Reporter.ReportFinish() # Must save the report again at the end regarless of Status => Update Run info
                    #self.Config.SetTarget(PreviousTarget) # Restore previous target
                cprint("OWTF iteration finished")

                if self.DB.ErrorCount(
                ) > 0:  # Some error occurred (counter not accurate but we only need to know if sth happened)
                    cprint('Errors saved to ' + self.Config.Get('ERROR_DB') +
                           '. Would you like us to auto-report bugs ?')
                    choice = raw_input("[Y/n] ")
                    if choice != 'n' and choice != 'N':
                        self.ReportErrorsToGithub()
                    else:
                        cprint(
                            "We know that you are planning on submitting it manually ;)"
                        )
                #self.dbHandlerProcess.join()
            except AttributeError:  # DB not instantiated yet!
                cprint("OWTF finished: No time to report anything! :P")
            finally:
                if self.ProxyMode:
                    try:
                        cprint(
                            "Stopping inbound proxy processes and cleaning up, Please wait!"
                        )
                        self.KillChildProcesses(self.ProxyProcess.pid)
                        self.ProxyProcess.terminate()
                        # No signal is generated during closing process by terminate()
                        os.kill(int(self.TransactionLogger.pid), signal.SIGINT)
                    except:  # It means the proxy was not started
                        pass
                if hasattr(self, 'reporting_process'):
                    self.reporting_queue.put("done")
                    self.reporting_process.join()
                if hasattr(self, 'DB'):
                    cprint("Saving DBs before stopping messaging")
                    self.DB.SaveDBs(
                    )  # So that detailed_report_register populated by reporting is saved :P
                if hasattr(self, 'messaging_admin'):
                    self.messaging_admin.finishMessaging()

                self.exitOutput()
                #print self.Timer.GetElapsedTime('core')
                exit()

    def exitOutput(self):
        if hasattr(self, 'outputthread'):
            self.outputqueue.put('end')
            self.outputthread.join()
            if os.path.exists("owtf_review"):
                if os.path.exists("owtf_review/logfile"):
                    data = open(self.Config.Get("OWTF_LOG_FILE")).read()
                    AppendToFile("owtf_review/logfile", data)
                else:
                    shutil.move(self.Config.Get("OWTF_LOG_FILE"),
                                "owtf_review")

    def GetSeed(self):
        try:
            return self.DB.GetSeed()
        except AttributeError:  # DB not instantiated yet
            return ""

    def is_ip_internal(self, ip):
        return len(self.re_ipv4_internal.findall(ip)) == 1

    def IsTargetUnreachable(self, Target=''):
        if not Target:
            Target = self.Config.GetTarget()
        #print "Target="+Target+" in "+str(self.DB.GetData('UNREACHABLE_DB'))+"?? -> "+str(Target in self.DB.GetData('UNREACHABLE_DB'))
        return Target in self.DB.GetData('UNREACHABLE_DB')

    def GetFileAsList(self, FileName):
        return GetFileAsList(FileName)

    def KillChildProcesses(self, parent_pid, sig=signal.SIGINT):
        PsCommand = subprocess.Popen("ps -o pid --ppid %d --noheaders" %
                                     parent_pid,
                                     shell=True,
                                     stdout=subprocess.PIPE)
        PsOutput = PsCommand.stdout.read()
        RetCode = PsCommand.wait()
        #assert RetCode == 0, "ps command returned %d" % RetCode
        for PidStr in PsOutput.split("\n")[:-1]:
            self.KillChildProcesses(int(PidStr), sig)
            #print PidStr
            try:
                os.kill(int(PidStr), sig)
            except:
                print("unable to kill it")
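
StartProxy above probes whether the inbound proxy address is free by binding a throw-away socket before launching the proxy process. A minimal standalone sketch of that check, with illustrative address values:

    import socket

    def port_is_free(ip, port):
        """Return True if (ip, port) can be bound, i.e. nothing is listening there."""
        temp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            temp_socket.bind((ip, int(port)))
            return True
        except socket.error:
            return False
        finally:
            temp_socket.close()

    # e.g. port_is_free("127.0.0.1", 8008)
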
Example #6
class Core(object):
    """
    The glue which holds everything together
    """
    def __init__(self, root_dir, owtf_pid):
        """
        [*] Tightly coupled, cohesive framework components
        [*] Order is important

        + IO decorated so as to abort on any permission errors
        + Attach error handler and config
        + Required folders created
        + All other components are attached to core: shell, db etc...
        + Required booleans and attributes are initialised
        + If modules have Init calls, they are run.
          Init procedures exist only where a component can complete its
          initialisation only after all other components have been attached
        """
        # ------------------------ IO decoration ------------------------ #
        self.decorate_io()

        # ------------------------ Error & Config ------------------------ #
        self.Error = error_handler.ErrorHandler(self)
        self.Config = config.Config(root_dir, owtf_pid, self)

        # ----------------------- Directory creation ----------------------- #
        self.create_dirs()
        self.pnh_log_file()  # <-- This is not supposed to be here

        # -------------------- Component attachment -------------------- #
        # (Order is important; if there is a dependency on some other
        # component, please mention it in a comment)
        # Shell might be needed in some places
        self.Shell = blocking_shell.Shell(self)
        # As soon as you have config create logger for MainProcess
        self.enable_logging()
        # Plugin Helper needs access to automate Plugin tasks
        self.PluginHelper = plugin_helper.PluginHelper(self)
        # Reporter needs access to Core to access Config, etc
        self.Reporter = reporter.Reporter(self)
        self.Selenium = selenium_handler.Selenium(self)
        self.InteractiveShell = interactive_shell.InteractiveShell(self)
        self.SET = set_handler.SETHandler(self)
        self.SMTP = smtp.SMTP(self)
        self.SMB = smb.SMB(self)
        # DB needs Config for some settings
        self.DB = db.DB(self)
        self.DB.Init()  # Separate Init because of self reference
        # Timer requires DB
        self.Timer = timer.Timer(self.DB.Config.Get('DATE_TIME_FORMAT'))
        # Zest related components
        self.zest = zest.Zest(self)
        self.zap_api_handler = zap.ZAP_API(self)

        # -------------------- Booleans and attributes -------------------- #
        self.IsIPInternalRegexp = re.compile(
            r"^127\.\d{1,3}\.\d{1,3}\.\d{1,3}$|^10\.\d{1,3}\.\d{1,3}\.\d{1,3}$|"
            r"^192\.168\.\d{1,3}\.\d{1,3}$|^172\.(1[6-9]|2[0-9]|3[0-1])\.[0-9]{1,3}\.[0-9]{1,3}$"
        )
        self.TOR_process = None

        # --------------------------- Init calls --------------------------- #
        # Nothing as of now
        self.health_check()

    def health_check(self):
        self.HealthCheck = health_check.HealthCheck(self)

    def create_dirs(self):
        """
        Any directory which needs to be created at the start of owtf
        needs to be placed inside here. No hardcoding of paths please
        """
        # Logs folder creation
        if not os.path.exists(self.Config.FrameworkConfigGetLogsDir()):
            self.CreateMissingDirs(self.Config.FrameworkConfigGetLogsDir())
        # Temporary storage directories creation
        self.create_temp_storage_dirs()

    def create_temp_storage_dirs(self):
        """Create a temporary directory in /tmp with pid suffix."""
        tmp_dir = os.path.join('/tmp', 'owtf', str(self.Config.OwtfPid))
        if not os.path.exists(tmp_dir):
            self.makedirs(tmp_dir)

    def clean_temp_storage_dirs(self):
        """Rename older temporary directory to avoid any further confusion."""
        curr_tmp_dir = os.path.join('/tmp', 'owtf', str(self.Config.OwtfPid))
        new_tmp_dir = os.path.join(
            '/tmp', 'owtf', 'old-%d' % self.Config.OwtfPid)
        if os.path.exists(curr_tmp_dir) and os.access(curr_tmp_dir, os.W_OK):
            os.rename(curr_tmp_dir, new_tmp_dir)

    # wrapper to logging.info function
    def log(self, msg, *args, **kwargs):
        logging.info(msg, *args, **kwargs)

    def CreateMissingDirs(self, path):
        if os.path.isfile(path):
            dir = os.path.dirname(path)
        else:
            dir = path
        if not os.path.exists(dir):
            self.makedirs(dir)  # Create any missing directories.

    def pnh_log_file(self):
        self.path = self.Config.FrameworkConfigGet('PNH_EVENTS_FILE')
        self.mode = "w"
        try:
            if os.path.isfile(self.path):
                pass
            else:
                with self.open(self.path, self.mode, owtf_clean=False):
                    pass
        except IOError as e:
            self.log("I/O error ({0}): {1}".format(e.errno, e.strerror))
            raise

    def write_event(self, content, mode):
        self.content = content
        self.mode = mode
        self.file_path = self.Config.FrameworkConfigGet('PNH_EVENTS_FILE')

        if (os.path.isfile(self.file_path) and os.access(self.file_path, os.W_OK)):
            try:
                with self.open(self.file_path, self.mode, owtf_clean=False) as log_file:
                    log_file.write(self.content)
                    log_file.write("\n")
                return True
            except IOError:
                return False

    def DumpFile(self, filename, contents, directory):
        save_path = os.path.join(directory, WipeBadCharsForFilename(filename))
        self.CreateMissingDirs(directory)
        with self.codecs_open(save_path, 'wb', 'utf-8') as f:
            f.write(contents.decode('utf-8', 'replace'))
        return save_path

    def get_child_pids(self, parent_pid):
        ps_command = subprocess.Popen(
            "ps -o pid --ppid %d --noheaders" % parent_pid,
            shell=True,
            stdout=subprocess.PIPE)
        output, error = ps_command.communicate()
        return [int(child_pid) for child_pid in output.split("\n")[:-1]]

    def GetCommand(self, argv):
        # Format command to remove directory and space-separate arguments.
        return " ".join(argv).replace(argv[0], os.path.basename(argv[0]))

    def AnonymiseCommand(self, command):
        # Host name setting value for all targets in scope.
        for host in self.DB.Target.GetAll('host_name'):
            if host:  # Value is not blank
                command = command.replace(host, 'some.target.com')
        for ip in self.DB.Target.GetAll('host_ip'):
            if ip:
                command = command.replace(ip, 'xxx.xxx.xxx.xxx')
        return command

    def Start_TOR_Mode(self, options):
        if options['TOR_mode'] is not None:
            if options['TOR_mode'][0] != "help":
                if tor_manager.TOR_manager.is_tor_running():
                    self.TOR_process = tor_manager.TOR_manager(
                        self,
                        options['TOR_mode'])
                    self.TOR_process = self.TOR_process.Run()
                else:
                    tor_manager.TOR_manager.msg_start_tor(self)
                    tor_manager.TOR_manager.msg_configure_tor(self)
                    self.Error.FrameworkAbort("TOR Daemon is not running")
            else:
                tor_manager.TOR_manager.msg_configure_tor()
                self.Error.FrameworkAbort("Configuration help is running")

    def StartBotnetMode(self, options):
        self.Proxy_manager = None
        if options['Botnet_mode'] is not None:
            self.Proxy_manager = Proxy_manager()
            answer = "Yes"
            proxies = []
            if options['Botnet_mode'][0] == "miner":
                miner = Proxy_Miner()
                proxies = miner.start_miner()

            if options['Botnet_mode'][0] == "list":  # load proxies from list
                proxies = self.Proxy_manager.load_proxy_list(
                    options['Botnet_mode'][1]
                )
                answer = raw_input(
                    "[#] Do you want to check the proxy list? [Yes/no] : "
                )

            if answer.upper() in ["", "YES", "Y"]:
                proxy_q = multiprocessing.Queue()
                proxy_checker = multiprocessing.Process(
                    target=Proxy_Checker.check_proxies,
                    args=(proxy_q, proxies,)
                )
                logging.info("Checking Proxies...")
                start_time = time.time()
                proxy_checker.start()
                proxies = proxy_q.get()
                proxy_checker.join()

            self.Proxy_manager.proxies = proxies
            self.Proxy_manager.number_of_proxies = len(proxies)

            if options['Botnet_mode'][0] == "miner":
                logging.info("Writing proxies to disk(~/.owtf/proxy_miner/proxies.txt)")
                miner.export_proxies_to_file("proxies.txt", proxies)
            if answer.upper() in ["", "YES", "Y"]:
                logging.info(
                    "Proxy Check Time: %s",
                    time.strftime(
                        '%H:%M:%S',
                        time.localtime(time.time() - start_time - 3600)
                    )
                )
                cprint("Done")

            if self.Proxy_manager.number_of_proxies == 0:
                self.Error.FrameworkAbort("No Alive proxies.")


            proxy = self.Proxy_manager.get_next_available_proxy()

            # check proxy var... http:// sock://
            options['OutboundProxy'] = []
            options['OutboundProxy'].append(proxy["proxy"][0])
            options['OutboundProxy'].append(proxy["proxy"][1])

    def StartProxy(self, options):
        # The proxy along with supporting processes are started
        if True:
            # Check if port is in use
            try:
                temp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                temp_socket.bind((
                    self.DB.Config.Get('INBOUND_PROXY_IP'),
                    int(self.DB.Config.Get('INBOUND_PROXY_PORT'))))
                temp_socket.close()
            except socket.error:
                self.Error.FrameworkAbort(
                    "Inbound proxy address " +
                    self.DB.Config.Get('INBOUND_PROXY_IP') + ":" +
                    self.DB.Config.Get("INBOUND_PROXY_PORT") +
                    " already in use")

            # If everything is fine.
            self.ProxyProcess = proxy.ProxyProcess(self)
            self.ProxyProcess.initialize(
                options['OutboundProxy'],
                options['OutboundProxyAuth']
            )
            self.TransactionLogger = transaction_logger.TransactionLogger(
                self,
                cache_dir=self.DB.Config.Get('INBOUND_PROXY_CACHE_DIR')
            )
            logging.warn(
                "%s:%s <-- HTTP(S) Proxy to which requests can be directed",
                self.DB.Config.Get('INBOUND_PROXY_IP'),
                self.DB.Config.Get("INBOUND_PROXY_PORT"))
            self.ProxyProcess.start()
            logging.debug("Starting Transaction logger process")
            self.TransactionLogger.start()
            self.Requester = requester.Requester(
                self, [
                    self.DB.Config.Get('INBOUND_PROXY_IP'),
                    self.DB.Config.Get('INBOUND_PROXY_PORT')]
                )
            logging.debug(
                "Proxy transaction's log file at %s",
                self.DB.Config.Get("PROXY_LOG"))
        else:
            self.Requester = requester.Requester(
                self,
                options['OutboundProxy'])

    def enable_logging(self, **kwargs):
        """
        + process_name <-- can be specified in kwargs
        + Must be called from inside the process because we are kind of
          overriding the root logger
        + Enables both file and console logging
        """
        process_name = kwargs.get(
            "process_name",
            multiprocessing.current_process().name
        )
        logger = logging.getLogger()
        logger.setLevel(logging.DEBUG)
        file_handler = self.FileHandler(
            self.Config.FrameworkConfigGetLogPath(process_name),
            mode="w+"
        )
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(FileFormatter())

        stream_handler = logging.StreamHandler(sys.stdout)
        stream_handler.setLevel(logging.INFO)
        stream_handler.setFormatter(ConsoleFormatter())

        # Replace any old handlers
        logger.handlers = [file_handler, stream_handler]

    def disable_console_logging(self, **kwargs):
        """
        + Must be called from inside the process because we need to
          remove the handler from that process's root logger
        + Since the console handler is added last, removing the last
          handler disables console logging
        """
        logger = logging.getLogger()
        logger.removeHandler(logger.handlers[-1])

    def Start(self, options):
        if self.initialise_framework(options):
            return self.run_server()

    def initialise_framework(self, options):
        self.ProxyMode = options["ProxyMode"]
        logging.info("Loading framework please wait..")
        # self.initlogger()

        # No processing required, just list available modules.
        if options['ListPlugins']:
            self.PluginHandler.ShowPluginList()
            self.exit_output()
            return False
        self.Config.ProcessOptions(options)
        command = self.GetCommand(options['argv'])

        self.StartBotnetMode(options)
        self.StartProxy(options)  # Proxy mode is started in that function.
        # Set anonymised invoking command for error dump info.
        self.Error.SetCommand(self.AnonymiseCommand(command))
        self.initialise_plugin_handler_and_params(options)
        return True

    def initialise_plugin_handler_and_params(self, options):
        # The order is important here ;)
        self.PluginHandler = plugin_handler.PluginHandler(self, options)
        self.PluginParams = plugin_params.PluginParams(self, options)
        self.WorkerManager = worker_manager.WorkerManager(self)

    def run_server(self):
        """
        This method starts the interface server
        """
        self.FileServer = server.FileServer(self)
        self.FileServer.start()
        self.InterfaceServer = server.InterfaceServer(self)
        logging.warn(
            "http://%s:%s <-- Web UI URL",
            self.Config.FrameworkConfigGet("SERVER_ADDR"),
            self.Config.FrameworkConfigGet("UI_SERVER_PORT"))
        self.disable_console_logging()
        logging.info("Press Ctrl+C when you spawned a shell ;)")
        self.InterfaceServer.start()

    def ReportErrorsToGithub(self):
        cprint(
            "Do you want to add any extra info to the bug report? "
            "[Just press Enter to skip]")
        info = raw_input("> ")
        cprint(
            "Do you want to add your GitHub username to the report? "
            "[Press Enter to skip]")
        user = raw_input("Reported by @")
        if self.Error.AddGithubIssue(Info=info, User=user):
            cprint("Github issue added, Thanks for reporting!!")
        else:
            cprint("Unable to add github issue, but thanks for trying :D")

    def Finish(self, status='Complete', report=True):
        if getattr(self, "TOR_process", None) is not None:
            self.TOR_process.terminate()
        # TODO: Fix this for lions_2014
        # if self.DB.Config.Get('SIMULATION'):
        #    exit()
        try:
            self.KillChildProcesses(multiprocessing.current_process().pid)
        except:
            pass
        try:
            self.PluginHandler.CleanUp()
        except AttributeError:  # DB not instantiated yet!
            pass
        finally:
            if getattr(self, "ProxyMode", None) is not None:
                try:
                    cprint(
                        "Stopping inbound proxy processes and "
                        "cleaning up, Please wait!")
                    self.KillChildProcesses(self.ProxyProcess.pid)
                    self.ProxyProcess.terminate()
                    # No signal is generated during closing process by
                    # terminate()
                    self.TransactionLogger.poison_q.put('done')
                    self.TransactionLogger.join()
                except:  # It means the proxy was not started.
                    pass
            exit()

    def IsIPInternal(self, IP):
        return len(self.IsIPInternalRegexp.findall(IP)) == 1

    def KillChildProcesses(self, parent_pid, sig=signal.SIGINT):
        ps_command = subprocess.Popen(
            "ps -o pid --ppid %d --noheaders" % parent_pid,
            shell=True,
            stdout=subprocess.PIPE)
        ps_output = ps_command.stdout.read()
        for pid_str in ps_output.split("\n")[:-1]:
            self.KillChildProcesses(int(pid_str), sig)
            try:
                os.kill(int(pid_str), sig)
            except:
                cprint("unable to kill it")

    def decorate_io(self):
        """Decorate different I/O functions to ensure OWTF to properly quit."""
        def catch_error(func):
            """Decorator on I/O functions.

            If an error is detected, force OWTF to quit properly.

            """
            def io_error(*args, **kwargs):
                """Call the original function while checking for errors.

                If the `owtf_clean` parameter is not explicitly passed, or if it
                is set to `True`, it forces OWTF to exit properly.

                """
                owtf_clean = kwargs.pop('owtf_clean', True)
                try:
                    return func(*args, **kwargs)
                except (OSError, IOError) as e:
                    if owtf_clean:
                        self.Error.FrameworkAbort(
                            "Error when calling '%s'! %s." %
                            (func.__name__, str(e)))
                    raise e
            return io_error

        # Decorated functions
        self.open = catch_error(open)
        self.codecs_open = catch_error(codecs.open)
        self.mkdir = catch_error(os.mkdir)
        self.makedirs = catch_error(os.makedirs)
        self.rmtree = catch_error(shutil.rmtree)
        self.FileHandler = catch_error(logging.FileHandler)
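
decorate_io above wraps the built-in I/O entry points (open, codecs.open, os.makedirs, shutil.rmtree, logging.FileHandler) so that any OSError/IOError aborts the framework unless the caller opts out with owtf_clean=False, as pnh_log_file does. A minimal standalone sketch of the same pattern, with a hypothetical abort() standing in for self.Error.FrameworkAbort:

    import sys

    def abort(message):
        # Stand-in for self.Error.FrameworkAbort in this sketch.
        sys.exit(message)

    def catch_error(func):
        def io_error(*args, **kwargs):
            # Abort unless the caller explicitly opts out with owtf_clean=False.
            owtf_clean = kwargs.pop('owtf_clean', True)
            try:
                return func(*args, **kwargs)
            except (OSError, IOError) as e:
                if owtf_clean:
                    abort("Error when calling '%s'! %s." % (func.__name__, str(e)))
                raise
        return io_error

    safe_open = catch_error(open)
    # safe_open("/nonexistent/path")                    -> aborts via abort()
    # safe_open("/nonexistent/path", owtf_clean=False)  -> raises IOError to the caller
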
Example #7
class Core:
    def __init__(self, RootDir, OwtfPid):
        self.CreateTempStorageDirs(OwtfPid)
        # Tightly coupled, cohesive framework components:
        self.Error = error_handler.ErrorHandler(self)
        self.Shell = blocking_shell.Shell(self) # Config needs to find plugins via shell = instantiate shell first
        self.Config = config.Config(RootDir, OwtfPid, self)
        self.Config.Init() # Now that the config is hooked to the core, init config sub-components
        self.PluginHelper = plugin_helper.PluginHelper(self) # Plugin Helper needs access to automate Plugin tasks
        self.Random = random.Random()
        self.Reporter = reporter.Reporter(self) # Reporter needs access to Core to access Config, etc
        self.Selenium = selenium_handler.Selenium(self)
        self.InteractiveShell = interactive_shell.InteractiveShell(self)
        self.SET = set_handler.SETHandler(self)
        self.SMTP = smtp.SMTP(self)
        self.SMB = smb.SMB(self)
        self.messaging_admin = messaging_admin.message_admin(self)
        self.showOutput=True
        self.TOR_process = None
        # Create internal IPv4 regex following rfc1918
        self.re_ipv4_internal = re.compile(
            r"(^128\.\d{1,3}\.\d{1,3}\.\d{1,3}$)|"
            r"(^10\.\d{1,3}\.\d{1,3}\.\d{1,3}$)|"
            r"(^192\.168\.\d{1,3}\.\d{1,3}$)|"
            r"(^172\.(1[6-9]|2[0-9]|3[0-1])\.[0-9]{1,3}\.[0-9]{1,3})$")

    def CreateTempStorageDirs(self, OwtfPid):
        temp_storage = os.path.join("/tmp", "owtf", str(OwtfPid))
        if not os.path.exists(temp_storage):
            os.makedirs(temp_storage)

    def CleanTempStorageDirs(self, OwtfPid):
        temp_storage = os.path.join("/tmp", "owtf", str(OwtfPid))
        renamed_temp_storage = os.path.join("/tmp", "owtf", "old-"+str(OwtfPid))
        if os.path.exists(temp_storage):
            os.rename(temp_storage, renamed_temp_storage)

    #wrapper to log function
    def log(self,*args):
        log(*args)

    def IsInScopeURL(self, URL): # To avoid following links to other domains
        ParsedURL = urlparse(URL)
        #URLHostName = URL.split("/")[2]
        for HostName in self.Config.GetAll('HOST_NAME'): # Get all known Host Names in Scope
            #if URLHostName == HostName:
            if ParsedURL.hostname == HostName:
                return True
        return False

    def CreateMissingDirs(self, Path):
        Dir = os.path.dirname(Path)
        if not os.path.exists(Dir):
            os.makedirs(Dir) # Create any missing directories

    def DumpFile(self, Filename, Contents, Directory):
        SavePath=Directory+WipeBadCharsForFilename(Filename)
        self.CreateMissingDirs(Directory)
        with open(SavePath, 'wb') as file:
            file.write(Contents)
        return SavePath

    def get_child_pids(self, parent_pid):
        PsCommand = subprocess.Popen("ps -o pid --ppid %d --noheaders" % parent_pid, shell=True, stdout=subprocess.PIPE)
        output, error = PsCommand.communicate()
        return [int(child_pid) for child_pid in output.split("\n")[:-1]]

    def GetPartialPath(self, Path):
        #return MultipleReplace(Path, List2DictKeys(RemoveListBlanks(self.Config.GetAsList( [ 'HOST_OUTPUT', 'OUTPUT_PATH' ]))))
        #print str(self.Config.GetAsList( [ 'HOST_OUTPUT', 'OUTPUT_PATH' ] ))
        #print "Path before="+Path
        #Path = MultipleReplace(Path, List2DictKeys(RemoveListBlanks(self.Config.GetAsList( [ 'OUTPUT_PATH' ]))))
        #Need to replace URL OUTPUT first so that "View Unique as HTML" Matches on body links work
        Path = MultipleReplace(Path, List2DictKeys(RemoveListBlanks(self.Config.GetAsList( [ 'HOST_OUTPUT', 'OUTPUT_PATH' ]))))
        #print "Path after="+Path

        if '/' == Path[0]: # Stripping out leading "/" if present
            Path = Path[1:]
        return Path

    def GetCommand(self, argv):
        # Format command to remove directory and space-separate arguments
        return " ".join(argv).replace(argv[0], os.path.basename(argv[0]))

    def AnonymiseCommand(self, Command):
        for Host in self.Config.GetAll('HOST_NAME'): # Host name setting value for all targets in scope
            if Host: # Value is not blank
                Command = Command.replace(Host, 'some.target.com')
        for ip in self.Config.GetAll('HOST_IP'):
            if ip:
                Command = Command.replace(ip, 'xxx.xxx.xxx.xxx')
        return Command

    def start_reporter(self):
        """
        This function starts the reporting process
        """
        self.reporting = reporting_process()
        self.reporting_queue = multiprocessing.Queue()
        self.reporting_process = multiprocessing.Process(target=self.reporting.start, args=(self,60,self.reporting_queue))
        self.reporting_process.start()

    def Start_TOR_Mode(self, Options):
        if Options['TOR_mode'] != None:
            #if Options['TOR_mode'][0] != "help":
            if tor_manager.TOR_manager.is_tor_running():
                self.TOR_process = tor_manager.TOR_manager(self, Options['TOR_mode'])
                self.TOR_process = self.TOR_process.Run()
            else:
                tor_manager.TOR_manager.msg_start_tor(self)
                tor_manager.TOR_manager.msg_configure_tor()
                self.Error.FrameworkAbort("TOR Daemon is not running")
            #else:
                #tor_manager.TOR_manager.msg_configure_tor()
                #self.Error.FrameworkAbort("Configuration help is running")

    def StartBotnetMode(self, Options):
        self.Proxy_manager = None
        if Options['Botnet_mode'] != None:
            self.Proxy_manager = Proxy_manager()
            answer = "Yes"
            proxies = []
            if Options['Botnet_mode'][0] == "miner":
                miner = Proxy_Miner()
                proxies = miner.start_miner()

            if Options['Botnet_mode'][0] == "list":  # load proxies from list
                proxies = self.Proxy_manager.load_proxy_list(Options['Botnet_mode'][1])
                answer = raw_input("[#] Do you want to check the proxy list? [Yes/no] : ")

            if answer.upper() in ["", "YES", "Y"]:
                proxy_q = Queue()
                proxy_checker = Process(
                                        target=Proxy_Checker.check_proxies,
                                        args=(proxy_q, proxies,)
                                        )
                cprint("Checking Proxies...")
                #cprint("Start Time: " + time.strftime('%H:%M:%S', time.localtime(time.time())))
                start_time = time.time()
                proxy_checker.start()
                proxies = proxy_q.get()
                proxy_checker.join()

            self.Proxy_manager.proxies = proxies
            self.Proxy_manager.number_of_proxies = len(proxies)

            if Options['Botnet_mode'][0] == "miner":
                print "Writing Proxies to disk(~/.owtf/proxy_miner/proxies.txt)"
                miner.export_proxies_to_file("proxies.txt", proxies)
            if answer.upper() in ["", "YES", "Y"]:
                cprint("Proxy Check Time: " + time.strftime(
                    '%H:%M:%S', time.localtime(time.time() - start_time - 3600)))
                cprint("Done")

            proxy = self.Proxy_manager.get_next_available_proxy()

            #check proxy var... http:// sock://
            Options['OutboundProxy'] = []
            Options['OutboundProxy'].append(proxy["proxy"][0])
            Options['OutboundProxy'].append(proxy["proxy"][1])

                #start running and recheck proxies
        #OutboundProxy': ['http', '10.10.10.10', '8080']
        #Options["OutboundProxy"]=['http', '10.10.10.10', '8080']
        #print Options
        #time.sleep(21)
        #self.Error.FrameworkAbort("Testing Run")

    def StartProxy(self, Options):
        # The proxy along with supporting processes are started
        if not self.Config.Get('SIMULATION'):
            # Check if port is in use
            try:
                temp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                temp_socket.bind((self.Config.Get('INBOUND_PROXY_IP'), int(self.Config.Get('INBOUND_PROXY_PORT'))))
                temp_socket.close()
            except Exception:
                self.Error.FrameworkAbort("Inbound proxy address " + self.Config.Get('INBOUND_PROXY') + " already in use")

            # If everything is fine
            self.ProxyProcess = proxy.ProxyProcess(
                self, Options['OutboundProxy'], Options['OutboundProxyAuth'])
            self.TransactionLogger = transaction_logger.TransactionLogger(self)
            cprint("Starting Inbound proxy at " + self.Config.Get('INBOUND_PROXY'))
            self.ProxyProcess.start()
            cprint("Starting Transaction logger process")
            self.TransactionLogger.start()
            self.Requester = requester.Requester(
                self, [self.Config.Get('INBOUND_PROXY_IP'), self.Config.Get('INBOUND_PROXY_PORT')])
            cprint("Proxy transaction's log file at %s" % self.Config.Get("PROXY_LOG"))
            cprint("Visit http://" + self.Config.Get('INBOUND_PROXY') + "/proxy to use the Plug-n-Hack standard")
            cprint("Execution of OWTF is halted. You can browse through the OWTF proxy. Press Enter to continue with OWTF")
            if Options["Interactive"]:
                raw_input()
        else:
            self.Requester = requester.Requester(self, Options['OutboundProxy'])

    def outputfunc(self, q):
        """Writer thread which handles terminal output.

        It takes content from the queue; if showOutput is true it writes it
        to the console, otherwise it appends it to a buffer. When the token
        'end' is received, it flushes the buffer to the console and returns.

        """
        t=""
        #flags = fcntl.fcntl(sys.stdout, fcntl.F_GETFL)
        #fcntl.fcntl(sys.stdout, fcntl.F_SETFL, flags | os.O_NONBLOCK)
        while True:
            try:
                k = q.get()
                #print k
            except:
                continue
            if k=='end':
                try:
                    sys.stdout.write(t)
                except:
                    pass
                return
            t = t+k
            if(self.showOutput):
                try:
                    sys.stdout.write(t)
                    t=""
                except:
                    pass
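
    # Illustrative usage of the output queue consumed by outputfunc (not part
    # of the original source): producers put plain string tokens on the queue,
    # and the literal token 'end' asks the writer thread to flush and exit,
    # e.g. self.outputqueue.put("some text\n") followed by
    # self.outputqueue.put('end').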

    def initlogger(self):
        """Init two loggers to output in logfile and stdout."""
        #logger for output in console
        self.outputqueue = multiprocessing.Queue()
        result_queue = logQueue(self.outputqueue)
        log = logging.getLogger('general')
        infohandler = logging.StreamHandler(result_queue)
        log.setLevel(logging.INFO)
        infoformatter = logging.Formatter("%(message)s")
        infohandler.setFormatter(infoformatter)
        log.addHandler(infohandler)
        self.outputthread = Thread(target=self.outputfunc, args=(self.outputqueue,))
        self.outputthread.start()

        #logger for output in log file
        log = logging.getLogger('logfile')
        infohandler = logging.FileHandler(self.Config.Get("OWTF_LOG_FILE"),mode="w+")
        log.setLevel(logging.INFO)
        infoformatter = logging.Formatter("%(type)s - %(asctime)s - %(processname)s - %(functionname)s - %(message)s")
        infohandler.setFormatter(infoformatter)
        log.addHandler(infohandler)

    def Start(self, Options):
        if self.initialise_framework(Options):
            return self.run_plugins()

    def initialise_framework(self, Options):
        self.ProxyMode = Options["ProxyMode"]
        cprint("Loading framework please wait..")
        self.Config.ProcessOptions(Options)
        self.initlogger()

        self.Timer = timer.Timer(self.Config.Get('DATE_TIME_FORMAT')) # Requires user config
        self.Timer.StartTimer('core')
        self.initialise_plugin_handler_and_params(Options)
        if Options['ListPlugins']:
            self.PluginHandler.ShowPluginList()
            self.exitOutput()
            return False # No processing required, just list available modules
        self.DB = db.DB(self) # DB is initialised from some Config settings, must be hooked at this point

        self.DB.Init()
        self.messaging_admin.Init()
        Command = self.GetCommand(Options['argv'])

        self.DB.Run.StartRun(Command) # Log owtf run options, start time, etc
        if self.Config.Get('SIMULATION'):
            cprint("WARNING: In Simulation mode plugins are not executed only plugin sequence is simulated")
        else: # Reporter process is not needed unless a real run
            self.start_reporter()
        self.StartBotnetMode(Options)  # starting only if the Options are set
        self.StartProxy(Options)  # Proxy mode is started in that function
        self.Start_TOR_Mode(Options)  # TOR mode will start only if the Options are set
        # Proxy Check
        ProxySuccess, Message = self.Requester.ProxyCheck()
        cprint(Message)
        if not ProxySuccess: # Regardless of interactivity settings if the proxy check fails = no point to move on
            self.Error.FrameworkAbort(Message) # Abort if proxy check failed
        # Each Plugin adds its own results to the report, the report is updated on the fly after each plugin completes (or before!)
        self.Error.SetCommand(self.AnonymiseCommand(Command)) # Set anonymised invoking command for error dump info
        return True

    def initialise_plugin_handler_and_params(self, Options):
        self.PluginHandler = plugin_handler.PluginHandler(self, Options)
        self.PluginParams = plugin_params.PluginParams(self, Options)

    def run_plugins(self):
        Status = self.PluginHandler.ProcessPlugins()
        if Status['AllSkipped']:
            self.Finish('Skipped')
        elif not Status['SomeSuccessful'] and Status['SomeAborted']:
            self.Finish('Aborted')
            return False
        elif not Status['SomeSuccessful']: # Not a single plugin completed successfully, major crash or something
            self.Finish('Crashed')
            return False
        return True # Scan was successful

    def ReportErrorsToGithub(self):
        cprint("Do you want to add any extra info to the bug report ? [Just press Enter to skip]")
        info = raw_input("> ")
        cprint("Do you want to add your GitHub username to the report? [Press Enter to skip]")
        user = raw_input("Reported by @")
        if self.Error.AddGithubIssue(Info=info, User=user):
            cprint("Github issue added, Thanks for reporting!!")
        else:
            cprint("Unable to add github issue, but thanks for trying :D")

    def Finish(self, Status='Complete', Report=True):
        if self.TOR_process is not None:
            self.TOR_process.terminate()
        if self.Config.Get('SIMULATION'):
            if hasattr(self,'messaging_admin'):
                self.messaging_admin.finishMessaging()
            self.exitOutput()
            exit()
        else:
            try:
                self.DB.Run.EndRun(Status)
                cprint("Saving DBs")
                self.DB.SaveDBs() # Save DBs prior to producing the report :)
                if Report:
                    cprint("Finishing iteration and assembling report again (with updated run information)")
                    #PreviousTarget = self.Config.GetTarget()
                    #for Target in self.Config.GetTargets(): # We have to finish all the reports in this run to update run information
                    #    self.Config.SetTarget(Target) # Much save the report for each target
                        #self.Reporter.ReportFinish() # Must save the report again at the end regarless of Status => Update Run info
                    #self.Config.SetTarget(PreviousTarget) # Restore previous target
                cprint("OWTF iteration finished")

                if self.DB.ErrorCount() > 0: # Some error occurred (counter not accurate but we only need to know if something happened)
                    cprint('Errors saved to ' + self.Config.Get('ERROR_DB') + '. Would you like us to auto-report bugs?')
                    choice = raw_input("[Y/n] ")
                    if choice != 'n' and choice != 'N':
                        self.ReportErrorsToGithub()
                    else:
                        cprint("We know that you are planning on submitting it manually ;)")
                #self.dbHandlerProcess.join()
            except AttributeError: # DB not instantiated yet!
                cprint("OWTF finished: No time to report anything! :P")
            finally:
                if self.ProxyMode:
                    try:
                        cprint("Stopping inbound proxy processes and cleaning up, Please wait!")
                        self.KillChildProcesses(self.ProxyProcess.pid)
                        self.ProxyProcess.terminate()
                        # No signal is generated during closing process by terminate()
                        os.kill(int(self.TransactionLogger.pid), signal.SIGINT)
                    except: # It means the proxy was not started
                        pass
                if hasattr(self,'reporting_process'):
                    self.reporting_queue.put("done")
                    self.reporting_process.join()
                if hasattr(self, 'DB'):
                    cprint("Saving DBs before stopping messaging")
                    self.DB.SaveDBs() # So that detailed_report_register populated by reporting is saved :P
                if hasattr(self,'messaging_admin'):
                    self.messaging_admin.finishMessaging()

                self.exitOutput()
                #print self.Timer.GetElapsedTime('core')
                exit()

    def exitOutput(self):
        if hasattr(self,'outputthread'):
            self.outputqueue.put('end')
            self.outputthread.join()
            if os.path.exists("owtf_review"):
                if os.path.exists("owtf_review/logfile"):
                    data = open(self.Config.Get("OWTF_LOG_FILE")).read()
                    AppendToFile("owtf_review/logfile", data)
                else:
                    shutil.move(self.Config.Get("OWTF_LOG_FILE"), "owtf_review")

    def GetSeed(self):
        try:
            return self.DB.GetSeed()
        except AttributeError: # DB not instantiated yet
            return ""

    def is_ip_internal(self, ip):
        return len(self.re_ipv4_internal.findall(ip)) == 1

    def IsTargetUnreachable(self, Target = ''):
        if not Target:
            Target = self.Config.GetTarget()
        #print "Target="+Target+" in "+str(self.DB.GetData('UNREACHABLE_DB'))+"?? -> "+str(Target in self.DB.GetData('UNREACHABLE_DB'))
        return Target in self.DB.GetData('UNREACHABLE_DB')

    def GetFileAsList(self, FileName):
        return GetFileAsList(FileName)

    def KillChildProcesses(self, parent_pid, sig=signal.SIGINT):
        PsCommand = subprocess.Popen("ps -o pid --ppid %d --noheaders" % parent_pid, shell=True, stdout=subprocess.PIPE)
        PsOutput = PsCommand.stdout.read()
        RetCode = PsCommand.wait()
        #assert RetCode == 0, "ps command returned %d" % RetCode
        for PidStr in PsOutput.split("\n")[:-1]:
            self.KillChildProcesses(int(PidStr), sig)
            #print PidStr
            try:
                os.kill(int(PidStr), sig)
            except:
                print("unable to kill it")
Example #8
0
class Core(object):
    """
    The glue which holds everything together
    """
    def __init__(self, root_dir, owtf_pid):
        """
        [*] Tightly coupled, cohesive framework components
        [*] Order is important

        + IO decorated so as to abort on any permission errors
        + Attach error handler and config
        + Required folders created
        + All other components are attached to core: shell, db etc...
        + Required booleans and attributes are initialised
        + If modules have Init calls, they are run
          Init procedures exist only when a component can complete its
          initialisation only after all other components have been attached
        """
        # ------------------------ IO decoration ------------------------ #
        self.decorate_io()

        # ------------------------ Error & Config ------------------------ #
        self.Error = error_handler.ErrorHandler(self)
        self.Config = config.Config(root_dir, owtf_pid, self)

        # ----------------------- Directory creation ----------------------- #
        self.create_dirs()
        self.pnh_log_file()  # <-- This is not supposed to be here

        # -------------------- Component attachment -------------------- #
        # (Order is important; if there is a dependency on some other
        # component, please mention it in a comment)
        # Shell might be needed in some places
        self.Shell = blocking_shell.Shell(self)
        # As soon as you have config create logger for MainProcess
        self.enable_logging()
        # Plugin Helper needs access to automate Plugin tasks
        self.PluginHelper = plugin_helper.PluginHelper(self)
        # Reporter needs access to Core to access Config, etc
        self.Reporter = reporter.Reporter(self)
        self.Selenium = selenium_handler.Selenium(self)
        self.InteractiveShell = interactive_shell.InteractiveShell(self)
        self.SET = set_handler.SETHandler(self)
        self.SMTP = smtp.SMTP(self)
        self.SMB = smb.SMB(self)
        # DB needs Config for some settings
        self.DB = db.DB(self)
        self.DB.Init()  # Separate Init because of self reference
        # Timer requires DB
        self.Timer = timer.Timer(self.DB.Config.Get('DATE_TIME_FORMAT'))
        # Zest related components
        self.zest = zest.Zest(self)
        self.zap_api_handler = zap.ZAP_API(self)

        # -------------------- Booleans and attributes -------------------- #
        self.IsIPInternalRegexp = re.compile(
            "^127.\d{1,3}.\d{1,3}.\d{1,3}$|^10.\d{1,3}.\d{1,3}.\d{1,3}$|"
            "^192.168.\d{1,3}$|^172.(1[6-9]|2[0-9]|3[0-1]).[0-9]{1,3}.[0-9]{1,3}$"
        )
        self.TOR_process = None

        # --------------------------- Init calls --------------------------- #
        # Nothing as of now
        self.health_check()
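        # Note: the attachment order above matters because later components
        # read attributes set by earlier ones (e.g. self.DB needs self.Config,
        # and self.Timer is built from self.DB.Config).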

    def health_check(self):
        self.HealthCheck = health_check.HealthCheck(self)

    def create_dirs(self):
        """
        Any directory which needs to be created at the start of owtf
        needs to be placed inside here. No hardcoding of paths please
        """
        # Logs folder creation
        if not os.path.exists(self.Config.FrameworkConfigGetLogsDir()):
            self.CreateMissingDirs(self.Config.FrameworkConfigGetLogsDir())
        # Temporary storage directories creation
        self.create_temp_storage_dirs()

    def create_temp_storage_dirs(self):
        """Create a temporary directory in /tmp with pid suffix."""
        tmp_dir = os.path.join('/tmp', 'owtf', str(self.Config.OwtfPid))
        if not os.path.exists(tmp_dir):
            self.makedirs(tmp_dir)

    def clean_temp_storage_dirs(self):
        """Rename older temporary directory to avoid any further confusions."""
        curr_tmp_dir = os.path.join('/tmp', 'owtf', str(self.Config.OwtfPid))
        new_tmp_dir = os.path.join('/tmp', 'owtf',
                                   'old-%d' % self.Config.OwtfPid)
        if os.path.exists(curr_tmp_dir) and os.access(curr_tmp_dir, os.W_OK):
            os.rename(curr_tmp_dir, new_tmp_dir)

    # wrapper to logging.info function
    def log(self, msg, *args, **kwargs):
        logging.info(msg, *args, **kwargs)

    def CreateMissingDirs(self, path):
        if os.path.isfile(path):
            dir = os.path.dirname(path)
        else:
            dir = path
        if not os.path.exists(dir):
            self.makedirs(dir)  # Create any missing directories.

    def pnh_log_file(self):
        self.path = self.Config.FrameworkConfigGet('PNH_EVENTS_FILE')
        self.mode = "w"
        try:
            if not os.path.isfile(self.path):
                with self.open(self.path, self.mode, owtf_clean=False):
                    pass
        except IOError as e:
            self.log("I/O error ({0}): {1}".format(e.errno, e.strerror))
            raise

    def write_event(self, content, mode):
        self.content = content
        self.mode = mode
        self.file_path = self.Config.FrameworkConfigGet('PNH_EVENTS_FILE')

        if (os.path.isfile(self.file_path)
                and os.access(self.file_path, os.W_OK)):
            try:
                with self.open(self.file_path, self.mode,
                               owtf_clean=False) as log_file:
                    log_file.write(self.content)
                    log_file.write("\n")
                return True
            except IOError:
                return False

    def DumpFile(self, filename, contents, directory):
        save_path = os.path.join(directory, WipeBadCharsForFilename(filename))
        self.CreateMissingDirs(directory)
        with self.codecs_open(save_path, 'wb', 'utf-8') as f:
            f.write(contents.decode('utf-8', 'replace'))
        return save_path

    def get_child_pids(self, parent_pid):
        ps_command = subprocess.Popen("ps -o pid --ppid %d --noheaders" %
                                      parent_pid,
                                      shell=True,
                                      stdout=subprocess.PIPE)
        output, error = ps_command.communicate()
        return [int(child_pid) for child_pid in output.split("\n")[:-1]]

    def GetCommand(self, argv):
        # Format command to remove directory and space-separate arguments.
        return " ".join(argv).replace(argv[0], os.path.basename(argv[0]))

    def AnonymiseCommand(self, command):
        # Host name setting value for all targets in scope.
        for host in self.DB.Target.GetAll('host_name'):
            if host:  # Value is not blank
                command = command.replace(host, 'some.target.com')
        for ip in self.DB.Target.GetAll('host_ip'):
            if ip:
                command = command.replace(ip, 'xxx.xxx.xxx.xxx')
        return command

    def Start_TOR_Mode(self, options):
        if options['TOR_mode'] is not None:
            if options['TOR_mode'][0] != "help":
                if tor_manager.TOR_manager.is_tor_running():
                    self.TOR_process = tor_manager.TOR_manager(
                        self, options['TOR_mode'])
                    self.TOR_process = self.TOR_process.Run()
                else:
                    tor_manager.TOR_manager.msg_start_tor(self)
                    tor_manager.TOR_manager.msg_configure_tor(self)
                    self.Error.FrameworkAbort("TOR Daemon is not running")
            else:
                tor_manager.TOR_manager.msg_configure_tor()
                self.Error.FrameworkAbort("Configuration help is running")

    def StartBotnetMode(self, options):
        self.Proxy_manager = None
        if options['Botnet_mode'] is not None:
            self.Proxy_manager = Proxy_manager()
            answer = "Yes"
            proxies = []
            if options['Botnet_mode'][0] == "miner":
                miner = Proxy_Miner()
                proxies = miner.start_miner()

            if options['Botnet_mode'][0] == "list":  # load proxies from list
                proxies = self.Proxy_manager.load_proxy_list(
                    options['Botnet_mode'][1])
                answer = raw_input(
                    "[#] Do you want to check the proxy list? [Yes/no] : ")

            if answer.upper() in ["", "YES", "Y"]:
                proxy_q = multiprocessing.Queue()
                proxy_checker = multiprocessing.Process(
                    target=Proxy_Checker.check_proxies,
                    args=(
                        proxy_q,
                        proxies,
                    ))
                logging.info("Checking Proxies...")
                start_time = time.time()
                proxy_checker.start()
                proxies = proxy_q.get()
                proxy_checker.join()

            self.Proxy_manager.proxies = proxies
            self.Proxy_manager.number_of_proxies = len(proxies)

            if options['Botnet_mode'][0] == "miner":
                logging.info(
                    "Writing proxies to disk(~/.owtf/proxy_miner/proxies.txt)")
                miner.export_proxies_to_file("proxies.txt", proxies)
            if answer.upper() in ["", "YES", "Y"]:
                logging.info(
                    "Proxy Check Time: %s",
                    time.strftime(
                        '%H:%M:%S',
                        time.localtime(time.time() - start_time - 3600)))
                cprint("Done")

            if self.Proxy_manager.number_of_proxies == 0:
                self.Error.FrameworkAbort("No Alive proxies.")

            proxy = self.Proxy_manager.get_next_available_proxy()

            # check proxy var... http:// sock://
            options['OutboundProxy'] = []
            options['OutboundProxy'].append(proxy["proxy"][0])
            options['OutboundProxy'].append(proxy["proxy"][1])

    def StartProxy(self, options):
        # The proxy, along with supporting processes, is started
        if True:
            # Check if port is in use
            try:
                temp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                temp_socket.bind(
                    (self.DB.Config.Get('INBOUND_PROXY_IP'),
                     int(self.DB.Config.Get('INBOUND_PROXY_PORT'))))
                temp_socket.close()
            except socket.error:
                self.Error.FrameworkAbort(
                    "Inbound proxy address " +
                    self.DB.Config.Get('INBOUND_PROXY_IP') + ":" +
                    self.DB.Config.Get("INBOUND_PROXY_PORT") +
                    " already in use")

            # If everything is fine.
            self.ProxyProcess = proxy.ProxyProcess(self)
            self.ProxyProcess.initialize(options['OutboundProxy'],
                                         options['OutboundProxyAuth'])
            self.TransactionLogger = transaction_logger.TransactionLogger(
                self, cache_dir=self.DB.Config.Get('INBOUND_PROXY_CACHE_DIR'))
            logging.warn(
                "%s:%s <-- HTTP(S) Proxy to which requests can be directed",
                self.DB.Config.Get('INBOUND_PROXY_IP'),
                self.DB.Config.Get("INBOUND_PROXY_PORT"))
            self.ProxyProcess.start()
            logging.debug("Starting Transaction logger process")
            self.TransactionLogger.start()
            self.Requester = requester.Requester(self, [
                self.DB.Config.Get('INBOUND_PROXY_IP'),
                self.DB.Config.Get('INBOUND_PROXY_PORT')
            ])
            logging.debug("Proxy transaction's log file at %s",
                          self.DB.Config.Get("PROXY_LOG"))
        else:
            self.Requester = requester.Requester(self,
                                                 options['OutboundProxy'])

    def enable_logging(self, **kwargs):
        """
        + process_name <-- can be specified in kwargs
        + Must be called from inside the process because we are kind of
          overriding the root logger
        + Enables both file and console logging
        """
        process_name = kwargs.get("process_name",
                                  multiprocessing.current_process().name)
        logger = logging.getLogger()
        logger.setLevel(logging.DEBUG)
        file_handler = self.FileHandler(
            self.Config.FrameworkConfigGetLogPath(process_name), mode="w+")
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(FileFormatter())

        stream_handler = logging.StreamHandler(sys.stdout)
        stream_handler.setLevel(logging.INFO)
        stream_handler.setFormatter(ConsoleFormatter())

        # Replace any old handlers
        logger.handlers = [file_handler, stream_handler]

    def disable_console_logging(self, **kwargs):
        """
        + Must be called from inside the process because we should
          remove handler for that root logger
        + Since we add console handler in the last, we can remove
          the last handler to disable console logging
        """
        logger = logging.getLogger()
        logger.removeHandler(logger.handlers[-1])
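
    # Illustrative call pattern (not from the original source): worker
    # processes call enable_logging(process_name=...) after they fork, and
    # the main process calls disable_console_logging() once the interface
    # server takes over the terminal (see run_server below).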

    def Start(self, options):
        if self.initialise_framework(options):
            return self.run_server()

    def initialise_framework(self, options):
        self.ProxyMode = options["ProxyMode"]
        logging.info("Loading framework please wait..")
        # self.initlogger()

        # No processing required, just list available modules.
        if options['ListPlugins']:
            self.PluginHandler.ShowPluginList()
            self.exit_output()
            return False
        self.Config.ProcessOptions(options)
        command = self.GetCommand(options['argv'])

        self.StartBotnetMode(options)
        self.StartProxy(options)  # Proxy mode is started in that function.
        # Set anonymised invoking command for error dump info.
        self.Error.SetCommand(self.AnonymiseCommand(command))
        self.initialise_plugin_handler_and_params(options)
        return True

    def initialise_plugin_handler_and_params(self, options):
        # The order is important here ;)
        self.PluginHandler = plugin_handler.PluginHandler(self, options)
        self.PluginParams = plugin_params.PluginParams(self, options)
        self.WorkerManager = worker_manager.WorkerManager(self)

    def run_server(self):
        """
        This method starts the interface server
        """
        self.FileServer = server.FileServer(self)
        self.FileServer.start()
        self.InterfaceServer = server.InterfaceServer(self)
        logging.warn("http://%s:%s <-- Web UI URL",
                     self.Config.FrameworkConfigGet("SERVER_ADDR"),
                     self.Config.FrameworkConfigGet("UI_SERVER_PORT"))
        self.disable_console_logging()
        logging.info("Press Ctrl+C when you spawned a shell ;)")
        self.InterfaceServer.start()

    def ReportErrorsToGithub(self):
        cprint("Do you want to add any extra info to the bug report? "
               "[Just press Enter to skip]")
        info = raw_input("> ")
        cprint("Do you want to add your GitHub username to the report? "
               "[Press Enter to skip]")
        user = raw_input("Reported by @")
        if self.Error.AddGithubIssue(Info=info, User=user):
            cprint("Github issue added, Thanks for reporting!!")
        else:
            cprint("Unable to add github issue, but thanks for trying :D")

    def Finish(self, status='Complete', report=True):
        if getattr(self, "TOR_process", None) is not None:
            self.TOR_process.terminate()
        # TODO: Fix this for lions_2014
        # if self.DB.Config.Get('SIMULATION'):
        #    exit()
        try:
            self.KillChildProcesses(multiprocessing.current_process().pid)
        except:
            pass
        try:
            self.PluginHandler.CleanUp()
        except AttributeError:  # DB not instantiated yet!
            pass
        finally:
            if getattr(self, "ProxyMode", None) is not None:
                try:
                    cprint("Stopping inbound proxy processes and "
                           "cleaning up, Please wait!")
                    self.KillChildProcesses(self.ProxyProcess.pid)
                    self.ProxyProcess.terminate()
                    # No signal is generated during closing process by
                    # terminate()
                    self.TransactionLogger.poison_q.put('done')
                    self.TransactionLogger.join()
                except:  # It means the proxy was not started.
                    pass
            exit()

    def IsIPInternal(self, IP):
        return len(self.IsIPInternalRegexp.findall(IP)) == 1

    def KillChildProcesses(self, parent_pid, sig=signal.SIGINT):
        ps_command = subprocess.Popen("ps -o pid --ppid %d --noheaders" %
                                      parent_pid,
                                      shell=True,
                                      stdout=subprocess.PIPE)
        ps_output = ps_command.stdout.read()
        for pid_str in ps_output.split("\n")[:-1]:
            self.KillChildProcesses(int(pid_str), sig)
            try:
                os.kill(int(pid_str), sig)
            except:
                cprint("unable to kill it")

    def decorate_io(self):
        """Decorate different I/O functions to ensure OWTF to properly quit."""
        def catch_error(func):
            """Decorator on I/O functions.

            If an error is detected, force OWTF to quit properly.

            """
            def io_error(*args, **kwargs):
                """Call the original function while checking for errors.

                If the `owtf_clean` parameter is not explicitly passed or if
                it is set to `True`, it forces OWTF to exit properly.

                """
                owtf_clean = kwargs.pop('owtf_clean', True)
                try:
                    return func(*args, **kwargs)
                except (OSError, IOError) as e:
                    if owtf_clean:
                        self.Error.FrameworkAbort(
                            "Error when calling '%s'! %s." %
                            (func.__name__, str(e)))
                    raise e

            return io_error

        # Decorated functions
        self.open = catch_error(open)
        self.codecs_open = catch_error(codecs.open)
        self.mkdir = catch_error(os.mkdir)
        self.makedirs = catch_error(os.makedirs)
        self.rmtree = catch_error(shutil.rmtree)
        self.FileHandler = catch_error(logging.FileHandler)
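
The decorate_io pattern above can also be tried in isolation. The sketch below is illustrative only (catch_io_error, safe_open, safe_makedirs and the on_error callback are my own names, not OWTF's); it wraps raw I/O callables so a failure is reported through a callback before the exception propagates, mirroring the owtf_clean keyword handling:

import logging
import os

logging.basicConfig(level=logging.INFO)


def catch_io_error(func, on_error):
    """Wrap func so OSError/IOError is reported via on_error unless owtf_clean=False."""
    def io_error(*args, **kwargs):
        owtf_clean = kwargs.pop('owtf_clean', True)
        try:
            return func(*args, **kwargs)
        except (OSError, IOError) as e:
            if owtf_clean:
                on_error("Error when calling '%s'! %s." % (func.__name__, str(e)))
            raise
    return io_error


# Example wiring, analogous to decorate_io() but reporting through logging
# instead of Error.FrameworkAbort.
safe_open = catch_io_error(open, on_error=logging.error)
safe_makedirs = catch_io_error(os.makedirs, on_error=logging.error)

try:
    safe_open("/nonexistent/dir/file.txt", "r")
except IOError:
    pass  # the failure was already reported through on_error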