def main():
    """ Main function of w9scan when running from command line. """
    try:
        checkEnvironment()  # check the runtime environment
        setPaths(modulePath())  # set absolute paths for required directories and files
        banner()
        urlconfig.url = raw_input('Input url > ')
        urlconfig.url = makeurl(urlconfig.url)
        urlconfig.scanport = False
        input_scanport = raw_input('Scan all ports? (Y/N) (default N) > ')
        if input_scanport.lower() in ("y", "yes"):
            urlconfig.scanport = True
        urlconfig.threadNum = raw_input(
            'Number of threads to start (recommended: 5) > ')
        urlconfig.threadNum = int(urlconfig.threadNum)
        e = Exploit_run(urlconfig.threadNum)
        print '[***] ScanStart Target:%s' % urlconfig.url
        e.load_modules("www", urlconfig.url)
        e.run()
        e.init_spider()
        s = crawler.SpiderMain(urlconfig.url)
        s.craw()
        logger.report()
    except KeyboardInterrupt:
        logger.critical("[***] User Interrupt")
        exit()
    except Exception as info:
        print "[xxx] MainError", info
        exit()
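# main() hands the raw user input straight to makeurl() before scanning. The
# implementation of makeurl() is not shown in this section; the sketch below is
# a hypothetical illustration of the kind of normalization such a helper would
# need to do (strip whitespace, ensure a scheme). It is not the project's
# actual makeurl().
def _makeurl_sketch(raw_url):
    """Hypothetical URL normalizer: return raw_url with an http:// scheme if none was given."""
    url = raw_url.strip()
    if not (url.startswith("http://") or url.startswith("https://")):
        url = "http://" + url
    return url

# Example: _makeurl_sketch("  example.com ") -> "http://example.com"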
def webScan():
    startTime = time.clock()
    e = Exploit_run(urlconfig.threadNum)
    for url in urlconfig.url:
        logger.info('ScanStart Target:%s' % url)
        e.setCurrentUrl(url)
        e.load_modules("www", url)
        e.run()
        if not urlconfig.mutiurl:
            e.init_spider()
            s = crawler.SpiderMain(url)
            s.craw()
        time.sleep(0.1)
    endTime = time.clock()
    urlconfig.runningTime = endTime - startTime
    e.report()
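# webScan() measures the run with time.clock(). That call works on the Python 2
# interpreter this project targets, but it was deprecated in Python 3.3 and
# removed in Python 3.8; time.perf_counter() is the standard replacement for
# elapsed-time measurement. A minimal, standalone sketch of the same timing
# pattern for a hypothetical Python 3 port (illustration only, not part of w9scan):
def _timed_call_sketch(func, *args):
    """Run func(*args) and return (result, elapsed_seconds)."""
    import time
    start = time.perf_counter()
    result = func(*args)
    return result, time.perf_counter() - start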
def pluginScan():
    if not urlconfig.usePlugin:
        return False
    urlconfig.scanport = False
    urlconfig.find_service = False
    urlconfig.diyPlugin = LIST_PLUGINS
    startTime = time.clock()
    e = Exploit_run(urlconfig.threadNum)
    for u in urlconfig.url:
        logger.info('ScanStart Target:%s' % u)
        e.setCurrentUrl(u)
        e.load_modules(urlconfig.plugin, u)
        e.run()
        time.sleep(0.01)
    endTime = time.clock()
    urlconfig.runningTime = endTime - startTime
    e.report()
    sys.exit()
def main():
    """ Main function of w9scan when running from command line. """
    try:
        checkEnvironment()  # check the runtime environment
        setPaths(modulePath())  # set absolute paths for required directories and files
        banner()
        Test_Url = raw_input('Input url > ')
        Test_Url = Test_Url.strip()
        # Test_Url = "https://blog.hacking8.com/"
        e = Exploit_run(Test_Url)
        print '[***] ScanStart Target:%s' % Test_Url
        e.load_modules("www", Test_Url)
        logger.report()
    except KeyboardInterrupt:
        logger.critical("[***] User Interrupt")
        exit()
def main():
    """ Main function of w9scan when running from command line. """
    try:
        checkEnvironment()  # check the runtime environment
        setPaths(modulePath())  # set absolute paths for required directories and files
        banner()
        urlconfig.url = raw_input('Input url > ')
        urlconfig.url = makeurl(urlconfig.url)
        urlconfig.scanport = False
        input_scanport = raw_input('Scan all ports? (Y/N) (default N) > ')
        if input_scanport.lower() in ("y", "yes"):
            urlconfig.scanport = True
        urlconfig.threadNum = raw_input(
            'Number of threads to start (recommended: 5) > ')
        urlconfig.threadNum = int(urlconfig.threadNum)
        startTime = time.clock()
        e = Exploit_run(urlconfig.threadNum)
        print '[***] ScanStart Target:%s' % urlconfig.url
        e.load_modules("www", urlconfig.url)
        e.run()
        e.init_spider()
        s = crawler.SpiderMain(urlconfig.url)
        time.sleep(0.5)
        s.craw()
        endTime = time.clock()
        urlconfig.runningTime = endTime - startTime
        e.report()
    except KeyboardInterrupt:
        logger.critical("[***] User Interrupt")
        exit()
    except Exception as info:
        print "[xxx] MainError:", Exception, " :", info
        errinfo = Get_lineNumber_fileName()
        data = e.buildHtml.getData()
        aax = "error:%s urlconfig:%s date:%s" % (errinfo, str(urlconfig), data)
        createIssueForBlog(aax)
        exit()
def pluginScan():
    if not urlconfig.mutiurl:
        return False
    urlconfig.scanport = False
    urlconfig.find_service = False
    urlconfig.threadNum = 5
    urlconfig.deepMax = 100
    urlconfig.diyPlugin = LIST_PLUGINS
    startTime = time.clock()
    e = Exploit_run(urlconfig.threadNum)
    for u in urlconfig.url:
        printMessage('[***] ScanStart Target:%s' % u)
        e.setCurrentUrl(u)
        e.load_modules(urlconfig.plugin, u)
        e.run()
        time.sleep(0.01)
    endTime = time.clock()
    urlconfig.runningTime = endTime - startTime
    e.report()
    sys.exit()
def main():
    """ Main function of w9scan when running from command line. """
    try:
        checkEnvironment()  # check the runtime environment
        setPaths(modulePath())  # set absolute paths for required directories and files
        banner()
        # url config
        urlconfig.url = raw_input('Input url > ')
        urlconfig.url = urlconfig.url.strip()
        urlconfig.scanport = False
        input_scanport = raw_input('Scan all ports? (Y/N) (default N) > ')
        if input_scanport.lower() in ("y", "yes"):
            urlconfig.scanport = True
        e = Exploit_run()
        print '[***] ScanStart Target:%s' % urlconfig.url
        e.load_modules("www", urlconfig.url)
        logger.report()
    except KeyboardInterrupt:
        logger.critical("[***] User Interrupt")
        exit()
def main():
    """ Main function of w9scan when running from command line. """
    checkEnvironment()  # check the runtime environment
    setPaths(modulePath())  # set absolute paths for required directories and files
    banner()
    parser = argparse.ArgumentParser(description="w9scan scanner")
    parser.add_argument("--update", help="update w9scan", action="store_true")
    parser.add_argument("--guide", help="w9scan to guide", action="store_true")
    parser.add_argument("-u", help="url")
    parser.add_argument("-p", "--plugin", help="plugins")
    parser.add_argument("-s", "--search", help="find information about a plugin")
    args = parser.parse_args()
    urlconfig.mutiurl = False
    urlconfig.url = []
    if args.update:
        updateProgram()
        return 0
    if args.search:
        print(getPluginNum(args.search))
        return 0
    if args.u and args.plugin:
        url = args.u
        if url.startswith("@"):
            # "@filename" means: read one target URL per line from the given file
            urlconfig.mutiurl = True
            fileName = url[1:]
            try:
                o = open(fileName, "r").readlines()
                for u in o:
                    urlconfig.url.append(makeurl(u.strip()))
            except IOError as error:
                logger.critical("Filename:'%s' open failed" % fileName)
                exit()
            if len(o) == 0:
                logger.critical("[xxx] The target address is empty")
                exit()
            print urlconfig.url
        else:
            urlconfig.url.append(makeurl(url))
        urlconfig.scanport = False
        urlconfig.find_service = False
        urlconfig.threadNum = 5
        urlconfig.deepMax = 100
        urlconfig.diyPlugin = LIST_PLUGINS
        startTime = time.clock()
        e = Exploit_run(urlconfig.threadNum)
        for u in urlconfig.url:
            print('[***] ScanStart Target:%s' % u)
            e.setCurrentUrl(u)
            e.load_modules(args.plugin, u)
            e.run()
            time.sleep(0.01)
        endTime = time.clock()
        urlconfig.runningTime = endTime - startTime
        e.report()
        return 0
    try:
        inputUrl = raw_input('[1] Input url > ')
        if inputUrl == '':
            logger.critical("[xxx] You have to enter the url")
            exit()
        if inputUrl.startswith("@"):
            urlconfig.mutiurl = True
            fileName = inputUrl[1:]
            try:
                o = open(fileName, "r").readlines()
                for url in o:
                    urlconfig.url.append(makeurl(url.strip()))
            except IOError as error:
                logger.critical("Filename:'%s' open failed" % fileName)
                exit()
            if len(o) == 0:
                logger.critical("[xxx] The target address is empty")
                exit()
            print urlconfig.url
        else:
            urlconfig.url.append(makeurl(inputUrl))
        print '[***] URL has been loaded:%d' % len(urlconfig.url)
        print("[Tips] You can select these plugins (%s) or select all" %
              (' '.join(LIST_PLUGINS)))
        diyPlugin = raw_input("[2] Please select the required plugins > ")
        if diyPlugin.lower() == 'all':
            urlconfig.diyPlugin = LIST_PLUGINS
        else:
            urlconfig.diyPlugin = diyPlugin.strip().split(' ')
        print "[***] You select the plugins:%s" % (' '.join(urlconfig.diyPlugin))
        urlconfig.scanport = False
        urlconfig.find_service = False
        if 'find_service' in urlconfig.diyPlugin:
            urlconfig.find_service = True
            input_scanport = raw_input(
                '[2.1] Scan all ports? (Y/N) (default N) > ')
            if input_scanport.lower() in ("y", "yes"):
                urlconfig.scanport = True
        urlconfig.threadNum = raw_input(
            '[3] Number of threads to start (default 5) > ')
        if urlconfig.threadNum == '':
            urlconfig.threadNum = 5
        urlconfig.threadNum = int(urlconfig.threadNum)
        urlconfig.deepMax = raw_input(
            '[4] Set the depth of the crawler (default 100 | 0 don\'t use crawler) > ')
        if urlconfig.deepMax == '':
            urlconfig.deepMax = 100
        urlconfig.deepMax = int(urlconfig.deepMax)
        startTime = time.clock()
        e = Exploit_run(urlconfig.threadNum)
        for url in urlconfig.url:
            print('[***] ScanStart Target:%s' % url)
            e.setCurrentUrl(url)
            e.load_modules("www", url)
            e.run()
            if not urlconfig.mutiurl:
                # crawl only when a single target was supplied
                e.init_spider()
                s = crawler.SpiderMain(url)
                s.craw()
            time.sleep(0.01)
        endTime = time.clock()
        urlconfig.runningTime = endTime - startTime
        e.report()
    except KeyboardInterrupt:
        print("[***] User Interrupt")
        exit()
    except Exception as info:
        logger.critical("[xxx] MainError: %s:%s" % (str(Exception), info))
        data = e.buildHtml.getData()
        aax = "error:%s urlconfig:%s date:%s" % (
            str(Exception) + " " + str(info), str(urlconfig), data)
        createIssueForBlog(aax)
        exit()
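# Typical invocations of this entry point, based on the arguments registered
# above. The script name is assumed to be w9scan.py, and the plugin name and
# targets file are placeholders, not files or plugins shipped with this section:
#
#   python w9scan.py --update                            # self-update
#   python w9scan.py -s <plugin_name>                    # look up information about a plugin
#   python w9scan.py -u http://example.com -p <plugin_name>
#   python w9scan.py -u @targets.txt -p <plugin_name>    # "@file": one URL per line
#   python w9scan.py                                     # no flags: interactive guided mode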
def main():
    """ Main function of w9scan when running from command line. """
    checkEnvironment()  # check the runtime environment
    setPaths(modulePath())  # set absolute paths for required directories and files
    banner()
    parser = argparse.ArgumentParser(description="w9scan scanner")
    parser.add_argument("--update", help="update w9scan", action="store_true")
    parser.add_argument("--guide", help="w9scan to guide", action="store_true")
    args = parser.parse_args()
    if args.update:
        updateProgram()
        return 0
    try:
        urlconfig.url = raw_input('[1] Input url > ')
        if urlconfig.url == '':
            logger.critical("[xxx] You have to enter the url")
            exit()
        urlconfig.url = makeurl(urlconfig.url)
        print '[***] ScanStart Target:%s' % urlconfig.url
        print("[Tips] You can select these plugins (%s) or select all" %
              (' '.join(LIST_PLUGINS)))
        diyPlugin = raw_input("[2] Please select the required plugins > ")
        if diyPlugin.lower() == 'all':
            urlconfig.diyPlugin = LIST_PLUGINS
        else:
            urlconfig.diyPlugin = diyPlugin.strip().split(' ')
        print "[***] You select the plugins:%s" % (' '.join(urlconfig.diyPlugin))
        urlconfig.scanport = False
        if 'find_service' in urlconfig.diyPlugin:
            input_scanport = raw_input(
                '[2.1] Scan all ports? (Y/N) (default N) > ')
            if input_scanport.lower() in ("y", "yes"):
                urlconfig.scanport = True
        urlconfig.threadNum = raw_input(
            '[3] Number of threads to start (default 5) > ')
        if urlconfig.threadNum == '':
            urlconfig.threadNum = 5
        urlconfig.threadNum = int(urlconfig.threadNum)
        urlconfig.deepMax = raw_input(
            '[4] Set the depth of the crawler (default 200 | 0 don\'t use crawler) > ')
        if urlconfig.deepMax == '':
            urlconfig.deepMax = 200
        urlconfig.deepMax = int(urlconfig.deepMax)
        startTime = time.clock()
        e = Exploit_run(urlconfig.threadNum)
        e.load_modules("www", urlconfig.url)
        e.run()
        e.init_spider()
        s = crawler.SpiderMain(urlconfig.url)
        time.sleep(0.5)
        s.craw()
        endTime = time.clock()
        urlconfig.runningTime = endTime - startTime
        e.report()
    except KeyboardInterrupt:
        logger.critical("[***] User Interrupt")
        exit()
    except Exception as info:
        logger.critical("[xxx] MainError: %s:%s" % (str(Exception), info))
        data = e.buildHtml.getData()
        aax = "error:%s urlconfig:%s date:%s" % (
            str(Exception) + " " + str(info), str(urlconfig), data)
        createIssueForBlog(aax)
        exit()