def guideRegister(args):
    """Guide (wizard) mode: collect target URL(s) and plugin selection.

    Behavior depends on the CLI arguments:
      * -u given, no plugin  -> load the URL and use the default plugin set.
      * -u and plugin given  -> not handled here; return False so the caller
                                dispatches to the plugin-scan path.
      * neither              -> interactively prompt for URL and plugins.

    Returns True when guide-mode configuration is complete, False when the
    caller should handle the arguments itself.
    Raises ToolkitSystemException on empty/unreadable input.
    """
    # Case 1: URL supplied but no plugin selected -> sensible defaults.
    if args.u and not args.plugin:
        inputUrl = args.u
        urlconfig.url.append(makeurl(inputUrl))
        printMessage('[Prompt] URL has been loaded:%d' % len(urlconfig.url))
        urlconfig.diyPlugin = ["find_service", "whatcms"]  # default plugins
        printMessage("[Prompt] You select the plugins:%s" % (' '.join(urlconfig.diyPlugin)))
        urlconfig.scanport = False      # port scan off by default
        urlconfig.find_service = True   # service fingerprinting on by default
        return True
    # Case 2: both URL and plugin given -> caller handles it.
    if args.u and args.plugin:
        return False
    # Case 3: fully interactive.
    inputUrl = input('[1] Input url > ')
    if inputUrl == '':
        raise ToolkitSystemException("You have to enter the url")
    # A leading '@' means "read targets from this file", one URL per line.
    if inputUrl.startswith("@"):
        urlconfig.mutiurl = True
        filename = inputUrl[1:]
        try:
            # with-statement closes the file even on error (was leaked before)
            with open(filename, "r") as fp:
                o = fp.readlines()
            for url in o:
                urlconfig.url.append(makeurl(url.strip()))
        except IOError:
            # BUG FIX: previously referenced undefined name `fileName`
            # (NameError masked the real I/O failure).
            raise ToolkitSystemException("Filename:'%s' open faild" % filename)
        if len(o) == 0:
            raise ToolkitSystemException("The target address is empty")
    else:
        urlconfig.url.append(makeurl(inputUrl))
    printMessage('[Prompt] URL has been loaded:%d' % len(urlconfig.url))
    printMessage("[Prompt] You can select these plugins (%s) or select all" % (' '.join(LIST_PLUGINS)))
    diyPlugin = input("[2] Please select the required plugins > ")
    if diyPlugin.lower() == 'all':
        urlconfig.diyPlugin = LIST_PLUGINS  # full plugin list from settings
    else:
        urlconfig.diyPlugin = diyPlugin.strip().split(' ')
    urlconfig.scanport = False
    urlconfig.find_service = False  # port/service scan only if plugin chosen
    if 'find_service' in urlconfig.diyPlugin:
        urlconfig.find_service = True
        input_scanport = input(
            '[2.1] Need you scan all ports ?(Y/N) (default N)> ')
        if input_scanport.lower() in ("y", "yes"):
            urlconfig.scanport = True
def set_running_options(args):
    """Populate `running_config` from parsed CLI arguments.

    Handles: single URL (-u/--url), batch URL file (--file), plugin search
    (--search), explicit plugin registration (--plugins), GUI mode
    (--graphic), and thread/timeout defaults from `vulnscan_config`.
    """
    running_config.multiurl = False
    running_config.urls = []
    running_config.custom_plugin = False
    running_config.plugins = []
    # Single target URL.
    if args.url:
        running_config.urls.append(makeurl(args.url))
    # Batch import of targets, one URL per line.
    if args.file:
        running_config.multiurl = True
        checkFile(args.file)
        with open(args.file, 'r') as fd:
            contents = fd.readlines()
        for line in contents:
            running_config.urls.append(makeurl(line.strip()))
        print(running_config.urls)
    # Look up plugin information.
    if args.search:
        search_plugin(args.search)
    # Register the user-specified plugins.
    if args.plugins:
        search_plugin(args.plugins)
        running_config.custom_plugin = True
        register_plugins(args.plugins)
    elif args.graphic:
        # GUI mode: plugins are chosen from the interface later.
        running_config.custom_plugin = True
    else:
        # Default: register every plugin found in the plugins directory.
        plugins = os.listdir(vulnscan_paths['vulnscan_plugins_path'])
        # BUG FIX: the old bare `except: pass` around both removals hid real
        # errors, and a missing '__init__.py' skipped the '__pycache__'
        # removal too. Remove each non-plugin entry independently; only a
        # missing entry (ValueError) is expected and ignorable.
        for non_plugin in ('__init__.py', '__pycache__'):
            try:
                plugins.remove(non_plugin)
            except ValueError:
                pass
        register_plugins(plugins)
    # Thread count: configured value or 10.
    running_config.threads = vulnscan_config.threads
    if running_config.threads is None:
        running_config.threads = 10
    running_config.threads = int(running_config.threads)
    # Request timeout in seconds: configured value or 10.
    running_config.timeout = vulnscan_config.TimeOut
    if running_config.timeout is None:
        running_config.timeout = 10
    running_config.timeout = int(running_config.timeout)
def guideRegister(args):
    """Interactive guide mode (Python 2 variant using raw_input).

    Returns False when both -u and --plugin were given (caller handles it);
    otherwise prompts for URL(s), plugin selection, port-scan choice, thread
    count and crawler depth, storing everything on `urlconfig`.
    Raises ToolkitSystemException on empty/unreadable input.
    """
    if args.plugin and args.u:
        return False
    inputUrl = raw_input('[1] Input url > ')
    # BUG FIX: was `inputUrl is ''` — identity comparison against a string
    # literal is implementation-dependent; use equality.
    if inputUrl == '':
        raise ToolkitSystemException("You have to enter the url")
    # Leading '@' means "read targets from this file", one URL per line.
    if inputUrl.startswith("@"):
        urlconfig.mutiurl = True
        fileName = inputUrl[1:]
        try:
            with open(fileName, "r") as fp:
                o = fp.readlines()
            for url in o:
                urlconfig.url.append(makeurl(url.strip()))
        except IOError:
            raise ToolkitSystemException("Filename:'%s' open faild" % fileName)
        if len(o) == 0:
            raise ToolkitSystemException("The target address is empty")
    else:
        urlconfig.url.append(makeurl(inputUrl))
    printMessage('[Prompt] URL has been loaded:%d' % len(urlconfig.url))
    printMessage("[Prompt] You can select these plugins (%s) or select all" % (' '.join(LIST_PLUGINS)))
    diyPlugin = raw_input("[2] Please select the required plugins > ")
    if diyPlugin.lower() == 'all':
        urlconfig.diyPlugin = LIST_PLUGINS
    else:
        urlconfig.diyPlugin = diyPlugin.strip().split(' ')
    printMessage("[Prompt] You select the plugins:%s" % (' '.join(urlconfig.diyPlugin)))
    urlconfig.scanport = False
    urlconfig.find_service = False
    # Port scan is only offered when the find_service plugin is selected.
    if 'find_service' in urlconfig.diyPlugin:
        urlconfig.find_service = True
        input_scanport = raw_input(
            '[2.1] Need you scan all ports ?(Y/N) (default N)> ')
        if input_scanport.lower() in ("y", "yes"):
            urlconfig.scanport = True
    urlconfig.threadNum = raw_input(
        '[3] You need start number of thread (default 5) > ')
    if urlconfig.threadNum == '':
        urlconfig.threadNum = 5
    urlconfig.threadNum = int(urlconfig.threadNum)
    # NOTE(review): deepMax is left as a string when the user types a value;
    # presumably the caller converts it — confirm against the call site.
    urlconfig.deepMax = raw_input(
        '[4] Set the depth of the crawler (default 100 | 0 don\'t use crawler ) > '
    )
    if urlconfig.deepMax == '':
        urlconfig.deepMax = 100
def main():
    """ Main function of w9scan when running from command line. """
    # Interactive, Python-2-era entry point: prompt for target, optional
    # port scan and thread count, then run plugins and the crawler.
    try:
        checkEnvironment()  # check the runtime environment
        setPaths(modulePath())  # set absolute paths for directories/files
        banner()
        urlconfig.url = raw_input('Input url > ')
        urlconfig.url = makeurl(urlconfig.url)  # normalize the URL
        urlconfig.scanport = False
        input_scanport = raw_input('Need scan all ports ?(Y/N) (default N)> ')
        if input_scanport.lower() in ("y", "yes"):
            urlconfig.scanport = True
        # No default here: empty input would make int() raise below.
        urlconfig.threadNum = raw_input(
            'You need start number of thread(Recommendation number is 5) > ')
        urlconfig.threadNum = int(urlconfig.threadNum)
        e = Exploit_run(urlconfig.threadNum)  # plugin execution engine
        print '[***] ScanStart Target:%s' % urlconfig.url
        e.load_modules("www", urlconfig.url)
        e.run()
        # After the plugin pass, crawl the site.
        e.init_spider()
        s = crawler.SpiderMain(urlconfig.url)
        s.craw()
        logger.report()
    except KeyboardInterrupt:
        logger.critical("[***] User Interrupt")
        exit()
    except Exception as info:
        # Catch-all boundary: report and exit rather than traceback.
        print "[xxx] MainError", info
        exit()
def pluginScanRegister(args):
    """Register target URL(s) for a scan with a user-specified plugin.

    Only acts when both -u and --plugin were supplied. A target of the form
    '@file' loads one URL per line from `file`; otherwise the single URL is
    used. Raises ToolkitMissingPrivileges on unreadable/empty target files.
    """
    if args.u and args.plugin:
        url = args.u
        urlconfig.plugin = args.plugin
        if url.startswith("@"):
            # BUG FIX: mutiurl was set unconditionally, flagging even a
            # single -u target as a batch scan; set it only for '@file'
            # input, consistent with urlRegister/main.
            urlconfig.mutiurl = True
            fileName = url[1:]
            try:
                # with-statement closes the file even on error (was leaked).
                with open(fileName, "r") as fp:
                    o = fp.readlines()
                for u in o:
                    urlconfig.url.append(makeurl(u.strip()))
            except IOError:
                raise ToolkitMissingPrivileges("Filename:'%s' open faild" % fileName)
            if len(o) == 0:
                raise ToolkitMissingPrivileges("The target address is empty")
            printMessage(urlconfig.url)
        else:
            urlconfig.url.append(makeurl(url))
def urlRegister(args):
    """Register target URL(s) and the user-selected plugin on `urlconfig`.

    A target of the form '@file' loads one URL per line from `file` and
    enables multi-URL mode; otherwise the single URL is registered.
    Raises ToolkitMissingPrivileges on unreadable/empty target files.
    """
    url = args.u
    urlconfig.usePlugin = True
    urlconfig.plugin = args.plugin
    urlconfig.diyPlugin = [urlconfig.plugin]
    # Leading '@' means "read targets from this file".
    if url.startswith("@"):
        urlconfig.mutiurl = True
        filename = url[1:]  # strip the '@' marker
        try:
            # with-statement closes the file even on error (was leaked).
            with open(filename, "r") as fp:
                o = fp.readlines()
            for u in o:
                u = makeurl(u.strip())  # normalize each URL
                urlconfig.url.append(u)
                printMessage(u)  # echo the loaded URL
        except IOError:
            # BUG FIX: previously referenced undefined name `fileName`
            # (NameError masked the real I/O failure).
            raise ToolkitMissingPrivileges("Filename:'%s' open faild" % filename)
        if len(o) == 0:
            raise ToolkitMissingPrivileges("The target address is empty")
    else:
        # Single URL target.
        urlconfig.url.append(makeurl(url))
def main():
    """ Main function of w9scan when running from command line. """
    # Interactive, Python-2-era entry point; like the earlier variant but
    # additionally times the scan and reports a crash to the issue tracker.
    try:
        checkEnvironment()  # check the runtime environment
        setPaths(modulePath())  # set absolute paths for directories/files
        banner()
        urlconfig.url = raw_input('Input url > ')
        urlconfig.url = makeurl(urlconfig.url)  # normalize the URL
        urlconfig.scanport = False
        input_scanport = raw_input('Need scan all ports ?(Y/N) (default N)> ')
        if input_scanport.lower() in ("y", "yes"):
            urlconfig.scanport = True
        # No default: empty input would make int() raise below.
        urlconfig.threadNum = raw_input(
            'You need start number of thread(Recommendation number is 5) > ')
        urlconfig.threadNum = int(urlconfig.threadNum)
        startTime = time.clock()  # wall-clock timing of the whole scan
        e = Exploit_run(urlconfig.threadNum)  # plugin execution engine
        print '[***] ScanStart Target:%s' % urlconfig.url
        e.load_modules("www", urlconfig.url)
        e.run()
        # After the plugin pass, crawl the site.
        e.init_spider()
        s = crawler.SpiderMain(urlconfig.url)
        time.sleep(0.5)  # brief pause before crawling starts
        s.craw()
        endTime = time.clock()
        urlconfig.runningTime = endTime - startTime
        e.report()
    except KeyboardInterrupt:
        logger.critical("[***] User Interrupt")
        exit()
    except Exception as info:
        # Catch-all boundary: report the crash upstream, then exit.
        print "[xxx] MainError:", Exception, " :", info
        errinfo = Get_lineNumber_fileName()
        # NOTE(review): if the failure happened before Exploit_run was
        # created, `e` is undefined here and this handler itself raises.
        data = e.buildHtml.getData()
        aax = "error:%s urlconfig:%s date:%s" % (errinfo, str(urlconfig), data)
        createIssueForBlog(aax)  # file the crash report
        exit()
def on_importPushButton_clicked(self):
    """Qt slot: append the URL from the input line to the URL table."""
    # The table must not be modified while a scan is running.
    if self.scan_thread.isRunning():
        for worker in threading.enumerate():
            print(worker)
        self.showdialog('警告', f'正在扫描')
        return
    target = self.urlLineEdit.text()
    if not target:
        self.showdialog('警告', 'URL不能为空')
        return
    # Append a new row: column 0 is the 1-based index, column 1 the
    # normalized URL.
    next_row = self.urlsTableWidget.rowCount()
    self.urlsTableWidget.insertRow(next_row)
    index_cell = QTableWidgetItem('%s' % (next_row + 1))
    self.urlsTableWidget.setItem(next_row, 0, index_cell)
    url_cell = QTableWidgetItem('%s' % makeurl(target))
    self.urlsTableWidget.setItem(next_row, 1, url_cell)
def on_importFromFilePushButton_clicked(self):
    """Qt slot: bulk-load target URLs from a .txt file into the URL table."""
    # The table must not be modified while a scan is running.
    if self.scan_thread.isRunning():
        self.showdialog('警告', '正在扫描')
        return
    chosen, _filter = QFileDialog.getOpenFileName(
        self, "choose file", "", "*.txt")
    if not chosen:
        return  # user cancelled the dialog
    running_config.multiurl = True
    checkFile(chosen)
    with open(chosen, 'r') as fh:
        lines = fh.readlines()
    # New rows are appended after any rows already in the table.
    base = self.urlsTableWidget.rowCount()
    for idx, raw in enumerate(lines):
        target_row = base + idx
        self.urlsTableWidget.insertRow(target_row)
        self.urlsTableWidget.setItem(
            target_row, 0, QTableWidgetItem('%s' % (target_row + 1)))
        self.urlsTableWidget.setItem(
            target_row, 1, QTableWidgetItem('%s' % makeurl(raw.strip())))
def main():
    """ Main function of w9scan when running from command line. """
    # Python-2-era entry point with argparse: supports self-update, plugin
    # search, a non-interactive -u/--plugin mode, and an interactive guide.
    checkEnvironment()  # check the runtime environment
    setPaths(modulePath())  # set absolute paths for directories/files
    banner()
    parser = argparse.ArgumentParser(description="w9scan scanner")
    parser.add_argument("--update", help="update w9scan", action="store_true")
    parser.add_argument("--guide", help="w9scan to guide", action="store_true")
    parser.add_argument("-u", help="url")
    parser.add_argument("-p", "--plugin", help="plugins")
    parser.add_argument("-s", "--search", help="find infomation of plugin")
    args = parser.parse_args()
    urlconfig.mutiurl = False
    urlconfig.url = []
    if args.update:
        updateProgram()
        return 0
    if args.search:
        print(getPluginNum(args.search))
        return 0
    # Non-interactive path: both target and plugin given on the CLI.
    if args.u and args.plugin:
        url = args.u
        # Leading '@' means "read targets from this file", one per line.
        if url.startswith("@"):
            urlconfig.mutiurl = True
            fileName = url[1:]
            try:
                o = open(fileName, "r").readlines()
                for u in o:
                    urlconfig.url.append(makeurl(u.strip()))
            except IOError as error:
                logger.critical("Filename:'%s' open faild" % fileName)
                exit()
            if len(o) == 0:
                logger.critical("[xxx] The target address is empty")
                exit()
            print urlconfig.url
        else:
            urlconfig.url.append(makeurl(url))
        # Fixed defaults for the non-interactive run.
        urlconfig.scanport = False
        urlconfig.find_service = False
        urlconfig.threadNum = 5
        urlconfig.deepMax = 100
        urlconfig.diyPlugin = LIST_PLUGINS
        startTime = time.clock()  # wall-clock timing of the scan
        e = Exploit_run(urlconfig.threadNum)  # plugin execution engine
        for u in urlconfig.url:
            print('[***] ScanStart Target:%s' % u)
            e.setCurrentUrl(u)
            e.load_modules(args.plugin, u)
            e.run()
            time.sleep(0.01)
        endTime = time.clock()
        urlconfig.runningTime = endTime - startTime
        e.report()
        return 0
    # Interactive path.
    try:
        inputUrl = raw_input('[1] Input url > ')
        if inputUrl is '':
            logger.critical("[xxx] You have to enter the url")
            exit()
        # Leading '@' means "read targets from this file", one per line.
        if inputUrl.startswith("@"):
            urlconfig.mutiurl = True
            fileName = inputUrl[1:]
            try:
                o = open(fileName, "r").readlines()
                for url in o:
                    urlconfig.url.append(makeurl(url.strip()))
            except IOError as error:
                logger.critical("Filename:'%s' open faild" % fileName)
                exit()
            if len(o) == 0:
                logger.critical("[xxx] The target address is empty")
                exit()
            print urlconfig.url
        else:
            urlconfig.url.append(makeurl(inputUrl))
        print '[***] URL has been loaded:%d' % len(urlconfig.url)
        print("[Tips] You can select these plugins (%s) or select all" %
              (' '.join(LIST_PLUGINS)))
        diyPlugin = raw_input("[2] Please select the required plugins > ")
        if diyPlugin.lower() == 'all':
            urlconfig.diyPlugin = LIST_PLUGINS
        else:
            urlconfig.diyPlugin = diyPlugin.strip().split(' ')
        print "[***] You select the plugins:%s" % (' '.join(
            urlconfig.diyPlugin))
        urlconfig.scanport = False
        urlconfig.find_service = False
        # Port scan is only offered when the find_service plugin is chosen.
        if 'find_service' in urlconfig.diyPlugin:
            urlconfig.find_service = True
            input_scanport = raw_input(
                '[2.1] Need you scan all ports ?(Y/N) (default N)> ')
            if input_scanport.lower() in ("y", "yes"):
                urlconfig.scanport = True
        urlconfig.threadNum = raw_input(
            '[3] You need start number of thread (default 5) > ')
        if urlconfig.threadNum == '':
            urlconfig.threadNum = 5
        urlconfig.threadNum = int(urlconfig.threadNum)
        # NOTE(review): prompt says default 200 but the code falls back to
        # 100, and deepMax stays a string when typed — confirm intent.
        urlconfig.deepMax = raw_input(
            '[4] Set the depth of the crawler (default 200 | 0 don\'t use crawler ) > '
        )
        if urlconfig.deepMax == '':
            urlconfig.deepMax = 100
        startTime = time.clock()  # wall-clock timing of the scan
        e = Exploit_run(urlconfig.threadNum)  # plugin execution engine
        for url in urlconfig.url:
            print('[***] ScanStart Target:%s' % url)
            e.setCurrentUrl(url)
            e.load_modules("www", url)
            e.run()
            # Crawl only in single-target mode.
            if not urlconfig.mutiurl:
                e.init_spider()
                s = crawler.SpiderMain(url)
                s.craw()
            time.sleep(0.01)
        endTime = time.clock()
        urlconfig.runningTime = endTime - startTime
        e.report()
    except KeyboardInterrupt:
        print("[***] User Interrupt")
        exit()
    except Exception as info:
        # Catch-all boundary: report the crash upstream, then exit.
        # NOTE(review): if the failure happened before Exploit_run was
        # created, `e` is undefined here and this handler itself raises.
        logger.critical("[xxx] MainError: %s:%s" % (str(Exception), info))
        data = e.buildHtml.getData()
        aax = "error:%s urlconfig:%s date:%s" % (
            str(Exception) + " " + str(info), str(urlconfig), data)
        createIssueForBlog(aax)  # file the crash report
        exit()
def main():
    """ Main function of w9scan when running from command line. """
    # Python-2-era entry point with argparse: supports self-update, then an
    # interactive single-target scan (plugins, ports, threads, crawler).
    checkEnvironment()  # check the runtime environment
    setPaths(modulePath())  # set absolute paths for directories/files
    banner()
    parser = argparse.ArgumentParser(description="w9scan scanner")
    parser.add_argument("--update", help="update w9scan", action="store_true")
    parser.add_argument("--guide", help="w9scan to guide", action="store_true")
    args = parser.parse_args()
    if args.update:
        updateProgram()
        return 0
    try:
        urlconfig.url = raw_input('[1] Input url > ')
        if urlconfig.url is '':
            logger.critical("[xxx] You have to enter the url")
            exit()
        urlconfig.url = makeurl(urlconfig.url)  # normalize the URL
        print '[***] ScanStart Target:%s' % urlconfig.url
        print("[Tips] You can select these plugins (%s) or select all" %
              (' '.join(LIST_PLUGINS)))
        diyPlugin = raw_input("[2] Please select the required plugins > ")
        if diyPlugin.lower() == 'all':
            urlconfig.diyPlugin = LIST_PLUGINS
        else:
            urlconfig.diyPlugin = diyPlugin.strip().split(' ')
        print "[***] You select the plugins:%s" % (' '.join(
            urlconfig.diyPlugin))
        urlconfig.scanport = False
        # Port scan is only offered when the find_service plugin is chosen.
        if 'find_service' in urlconfig.diyPlugin:
            input_scanport = raw_input(
                '[2.1] Need you scan all ports ?(Y/N) (default N)> ')
            if input_scanport.lower() in ("y", "yes"):
                urlconfig.scanport = True
        urlconfig.threadNum = raw_input(
            '[3] You need start number of thread (default 5) > ')
        if urlconfig.threadNum == '':
            urlconfig.threadNum = 5
        urlconfig.threadNum = int(urlconfig.threadNum)
        # NOTE(review): deepMax stays a string when the user types a value —
        # presumably converted by the consumer; confirm.
        urlconfig.deepMax = raw_input(
            '[4] Set the depth of the crawler (default 200 | 0 don\'t use crawler ) > '
        )
        if urlconfig.deepMax == '':
            urlconfig.deepMax = 200
        startTime = time.clock()  # wall-clock timing of the scan
        e = Exploit_run(urlconfig.threadNum)  # plugin execution engine
        e.load_modules("www", urlconfig.url)
        e.run()
        # After the plugin pass, crawl the site.
        e.init_spider()
        s = crawler.SpiderMain(urlconfig.url)
        time.sleep(0.5)  # brief pause before crawling starts
        s.craw()
        endTime = time.clock()
        urlconfig.runningTime = endTime - startTime
        e.report()
    except KeyboardInterrupt:
        logger.critical("[***] User Interrupt")
        exit()
    except Exception as info:
        # Catch-all boundary: report the crash upstream, then exit.
        # NOTE(review): if the failure happened before Exploit_run was
        # created, `e` is undefined here and this handler itself raises.
        logger.critical("[xxx] MainError: %s:%s" %
                        (str(Exception), info))
        data = e.buildHtml.getData()
        aax = "error:%s urlconfig:%s date:%s" % (
            str(Exception) + " " + str(info), str(urlconfig), data)
        createIssueForBlog(aax)  # file the crash report
        exit()