std.stdout("scanning server information") vulnerableurls = [result[0] for result in vulnerables] table_data = serverinfo.check(vulnerableurls) # add db name to info for result, info in zip(vulnerables, table_data): info.insert(1, result[1]) # database name std.fullprint(table_data) # do reverse domain of given site elif args.target != None and args.reverse: std.stdout("finding domains with same server as {}".format( args.target)) domains = reverseip.reverseip(args.target) if domains == []: std.stdout("no domain found with reversing ip") exit(0) # if there are domains std.stdout("found {} websites".format(len(domains))) # ask whether user wants to save domains std.stdout("scanning multiple websites with crawling will take long") option = std.stdin("do you want save domains? [Y/N]", ["Y", "N"], upper=True) if option == 'Y': std.stdout("saved as domains.txt")
std.stdout("scanning server information") vulnerableurls = [result[0] for result in vulnerables] table_data = serverinfo.check(vulnerableurls) # add db name to info for result, info in zip(vulnerables, table_data): info.insert(1, result[1]) # database name std.fullprint(table_data) # do reverse domain of given site elif args.target != None and args.reverse: std.stdout("finding domains with same server as {}".format(args.target)) domains = reverseip.reverseip(args.target) if domains == []: std.stdout("no domain found with reversing ip") exit(0) # if there are domains std.stdout("found {} websites".format(len(domains))) # ask whether user wants to save domains std.stdout("scanning multiple websites with crawling will take long") option = std.stdin("do you want save domains? [Y/N]", ["Y", "N"], upper=True) if option == 'Y': std.stdout("saved as domains.txt") std.dump(domains, "domains.txt")
def run(self, argsttt):
    self.initparser()
    args = parser.parse_args()
    args.target = argsttt.target
    args.output = argsttt.output

    vulnerables = []  # keep defined so the output dump below never raises NameError

    # find random SQLi by dork
    if args.dork is not None and args.engine is not None:
        std.stdout("searching for websites with given dork")

        # get websites based on search engine
        if args.engine in ["bing", "google", "yahoo"]:
            websites = eval(args.engine).search(args.dork, args.page)
        else:
            std.stderr("invalid search engine")
            exit(1)

        std.stdout("{} websites found".format(len(websites)))

        vulnerables = scanner.scan(websites)
        if not vulnerables:
            std.stdout("you can still scan those websites by crawling or reverse domain.")
            option = std.stdin("do you want to save the search results? [Y/N]", ["Y", "N"], upper=True)
            if option == 'Y':
                std.stdout("saved as searches.txt")
                std.dump(websites, "searches.txt")
            exit(0)

        std.stdout("scanning server information")
        vulnerableurls = [result[0] for result in vulnerables]
        table_data = serverinfo.check(vulnerableurls)
        # add db name to info
        for result, info in zip(vulnerables, table_data):
            info.insert(1, result[1])  # database name
        std.fullprint(table_data)

    # do reverse domain of given site
    elif args.target is not None and args.reverse:
        std.stdout("finding domains with same server as {}".format(args.target))
        domains = reverseip.reverseip(args.target)
        if domains == []:
            std.stdout("no domain found with reversing ip")
            exit(0)

        # if there are domains
        std.stdout("found {} websites".format(len(domains)))

        # ask whether user wants to save domains
        std.stdout("scanning multiple websites with crawling will take long")
        option = std.stdin("do you want to save the domains? [Y/N]", ["Y", "N"], upper=True)
        if option == 'Y':
            std.stdout("saved as domains.txt")
            std.dump(domains, "domains.txt")

        # ask whether user wants to crawl one by one or exit
        option = std.stdin("do you want to start crawling? [Y/N]", ["Y", "N"], upper=True)
        if option == 'N':
            exit(0)

        vulnerables = []
        for domain in domains:
            vulnerables_temp = self.singlescan(domain)
            if vulnerables_temp:
                vulnerables += vulnerables_temp

        std.stdout("finished scanning all reverse domains")
        if vulnerables == []:
            std.stdout("no vulnerable websites from reverse domains")
            exit(0)

        std.stdout("scanning server information")
        vulnerableurls = [result[0] for result in vulnerables]
        table_data = serverinfo.check(vulnerableurls)
        # add db name to info
        for result, info in zip(vulnerables, table_data):
            info.insert(1, result[1])  # database name
        std.fullprint(table_data)

    # scan SQLi of given site
    elif args.target:
        vulnerables = self.singlescan(args.target)
        if not vulnerables:
            exit(0)

        # show domain information of target urls
        std.stdout("getting server info of domains can take a few mins")
        table_data = serverinfo.check([args.target])
        std.printserverinfo(table_data)
        print("")  # give space between the two tables
        std.normalprint(vulnerables)

    # print help message, if no parameter is provided
    else:
        parser.print_help()

    # dump result into json if specified
    if args.output is not None:
        results = []  # renamed from "re" to avoid shadowing the stdlib re module
        for i in vulnerables:
            # build a fresh dict per result; reusing a single dict here would
            # make every entry in results point at the same (last) url/ways pair
            results.append({"url": i[0], "ways": i[1]})
        with open("Sqliv_scan.txt", "w") as fo:
            fo.write(str(results))
        return jsonify(results)  # jsonify assumed imported elsewhere (e.g. from flask)
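
# A hedged aside, not part of the original module: eval(args.engine) above is
# safe only because the engine name was whitelisted on the line before it, but
# an explicit dispatch table removes the eval entirely. This sketch assumes the
# bing, google, and yahoo modules are imported at the top of the file and each
# exposes a search(dork, page) function, as the code above implies.
#
#     engines = {"bing": bing, "google": google, "yahoo": yahoo}
#     engine = engines.get(args.engine)
#     if engine is None:
#         std.stderr("invalid search engine")
#         exit(1)
#     websites = engine.search(args.dork, args.page)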
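
# The "scanning server information" block appears verbatim in both the dork and
# reverse-domain branches of run(); a small helper would keep the two copies in
# sync. A minimal sketch, assuming the same std and serverinfo APIs used above:
#
#     def fullprint_with_db(vulnerables):
#         vulnerableurls = [result[0] for result in vulnerables]
#         table_data = serverinfo.check(vulnerableurls)
#         for result, info in zip(vulnerables, table_data):
#             info.insert(1, result[1])  # database name
#         std.fullprint(table_data)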
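
# One more hedged aside: the output dump above writes str(results), which is a
# Python repr rather than valid JSON, despite the surrounding comment. If real
# JSON on disk is wanted, a sketch using only the standard library (and the
# same results list of {"url": ..., "ways": ...} dicts built above) would be:
#
#     import json
#     with open("Sqliv_scan.txt", "w") as fo:
#         json.dump(results, fo, indent=2)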