def singlescan(url):
    """Scan a single target URL for SQL injection.

    If the URL carries a query string it is scanned directly; scanner.scan
    prints the finding itself when vulnerable, so this function exits on
    success. Otherwise (or when the direct scan is clean and the user
    agrees) the site is crawled and every discovered link is scanned.

    :param url: target URL to test
    :return: list of vulnerable URLs, or False when nothing is found or
             the user declines to crawl
    """
    if urlparse(url).query != '':
        if scanner.scan([url]):
            # scanner.scan already printed the vulnerable URL,
            # therefore exit
            exit(0)
        else:
            print("")  # move carriage return to newline
            io.stdout("no SQL injection vulnerability found")
            option = io.stdin(
                "do you want to crawl and continue scanning? [Y/N]",
                ["Y", "N"], upper=True)
            if option == 'N':
                return False

    # crawl and scan the links
    # if crawl cannot find links, do some reverse domain
    io.stdout("crawling {}".format(url))
    urls = crawler.crawl(url)
    if not urls:
        io.stdout("found no suitable urls to test SQLi")
        #io.stdout("you might want to do reverse domain")
        return False

    io.stdout("found {} urls from crawling".format(len(urls)))
    vulnerables = scanner.scan(urls)
    if not vulnerables:
        io.stdout("no SQL injection vulnerability found")
        return False
    return vulnerables
def singleScan(url):
    """Scan a single target URL for SQL injection.

    If the URL carries a query string it is scanned directly and the
    process exits when a vulnerability is found. Otherwise (or when the
    direct scan is clean and the user agrees) the site is crawled and
    the discovered links are passed to massiveScan.

    :param url: target URL to test
    :return: list of vulnerable URLs, or False when nothing is found or
             the user declines to crawl
    """
    if urlparse(url).query != '':
        io.stdout("scanning {}".format(url), end="")
        if scanner.scan(url):
            io.showsign(" vulnerable")
            exit(0)

        print("")  # move carriage return to newline
        io.stdout("no SQL injection vulnerability found")
        # re-prompt until the user gives a valid answer
        option = ''
        while option != 'Y' and option != 'N':
            option = io.stdin(
                "do you want to crawl and continue scanning? [Y/N]").upper()
        if option == 'N':
            return False

    # crawl and scan the links
    # if crawl cannot find links, do some reverse domain
    io.stdout("crawling {}".format(url))
    websites = crawler.crawl(url)
    if not websites:
        io.stdout("found no suitable urls to test SQLi")
        #io.stdout("you might want to do reverse domain")
        return False

    io.stdout("found {} urls from crawling".format(len(websites)))
    vulnerables = massiveScan(websites)
    if not vulnerables:
        io.stdout("no SQL injection vulnerability found")
        return False
    return vulnerables
if args.engine in ["bing", "google", "yahoo"]: websites = eval(args.engine).search(args.dork, args.page) else: io.stderr("invalid search engine") exit(1) io.stdout("{} websites found".format(len(websites))) vulnerables = scanner.scan(websites) if not vulnerables: io.stdout( "you can still scan those websites by crawling or reverse domain." ) option = io.stdin("do you want save search result? [Y/N]", ["Y", "N"], upper=True) if option == 'Y': io.stdout("saved as searches.txt") io.dump(websites, "searches.txt") exit(0) io.stdout("scanning server information") table_data = serverinfo.check(vulnerables) io.printVulnerablesWithInfo(table_data) # do reverse domain of given site elif args.target != None and args.reverse: io.stdout("finding domains with same server as {}".format(args.target))
websites = [ "http://www.sallatykka.com/web/index.php?id=31", "http://www.icdcprague.org/index.php?id=10", "http://www.tadspec.com/index.php?id=15", "http://www.redseahotels.com/index.php?id=289" ] io.stdout("{} websites found".format(len(websites))) vulnerables = massiveScan(websites) if not vulnerables: io.stdout( "you can still scan those websites by crawling or reverse domain." ) option = io.stdin("do you want save search result? [Y/N]").upper() while option != 'Y' and option != 'N': option = io.stdin( "do you want save search result? [Y/N]").upper() if option == 'Y': io.stdout("saved as searches.txt") io.dump(websites, "searches.txt") exit(0) io.stdout("vulnerable websites") table_data = [] for each in vulnerables: server_info = serverinfo.check(each)