Example #1
File: sqliv.py  Project: icysun/sqliv
from urllib.parse import urlparse

# scanner, crawler and io are modules from the sqliv project

def singlescan(url):
    """instance to scan single targeted domain"""

    if urlparse(url).query != '':
        if scanner.scan([url]):
            # scanner.scan prints the result if vulnerable,
            # so just exit here
            exit(0)

        else:
            print ""  # move carriage return to newline
            io.stdout("no SQL injection vulnerability found")
            option = io.stdin("do you want to crawl and continue scanning? [Y/N]", ["Y", "N"], upper=True)

            if option == 'N':
                return False

    # crawl and scan the links
    # if crawl cannot find links, do some reverse domain
    io.stdout("crawling {}".format(url))
    urls = crawler.crawl(url)

    if not urls:
        io.stdout("found no suitable urls to test SQLi")
        #io.stdout("you might want to do reverse domain")
        return False

    io.stdout("found {} urls from crawling".format(len(urls)))
    vulnerables = scanner.scan(urls)

    if not vulnerables:
        io.stdout("no SQL injection vulnerability found")
        return False

    return vulnerables
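
The function above leans on the project's scanner, crawler and io modules. A minimal driver sketch, assuming the target URL arrives as the first command-line argument (the argv handling here is hypothetical, not sqliv's actual CLI):

import sys

if __name__ == "__main__":
    # hypothetical entry point: scan the single target given on the command line
    result = singlescan(sys.argv[1])
    if result:
        # on success, singlescan returns the list of vulnerable urls
        for url in result:
            io.stdout(url)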
Example #2
def showDomainInfo(urls):
    """print a table of urls with their server info"""

    io.stdout("getting server info of domains can take a few mins")
    domains_info = []

    for url in urls:
        server_info = serverinfo.check(url)
        domains_info.append([url, server_info[0], server_info[1]])

    # print in table
    io.printServerInfo(domains_info)
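
io.printServerInfo is the project's table printer. For readers without that module at hand, a hypothetical stand-in might look like the sketch below; the column names, and the assumption that serverinfo.check yields a server string and a language string, are mine and not confirmed by the source:

def print_server_info(domains_info):
    # hypothetical stand-in for io.printServerInfo;
    # each row is assumed to be [url, server, language]
    fmt = "{:<40} {:<20} {:<20}"
    print(fmt.format("URL", "SERVER", "LANGUAGE"))
    for url, server, language in domains_info:
        print(fmt.format(url, server, language))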
Example #3
def massiveScan(websites):
    """scan multiple websites / urls"""

    # scan each website one by one
    vulnerables = []
    for website in websites:
        io.stdout("scanning {}".format(website), end="")
        if scanner.scan(website):
            io.showsign(" vulnerable")
            vulnerables.append(website)
            continue

        print ""  # move carriage return to newline

    if vulnerables:
        return vulnerables

    io.stdout("no vulnerable websites found")
    return False
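
A short usage sketch for massiveScan, assuming a list of candidate URLs is already in hand (the URLs below are purely illustrative):

targets = [
    "http://example.com/page.php?id=1",
    "http://example.com/item.php?pid=2",
]
vulnerables = massiveScan(targets)
if vulnerables:
    io.stdout("{} vulnerable websites found".format(len(vulnerables)))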
Example #4
                        help="scan target website",
                        type=str,
                        metavar="www.example.com")
    parser.add_argument('-r',
                        dest="reverse",
                        help="reverse domain",
                        action='store_true')


if __name__ == "__main__":
    initParser()
    args = parser.parse_args()

    # find random SQLi by dork
    if args.dork is not None and args.engine is not None:
        io.stdout("searching for websites with given dork")

        # get websites based on search engine
        if args.engine in ["bing", "google", "yahoo"]:
            websites = eval(args.engine).search(args.dork, args.page)
        else:
            io.stderr("invalid search engine")
            exit(1)

        io.stdout("{} websites found".format(len(websites)))

        vulnerables = scanner.scan(websites)

        if not vulnerables:
            io.stdout(
                "you can still scan those websites by crawling or reverse domain.")
Example #5
from urllib.parse import urlparse

def singleScan(url):
    """instance to scan single targeted domain"""

    if urlparse(url).query != '':
        io.stdout("scanning {}".format(url), end="")

        if scanner.scan(url):
            io.showsign(" vulnerable")
            exit(0)

        else:
            print ""  # move carriage return to newline
            io.stdout("no SQL injection vulnerability found")

            option = io.stdin(
                "do you want to crawl and continue scanning? [Y/N]").upper()
            while option not in ('Y', 'N'):
                option = io.stdin(
                    "do you want to crawl and continue scanning? [Y/N]").upper()

            if option == 'N':
                return False

    # crawl and scan the links
    # if crawl cannot find links, do some reverse domain
    io.stdout("crawling {}".format(url))
    websites = crawler.crawl(url)
    if not websites:
        io.stdout("found no suitable urls to test SQLi")
        #io.stdout("you might want to do reverse domain")
        return False

    io.stdout("found {} urls from crawling".format(len(websites)))
    vulnerables = massiveScan(websites)

    # massiveScan returns False (not []) when nothing is found,
    # so test for falsiness rather than comparing against []
    if not vulnerables:
        io.stdout("no SQL injection vulnerability found")
        return False

    return vulnerables
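
Example #1 folds this re-prompt loop into io.stdin itself via its choices and upper parameters. A generic version of such a helper might look like the following (a sketch of the idea, not the project's actual io.stdin implementation):

def prompt_choice(message, choices, upper=False):
    # re-prompt until the answer is one of the allowed choices,
    # mirroring the io.stdin(message, ["Y", "N"], upper=True) call in Example #1
    while True:
        answer = input("{} ".format(message))
        if upper:
            answer = answer.upper()
        if answer in choices:
            return answer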