Example #1
def singlescan(url):
    """instance to scan single targeted domain"""

    if urlparse(url).query != '':
        if scanner.scan([url]) != []:
            # scanner.scan prints when vulnerable,
            # therefore exit
            exit(0)

        else:
            print("")  # move carriage return to newline
            io.stdout("no SQL injection vulnerability found")
            option = io.stdin("do you want to crawl and continue scanning? [Y/N]", ["Y", "N"], upper=True)

            if option == 'N':
                return False

    # crawl and scan the links
    # if crawl cannot find links, do some reverse domain
    io.stdout("crawling {}".format(url))
    urls = crawler.crawl(url)

    if not urls:
        io.stdout("found no suitable urls to test SQLi")
        #io.stdout("you might want to do reverse domain")
        return False

    io.stdout("found {} urls from crawling".format(len(urls)))
    vulnerables = scanner.scan(urls)

    if vulnerables == []:
        io.stdout("no SQL injection vulnerability found")
        return False

    return vulnerables
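
For context, a minimal call site for singlescan might look like the sketch below; the target URL is purely illustrative, and the return value is assumed to be either False or a list of result tuples, as the code above suggests:

# hypothetical driver for singlescan(); the URL is an illustrative placeholder
vulnerables = singlescan("http://example.com/page.php?id=1")
if vulnerables:
    for item in vulnerables:
        print(item)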
Example #2
def test_scanner_throws_error_on_bad_integer():
    tests = ("5abc", "4_", "59595a4949")
    for test in tests:
        try:
            scan(test)
        except ValueError:
            continue
        else:
            assert False, test + " must throw a ValueError"
Example #3
def test_scanner_throws_error_on_invalid_char():
    tests = ("£", "$", "@")
    for test in tests:
        try:
            scan(test)
        except ValueError:
            continue
        else:
            assert False, test + " must throw a ValueError"
Example #4
def test_scanner_on_lists():
    tests = (
        ("()", [(TokenType.LEFT_BRACKET, "("),
                (TokenType.RIGHT_BRACKET, ")")]),
        ("(", [(TokenType.LEFT_BRACKET, "(")]),
        (")", [(TokenType.RIGHT_BRACKET, ")")]),
        (
            "()(())",
            [
                (TokenType.LEFT_BRACKET, "("),
                (TokenType.RIGHT_BRACKET, ")"),
                (TokenType.LEFT_BRACKET, "("),
                (TokenType.LEFT_BRACKET, "("),
                (TokenType.RIGHT_BRACKET, ")"),
                (TokenType.RIGHT_BRACKET, ")"),
            ],
        ),
        (
            "(abc(123)ghi)",
            [
                (TokenType.LEFT_BRACKET, "("),
                (TokenType.SYMBOL, "abc"),
                (TokenType.LEFT_BRACKET, "("),
                (TokenType.INTEGER, "123"),
                (TokenType.RIGHT_BRACKET, ")"),
                (TokenType.SYMBOL, "ghi"),
                (TokenType.RIGHT_BRACKET, ")"),
            ],
        ),
    )
    for given, expected in tests:
        assert scan(given) == expected
Example #5
def test_scanner_on_single_literals():
    tests = (
        ("123", (TokenType.INTEGER, "123")),
        ("abc", (TokenType.SYMBOL, "abc")),
        ("a53e", (TokenType.SYMBOL, "a53e")),
    )
    for given, expected in tests:
        assert scan(given) == [expected]
Example #6
def check_file(urls):
    with open(urls, "r") as lines:
        urls_list = []
        for line in lines:
            # strip the trailing newline so each entry is a clean URL
            urls_list.append(line.strip())
        vuls = scanner.scan(urls_list)
        with open("result.txt", "a") as result:
            for item in vuls:
                result.write("{}\n".format(item))
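
check_file expects a plain-text file with one URL per line and appends whatever scanner.scan reports to result.txt. A hypothetical call (the file name is illustrative only):

check_file("urls.txt")  # urls.txt: one URL per line; findings are appended to result.txt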
Example #7
def singlescan(URL):
    """instance to scan single targeted domain"""

    if urlparse(URL).query != '':
        Result = scanner.scan([URL])
        if Result != []:
            # scanner.scan already prints when vulnerable,
            # so just return the result
            return Result

        else:
            print("")  # move carriage return to newline
            std.stdout("no SQL injection vulnerability found")
            Option = std.stdin(
                "do you want to crawl and continue scanning? [Y/N]",
                ["Y", "N"],
                upper=True)

            if Option == 'N':
                return False

    # crawl and scan the links
    # if crawl cannot find links, do some reverse domain
    std.stdout("going to crawl {}".format(URL))
    URLS = crawler.crawl(URL)

    if not URLS:
        std.stdout("found no suitable urls to test SQLi")
        #std.stdout("you might want to do reverse domain")
        return False

    std.stdout("found {} urls from crawling".format(len(URLS)))
    vulnerables = scanner.scan(URLS)

    if vulnerables == []:
        std.stdout("no SQL injection vulnerability found")
        return False

    return vulnerables
Example #8
def singlescan(url):
    """instance to scan single targeted domain"""

    if urlparse(url).query != '':
        result = scanner.scan([url])
        if result != []:
            # scanner.scan already prints when vulnerable,
            # so just return the result
            return result

        else:
            print("")  # move carriage return to newline
            std.stdout("no SQL injection vulnerability found")
            option = std.stdin("do you want to crawl and continue scanning? [Y/N]", ["Y", "N"], upper=True)

            if option == 'N':
                return False

    # crawl and scan the links
    # if crawl cannot find links, do some reverse domain
    std.stdout("going to crawl {}".format(url))
    urls = crawler.crawl(url)

    if not urls:
        std.stdout("found no suitable urls to test SQLi")
        #std.stdout("you might want to do reverse domain")
        return False

    std.stdout("found {} urls from crawling".format(len(urls)))
    vulnerables = scanner.scan(urls)

    if vulnerables == []:
        std.stdout("no SQL injection vulnerability found")
        return False

    return vulnerables
Example #9
def massiveScan(websites):
    """scan multiple websites / urls"""

    # scan each website one by one
    vulnerables = []
    for website in websites:
        io.stdout("scanning {}".format(website), end="")
        if scanner.scan(website):
            io.showsign(" vulnerable")
            vulnerables.append(website)
            continue

        print("")  # move carriage return to newline

    if vulnerables:
        return vulnerables

    io.stdout("no vulnerable websites found")
    return False
Example #10
def singleScan(url):
    """instance to scan single targeted domain"""

    if urlparse(url).query != '':
        io.stdout("scanning {}".format(url), end="")

        if scanner.scan(url):
            io.showsign(" vulnerable")
            exit(0)

        else:
            print("")  # move carriage return to newline
            io.stdout("no SQL injection vulnerability found")

            option = io.stdin(
                "do you want to crawl and continue scanning? [Y/N]").upper()
            while option != 'Y' and option != 'N':
                option = io.stdin(
                    "do you want to crawl and continue scanning? [Y/N]").upper(
                    )

            if option == 'N':
                return False

    # crawl and scan the links
    # if crawl cannot find links, do some reverse domain
    io.stdout("crawling {}".format(url))
    websites = crawler.crawl(url)
    if not websites:
        io.stdout("found no suitable urls to test SQLi")
        #io.stdout("you might want to do reverse domain")
        return False

    io.stdout("found {} urls from crawling".format(len(websites)))
    vulnerables = massiveScan(websites)

    if vulnerables == []:
        io.stdout("no SQL injection vulnerability found")
        return False

    return vulnerables
Example #11
def test_scanner_on_compound_literals():
    tests = (
        (
            "123 abc 123",
            [
                (TokenType.INTEGER, "123"),
                (TokenType.SYMBOL, "abc"),
                (TokenType.INTEGER, "123"),
            ],
        ),
        (
            "abc 40 a3e2",
            [
                (TokenType.SYMBOL, "abc"),
                (TokenType.INTEGER, "40"),
                (TokenType.SYMBOL, "a3e2"),
            ],
        ),
    )
    for given, expected in tests:
        assert scan(given) == expected
Example #12
    args = parser.parse_args()

    # find random SQLi by dork
    if args.dork is not None and args.engine is not None:
        std.stdout("searching for websites with given dork")

        # get websites based on search engine
        if args.engine in ["bing", "google", "yahoo"]:
            websites = eval(args.engine).search(args.dork, args.page)
        else:
            std.stderr("invalid search engine")
            exit(1)

        std.stdout("{} websites found".format(len(websites)))

        vulnerables = scanner.scan(websites)

        if not vulnerables:
            if args.s:
                std.stdout("saved as searches.txt")
                std.dump(websites, "searches.txt")

            exit(0)

        std.stdout("scanning server information")

        vulnerableurls = [result[0] for result in vulnerables]
        table_data = serverinfo.check(vulnerableurls)
        # add db name to info
        for result, info in zip(vulnerables, table_data):
            info.insert(1, result[1])  # database name
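
A note on the eval(args.engine) call above: it resolves the user-supplied engine name to a module of the same name. A hedged, eval-free sketch of the same dispatch, assuming the project imports bing, google and yahoo modules that expose search(dork, page) as the code implies:

# dict dispatch instead of eval(); bing/google/yahoo are assumed to be
# the project's already-imported search-engine modules
engines = {"bing": bing, "google": google, "yahoo": yahoo}
if args.engine in engines:
    websites = engines[args.engine].search(args.dork, args.page)
else:
    std.stderr("invalid search engine")
    exit(1)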
Example #13
    def run(self, argsttt):
        self.initparser()
        args = parser.parse_args()

        args.target = argsttt.target
        args.output = argsttt.output
        # find random SQLi by dork
        if args.dork is not None and args.engine is not None:
            std.stdout("searching for websites with given dork")

            # get websites based on search engine
            if args.engine in ["bing", "google", "yahoo"]:
                websites = eval(args.engine).search(args.dork, args.page)
            else:
                std.stderr("invalid search engine")
                exit(1)

            std.stdout("{} websites found".format(len(websites)))

            vulnerables = scanner.scan(websites)

            if not vulnerables:
                std.stdout(
                    "you can still scan those websites by crawling or reverse domain."
                )
                option = std.stdin("do you want save search result? [Y/N]",
                                   ["Y", "N"],
                                   upper=True)

                if option == 'Y':
                    std.stdout("saved as searches.txt")
                    std.dump(websites, "searches.txt")

                exit(0)

            std.stdout("scanning server information")

            vulnerableurls = [result[0] for result in vulnerables]
            table_data = serverinfo.check(vulnerableurls)
            # add db name to info
            for result, info in zip(vulnerables, table_data):
                info.insert(1, result[1])  # database name

            std.fullprint(table_data)

        # do reverse domain of given site
        elif args.target is not None and args.reverse:
            std.stdout("finding domains with same server as {}".format(
                args.target))
            domains = reverseip.reverseip(args.target)

            if domains == []:
                std.stdout("no domain found with reversing ip")
                exit(0)

            # if there are domains
            std.stdout("found {} websites".format(len(domains)))

            # ask whether user wants to save domains
            std.stdout(
                "scanning multiple websites with crawling will take long")
            option = std.stdin("do you want save domains? [Y/N]", ["Y", "N"],
                               upper=True)

            if option == 'Y':
                std.stdout("saved as domains.txt")
                std.dump(domains, "domains.txt")

            # ask whether user wants to crawl one by one or exit
            option = std.stdin("do you want start crawling? [Y/N]", ["Y", "N"],
                               upper=True)

            if option == 'N':
                exit(0)

            vulnerables = []
            for domain in domains:
                vulnerables_temp = self.singlescan(domain)
                if vulnerables_temp:
                    vulnerables += vulnerables_temp

            std.stdout("finished scanning all reverse domains")
            if vulnerables == []:
                std.stdout("no vulnerable websites from reverse domains")
                exit(0)

            std.stdout("scanning server information")

            vulnerableurls = [result[0] for result in vulnerables]
            table_data = serverinfo.check(vulnerableurls)
            # add db name to info
            for result, info in zip(vulnerables, table_data):
                info.insert(1, result[1])  # database name

            std.fullprint(table_data)

        # scan SQLi of given site
        elif args.target:
            vulnerables = self.singlescan(args.target)

            if not vulnerables:
                exit(0)

            # show domain information of target urls
            std.stdout("getting server info of domains can take a few mins")
            table_data = serverinfo.check([args.target])

            std.printserverinfo(table_data)
            print("")  # give space between the two tables
            std.normalprint(vulnerables)

        # print help message, if no parameter is provided
        else:
            parser.print_help()

        # dump the results if an output file was specified
        if args.output is not None:
            # print(vulnerables)
            results = []
            for item in vulnerables:
                # build a fresh dict per result; reusing a single dict would
                # make every list entry point to the same object
                results.append({"url": item[0], "ways": item[1]})

            with open("Sqliv_scan.txt", "w") as fo:
                fo.write(str(results))

            return jsonify(results)
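
If genuine JSON output is wanted rather than the str(results) text dump above, a hedged alternative using the standard json module (output file name kept from the code above) would be:

import json

# serialize the collected results as real JSON
with open("Sqliv_scan.txt", "w") as fo:
    json.dump(results, fo, indent=2)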
Example #14
def test_scanner_on_special_symbol_characters():
    tests = ("?", "??", "is-alpha", "+", "-", "*", "/")
    for given in tests:
        assert scan(given) == [(TokenType.SYMBOL, given)]
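
Taken together, the scanner tests above (Examples #2 to #5, #11, and this one) pin down scan(): brackets tokenize on their own, whitespace separates literals, a lexeme that starts with a digit must be all digits, and only letters, digits, ?, +, -, *, and / may appear in a symbol. The following is a minimal sketch that satisfies those tests, assuming the TokenType names used in them; it is a reconstruction, not the project's actual implementation:

import enum
import re


class TokenType(enum.Enum):
    LEFT_BRACKET = enum.auto()
    RIGHT_BRACKET = enum.auto()
    INTEGER = enum.auto()
    SYMBOL = enum.auto()


# characters the tests accept inside a symbol: letters, digits, ?, +, -, *, /
SYMBOL_CHARS = re.compile(r"[A-Za-z0-9?+\-*/]+")


def scan(source):
    tokens = []
    i, n = 0, len(source)
    while i < n:
        ch = source[i]
        if ch.isspace():
            i += 1
        elif ch == "(":
            tokens.append((TokenType.LEFT_BRACKET, "("))
            i += 1
        elif ch == ")":
            tokens.append((TokenType.RIGHT_BRACKET, ")"))
            i += 1
        else:
            # collect a literal up to the next space or bracket
            j = i
            while j < n and not source[j].isspace() and source[j] not in "()":
                j += 1
            lexeme = source[i:j]
            if lexeme[0].isdigit():
                # "5abc" must raise, so a digit-led lexeme has to be all digits
                if not lexeme.isdigit():
                    raise ValueError("invalid integer literal: " + lexeme)
                tokens.append((TokenType.INTEGER, lexeme))
            elif SYMBOL_CHARS.fullmatch(lexeme):
                tokens.append((TokenType.SYMBOL, lexeme))
            else:
                raise ValueError("invalid character in: " + lexeme)
            i = j
    return tokens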