Example #1
0
def showDomainInfo(urls):
    """Print a table of server information, one row per url in *urls*."""

    io.stdout("getting server info of domains can take a few mins")

    rows = []
    for url in urls:
        # check() is queried once per url; keep the handle so both
        # fields come from the same lookup
        details = serverinfo.check(url)
        rows.append([url, details[0], details[1]])

    # render the collected rows as a table
    io.printServerInfo(rows)
Example #2
0
        std.stdout("{} websites found".format(len(websites)))

        vulnerables = scanner.scan(websites)

        if not vulnerables:
            if args.s:
                std.stdout("saved as searches.txt")
                std.dump(websites, "searches.txt")

            exit(0)

        std.stdout("scanning server information")

        vulnerableurls = [result[0] for result in vulnerables]
        table_data = serverinfo.check(vulnerableurls)
        # add db name to info
        for result, info in zip(vulnerables, table_data):
            info.insert(1, result[1])  # database name

        std.fullprint(table_data)


    # do reverse domain of given site
    elif args.target != None and args.reverse:
        std.stdout("finding domains with same server as {}".format(args.target))
        domains = reverseip.reverseip(args.target)

        if domains == []:
            std.stdout("no domain found with reversing ip")
            exit(0)
Example #3
0
        std.stdout("{} websites found".format(len(websites)))

        vulnerables = scanner.scan(websites)

        if not vulnerables:
            if args.s:
                std.stdout("saved as searches.txt")
                std.dump(websites, "searches.txt")

            exit(0)

        std.stdout("scanning server information")

        vulnerableurls = [result[0] for result in vulnerables]
        table_data = serverinfo.check(vulnerableurls)
        # add db name to info
        for result, info in zip(vulnerables, table_data):
            info.insert(1, result[1])  # database name

        std.fullprint(table_data)

    # do reverse domain of given site
    elif args.target != None and args.reverse:
        std.stdout("finding domains with same server as {}".format(
            args.target))
        domains = reverseip.reverseip(args.target)

        if domains == []:
            std.stdout("no domain found with reversing ip")
            exit(0)
Example #4
0
    def run(self, argsttt):
        """Parse CLI arguments and dispatch to one of the scan modes.

        Modes, in priority order:
          1. dork + engine  : search the engine for candidate sites, scan them
          2. target+reverse : reverse-ip the target, optionally crawl each domain
          3. target         : crawl and scan the single given site
          otherwise the help text is printed.

        *argsttt* carries .target and .output, which override the parsed args.
        When args.output is set, results are also written to Sqliv_scan.txt
        and returned via jsonify.
        """
        self.initparser()
        args = parser.parse_args()

        args.target = argsttt.target
        args.output = argsttt.output

        # pre-bind so the output dump at the bottom never hits an unbound
        # name when no scanning branch assigned it (e.g. the help branch)
        vulnerables = []

        # find random SQLi by dork
        if args.dork is not None and args.engine is not None:
            std.stdout("searching for websites with given dork")

            # get websites based on search engine
            if args.engine in ["bing", "google", "yahoo"]:
                # eval resolves the engine module of the same name; this is
                # only safe because args.engine was whitelisted just above
                websites = eval(args.engine).search(args.dork, args.page)
            else:
                std.stderr("invalid search engine")
                exit(1)

            std.stdout("{} websites found".format(len(websites)))

            vulnerables = scanner.scan(websites)

            if not vulnerables:
                std.stdout(
                    "you can still scan those websites by crawling or reverse domain."
                )
                option = std.stdin("do you want save search result? [Y/N]",
                                   ["Y", "N"],
                                   upper=True)

                if option == 'Y':
                    std.stdout("saved as searches.txt")
                    std.dump(websites, "searches.txt")

                exit(0)

            std.stdout("scanning server information")

            vulnerableurls = [result[0] for result in vulnerables]
            table_data = serverinfo.check(vulnerableurls)
            # add db name to info
            for result, info in zip(vulnerables, table_data):
                info.insert(1, result[1])  # database name

            std.fullprint(table_data)

        # do reverse domain of given site
        elif args.target is not None and args.reverse:
            std.stdout("finding domains with same server as {}".format(
                args.target))
            domains = reverseip.reverseip(args.target)

            # truthiness covers both [] and a None return from reverseip
            if not domains:
                std.stdout("no domain found with reversing ip")
                exit(0)

            # if there are domains
            std.stdout("found {} websites".format(len(domains)))

            # ask whether user wants to save domains
            std.stdout(
                "scanning multiple websites with crawling will take long")
            option = std.stdin("do you want save domains? [Y/N]", ["Y", "N"],
                               upper=True)

            if option == 'Y':
                std.stdout("saved as domains.txt")
                std.dump(domains, "domains.txt")

            # ask whether user wants to crawl one by one or exit
            option = std.stdin("do you want start crawling? [Y/N]", ["Y", "N"],
                               upper=True)

            if option == 'N':
                exit(0)

            vulnerables = []
            for domain in domains:
                vulnerables_temp = self.singlescan(domain)
                if vulnerables_temp:
                    vulnerables += vulnerables_temp

            std.stdout("finished scanning all reverse domains")
            if not vulnerables:
                # typo fixed: message previously read "webistes"
                std.stdout("no vulnerables websites from reverse domains")
                exit(0)

            std.stdout("scanning server information")

            vulnerableurls = [result[0] for result in vulnerables]
            table_data = serverinfo.check(vulnerableurls)
            # add db name to info
            for result, info in zip(vulnerables, table_data):
                info.insert(1, result[1])  # database name

            std.fullprint(table_data)

        # scan SQLi of given site
        elif args.target:
            vulnerables = self.singlescan(args.target)

            if not vulnerables:
                exit(0)

            # show domain information of target urls
            std.stdout("getting server info of domains can take a few mins")
            table_data = serverinfo.check([args.target])

            std.printserverinfo(table_data)
            print("")  # give space between two table
            std.normalprint(vulnerables)

        # print help message, if no parameter is provided
        else:
            parser.print_help()

        # dump result into json if specified
        if args.output is not None:
            results = []
            for entry in vulnerables:
                # build a fresh dict per result; the original reused one dict,
                # making every list element alias the last result
                results.append({"url": entry[0], "ways": entry[1]})

            # context manager guarantees the file handle is closed
            with open("Sqliv_scan.txt", "w") as fo:
                fo.write(str(results))

            return jsonify(results)
Example #5
0
            option = io.stdin("do you want save search result? [Y/N]").upper()
            while option != 'Y' and option != 'N':
                option = io.stdin(
                    "do you want save search result? [Y/N]").upper()

            if option == 'Y':
                io.stdout("saved as searches.txt")
                io.dump(websites, "searches.txt")

            exit(0)

        io.stdout("vulnerable websites")

        table_data = []
        for each in vulnerables:
            server_info = serverinfo.check(each)
            table_data.append([each, server_info[0], server_info[1]])

        io.printVulnerablesWithInfo(table_data)

    # do reverse domain of given site
    elif args.t is not None and args.r:
        io.stdout("finding domains with same server as {}".format(args.t))
        domains = reverseip.reverseip(args.t)

        if domains == []:
            io.stdout("no domain found with reversing ip")
            exit(0)

        # if there are domains
        io.stdout("found {} websites".format(len(domains)))