Example #1
    def startGoogleScan(self):
        print "Querying Bing Search: '%s' with max pages %d..." % (
            self.config["p_query"], self.config["p_pages"])

        pagecnt = 0
        curtry = 0

        last_request_time = datetime.datetime.now()

        while (pagecnt < self.config["p_pages"]):
            pagecnt = pagecnt + 1
            redo = True
            while (redo):
                try:
                    current_time = datetime.datetime.now()
                    diff = current_time - last_request_time
                    diff = int(diff.seconds)

                    if (diff <= self.cooldown):
                        if (diff > 0):
                            print "Commencing %ds bing cooldown..." % (
                                self.cooldown - diff)
                            time.sleep(self.cooldown - diff)

                    last_request_time = datetime.datetime.now()
                    resp = self.bs.search_web(
                        self.config["p_query"], {
                            'Web.Count': 50,
                            'Web.Offset': (pagecnt - 1) * self.results_per_page
                        })
                    results = resp['SearchResponse']['Web']['Results']
                    redo = False
                except KeyboardInterrupt:
                    raise
                except Exception, err:
                    # Report the error and retry; re-raising here would make
                    # the retry handling below unreachable.
                    print err
                    redo = True
                    sys.stderr.write("[RETRYING PAGE %d]\n" % (pagecnt))
                    curtry = curtry + 1
                    if (curtry > self.config["p_maxtries"]):
                        print "MAXIMUM COUNT OF (RE)TRIES REACHED!"
                        sys.exit(1)

            curtry = 0

            if (len(results) == 0): break
            sys.stderr.write("[PAGE %d]\n" % (pagecnt))
            try:
                for r in results:
                    single = singleScan(self.config)
                    single.setURL(r["Url"])
                    single.setQuite(True)
                    single.scan()
            except KeyboardInterrupt:
                raise
            time.sleep(1)
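The loop above throttles search requests with a fixed cooldown and retries a failed page up to p_maxtries times. A minimal standalone sketch of the same pattern, with a hypothetical fetch_page callable standing in for the actual search call (not part of fimap):

import sys
import time
import datetime

def fetch_all_pages(fetch_page, pages, cooldown, maxtries):
    # fetch_page(pagenum) is a placeholder for whatever performs one search request.
    last_request_time = datetime.datetime.now()
    results = []
    for pagecnt in range(1, pages + 1):
        curtry = 0
        while True:
            # Enforce a minimum delay between consecutive requests.
            elapsed = (datetime.datetime.now() - last_request_time).seconds
            if elapsed < cooldown:
                time.sleep(cooldown - elapsed)
            last_request_time = datetime.datetime.now()
            try:
                results.append(fetch_page(pagecnt))
                break
            except KeyboardInterrupt:
                raise
            except Exception, err:
                curtry += 1
                sys.stderr.write("[RETRYING PAGE %d] %s\n" % (pagecnt, err))
                if curtry > maxtries:
                    sys.exit(1)
    return results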
Example #2
    def startGoogleScan(self):
        print "Querying Google Search: '%s' with max pages %d..."%(self.config["p_query"], self.config["p_pages"])

        pagecnt = 0
        curtry = 0
        
        last_request_time = datetime.datetime.now()

        while (pagecnt < self.config["p_pages"]):
            pagecnt = pagecnt + 1
            redo = True
            while (redo):
                try:
                    current_time = datetime.datetime.now()
                    diff = current_time - last_request_time
                    diff = int(diff.seconds)

                    if (diff <= self.cooldown):
                        if (diff > 0):
                            print "Commencing %ds google cooldown..." % (
                                self.cooldown - diff)
                            time.sleep(self.cooldown - diff)

                    last_request_time = datetime.datetime.now()
                    results = self.getNextPage()
                    redo = False
                except KeyboardInterrupt:
                    raise
                except Exception, err:
                    print err
                    redo = True
                    sys.stderr.write("[RETRYING PAGE %d]\n" % (pagecnt))
                    curtry = curtry + 1
                    if (curtry > self.config["p_maxtries"]):
                        print "MAXIMUM COUNT OF (RE)TRIES REACHED!"
                        sys.exit(1)

            curtry = 0

            if (len(results) == 0): break
            sys.stderr.write("[PAGE %d]\n" %(pagecnt))
            try:
                for r in results:
                    single = singleScan(self.config)
                    single.setURL(r.url)
                    single.setQuite(True)
                    single.scan()
            except KeyboardInterrupt:
                raise
            time.sleep(1)
Example #3
    def startGoogleScan(self):
        print "Querying Bing Search: '%s' with max pages %d..."%(self.config["p_query"], self.config["p_pages"])

        pagecnt = 0
        curtry = 0
        
        last_request_time = datetime.datetime.now()

        while (pagecnt < self.config["p_pages"]):
            pagecnt = pagecnt + 1
            redo = True
            while (redo):
                try:
                    current_time = datetime.datetime.now()
                    diff = current_time - last_request_time
                    diff = int(diff.seconds)

                    if (diff <= self.cooldown):
                        if (diff > 0):
                            print "Commencing %ds bing cooldown..." % (
                                self.cooldown - diff)
                            time.sleep(self.cooldown - diff)

                    last_request_time = datetime.datetime.now()
                    resp = self.bs.search_web(
                        self.config["p_query"], {
                            'Web.Count': 50,
                            'Web.Offset': (pagecnt - 1) * self.results_per_page
                        })
                    results = resp['SearchResponse']['Web']['Results']
                    redo = False
                except KeyboardInterrupt:
                    raise
                except Exception, err:
                    # Report the error and retry; re-raising here would make
                    # the retry handling below unreachable.
                    print err
                    redo = True
                    sys.stderr.write("[RETRYING PAGE %d]\n" % (pagecnt))
                    curtry = curtry + 1
                    if (curtry > self.config["p_maxtries"]):
                        print "MAXIMUM COUNT OF (RE)TRIES REACHED!"
                        sys.exit(1)

            curtry = 0

            if (len(results) == 0): break
            sys.stderr.write("[PAGE %d]\n" %(pagecnt))
            try:
                for r in results:
                    single = singleScan(self.config)
                    single.setURL(r["Url"])
                    single.setQuite(True)
                    single.scan()
            except KeyboardInterrupt:
                raise
            time.sleep(1)
Example #4
    def startMassScan(self):
        print "MassScan reading file: '%s'..." % self.list

        f = open(self.list, "r")
        idx = 0
        for l in f:
            if idx >= 0:
                l = l.strip()
                if (l.startswith("http://"), l.startswith("https://")):
                    print "[%d][MASS_SCAN] Scanning: '%s'..." % (idx, l)
                    single = singleScan(self.config)
                    single.setURL(l)
                    single.setQuite(True)
                    single.scan()

                    idx = idx + 1

        print "MassScan completed."
Example #5
    def startMassScan(self):
        print "MassScan reading file: '%s'..."%self.list

        f = open(self.list, "r")
        idx = 0
        for l in f:
            if idx >= 0:
                l = l.strip()
                if (l.startswith("http://"), l.startswith("https://")):
                    print "[%d][MASS_SCAN] Scanning: '%s'..." %(idx,l)
                    single = singleScan(self.config)
                    single.setURL(l)
                    single.setQuite(True)
                    single.scan()

                    idx = idx +1

        print "MassScan completed."
Example #6
    def scan(self):
        print "Requesting '%s'..." % (self.URL)

        extHeader = ""
        code, headers = self.doRequest(self.URL, self.config["p_useragent"],
                                       self.config["p_post"],
                                       self.config["header"],
                                       self.config["p_ttl"])

        if (headers != None):
            for head in headers:
                if head[0] in ("set-cookie", "set-cookie2"):
                    cookie = head[1]
                    c = Cookie.SimpleCookie()
                    c.load(cookie)
                    for k, v in c.items():
                        extHeader += "%s=%s; " % (k, c[k].value)

        if (code == None):
            print "Code == None!"
            print "Does the target exist?!"
            print "AutoAwesome mode failed. -> Aborting."
            sys.exit(1)

        if (extHeader != ""):
            print "Cookies retrieved. Using them for further requests."
            extHeader = extHeader.strip()[:-1]

        if (self.config["header"].has_key("Cookie") and extHeader != ""):
            print "WARNING: AutoAwesome mode got some cookies from the server."
            print "Your defined cookies will be overwritten!"

        if (extHeader != ""):
            print "Testing file inclusion against given cookies..."
            self.config["header"]["Cookie"] = extHeader
            single = singleScan(self.config)
            single.setURL(self.URL)
            single.setQuite(True)
            single.scan()

        soup = BeautifulSoup.BeautifulSoup(''.join(code))
        idx = 0
        for form in soup.findAll("form"):
            idx += 1
            caption = None
            desturl = None
            method = None

            if (soup.has_key("action")):
                desturl = soup["action"]
            else:
                desturl = self.URL

            if (form.has_key("name")):
                caption = form["name"]
            else:
                caption = "Unnamed Form #%d" % (idx)

            if (form.has_key("method")):
                if (form["method"].lower() == "get"):
                    method = 0
                else:
                    method = 1
            else:
                method = 1  # If no method is defined assume it's POST.

            params = ""
            for input in form.findAll("input"):
                if (input.has_key("name")):
                    input_name = input["name"]
                    input_val = None
                    if (input.has_key("value")):
                        input_val = input["value"]

                    if (input_val == None):
                        params += "%s=&" % (input_name)
                    else:
                        params += "%s=%s&" % (input_name, input_val)
                else:
                    print "An input field doesn't have an 'name' attribute! Skipping it."

            if ("&" in params):
                params = params[:-1]

            print "Analyzing form '%s' for file inclusion bugs." % (caption)
            modConfig = deepcopy(self.config)
            if (method == 0):
                # Append the current get params to the current URL.
                if ("?" in desturl):
                    # There are already params in the URL.
                    desturl = "%s&%s" % (desturl, params)
                else:
                    # There are no other params.
                    desturl = "%s&?%s" % (desturl, params)

            else:
                currentPost = modConfig["p_post"]
                if (currentPost == None or currentPost == ""):
                    currentPost = params
                else:
                    currentPost = currentPost + "&" + params

                modConfig["p_post"] = currentPost

            single = singleScan(modConfig)
            single.setURL(desturl)
            single.setQuite(True)
            single.scan()

        print "Starting harvester engine to get links (Depth: 0)..."
        crawl = crawler(self.config)
        crawl.crawl_url(self.URL, 0)
        if (len(crawl.urlpool) == 0):
            print "No links found."
        else:
            print "Harvesting done. %d links found. Analyzing links now..." % (
                len(crawl.urlpool))
            for url in crawl.urlpool:
                try:
                    single = singleScan(self.config)
                    single.setURL(str(url[0]))
                    single.setQuite(True)
                    single.scan()
                except:
                    print "Caught an exception. Continuing..."

        print "AutoAwesome is done."
Example #7
    if (config["p_url"] == None and config["p_mode"] == 4):
        print "Root URL required for AutoAwesome. (-u)"
        sys.exit(1)
    if (config["p_monkeymode"] == True):
        print "Blind FI-error checking enabled."

    if (config["force-os"] != None):
        if (config["force-os"] != "unix" and config["force-os"] != "windows"):
            print "Invalid parameter for 'force-os'."
            print "Only 'unix' or 'windows' are allowed!"
            sys.exit(1)


    try:
        if (config["p_mode"] == 0):
            single = singleScan(config)
            single.setURL(config["p_url"])
            single.scan()

        elif(config["p_mode"] == 1):
            if (not os.path.exists(config["p_list"])):
                print "Your defined URL-List doesn't exist: '%s'" %config["p_list"]
                sys.exit(0)
            print "MassScanner is loading URLs from file: '%s'" %config["p_list"]
            m = massScan(config)
            m.startMassScan()
            show_report()

        elif(config["p_mode"] == 2):
            print "GoogleScanner is searching for Query: '%s'" %config["p_query"]
            g = googleScan(config)
Example #8
    def scan(self):
        print "Requesting '%s'..." %(self.URL)
        
        extHeader = ""
        code, headers = self.doRequest(self.URL, self.config["p_useragent"], self.config["p_post"], self.config["header"], self.config["p_ttl"])
        
        if (headers != None):
            for head in headers:
                if head[0] in ("set-cookie", "set-cookie2"):
                    cookie = head[1]
                    c = Cookie.SimpleCookie()
                    c.load(cookie)
                    for k,v in c.items():
                        extHeader += "%s=%s; " %(k, c[k].value)
        
        if (code == None):
            print "Code == None!"
            print "Does the target exist?!"
            print "AutoAwesome mode failed. -> Aborting."
            sys.exit(1)
        
        if (extHeader != ""):
            print "Cookies retrieved. Using them for further requests."
            extHeader = extHeader.strip()[:-1]
            
        if (self.config["header"].has_key("Cookie") and extHeader != ""):
            print "WARNING: AutoAwesome mode got some cookies from the server."
            print "Your defined cookies will be overwritten!"


        if (extHeader != ""):
            print "Testing file inclusion against given cookies..."
            self.config["header"]["Cookie"] = extHeader
            single = singleScan(self.config)
            single.setURL(self.URL)
            single.setQuite(True)
            single.scan()
            
        soup = BeautifulSoup.BeautifulSoup(''.join(code))
        idx = 0
        for form in soup.findAll("form"):
            idx += 1
            caption = None
            desturl = None
            method  = None
            
            if (soup.has_key("action")):
                desturl = soup["action"]
            else:
                desturl = self.URL
            
            if (form.has_key("name")):
                caption = form["name"]
            else:
                caption = "Unnamed Form #%d" %(idx)
                
            if (form.has_key("method")):
                if (form["method"].lower() == "get"):
                    method = 0
                else:
                    method = 1
            else:
                method = 1 # If no method is defined assume it's POST.
            
            
            params = ""
            for input in form.findAll("input"):
                if (input.has_key("name")):
                    input_name = input["name"]
                    input_val  = None
                    if (input.has_key("value")):
                        input_val  = input["value"]
                    
                    if (input_val == None):
                        params += "%s=&" %(input_name)
                    else:
                        params += "%s=%s&" %(input_name, input_val)
                else:
                    print "An input field doesn't have an 'name' attribute! Skipping it."
            
            if ("&" in params):
                params = params[:-1]
                
            print "Analyzing form '%s' for file inclusion bugs." %(caption) 
            modConfig = deepcopy(self.config)
            if (method == 0):
                # Append the current get params to the current URL.
                if ("?" in desturl):
                    # There are already params in the URL.
                    desturl = "%s&%s" %(desturl, params)
                else:
                    # There are no other params.
                    desturl = "%s&?%s" %(desturl, params)
            
            else:
                currentPost = modConfig["p_post"]
                if (currentPost == None or currentPost == ""): 
                    currentPost = params
                else:
                    currentPost = currentPost + "&" + params
            
                modConfig["p_post"] = currentPost
            
            single = singleScan(modConfig)
            single.setURL(desturl)
            single.setQuite(True)
            single.scan()
            
        print "Starting harvester engine to get links (Depth: 0)..."
        crawl = crawler(self.config)
        crawl.crawl_url(self.URL, 0)
        if (len(crawl.urlpool) == 0):
            print "No links found."
        else:
            print "Harvesting done. %d links found. Analyzing links now..."%(len(crawl.urlpool))
            for url in crawl.urlpool:
                single = singleScan(self.config)
                single.setURL(str(url[0]))
                single.setQuite(True)
                single.scan()
                
        print "AutoAwesome is done."
Example #9
        sys.exit(1)
    if (config["p_url"] == None and config["p_mode"] == 4):
        print("Root URL required for AutoAwesome. (-u)")
        sys.exit(1)
    if (config["p_monkeymode"] == True):
        print("Blind FI-error checking enabled.")

    if (config["force-os"] != None):
        if (config["force-os"] != "unix" and config["force-os"] != "windows"):
            print("Invalid parameter for 'force-os'.")
            print("Only 'unix' or 'windows' are allowed!")
            sys.exit(1)

    try:
        if (config["p_mode"] == 0):
            single = singleScan(config)
            single.setURL(config["p_url"])
            single.scan()

        elif (config["p_mode"] == 1):
            if (not os.path.exists(config["p_list"])):
                print("Your defined URL-List doesn't exist: '%s'" %
                      config["p_list"])
                sys.exit(0)
            print("MassScanner is loading URLs from file: '%s'" %
                  config["p_list"])
            m = massScan(config)
            m.startMassScan()
            show_report()

        elif (config["p_mode"] == 2):