Code example #1
0
def run_tests(links, testLevel=1, v=False):
    """Scan each link for vulnerabilities and collect the findings.

    Args:
        links: iterable of URLs to test; empty or None aborts the run.
        testLevel: 1 = error-based SQLi only; 2 adds time-based SQLi;
            3 adds XSS. Levels are cumulative.
        v: verbose flag, also forwarded to each individual scanner.

    Returns:
        A list of TestObject instances (one per finding, possibly empty),
        or None when the input was empty/None.
    """
    # Fixed: `links == [] or links == None` -> truthiness test; PEP 8 says
    # compare to None with `is`, and `not links` covers both cases.
    if not links:
        bcolors.printFail("[-]Invalid input parameters! Exiting...")
        return None
    proxyhandler = Proxy(True)
    proxyhandler.proxify()
    proxyhandler.validate_proxy()
    test_objects = []
    for link in links:
        # If the (fast) error-based SQLi probe already hit, skip the
        # slower time-based probe for this link.
        needs_time_based = True
        res = sql_error_scan(link, v)
        if res is not None:  # fixed: was `!= None`
            test_objects.append(TestObject.TestObject(link, "SQLi", res))
            needs_time_based = False
        # Time-based SQLi
        if testLevel > 1 and needs_time_based:
            res = sql_time_scan(link, v)
            if res is not None:
                test_objects.append(TestObject.TestObject(link, "SQLiT", res))
        # XSS
        if testLevel > 2:
            res = xss_vuln_scan(link, v)
            if res is not None:
                test_objects.append(TestObject.TestObject(link, "XSS", res))
    if v:
        if test_objects:  # fixed: was `!= []`
            bcolors.printGreen("~*~*~*~*~PRINTING FOUND TARGETS~*~*~*~*~")
            for t in test_objects:
                t.print_test()
        else:
            bcolors.printFail("~*~*~*~*~NO TARGETS FOUND~*~*~*~*~")
    return test_objects
Code example #2
0
def run_tests(links, testLevel=1, v=False):
    """Run the configured vulnerability scans over every link.

    Args:
        links: iterable of URLs to scan; a falsy value aborts immediately.
        testLevel: cumulative depth — 1: error-based SQLi, 2: + time-based
            SQLi, 3: + XSS.
        v: verbosity flag, passed through to the scanner helpers.

    Returns:
        List of TestObject findings (may be empty), or None on bad input.
    """
    # Fixed idiom: use truthiness instead of `links == [] or links == None`.
    if not links:
        bcolors.printFail("[-]Invalid input parameters! Exiting...")
        return None
    proxyhandler = Proxy(True)
    proxyhandler.proxify()
    proxyhandler.validate_proxy()
    findings = []
    for link in links:
        # Only run the slow time-based SQLi probe when the cheap
        # error-based probe found nothing for this link.
        run_time_based = True
        res = sql_error_scan(link, v)
        if res is not None:  # fixed: `!= None` -> `is not None`
            findings.append(TestObject.TestObject(link, "SQLi", res))
            run_time_based = False
        # Time-based SQLi
        if testLevel > 1 and run_time_based:
            res = sql_time_scan(link, v)
            if res is not None:
                findings.append(TestObject.TestObject(link, "SQLiT", res))
        # XSS
        if testLevel > 2:
            res = xss_vuln_scan(link, v)
            if res is not None:
                findings.append(TestObject.TestObject(link, "XSS", res))
    if v:
        if findings:  # fixed: `!= []` -> truthiness
            bcolors.printGreen("~*~*~*~*~PRINTING FOUND TARGETS~*~*~*~*~")
            for t in findings:
                t.print_test()
        else:
            bcolors.printFail("~*~*~*~*~NO TARGETS FOUND~*~*~*~*~")
    return findings
Code example #3
0
class Search:
    """Search-result scraper with optional proxying and bounded retries."""

    # Initializes variables
    def __init__(self, useproxy, retries=None, verbose=False, sleep=5):
        self.urls = []  # contains scraped urls
        self.blacklist = []  # contains blacklisted proxies
        self.useproxy = useproxy  # dictates use of proxy
        self.retries = retries  # number of search retries; None => unlimited
        self.verbose = verbose  # sets verbosity level
        self.sleep = sleep  # dictates sleep while searching for urls
        # Fixed: removed the stray `cookie_jar =` chained local alias,
        # which served no purpose.
        self.cookie_jar = http.cookiejar.CookieJar()

        self.proxyhandler = None
        if self.useproxy:
            self.proxyhandler = Proxy(self.verbose)
            self.proxyhandler.proxify()
        if self.verbose:
            bcolors.printGreen("[+]Search object created!")

    def print_state(self):
        """Dump the object's current state to stdout (for debugging)."""
        bcolors.printBold("****Printing object state****")
        bcolors.printBold("URLs:\n")
        print(str(self.urls))
        bcolors.printBold("Blacklist:\n")
        print(str(self.blacklist))
        bcolors.printBold("Settings:\n")
        print("Retries: " + str(self.retries) + ", verbose: " +
              str(self.verbose) + ", sleep: " + str(self.sleep))

    def print_urls(self):
        """Print every scraped URL, one per line."""
        bcolors.printBold("****PRINTING URLS****\n")
        for url in self.urls:
            print(str(url))

    # Returns the HTML page of a website.
    # It incorporates error checking and retries.
    def get_html(self, url):
        """Fetch *url* and return its parsed HTML tree.

        Retries on HTTP 503 (rotating the proxy when one is in use) up to
        ``self.retries`` times (unlimited when None). Returns None on any
        other HTTP error, or when all retries are exhausted — previously
        the exhausted-retries path raised UnboundLocalError because `src`
        was never assigned.
        """
        if self.useproxy:
            self.proxyhandler.validate_proxy()
        req = urllib.request.Request(url, None, data.headers)
        tries = 0
        src = None  # fixed: guarantee `src` is defined when all attempts fail
        while self.retries is None or tries < self.retries:
            try:
                res = urllib.request.urlopen(req)
                self.cookie_jar.extract_cookies(res, req)
                src = res.read()
                break
            except urllib.error.HTTPError as e:
                if e.code != 503:
                    bcolors.printFail("[-]HTTP Error " + str(e) +
                                      " was raised!")
                    return None
                if self.useproxy:
                    if self.verbose:
                        bcolors.printWarning(
                            "[*]503 Error raised when acquiring search results! Updating proxy..."
                        )
                    self.update_proxy()

                # If we have to retry, append current proxy to blacklist
            if self.useproxy:
                # blacklists both proxies if error occured!
                self.proxyhandler.blacklist_current_proxy(True)
            tries += 1

        if src is None:
            # Every retry hit a 503; give up gracefully.
            return None
        # NOTE(review): `src` is bytes, so str(src) produces "b'...'" —
        # kept as-is to preserve existing behaviour; confirm downstream
        # parsing expects this.
        return html.fromstring(str(src))

    def update_proxy(self, https=False):
        """Acquire a fresh proxy and validate it."""
        self.proxyhandler.proxify(https, True)
        self.proxyhandler.validate_proxy()

    def fatal_exception(self, e=None, function_name=None):
        """Report an unrecoverable error, dump state, and exit."""
        bcolors.printFail("A fatal exception has occured!")
        if e is not None:  # fixed: `not e == None` -> `is not None`
            print(str(e))
        if function_name is not None:
            print(str(function_name))
        bcolors.printBold("****PROGRAM STATE****")
        self.print_state()
        sys.exit(0)
Code example #4
0
class Search:
    """Scrapes search-engine results, optionally through rotating proxies."""

    # Initializes variables
    def __init__(self, useproxy, retries=None, verbose=False, sleep=5):
        self.urls = []  # scraped urls accumulate here
        self.blacklist = []  # proxies that have been blacklisted
        self.useproxy = useproxy  # whether to route requests via a proxy
        self.retries = retries  # search retry budget; None => unlimited
        self.verbose = verbose  # verbosity level
        self.sleep = sleep  # delay between url searches
        # Fixed: dropped the pointless `cookie_jar =` chained assignment
        # that created an unused local alias.
        self.cookie_jar = http.cookiejar.CookieJar()

        self.proxyhandler = None
        if self.useproxy:
            self.proxyhandler = Proxy(self.verbose)
            self.proxyhandler.proxify()
        if self.verbose:
            bcolors.printGreen("[+]Search object created!")

    def print_state(self):
        """Print the current object state (debug aid)."""
        bcolors.printBold("****Printing object state****")
        bcolors.printBold("URLs:\n")
        print(str(self.urls))
        bcolors.printBold("Blacklist:\n")
        print(str(self.blacklist))
        bcolors.printBold("Settings:\n")
        print("Retries: " + str(self.retries) + ", verbose: " +
              str(self.verbose) + ", sleep: " + str(self.sleep))

    def print_urls(self):
        """Print each scraped URL on its own line."""
        bcolors.printBold("****PRINTING URLS****\n")
        for url in self.urls:
            print(str(url))

    # Returns the HTML page of a website.
    # Incorporates error checking and retries.
    def get_html(self, url):
        """Fetch *url* and return the parsed HTML tree.

        HTTP 503 responses are retried (rotating proxies when enabled)
        until ``self.retries`` is exhausted (None means retry forever).
        Returns None for any other HTTP error, or when retries run out —
        the latter previously crashed with UnboundLocalError since `src`
        was only assigned on success.
        """
        if self.useproxy:
            self.proxyhandler.validate_proxy()
        req = urllib.request.Request(url, None, data.headers)
        tries = 0
        src = None  # fixed: defined up front so exhausted retries don't crash
        while self.retries is None or tries < self.retries:
            try:
                res = urllib.request.urlopen(req)
                self.cookie_jar.extract_cookies(res, req)
                src = res.read()
                break
            except urllib.error.HTTPError as e:
                if e.code != 503:
                    bcolors.printFail("[-]HTTP Error " + str(e) + " was raised!")
                    return None
                if self.useproxy:
                    if self.verbose:
                        bcolors.printWarning("[*]503 Error raised when acquiring search results! Updating proxy...")
                    self.update_proxy()

                # If we have to retry, append current proxy to blacklist
            if self.useproxy:
                # blacklists both proxies if error occured!
                self.proxyhandler.blacklist_current_proxy(True)
            tries += 1

        if src is None:
            # All attempts returned 503; signal failure to the caller.
            return None
        # NOTE(review): `src` is bytes; str(src) yields "b'...'" — left
        # untouched to preserve behaviour, but verify callers expect it.
        return html.fromstring(str(src))

    def update_proxy(self, https=False):
        """Rotate to a new proxy and validate it."""
        self.proxyhandler.proxify(https, True)
        self.proxyhandler.validate_proxy()

    def fatal_exception(self, e=None, function_name=None):
        """Report a fatal error, dump the object state, and terminate."""
        bcolors.printFail("A fatal exception has occured!")
        if e is not None:  # fixed: `not e == None` -> idiomatic identity test
            print(str(e))
        if function_name is not None:
            print(str(function_name))
        bcolors.printBold("****PROGRAM STATE****")
        self.print_state()
        sys.exit(0)