def _get_tree(self, url, reset=True):
    """Fetch *url* and parse it into an lxml tree, retrying on HTTP 503.

    :param url: address to request.
    :param reset: when True, install a fresh proxy-less opener globally so
        this request bypasses any proxy configured earlier in the process.
    :return: parsed lxml tree, or ``None`` on a non-503 error, any other
        exception, or after exhausting all retries.
    """
    if reset:
        proxy_handler = urllib.request.ProxyHandler({})
        opener = urllib.request.build_opener(proxy_handler)
        urllib.request.install_opener(opener)
    req = urllib.request.Request(url, None, data.headers)
    for _ in range(10):  # Loop will execute for 10 seconds at most before breaking
        try:
            # Use a context manager so the response socket is always closed
            # (the original leaked it on every iteration).
            with urllib.request.urlopen(req) as response:
                page = response.read()
            return html.fromstring(page)
        except urllib.error.HTTPError as e:
            # 503 is treated as transient (throttling); anything else is fatal.
            if e.code != 503:
                bcolors.printFail("[-]Fatal exception '" + str(e) + "' in _get_tree()!")
                return None
        except Exception as e:
            bcolors.printFail("[-]Fatal exception '" + str(e) + "' in _get_tree()!")
            return None
        time.sleep(1)
    return None
def get_html(self, url):
    """Fetch *url* and return it parsed as an lxml tree.

    Retries on HTTP 503 (rotating and blacklisting the current proxy when
    proxying is enabled); any other HTTP error aborts immediately. Cookies
    from successful responses are stored in ``self.cookie_jar``.

    :param url: address to request.
    :return: parsed lxml tree, or ``None`` on failure.
    """
    if self.useproxy:
        self.proxyhandler.validate_proxy()
    req = urllib.request.Request(url, None, data.headers)
    tries = 0
    src = None  # stays None when every attempt fails
    while self.retries is None or tries < self.retries:
        try:
            res = urllib.request.urlopen(req)
            self.cookie_jar.extract_cookies(res, req)
            src = res.read()
            break
        except urllib.error.HTTPError as e:
            if e.code != 503:
                bcolors.printFail("[-]HTTP Error " + str(e) + " was raised!")
                return None
            if self.useproxy:
                if self.verbose:
                    bcolors.printWarning(
                        "[*]503 Error raised when acquiring search results! Updating proxy..."
                    )
                self.update_proxy()
        # If we have to retry, append current proxy to blacklist
        if self.useproxy:
            # blacklists both proxies if error occured!
            self.proxyhandler.blacklist_current_proxy(True)
        tries += 1
    if src is None:
        # All retries exhausted without a successful response; previously
        # this fell through and raised UnboundLocalError on `src`.
        return None
    # NOTE(review): str() on raw bytes yields a "b'...'" literal string;
    # lxml accepts bytes directly -- confirm before changing this behavior.
    return html.fromstring(str(src))
def get_html(self, url):
    """Fetch *url* and return it parsed as an lxml tree.

    Retries on HTTP 503 (rotating and blacklisting the current proxy when
    proxying is enabled); any other HTTP error aborts immediately. Cookies
    from successful responses are stored in ``self.cookie_jar``.

    :param url: address to request.
    :return: parsed lxml tree, or ``None`` on failure.
    """
    if self.useproxy:
        self.proxyhandler.validate_proxy()
    req = urllib.request.Request(url, None, data.headers)
    tries = 0
    src = None  # stays None when every attempt fails
    while self.retries is None or tries < self.retries:
        try:
            res = urllib.request.urlopen(req)
            self.cookie_jar.extract_cookies(res, req)
            src = res.read()
            break
        except urllib.error.HTTPError as e:
            if e.code != 503:
                bcolors.printFail("[-]HTTP Error " + str(e) + " was raised!")
                return None
            if self.useproxy:
                if self.verbose:
                    bcolors.printWarning(
                        "[*]503 Error raised when acquiring search results! Updating proxy..."
                    )
                self.update_proxy()
        # If we have to retry, append current proxy to blacklist
        if self.useproxy:
            # blacklists both proxies if error occured!
            self.proxyhandler.blacklist_current_proxy(True)
        tries += 1
    if src is None:
        # All retries exhausted without a successful response; previously
        # this fell through and raised UnboundLocalError on `src`.
        return None
    # NOTE(review): str() on raw bytes yields a "b'...'" literal string;
    # lxml accepts bytes directly -- confirm before changing this behavior.
    return html.fromstring(str(src))
def validate_proxy(self):
    """Check that the configured proxy actually masks our real IP.

    Fetches the externally visible IP through both the HTTP and HTTPS
    proxies and compares each against ``self.myip``. On a lookup error or
    when a proxy leaks the real IP, warns and pauses for user confirmation.
    """
    if self.verbose:
        print("[*]Validating proxy...")
    try:
        ip = self.get_ip()       # IP as seen through the HTTP proxy
        ips = self.get_ip(True)  # IP as seen through the HTTPS proxy
    except Exception as e:
        bcolors.printFail(("[-]Error occured while validating proxy!\n" + str(e)))
        print("Press enter to continue, or ctrl+c to interrupt...")
        input()
        ip = "Unknown"
        ips = "Unknown"
    if self.verbose:
        print("[*]Your IP is: " + str(self.myip) + "\n[*]Proxy(HTTP) ip is: " +
              str(ip) + "\n[*]Proxy(HTTPS) ip is: " + str(ips))
    if ip == self.myip:
        bcolors.printWarning(
            "[-]HTTP proxy error detected! Press enter to continue, or Ctrl+C to interrupt..."
        )
        input()
    if ips == self.myip:
        # Bug fix: this branch checks the HTTPS proxy but warned "HTTP".
        bcolors.printWarning(
            "[-]HTTPS proxy error detected! Press enter to continue, or Ctrl+C to interrupt..."
        )
        input()
def run_tests(links, testLevel=1, v=False):
    """Run vulnerability scans against each link.

    :param links: list of URLs to test.
    :param testLevel: 1 = error-based SQLi only; 2 also runs time-based
        SQLi; 3 also runs the XSS scan.
    :param v: verbose flag; also enables the summary printout at the end.
    :return: list of TestObject results, or ``None`` for invalid input.
    """
    if not links:
        bcolors.printFail("[-]Invalid input parameters! Exiting...")
        return
    proxyhandler = Proxy(True)
    proxyhandler.proxify()
    proxyhandler.validate_proxy()
    TestObjects = []
    for link in links:
        # Used to check if we need to perform a second time-based
        # sqlInjection test; an error-based hit makes it redundant.
        sqlI = True
        res = sql_error_scan(link, v)
        if res is not None:
            TestObjects.append(TestObject.TestObject(link, "SQLi", res))
            sqlI = False
        # Time based SQLi
        if testLevel > 1 and sqlI:
            res = sql_time_scan(link, v)
            if res is not None:
                TestObjects.append(TestObject.TestObject(link, "SQLiT", res))
        # XSS
        if testLevel > 2:
            res = xss_vuln_scan(link, v)
            if res is not None:
                TestObjects.append(TestObject.TestObject(link, "XSS", res))
    if v:
        if TestObjects:
            bcolors.printGreen("~*~*~*~*~PRINTING FOUND TARGETS~*~*~*~*~")
            for t in TestObjects:
                t.print_test()
        else:
            bcolors.printFail("~*~*~*~*~NO TARGETS FOUND~*~*~*~*~")
    return TestObjects
def run_tests(links, testLevel=1, v=False):
    """Run vulnerability scans against each link.

    :param links: list of URLs to test.
    :param testLevel: 1 = error-based SQLi only; 2 also runs time-based
        SQLi; 3 also runs the XSS scan.
    :param v: verbose flag; also enables the summary printout at the end.
    :return: list of TestObject results, or ``None`` for invalid input.
    """
    if not links:
        bcolors.printFail("[-]Invalid input parameters! Exiting...")
        return
    proxyhandler = Proxy(True)
    proxyhandler.proxify()
    proxyhandler.validate_proxy()
    TestObjects = []
    for link in links:
        # Used to check if we need to perform a second time-based
        # sqlInjection test; an error-based hit makes it redundant.
        sqlI = True
        res = sql_error_scan(link, v)
        if res is not None:
            TestObjects.append(TestObject.TestObject(link, "SQLi", res))
            sqlI = False
        # Time based SQLi
        if testLevel > 1 and sqlI:
            res = sql_time_scan(link, v)
            if res is not None:
                TestObjects.append(TestObject.TestObject(link, "SQLiT", res))
        # XSS
        if testLevel > 2:
            res = xss_vuln_scan(link, v)
            if res is not None:
                TestObjects.append(TestObject.TestObject(link, "XSS", res))
    if v:
        if TestObjects:
            bcolors.printGreen("~*~*~*~*~PRINTING FOUND TARGETS~*~*~*~*~")
            for t in TestObjects:
                t.print_test()
        else:
            bcolors.printFail("~*~*~*~*~NO TARGETS FOUND~*~*~*~*~")
    return TestObjects
def fatal_exception(self, e=None, function_name=None):
    """Report an unrecoverable error, dump program state, and exit.

    :param e: the exception (or message) to display, if any.
    :param function_name: name of the function where the error occurred.
    """
    bcolors.printFail("A fatal exception has occured!")
    if e is not None:
        print(str(e))
    if function_name is not None:
        print(str(function_name))
    bcolors.printBold("****PROGRAM STATE****")
    self.print_state()
    # Exit with a nonzero status: this is an error path, and exiting 0
    # wrongly signalled success to calling shells/scripts.
    sys.exit(1)
def fatal_exception(self, e=None, function_name=None):
    """Report an unrecoverable error, dump program state, and exit.

    :param e: the exception (or message) to display, if any.
    :param function_name: name of the function where the error occurred.
    """
    bcolors.printFail("A fatal exception has occured!")
    if e is not None:
        print(str(e))
    if function_name is not None:
        print(str(function_name))
    bcolors.printBold("****PROGRAM STATE****")
    self.print_state()
    # Exit with a nonzero status: this is an error path, and exiting 0
    # wrongly signalled success to calling shells/scripts.
    sys.exit(1)
def validate_proxy(self):
    """Check that the configured proxy actually masks our real IP.

    Fetches the externally visible IP through both the HTTP and HTTPS
    proxies and compares each against ``self.myip``. On a lookup error or
    when a proxy leaks the real IP, warns and pauses for user confirmation.
    """
    if self.verbose:
        print("[*]Validating proxy...")
    try:
        ip = self.get_ip()       # IP as seen through the HTTP proxy
        ips = self.get_ip(True)  # IP as seen through the HTTPS proxy
    except Exception as e:
        bcolors.printFail(("[-]Error occured while validating proxy!\n" + str(e)))
        print("Press enter to continue, or ctrl+c to interrupt...")
        input()
        ip = "Unknown"
        ips = "Unknown"
    if self.verbose:
        print("[*]Your IP is: " + str(self.myip) + "\n[*]Proxy(HTTP) ip is: " +
              str(ip) + "\n[*]Proxy(HTTPS) ip is: " + str(ips))
    if ip == self.myip:
        bcolors.printWarning(
            "[-]HTTP proxy error detected! Press enter to continue, or Ctrl+C to interrupt..."
        )
        input()
    if ips == self.myip:
        # Bug fix: this branch checks the HTTPS proxy but warned "HTTP".
        bcolors.printWarning(
            "[-]HTTPS proxy error detected! Press enter to continue, or Ctrl+C to interrupt..."
        )
        input()
def _get_tree(self, url, reset=True):
    """Fetch *url* and parse it into an lxml tree, retrying on HTTP 503.

    :param url: address to request.
    :param reset: when True, install a fresh proxy-less opener globally so
        this request bypasses any proxy configured earlier in the process.
    :return: parsed lxml tree, or ``None`` on a non-503 error, any other
        exception, or after exhausting all retries.
    """
    if reset:
        proxy_handler = urllib.request.ProxyHandler({})
        opener = urllib.request.build_opener(proxy_handler)
        urllib.request.install_opener(opener)
    req = urllib.request.Request(url, None, data.headers)
    for _ in range(10):  # Loop will execute for 10 seconds at most before breaking
        try:
            # Use a context manager so the response socket is always closed
            # (the original leaked it on every iteration).
            with urllib.request.urlopen(req) as response:
                page = response.read()
            return html.fromstring(page)
        except urllib.error.HTTPError as e:
            # 503 is treated as transient (throttling); anything else is fatal.
            if e.code != 503:
                bcolors.printFail("[-]Fatal exception '" + str(e) + "' in _get_tree()!")
                return None
        except Exception as e:
            bcolors.printFail("[-]Fatal exception '" + str(e) + "' in _get_tree()!")
            return None
        time.sleep(1)
    return None
def get_html(self, url):
    """Fetch *url* and return it parsed as an lxml tree.

    On HTTP 503 the current proxy is rotated and blacklisted (when proxying
    is enabled) and the request is retried; other HTTP errors abort.

    :param url: address to request.
    :return: parsed lxml tree, or ``None`` on failure.
    """
    if self.useproxy:
        self.proxyhandler.validate_proxy()
    req = urllib.request.Request(url, None, data.headers)
    tries = 0
    src = None  # stays None when every attempt fails
    while self.retries is None or tries < self.retries:
        try:
            res = urllib.request.urlopen(req)
            src = res.read()
            break
        except urllib.error.HTTPError as e:
            if self.useproxy:
                self.update_proxy()
            if e.code != 503:
                bcolors.printFail("[-]HTTP Error " + str(e) + " was raised!")
                return None
        # If we have to retry, append current proxy to blacklist
        if self.useproxy:
            # blacklists both proxies if error occured!
            self.proxyhandler.blacklist_current_proxy(True)
        tries += 1
    if src is None:
        # All retries exhausted without a successful response; previously
        # this fell through and raised UnboundLocalError on `src`.
        return None
    # NOTE(review): str() on raw bytes yields a "b'...'" literal string;
    # lxml accepts bytes directly -- confirm before changing this behavior.
    return html.fromstring(str(src))