Example #1
    def test_invalid_timeouts(self):
        try:
            Timeout(total=-1)
            self.fail("negative value should throw exception")
        except ValueError as e:
            self.assertTrue('less than' in str(e))

        try:
            Timeout(connect=2, total=-1)
            self.fail("negative value should throw exception")
        except ValueError as e:
            self.assertTrue('less than' in str(e))

        try:
            Timeout(read=-1)
            self.fail("negative value should throw exception")
        except ValueError as e:
            self.assertTrue('less than' in str(e))

        # Booleans are allowed also by socket.settimeout and converted to the
        # equivalent float (1.0 for True, 0.0 for False)
        Timeout(connect=False, read=True)

        try:
            Timeout(read="foo")
            self.fail("string value should not be allowed")
        except ValueError as e:
            self.assertTrue('int or float' in str(e))
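The rules this test pins down are easy to demonstrate directly. Below is a minimal sketch (not taken from any project above) using the public urllib3.util.Timeout API; note that recent urllib3 releases reject boolean values outright, so the boolean case from the old test is omitted here.

from urllib3.util import Timeout

Timeout(connect=2.0, read=7.0)    # valid: a separate budget per phase
Timeout(total=10.0)               # valid: one budget for the whole request
try:
    Timeout(total=-1)             # negative values are rejected
except ValueError as err:
    print(err)                    # message explains the minimum allowed value
try:
    Timeout(read="foo")           # non-numeric values are rejected
except ValueError as err:
    print(err)                    # message names the accepted types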
Example #2
def configure_http_pool():

    global gl_http_pool

    if gl_args.mode == 'auto-scan' or gl_args.mode == 'file-scan':
        timeout = Timeout(connect=1.0, read=3.0)
    else:
        timeout = Timeout(connect=gl_args.timeout, read=6.0)

    if gl_args.proxy:
        # when using a proxy, the protocol must be specified explicitly
        if 'http' not in gl_args.host or 'http' not in gl_args.proxy:
            print_and_flush(RED + " * When using a proxy, you must specify the http or https protocol"
                        " (e.g. http://%s).\n\n" % (gl_args.host if 'http' not in gl_args.host else gl_args.proxy) + ENDC)
            logging.critical('Protocol not specified')
            exit(1)

        try:
            if gl_args.proxy_cred:
                headers = make_headers(proxy_basic_auth=gl_args.proxy_cred)
                gl_http_pool = ProxyManager(proxy_url=gl_args.proxy, proxy_headers=headers, timeout=timeout, cert_reqs='CERT_NONE')
            else:
                gl_http_pool = ProxyManager(proxy_url=gl_args.proxy, timeout=timeout, cert_reqs='CERT_NONE')
        except Exception:
            print_and_flush(RED + " * An error occurred while setting the proxy. Please see the log for details.\n\n" + ENDC)
            logging.critical('Error while setting the proxy', exc_info=True)
            exit(1)
    else:
        gl_http_pool = PoolManager(timeout=timeout, cert_reqs='CERT_NONE')
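For reference, a stripped-down, hedged version of the same setup follows; the proxy URL and credentials are placeholders, not values from the project above.

from urllib3 import PoolManager, ProxyManager, make_headers
from urllib3.util import Timeout

timeout = Timeout(connect=3.0, read=6.0)
proxy_headers = make_headers(proxy_basic_auth='user:secret')  # placeholder credentials
pool = ProxyManager(proxy_url='http://127.0.0.1:8080',        # placeholder proxy URL
                    proxy_headers=proxy_headers,
                    timeout=timeout,
                    cert_reqs='CERT_NONE')
# Without a proxy, a plain PoolManager accepts the same keyword arguments:
pool = PoolManager(timeout=timeout, cert_reqs='CERT_NONE')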
Example #3
    def test_enhanced_timeout(self):
        def new_pool(timeout, cert_reqs='CERT_REQUIRED'):
            https_pool = HTTPSConnectionPool(TARPIT_HOST,
                                             self.port,
                                             timeout=timeout,
                                             cert_reqs=cert_reqs)
            return https_pool

        https_pool = new_pool(Timeout(connect=0.001))
        conn = https_pool._new_conn()
        self.assertRaises(ConnectTimeoutError, https_pool.request, 'GET', '/')
        self.assertRaises(ConnectTimeoutError, https_pool._make_request, conn,
                          'GET', '/')

        https_pool = new_pool(Timeout(connect=5))
        self.assertRaises(ConnectTimeoutError,
                          https_pool.request,
                          'GET',
                          '/',
                          timeout=Timeout(connect=0.001))

        t = Timeout(total=None)
        https_pool = new_pool(t)
        conn = https_pool._new_conn()
        self.assertRaises(ConnectTimeoutError,
                          https_pool.request,
                          'GET',
                          '/',
                          timeout=Timeout(total=None, connect=0.001))
Example #4
    def test_https_timeout(self):
        timeout = Timeout(connect=0.001)
        https_pool = HTTPSConnectionPool(TARPIT_HOST,
                                         self.port,
                                         timeout=timeout,
                                         cert_reqs='CERT_REQUIRED')

        timeout = Timeout(total=None, connect=0.001)
        https_pool = HTTPSConnectionPool(TARPIT_HOST,
                                         self.port,
                                         timeout=timeout,
                                         cert_reqs='CERT_REQUIRED')
        self.assertRaises(ConnectTimeoutError, https_pool.request, 'GET', '/')

        timeout = Timeout(read=0.001)
        https_pool = HTTPSConnectionPool(self.host,
                                         self.port,
                                         timeout=timeout,
                                         cert_reqs='CERT_REQUIRED')
        https_pool.ca_certs = DEFAULT_CA
        https_pool.assert_fingerprint = 'CC:45:6A:90:82:F7:FF:C0:82:18:8E:' \
                                        '7A:F2:8A:D7:1E:07:33:67:DE'
        url = '/sleep?seconds=0.005'
        self.assertRaises(ReadTimeoutError, https_pool.request, 'GET', url)

        timeout = Timeout(total=None)
        https_pool = HTTPSConnectionPool(self.host,
                                         self.port,
                                         timeout=timeout,
                                         cert_reqs='CERT_NONE')
        https_pool.request('GET', '/')
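The two exception types this test distinguishes can be reproduced with a short, network-dependent sketch: ConnectTimeoutError fires while the handshake is still pending (simulated below with a non-routable address), while ReadTimeoutError fires after connecting to a server that responds too slowly. Passing retries=False lets the original exception surface instead of a wrapping MaxRetryError.

from urllib3 import PoolManager
from urllib3.exceptions import ConnectTimeoutError
from urllib3.util import Timeout

http = PoolManager(timeout=Timeout(connect=0.001, read=2.0))
try:
    # 10.255.255.1 serves here as a typical non-routable "tarpit" address
    http.request('GET', 'http://10.255.255.1/', retries=False)
except ConnectTimeoutError:
    print('connect phase exceeded its budget')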
Example #5
def check_vul(url):
    """
    Test if a GET to a URL is successful
    :param url: The URL to test
    :return: A dict with the exploit type as the keys, and the HTTP status code as the value
    """
    if gl_args.mode == 'auto-scan' or gl_args.mode == 'file-scan':
        timeout = Timeout(connect=1.0, read=3.0)
        pool = PoolManager(timeout=timeout, retries=1, cert_reqs='CERT_NONE')
    else:
        timeout = Timeout(connect=3.0, read=6.0)
        pool = PoolManager(timeout=timeout, cert_reqs='CERT_NONE')

    url_check = parse_url(url)
    if url_check.port == 443 and url_check.scheme != 'https':
        url = "https://" + str(url_check.host) + ":" + str(url_check.port)

    print(GREEN + "\n ** Checking Host: %s **\n" % url)

    headers = {"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
               "Connection": "keep-alive",
               "User-Agent": user_agents[randint(0, len(user_agents) - 1)]}

    paths = {"jmx-console": "/jmx-console/HtmlAdaptor?action=inspectMBean&name=jboss.system:type=ServerInfo",
             "web-console" 	: "/web-console/ServerInfo.jsp",
             "JMXInvokerServlet": "/invoker/JMXInvokerServlet",
             "admin-console" : "/admin-console/"}

    for i in paths.keys():
        if gl_interrupted: break
        try:
            print(GREEN + " * Checking %s: \t" % i + ENDC),
            r = pool.request('HEAD', url +str(paths[i]), redirect=False, headers=headers)
            paths[i] = r.status

            # check if it's a false positive
            if len(r.getheaders()) == 0:
                print(RED + "[ ERROR ]\n * The server %s is not an HTTP server.\n" % url + ENDC)
                paths = {"jmx-console": 505,
                         "web-console": 505,
                         "JMXInvokerServlet": 505,
                         "admin-console": 505}
                break


            if paths[i] in (301, 302, 303, 307, 308):
                url_redirect = r.get_redirect_location()
                print(GREEN + "[ REDIRECT ]\n * The server sent a redirect to: %s\n" % url_redirect)
            elif paths[i] == 200 or paths[i] == 500:
                if i == "admin-console":
                    print(RED + "[ EXPOSED ]" + ENDC)
                else:
                    print(RED + "[ VULNERABLE ]" + ENDC)
            else:
                print(GREEN + "[ OK ]")
        except Exception:
            print(RED + "\n * An error occurred while connecting to the host %s\n" % url + ENDC)
            paths[i] = 505

    return paths
Example #6
    def test_timeout_elapsed(self, current_time):
        current_time.return_value = TIMEOUT_EPOCH
        timeout = Timeout(total=3)
        self.assertRaises(TimeoutStateError, timeout.get_connect_duration)

        timeout.start_connect()
        self.assertRaises(TimeoutStateError, timeout.start_connect)

        current_time.return_value = TIMEOUT_EPOCH + 2
        self.assertEqual(timeout.get_connect_duration(), 2)
        current_time.return_value = TIMEOUT_EPOCH + 37
        self.assertEqual(timeout.get_connect_duration(), 37)
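Outside of the mocked clock, the same state machine looks like this; a second start_connect() call, or reading the duration before the first one, raises TimeoutStateError.

import time
from urllib3.util import Timeout

t = Timeout(total=3)
t.start_connect()                 # may only be called once per instance
time.sleep(0.05)
print(t.get_connect_duration())   # roughly 0.05 seconds elapsed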
Example #7
 def test_enhanced_ssl_connection(self):
     conn = VerifiedHTTPSConnection(self.host, self.port)
     https_pool = HTTPSConnectionPool(self.host, self.port,
                                      timeout=Timeout(total=None, connect=5),
                                      cert_reqs='CERT_REQUIRED')
     https_pool.ca_certs = DEFAULT_CA
     https_pool.assert_fingerprint = 'CC:45:6A:90:82:F7:FF:C0:82:18:8E:' \
                                     '7A:F2:8A:D7:1E:07:33:67:DE'
     https_pool._make_request(conn, 'GET', '/')
Example #8
def sqli_checker(site):
    global enable_proxy, http, proxy_list
    error1 = "You have an error in your SQL syntax"
    error2 = "Warning: mysql_fetch_array()"
    if "=" in site:
        try:
            if enable_proxy == "y" or enable_proxy == "Y":
                try:
                    proxy = urllib3.ProxyManager(random.choice(proxy_list),
                                                 headers=header,
                                                 cert_reqs=False)
                    send = proxy.request("GET",
                                         str(site) + "'",
                                         retries=Retry(4),
                                         timeout=Timeout(5))
                except urllib3.exceptions.HTTPError:
                    send = http.request("GET",
                                        str(site) + "'",
                                        retries=Retry(4),
                                        timeout=Timeout(5))
            else:
                send = http.request("GET",
                                    str(site) + "'",
                                    retries=Retry(4),
                                    timeout=Timeout(5))

            if error1.encode("utf-8") in send.data or error2.encode("utf-8") in send.data:
                print(Fore.GREEN + str(site) + " seems vulnerable!")
                with open("sqli.txt", 'a') as injectable_url:
                    injectable_url.write(str(site) + "\n")
            else:
                print(Fore.RED + str(site) + " not vulnerable!")

        except urllib3.exceptions.HTTPError:
            print(Fore.YELLOW + "\n[!] Exception: " + str(site))
    else:
        print(Fore.YELLOW + "Skipping " + site + "")
Example #9
    def __init__(
        self,
        host: str,
        timeout: Optional[Union[float, Timeout]] = None,
    ) -> None:
        if isinstance(timeout, float):
            timeout = Timeout.from_float(timeout)

        super().__init__("localhost", timeout=timeout, retries=10)
        self.host = host
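Timeout.from_float, used above to normalize the argument, simply applies one number to both phases; a quick sketch:

from urllib3.util import Timeout

t = Timeout.from_float(2.5)
print(t.connect_timeout, t.read_timeout)   # 2.5 2.5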
Example #10
 def test_tunnel(self):
     """ test the _tunnel behavior """
     timeout = Timeout(total=None)
     https_pool = HTTPSConnectionPool(self.host, self.port, timeout=timeout,
                                      cert_reqs='CERT_NONE')
     conn = https_pool._new_conn()
     try:
         conn.set_tunnel(self.host, self.port)
     except AttributeError: # python 2.6
         conn._set_tunnel(self.host, self.port)
     conn._tunnel = mock.Mock()
     https_pool._make_request(conn, 'GET', '/')
     conn._tunnel.assert_called_once_with()
Example #11
    def _get_config(self, instance):
        host = instance.get('neo4j_url', '')
        port = int(instance.get('port', 7474))
        user = instance.get('user', '')
        password = str(instance.get('password', ''))
        connect_timeout = instance.get('connect_timeout')
        server_name = instance.get('server_name', '')

        timeout = None
        if connect_timeout:
            timeout = Timeout(connect=connect_timeout)

        return host, port, user, password, timeout, server_name
Example #12
    def test_timeout(self, current_time):
        timeout = Timeout(total=3)

        # make 'no time' elapse
        timeout = self._make_time_pass(seconds=0,
                                       timeout=timeout,
                                       time_mock=current_time)
        self.assertEqual(timeout.read_timeout, 3)
        self.assertEqual(timeout.connect_timeout, 3)

        timeout = Timeout(total=3, connect=2)
        self.assertEqual(timeout.connect_timeout, 2)

        timeout = Timeout()
        self.assertEqual(timeout.connect_timeout, Timeout.DEFAULT_TIMEOUT)

        # Connect takes 5 seconds, leaving 5 seconds for read
        timeout = Timeout(total=10, read=7)
        timeout = self._make_time_pass(seconds=5,
                                       timeout=timeout,
                                       time_mock=current_time)
        self.assertEqual(timeout.read_timeout, 5)

        # Connect takes 2 seconds, read timeout still 7 seconds
        timeout = Timeout(total=10, read=7)
        timeout = self._make_time_pass(seconds=2,
                                       timeout=timeout,
                                       time_mock=current_time)
        self.assertEqual(timeout.read_timeout, 7)

        timeout = Timeout(total=10, read=7)
        self.assertEqual(timeout.read_timeout, 7)

        timeout = Timeout(total=None, read=None, connect=None)
        self.assertEqual(timeout.connect_timeout, None)
        self.assertEqual(timeout.read_timeout, None)
        self.assertEqual(timeout.total, None)

        timeout = Timeout(5)
        self.assertEqual(timeout.total, 5)
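The precedence rules this test walks through condense to a few lines; the sketch below uses only the public attributes and a real clock instead of the mock.

from urllib3.util import Timeout

t = Timeout(total=3)
print(t.connect_timeout)   # 3: connect falls back to the total budget

t = Timeout(total=3, connect=2)
print(t.connect_timeout)   # 2: an explicit per-phase value wins

t = Timeout(total=10, read=7)
t.start_connect()          # once connecting starts, total is drawn down
print(t.read_timeout)      # min(read, total - elapsed) == 7 here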
Example #13
def dorker(dork):
    global dorks, enable_proxy, http, proxy_list, dorker_urls
    print(Fore.RESET + "\nDORK: " + str(dork))
    for pages in range(1, 16):
        f = open(result_name, "a", encoding="utf-8")

        # -- Search-results --------------------------------------------------------------------------------------------
        print(Fore.RESET + "Search-results:")
        if enable_proxy == "y" or enable_proxy == "Y":
            try:
                proxy = urllib3.ProxyManager(random.choice(proxy_list),
                                             headers=header,
                                             cert_reqs=False)
                send1 = proxy.request('GET',
                                      "http://www1.search-results.com/web?q=" +
                                      dork + "&page=" + str(pages),
                                      retries=Retry(3),
                                      timeout=Timeout(5))
            except urllib3.exceptions.HTTPError:
                send1 = http.request("GET",
                                     "http://www1.search-results.com/web?q=" +
                                     dork + "&page=" + str(pages),
                                     retries=Retry(3),
                                     timeout=Timeout(5))
        else:
            send1 = http.request("GET",
                                 "http://www1.search-results.com/web?q=" +
                                 dork + "&page=" + str(pages),
                                 retries=Retry(3),
                                 timeout=Timeout(5))

        try:
            parsing1 = BeautifulSoup(send1.data.decode('utf-8'),
                                     features="html.parser")
        except Exception as ex:
            print(Fore.YELLOW + "Error:\n" + str(ex) + "Trying latin-1...")
            parsing1 = BeautifulSoup(send1.data.decode('latin-1'),
                                     features="html.parser")

        for data in parsing1.find_all("cite"):
            print(Fore.RESET + data.string)
            # f.write(data.string + "\n")
            if str(data.string) not in dorker_urls:
                dorker_urls.append(str(data.string))

        # -- Auone -----------------------------------------------------------------------------------------------------
        print(Fore.RESET + "Auone:")
        if enable_proxy == "y" or enable_proxy == "Y":
            try:
                proxy = urllib3.ProxyManager(random.choice(proxy_list),
                                             headers=header,
                                             cert_reqs=False)
                send2 = proxy.request("GET",
                                      "https://search.auone.jp/?q=" + dork +
                                      "&ie=UTF-8&page=" + str(pages),
                                      retries=Retry(3),
                                      timeout=Timeout(5))
            except urllib3.exceptions.HTTPError:
                send2 = http.request("GET",
                                     "https://search.auone.jp/?q=" + dork +
                                     "&ie=UTF-8&page=" + str(pages),
                                     retries=Retry(3),
                                     timeout=Timeout(5))
        else:
            send2 = http.request("GET",
                                 "https://search.auone.jp/?q=" + dork +
                                 "&ie=UTF-8&page=" + str(pages),
                                 retries=Retry(3),
                                 timeout=Timeout(5))

        try:
            parsing2 = BeautifulSoup(send2.data.decode('utf-8'),
                                     features="html.parser")
        except Exception as ex:
            print(Fore.YELLOW + "Error:\n" + str(ex) + "Trying latin-1...")
            parsing2 = BeautifulSoup(send2.data.decode('latin-1'),
                                     features="html.parser")
        for data in parsing2.find_all(
                "h2", class_="web-Result__site u-TextEllipsis"):
            for url in data.find_all("a"):
                print(Fore.RESET + str(url.get('href')))
                # f.write(url.get('href') + "\n")
                if str(url.get('href')) not in dorker_urls:
                    dorker_urls.append(str(url.get('href')))

        # -- Qwant -----------------------------------------------------------------------------------------------------
        print(Fore.RESET + "Qwant:")
        if enable_proxy == "y" or enable_proxy == "Y":
            try:
                proxy = urllib3.ProxyManager(random.choice(proxy_list),
                                             headers=header,
                                             cert_reqs=False)
                send3 = proxy.request("GET",
                                      "https://lite.qwant.com/?q=" + dork +
                                      "&p=" + str(pages),
                                      retries=Retry(4),
                                      timeout=Timeout(5))
            except urllib3.exceptions.HTTPError:
                send3 = http.request("GET",
                                     "https://lite.qwant.com/?q=" + dork +
                                     "&p=" + str(pages),
                                     retries=Retry(4),
                                     timeout=Timeout(5))
        else:
            send3 = http.request("GET",
                                 "https://lite.qwant.com/?q=" + dork + "&p=" +
                                 str(pages),
                                 retries=Retry(4),
                                 timeout=Timeout(5))

        try:
            parsing3 = BeautifulSoup(send3.data.decode('utf-8'),
                                     features="html.parser")
        except Exception as ex:
            print("Error:\n" + str(ex) + "Trying latin-1...")
            parsing3 = BeautifulSoup(send3.data.decode('latin-1'),
                                     features="html.parser")
        for data in parsing3.find_all("p", class_="url"):
            print(str(data.string).replace(" ", ""))
            # f.write(str(data.string).replace(" ", "") + "\n")
            if str(data.string).replace(" ", "") not in dorker_urls:
                dorker_urls.append(str(data.string).replace(" ", ""))

        # -- Lilo ------------------------------------------------------------------------------------------------------
        print(Fore.RESET + "Lilo:")
        if enable_proxy == "y" or enable_proxy == "Y":
            try:
                proxy = urllib3.ProxyManager(random.choice(proxy_list),
                                             headers=header,
                                             cert_reqs=False)
                send4 = proxy.request("GET",
                                      "https://search.lilo.org/?q=" + dork +
                                      "&date=All&page=" + str(pages),
                                      retries=Retry(4),
                                      timeout=Timeout(5))
            except urllib3.exceptions.HTTPError:
                send4 = http.request("GET",
                                     "https://search.lilo.org/?q=" + dork +
                                     "&date=All&page=" + str(pages),
                                     retries=Retry(4),
                                     timeout=Timeout(5))
        else:
            send4 = http.request("GET",
                                 "https://search.lilo.org/?q=" + dork +
                                 "&date=All&page=" + str(pages),
                                 retries=Retry(4),
                                 timeout=Timeout(5))

        try:
            parsing4 = BeautifulSoup(send4.data.decode('utf-8'),
                                     features="html.parser")
        except Exception as ex:
            print(Fore.YELLOW + "Error:\n" + str(ex) + "Trying latin-1...")
            parsing4 = BeautifulSoup(send4.data.decode('latin-1'),
                                     features="html.parser")
        for data in parsing4.find_all("a", class_="resulturl d-block"):
            print(Fore.RESET + str(data.get("href")))
            # f.write(data.get("href") + "\n")
            if str(data.get("href")) not in dorker_urls:
                dorker_urls.append(str(data.get("href")))

        # -- Mywebsearch -----------------------------------------------------------------------------------------------
        print(Fore.RESET + "Mywebsearch:")
        if enable_proxy == "y" or enable_proxy == "Y":
            try:
                proxy = urllib3.ProxyManager(random.choice(proxy_list),
                                             headers=header,
                                             cert_reqs=False)
                send5 = proxy.request(
                    "GET",
                    "https://int.search.mywebsearch.com/mywebsearch/GGmain.jhtml?searchfor="
                    + dork + "&pn=" + str(pages),
                    retries=Retry(4),
                    timeout=Timeout(5))
            except urllib3.exceptions.HTTPError:
                send5 = http.request(
                    "GET",
                    "https://int.search.mywebsearch.com/mywebsearch/GGmain.jhtml?searchfor="
                    + dork + "&pn=" + str(pages),
                    retries=Retry(4),
                    timeout=Timeout(5))
        else:
            send5 = http.request(
                "GET",
                "https://int.search.mywebsearch.com/mywebsearch/GGmain.jhtml?searchfor="
                + dork + "&pn=" + str(pages),
                retries=Retry(4),
                timeout=Timeout(5))

        try:
            parsing5 = BeautifulSoup(send5.data.decode('utf-8'),
                                     features="html.parser")
        except Exception as ex:
            print("Error:\n" + str(ex) + "Trying latin-1...")
            parsing5 = BeautifulSoup(send5.data.decode('latin-1'),
                                     features="html.parser")
        for data in parsing5.find_all("cite"):
            print(Fore.RESET + str(data.string))
            # f.write(data.string + "\n")
            if str(data.string) not in dorker_urls:
                dorker_urls.append(str(data.string))

        f.close()
Example #14
    exit(0)

try:
    import ipaddress
except:
    print(
        RED1 + BOLD +
        "\n * Package ipaddress not installed. Please install the dependencies before continue.\n"
        "" + GREEN + "   Example: \n"
        "   # pip install -r requires.txt\n" + ENDC)
    exit(0)

from urllib3 import PoolManager
from urllib3.util import Timeout

timeout = Timeout(connect=3.0, read=6.0)
pool = PoolManager(timeout=timeout, cert_reqs='CERT_NONE')

global gl_interrupted
gl_interrupted = False

user_agents = [
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:38.0) Gecko/20100101 Firefox/38.0",
    "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.112 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_2) AppleWebKit/601.3.9 (KHTML, like Gecko) Version/9.0.2 Safari/601.3.9",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; .NET CLR 2.0.50727; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)",
    "Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 5.1)",
    "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 2.0.50727)",
Example #15
def lfi_checker(url_list):
    global sites, payload, enable_proxy, http, proxy_list
    if "=" in url_list:
        if not str(url_list).startswith("https://cve.mitre.org") and not str(
                url_list).startswith("http://cve.mitre.org"):
            site = url_list.split("=")
            number_of_parameters = len(site)
            # 1
            if number_of_parameters == 2:
                try:
                    print(Fore.RESET + "Trying " + site[0] + "=PAYLOAD")
                    for exploit in payload:

                        # Request with payload
                        if enable_proxy == "y" or enable_proxy == "Y":
                            try:
                                proxy = urllib3.ProxyManager(
                                    random.choice(proxy_list),
                                    headers=header,
                                    cert_reqs=False)
                                http_request1 = proxy.request(
                                    "GET",
                                    str(site[0]) + "=" + exploit,
                                    retries=Retry(4),
                                    timeout=Timeout(9))
                            except urllib3.exceptions.HTTPError:
                                http_request1 = http.request(
                                    "GET",
                                    str(site[0]) + "=" + exploit,
                                    retries=Retry(4),
                                    timeout=Timeout(9))
                        else:
                            http_request1 = http.request("GET",
                                                         str(site[0]) + "=" +
                                                         exploit,
                                                         retries=Retry(4),
                                                         timeout=Timeout(9))
                        http_response1 = str(http_request1.data)

                        if "root:" in http_response1:
                            print(Fore.GREEN + "[+] Vulnerable URL: " +
                                  site[0] + "=" + exploit)
                            f = open("lfi.txt", "a")
                            f.write(site[0] + "=" + exploit + "\n")
                            f.close()
                except urllib3.exceptions.HTTPError:
                    print(Fore.YELLOW + "\n[!] Exception: " + str(url_list))

            # 2
            elif number_of_parameters == 3:
                try:
                    print(Fore.RESET + "Trying " +
                          str(url_list.split("&")[0]) + "&" +
                          str(url_list.split("&")[1].split("=")[0]) +
                          "=PAYLOAD")
                    for exploit in payload:

                        # Request with payload
                        if enable_proxy == "y" or enable_proxy == "Y":
                            try:
                                proxy = urllib3.ProxyManager(
                                    random.choice(proxy_list),
                                    headers=header,
                                    cert_reqs=False)
                                http_request2_1 = proxy.request(
                                    "GET",
                                    str(url_list.split("&")[0]) + "&" +
                                    str(url_list.split("&")[1].split("=")[0]) +
                                    "=" + exploit,
                                    retries=Retry(4),
                                    timeout=Timeout(9))
                            except urllib3.exceptions.HTTPError:
                                http_request2_1 = http.request(
                                    "GET",
                                    str(url_list.split("&")[0]) + "&" +
                                    str(url_list.split("&")[1].split("=")[0]) +
                                    "=" + exploit,
                                    retries=Retry(4),
                                    timeout=Timeout(9))
                        else:
                            http_request2_1 = http.request(
                                "GET",
                                str(url_list.split("&")[0]) + "&" +
                                str(url_list.split("&")[1].split("=")[0]) +
                                "=" + exploit,
                                retries=Retry(4),
                                timeout=Timeout(9))
                        http_response2_1 = str(http_request2_1.data)

                        if "root:" in http_response2_1:
                            print(Fore.GREEN + "[+] Vulnerable URL: " +
                                  str(url_list.split("&")[0]) + "&" +
                                  str(url_list.split("&")[1].split("=")[0]) +
                                  "=" + exploit)
                            f = open("lfi.txt", "a")
                            f.write(
                                str(url_list.split("&")[0]) + "&" +
                                str(url_list.split("&")[1].split("=")[0]) +
                                "=" + exploit + "\n")
                            f.close()

                        # Request with payload
                        print(
                            str(url_list.split("&")[0].split("=")[0]) + "=" +
                            exploit + "&" + str(url_list.split("&")[1]))
                        http_request2_2 = http.request(
                            "GET",
                            str(url_list.split("&")[0].split("=")[0]) + "=" +
                            exploit + "&" + str(url_list.split("&")[1]),
                            retries=Retry(4),
                            timeout=Timeout(9))
                        http_response2_2 = str(http_request2_2.data)
                        if "root:" in http_response2_2:
                            print(Fore.GREEN + "[+] Vulnerable URL: " +
                                  str(url_list.split("&")[0]) + "&" +
                                  str(url_list.split("&")[1].split("=")[0]) +
                                  "=" + exploit)
                            f = open("lfi.txt", "a")
                            f.write(
                                str(url_list.split("&")[0]) + "&" +
                                str(url_list.split("&")[1].split("=")[0]) +
                                "=" + exploit + "\n")
                            f.close()
                except urllib3.exceptions.HTTPError:
                    print(Fore.YELLOW + "\n[!] Exception: " + str(url_list))

            elif number_of_parameters > 4:
                try:
                    print(Fore.RESET + "Trying " + site[0] + "=PAYLOAD")
                    for exploit in payload:

                        # Request with payload
                        if enable_proxy == "y" or enable_proxy == "Y":
                            try:
                                proxy = urllib3.ProxyManager(
                                    random.choice(proxy_list),
                                    headers=header,
                                    cert_reqs=False)
                                http_request3_1 = proxy.request(
                                    "GET",
                                    str(site[0]) + "=" + exploit,
                                    retries=Retry(4),
                                    timeout=Timeout(9))
                            except urllib3.exceptions.HTTPError:
                                http_request3_1 = http.request(
                                    "GET",
                                    str(site[0]) + "=" + exploit,
                                    retries=Retry(4),
                                    timeout=Timeout(9))
                        else:
                            http_request3_1 = http.request("GET",
                                                           str(site[0]) + "=" +
                                                           exploit,
                                                           retries=Retry(4),
                                                           timeout=Timeout(9))
                        http_response3_1 = str(http_request3_1.data)

                        if "root:" in http_response3_1:
                            print(Fore.GREEN + "[+] Vulnerable URL: " +
                                  site[0] + "=" + exploit)
                            f = open("lfi.txt", "a")
                            f.write(site[0] + "=" + exploit + "\n")
                            f.close()
                except urllib3.exceptions.HTTPError:
                    print(Fore.YELLOW + "\n[!] Exception: " + str(url_list))

            else:
                pass
Example #16
    def fetch_url(self,
                  url,
                  user_agent,
                  timeout,
                  limit_len=True,
                  add_headers=None):
        """ Fetch a given url, with a given user_agent and timeout"""
        response = None
        if add_headers is None:
            # avoid sharing a mutable default dict across calls
            add_headers = {}
        try:
            if not add_headers.get('User-Agent'):
                add_headers['User-Agent'] = user_agent
            if not add_headers.get('Connection'):
                add_headers['Connection'] = 'Keep-Alive'
            if not add_headers.get('Host'):
                add_headers['Host'] = conf.target_host

            # Session cookie, priority to user-supplied.
            if conf.cookies:
                add_headers['Cookie'] = conf.cookies
            elif database.session_cookie:
                add_headers['Cookie'] = database.session_cookie

            # Limit request len on binary types
            if limit_len:
                content_range = 'bytes=0-' + str(conf.file_sample_len - 1)
                add_headers['Range'] = content_range
            else:
                if 'Range' in add_headers:
                    del add_headers['Range']

            if conf.proxy_url:
                url = conf.scheme + '://' + conf.target_host + ':' + str(
                    conf.target_port) + url
                textutils.output_debug(url)

            if conf.is_ssl:
                database.connection_pool.ConnectionCls = UnverifiedHTTPSConnection

            # Dynamic timeout
            request_timeout = Timeout(connect=timeout, read=timeout)

            response = database.connection_pool.request(
                'GET',
                url,
                headers=add_headers,
                retries=0,
                redirect=False,
                release_conn=True,
                assert_same_host=False,
                timeout=request_timeout,
                preload_content=False)

            content = response.data
            code = response.status
            headers = response.headers
            response.release_conn()  # return the connection back to the pool
        except Exception as e:
            code = 0
            content = ''
            headers = dict()

        return code, content, headers
Example #17
 def test_timeout_str(self):
     timeout = Timeout(connect=1, read=2, total=3)
     self.assertEqual(str(timeout), "Timeout(connect=1, read=2, total=3)")
     timeout = Timeout(connect=1, read=None, total=3)
     self.assertEqual(str(timeout),
                      "Timeout(connect=1, read=None, total=3)")
Example #18
class SyncSender(BaseSender):
    """
    Sender based on urllib3 for the Ingestion service.

    :param base_url: (optional) URL of the Ingestion service.
    :param proxy_url: (optional) URL of a Proxy server.
    :param headers: (optional) Headers to send with all requests.
    :param json_encoder: (optional) JSON serializer for data to be sent.
    """

    max_pool_size = 1
    retry_policy = Retry(
        total=3,
        method_whitelist=False,
        status_forcelist={500, 502, 503, 504, 408},
        raise_on_status=True,
    )
    timeout_policy = Timeout(connect=10, read=10)

    def __init__(self,
                 base_url=None,
                 proxy_url=None,
                 headers={},
                 json_encoder=None):
        # type: (Optional[str], Optional[str], Mapping[str, str], Optional[Type[json.JSONEncoder]]) -> None
        base_headers = util.make_headers(keep_alive=True, accept_encoding=True)
        base_headers.update(headers)
        super(SyncSender, self).__init__(base_url=base_url,
                                         proxy_url=proxy_url,
                                         headers=base_headers,
                                         json_encoder=json_encoder)

        options = dict(
            block=True,
            maxsize=self.max_pool_size,
        )
        if self.proxy_url is not None:
            self.pool_manager = poolmanager.ProxyManager(
                self.proxy_url, **options)  # type: poolmanager.PoolManager
        else:
            self.pool_manager = poolmanager.PoolManager(**options)

    def _url(self, endpoint):
        return urlparse.urljoin(self.base_url, endpoint, allow_fragments=False)

    def send(self, endpoint, data, headers={}, **kwargs):
        # type: (str, Union[AnySignal, Batch], Mapping[str, str], **Any) -> None
        assert self.pool_manager is not None
        body = self.serialize_data(data)
        request_headers = dict(self.headers)
        request_headers["Content-Type"] = "application/json"
        request_headers.update(headers)
        url = self._url(endpoint)
        response = self.pool_manager.urlopen("POST",
                                             url,
                                             body=body,
                                             headers=request_headers,
                                             preload_content=True,
                                             release_conn=True,
                                             redirect=False,
                                             retries=self.retry_policy,
                                             timeout=self.timeout_policy,
                                             **kwargs)
        return self.handle_response(response)

    def close(self):  # type: () -> None
        self.pool_manager.clear()
        del self.pool_manager
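The same class-level policies can be exercised with a plain PoolManager; a hedged sketch follows (example.com stands in for a real endpoint). Note that Retry's method_whitelist argument was renamed allowed_methods in urllib3 1.26, so which spelling works depends on the installed version.

from urllib3 import PoolManager
from urllib3.util import Timeout
from urllib3.util.retry import Retry

retry_policy = Retry(total=3, status_forcelist={500, 502, 503, 504, 408})
timeout_policy = Timeout(connect=10, read=10)

http = PoolManager()
resp = http.request('GET', 'https://example.com/',
                    retries=retry_policy,
                    timeout=timeout_policy)
print(resp.status)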