Example #1
def check_urls_for_queries(self):
    """ The returned URLs are run through a query regex to see if they have a query (GET) parameter:
        http://google.com <- False
        http://example.com/php?id=2 <- True """
    filename = settings.create_random_filename()
    LOGGER.info("File being saved to: {}".format(filename))
    results_path = "{}\\{}.txt".format(settings.DORK_SCAN_RESULTS_PATH, filename)
    with open(results_path, "a+") as results:
        for url in self.connect_to_search_engine():
            match = settings.QUERY_REGEX.match(url)  # Match anything that has a ?<PARAM>= in it
            if match:
                results.write(url + "\n")
    # Count the saved URLs without leaking an open file handle
    with open(results_path, "r") as saved:
        amount_of_urls = len(saved.readlines())
    success_rate = ((amount_of_urls // 10) + 1) * 10
    return "Found a total of {} usable links with query (GET) parameters, urls have been saved to {}. " \
           "This Dork has a success rate of {}%".format(amount_of_urls, results_path, success_rate)
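
The matching step depends on settings.QUERY_REGEX, which is defined elsewhere in the project. Below is a minimal sketch, assuming a hypothetical pattern of the ?<PARAM>= form, showing how such a regex separates the two URLs from the docstring:

import re

# Hypothetical pattern; the real settings.QUERY_REGEX lives elsewhere in the project
QUERY_REGEX = re.compile(r".*\?.+=.*")

for url in ("http://google.com", "http://example.com/php?id=2"):
    print(url, bool(QUERY_REGEX.match(url)))
# http://google.com False
# http://example.com/php?id=2 True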
Example #2
def attempt_to_connect_to_proxies():
    """ Attempt connections to the proxies pulled from the JSON data """
    results = []
    prox_info = connect_and_pull_info()
    # Iterate the entries directly; indexing with enumerate(start=1) skipped the
    # first entry and ran past the end of the list
    for proxy in prox_info:
        if proxy["type"] == "HTTP":
            candidate = "{}://{}:{}".format(proxy["type"], proxy["ip"], proxy["port"])
            opener = urllib2.build_opener(urllib2.ProxyHandler({"http": candidate}))
            urllib2.install_opener(opener)
            request = urllib2.Request("http://google.com")
            try:
                start_time = time.time()
                urllib2.urlopen(request, timeout=10)
                latency = time.time() - start_time
                LOGGER.info("Successful: {}\n\t\tLatency: {}s\n\t\tOrigin: {}\n\t\tAnonymity: {}\n\t\tType: {}".format(
                    candidate.lower(), latency, proxy["country"],
                    proxy["anonymity"], proxy["type"]
                ))
                results.append("http://" + proxy["ip"] + ":" + proxy["port"])
            except (urllib2.HTTPError, urllib2.URLError, socket.timeout,
                    httplib.BadStatusLine, socket.error):
                pass  # unreachable or misbehaving proxy, skip it
    amount = len(results)
    if amount != 0:
        LOGGER.info("Found a total of {} proxies.".format(amount))
        filename = create_random_filename()
        create_dir(PROXY_SCAN_RESULTS)
        results_path = PROXY_SCAN_RESULTS + "/" + filename + ".txt"
        with open(results_path, "a+") as res:
            for prox in results:
                res.write(prox + "\n")
        LOGGER.info("Results saved to: {}".format(results_path))
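
connect_and_pull_info() is defined elsewhere; the loop above assumes each entry is a dict with "type", "ip", "port", "country" and "anonymity" keys. Below is a minimal sketch of checking a single proxy the same way, with a hypothetical entry in that shape (the IP is a documentation address, not a real proxy):

import socket
import time
import urllib2  # Python 2; the equivalent calls live in urllib.request on Python 3

# Hypothetical entry in the shape the loop above expects
proxy = {"type": "HTTP", "ip": "203.0.113.10", "port": "8080",
         "country": "N/A", "anonymity": "elite"}

candidate = "{}://{}:{}".format(proxy["type"].lower(), proxy["ip"], proxy["port"])
opener = urllib2.build_opener(urllib2.ProxyHandler({"http": candidate}))
urllib2.install_opener(opener)
try:
    start = time.time()
    urllib2.urlopen(urllib2.Request("http://google.com"), timeout=10)
    print("reachable, latency {:.2f}s".format(time.time() - start))
except (urllib2.URLError, socket.timeout, socket.error):
    print("unreachable")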