def do_proxy_test(test_list):
    """Split test_list into batches, test each batch against the judges, and
    push the combined results into the database."""
    test_size = 1000
    batch_list = []
    final_return_list = []
    total = len(test_list)

    # Load the judge URLs and verify them before testing any proxies.
    judges = []
    jfile = []
    f = open("judges.txt")
    for line in f:
        jfile.append(line.strip())
    f.close()
    jtest = judgestest.judgesTest()
    judges = jtest.run_test(jfile)

    # Split the proxy list into batches of at most test_size entries.
    while 1:
        if len(test_list) > test_size:
            batch_list.append(test_list[:test_size])
            del test_list[:test_size]
        else:
            if len(test_list) > 0:
                batch_list.append(test_list[:])
            break

    print ""
    print "Test params"
    print "Testing", total, "total proxies."

    number_of_batches = len(batch_list)
    for batch_number, batch in enumerate(batch_list):
        print "* batch", batch_number, "is", len(batch), "proxies long"

    for batch_number, batch in enumerate(batch_list):
        print strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
        print "* Testing batch #", batch_number, "of", number_of_batches, \
            "which contains", len(batch), "proxies."
        proxy_test = proxytest.Proxytest()
        return_list = proxy_test.run_test(batch, judges)
        final_return_list.extend(return_list)
        # Re-verify the judges every ten batches in case some have gone stale.
        if batch_number % 10 == 0 and batch_number != 0:
            try:
                jtest = judgestest.judgesTest()
                judges = jtest.run_test(jfile)
            except Exception:
                print "Error running judges test"

    update_database(final_return_list)
    return final_return_list
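# The batch-splitting loop above (and its twin in Proxytest.run_test further
# down) empties test_list in place with repeated `del` slices. A minimal
# sketch of the same idea as a reusable helper; `chunk` is a hypothetical
# name, not part of the original code, and it leaves the input list intact:
def chunk(items, size):
    """Yield successive slices of at most `size` items."""
    for start in range(0, len(items), size):
        yield items[start:start + size]

# Example: list(chunk([1, 2, 3, 4, 5], 2)) -> [[1, 2], [3, 4], [5]]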
def run_test(self, upstream):
    """Queue every live (or never-checked) proxy in upstream and drain the
    queue with a pool of Proxytest worker threads."""
    global plist
    global threadPool
    global rejects
    global working
    global judges
    global x
    global theVar
    global block_list
    global renig_judges

    start = time.time()
    threadlist = []
    judges[:] = []
    plist[:] = upstream
    theVar = 0
    x = 1
    threadPool = Queue.Queue()

    # Load the block list and the judge URLs from disk.
    with open("block_list.txt") as f:
        for line in f:
            block_list.append(line.strip())
    with open("judges.txt") as f:
        for line in f:
            judges.append(line.strip())

    # Re-verify the judges unless we are deliberately reusing the old set.
    if not renig_judges:
        jtest = judgestest.judgesTest()
        judges = jtest.run_test(judges)

    print "Booting up proxy test"

    # Queue every proxy that is alive or has never been checked.
    g = 0
    for proxy in plist:
        if proxy.alive or proxy.last_checked == "never":
            g += 1
            threadPool.put(proxy)
    print "* Testing", g, "proxies."

    # Start worker threads while pool slots remain (x and theVar are module
    # globals shared with the worker threads).
    while theVar < g:
        if x < pool_size:
            t = Proxytest()
            threadlist.append(t)
            theVar += 1
            t.start()
            x += 1
    for t in threadlist:
        t.join()

    print "Elapsed Time: %s" % (time.time() - start)
    x = 0
    return plist
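# The loop above relies on Proxytest worker threads pulling proxies off the
# shared threadPool queue. A generic sketch of that consumer pattern using the
# standard Queue/threading APIs; QueueWorker is a hypothetical class for
# illustration, not the actual Proxytest thread body:
import Queue
import threading

class QueueWorker(threading.Thread):
    """Drains a Queue.Queue until it is empty, collecting what it pulls off."""
    def __init__(self, pool, results):
        threading.Thread.__init__(self)
        self.pool = pool
        self.results = results  # a real worker would test each proxy instead

    def run(self):
        while True:
            try:
                item = self.pool.get_nowait()
            except Queue.Empty:
                break
            self.results.append(item)  # list.append is atomic under the GIL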
def main():
    judge_urls = []
    scrape = Judge_Scrape()
    judge_urls = scrape.google_crawl()

    # Merge in the judges we already know about, trimming each URL just past
    # its page extension.
    f = open("judges.txt", "r")
    for result in f:
        if result.find(".php") > -1:
            result = result[0:result.find(".php") + 4]
        if result.find(".cgi") > -1:
            result = result[0:result.find(".cgi") + 4]
        if result.find(".html") > -1:
            result = result[0:result.find(".html") + 5]
        judge_urls.append(result.strip("\n"))
    f.close()

    judge_urls = scrape.sort_by_url(judge_urls)

    # Verify the combined list and write the working judges back to disk.
    test = judgestest.judgesTest()
    judge_urls = test.run_test(judge_urls)
    f = open("judges.txt", "w")
    for line in judge_urls:
        f.write(str(line) + '\n')
    f.close()
    print "Wrote", len(judge_urls), "judges to text file."
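# The three extension checks above can be folded into one helper. This is a
# slightly simplified variant that stops at the first matching extension;
# `trim_judge_url` is a hypothetical name, not part of the original code:
def trim_judge_url(url):
    """Return url truncated just past the first .php/.cgi/.html it contains."""
    for ext in (".php", ".cgi", ".html"):
        pos = url.find(ext)
        if pos > -1:
            return url[:pos + len(ext)]
    return url

# Example: trim_judge_url("http://example.com/azenv.php?x=1") -> "http://example.com/azenv.php"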
        output.append(proxy)  # tail of the loop that builds `output` (context not shown)

    # Keep only the proxies that are still responding, fastest first.
    output = sorted(output, key=attrgetter('resp'))
    for proxy in output:
        if not proxy.is_dead():
            test_list.append(proxy)

    # Load and verify the judges.
    judges = []
    jfile = []
    f = open("judges.txt")
    for line in f:
        jfile.append(line.strip())
    f.close()
    jtest = judgestest.judgesTest()
    judges = jtest.run_test(jfile)

    # Run the proxy test against the verified judges.
    proxy_test = proxytest.Proxytest()
    test_result = proxy_test.run_test(test_list, judges)

    # Back up the current FoxyProxy configuration before touching it.
    with open("/home/sleven/.mozilla/firefox/fqxkj6v8.default/foxyproxy.xml") as f:
        bak = f.read()

    # Report the proxies that came back alive and safe.
    for a in test_result:
        if a.alive and a.safe:
            print a, a.get_flags_bin()

    # Get some stats
    working_proxies = 0
    dead = 0
def run_test(self, test_list, max_to_return=-1):
    """Test test_list in batches, handing pool_size proxies at a time to
    worker threads."""
    global plist
    global threadPool
    global block_list
    global renig_judges
    global pool_size
    global chan_mode
    global working_proxies
    global max_return_proxies
    global proxy

    max_return_proxies = max_to_return
    start = time.time()
    judges = []
    update_freq = 1000
    all = []
    test_size = 10000
    print "CHAN_MODE:", chan_mode

    # Load the block list and the judge URLs, then verify the judges and
    # queue the working ones.
    f = open("block_list.txt")
    for line in f:
        block_list.append(line.strip())
    f.close()
    f = open("judges.txt")
    for line in f:
        judges.append(line.strip())
    f.close()
    jtest = judgestest.judgesTest()
    judges = jtest.run_test(judges)
    for j in judges:
        judgePool.put(j)

    print "Booting up proxy test"
    g = 0
    batch = []
    return_batch = []
    temp = []
    v = 0
    total = len(test_list)
    if pool_size > total:
        pool_size = total

    # Split the proxy list into batches of at most test_size entries.
    while 1:
        if len(test_list) > test_size:
            batch.append(test_list[:test_size])
            del test_list[:test_size]
        else:
            if len(test_list) > 0:
                batch.append(test_list[:])
            break

    print "Test params"
    print "Testing", total, "total proxies."
    for a, i in enumerate(batch):
        print "* batch", a, "is", len(i), "proxies long"

    # Drain each batch pool_size proxies at a time.
    for batch_num, i in enumerate(batch):
        while len(batch[batch_num]) > 0:
            print "new pool"
            print "len batch", len(batch[batch_num])
            for u in range(pool_size):
                try:
                    threadPool.put(batch[batch_num][0])
                    batch[batch_num].remove(batch[batch_num][0])
                except Exception as e:
                    # An IndexError here just means the batch ran dry early.
                    print e
            try:
                for d in range(pool_size):
                    self.run_in_thread(working_proxies, chan_mode)
            except Exception as e:
                print e
def main():
    all = []
    test_list = []
    print "importing..."

    # Pull every proxy row out of the database.
    try:
        s = select([main_tbl])
        conn = engine.connect()
        result = conn.execute(s)
    except Exception:
        print "Database import failed."
        sys.exit()

    chan_mode = False
    result = sorted(result, key=attrgetter('resp'))
    for row in result:
        new_proxy = proxy.Proxy(row.ip, row.port, row.location, row.hostname,
                                row.alive, row.safe, row.last_checked, row.url,
                                row.resp, row.good, row.bad, row.flags_bin)
        # Only retest live proxies located outside the US, CA, and GB.
        if (new_proxy.alive
                and new_proxy.location.find("US") == -1
                and new_proxy.location.find("CA") == -1
                and new_proxy.location.find("GB") == -1):
            test_list.append(new_proxy)

    # Load the judge list, block list, and judge identifiers from disk.
    judges = []
    jfile = []
    block_list = []
    judge_identifier = []
    f = open("judges.txt")
    for line in f:
        jfile.append(line.strip())
    f.close()
    f = open("block_list.txt")
    for line in f:
        block_list.append(line.strip())
    f.close()
    f = open("judge_identifier.txt")
    for line in f:
        judge_identifier.append(line.strip())
    f.close()

    # Verify the judges, then run the proxy test.
    jtest = judgestest.judgesTest()
    judges = jtest.run_test(jfile)
    proxy_test = proxytest.Proxytest()
    test_result = proxy_test.run_test(test_list, judges, judge_identifier, block_list)

    # Back up the current FoxyProxy configuration before rewriting it.
    with open("/home/sleven/.mozilla/firefox/4lnqadx1.default/foxyproxy.xml") as f:
        bak = f.read()

    for a in test_result:
        if a.alive and a.safe and a.last_checked != "never":
            all.append(a)

    # Push the 20 fastest working proxies into FoxyProxy; restore the backup
    # if anything goes wrong.
    try:
        if len(all) > 0:
            fp = foxyProxy()
            all = sorted(all, key=attrgetter('resp'))
            all = all[0:20]
            fp.execute(all)
        else:
            print "No good proxies to add"
    except Exception as e:
        with open("/home/sleven/.mozilla/firefox/4lnqadx1.default/foxyproxy.xml", "w") as f:
            f.write(bak)
        print "Wrote backup."
        print e
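# Every entry point above re-reads judges.txt, block_list.txt, or
# judge_identifier.txt with the same open/strip/append loop. A minimal sketch
# of that pattern as one shared helper; `read_lines` is a hypothetical name,
# not part of the original code, and it mirrors the original loops (stripped
# lines, blanks included):
def read_lines(path):
    """Return the stripped lines of a text file as a list."""
    with open(path) as f:
        return [line.strip() for line in f]

# Example: judges = read_lines("judges.txt")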