Пример #1
0
def GrabGreyProxy():
    """Continuously harvest proxies and persist new, working ones to the CSV store.

    Runs forever: each pass spawns a fresh ``GreyHarvester``, validates every
    harvested proxy, and appends proxies that pass the check and are not
    already saved. Never returns.
    """
    while True:
        harvester = grey_harvest.GreyHarvester()
        # Iterate the harvester output directly. The original assigned an
        # extra `harvester.run()` to an unused local (`Runned`), creating a
        # second, never-consumed generator.
        for proxy in harvester.run():
            # NOTE(review): assumes CheckProxy() returns a truthy value for a
            # working proxy and CSVToList() returns the already-saved proxy
            # strings — confirm against those helpers.
            if CheckProxy(proxy) and str(proxy) not in CSVToList():
                print('{} added'.format(proxy))
                AddToCSV(proxy)
            else:
                print('Proxy Not Saved')
        print('Started Runned')
Пример #2
0
def GrabGreyProxy():
	"""Continuously harvest proxies and persist new, working ones to ProxyList.csv.

	Runs forever: each pass spawns a fresh ``GreyHarvester``, prints every
	harvested proxy, and saves proxies that pass the check and are not
	already in the module-level ``Proxies`` list. Never returns.
	"""
	while True:
		harvester = grey_harvest.GreyHarvester()
		# Iterate the harvester output directly. The original bound an extra
		# `harvester.run()` to an unused local (`Runned`), creating a second,
		# never-consumed generator.
		for proxy in harvester.run():
			print(proxy)
			# NOTE(review): assumes AddProxyToList() appends to `Proxies` so
			# that WriteListToFile() persists the updated list — confirm.
			if CheckProxy(proxy) and str(proxy) not in Proxies:
				AddProxyToList(proxy)
				WriteListToFile(Proxies, 'ProxyList.csv')
			else:
				print('Proxy Not Saved')
		print('Started Runned')
Пример #3
0
def gen(num=1):
    """Harvest proxies and return them as a list of requests-style dicts.

    Each entry maps both ``"http"`` and ``"https"`` to the same proxy
    address, suitable for passing as a ``proxies=`` argument.

    NOTE(review): `num` is kept for interface compatibility but is unused —
    the function returns every proxy the harvester yields; confirm whether
    callers expect it to cap the list. The unused ``count`` local from the
    original has been removed, and the Python 2 ``print`` statement (a
    SyntaxError on Python 3) replaced with the function form.
    """
    proxy_list = []
    # Spawn a harvester and collect everything it yields from the web.
    harvester = grey_harvest.GreyHarvester()
    for proxy in harvester.run():
        proxy_list.append({
            "http": str(proxy),
            "https": str(proxy),
        })
        print(proxy)
    return proxy_list
Пример #4
0
# -*- coding: utf-8 -*-

import grey_harvest  # for scrape available proxies
from pprint import pprint
import json
# Spawn a harvester and dump HTTPS-capable proxies to proxies.json.
harvester = grey_harvest.GreyHarvester()

# Python 2 `print "..."` statement replaced with the function form so the
# script is valid on Python 3 (the rest of the snippet already used print()).
print("searching for good proxies ip addresses...")

number_of_proxy = 150
count = 0

proxies_list = []

# Harvest proxies from the web, keeping only those that advertise HTTPS.
for proxy in harvester.run():
    if proxy['https']:

        proxies_list.append(proxy)

        count += 1
        print(count)
    # Stop once the target is reached. The original `count > number_of_proxy`
    # off-by-one collected 151 proxies instead of 150.
    if count >= number_of_proxy:
        break

pprint(proxies_list)

# Context manager ensures the file is flushed and closed even on error;
# the original left the handle open.
with open('proxies.json', 'w') as thefile:
    json.dump(proxies_list, thefile)
Пример #5
0
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Proxy generator
Check module at s0lst1c3/grey_harvest.
"""

import grey_harvest
import numpy as np

# Spawn a harvester and save the first `proxy_len` proxies to a text file.
harvester = grey_harvest.GreyHarvester()

# Harvest some proxies from the web.
count = 0
proxy_len = 200  # number of proxies to generate
proxy_list = []
for proxy in harvester.run():
    # Function-call form works on Python 2 and 3 for a single argument;
    # the original bare `print proxy` statement is a SyntaxError on Python 3.
    print(proxy)
    proxy_list.append(proxy)
    count += 1
    if count >= proxy_len:
        break
# One proxy per line, written as plain strings.
np.savetxt('proxylist2.txt', proxy_list, fmt='%s')