def freeProxyWallThird():
    """Scrape free proxies from proxylistplus.com (Fresh-HTTP-Proxy-List page 1).

    Yields "ip:port" strings; stops (returning an empty list) as soon as a
    page cannot be fetched.
    """
    row_pattern = re.compile(
        r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\s\S]*?<td>(\d+)</td>')
    for page_url in ['https://list.proxylistplus.com/Fresh-HTTP-Proxy-List-1']:
        html = EasyHttp.get(page_url, timeout=10)
        if not html:
            Log.w('http://list.proxylistplus.com无效')
            return []
        for ip, port in row_pattern.findall(html):
            yield '%s:%s' % (ip, port)
def freeProxyWallFirst():
    """Scrape free proxies from cn-proxy.com (site outside the GFW).

    :return: generator of "ip:port" strings; an empty list as soon as a
             page cannot be fetched.
    """
    urls = ['http://cn-proxy.com/', 'http://cn-proxy.com/archives/218']
    for url in urls:
        r = EasyHttp.get(url, timeout=10)
        if not r:
            Log.w('http://cn-proxy.com无效')
            return []
        # BUG FIX: the original called re.findall(pattern, ) WITHOUT the
        # response text, which raises TypeError at runtime. It also used
        # '[\w\W]' (exactly one character) between the ip cell and the port
        # cell, so real rows never matched. Use the non-greedy any-character
        # gap '[\s\S]*?' that the sibling scrapers in this file use.
        proxies = re.findall(
            r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\s\S]*?<td>(\d+)</td>',
            r)
        for proxy in proxies:
            yield ':'.join(proxy)
def freeProxyTen():
    """Cloud proxy (云代理) http://www.ip3366.net/free/ — free proxy list.

    Yields "ip:port" strings; returns an empty list when the page is down.
    """
    pages = ['http://www.ip3366.net/free/']
    pattern = re.compile(
        r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\s\S]*?<td>(\d+)</td>')
    for page in pages:
        body = EasyHttp.get(page, timeout=10)
        if not body:
            Log.w('http://www.ip3366.com无效')
            return []
        for pair in pattern.findall(body):
            yield ':'.join(pair)
def freeProxyNinth():
    """Coder proxy (码农代理) https://proxy.coderbusy.com/ — classical CN list.

    :return: generator of "ip:port" strings; empty list if the page is down.
    """
    urls = ['https://proxy.coderbusy.com/classical/country/cn.aspx?page=1']
    for url in urls:
        r = EasyHttp.get(url, timeout=10)
        if not r:
            Log.w('http://proxy.coderbusy.com无效')
            return []
        # FIX: the pattern was a plain string, so '\d' was an invalid escape
        # sequence (SyntaxWarning on modern Python). Regexes belong in raw
        # strings, as the rest of this file does.
        proxies = re.findall(
            r'data-ip="(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})".+?>(\d+)</td>', r)
        for proxy in proxies:
            yield ':'.join(proxy)
def freeProxySixth():
    """Xun proxy (讯代理) http://www.xdaili.cn/ — free-IP JSON API.

    :return: generator of "ip:port" strings; empty list (or simply no items)
             when the API is unreachable or returns no rows.
    """
    url = 'http://www.xdaili.cn/ipagent/freeip/getFreeIps?page=1&rows=10'
    try:
        res = EasyHttp.get(url, timeout=10).json()
        if not res or not res['RESULT'] or not res['RESULT']['rows']:
            # FIX: the original logged the wrong site (goubanjia.com) here.
            Log.w('http://www.xdaili.cn无效')
            return []
        for row in res['RESULT']['rows']:
            yield '{}:{}'.format(row['ip'], row['port'])
    except Exception:
        # Best-effort free source: a network/JSON/schema failure just means
        # "no proxies from this provider right now" — end the generator.
        # (Original bound the exception to an unused name and swallowed it.)
        pass
def freeProxyWallSecond():
    """Scrape https://proxy-list.org/english/index.php pages 1-9.

    Each entry is a base64-encoded "ip:port" embedded in a Proxy('...') call;
    decoded strings are yielded one by one. Returns an empty list as soon as
    a page cannot be fetched.
    """
    import base64
    page_urls = ['https://proxy-list.org/english/index.php?p=%s' % n
                 for n in range(1, 10)]
    for page in page_urls:
        body = EasyHttp.get(page, timeout=10)
        if not body:
            Log.w('http://proxy-list.org/english/index.php无效')
            return []
        for encoded in re.findall(r"Proxy\('(.*?)'\)", body):
            yield base64.b64decode(encoded).decode()
def freeProxyEight():
    """Mimi proxy http://www.mimiip.com — high-anon / anon / transparent lists.

    :return: generator of "ip:port" strings; empty list if a page is down.
    """
    url_gngao = ['http://www.mimiip.com/gngao/%s' % n for n in range(1, 2)]  # domestic high-anonymity
    url_gnpu = ['http://www.mimiip.com/gnpu/%s' % n for n in range(1, 2)]  # domestic anonymous
    url_gntou = ['http://www.mimiip.com/gntou/%s' % n for n in range(1, 2)]  # domestic transparent
    url_list = url_gngao + url_gnpu + url_gntou
    for url in url_list:
        r = EasyHttp.get(url, timeout=10)
        if not r:
            Log.w('http://www.mimiip.com无效')
            return []
        # FIX: the original gap '[\w\W].*' (one any-char, then a GREEDY .*)
        # ate to the last '<td>digits</td>' on the line, pairing each IP with
        # the wrong port. Use the non-greedy '[\s\S]*?' gap that the sibling
        # scrapers in this file use.
        proxies = re.findall(
            r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\s\S]*?<td>(\d+)</td>',
            r)
        for proxy in proxies:
            yield ':'.join(proxy)
def freeProxyEleven():
    """IP sea (IP海) http://www.iphai.com — four free proxy list pages.

    Yields "ip:port" strings; returns an empty list as soon as one of the
    pages cannot be fetched.
    """
    pages = [
        'http://www.iphai.com/free/ng',
        'http://www.iphai.com/free/np',
        'http://www.iphai.com/free/wg',
        'http://www.iphai.com/free/wp',
    ]
    # Cells on this site may carry surrounding whitespace, hence the \s*?.
    cell_pattern = re.compile(
        r'<td>\s*?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\s*?</td>[\s\S]*?<td>\s*?(\d+)\s*?</td>')
    for page in pages:
        body = EasyHttp.get(page, timeout=10)
        if not body:
            Log.w('http://www.iphai.com无效')
            return []
        for pair in cell_pattern.findall(body):
            yield ':'.join(pair)
def parse(self):
    """Fetch the city-code page and return (chinese_name, CODE) tuple pairs.

    Extracts every "汉字名|UPPERCASE" pair from the page body.
    NOTE(review): the second positional argument 10 is presumably a timeout
    (sibling calls pass timeout=10 by keyword) — confirm EasyHttp.get's
    positional signature.
    """
    body = EasyHttp.get(CityCode.__url, 10)
    pairs = re.findall(r'([\u4e00-\u9fa5]+)\|([A-Z]+)', body)
    return pairs