Example #1
def get_page(url):
    # Fetch a page with a randomized User-Agent; return the body on HTTP 200.
    headers = {'User-Agent': myAgents.get_agent()}
    try:
        req = requests.get(url, headers=headers, timeout=6)
        time.sleep(random.randint(2, 4))  # throttle between requests
        if req.status_code == 200:
            return req.text
    except Exception as e:
        print('ydl connection failed:', e)
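All of these snippets rely on a myAgents.get_agent() (or bare get_agent()) helper that is not shown on this page, and they assume the usual module-level imports (requests, time, random, urllib.request, http.cookiejar). A minimal sketch of such a helper, assuming it only needs to return a random User-Agent string from a fixed pool (the pool below is purely illustrative):

import random

# Hypothetical stand-in for the myAgents module used by these examples:
# it only has to return a User-Agent string for the request headers.
USER_AGENTS = [
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.0 Safari/605.1.15',
]

def get_agent():
    return random.choice(USER_AGENTS)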
Example #2
def with_handler(url):
    # Open the URL through an opener that carries a cookie jar and an (empty) HTTP proxy.
    headers = {'User-Agent': myAgents.get_agent()}
    proxy = {'http': ''}
    cookiejar = http.cookiejar.CookieJar()
    proxy_handler = urllib.request.ProxyHandler(proxies=proxy)
    cookies_handler = urllib.request.HTTPCookieProcessor(cookiejar)
    opener = urllib.request.build_opener(cookies_handler, proxy_handler)
    req = urllib.request.Request(url, headers=headers)
    res = opener.open(req, timeout=6.66)
    return res

def get_page(url):
    # Fetch the free-proxy listing with a spoofed Referer; return the body on HTTP 200.
    headers = {'User-Agent': myAgents.get_agent(),
               'Referer': 'http://www.yun-daili.com/free.asp?stype=1&page=1'}
    try:
        req = requests.get(url, headers=headers, timeout=6)
        time.sleep(random.randint(2, 4))  # throttle between requests
        if req.status_code == 200:
            return req.text
    except Exception as e:
        print('xiaoma connection failed:', e)
Example #4
def get_page(url):
    # Fetch a page with urllib; return the decoded body on HTTP 200.
    headers = {'User-Agent': myAgents.get_agent()}
    req = urllib.request.Request(url, headers=headers)
    time.sleep(random.randint(2, 4))  # throttle before the request
    try:
        res = urllib.request.urlopen(req, timeout=6)
        if res.status == 200:
            return res.read().decode()
    except Exception as e:
        print('xcdl connection failed:', e)
Example #5
def request_baidu(ip_tup):
    # Check a candidate proxy (host, port) by requesting Baidu through it.
    url = 'https://www.baidu.com/'
    proxies = {
        'https': 'https://' + ip_tup[0] + ':' + ip_tup[1],
        'http': 'http://' + ip_tup[0] + ':' + ip_tup[1]
    }
    headers = {'User-Agent': get_agent()}
    try:
        time.sleep(random.randint(2, 3))  # throttle between checks
        with requests.get(url, headers=headers, timeout=6.66,
                          proxies=proxies) as request:
            return request.text
    except Exception as e:
        print('*****', e)
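A hypothetical usage sketch, assuming ip_tup is a (host, port) pair of strings as the string concatenation above implies (the address below is taken from the proxy list in Example #7, purely for illustration):

# Hypothetical call: request_baidu returns the HTML on success, None on failure.
html = request_baidu(('182.253.71.227', '80'))
if html:
    print('proxy answered, %d bytes of HTML' % len(html))
else:
    print('proxy did not answer')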
Example #6
def get_page(url):
    # Fetch a page from www.66ip.cn; the site serves GBK-encoded HTML.
    headers = {
        'User-Agent': myAgents.get_agent(),
        'Host': 'www.66ip.cn',
        'Referer': 'http://www.66ip.cn/4.html'
    }
    req = urllib.request.Request(url, headers=headers)
    time.sleep(random.randint(1, 3))  # throttle before the request
    try:
        res = urllib.request.urlopen(req, timeout=5)
        if res.status == 200:
            return res.read().decode('gbk')
    except Exception as e:
        print('ip66 connection failed:', e)
Example #7
def get_page(url):
    # Fetch a page through a randomly chosen HTTP proxy from a hard-coded list.
    proxies_list = [
        '182.253.71.227:80', '122.72.18.34:80', '177.136.252.7:3128',
        '182.41.3.223:8118', '182.42.244.205:808'
    ]
    headers = {'User-Agent': myAgents.get_agent()}
    proxy = {'http': random.choice(proxies_list)}
    proxies_handler = urllib.request.ProxyHandler(proxy)
    opener = urllib.request.build_opener(proxies_handler)
    req = urllib.request.Request(url, headers=headers)
    time.sleep(random.randint(2, 4))  # throttle before the request
    try:
        res = opener.open(req, timeout=6)
        if res.status == 200:
            return res.read().decode()
    except Exception as e:
        print('xcdl connection failed:', e)