Code example #1
import time
import http.cookiejar as cookielib

import requests
from bs4 import BeautifulSoup

import ipGenerate  # project helper that supplies free proxy IPs (defined elsewhere)


def main():
    # log_in()  # alternative login helper defined elsewhere; together with the
    # exit(0)   # exit(0) that followed it, it stopped main() here, so both are commented out
    time1 = time.time()
    mafengwoSession = requests.session()
    # The default session.cookies has no save() method, so replace it with an
    # LWPCookieJar from cookielib; a jar built from that class can call save() directly.
    mafengwoSession.cookies = cookielib.LWPCookieJar(filename="myCookies.txt")
    ip_list = ipGenerate.get_ip_list()
    proxies = ipGenerate.get_random_ip(ip_list)
    url = "http://jwcas.cczu.edu.cn/login"
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.116 Safari/537.36'
    }

    postData = {
        'username': '******',
        'password': '******',
        'lt': " ",
        'execution': " ",
        '_eventId': 'submit',
        'submit': 'LOGIN',
    }
    responseRes = mafengwoSession.post(url, data=postData, headers=headers)
    print(f"statusCode = {responseRes.status_code}")
    # If the file comes out empty, the session-only cookies were discarded;
    # save(ignore_discard=True) keeps them as well.
    mafengwoSession.cookies.save()
    exit(0)  # debug stop: remove this line to run the proxy request below
    print(proxies)
    strhtml = requests.get(url, headers=headers, proxies=proxies, timeout=3)
    strhtml.encoding = 'utf-8'
    soup = BeautifulSoup(strhtml.text, 'lxml')
    print(soup.contents)
    print(time.time() - time1)


if __name__ == "__main__":
    main()
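The point of swapping in LWPCookieJar is that the login cookies outlive the process: a later run can load myCookies.txt instead of posting the credentials again. A minimal sketch of that reload step, assuming the same file name as above; the load flags and the follow-up GET are illustrative, not part of the original examples:

import http.cookiejar as cookielib

import requests

# Rebuild a session from the cookies saved by main() above.
session = requests.session()
session.cookies = cookielib.LWPCookieJar(filename="myCookies.txt")
# ignore_discard=True also restores session-only cookies, which is what a
# CAS login typically issues; without it the jar may come back empty.
session.cookies.load(ignore_discard=True)

response = session.get("http://jwcas.cczu.edu.cn/login")
print(response.status_code)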
Code example #2
import time
import http.cookiejar as cookielib

import requests
from bs4 import BeautifulSoup

import ipGenerate  # project helper that supplies free proxy IPs (defined elsewhere)


def main():
    time1 = time.time()
    mafengwoSession = requests.session()
    # The default session.cookies has no save() method, so replace it with an
    # LWPCookieJar from cookielib; a jar built from that class can call save() directly.
    mafengwoSession.cookies = cookielib.LWPCookieJar(filename="myCookies.txt")
    ip_list = ipGenerate.get_ip_list()
    proxies = ipGenerate.get_random_ip(ip_list)
    url = "http://jwcas.cczu.edu.cn/login"
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.116 Safari/537.36'
    }
    postData = {
        'username': '******',
        'password': '******',
        # 'lt' and 'execution' are one-time tokens copied from the CAS login
        # page; they expire quickly, so scrape fresh values (see the sketch below).
        'lt': 'LT-1824810-DWoUNvB73WNmIpfKGbBrZSrPKfCbZ4',
        'execution': 'e1s2',
        '_eventId': 'submit',
        'submit': 'LOGIN',
    }
    responseRes = mafengwoSession.post(url, data=postData, headers=headers)
    print(f"statusCode = {responseRes.status_code}")
    mafengwoSession.cookies.save()
    exit(0)  # debug stop: remove this line to run the proxy request below
    print(proxies)
    strhtml = requests.get(url, headers=headers, proxies=proxies, timeout=3)
    strhtml.encoding = 'utf-8'
    soup = BeautifulSoup(strhtml.text, 'lxml')
    print(soup.contents)
    print(time.time() - time1)


if __name__ == "__main__":
    main()
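The hard-coded 'lt' and 'execution' values above are one-time tokens that the CAS login page embeds in its form, so they go stale almost immediately. Assuming the page exposes them as hidden input fields named lt and execution, which is the usual Apereo CAS layout (verify against the real form; this helper is a sketch, not part of the original code), they can be scraped with the same session right before the POST:

import requests
from bs4 import BeautifulSoup

LOGIN_URL = "http://jwcas.cczu.edu.cn/login"


def fetch_login_tokens(session, headers):
    # The page that renders the login form also carries the one-time
    # 'lt' ticket and the 'execution' id as hidden <input> fields.
    page = session.get(LOGIN_URL, headers=headers, timeout=3)
    soup = BeautifulSoup(page.text, 'lxml')
    lt = soup.find('input', attrs={'name': 'lt'})
    execution = soup.find('input', attrs={'name': 'execution'})
    # Some CAS versions drop the 'lt' field entirely, so guard against None.
    return (lt['value'] if lt else '',
            execution['value'] if execution else '')

With that helper, postData['lt'], postData['execution'] = fetch_login_tokens(mafengwoSession, headers) replaces the hard-coded values before the mafengwoSession.post(...) call.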
Code example #3
import time

import requests
from bs4 import BeautifulSoup

import ipGenerate  # project helper that supplies free proxy IPs (defined elsewhere)


def main():
    time1 = time.time()
    ip_list = ipGenerate.get_ip_list()
    proxies = ipGenerate.get_random_ip(ip_list)
    url = 'https://blog.csdn.net/P_e_n_g___/article/details/104201201'
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.116 Safari/537.36',
        'Accept': 'text/html;q=0.9,*/*;q=0.8',
        'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
        'Accept-Encoding': 'gzip',
        'Connection': 'close',
        'Referer': None  # a None value makes requests drop this header instead of sending it
    }
    print(proxies)
    strhtml = requests.get(url, headers=headers, proxies=proxies, timeout=3)
    strhtml.encoding = 'utf-8'
    soup = BeautifulSoup(strhtml.text, 'lxml')
    print(soup.contents)
    print(time.time() - time1)


if __name__ == "__main__":
    main()
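ipGenerate.get_ip_list() and get_random_ip() are project helpers that are not shown in these examples. The only thing requests cares about is the shape of the proxies argument: a dict mapping a URL scheme to a proxy URL. A stand-in under that assumption (the helper name matches the examples, but the body is illustrative and the address is a documentation IP):

import random


def get_random_ip(ip_list):
    # ip_list is assumed to hold "host:port" strings collected elsewhere;
    # requests expects {'scheme': 'scheme://host:port'} for its proxies kwarg.
    proxy = random.choice(ip_list)
    return {
        'http': 'http://' + proxy,
        'https': 'https://' + proxy,
    }


# Example of the resulting shape:
# get_random_ip(['203.0.113.5:8080'])
#   -> {'http': 'http://203.0.113.5:8080', 'https': 'https://203.0.113.5:8080'}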