예제 #1
0
    def login(self):
        """Authenticate against LeetCode, reusing cached cookies when possible.

        Loads pickled browser cookies from COOKIE_PATH if the file exists;
        otherwise opens a Chrome window, waits for the user to complete the
        login form manually, then caches the resulting cookies. Finally
        copies the cookies into this object's requests session and mirrors
        the csrftoken cookie into the ``x-csrftoken`` header.
        """
        browser_cookies = {}
        if os.path.isfile(COOKIE_PATH):
            # Reuse previously saved cookies instead of logging in again.
            with open(COOKIE_PATH, 'rb') as f:
                browser_cookies = pickle.load(f)
        else:
            print("😎 Starting browser login..., please fill the login form")
            browser = webdriver.Chrome(executable_path="./vendor/chromedriver")
            try:
                # browser login
                login_url = "https://leetcode.com/accounts/login"
                browser.get(login_url)

                # Block until the URL no longer contains "login", i.e. the
                # user finished the form. NOTE(review): 24 * 60 * 3600 is a
                # ~60-day timeout; 24 * 3600 (one day) was probably intended.
                WebDriverWait(browser, 24 * 60 * 3600).until(
                    lambda driver: driver.current_url.find("login") < 0
                )
                browser_cookies = browser.get_cookies()
                # Cache the cookies so future runs skip the browser step.
                with open(COOKIE_PATH, 'wb') as f:
                    pickle.dump(browser_cookies, f)
                print("🎉 Login successfully")

            except Exception as e:
                print(f"🤔 Login Failed: {e}, please try again")
                exit()

        cookies = RequestsCookieJar()
        for item in browser_cookies:
            cookies.set(item['name'], item['value'])

            # LeetCode requires the CSRF token echoed back as a header.
            if item['name'] == 'csrftoken':
                self.session.headers.update({
                    "x-csrftoken": item['value']
                })

        self.session.cookies.update(cookies)
예제 #2
0
def get_page_params(url, cookies):
    """Fetch an ASP.NET page and extract its hidden form-state fields.

    :param url: path appended to the site base URL ``base_url_bl``
    :param cookies: mapping of cookie name -> value for the session
    :return: dict with ``__VIEWSTATE``, ``__EVENTVALIDATION`` and
             ``__VIEWSTATEGENERATOR`` values required for postbacks
    """
    request_url = base_url_bl + url

    # Copy the session cookies into a jar requests can send.
    # (Removed the dead `cookies_str` accumulator the original built
    # but never used.)
    jar = RequestsCookieJar()
    for key, value in cookies.items():
        jar.set(key, value)

    header = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
    }
    response = requests.get(request_url, cookies=jar, headers=header)

    # The hidden inputs hold the server-side page state; an IndexError here
    # means the page layout changed or the session expired.
    html = etree.HTML(response.text)
    parms = {}
    parms['__VIEWSTATE'] = html.xpath("//input[@id='__VIEWSTATE']/@value")[0]
    parms['__EVENTVALIDATION'] = html.xpath(
        "//input[@id='__EVENTVALIDATION']/@value")[0]
    parms['__VIEWSTATEGENERATOR'] = html.xpath(
        "//input[@id='__VIEWSTATEGENERATOR']/@value")[0]
    return parms
예제 #3
0
파일: test_auth.py 프로젝트: pllim/pyvo
def test_cookie_jar_auth():
    """A cookie jar attached to the session credentials is sent with queries."""
    jar = RequestsCookieJar()
    jar.set('TEST_COOKIE', 'BADCOOKIE')

    session = AuthSession()
    session.credentials.set_cookie_jar(jar)

    service = pyvo.dal.TAPService('http://example.com/tap', session)
    service.run_async("SELECT * FROM ivoa.obscore")
예제 #4
0
def check_login(cookie):
    """Return True when the cookie mapping is still a valid logged-in session.

    Requests the account charge page and checks for the "帐户余额"
    (account balance) marker that is only rendered for authenticated users.

    :param cookie: mapping of cookie name -> value
    :return: True when logged in, False otherwise (including on errors)
    """
    try:
        url = base_url_bl + "/User/Charge.aspx"

        # Copy the cookie mapping into a jar. (Removed the dead
        # `cookies_str` accumulator and stale commented-out cookie dumps.)
        jar = RequestsCookieJar()
        for key, value in cookie.items():
            jar.set(key, value)

        header = {
            "User-Agent":
            "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
        }

        response = requests.get(url, cookies=jar, headers=header)
        # "帐户余额" (account balance) only appears on the logged-in page.
        if response.text.find("帐户余额") != -1:
            return True
        return False
    except Exception:
        # Narrowed from a bare except so Ctrl-C is not swallowed.
        logger.info('%s url ' % (traceback.format_exc()))
    return False
예제 #5
0
def go_refund_tuikuan(params, cookies, url):
    """POST a refund ("退款") application form and report the outcome.

    :param params: form payload for the refund request
    :param cookies: mapping of cookie name -> value for the session
    :param url: path appended to ``base_url_bl``
    :return: dict {'code': 'ok'|'error', 'message': str}
    """
    # Bug fix: `ret` was assigned inside the try, so an exception on the
    # very first statement made the handler hit a NameError. Initialise it
    # up front.
    ret = {'code': "error", 'message': ''}
    try:
        request_url = base_url_bl + url

        # Copy the session cookies into a jar. (Removed the dead
        # `cookies_str` accumulator the original never used.)
        jar = RequestsCookieJar()
        for key, value in cookies.items():
            jar.set(key, value)

        header = {
            "User-Agent":
            "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
        }
        response = requests.post(request_url,
                                 cookies=jar,
                                 data=params,
                                 headers=header)
        if response.status_code == 200:
            # "申请退款成功" == "refund application succeeded"
            if response.text.find("申请退款成功") != -1:
                ret['code'] = 'ok'
                ret['message'] = ''
        print(response.text)
    except Exception:
        # Narrowed from a bare except so Ctrl-C is not swallowed.
        print(traceback.format_exc())
        logger.info('%s url ' % (traceback.format_exc()))
        ret['code'] = 'error'
        ret['message'] = '申请失败'
    return ret
예제 #6
0
def cnblogs():
    """Skip the cnblogs.com login form by injecting saved session cookies."""
    url = 'https://account.cnblogs.com/signin?returnUrl=https:%2F%2Fwww.cnblogs.com%2F'
    header = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36"
    }

    s = requests.session()
    r = s.get(url, headers=header)

    # Hard-coded authentication cookies captured from a logged-in browser
    # session; these expire and must be refreshed manually.
    c = RequestsCookieJar()
    c.set(
        ".Cnblogs.AspNetCore.Cookies",
        "CfDJ8L-rpLgFVEJMgssCVvNUAjvl6g97AtVxWdY0r6GWTszKAnZdDoV-OYeqazuwiOg0_JqcfAIGRb1ie"
        "DRjzQCf0e9u8r5qW_yhpttyzMKlJj3Qn3tCuh0j55sfPANvBwYfAtxL8_HEAs1h-FAHXD-B_3bdfI4ysaAgQtecJyoakssP8ppwM80F"
        "JwORaYJl5KgD5qXQkP6NqDMzDyT4zcQJMQSBD9hZwK-IiVvHe2mvIhnHk4ZNk_Eitytl7Ihv9UXl14-vyo7yGEo22qlIa3Z9A5"
        "zvaTpsda0ZNduwcOpfPQQ9aCcOE4uDEvU3wB2u5_u-xWI6J7dm5Ur55KR-s5In46aCt3Pjop2NfSXypn1Y0x5SQeYDHwzo5roM9qGnUlu_FMo"
        "QjZbrAehKjpDWM25vro8WKCrcZnqkkKfsnbN1W6aiwmAsSOigGD7ZexG3WY107g1fppEylW9LIQ0d71YViT3fk0zdF2cqHu41l3Iof2I0BbskW9ES"
        "-SnvHpA8jaQQJIyZW_F1xlpunvAAcc6FN1iuaxnvXB556dCSIhYP2sfdo6UrFSMMoVLoFuOvOw4fJQ"
    )
    c.set(
        ".CNBlogsCookie",
        "87FB9B609C0F14DFFE6705DD38C9AD6586B37AF63D0FF8F0DFD069863AD9644606C51336961878C0C0FE124C5D4C03211B053"
        "AF276233AAE4E22FEC98A2069DD6465705331D14EA8EF6C2482C87D9F734997048F")

    s.cookies.update(c)
    print('完整cookies', s.cookies)

    # Fetch a page that requires login to confirm the cookies still work.
    url2 = "https://www.cnblogs.com/97xiaolai/"
    res = s.get(url2)
    return res.text
예제 #7
0
    def where_cookies_in_domain(self, cookies):
        """Return a jar containing only the cookies whose domain belongs to
        this source's allowed cookie domains."""
        allowed = self.source.cookie_domains
        jar = RequestsCookieJar()
        for cookie in cookies:
            if cookie.domain not in allowed:
                continue
            jar.set(cookie.name, cookie.value,
                    domain=cookie.domain, path=cookie.path)
        return jar
예제 #8
0
def download_page_index(url, **kargs):
    """POST the admission score-index API for one school and return the
    'drop_box' payload from the JSON response.

    :param url: API endpoint to POST to
    :param kargs: requires 'sch_id' (school id, str) and 'page_num'
        (progress label)
    :return: the ``data.drop_box`` portion of the JSON response
    """
    # Context manager closes the handle — the original leaked the file
    # object from open(...) passed straight into pickle.load.
    with open("cookies.pkl", "rb") as f:
        cookies = pickle.load(f)
    newHeaders = {
        'Accept': 'application/json',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'Authorization':
        '4063523 fadinKtTMMPz/uDnv27CnTgDMcoFK9i8+pZKlqlmf8IXYXnNuD7cBlB9G3oJIOXk',
        'Channel': 'www.wmzy.com pc',
        'Connection': 'keep-alive',
        'Content-Length': '221',
        'Content-Type': 'application/json',
        'Host': 'www.wmzy.com',
        'Origin': 'https://www.wmzy.com',
        'Referer': 'https://www.wmzy.com/web/school?type=2&sch_id=' + kargs['sch_id'],
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'same-origin',
        'User-Agent':
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36',
        'x-requested-with': 'XMLHttpRequest'
    }
    cookie_jar = RequestsCookieJar()
    # batch selects first/second admission batch; diploma_id selects
    # bachelor vs. junior-college programmes.
    payload = {
        "sch_id": kargs['sch_id'],
        "stu_province_id": "130000000000",
        "enroll_unit_id": kargs['sch_id'],
        "enroll_adm_type": 2
    }
    for c in cookies:
        cookie_jar.set(c['name'], c['value'], domain="wmzy.com")
    page = requests.post(url,
                         cookies=cookie_jar,
                         headers=newHeaders,
                         json=payload)
    soup = BeautifulSoup(page.text, 'html.parser', from_encoding='utf-8')
    site_json = json.loads(soup.text)
    result = site_json['data']['drop_box']
    print('进度::', kargs['page_num'])
    return result
예제 #9
0
    def web_login(self):
        """Log into lagou.com through the browser, wait for manual captcha
        verification, then store the session cookies on ``self.cookies``
        as a RequestsCookieJar."""
        # Open the site
        self.browser.get('https://lagou.com')
        time.sleep(5)

        # Open the login dialog
        loginbutton = self.browser.find_element_by_xpath('//*[@id="lg_tbar"]/div/div[2]/ul/li[3]/a')
        loginbutton.click()

        # Fill in the credentials form.
        self.browser.find_element_by_xpath(
            '/html/body/div[2]/div[1]/div/div/div[2]/div[3]/div[1]/div/div[1]/form/div[1]/div/input').send_keys(
            self.username)
        self.browser.find_element_by_xpath(
            '/html/body/div[2]/div[1]/div/div/div[2]/div[3]/div[1]/div/div[1]/form/div[2]/div/input').send_keys(
            self.password)
        # time.sleep(3)
        # self.browser.find_element_by_xpath('/html/body/div[2]/div[1]/div/div/div[2]/div[3]/div[2]/div[2]/div[2]').click()

        # time.sleep(20)
        # Wait for the user to solve the image captcha by hand, then continue.
        input("确认验证完毕")
        seleuium_cookies = self.browser.get_cookies()

        # Convert selenium's cookie dicts into a requests cookie jar.
        cookies = RequestsCookieJar()
        for cookie in seleuium_cookies:
            cookies.set(cookie['name'], cookie['value'])
        self.cookies = cookies
예제 #10
0
def login():
    """Build a requests session preloaded with saved Baidu-index cookies.

    :return: a session carrying the stored login cookies
    """
    with open("cookie.txt", "r") as f:
        saved_cookies = json.load(f)

    session = requests.session()
    session.verify = False
    session.headers = {
        "Accept": "application/json, text/plain, */*",
        "Accept-Encoding": "gzip, deflate, br",
        "Accept-Language": "zh-CN,zh;q=0.9",
        "Host": "index.baidu.com",
        "Referer": "https://index.baidu.com/v2/main/index.html",
        "User-Agent": UserAgent().random
    }

    # Transfer every stored cookie into the session's jar.
    cookie_jar = RequestsCookieJar()
    for entry in saved_cookies:
        cookie_jar.set(entry["name"], entry["value"])
    session.cookies = cookie_jar
    return session
예제 #11
0
def get_score(cookies):
    """Fetch the total study score and today's per-rule scores from xuexi.cn.

    :param cookies: iterable of selenium-style cookie dicts with
        'name' and 'value' keys
    :return: (total, each) — overall score and the list of today's
        scores for rule ids 1, 2, 9, 1002 and 1003
    :raises: re-raises any failure after printing a banner
    """
    try:
        jar = RequestsCookieJar()
        for cookie in cookies:
            jar.set(cookie['name'], cookie['value'])
        total = requests.get("https://pc-api.xuexi.cn/open/api/score/get",
                             cookies=jar).content.decode("utf8")
        # Bug fix: json.loads() lost its `encoding` parameter in Python 3.9
        # (TypeError). The payload is already a decoded str, so the argument
        # was a no-op anyway.
        total = int(json.loads(total)["data"]["score"])
        each = requests.get(
            "https://pc-api.xuexi.cn/open/api/score/today/queryrate",
            cookies=jar).content.decode("utf8")
        each = json.loads(each)["data"]["dayScoreDtos"]
        each = [
            int(i["currentScore"]) for i in each
            if i["ruleId"] in [1, 2, 9, 1002, 1003]
        ]
        return total, each
    except Exception:
        # Narrowed from a bare except; the error banner is kept and the
        # exception still propagates.
        print("=" * 120)
        print("get_video_links获取失败")
        print("=" * 120)
        raise
예제 #12
0
def download_page_university(url, page_num):
    """POST one page of the university-list API and return the short-info rows.

    :param url: full API endpoint
    :param page_num: 1-based page index to request
    :return: the ``data.sch_short_info`` portion of the JSON response
    """
    # Context manager closes the handle — the original leaked the file
    # object from open(...) passed straight into pickle.load.
    with open("cookies.pkl", "rb") as f:
        cookies = pickle.load(f)
    print(cookies)
    newHeaders = {
        'Accept': 'application/json',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'Channel': 'www.wmzy.com pc',
        'Connection': 'keep-alive',
        'Content-Length': '37',
        'Content-Type': 'application/json',
        'Host': 'www.wmzy.com',
        'Origin': 'https://www.wmzy.com',
        'Referer': 'https://www.wmzy.com/web/school/list',
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'same-origin',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36 x-requested-with: XMLHttpRequest}'}
    cookie_jar = RequestsCookieJar()
    payload = {"filter": {}, "page": page_num, "page_size": 20}
    for c in cookies:
        cookie_jar.set(c['name'], c['value'], domain="wmzy.com")
    page = requests.post(url, cookies=cookie_jar, headers=newHeaders, json=payload)
    soup = BeautifulSoup(page.text, 'html.parser', from_encoding='utf-8')
    site_json = json.loads(soup.text)
    result = site_json['data']['sch_short_info']
    print('进度::', page_num)
    return result
예제 #13
0
def download_page_university_detail(url, **kargs):
    """GET a university's detail page and return its parsed school info.

    :param url: page URL
    :param kargs: requires 'sch_id' (str) and 'page_num' (progress label)
    :return: the ``props.pageProps.schoolInfor`` blob embedded in the page
    :raises ValueError: when the embedded '{"props' JSON blob is missing
    """
    # Context manager closes the handle (the original leaked it).
    with open("cookies.pkl", "rb") as f:
        cookies = pickle.load(f)
    newHeaders = {
        'Accept': 'application/json',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'Authorization': '4147430 Fqag82U0gf2JqIN8bJfzptWQLX4zX1hwqpao4VJRFjLhwYGpQrLe9W862e0R62+6',
        'Channel': 'www.wmzy.com pc',
        'Connection': 'keep-alive',
        'Content-Type': 'application/json',
        'Host': 'www.wmzy.com',
        'Referer': 'https://www.wmzy.com/web/school?sch_id=' + kargs['sch_id'] + '&tab=0',
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'same-origin',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36',
        'x-requested-with': 'XMLHttpRequest'}
    cookie_jar = RequestsCookieJar()
    payload = {
        "sch_id": kargs['sch_id']}
    for c in cookies:
        cookie_jar.set(c['name'], c['value'], domain="wmzy.com")
    # Bug fix: newHeaders was built but never passed to requests.get, so
    # the Authorization/UA headers were silently dropped.
    page = requests.get(url, cookies=cookie_jar, headers=newHeaders, json=payload)
    soup = BeautifulSoup(page.text, 'html.parser', from_encoding='utf-8')
    # The data is embedded in the page as a Next.js-style JSON blob.
    soup_done_index = soup.text.find('{"props')
    if soup_done_index != -1:
        soup_done = soup.text[soup_done_index:]
        site_json = json.loads(soup_done)
        result = site_json['props']['pageProps']['schoolInfor']
        # Only log every 100th page to keep the output readable.
        if int(kargs['page_num']) % 100 == 0:
            print('进度::', kargs['page_num'])
        return result
    else:
        print(soup.getText())
        raise ValueError()
예제 #14
0
def download_major_detial(url, **kargs):
    """GET the major-detail API and return its 'data' payload.

    :param url: API endpoint
    :param kargs: requires 'mid' (major id) and 'page_num' (progress label)
    :return: the ``data`` portion of the JSON response
    """
    # Context manager closes the handle (the original leaked it).
    with open("cookies.pkl", "rb") as f:
        cookies = pickle.load(f)
    newHeaders = {
        'Accept': 'application/json',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'Authorization': '4147430 Fqag82U0gf2JqIN8bJfzptWQLX4zX1hwqpao4VJRFjLhwYGpQrLe9W862e0R62+6',
        'Channel': 'www.wmzy.com pc',
        'Connection': 'keep-alive',
        'Content-Type': 'application/json',
        'Host': 'www.wmzy.com',
        'Referer': 'https://www.wmzy.com/',
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'same-origin',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36',
        'x-requested-with': 'XMLHttpRequest'}
    cookie_jar = RequestsCookieJar()
    payload = {"major_id": kargs['mid']}
    for c in cookies:
        cookie_jar.set(c['name'], c['value'], domain="wmzy.com")
    page = requests.get(url, cookies=cookie_jar, headers=newHeaders, json=payload)
    soup = BeautifulSoup(page.text, 'html.parser', from_encoding='utf-8')
    site_json = json.loads(soup.text)
    result = site_json['data']
    print('进度::', kargs['page_num'])
    return result
예제 #15
0
def zhihu_cookies_name():
    """Load the saved Zhihu login cookies into a RequestsCookieJar."""
    with open("zhihucookies.json", 'r') as handle:
        saved = json.load(handle)

    rjar = RequestsCookieJar()
    for entry in saved:
        rjar.set(entry["name"], entry["value"])
    return rjar
예제 #16
0
def read_cookie():
    """Read pixiv cookies from pixiv_cookies.txt into a cookie jar."""
    cookie_jar = RequestsCookieJar()
    with open("pixiv_cookies.txt", "r") as handle:
        for entry in json.load(handle):
            cookie_jar.set(entry['name'], entry['value'])
    return cookie_jar
예제 #17
0
 def load_cookies(self, file_path):
     """Replace the session's cookies with those stored as JSON in ``file_path``."""
     with open(file_path) as handle:
         stored = json.load(handle)
     jar = RequestsCookieJar()
     for entry in stored:
         jar.set(entry['name'], entry['value'])
     self.session.cookies = jar
예제 #18
0
    def login(self, id_, password_):
        """Log into the Ibaraki University portal via Shibboleth and return
        the session cookies.

        :param id_: portal user name
        :param password_: portal password
        :return: RequestsCookieJar holding the authenticated session cookies
        """
        # Removed the pointless `id_ = id_` / `password_ = password_`
        # self-assignments from the original.
        request_url = 'https://idc.ibaraki.ac.jp/portal/Login.aspx'

        self.driver.get(request_url)
        # Jump from the portal page to the Shibboleth login form.
        login_button = self.driver.find_element_by_xpath(
            '//*[@id="ctl22_btnShibLogin"]')
        login_button.click()
        id_input = self.driver.find_element_by_xpath(
            '//*[@id="userNameInput"]')
        password_input = self.driver.find_element_by_xpath(
            '//*[@id="passwordInput"]')
        id_input.clear()
        password_input.clear()
        id_input.send_keys(id_)
        password_input.send_keys(password_)
        login_button = self.driver.find_element_by_xpath(
            '//*[@id="submitButton"]')
        login_button.click()

        # Convert selenium's cookie dicts into a requests cookie jar.
        cookies = self.driver.get_cookies()
        jar = RequestsCookieJar()
        for cookie in cookies:
            jar.set(cookie['name'], cookie['value'])

        return jar
예제 #19
0
def getCookiesFromTxt():
    """Build a RequestsCookieJar from the JSON cookie dump in cookies.txt."""
    with open("cookies.txt", "r") as handle:
        entries = json.load(handle)

    jar = RequestsCookieJar()
    for entry in entries:
        jar.set(entry['name'], entry['value'])
    return jar
예제 #20
0
def login():
    """Demonstrate two ways of sending saved cookies with a request:
    a RequestsCookieJar object and a plain dict."""

    s.headers = {
        "User-Agent":
        "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1"
    }

    # Option 1: load the cookies into a RequestsCookieJar object.
    jar = RequestsCookieJar()
    with open("cookies.txt", "r") as fp:
        cookies = json.load(fp)
        for cookie in cookies:
            jar.set(cookie['name'], cookie['value'])

    # Baidu profile page — example usage of the jar, kept for reference.
    #r = s.get("https://www.baidu.com/p/setting/profile/basic", cookies=jar)

    # Option 2: cookies can also be passed as a plain name -> value dict.
    cookies_dict = dict()
    with open("cookies.txt", "r") as fp:
        cookies = json.load(fp)
        for cookie in cookies:
            cookies_dict[cookie['name']] = cookie['value']
    r = s.get("https://om.qq.com/omIndex/getIndexChangeDetail?relogin=1",
              cookies=cookies_dict)

    r.encoding = "utf-8"
    print(r.text)
예제 #21
0
    def get(cls, server, endpoint, params = None):
        """Issue an authenticated GET to a Gerrit REST endpoint.

        :param server: dict describing the server (URL, Port, and optional
            AuthType / Username / Password / Proxy entries)
        :param endpoint: REST path, with or without a leading '/'
        :param params: optional query parameters
        :return: the result of http_get()
        """
        endpoint = endpoint.lstrip('/')
        logger.debug('REQUEST: endpoint = %s' % endpoint)

        # Bug fix: dict.has_key() was removed in Python 3; the `in`
        # operator is equivalent and also works on Python 2.
        if 'AuthType' in server:
            if server['AuthType'] == AUTH_TYPE_HTTP_BASIC:
                endpoint = GERRIT_AUTH_PREFIX + endpoint
                auth = HTTPBasicAuth(server['Username'], server['Password'])
                cookies = None
            elif server['AuthType'] == AUTH_TYPE_HTTP_DIGEST:
                endpoint = GERRIT_AUTH_PREFIX + endpoint
                auth = HTTPDigestAuth(server['Username'], server['Password'])
                cookies = None
            elif server['AuthType'] == AUTH_TYPE_HTTP_COOKIE:
                # Cookie auth: the credential pair travels as a cookie.
                endpoint = GERRIT_AUTH_PREFIX + endpoint
                auth = None
                cookies = RequestsCookieJar()
                cookies.set(server['Username'], server['Password'],
                            domain = server['URL'], path = '/')
        else:
            auth = None
            cookies = None

        headers = {'Content-Type': 'application/json'}
        if 'Proxy' in server:
            proxies = {'http': server['Proxy'], 'https': server['Proxy']}
        else:
            proxies = None

        cmd = 'https://%s:%s/%s' % (server['URL'], server['Port'], endpoint,)

        return http_get(cmd, headers = headers, proxies = proxies,
                        auth = auth, cookies = cookies, params = params)
예제 #22
0
 def config_requests(self):
     """Sync cookies from the selenium driver into the requests session ``se``.

     As a side effect the cookies are persisted to Danbooru/cookies.json so
     later runs can reuse them. (Removed large blocks of dead commented-out
     code and deduplicated the path literal.)
     """
     cookies = driver.get_cookies()
     cookie_path = C_Path + "\\Danbooru" + r'\cookies.json'
     with open(cookie_path, 'w') as fp:
         json.dump(cookies, fp)
     # Reload from disk and convert the selenium dicts into a cookie jar.
     jar = RequestsCookieJar()
     with open(cookie_path, 'r') as fp:
         cookies = json.load(fp)
         for cookie in cookies:
             jar.set(cookie['name'], cookie['value'])
     se.cookies = jar  # configure the shared session
예제 #23
0
파일: cardfetch.py 프로젝트: Fan24/juice
def cookies_transfer(driver):
    """Copy every cookie from the selenium ``driver`` into the module-level
    requests session ``s``.

    The original allocated a fresh RequestsCookieJar and re-merged it into
    the session once per cookie; one jar and one merge suffice.
    """
    cookie_jar = RequestsCookieJar()
    for cookie in driver.get_cookies():
        cookie_jar.set(cookie['name'],
                       cookie['value'],
                       domain=cookie['domain'])
    s.cookies.update(cookie_jar)
예제 #24
0
def load_cookie(s, file):
    """Load JSON-serialized cookies from ``file`` into session ``s``."""
    jar = RequestsCookieJar()
    with open(file, 'r') as handle:
        for entry in json.load(handle):
            jar.set(entry['name'], entry['value'])
        s.cookies = jar
예제 #25
0
def get_diandian(cookies):
    """Fetch "点点通" point totals and per-task progress from xuexi.cn.

    :param cookies: iterable of selenium-style cookie dicts ('name'/'value')
    :return: dict mapping Chinese labels to point totals and targets
    :raises: re-raises any failure after printing a banner
    """
    try:
        jar = RequestsCookieJar()
        for cookie in cookies:
            jar.set(cookie['name'], cookie['value'])
        total = requests.get("https://pc-proxy-api.xuexi.cn/api/point/get", cookies=jar).content.decode("utf8")
        # Bug fix: json.loads() lost its `encoding` parameter in Python 3.9
        # (TypeError); the payload is already a decoded str.
        total = int(json.loads(total)["data"]["pointString"])
        mydian = {'点点通': total}
        each = requests.get("https://pc-proxy-api.xuexi.cn/api/point/today/queryrate", cookies=jar).content.decode(
            "utf8")

        todaydian = int(json.loads(each)['data']['dayEarnPoint'])
        mydian.update({'今日点点通': todaydian})

        others = json.loads(each)["data"]["taskProgressDtos"]
        # Earned = completed levels * points-per-level + leftover progress
        # within the current level.
        mydian.update({i['taskName']: int(i['completedCount']) * int(i['target'])+int(i['progress']) % int(i['target'])
                       for i in others})
        mydian.update({i['taskName']+'目标': int(i['maxCompletedCount']) * int(i['target']) for i in others})
        return mydian
    except Exception:
        # Narrowed from a bare except; the banner is kept and the
        # exception still propagates.
        print("=" * 120)
        print("get_diandian获取失败")
        print("=" * 120)
        raise
예제 #26
0
def get_cookies():
    """Parse ./cookies.txt (JSON) and return its cookies in a jar."""
    jar = RequestsCookieJar()
    with open("./cookies.txt", "r") as handle:
        for entry in json.loads(handle.read()):
            jar.set(entry['name'], entry['value'])
    return jar
예제 #27
0
def get_score(cookies):
    """Fetch the total study score, today's score, and per-rule breakdowns.

    :param cookies: iterable of selenium-style cookie dicts ('name'/'value')
    :return: dict with totals, per-rule scores/targets and user metadata
    :raises: re-raises any failure after printing a banner
    """
    try:
        jar = RequestsCookieJar()
        for cookie in cookies:
            jar.set(cookie['name'], cookie['value'])
        total = requests.get("https://pc-api.xuexi.cn/open/api/score/get", cookies=jar).content.decode("utf8")
        # Bug fix: json.loads() lost its `encoding` parameter in Python 3.9
        # (TypeError); the payload is already a decoded str.
        total = int(json.loads(total)["data"]["score"])
        myscores = {'总积分': total}
        each = requests.get("https://pc-api.xuexi.cn/open/api/score/today/queryrate", cookies=jar).content.decode(
            "utf8")
        # Carry user id and blacklist status through unchanged.
        others = json.loads(each)["data"]
        myscores.update({i: others[i] for i in ['userId', 'inBlackList', 'blackListTip']})

        each = others['dayScoreDtos']
        todayscore = 0
        for i in each:
            todayscore += int(i["currentScore"])
        myscores.update({'今日积分': todayscore})

        myscores.update({i['name']: int(i["currentScore"]) for i in each})
        myscores.update({i['name']+'目标': int(i["dayMaxScore"]) for i in each})
        return myscores
    except Exception:
        # Narrowed from a bare except; the banner is kept and the
        # exception still propagates.
        print("=" * 120)
        print("get_score获取失败")
        print("=" * 120)
        raise
예제 #28
0
 def get_cookies(self):
     """Load pixiv cookies from the ``pixiv_cookie`` JSON file into a jar.

     :return: RequestsCookieJar populated with the stored cookies
     """
     jar = RequestsCookieJar()
     # Bug fix: the file was opened in append mode ("a"), which is not
     # readable, so json.load() raised io.UnsupportedOperation.
     with open("pixiv_cookie", "r") as fp:
         cookies = json.load(fp)
         for cookie in cookies:
             jar.set(cookie['name'], cookie['value'])
     return jar
예제 #29
0
def Get_login_cookies():
    """Restore the login cookies saved in cookie.json as a RequestsCookieJar."""
    jar = RequestsCookieJar()
    with open('cookie.json', 'r') as cookie_txt:
        for entry in json.loads(cookie_txt.read()):
            jar.set(entry['name'], entry['value'])
    return jar
예제 #30
0
def download_page_school_score(url, **kargs):
    """POST the school score API and return the enrollment-unit list.

    :param url: API endpoint
    :param kargs: requires 'sch_id', 'wenli' and 'page_num'
    :return: the ``data.eu_list`` portion of the JSON response
    """
    # Context manager closes the handle (the original leaked it).
    with open("cookies.pkl", "rb") as f:
        cookies = pickle.load(f)
    newHeaders = {
        'Accept': 'application/json',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'Authorization': '4147430 Fqag82U0gf2JqIN8bJfzphzL4nzAkXx2MoLm6pmfQCbXY/W0B+4AK/teTvAbIXnF',
        'Channel': 'www.wmzy.com pc',
        'Connection': 'keep-alive',
        'Content-Length': '221',
        'Content-Type': 'application/json',
        'Host': 'www.wmzy.com',
        'Origin': 'https://www.wmzy.com',
        'Referer': 'https://www.wmzy.com/web/school?type=2&sch_id=' + kargs['sch_id'],
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'same-origin',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36',
        'x-requested-with': 'XMLHttpRequest'}
    cookie_jar = RequestsCookieJar()
    payload = {"page": 1, "page_size": 10, "sch_id": kargs['sch_id'],
               "enroll_unit_id": kargs['sch_id'], "enroll_category": 1, "enroll_mode": 1,
               "diploma_id": 1,
               "stu_province_id": "130000000000", "wenli": kargs['wenli'], "only_admission": True}
    for c in cookies:
        cookie_jar.set(c['name'], c['value'], domain="wmzy.com")
    page = requests.post(url, cookies=cookie_jar, headers=newHeaders, json=payload)
    soup = BeautifulSoup(page.text, 'html.parser', from_encoding='utf-8')
    site_json = json.loads(soup.text)
    result = site_json['data']['eu_list']
    print('进度::', kargs['page_num'])
    return result
예제 #31
0
def server():
    """Create a Nuxeo client for the test server, tagging every request
    with a 'device' cookie."""
    jar = RequestsCookieJar()
    jar.set('device', 'python-client')

    host = os.environ.get('NXDRIVE_TEST_NUXEO_URL', 'http://localhost:8080/nuxeo')
    nuxeo = Nuxeo(host=host,
                  auth=('Administrator', 'Administrator'),
                  cookies=jar)
    nuxeo.client.set(schemas=['dublincore'])
    return nuxeo
예제 #32
0
def test_set_cookiejar(httpbin):
    """Set cookies locally and test that they are received remotely."""
    # Build a hand-made jar and sanity-check it before attaching it.
    jar = RequestsCookieJar()
    jar.set('field', 'value')
    assert jar.get('field') == 'value'

    browser = mechanicalsoup.Browser()
    browser.set_cookiejar(jar)

    response = browser.get(httpbin + "/cookies")
    assert response.json() == {'cookies': {'field': 'value'}}
예제 #33
0
파일: utils.py 프로젝트: OptimalBPM/of
def call_api(_url, _session_id, _data, _timeout=None, _print_log=None, _verify_SSL=True):
    """POST ``_data`` as JSON to ``_url``, authenticating via a session cookie.

    :param _url: endpoint to POST to
    :param _session_id: value for the secure ``session_id`` cookie
    :param _data: JSON-serialisable request body
    :param _timeout: optional requests timeout in seconds
    :param _print_log: when truthy, print errors instead of writing to the log
    :param _verify_SSL: verify the server's TLS certificate
    :return: the decoded JSON response dict, or None on an empty/invalid body
    """

    def do_log(_error, _category=EC_NOTIFICATION, _severity=SEV_DEBUG):
        # Route errors either to stdout (_print_log) or the structured log,
        # returning the message so callers can embed it in an exception.
        if _print_log:
            print(_error)
        else:
            write_to_log(_data, _category=_category, _severity=_severity)
        return _error

    # Authentication is carried by a single secure session cookie.
    _cookie_jar = RequestsCookieJar()
    _cookie_jar.set(name="session_id", value=_session_id, secure=True)

    _headers = {'content-type': 'application/json'}

    _response = requests.post(_url, data=json.dumps(_data), headers=_headers, timeout=_timeout,
                              verify=_verify_SSL, cookies=_cookie_jar)

    _response_dict = None

    if _response.status_code != 200:
        # Log the status, then convert the HTTP error into an exception
        # carrying the logged message.
        do_log("Response code :" + str(_response.status_code))
        try:
            _response.raise_for_status()
        except Exception as e:
            raise Exception(do_log("Error in call_api:" + str(e), _category=EC_COMMUNICATION, _severity=SEV_ERROR))
    else:
        if _response.content:
            # A 200 with a non-JSON body is logged but not fatal.
            try:
                _response_dict = _response.json()
            except Exception as e:
                do_log("response.content didn't contain JSON data", _category=EC_COMMUNICATION, _severity=SEV_ERROR)
                _response_dict = None

    if _response_dict is not None:
        return _response_dict
    else:
        do_log("Got an empty response from server:" + str(_response.content), _category=EC_COMMUNICATION,
                     _severity=SEV_ERROR)

        return None
예제 #34
0
    def __merge_cookies(self, request_cookies):
        """Combine per-request cookies with this object's stored cookies.

        The stored cookies take effect on top of ``request_cookies``:
        jar entries are merged via set_cookie, dict entries via set.

        :param request_cookies: a RequestsCookieJar, or an iterable of
            (name, value) pairs
        :return: a RequestsCookieJar holding the merged cookies
        """
        if type(request_cookies) is RequestsCookieJar:
            cookies = request_cookies
        else:
            cookies = RequestsCookieJar()
            # NOTE(review): iterating a plain dict here yields keys only, so
            # this unpacking works for an iterable of (name, value) pairs
            # but would raise for a dict — confirm the expected input type.
            for name, value in request_cookies:
                cookies.set(name, value)

        if type(self.cookies) is RequestsCookieJar:
            # Preserve domain/path metadata by copying whole cookie objects.
            for cookie in iter(self.cookies):
                cookies.set_cookie(cookie)
        elif type(self.cookies) is dict:
            for name, value in self.cookies.items():
                cookies.set(name, value)

        return cookies
예제 #35
0
    def test_is_authenticated_false_if_authenticated_request_fails(self):
        """A user carrying an invalid session cookie must not authenticate."""
        jar = RequestsCookieJar()
        jar.set('gross_cookie', 'yum', domain='senscritique.com')
        bad_user = User(email="", password="", username="", session_cookies=jar)

        self.assertFalse(AuthService().is_authenticated(user=bad_user))
        self.assertFalse(AuthService().is_authenticated(user=user))