def cookies(self):
    """Rebuild a RequestsCookieJar from the stored ``self['cookies']`` mapping.

    Expired cookies are dropped before the jar is returned.
    """
    jar = RequestsCookieJar()
    for name, cookie_dict in self['cookies'].items():
        # Work on a copy: the original popped 'value' from the stored dict
        # in place, so a second call raised KeyError('value').
        attrs = dict(cookie_dict)
        value = attrs.pop('value')
        jar.set_cookie(create_cookie(name, value, **attrs))
    jar.clear_expired_cookies()
    return jar
def server():
    """Build a Nuxeo client for the test server, authenticated as Administrator."""
    jar = RequestsCookieJar()
    jar.set('device', 'python-client')
    host = os.environ.get('NXDRIVE_TEST_NUXEO_URL', 'http://localhost:8080/nuxeo')
    nuxeo = Nuxeo(host=host,
                  auth=('Administrator', 'Administrator'),
                  cookies=jar)
    nuxeo.client.set(schemas=['dublincore'])
    return nuxeo
def cookies(self):
    """The cookies sent in the Request, as a dictionary."""
    cookies = RequestsCookieJar()
    cookie_header = self.headers.get("cookie", "")
    bc = SimpleCookie(cookie_header)
    for key, morsel in bc.items():
        # Store the morsel's string value, not the Morsel object itself,
        # so get_dict() yields plain str values (the original stored
        # Morsel instances as cookie values).
        cookies[key] = morsel.value
    return cookies.get_dict()
def test_set_cookiejar(httpbin):
    """Set cookies locally and test that they are received remotely."""
    # Build a phony cookie jar and attach it to a fresh browser session.
    jar = RequestsCookieJar()
    jar.set('field', 'value')
    assert jar.get('field') == 'value'
    browser = mechanicalsoup.Browser()
    browser.set_cookiejar(jar)
    response = browser.get(httpbin + "/cookies")
    expected = {'cookies': {'field': 'value'}}
    assert response.json() == expected
def download_page(url):
    """Fetch *url* using cookies previously pickled to d:/cookies.pkl and print the page text."""
    # Use a context manager so the pickle file is closed deterministically
    # (the original leaked the handle from pickle.load(open(...))).
    with open("d:/cookies.pkl", "rb") as fh:
        cookies = pickle.load(fh)
    print(cookies)
    cookie_jar = RequestsCookieJar()
    for c in cookies:
        cookie_jar.set(c['name'], c['value'], domain="jd.com")
    page = requests.get(url, cookies=cookie_jar)
    soup = BeautifulSoup(page.text, 'html.parser', from_encoding='utf-8')
    print(soup.getText())
    print(page)
    print('爬取成功')
def test_available_and_waited(self, req, includes):
    """some available and wanted"""
    jar = RequestsCookieJar()
    jar.set("a", 2)
    req["cookies"] = ["a"]
    session_mock = Mock(spec=requests.Session, cookies=jar)
    expected = {"a": 2}
    assert _read_expected_cookies(session_mock, req, includes) == expected
def __init__(self, email, password=None, cookie=None):
    """Store credentials: either a password or a raw cookie-header string."""
    self.email = email
    self.cookies = RequestsCookieJar()
    if password is not None:
        # Password-based login.
        self.password = password
        self.cookie = False
    else:
        # Cookie-based login: parse the raw header into individual cookies.
        parsed = SimpleCookie()
        parsed.load(cookie)
        for key, morsel in parsed.items():
            self.cookies[key] = morsel.value
        self.cookie = True
def getHTTPClient(self):
    """Return the cached HTTP session, creating it with settings.cj cookies on first use."""
    if self._session:
        return self._session
    jar = RequestsCookieJar()
    for cookie in settings.cj:
        jar.set_cookie(cookie)
    session = requests.Session()
    session.cookies = jar
    self._session = session
    return self._session
def restore_session(self):
    """Restore saved session cookies for self.username from the system keyring."""
    jar = RequestsCookieJar()
    try:
        raw = keyring.get_password(KEYRING_SESSION_NAME, self.username)
        if raw is None:
            # Session is not saved
            return
        data = json.loads(raw)
    except (KeyringError, JSONDecodeError) as exc:
        raise PypiKeyringError(f'{exc}')
    jar.update(data)
    self.session.cookies = jar
def get_score(cookies):
    """Fetch the xuexi.cn score summary for the given browser cookies.

    :param cookies: iterable of dicts with at least 'name' and 'value' keys
        (browser-exported cookie list).
    :return: tuple ``(userId, total, scores, userName)`` where *scores* is a
        dict of per-task score buckets plus today's total.
    """
    chat_id = None
    # The thread name encodes the chat id as a prefix before the
    # "开始学xi" marker (runtime string, left untranslated).
    th_name = threading.current_thread().name
    if "开始学xi" in th_name:
        chat_id = th_name[:th_name.index("开始学xi")]
    requests.adapters.DEFAULT_RETRIES = 5
    jar = RequestsCookieJar()
    for cookie in cookies:
        jar.set(cookie['name'], cookie['value'])
    total_json = requests.get("https://pc-api.xuexi.cn/open/api/score/get",
                              cookies=jar,
                              headers={'Cache-Control': 'no-cache'}).content.decode("utf8")
    if not json.loads(total_json)["data"]:
        # Cookie expired: notify, drop the saved cookie, and abort.
        globalvar.pushprint("cookie过期,请重新登录", chat_id)
        if chat_id:
            remove_cookie(chat_id)
        # NOTE(review): bare `raise` with no active exception raises
        # RuntimeError at runtime -- presumably intended as a hard abort.
        raise
    total = int(json.loads(total_json)["data"]["score"])
    #userId = json.loads(total_json)["data"]["userId"]
    user_info = requests.get("https://pc-api.xuexi.cn/open/api/user/info",
                             cookies=jar,
                             headers={'Cache-Control': 'no-cache'}).content.decode("utf8")
    userId = json.loads(user_info)["data"]["uid"]
    userName = json.loads(user_info)["data"]["nick"]
    # score_json = requests.get("https://pc-api.xuexi.cn/open/api/score/today/queryrate", cookies=jar,
    #                           headers={'Cache-Control': 'no-cache'}).content.decode("utf8")
    # today_json = requests.get("https://pc-api.xuexi.cn/open/api/score/today/query", cookies=jar,
    #                           headers={'Cache-Control': 'no-cache'}).content.decode("utf8")
    today = 0
    # today = int(json.loads(today_json)["data"]["score"])
    score_json = requests.get("https://pc-proxy-api.xuexi.cn/api/score/days/listScoreProgress?sence=score&deviceType=2",
                              cookies=jar,
                              headers={'Cache-Control': 'no-cache'}).content.decode("utf8")
    dayScoreDtos = json.loads(score_json)["data"]
    today = dayScoreDtos["totalScore"]
    # Task codes of interest, in output order.
    rule_list = [1, 2, 9, 1002, 1003, 6, 5, 4]
    score_list = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]  # length ten
    for i in dayScoreDtos["taskProgress"]:
        for j in range(len(rule_list)):
            if str(rule_list[j]) in i["taskCode"]:
                # Split the score evenly across matching task codes.
                score_list[j] = int(
                    int(i["currentScore"])/len(i["taskCode"]))
    # Buckets: articles read, videos watched, login, article time,
    # video time, daily quiz, weekly quiz, special quiz.
    scores = {}
    scores["article_num"] = score_list[0]   # articles read
    scores["video_num"] = score_list[1]     # videos watched
    scores["login"] = score_list[2]         # login
    scores["article_time"] = score_list[3]  # article reading time
    scores["video_time"] = score_list[4]    # video watching time
    scores["daily"] = score_list[5]         # daily quiz
    scores["weekly"] = score_list[6]        # weekly quiz
    scores["zhuanxiang"] = score_list[7]    # special quiz
    scores["today"] = today                 # today's total score
    return userId, total, scores, userName
def test_format_cookies(self, req, includes):
    """cookies in request should be formatted"""
    jar = RequestsCookieJar()
    jar.set("a", 2)
    # The cookie name is given as a template and resolved via variables.
    req["cookies"] = ["{cookiename}"]
    includes["variables"]["cookiename"] = "a"
    session_mock = Mock(spec=requests.Session, cookies=jar)
    assert _read_expected_cookies(session_mock, req, includes) == {"a": 2}
def read_cookie():
    """Load cookies saved in pixiv_cookies.txt into a RequestsCookieJar.

    Fetching and reading are kept separate so one login can be saved
    once and then reused directly, saving time.
    """
    jar = RequestsCookieJar()
    with open('pixiv_cookies.txt', 'r') as fp:
        saved = json.load(fp)
    for entry in saved:
        jar.set(entry['name'], entry['value'])
    return jar
def get_logion_session(login_info):
    """Return a requests session for lofter.com authenticated via a saved auth cookie.

    :param login_info: dict with keys "phone num", "password" and
        "login auth" (the pre-captured LOFTER-PHONE-LOGIN-AUTH value).
    :return: the authenticated ``requests`` session.

    NOTE(review): the password-based login request is commented out below;
    authentication relies entirely on the "login auth" cookie.
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/79.0.3945.88 Safari/537.36",
        "Host": "www.lofter.com",
        # "Referer": "https://www.lofter.com/login?urschecked=true"
    }
    session = requests.session()
    payload = {"urschecked": "true"}
    session.headers = headers
    # Request the login page first (sets up server-side state).
    logion_page_url = "http://www.lofter.com/login"
    login_page_response = session.get(logion_page_url, params=payload)
    # write_html(login_page_response.content.decode("utf-8"))
    print("登录页状态码 {}".format(login_page_response.status_code))
    # Update request headers and cookies for the login step.
    headers["Referer"] = "http://www.lofter.com/login"
    session.headers = headers
    logion_payload = {
        'phone': login_info["phone num"],
        'passport': login_info["password"],
        'clientType': '0',
        'deviceType': '3',
        'Target': 'www.lofter.com',
        'callback': 'loft.m.tellogin.g.jsonplogin'
    }
    logion_url = "https://www.lofter.com/lpt/login.do"
    # Login request (disabled -- cookie-based auth below is used instead).
    # login_response = sesseion.get(logion_url, params=logion_payload)
    # print(login_response.content.decode("utf-8"))
    # Home-page parameters.
    homepage_url = "http://www.lofter.com/"
    cookies = RequestsCookieJar()
    # cookies.set("LOFTER-PHONE-LOGINNUM" , "18975585675")
    # cookies.set("LOFTER-PHONE-LOGIN-FLAG","1")
    cookies.set("LOFTER-PHONE-LOGIN-AUTH", login_info["login auth"])
    session.cookies = cookies
    # Request the home page to verify the session works.
    response = session.get(homepage_url)
    write_html(response.content.decode("utf-8"))
    print("主页请求状态码 {}".format(response.status_code))
    return session
def __init_session():
    """(Re)create the module-level session with default headers and saved cookies."""
    global sess
    sess = requests.Session()
    # Default request headers.
    sess.headers = headers
    # Install previously captured cookies, if any.
    jar = RequestsCookieJar()
    if cookie_list:
        for entry in cookie_list:
            jar.set(entry['name'], entry['value'], domain=entry['domain'])
    sess.cookies = jar
def __init__(self):
    """Preset the site region cookie and enable the SOCKS proxy."""
    jar = RequestsCookieJar()
    # Region selection.
    jar.set('qb_last_site', 'us')
    self.COOKIEJAR = jar
    self.USE['PROXY'] = True
    # self.USE['PROXY_TYPE'] = 'GENERAL'  # the other option besides SOCKS
    self.USE['PROXY_TYPE'] = 'SOCKS'  # the other option is GENERAL
def test_no_overwrite_cookie(self, req, includes):
    """cant redefine a cookie from previous request"""
    jar = RequestsCookieJar()
    jar.set("a", 2)
    # "a" is both requested by name and redefined inline -> conflict.
    req["cookies"] = ["a", {"a": "sjidfsd"}]
    session_mock = Mock(spec=requests.Session, cookies=jar)
    with pytest.raises(exceptions.DuplicateCookieError):
        _read_expected_cookies(session_mock, req, includes)
def get_score(cookies):
    """Fetch the xuexi.cn score summary for the given browser cookies.

    :param cookies: iterable of dicts with 'name' and 'value' keys.
    :return: tuple ``(userId, total, scores)`` where *scores* maps task
        buckets to points plus today's total.
    :raises: re-raises any underlying failure after printing a banner.
    """
    try:
        requests.adapters.DEFAULT_RETRIES = 5
        jar = RequestsCookieJar()
        for cookie in cookies:
            jar.set(cookie['name'], cookie['value'])
        total_json = requests.get("https://pc-api.xuexi.cn/open/api/score/get", cookies=jar, headers={
            'Cache-Control': 'no-cache'
        }).content.decode("utf8")
        total = int(json.loads(total_json)["data"]["score"])
        userId = json.loads(total_json)["data"]["userId"]
        score_json = requests.get(
            "https://pc-api.xuexi.cn/open/api/score/today/queryrate",
            cookies=jar, headers={
                'Cache-Control': 'no-cache'
            }).content.decode("utf8")
        today_json = requests.get(
            "https://pc-api.xuexi.cn/open/api/score/today/query",
            cookies=jar, headers={
                'Cache-Control': 'no-cache'
            }).content.decode("utf8")
        today = 0
        today = int(json.loads(today_json)["data"]["score"])
        dayScoreDtos = json.loads(score_json)["data"]["dayScoreDtos"]
        # Rule ids of interest, in output order.
        rule_list = [1, 2, 9, 1002, 1003, 6, 5, 4]
        score_list = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]  # length ten
        for i in dayScoreDtos:
            for j in range(len(rule_list)):
                if i["ruleId"] == rule_list[j]:
                    score_list[j] = int(i["currentScore"])
        # Buckets: articles read, videos watched, login, article time,
        # video time, daily quiz, weekly quiz, special quiz.
        scores = {}
        scores["article_num"] = score_list[0]   # articles read
        scores["video_num"] = score_list[1]     # videos watched
        scores["login"] = score_list[2]         # login
        scores["article_time"] = score_list[3]  # article reading time
        scores["video_time"] = score_list[4]    # video watching time
        scores["daily"] = score_list[5]         # daily quiz
        scores["weekly"] = score_list[6]        # weekly quiz
        scores["zhuanxiang"] = score_list[7]    # special quiz
        scores["today"] = today                 # today's total score
        return userId, total, scores
    except:
        # NOTE(review): bare except -- prints a banner, then re-raises
        # so callers still see the original failure.
        print("=" * 60)
        print("get_score 获取失败")
        print("=" * 60)
        raise
def __init__(self):
    """Enable cookies and preset the LVR_UserData site-preferences cookie."""
    self.USE_COOKIES = True
    jar = RequestsCookieJar()
    # Encodes city (cty), settlement currency (curr), display currency
    # (vcurr) and language (lang).
    jar.set('LVR_UserData', 'cty=HK&curr=EUR&vcurr=HKD&flgcurr=1&lang=EN&Ver=4')
    # Other currency variants were tried here previously, e.g.:
    # jar.set('LVR_UserData','cty=HK&curr=EUR&vcurr=HKD&flgcurr=1&lang=EN&Ver=4')
    self.COOKIEJAR = jar
def get_code(self):
    """Prime a cookie jar from the first URL's response, then walk the auth redirect chain.

    :return: tuple of the three follow-up responses
        ``(response3, response4, response5)``.
    """
    cookie_jar = RequestsCookieJar()
    response1 = requests.get(url=self.url_first, headers=self.headers)
    resd = requests.utils.dict_from_cookiejar(response1.cookies)
    print(resd)
    # Copy every returned cookie. The original indexed only the first key
    # via `[key for key in resd][0]`, which crashed with IndexError when
    # the response set no cookies and silently dropped any extras.
    for name, value in resd.items():
        cookie_jar.set(name, value)
    response3 = self.session.get(self.before_url, headers=self.headers, cookies=cookie_jar)
    response4 = self.session.get(self.kuaishou_redrect_url, headers=self.headers)
    response5 = self.session.get(self.tree_authod_token, headers=self.headers)
    return response3, response4, response5
def cookies(self):
    """The cookies sent in the Request, as a dictionary."""
    if self._cookies is None:
        parsed = SimpleCookie(self.headers.get("Cookie", ""))
        jar = RequestsCookieJar()
        for name, morsel in parsed.items():
            jar[name] = morsel.value
        # Cache the plain-dict form so repeat accesses skip re-parsing.
        self._cookies = jar.get_dict()
    return self._cookies
def requests_load_cookies():
    """Load cookies from cookies.txt into a jar and fetch a page with them."""
    session = requests.Session()
    jar = RequestsCookieJar()
    with open('cookies.txt', 'r') as f:
        cookies = json.load(f)
    for cookie in cookies:
        jar.set(cookie['name'], cookie['value'])
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
                             'Chrome/80.0.3987.87 Safari/537.36'}
    # The original copied the same cookies a second time into
    # session.cookies; that duplicate loop is redundant because the jar
    # passed per-request is merged with the session's on send.
    response = session.get('', headers=headers, cookies=jar)
    print(BeautifulSoup(response.text, 'lxml'))
def LaunchCFPRO(url, th, t):
    """Spawn *th* attack threads against *url*, each running until *t* seconds from now."""
    deadline = datetime.datetime.now() + datetime.timedelta(seconds=int(t))
    base_session = requests.Session()
    scraper = cloudscraper.create_scraper(sess=base_session)
    jar = RequestsCookieJar()
    jar.set(cookieJAR['name'], cookieJAR['value'])
    scraper.cookies = jar
    for _ in range(int(th)):
        try:
            worker = threading.Thread(target=AttackCFPRO,
                                      args=(url, deadline, scraper))
            worker.start()
        except:
            # Best-effort: a failed thread start is simply skipped.
            pass
def load_cookies(self, file_path):
    """Read a JSON cookie list from *file_path* and install it on self.session."""
    with open(file_path) as f:
        saved = json.load(f)
    # The session manipulates cookies through a RequestsCookieJar.
    jar = RequestsCookieJar()
    for entry in saved:
        jar.set(entry['name'], entry['value'])
    self.session.cookies = jar
def setCookie(self):
    """Fetch zhihu.com/explore using a hard-coded raw browser cookie string.

    The raw "name = value; ..." string is parsed by stripping spaces and
    splitting on ';', then each pair is loaded into a RequestsCookieJar.
    NOTE(review): the cookie values are stale captures and will have
    expired; this is demo/scratch code.
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36"
    }
    # Raw cookie header copied from a browser; the embedded spaces around
    # '=' and '|' are removed by the replace() below.
    cookie = 'zap = 402708f6 - eb50 - 429b - 89ce - 029b677920ab;__DAYU_PP = FQnMUV3UjZMmqiEYejqZ6219b3cb366f;d_c0 = "AIBu1LttiQ2PTs79Mv4LpVV5fEWA1miHbEk=|1525340979";z_c0 = "2|1:0|10:1528512969|4:z_c0|92:Mi4xbXJ4M0FBQUFBQUFBZ0c3VXUyMkpEU1lBQUFCZ0FsVk55WThJWEFCZXE0Nm1fMEk3WUpyMmRTZEg2NGQzQzlSWDZB|961e2e8dc85de63413e07bec5f11afb4f46e8604762d3e2a6d2c987b74b960f4";tgw_l7_route = 23ddf1acd85bb5988efef95d7382daa0;q_c1 = a6e26fbab832456a8c942c55183a7961 | 1536466860000 | 1517718423000;_xsrf = 931ce46c204a7ccbcdefd3bc406526e4;__utma = 51854390.2143007096.1536466863.1536466863.1536466863.1;__utmb = 51854390.0.10.1536466863;__utmc = 51854390;__utmz = 51854390.1536466863.1.1.utmcsr = (direct) | utmccn = (direct) | utmcmd = (none);__utmv = 51854390.100 - 1 | 2 = registration_date = 20140901 = 1 ^ 3 = entry_date = 20140901 = 1;_xsrf = NF7ybUMiE0WXik7Pk5Xj7oMjZq4Vj3xD'
    jar = RequestsCookieJar()
    for coo in cookie.replace(" ", "").split(";"):
        print(coo)
        # Split on the first '=' only: cookie values may contain '='.
        key, value = coo.split("=", 1)
        print(key, value)
        jar.set(key, value)
    rep = requests.get("https://www.zhihu.com/explore", cookies=jar, headers=headers)
    print(rep.text)
def update_cookie_jar(cookie_jar: RequestsCookieJar, cookies_data: PlainCookieJarType) -> None:
    """Copy a nested domain -> path -> name mapping of cookie dicts into *cookie_jar*.

    Each leaf dict may carry 'value', 'expires' and 'secure' attributes;
    a falsy 'expires' is normalised to None (session cookie).
    """
    for domain, paths in cookies_data.items():
        for path, names in paths.items():
            for name, attrs in names.items():
                expiry = attrs.get("expires") or None
                cookie_jar.set(
                    domain=domain,
                    path=path,
                    name=name,
                    value=attrs.get("value", ""),
                    expires=expiry,
                    secure=attrs.get("secure"),
                )
def json_post(url, data, cookies=None):
    """POST *data* as JSON via treq, normalising *cookies* into a RequestsCookieJar.

    :param cookies: an existing RequestsCookieJar (used as-is) or a plain
        name -> value mapping (copied into a fresh jar); defaults to empty.
    """
    if cookies is None:
        cookies = {}
    if isinstance(cookies, RequestsCookieJar):
        jar = cookies
    else:
        jar = RequestsCookieJar()
        for k, v in cookies.items():
            # Must be set(), not set_cookie(): set_cookie() expects a
            # Cookie object, so the original raised on any plain mapping.
            jar.set(k, v)
    return treq.post(url,
                     json.dumps(data, cls=LiteAuthJsonEncoder).encode('ascii'),
                     headers={b'Content-Type': [b'application/json']},
                     cookies=jar,
                     timeout=LDAP_API_TIMEOUT)
def __init__(self):
    """Enable cookies and preset matchesfashion.com locale/currency cookies."""
    self.USE_COOKIES = True
    jar = RequestsCookieJar()
    site = 'www.matchesfashion.com'
    # jar.set('country', 'CHN')
    jar.set('country', 'HKG', domain=site, path='/')
    jar.set('gender', 'mens', domain=site, path='/')
    jar.set('language', 'en', domain=site, path='/')
    # jar.set('loggedIn', 'false', domain=site, path='/')
    jar.set('billingCurrency', 'HKD', domain=site, path='/')
    jar.set('indicativeCurrency', 'CNY', domain=site, path='/')
    self.COOKIEJAR = jar
def setupBrowserWithCookie(cookie):
    """Build a StatefulBrowser whose PHPSESSID is the client cookie, lightly edited."""
    import mechanicalsoup
    from requests.cookies import RequestsCookieJar
    # Same edit as before: reverse the first ten characters, keep the rest.
    edited = cookie[9::-1] + cookie[10:]
    jar = RequestsCookieJar()
    jar.set("PHPSESSID", edited)
    # Set up a browser carrying the edited client cookie.
    browser = mechanicalsoup.StatefulBrowser()
    browser.set_cookiejar(jar)
    return browser
def __init__(self, session, schedule_reference: str):
    """
    Initializes a wrapper over the LEA schedule request page.

    :param session: The Omnivox session used to authenticate the LEA requests.
    :param schedule_reference: The schedule request reference.
    """
    # Snapshot the session's cookies into a jar owned by this wrapper.
    jar = RequestsCookieJar()
    jar.update(session.cookies)
    self.cookies = jar
    self.schedule_reference = schedule_reference
    # Lazily-populated caches.
    self._semesters: Tuple[OmnivoxSemester] = None
    self._schedule_cache: Dict[str, OmnivoxSemesterSchedule] = dict()
    self._schedule_request_url: str = None
def check_cookies(cookies_setting):
    """Parse a "name=value&name2=value2" string into a RequestsCookieJar.

    Returns {} when the setting is empty; on a malformed string, logs an
    error and returns an empty jar instead.
    """
    if not cookies_setting:
        return {}
    jar = RequestsCookieJar()
    try:
        for pair in cookies_setting.split("&"):
            name, value = pair.split("=", 1)
            jar.set(name.strip(), value.strip())
    except Exception as e:
        log.error(
            "Cookies setting wrong, please specify a cookies string like vul=box&free=buff\n{}"
            .format(str(e)))
        return RequestsCookieJar()
    return jar
def get_content(url):
    """GET *url* with browser-like headers and the site cookie; return the UTF-8 page text."""
    send_headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",
        "Connection": "keep-alive",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "Accept-Language": "zh-CN,zh;q=0.8"
    }
    cookie_jar = RequestsCookieJar()
    cookie_jar.set("mttp", "9740fe449238", domain="www.yikedy.co")
    # headers= must be a keyword argument: the second positional argument
    # of requests.get is `params`, so the original sent these dict entries
    # as URL query parameters instead of HTTP headers.
    response = requests.get(url, headers=send_headers, cookies=cookie_jar)
    response.encoding = 'utf-8'
    return response.text
def call_api(_url, _session_id, _data, _timeout=None, _print_log=None, _verify_SSL=True):
    """POST *_data* as JSON to *_url*, authenticated via a session_id cookie.

    :param _url: endpoint URL.
    :param _session_id: value for the secure ``session_id`` cookie.
    :param _data: payload, JSON-serialised into the request body.
    :param _timeout: optional requests timeout in seconds.
    :param _print_log: if truthy, log to stdout instead of write_to_log.
    :param _verify_SSL: passed to requests' ``verify``.
    :return: the decoded JSON response dict, or None on empty/invalid body.
    :raises Exception: when the HTTP status is not 200.
    """
    def do_log(_error, _category=EC_NOTIFICATION, _severity=SEV_DEBUG):
        # Route diagnostics either to stdout or the structured log,
        # returning the message so it can be embedded in an exception.
        if _print_log:
            print(_error)
        else:
            write_to_log(_data, _category=_category, _severity=_severity)
        return _error
    _cookie_jar = RequestsCookieJar()
    # Secure cookie: only sent over HTTPS.
    _cookie_jar.set(name="session_id", value=_session_id, secure=True)
    _headers = {'content-type': 'application/json'}
    _response = requests.post(_url, data=json.dumps(_data), headers=_headers,
                              timeout=_timeout, verify=_verify_SSL, cookies=_cookie_jar)
    _response_dict = None
    if _response.status_code != 200:
        do_log("Response code :" + str(_response.status_code))
        try:
            # raise_for_status() converts the status into an HTTPError,
            # which is then logged and re-wrapped.
            _response.raise_for_status()
        except Exception as e:
            raise Exception(do_log("Error in call_api:" + str(e),
                                   _category=EC_COMMUNICATION, _severity=SEV_ERROR))
    else:
        if _response.content:
            try:
                _response_dict = _response.json()
            except Exception as e:
                do_log("response.content didn't contain JSON data",
                       _category=EC_COMMUNICATION, _severity=SEV_ERROR)
                _response_dict = None
    if _response_dict is not None:
        return _response_dict
    else:
        do_log("Got an empty response from server:" + str(_response.content),
               _category=EC_COMMUNICATION, _severity=SEV_ERROR)
        return None
def __init__(self, father):
    """Ssyer source: registers with *father* and builds a session from any saved cookies."""
    super(Ssyer, self).__init__(father, self.__class__.__name__)
    # Small PNG icon, base64-encoded.
    self.b64_data = b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABl0lEQVR4nKVSv0ubURQ95933kr3EaIyDEBDBtkuGiiBCl0DJUAeFQEAUMmQopYNDoBSCQoeMGbIVAoVKHbp0yVTEDnEt1IKlpYPx04j/gfF2CO/Lj0bUeqb7Duecd+/lAvcER5G51LF++DXFXOpYoaiCuqrKBzu/pyLDWuOL/ExLfS2GW/mZlhrBd7GIiZjxUeaBALHA2sPWLAC8/5l8IxawlnNimRN7/Qh9AYQY/th4fKIAUD9Msn6YpFhCLOF5j/VHrT9hQCEd6LtvkxRHiCMK6UAL6UABoJ/32kI6UBsxX8MlFue74lozweKT4COIlf7fas0EvaafCzuoNRO0jnixeKo2gue1ZoIiZs06wvNeE49OOM8N7KC6P8GuwbhXT8/URVHvccTLpWDaOqK8x0tnOmNi+SUcYRibmXbY7tVVZ9wYOfPvSiPOzUxbK434SO8AStm2lrLdsFK2raVnp/lhjfnX1sPbz3GKI14vn6s4ghF7dKcAANj+NBbegnM4uHMA0L1SIwjKu7Gb5x6FrfzFwn8Zb4O/pn98TcpRxaMAAAAASUVORK5CYII='
    # Category keywords (runtime strings, left untranslated).
    self.cate = ['随机', '风景', '自然', '清新', '简约', '山水', '创意']
    self.session = session()
    try:
        # Restore cookies saved under data['api'][<source name>]['save_cookie'].
        cookie = RequestsCookieJar()
        for i in self.father.data['api'][self.name]['save_cookie']:
            cookie.set(
                i, self.father.data['api'][self.name]['save_cookie'][i])
        self.session.cookies = cookie
    except KeyError:
        # No saved cookies yet -- keep the session's defaults.
        pass
def reset(self):
    """Reset all login/session state back to a freshly-initialised client."""
    self.device_id = ('e%f' % (random.random() * 1000000000000000)).split('.')[0]
    self._dn = int(time.time() * 1000) - 1
    # Session identifiers and tickets.
    self.uuid = None
    self.sid = None
    self.uin = None
    self.skey = None
    self.pass_ticket = None
    # Sync state.
    self.syncKey = None
    self.syncStr = None
    # Identity and contact caches.
    self.myUserName = None
    self.members = {}
    self.groups = {}
    # Fresh, empty cookie jar.
    self.cookies = RequestsCookieJar()
def create_request(session, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None):
    """Build a Request whose cookies, params, headers and auth are merged with the session's."""
    incoming = cookies or {}
    if not isinstance(incoming, cookielib.CookieJar):
        incoming = cookiejar_from_dict(incoming)
    # Session cookies first, then the per-request ones on top.
    jar = RequestsCookieJar()
    jar.update(session.cookies)
    jar.update(incoming)
    return Request(
        method=method.upper(),
        url=url,
        headers=merge_setting_safe(headers, session.headers, dict_class=CaseInsensitiveDict),
        files=files,
        data=data,
        params=merge_setting_safe(params, session.params),
        auth=merge_setting_safe(auth, session.auth),
        cookies=jar,
    )
def prepare_request(self, request):
    """Constructs a :class:`PreparedRequest <PreparedRequest>` for
    transmission and returns it. The :class:`PreparedRequest` has settings
    merged from the :class:`Request <Request>` instance and those of the
    :class:`Session`.

    :param request: :class:`Request` instance to prepare with this
        session's settings.
    """
    request_cookies = request.cookies or {}

    # Normalise plain dicts into a CookieJar.
    if not isinstance(request_cookies, cookielib.CookieJar):
        request_cookies = cookiejar_from_dict(request_cookies)

    # Layer request cookies over session cookies.
    jar = RequestsCookieJar()
    jar.update(self.cookies)
    jar.update(request_cookies)

    # Fall back to netrc auth only when nothing explicit is configured.
    auth = request.auth
    if self.trust_env and not auth and not self.auth:
        auth = get_netrc_auth(request.url)

    prepared = PreparedRequest()
    prepared.prepare(
        method=request.method.upper(),
        url=request.url,
        files=request.files,
        data=request.data,
        json=request.json,
        headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
        params=merge_setting(request.params, self.params),
        auth=merge_setting(auth, self.auth),
        cookies=jar,
        hooks=merge_hooks(request.hooks, self.hooks),
    )
    return prepared
def __merge_cookies(self, request_cookies):
    """Merge per-request cookies with self.cookies into one RequestsCookieJar.

    *request_cookies* may be a jar, a dict, or an iterable of
    (name, value) pairs; self.cookies (jar or dict) is overlaid on top.
    """
    if type(request_cookies) is RequestsCookieJar:
        cookies = request_cookies
    else:
        cookies = RequestsCookieJar()
        # .items() is required for dicts: iterating a dict directly
        # yields keys only, so the original 2-tuple unpack raised
        # ValueError on any dict input.
        if isinstance(request_cookies, dict):
            pairs = request_cookies.items()
        else:
            pairs = request_cookies
        for name, value in pairs:
            cookies.set(name, value)
    if type(self.cookies) is RequestsCookieJar:
        for cookie in iter(self.cookies):
            cookies.set_cookie(cookie)
    elif type(self.cookies) is dict:
        for name, value in self.cookies.items():
            cookies.set(name, value)
    return cookies
def get_spartan_token():
    """Log in to Xbox Live via login.live.com and return a Halo Waypoint Spartan token.

    Returns None when the login redirect or the WebAuth cookie is missing.

    NOTE(review): this appears to be Python 2 code -- `urllib.quote` and
    matching a str regex against an encoded (bytes) body both fail on
    Python 3; confirm target interpreter before modernising.
    """
    # Collect the initial cookies from the scrape page.
    cookie_container = RequestsCookieJar()
    first_response = requests.get(URL_TO_SCRAPE)
    body = first_response.text.encode('utf-8', 'ignore')
    for cookie in first_response.cookies:
        cookie_container.set_cookie(cookie)
    # Extract the PPFT anti-forgery token from the login form.
    ppft_regex = re.compile("name=\"PPFT\".*?value=\"(.*?)\"")
    ppft_match = re.findall(ppft_regex, body)
    assert len(ppft_match) == 1
    ppft = ppft_match[0]
    # Build the form-encoded login body expected by login.live.com.
    ppsx = "Pass"
    query = "PPFT={ppft}&login={email}&passwd={password}&LoginOptions=3&NewUser=1&PPSX={ppsx}&type=11&i3={random}&m1=1680&m2=1050&m3=0&i12=1&i17=0&i18=__MobileLogin|1".format(
        ppft = ppft, email = urllib.quote(EMAIL), password = PASSWORD, ppsx = ppsx,
        random = random.randint(15000, 50000))
    headers = {"Content-Type": "application/x-www-form-urlencoded",
               "Host": "login.live.com",
               "Expect": "100-continue",
               "Connection": "Keep-Alive"}
    # Send the login without following redirects so the Location header
    # and cookies can be harvested manually.
    s = Session()
    login_request = Request('POST', URL_TO_POST, headers = headers, data = query,
                            cookies = cookie_container)
    prepped = s.prepare_request(login_request)
    login_response = s.send(prepped, stream = True, allow_redirects = False)
    for cookie in login_response.cookies:
        cookie_container.set_cookie(cookie)
    if "Location" not in login_response.headers:
        # Login failed: no redirect target was issued.
        return None
    next_location = login_response.headers['Location']
    # Follow the redirect manually to collect the Waypoint cookies.
    waypoint_response = requests.get(next_location, allow_redirects = False)
    if "WebAuth" not in waypoint_response.cookies:
        # Authentication did not complete.
        return None
    for cookie in waypoint_response.cookies:
        cookie_container.set_cookie(cookie)
    # Exchange the accumulated cookies for a Spartan token.
    headers = {"UserAgent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.52 Safari/537.17"}
    token_response = requests.get(SPARTAN_TOKEN_GENERATOR, headers = headers,
                                  cookies = cookie_container)
    spartan_token = token_response.text
    spartan_token = json.loads(spartan_token)["SpartanToken"]
    return spartan_token
def test_is_authenticated_false_if_authenticated_request_fails(self):
    """A user carrying bogus cookies must not report as authenticated."""
    jar = RequestsCookieJar()
    jar.set('gross_cookie', 'yum', domain='senscritique.com')
    user = User(email="", password="", username="", session_cookies=jar)
    result = AuthService().is_authenticated(user=user)
    self.assertFalse(result)
gevent.sleep(sleep_time) except BaseException, err: # TODO:不知道是不是这里有捕获不了的gevent超时,稳定后删除。 self.logger.error(type(err)) self.logger.error(err) else: # 一切正常就跳出循环 break else: # 超出最大重试次数,把最后一个异常(肯定是重试异常或者空白页面异常)向上爆 raise err self.logger.debug('[%s]<< %s' % (method.upper(), url)) merged_cookies = RequestsCookieJar() if not isinstance(kwargs['cookies'], cookielib.CookieJar): kwargs['cookies'] = cookiejar_from_dict( kwargs['cookies']) # 先更新旧的cookies response.cookies.update(kwargs['cookies']) # 再更新新的cookies,顺序不能乱 merged_cookies.update(response.cookies) response.cookies = merged_cookies return response def switch_proxy(self, old_proxy=None):