def test_param_cookiejar_works(self):
    cj = cookielib.CookieJar()
    cookiejar_from_dict({'foo': 'bar'}, cj)
    s = requests.session()
    r = s.get(httpbin('cookies'), cookies=cj)
    # Make sure the cookie was sent
    assert r.json()['cookies']['foo'] == 'bar'
def parameter_relevance(self, response=None):
    if response is not None:
        req_data_list = reg_of_param(self.req_data)
        cookie_list = reg_of_param(self.cookie)
        for k, n, u in req_data_list:
            try:
                # Try the expression as a JSONPath against the JSON body first.
                expr = parse(u)
                m = [match.value for match in expr.find(response[n].json())][0]
            except Exception:
                try:
                    # Fall back to treating it as a regex over the raw text.
                    m = re.search(u, response[n].text).group(1)
                except (AttributeError, IndexError):
                    # No JSONPath or regex match; leave this parameter unchanged.
                    # (re.search returns None on no match, hence AttributeError.)
                    continue
            self.req_data[k] = m
        for k, n, u in cookie_list:
            try:
                expr = parse(u)
                m = [match.value for match in expr.find(response[n].json())][0]
            except Exception:
                try:
                    m = re.search(u, response[n].text).group(1)
                except (AttributeError, IndexError):
                    continue
            cookiejar_from_dict({k: m}, COOKIES)
    return self.test_runner()
def test_generic_cookiejar_works(self):
    cj = cookielib.CookieJar()
    cookiejar_from_dict({'foo': 'bar'}, cj)
    s = requests.session()
    s.cookies = cj
    r = s.get(httpbin('cookies'))
    # Make sure the cookie was sent
    assert r.json()['cookies']['foo'] == 'bar'
    # Make sure the session cj is still the custom one
    assert s.cookies is cj
def cookieUpdate(self, cookie_dict):
    """Update the session cookies.

    :param cookie_dict: cookies as a dict
    :return: None
    """
    if not isinstance(cookie_dict, dict):
        warnings.warn(u"cookie_dict is %s, not dict; update skipped."
                      % str(type(cookie_dict)))
        return
    cookiejar_from_dict(cookie_dict, self.ss.cookies)
def create(username, password):
    tokens = getCookie.get(username, password)
    if tokens is None:
        return None
    s = Session()
    s.cookies = cookiejar_from_dict({'DZIENNIKSID': tokens['clientToken']})
    return {'session': s, 'token': tokens['clientToken']}
def re_download_bad_files(self):
    try:
        download_path = None
        cookies = self.get_cookie_dict_from_str()
        with requests.Session() as dl_session:
            dl_session.cookies = cookiejar_from_dict(cookies)
            for file_item in self.lists_of_files["to redownload"]:
                download_url = self.info_map_table.get_sync_url_for_file_item(
                    file_item)
                download_path = file_item.download_path
                with DownloadFileAndCheckChecksum(
                        download_url, download_path, file_item.checksum,
                        report_own_progress=False) as dler:
                    dler(session=dl_session)
                super().increment_and_output_progress(
                    increment_by=0,
                    prog_msg=f"redownloaded {file_item.download_path}")
                self.num_bad_files -= 1
    except Exception as ex:
        log.error(f"""Exception while redownloading {download_path}, {ex}""")
        super().increment_and_output_progress(
            increment_by=0,
            prog_msg=f"""Exception while redownloading {download_path}, {ex}""")
def login(self):
    location = 'login/'
    csrf_token = self.get_token()
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Cookie': f'csrftoken={csrf_token}'
    }
    data = {
        'csrfmiddlewaretoken': csrf_token,
        'login': self.user,
        'password': self.password
    }
    response = self._request('POST', location, headers=headers, data=data,
                             json=False)
    cookies = response.headers['Set-Cookie'].split(';')
    new_csrf_token = [c for c in cookies
                      if c.startswith('csrftoken=')][0].split('=')[-1]
    session_id_gtp = [c for c in cookies
                      if 'secure, sessionid_gtp=' in c][0].split('=')[-1]
    self.csrf_token = new_csrf_token
    self.session.cookies = cookiejar_from_dict({
        'csrftoken': new_csrf_token,
        'sessionid_gtp': session_id_gtp
    })
    return response.json()
def __init__(self, cookie: str):
    d = {}
    for item in cookie.split("; "):
        # Split on the first '=' only, so values containing '=' survive.
        k, v = item.split("=", 1)
        d[k] = v
    self.session = requests.session()
    self.session.cookies = cookiejar_from_dict(d)
def _initialize(self):
    """
    .. warning: Called automatically by GoLismero. Do not call!
    """
    # Start a new session.
    self.__session = Session()

    # Load the proxy settings.
    proxy_addr = Config.audit_config.proxy_addr
    if proxy_addr:
        proxy_port = Config.audit_config.proxy_port
        if proxy_port:
            proxy_addr = "%s:%s" % (proxy_addr, proxy_port)
        auth_user = Config.audit_config.proxy_user
        auth_pass = Config.audit_config.proxy_pass
        auth, _ = detect_auth_method(proxy_addr)
        self.__session.auth = get_auth_obj(auth, auth_user, auth_pass)
        self.__session.proxies = {
            "http": proxy_addr,
            "https": proxy_addr,
            "ftp": proxy_addr,
        }

    # Load the cookies.
    cookie = Config.audit_config.cookie
    if cookie:
        self.__session.cookies = cookiejar_from_dict(cookie)

    # Set User Agent
    self.__user_agent = Config.audit_config.user_agent
def __init__(self, login, password, cookies=None, db_session=None):
    if cookies is None:
        log.debug('rutracker cookie not found. Requesting new one')
        payload_ = {
            'login_username': login,
            'login_password': password,
            'login': '******'
        }
        auth_response = post(
            "http://login.rutracker.org/forum/login.php",
            data=payload_,
            follow_redirects=True,
            cookies=cookiejar_from_dict({'spylog_test': '1'}))
        if len(auth_response.cookies) == 0 or auth_response.cookies is None:
            log.fatal('unable to obtain cookies from rutracker')
            raise PluginError('unable to obtain cookies from rutracker')
        self.cookies_ = auth_response.cookies
        if db_session:
            db_session.add(
                RutrackerAccount(
                    login=login,
                    cookies=dict_from_cookiejar(self.cookies_),
                    expiry_time=datetime.now() + timedelta(days=1)))
            db_session.commit()
        else:
            raise ValueError('db_session can not be None if cookies is None')
    else:
        log.debug('Using previously saved cookie')
        self.cookies_ = cookies
def request_with_cookie(method: str, url: str, return_when: str,
                        reset_cookie: bool) -> Dict[str, Any]:
    cookie_raw = "k=v" if reset_cookie else read_cookie()
    cookies = parse_cookies(cookie_raw)
    while 1:
        session.cookies = cookiejar_from_dict(cookies)
        resp = session_request(method, url)
        data = resp.json()
        keys = return_when.split(".")
        value = data
        for key in keys:
            value = value.get(key, {})
        if value:
            save_cookie(cookie_raw)
            break
        error = data.get("ERROR")
        if error:
            raise ValueError(f"{error} {url}")
        cookie_raw = input(
            "The cookie has expired, please enter a new one:\n"
            "(Log in https://xvideos.com with your account via a browser, "
            "then open the developer mode, copy and paste the cookie here)\n"
        ).strip()
        cookies = parse_cookies(cookie_raw)
    return data
def set_online(self):
    """Validate the server response and refresh the session cookies.

    :return: result dict
    """
    from requests import cookies, utils
    online_response = self.online_request()
    if online_response:
        if online_response.status_code == 200:
            try:
                # Merge the existing jar with the newly returned cookies.
                temp_cookie = utils.dict_from_cookiejar(self.cookies)
                load_cookie = utils.dict_from_cookiejar(
                    online_response.cookies)
                temp_cookie.update(load_cookie)
                self.cookies = cookies.cookiejar_from_dict(temp_cookie)
                return return_result(2000, [], desc=u'session acquired')
            except Exception:
                return return_result(4100, [], desc=u'session parse error')
        else:
            return return_result(
                4000, [],
                desc=u'session network error: {}'.format(
                    online_response.status_code))
    else:
        return return_result(4000, [], desc=u'session network error')
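# The merge pattern in set_online above (jar -> dict, update, dict -> jar)
# also works standalone. A minimal sketch, with illustrative cookie names;
# note the round-trip deliberately drops domain/path attributes, which is
# exactly why it deduplicates same-named cookies:
from requests import cookies, utils

old_jar = cookies.cookiejar_from_dict({'sid': 'stale', 'lang': 'en'})
new_jar = cookies.cookiejar_from_dict({'sid': 'fresh'})

# Flatten both jars to dicts, let the newer values win, rebuild the jar.
merged = utils.dict_from_cookiejar(old_jar)
merged.update(utils.dict_from_cookiejar(new_jar))
jar = cookies.cookiejar_from_dict(merged)
print(utils.dict_from_cookiejar(jar))  # {'sid': 'fresh', 'lang': 'en'} (order may vary)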
def _initialize(self):
    """
    .. warning: Called automatically by GoLismero. Do not call!
    """
    # Initialize the CA bundle.
    if not environ.get("CURL_CA_BUNDLE"):
        environ["CURL_CA_BUNDLE"] = join(get_data_folder(), "cacert.pem")

    # Start a new session.
    self.__session = Session()

    # Load the proxy settings.
    proxy_addr = Config.audit_config.proxy_addr
    if proxy_addr:
        proxy_port = Config.audit_config.proxy_port
        if proxy_port:
            proxy_addr = "%s:%s" % (proxy_addr, proxy_port)
        auth_user = Config.audit_config.proxy_user
        auth_pass = Config.audit_config.proxy_pass
        auth, _ = detect_auth_method(proxy_addr)
        self.__session.auth = get_auth_obj(auth, auth_user, auth_pass)
        self.__session.proxies = {
            "http": proxy_addr,
            "https": proxy_addr,
            "ftp": proxy_addr,
        }

    # Load the cookies.
    cookie = Config.audit_config.cookie
    if cookie:
        self.__session.cookies = cookiejar_from_dict(cookie)

    # Set User Agent
    self.__user_agent = Config.audit_config.user_agent
def set_cookies(cookies):
    if isinstance(cookies, str):
        if cookies == "from_browser":
            try:
                import browser_cookie3
                cookies = browser_cookie3.load(domain_name='.facebook.com')
            except ImportError:
                raise ModuleNotFoundError(
                    "browser_cookie3 must be installed to use browser cookies")
        else:
            try:
                cookies = parse_cookie_file(cookies)
            except ValueError as e:
                raise exceptions.InvalidCookies(
                    f"Cookies are in an invalid format: {e}")
    elif isinstance(cookies, dict):
        cookies = cookiejar_from_dict(cookies)
    if cookies is not None:
        cookie_names = [c.name for c in cookies]
        missing_cookies = [
            c for c in ['c_user', 'xs'] if c not in cookie_names
        ]
        if missing_cookies:
            raise exceptions.InvalidCookies(
                f"Missing cookies with name(s): {missing_cookies}")
        _scraper.session.cookies.update(cookies)
        if not _scraper.is_logged_in():
            raise exceptions.InvalidCookies("Cookies are not valid")
def merge_cookies_into_session(cookies_input):
    jar = _context.session.cookies
    if isinstance(cookies_input, list):
        for item in cookies_input:
            cookie = Cookie(
                0, item['name'], item['value'], None, False,
                item['domain'], True, bool(item['domain'].startswith(".")),
                item['path'], True, item['secure'],
                None, False, "", "", {},
            )
            logging.debug("Set cookie into context: %r", cookie)
            jar.set_cookie(cookie)
    else:
        attrs_set = parse_ns_headers(cookies_input.split('; '))
        merge_cookies(
            jar, cookiejar_from_dict({x[0][0]: x[0][1] for x in attrs_set}))
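# For reference, a quick look at what the header-string branch above sees.
# http.cookiejar's parse_ns_headers turns each "name=value" token into a
# list of pairs with a synthetic ('version', '0') appended, so x[0] is the
# name/value pair. The cookie values here are illustrative:
from http.cookiejar import parse_ns_headers

attrs_set = parse_ns_headers("sid=abc123; theme=dark".split("; "))
print(attrs_set)
# [[('sid', 'abc123'), ('version', '0')], [('theme', 'dark'), ('version', '0')]]
print({x[0][0]: x[0][1] for x in attrs_set})
# {'sid': 'abc123', 'theme': 'dark'}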
def __init__(self, cookies, file_format='flac', download_directory=None):
    self.session = AsyncIOSession()
    self.session.cookies = cookiejar_from_dict(cookies)
    self.file_format = file_format
    self.download_status = {}
    self.download_directory = (
        download_directory if download_directory is not None else os.getcwd())
def __init__(self, username, password, filename=None):
    self.username = username
    self.password = password
    self.filename = filename
    self.session = requests.Session()
    self.session.verify = False
    sessid = self._get_sessid()
    if sessid:
        self.session.cookies = cookiejar_from_dict({'KOSPELSESSID': sessid})
    self.session.headers.update({
        'X-Requested-With': 'XMLHttpRequest',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36',
        'Sec-Fetch-Site': 'same-origin',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Dest': 'empty',
        'Connection': 'keep-alive',
        'Accept-Language': 'en-GB,en-US;q=0.9,en;q=0.8',
    })
async def wrap_async(response: ClientResponse) -> Response:
    """Build a ``requests`` response from an ``aiohttp`` response.

    A ``requests.Response`` instance is built to provide synchronous
    access to the original response's data. Note that the returned
    response does not have proper data for :attr:`elapsed` or
    :attr:`request`. The response will be consumed if it has not been
    already.
    """
    # Ensure the response data is read so that the wrapped response
    # does not require any async methods.
    await response.read()

    wrapped = Response()
    wrapped._content = response._body  # type: ignore
    wrapped._content_consumed = True  # type: ignore
    wrapped.status_code = response.status
    wrapped.headers = CaseInsensitiveDict(response.headers)
    wrapped.url = str(response.url)  # `aiohttp` uses a `URL` object.
    wrapped.encoding = response.get_encoding()
    wrapped.history = [await wrap_async(rsp) for rsp in response.history]
    wrapped.reason = response.reason or ""
    wrapped.cookies = cookiejar_from_dict(response.cookies)
    return wrapped
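# One caveat worth flagging about the last assignment above: aiohttp's
# response.cookies is a http.cookies.SimpleCookie, so iterating it as a
# dict yields Morsel objects rather than plain strings. If the jar should
# carry string values, a conversion like this sketch (illustrative, not
# part of the original) does it:
from http.cookies import SimpleCookie
from requests.cookies import cookiejar_from_dict

simple = SimpleCookie()
simple.load("sid=abc123; theme=dark")

# Unwrap each Morsel to its string value before building the jar.
jar = cookiejar_from_dict({name: morsel.value for name, morsel in simple.items()})
print(jar.get("sid"))  # abc123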
def get_session(self, auth=None):
    """
    Load the cookies, log in if necessary, and return the session.

    :param dict[str, str] | None auth:
    :rtype: requests.Session
    """
    session = requests.session()
    cook = self.load_cookies()
    if auth or cook:
        if cook:
            session.cookies = cookies.cookiejar_from_dict(cook)
        else:
            session.post(URL.URL_LogIn, params=auth)
        self.token = self.get_token(session)
        if self.token:
            # if self._we_have_logged_in(res.text):
            self.cookie = self.save_cookies(session.cookies)
            self.is_login = True
        else:
            return self.get_session(self.ask_credentials())
    else:
        return self.get_session(self.ask_credentials())
    return session
def _hello(self):
    log.debug('_hello ...')
    # a new session is required for the app_token
    self.session.http.cookies = cookiejar_from_dict({})
    res = self.session.http.get(self.base_url)
    match = self._app_token_re.search(res.text)
    app_token = match.group(1)
    hello_url = self.API_HELLO.format(self.base_url)
    if self._uuid:
        __uuid = self._uuid
    else:
        __uuid = str(uuid.uuid4())
        self._session_attributes.set('uuid', __uuid,
                                     expires=self.TIME_SESSION)
    params = {
        'client_app_token': app_token,
        'uuid': __uuid,
        'lang': 'en',
        'format': 'json'
    }
    res = self.session.http.post(hello_url, headers=self.headers, data=params)
def fetch(url, args):
    global session
    global headers
    url_parsed = urlparse(url)
    params = parse_qs(url_parsed.query)
    param_q = None
    if '_q' in params:
        param_q = params['_q'][0]
    param_s = None
    if '_s' in params:
        param_s = params['_s'][0]

    # # recalculate param_s
    # tk = '32056b7abf66fd42bddfc24e575c6107_1609856909772'
    # v = tk.split('_')[0] + '_xmMain_' + url_parsed.path + '_' + json.dumps(json.loads(param_q), separators=(',', ':'))
    # print(f'recal token value: {v}')
    # print(f'recal token: {get_md5_hex(v.encode())}')
    # print(f'actual token: {param_s}')

    headers = args['headers']
    if 'referrer' in args:
        headers['referer'] = args['referrer']
    cookies = cookie_str_to_dict(headers.pop('cookie'))
    s = requests.Session()
    s.cookies = cookiejar_from_dict(cookies)
    r = s.get(url, proxies=proxies)
    """
    Possible responses:
    - {'code': 'SG_TOKEN_EXPIRED', 'msg': '令牌过期'}    (token expired)
    - {"code":"SG_INVALID","msg":"请求无效"}             (invalid request)
    """
    data = r.json()
    if 'code' not in data:
        print(f'fetch.py response: {r.content}')
        sys.exit(1)
    if data['code'] == 'SUCCESS':
        session = s
        print('test fetch() ok')
        if param_s:
            q_dict = None
            if param_q:
                q_dict = json.loads(param_q)
            token = create_token(s, url_parsed.path, q_dict)
            if token == param_s:
                print(f'recal token correct: {token}')
            else:
                print(f'recal token unequal:\n- recal : {token}\n- actual: {param_s}')
                raise Exception('stop processing due to token recalc failure')
    else:
        print(f'test fetch() failed: {r.text}')
        raise Exception('stop processing due to response failure')
def get_response(self, request):
    # if an error was requested then raise that instead of doing response
    if self._exc:
        raise self._exc

    # If a cookie dict is passed convert it into a CookieJar so that the
    # cookies object available in a callback context is always a jar.
    cookies = self._params.get('cookies', CookieJar())
    if isinstance(cookies, dict):
        cookies = cookiejar_from_dict(cookies, CookieJar())

    context = _Context(self._params.get('headers', {}).copy(),
                       self._params.get('status_code', _DEFAULT_STATUS),
                       self._params.get('reason'),
                       cookies)

    # if a body element is a callback then execute it
    def _call(f, *args, **kwargs):
        return f(request, context, *args, **kwargs) if callable(f) else f

    return create_response(request,
                           json=_call(self._params.get('json')),
                           text=_call(self._params.get('text')),
                           content=_call(self._params.get('content')),
                           body=_call(self._params.get('body')),
                           raw=self._params.get('raw'),
                           status_code=context.status_code,
                           reason=context.reason,
                           headers=context.headers,
                           cookies=context.cookies)
def __init__(
    self,
    token_v2=None,
    monitor=False,
    start_monitoring=False,
    enable_caching=False,
    cache_key=None,
    email=None,
    password=None,
    client_specified_retry=None,
):
    self.session = create_session(client_specified_retry)
    if token_v2:
        self.session.cookies = cookiejar_from_dict({"token_v2": token_v2})
    else:
        self._set_token(email=email, password=password)

    if enable_caching:
        cache_key = cache_key or hashlib.sha256(token_v2.encode()).hexdigest()
        self._store = RecordStore(self, cache_key=cache_key)
    else:
        self._store = RecordStore(self)
    if monitor:
        self._monitor = Monitor(self)
        if start_monitoring:
            self.start_monitoring()
    else:
        self._monitor = None
    self._update_user_info()
def set_login_cookie(self):
    """Validate the home-page cookie response.

    :return: result dict
    """
    from requests import utils, cookies
    home_cookie_response = self.fetch_home_cookie()
    if home_cookie_response:
        if home_cookie_response.status_code == 200:
            try:
                # Cookies with the same key but different paths can coexist,
                # so deduplicate by merging through a dict.
                temp_cookie = utils.dict_from_cookiejar(self.cookies)
                load_cookie = utils.dict_from_cookiejar(
                    home_cookie_response.cookies)
                temp_cookie.update(load_cookie)
                self.cookies = cookies.cookiejar_from_dict(temp_cookie)
                content = home_cookie_response.json()['content']
                return return_result(2000, [content],
                                     desc=u'home-page cookie acquired')
            except Exception as _:
                return return_result(4100, [], desc=u'home-page cookie missing')
        else:
            return return_result(
                4000, [],
                desc=u'home-page cookie network error: {}'.format(
                    home_cookie_response.status_code))
    else:
        return return_result(4000, [], desc=u'home-page cookie network error')
def __init__(self, url: Optional[str] = None,
             rules: Optional[str] = None) -> None:
    self.headers = default_headers()
    self.auth = None
    self.proxies = {}
    self.hooks = default_hooks()
    self.params = {}
    self.stream = False
    self.verify = True
    self.cert = None
    self.max_redirects = DEFAULT_REDIRECT_LIMIT
    self.trust_env = True
    self.cookies = cookiejar_from_dict({})
    self.adapters = OrderedDict()
    middlewares = [ResurfaceHTTPAdapter(url=url, rules=rules)]
    adapter = MiddlewareHTTPAdapter(middlewares)
    self.mount("https://", adapter)
    self.mount("http://", adapter)
def prepare_request(self, request):
    cookies = request.cookies or {}

    # Bootstrap CookieJar.
    if not isinstance(cookies, cookielib.CookieJar):
        cookies = cookiejar_from_dict(cookies)

    # Merge with session cookies
    merged_cookies = merge_cookies(
        merge_cookies(RequestsCookieJar(), self.cookies), cookies)

    # Set environment's basic authentication if not explicitly set.
    # auth = request.auth
    # if self.trust_env and not auth and not self.auth:
    #     auth = get_netrc_auth(request.url)

    p = PreparedRequest()
    p.prepare(
        method=request.method.upper(),
        url=request.url,
        files=request.files,
        data=request.data,
        json=request.json,
        headers=merge_setting(request.headers, self.headers,
                              dict_class=CaseInsensitiveDict),
        params=merge_setting(request.params, self.params),
        auth=merge_setting(request.auth, self.auth),
        cookies=merged_cookies,
        hooks=merge_hooks(request.hooks, self.hooks),
    )
    return p
def get_cookiejar():
    logged_in_user = os.environ.get('IA_LOGGED_IN_USER')
    logged_in_sig = os.environ.get('IA_LOGGED_IN_SIG')
    if not logged_in_user or not logged_in_sig:
        config = _get_config()
        cookie_config = config.get('cookies', {})
        return cookiejar_from_dict(cookie_config)
def __init__(self, agent, cookiejar=None,
             data_to_body_producer=IBodyProducer):
    self._agent = agent
    self._cookiejar = cookiejar or cookiejar_from_dict({})
    self._data_to_body_producer = data_to_body_producer
def set_cookies(cookies):
    if isinstance(cookies, str):
        cookies = parse_cookie_file(cookies)
    elif isinstance(cookies, dict):
        cookies = cookiejar_from_dict(cookies)
    if cookies is not None:
        _scraper.session.cookies = cookies
def request_with_cookie(method: str, url: str,
                        return_when: str) -> Dict[str, Any]:
    cookie_raw = read_cookie()
    cookies = parse_cookies(cookie_raw)
    while 1:
        session.cookies = cookiejar_from_dict(cookies)
        resp = session_request(method, url, timeout=c.TIMEOUT)
        resp.raise_for_status()
        data = resp.json()
        keys = return_when.split(".")
        value = data
        for key in keys:
            value = value.get(key, {})
        if value:
            save_cookie(cookie_raw)
            break
        error = data.get("ERROR")
        if error:
            raise ValueError(f"{error} {url}")
        cookie_raw = input(
            "The cookie has expired, please enter a new one:\n").strip()
        cookies = parse_cookies(cookie_raw)
    return data
def cookies(self):
    jar = cookiejar_from_dict({})
    if self._cookiejar is not None:
        for cookie in self._cookiejar:
            jar.set_cookie(cookie)
    return jar
def get_session(self):
    session = ProxmoxHttpSession()
    session.verify = self.verify_ssl
    session.auth = self.auth
    session.cookies = cookiejar_from_dict(
        {"PVEAuthCookie": self.auth.pve_auth_cookie})
    session.headers['Connection'] = 'keep-alive'
    session.headers["accept"] = self.get_serializer().get_accept_types()
    return session
def start_session(self):
    if self.session is None:
        self.session = Session()
        try:
            with open(COOKIE_JAR_DUMP_FILE, 'rb') as f:
                self.session.cookies = cookiejar_from_dict(pickle.load(f))
        except IOError:
            pass
def cookiejar_from_str(cookie_string):
    """
    Returns a CookieJar from a Cookie header string.

    :param cookie_string: Cookie header string
    :return: RequestsCookieJar
    """
    tokens = cookie_string.split('; ')
    pairs = [t.split('=', 1) for t in tokens]
    return cookiejar_from_dict(dict(pairs))
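# A quick usage sketch of cookiejar_from_str; the header string is
# illustrative. The split('=', 1) above is what keeps values containing
# '=' intact:
jar = cookiejar_from_str("sid=abc123; theme=dark; token=a=b")
print(jar.get("sid"))    # abc123
print(jar.get("token"))  # a=b  (the embedded '=' survives)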
def try_authenticate(self, payload):
    for _ in itertools.repeat(None, 5):
        auth_response = post("http://login.rutracker.org/forum/login.php",
                             data=payload,
                             cookies=cookiejar_from_dict({'spylog_test': '1'}))
        if auth_response.cookies and len(auth_response.cookies) > 0:
            return auth_response
        else:
            sleep(3)
    raise PluginError('unable to obtain cookies from rutracker')
def cookies(self):
    """
    Get a copy of this response's cookies.

    :rtype: :class:`requests.cookies.RequestsCookieJar`
    """
    jar = cookiejar_from_dict({})
    if self._cookiejar is not None:
        for cookie in self._cookiejar:
            jar.set_cookie(cookie)
    return jar
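# The copy-via-set_cookie loop above keeps callers from mutating the
# response's internal jar. A minimal standalone demonstration of the idea,
# with illustrative names:
from requests.cookies import cookiejar_from_dict

original = cookiejar_from_dict({"sid": "abc123"})

# Copy by re-setting each Cookie object into a fresh jar.
copy = cookiejar_from_dict({})
for cookie in original:
    copy.set_cookie(cookie)

copy.set("sid", "tampered")
print(original.get("sid"))  # abc123 -- the source jar is untouched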
def __init__(self):
    #: A case-insensitive dictionary of headers to be sent on each
    #: :class:`Request <Request>` sent from this
    #: :class:`Session <Session>`.
    self.headers = default_headers()

    #: Default Authentication tuple or object to attach to
    #: :class:`Request <Request>`.
    self.auth = None

    #: Dictionary mapping protocol to the URL of the proxy (e.g.
    #: {'http': 'foo.bar:3128'}) to be used on each
    #: :class:`Request <Request>`.
    self.proxies = {}

    #: Event-handling hooks.
    self.hooks = default_hooks()

    #: Dictionary of querystring data to attach to each
    #: :class:`Request <Request>`. The dictionary values may be lists for
    #: representing multivalued query parameters.
    self.params = {}

    #: Stream response content default.
    self.stream = False

    #: SSL Verification default.
    self.verify = True

    #: SSL certificate default.
    self.cert = None

    #: Maximum number of redirects allowed. If the request exceeds this
    #: limit, a :class:`TooManyRedirects` exception is raised.
    self.max_redirects = DEFAULT_REDIRECT_LIMIT

    #: Should we trust the environment?
    self.trust_env = True

    #: A CookieJar containing all currently outstanding cookies set on this
    #: session. By default it is a
    #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
    #: may be any other ``cookielib.CookieJar`` compatible object.
    self.cookies = cookiejar_from_dict({})

    # Only store 1000 redirects to prevent using infinite memory
    self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
def addLinkSpider(add_link_dictionary):
    # get user's download information from add_link_dictionary
    for i in ['link', 'header', 'out', 'user-agent', 'load-cookies', 'referer']:
        if not (i in add_link_dictionary):
            add_link_dictionary[i] = None
    link = add_link_dictionary['link']
    header = add_link_dictionary['header']
    user_agent = add_link_dictionary['user-agent']
    raw_cookies = add_link_dictionary['load-cookies']
    referer = add_link_dictionary['referer']

    # defining a requests Session
    requests_session = requests.Session()

    if raw_cookies:  # set cookies
        cookie = SimpleCookie()
        cookie.load(raw_cookies)
        cookies = {key: morsel.value for key, morsel in cookie.items()}
        requests_session.cookies = cookiejar_from_dict(cookies)

    if referer:  # set referer to the session
        requests_session.headers.update({'referer': referer})

    if user_agent:  # set user_agent to the session
        requests_session.headers.update({'user-agent': user_agent})

    # find headers
    response = requests_session.head(link)
    header = response.headers

    file_size = None
    filesize = None  # stays None when Content-Length is missing
    if 'Content-Length' in header.keys():  # checking if file_size is available
        file_size = int(header['Content-Length'])
        # converting file_size to KB or MB or GB
        if int(file_size / 1073741824) != 0:
            file_size = file_size / 1073741824
            size_str = str(round(file_size, 2)) + " GB"
        elif int(file_size / 1048576) != 0:
            size_str = str(int(file_size / 1048576)) + " MB"
        elif int(file_size / 1024) != 0:
            size_str = str(int(file_size / 1024)) + " KB"
        else:
            size_str = str(file_size)
        filesize = size_str
    return filesize
def build_response(self, req, resp):
    response = Response()
    response.status_code = resp.status_code
    response.headers = CaseInsensitiveDict((k, v) for k, v in resp.items())
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = StringIO(resp.content)
    response.reason = None

    if isinstance(req.url, bytes):
        response.url = req.url.decode('utf-8')
    else:
        response.url = req.url

    # Convert from django's SimpleCookie to requests' CookieJar
    cookiejar_from_dict(resp.cookies, response.cookies)

    # context
    response.request = req
    response.connection = self

    response = dispatch_hook('response', req.hooks, response)
    return response
def queueSpider(add_link_dictionary):
    # get download information from add_link_dictionary
    for i in ['link', 'header', 'out', 'user_agent', 'load_cookies', 'referer']:
        if not (i in add_link_dictionary):
            add_link_dictionary[i] = None
    link = add_link_dictionary['link']
    header = add_link_dictionary['header']
    user_agent = add_link_dictionary['user_agent']
    raw_cookies = add_link_dictionary['load_cookies']
    referer = add_link_dictionary['referer']

    # defining a requests Session
    requests_session = requests.Session()

    if raw_cookies:  # set cookies
        cookie = SimpleCookie()
        cookie.load(raw_cookies)
        cookies = {key: morsel.value for key, morsel in cookie.items()}
        requests_session.cookies = cookiejar_from_dict(cookies)

    if referer:  # set referer to the session
        requests_session.headers.update({'referer': referer})

    if user_agent:  # set user_agent to the session
        requests_session.headers.update({'user-agent': user_agent})

    # find headers
    try:
        response = requests_session.head(link)
        header = response.headers
    except Exception:
        header = {}

    filename = None
    if 'Content-Disposition' in header.keys():  # checking if filename is available
        content_disposition = header['Content-Disposition']
        if content_disposition.find('filename') != -1:
            filename_splited = content_disposition.split('filename=')
            filename_splited = filename_splited[-1]
            # getting file name in desired format
            filename = filename_splited[1:-1]
    if not filename:
        filename = link.split('/')[-1]
    return filename
def create_request(session, method, url, params=None, data=None, headers=None,
                   cookies=None, files=None, auth=None):
    cookies = cookies or {}
    if not isinstance(cookies, cookielib.CookieJar):
        cookies = cookiejar_from_dict(cookies)
    merged_cookies = RequestsCookieJar()
    merged_cookies.update(session.cookies)
    merged_cookies.update(cookies)
    cookies = merged_cookies
    params = merge_setting_safe(params, session.params)
    headers = merge_setting_safe(headers, session.headers,
                                 dict_class=CaseInsensitiveDict)
    auth = merge_setting_safe(auth, session.auth)
    return Request(method=method.upper(), url=url, headers=headers,
                   files=files, data=data, params=params, auth=auth,
                   cookies=cookies)
def run():
    logging.basicConfig(stream=sys.stderr, level=logging.INFO)
    parser = argparse.ArgumentParser()
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--cookie')
    parser.add_argument('graphite_host', action='store')
    options = parser.parse_args()
    if options.debug:
        app.debug = True
    app.config['GRAPHITE_HOST'] = options.graphite_host
    session = requests.Session()
    if options.cookie:
        cookies = {'pysid': options.cookie}
    else:
        cookies = get_cookies(options.graphite_host)
    session.cookies = cookiejar_from_dict(cookies)
    app.config['GRAPHITE_SESSION'] = session
    app.run()
def __init__(self, response):
    super(SanitizedResponse, self).__init__()
    self.status_code = response.status_code
    self.encoding = response.encoding
    self.raw = response.raw
    self.reason = response.reason
    self.url = response.url
    self.request = response.request
    self.connection = response.connection
    self._content_consumed = True
    self._content = ""
    self.cookies = cookiejar_from_dict({})
    self.headers = CaseInsensitiveDict()
    self.headers['content-length'] = '0'
    for header in ('date', 'server'):
        if header in response.headers:
            self.headers[header] = response.headers[header]
def addLinkSpider(add_link_dictionary):
    # get user's download information from add_link_dictionary
    for i in ['link', 'header', 'out', 'user_agent', 'load_cookies', 'referer']:
        if not (i in add_link_dictionary):
            add_link_dictionary[i] = None
    link = add_link_dictionary['link']
    header = add_link_dictionary['header']
    user_agent = add_link_dictionary['user_agent']
    raw_cookies = add_link_dictionary['load_cookies']
    referer = add_link_dictionary['referer']

    # defining a requests Session
    requests_session = requests.Session()

    if raw_cookies:  # set cookies
        cookie = SimpleCookie()
        cookie.load(raw_cookies)
        cookies = {key: morsel.value for key, morsel in cookie.items()}
        requests_session.cookies = cookiejar_from_dict(cookies)

    if referer:  # set referer to the session
        requests_session.headers.update({'referer': referer})

    if user_agent:  # set user_agent to the session
        requests_session.headers.update({'user-agent': user_agent})

    # find headers
    try:
        response = requests_session.head(link)
        header = response.headers
    except Exception:
        header = {}

    file_size = None
    if 'Content-Length' in header.keys():  # checking if file_size is available
        file_size = int(header['Content-Length'])
        # converting file_size to KiB or MiB or GiB
        file_size = humanReadbleSize(file_size)
    # file_size stays None when Content-Length is missing
    return file_size
def initialSessionFromCookie(self, chrome_cookie_string):
    cookies_str = str(chrome_cookie_string)
    if self.Session is None:
        self.Session = requests.Session()
    cookies_list = list(cookies_str.split(';'))
    cookies_dict = dict()
    for cookie in cookies_list:
        kv = cookie.split('=')
        k = kv[0].strip(' ')
        # to simplify cookies, uncomment following 2 lines.
        # if re.match('__utm', k):
        #     continue
        cookies_dict[k] = ('='.join(kv[1:])).strip(' ')
    for k in cookies_dict:
        print(k)
    from requests.cookies import cookiejar_from_dict
    self.Session.cookies = cookiejar_from_dict(cookies_dict)
    self.xsrf = cookies_dict['_xsrf']
def prepare_request(self, request):
    """Constructs a :class:`PreparedRequest <PreparedRequest>` for
    transmission and returns it. The :class:`PreparedRequest` has settings
    merged from the :class:`Request <Request>` instance and those of the
    :class:`Session`.

    :param request: :class:`Request` instance to prepare with this
        session's settings.
    """
    cookies = request.cookies or {}

    # Bootstrap CookieJar.
    if not isinstance(cookies, cookielib.CookieJar):
        cookies = cookiejar_from_dict(cookies)

    # Merge with session cookies
    merged_cookies = RequestsCookieJar()
    merged_cookies.update(self.cookies)
    merged_cookies.update(cookies)

    # Set environment's basic authentication if not explicitly set.
    auth = request.auth
    if self.trust_env and not auth and not self.auth:
        auth = get_netrc_auth(request.url)

    p = PreparedRequest()
    p.prepare(
        method=request.method.upper(),
        url=request.url,
        files=request.files,
        data=request.data,
        json=request.json,
        headers=merge_setting(request.headers, self.headers,
                              dict_class=CaseInsensitiveDict),
        params=merge_setting(request.params, self.params),
        auth=merge_setting(auth, self.auth),
        cookies=merged_cookies,
        hooks=merge_hooks(request.hooks, self.hooks),
    )
    return p
def __init__(self, agent, cookiejar=None):
    self._agent = agent
    self._cookiejar = cookiejar or cookiejar_from_dict({})
def spider(add_link_dictionary):
    # get user's download request from add_link_dictionary
    link = add_link_dictionary['link']
    ip = add_link_dictionary['ip']
    port = add_link_dictionary['port']
    proxy_user = add_link_dictionary['proxy_user']
    proxy_passwd = add_link_dictionary['proxy_passwd']
    download_user = add_link_dictionary['download_user']
    download_passwd = add_link_dictionary['download_passwd']
    header = add_link_dictionary['header']
    out = add_link_dictionary['out']
    user_agent = add_link_dictionary['user_agent']
    raw_cookies = add_link_dictionary['load_cookies']
    referer = add_link_dictionary['referer']

    # define a requests session
    requests_session = requests.Session()

    if ip:
        ip_port = 'http://' + str(ip) + ":" + str(port)
        if proxy_user:
            ip_port = 'http://' + proxy_user + ':' + proxy_passwd + '@' + ip_port
        # set proxy to the session
        requests_session.proxies = {'http': ip_port}

    if download_user:
        # set download user/password on the session
        # (auth is an attribute, not a callable)
        requests_session.auth = (download_user, download_passwd)

    # set cookies
    if raw_cookies:
        cookie = SimpleCookie()
        cookie.load(raw_cookies)
        cookies = {key: morsel.value for key, morsel in cookie.items()}
        requests_session.cookies = cookiejar_from_dict(cookies)

    # set referer to the session
    if referer:
        requests_session.headers.update({'referer': referer})

    # set user_agent to the session
    if user_agent:
        requests_session.headers.update({'user-agent': user_agent})

    # find headers
    try:
        response = requests_session.head(link)
        header = response.headers
    except Exception:
        header = {}

    filename = None
    filesize = None
    # checking if filename is available
    if 'Content-Disposition' in header.keys():
        content_disposition = header['Content-Disposition']
        if content_disposition.find('filename') != -1:
            filename_splited = content_disposition.split('filename=')
            filename_splited = filename_splited[-1]
            # getting file name in desired format
            filename = filename_splited[1:-1]
    if not filename:
        filename = link.split('/')[-1]

    # if user set file name before in add_link_dictionary['out'],
    # then set "out" for filename
    if out:
        filename = out

    # check if file_size is available
    if 'Content-Length' in header.keys():
        file_size = int(header['Content-Length'])
        # converting file_size to KiB or MiB or GiB
        # (assign to filesize so the returned value is actually set)
        filesize = humanReadbleSize(file_size)

    # return results
    return filename, filesize
def setcookiejar_from_dict(self, cookie_dict):
    # parameter renamed from `dict` to avoid shadowing the built-in
    self.cookies = cook.cookiejar_from_dict(cookie_dict)
def request(self, method, url, **kwargs):
    method = method.upper()

    # Join parameters provided in the URL
    # and the ones passed as argument.
    params = kwargs.get('params')
    if params:
        url = _combine_query_params(url, params)

    # Convert headers dictionary to
    # twisted raw headers format.
    headers = kwargs.get('headers')
    if headers:
        if isinstance(headers, dict):
            h = Headers({})
            for k, v in headers.iteritems():
                if isinstance(v, str):
                    h.addRawHeader(k, v)
                else:
                    h.setRawHeaders(k, v)
            headers = h
    else:
        headers = Headers({})

    # Here we choose a right producer
    # based on the parameters passed in.
    bodyProducer = None
    data = kwargs.get('data')
    files = kwargs.get('files')
    if files:
        # If the files keyword is present we will issue a
        # multipart/form-data request as it suits better for cases
        # with files and/or large objects.
        files = list(_convert_files(files))
        boundary = uuid.uuid4()
        headers.setRawHeaders(
            'content-type',
            ['multipart/form-data; boundary=%s' % (boundary,)])
        if data:
            data = _convert_params(data)
        else:
            data = []
        bodyProducer = multipart.MultiPartProducer(
            data + files, boundary=boundary)
    elif data:
        # Otherwise stick to x-www-form-urlencoded format
        # as it's generally faster for smaller requests.
        if isinstance(data, (dict, list, tuple)):
            headers.setRawHeaders(
                'content-type', ['application/x-www-form-urlencoded'])
            data = urlencode(data, doseq=True)
        bodyProducer = IBodyProducer(data)

    cookies = kwargs.get('cookies', {})
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)
    cookies = merge_cookies(self._cookiejar, cookies)
    wrapped_agent = CookieAgent(self._agent, cookies)

    if kwargs.get('allow_redirects', True):
        wrapped_agent = RedirectAgent(wrapped_agent)

    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [('gzip', GzipDecoder)])

    auth = kwargs.get('auth')
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)

    d = wrapped_agent.request(
        method, url, headers=headers, bodyProducer=bodyProducer)

    timeout = kwargs.get('timeout')
    if timeout:
        delayedCall = default_reactor(kwargs.get('reactor')).callLater(
            timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)

    if not kwargs.get('unbuffered', False):
        d.addCallback(_BufferedResponse)

    return d.addCallback(_Response, cookies)