def _get_initial_token(url):
    """
    Create initial connection to get authentication token for future requests.

    Returns a string to be used in subsequent connections with the
    X-CSRFToken header, or the empty string if we didn't find any token in
    the cookies.
    """
    logging.info('Getting initial CSRF token.')
    cookiejar = CookieJar()
    opener = build_opener(HTTPCookieProcessor(cookiejar))
    install_opener(opener)
    opener.open(url)

    for cookie in cookiejar:
        if cookie.name == 'csrftoken':
            logging.info('Found CSRF token.')
            return cookie.value

    logging.warning('Did not find the CSRF token.')
    return ''
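# A minimal usage sketch (the URLs below are hypothetical): fetch the CSRF
# token once, then send it back on a POST via the X-CSRFToken header.
# Because _get_initial_token() installs its cookie-aware opener globally,
# urlopen() below reuses the same cookie jar, so the csrftoken cookie is
# sent along with the matching header.
from urllib.parse import urlencode
from urllib.request import Request, urlopen

token = _get_initial_token('https://example.com/accounts/login/')
if token:
    req = Request('https://example.com/api/submit/',
                  data=urlencode({'name': 'value'}).encode('utf-8'),
                  headers={'X-CSRFToken': token})
    with urlopen(req) as resp:
        print(resp.status)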
def __init__(self):
    self.cookie = http.cookiejar.CookieJar()
    self.opener = build_opener(HTTPCookieProcessor(self.cookie))
    self.opener.addheaders = BROWSER_HEAD
def __init__(self, url, cookie_file=None, username=None, password=None,
             api_token=None, agent=None, session=None, disable_proxy=False,
             auth_callback=None, otp_token_callback=None, verify_ssl=True,
             save_cookies=True, ext_auth_cookies=None):
    if not url.endswith('/'):
        url += '/'

    self.url = url + 'api/'

    self.save_cookies = save_cookies
    self.ext_auth_cookies = ext_auth_cookies

    if self.save_cookies:
        self.cookie_jar, self.cookie_file = create_cookie_jar(
            cookie_file=cookie_file)

        try:
            self.cookie_jar.load(ignore_expires=True)
        except IOError:
            pass
    else:
        self.cookie_jar = CookieJar()
        self.cookie_file = None

    if self.ext_auth_cookies:
        try:
            self.cookie_jar.load(ext_auth_cookies, ignore_expires=True)
        except IOError as e:
            logging.critical('There was an error while loading a '
                             'cookie file: %s', e)

    # Get the cookie domain from the url. If the domain
    # does not contain a '.' (e.g. 'localhost'), we assume
    # it is a local domain and suffix it (See RFC 2109).
    parsed_url = urlparse(url)
    self.domain = parsed_url[1].partition(':')[0]  # Remove Port.

    if self.domain.count('.') < 1:
        self.domain = '%s.local' % self.domain

    if session:
        cookie = Cookie(version=0,
                        name=RB_COOKIE_NAME,
                        value=session,
                        port=None,
                        port_specified=False,
                        domain=self.domain,
                        domain_specified=True,
                        domain_initial_dot=True,
                        path=parsed_url[2],
                        path_specified=True,
                        secure=False,
                        expires=None,
                        discard=False,
                        comment=None,
                        comment_url=None,
                        rest={'HttpOnly': None})
        self.cookie_jar.set_cookie(cookie)

        if self.save_cookies:
            self.cookie_jar.save()

    if username:
        # If the username parameter is given, we have to clear the session
        # cookie manually or it will override the username:password
        # combination retrieved from the authentication callback.
        try:
            self.cookie_jar.clear(self.domain, parsed_url[2],
                                  RB_COOKIE_NAME)
        except KeyError:
            pass

    # Set up the HTTP libraries to support all of the features we need.
    password_mgr = ReviewBoardHTTPPasswordMgr(self.url,
                                              username,
                                              password,
                                              api_token,
                                              auth_callback,
                                              otp_token_callback)
    self.preset_auth_handler = PresetHTTPAuthHandler(self.url,
                                                     password_mgr)

    handlers = []

    if not verify_ssl:
        context = ssl._create_unverified_context()
        handlers.append(HTTPSHandler(context=context))

    if disable_proxy:
        handlers.append(ProxyHandler({}))

    handlers += [
        HTTPCookieProcessor(self.cookie_jar),
        ReviewBoardHTTPBasicAuthHandler(password_mgr),
        HTTPDigestAuthHandler(password_mgr),
        self.preset_auth_handler,
        ReviewBoardHTTPErrorProcessor(),
    ]

    if agent:
        self.agent = agent
    else:
        self.agent = ('RBTools/' + get_package_version()).encode('utf-8')

    opener = build_opener(*handlers)
    opener.addheaders = [
        (str('User-agent'), str(self.agent)),
    ]
    install_opener(opener)

    self._cache = None
    self._urlopen = urlopen
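# A hedged usage sketch: the class name ReviewBoardServer is an assumption
# (this __init__ is shown without its class statement), and the URL,
# username, and token below are placeholders. It only illustrates which
# keyword arguments the constructor above accepts and the attributes it sets.
server = ReviewBoardServer('https://reviews.example.com/',
                           username='alice',
                           api_token='<api-token>',
                           verify_ssl=True,
                           save_cookies=False)
print(server.url)     # 'https://reviews.example.com/api/'
print(server.domain)  # 'reviews.example.com'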
def __init__(self, url, cookie_file=None, username=None, password=None,
             api_token=None, agent=None, session=None, disable_proxy=False,
             auth_callback=None, otp_token_callback=None):
    self.url = url
    if not self.url.endswith('/'):
        self.url += '/'

    self.url = self.url + 'api/'
    self.cookie_jar, self.cookie_file = create_cookie_jar(
        cookie_file=cookie_file)

    try:
        self.cookie_jar.load(ignore_expires=True)
    except IOError:
        pass

    if session:
        parsed_url = urlparse(url)
        # Get the cookie domain from the url. If the domain
        # does not contain a '.' (e.g. 'localhost'), we assume
        # it is a local domain and suffix it (See RFC 2109).
        domain = parsed_url[1].partition(':')[0]  # Remove Port.

        if domain.count('.') < 1:
            domain = '%s.local' % domain

        cookie = Cookie(version=0,
                        name=RB_COOKIE_NAME,
                        value=session,
                        port=None,
                        port_specified=False,
                        domain=domain,
                        domain_specified=True,
                        domain_initial_dot=True,
                        path=parsed_url[2],
                        path_specified=True,
                        secure=False,
                        expires=None,
                        discard=False,
                        comment=None,
                        comment_url=None,
                        rest={'HttpOnly': None})
        self.cookie_jar.set_cookie(cookie)
        self.cookie_jar.save()

    # Set up the HTTP libraries to support all of the features we need.
    password_mgr = ReviewBoardHTTPPasswordMgr(self.url,
                                              username,
                                              password,
                                              api_token,
                                              auth_callback,
                                              otp_token_callback)
    self.preset_auth_handler = PresetHTTPAuthHandler(self.url,
                                                     password_mgr)

    handlers = []

    if disable_proxy:
        handlers.append(ProxyHandler({}))

    handlers += [
        HTTPCookieProcessor(self.cookie_jar),
        ReviewBoardHTTPBasicAuthHandler(password_mgr),
        HTTPDigestAuthHandler(password_mgr),
        self.preset_auth_handler,
        ReviewBoardHTTPErrorProcessor(),
    ]

    if agent:
        self.agent = agent
    else:
        self.agent = ('RBTools/' + get_package_version()).encode('utf-8')

    opener = build_opener(*handlers)
    opener.addheaders = [
        (b'User-agent', self.agent),
    ]
    install_opener(opener)

    self._cache = APICache()
def url_downloader(url, data=None, path=None, cookie=None,
                   timeout=5, retry=1, retry_ivl=5,
                   agent=None, proxy=None):
    """Download URL link

    url:       url to download
    data:      post data
    path:      download to local file
    timeout:   socket timeout
    retry:     retry times to download url
    retry_ivl: interval time when retry
    agent:     http user agent
    proxy:     socks5://127.0.0.1:1080
    """
    while True:
        # Initialize before the try block so the except handler can always
        # reference it, even if building the request fails.
        response = None
        try:
            if isinstance(data, dict):
                # urlopen() expects POST data as bytes, so encode the
                # urlencoded string.
                data = urlencode(data).encode('utf-8')
            request = Request(url, data=data)
            request.add_header('User-Agent', agent or get_user_agent())
            if data:
                request.add_header(
                    'Content-Type',
                    'application/x-www-form-urlencoded;charset=utf-8')

            handlers = []
            if proxy:
                scheme, host, port = proxy.split(':')
                host = host.strip('/')
                proxy_handler = SocksiPyHandler(
                    socks.PROXY_TYPES[scheme.upper()],
                    host,
                    int(port)
                )
                handlers.append(proxy_handler)

            if cookie is None:
                cookie = CookieJar()
            cookie_handler = HTTPCookieProcessor(cookie)
            handlers.append(cookie_handler)

            opener = build_opener(*handlers)
            response = opener.open(request, timeout=timeout)

            content_encoding = response.info().get('content-encoding')
            if content_encoding:
                r_data = gzip.decompress(response.read())
            else:
                r_data = response.read()

            if path:
                with open(path, 'wb') as f:
                    f.write(r_data)
                r_data = None

            response.close()
            mime = response.info().get('content-type')
            real_url = response.geturl()
            err_msg = 'Ok'
            break
        except (URLError, socket.error, Exception) as err:
            if response:
                response.close()
            retry -= 1
            err_msg = str(err)
            if retry > 0:
                # Back off: double both the wait interval and the timeout
                # before the next attempt.
                time.sleep(retry_ivl)
                retry_ivl += retry_ivl
                timeout += timeout
            else:
                mime = r_data = real_url = None
                break

    return {
        'mime': mime,
        'path': path,
        'data': r_data,
        'url': real_url,
        'cookie': cookie,
        'error': err_msg,
    }
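# A minimal usage sketch (the URL, proxy address, and local path are
# hypothetical; get_user_agent() and SocksiPyHandler/socks come from the
# surrounding module and PySocks): download a file through a SOCKS5 proxy
# and inspect the result dict.
result = url_downloader('https://example.com/archive.zip',
                        path='/tmp/archive.zip',
                        retry=3,
                        proxy='socks5://127.0.0.1:1080')
if result['error'] == 'Ok':
    print('saved as', result['path'], 'content-type', result['mime'])
else:
    print('download failed:', result['error'])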
def request_wolfram_alpha(input, verbose=False):
    r"""
    Request Wolfram Alpha website.

    INPUT:

    - ``input`` -- string
    - ``verbose`` -- bool (default: ``False``)

    OUTPUT:

    json

    EXAMPLES::

        sage: from sage.symbolic.integration.external import request_wolfram_alpha
        sage: page_data = request_wolfram_alpha('integrate Sin[x]')      # optional internet
        sage: [str(a) for a in sorted(page_data.keys())]                 # optional internet
        ['queryresult']
        sage: [str(a) for a in sorted(page_data['queryresult'].keys())]  # optional internet
        ['datatypes', 'encryptedEvaluatedExpression', 'encryptedParsedExpression', 'error', 'host', 'id', 'numpods', 'parsetimedout', 'parsetiming', 'pods', 'recalculate', 'related', 'server', 'sponsorCategories', 'success', 'timedout', 'timedoutpods', 'timing', 'version']
    """
    # import compatible with py2 and py3
    from six.moves.urllib.parse import urlencode
    from six.moves.urllib.request import Request, build_opener, HTTPCookieProcessor
    import json
    from http.cookiejar import CookieJar

    # we need cookies for this...
    cj = CookieJar()
    opener = build_opener(HTTPCookieProcessor(cj))
    # build initial query for code
    req = Request("http://www.wolframalpha.com/input/api/v1/code")
    resp = opener.open(req)
    # the website returns JSON containing the code
    page_data = json.loads(resp.read().decode("utf-8"))
    if "code" not in page_data:
        raise ValueError("Wolfram did not return a code")
    proxy_code = page_data['code']
    if verbose:
        print("Code: {}".format(proxy_code))
        print("Cookies: {}".format(cj))

    # now we can make a request
    # some parameters documented here:
    # https://products.wolframalpha.com/api/documentation/#parameter-reference
    # the following are the parameters used by the website
    params = {
        'assumptionsversion': '2',
        'async': 'true',
        'banners': 'raw',
        'debuggingdata': 'false',
        'format': 'image,plaintext,imagemap,sound,minput,moutput',
        'formattimeout': '8',
        'input': input,
        'output': 'JSON',
        'parsetimeout': '5',
        'podinfosasync': 'true',
        'proxycode': proxy_code,
        'recalcscheme': 'parallel',
        'sbsdetails': 'true',
        'scantimeout': '0.5',
        'sponsorcategories': 'true',
        'statemethod': 'deploybutton',
        'storesubpodexprs': 'true'
    }
    # # we can also change some parameters
    # params = {
    #     'assumptionsversion': '2',
    #     'banners': 'raw',
    #     'format': 'minput,moutput',
    #     'formattimeout': '8',
    #     'input': input,
    #     'output': 'JSON',
    #     'parsetimeout': '5',
    #     'proxycode': proxy_code,
    #     'scantimeout': '0.5',
    #     'storesubpodexprs': 'true'
    # }
    params = urlencode(params)
    url = "https://www.wolframalpha.com/input/json.jsp?%s" % params
    req = Request(url)
    req.add_header('Referer', "https://www.wolframalpha.com/input/")  # seems important
    resp = opener.open(req)
    # the website returns JSON containing the code
    return json.loads(resp.read().decode("utf-8"))
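# A follow-up sketch (requires internet access; the pods/subpods/plaintext
# layout is assumed from Wolfram|Alpha's JSON output and may change): walk
# the returned pods and print their plain-text contents.
page_data = request_wolfram_alpha('integrate Sin[x]')
queryresult = page_data['queryresult']
if queryresult.get('success'):
    for pod in queryresult.get('pods', []):
        for subpod in pod.get('subpods', []):
            if subpod.get('plaintext'):
                print('{}: {}'.format(pod.get('title'), subpod['plaintext']))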
def init_opener(self):
    self.cj = http_cookiejar.CookieJar()
    return build_opener(HTTPCookieProcessor(self.cj))
def __init__(self):
    self.cookies = {}
    self.server_url = 'https://www.titulky.com'
    opener = build_opener(HTTPCookieProcessor(http_cookiejar.LWPCookieJar()))
    opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 ( .NET CLR 3.5.30729)')]
    install_opener(opener)
def __init__(self, jar):
    self._opener = build_opener(HTTPCookieProcessor(jar))
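# A standalone sketch of the pattern shared by the snippets above (the URLs
# are hypothetical): wrap a CookieJar in HTTPCookieProcessor and build an
# opener from it, so cookies set by one response are replayed on the next
# request made through the same opener.
from http.cookiejar import CookieJar
from urllib.request import HTTPCookieProcessor, build_opener

jar = CookieJar()
opener = build_opener(HTTPCookieProcessor(jar))
opener.addheaders = [('User-agent', 'example-client/1.0')]

with opener.open('https://example.com/login') as resp:
    resp.read()
# Any Set-Cookie headers from the first response are now in `jar` and are
# sent automatically on this second request.
with opener.open('https://example.com/profile') as resp:
    resp.read()

print([cookie.name for cookie in jar])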