def set_cookie(self, name, value):
    """
    Sets a cookie to be passed through with requests.
    """
    # Derive the cookie domain from the request host, dropping any port.
    cookie_domain = self.extra_environ.get('HTTP_HOST', '.localhost')
    cookie_domain = cookie_domain.split(':', 1)[0]
    # http_cookiejar matches against the "effective" request host, which
    # appends ".local" to dotless hostnames, so store the domain that way.
    if '.' not in cookie_domain:
        cookie_domain = "%s.local" % cookie_domain
    value = escape_cookie_value(value)
    cookie = http_cookiejar.Cookie(
        version=0,
        name=name,
        value=value,
        port=None,
        port_specified=False,
        domain=cookie_domain,
        domain_specified=True,
        domain_initial_dot=False,
        path='/',
        path_specified=True,
        secure=False,
        expires=None,
        discard=False,
        comment=None,
        comment_url=None,
        rest=None,
    )
    self.cookiejar.set_cookie(cookie)
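# A minimal, self-contained sketch of the same Cookie construction using the
# Python 3 stdlib directly (six.moves.http_cookiejar resolves to http.cookiejar
# there); the name, value, and domain below are illustrative only.
from http.cookiejar import Cookie, CookieJar

jar = CookieJar()
jar.set_cookie(Cookie(
    version=0, name='session', value='abc123',
    port=None, port_specified=False,
    domain='www.example.com', domain_specified=True, domain_initial_dot=False,
    path='/', path_specified=True,
    secure=False, expires=None, discard=False,
    comment=None, comment_url=None, rest={},
))
assert len(jar) == 1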
def validate_session(self):
    """Validate an API session."""
    # We need to store our access token as the openx3_access_token cookie.
    # This cookie will be passed to all future API requests.
    cookie = cookielib.Cookie(
        version=0,
        name='openx3_access_token',
        value=self._token,
        port=None,
        port_specified=False,
        domain=self.domain,
        domain_specified=True,
        domain_initial_dot=False,
        path='/',
        path_specified=True,
        secure=False,
        expires=None,
        discard=False,
        comment=None,
        comment_url=None,
        rest={})
    self._session.cookies.set_cookie(cookie)
    # v2 doesn't need this extra step, just the cookie:
    if self.api_path == API_PATH_V1:
        response = self._session.put(
            url=self._resolve_url('/a/session/validate'),
            timeout=self.timeout)
        self.log_request(response)
        return response.text
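# Since self._session is evidently a requests.Session, the same cookie can
# also be built with requests' own helper rather than spelling out every
# Cookie field. A sketch with placeholder values, not the client's actual API:
import requests

session = requests.Session()
session.cookies.set_cookie(requests.cookies.create_cookie(
    name='openx3_access_token', value='TOKEN',
    domain='example.com', path='/'))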
def get_cookie_opener(gs_username, gs_token, gs_toolname=None):
    """ Create a GenomeSpace cookie opener """
    cj = http_cookiejar.CookieJar()
    for cookie_name, cookie_value in [('gs-token', gs_token),
                                      ('gs-username', gs_username)]:
        # create a super-cookie, valid for all domains
        cookie = http_cookiejar.Cookie(
            version=0,
            name=cookie_name,
            value=cookie_value,
            port=None,
            port_specified=False,
            domain='',
            domain_specified=False,
            domain_initial_dot=False,
            path='/',
            path_specified=True,
            secure=False,
            expires=None,
            discard=True,
            comment=None,
            comment_url=None,
            rest={'HttpOnly': None},
            rfc2109=False)
        cj.set_cookie(cookie)
    cookie_opener = build_opener(HTTPCookieProcessor(cj))
    cookie_opener.addheaders.append(
        ('gs-toolname', gs_toolname or DEFAULT_GENOMESPACE_TOOLNAME))
    return cookie_opener
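# Hypothetical usage of the opener above; the credentials and URL are
# placeholders, not part of the GenomeSpace client:
opener = get_cookie_opener('alice', 'token-123', gs_toolname='mytool')
response = opener.open('https://example.org/datamanager/v1.0/file')
data = response.read()
response.close()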
def set_cookie(self, name, value):
    """
    Sets a cookie to be passed through with requests.
    """
    value = escape_cookie_value(value)
    cookie = http_cookiejar.Cookie(
        version=0,
        name=name,
        value=value,
        port=None,
        port_specified=False,
        domain='.localhost',
        domain_specified=True,
        domain_initial_dot=False,
        path='/',
        path_specified=True,
        secure=False,
        expires=None,
        discard=False,
        comment=None,
        comment_url=None,
        rest=None,
    )
    self.cookiejar.set_cookie(cookie)
def cookie(url, name, value, expires=None):
    '''Return a new Cookie using a slightly more friendly API than that
    provided by six.moves.http_cookiejar

    @param url The URL path of the cookie {str}
    @param name The cookie name {str}
    @param value The cookie value {str}
    @param expires The expiry time of the cookie {datetime}. If provided,
        it must be a naive timestamp in UTC.
    '''
    u = urlparse(url)
    domain = u.hostname or u.netloc
    port = str(u.port) if u.port is not None else None
    secure = u.scheme == 'https'
    if expires is not None:
        if expires.tzinfo is not None:
            raise ValueError('Cookie expiration must be a naive datetime')
        # Convert the naive UTC datetime to seconds since the epoch.
        expires = (expires - datetime(1970, 1, 1)).total_seconds()
    return http_cookiejar.Cookie(
        version=0,
        name=name,
        value=value,
        port=port,
        port_specified=port is not None,
        domain=domain,
        domain_specified=True,
        domain_initial_dot=False,
        path=u.path,
        path_specified=True,
        secure=secure,
        expires=expires,
        discard=False,
        comment=None,
        comment_url=None,
        rest=None,
        rfc2109=False,
    )
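# Example use of the helper above; the jar, URL, and expiry here are
# illustrative:
from datetime import datetime, timedelta
from six.moves import http_cookiejar

jar = http_cookiejar.CookieJar()
expiry = datetime.utcnow() + timedelta(hours=1)  # naive UTC, as required
jar.set_cookie(cookie('https://example.com/app', 'sid', 'abc', expires=expiry))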
def _make_cookies(base_url, cookies, cj):
    domain = urllib_parse.urlsplit(base_url).hostname
    for key in cookies:
        # Keep only the value itself; strip any attributes after ';'.
        c = http_cookiejar.Cookie(
            0, key, str(cookies[key].split(';')[0]),
            port=None, port_specified=False,
            domain=domain, domain_specified=True, domain_initial_dot=False,
            path='/', path_specified=True,
            secure=False, expires=None, discard=False,
            comment=None, comment_url=None, rest={})
        cj.set_cookie(c)
    return cj
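# Hypothetical call; since the helper drops everything after ';' in each
# value, raw header-ish values are safe to pass:
cj = http_cookiejar.CookieJar()
cj = _make_cookies('https://example.com/page',
                   {'sid': 'abc; Path=/', 'lang': 'en'}, cj)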
def cookie(url, name, value, expires=None):
    '''Return a new Cookie using a slightly more friendly API than that
    provided by six.moves.http_cookiejar

    @param url The URL path of the cookie {str}
    @param name The cookie name {str}
    @param value The cookie value {str}
    @param expires The expiry time of the cookie {datetime}
    '''
    u = urlparse(url)
    domain = u.hostname or u.netloc
    port = str(u.port) if u.port is not None else None
    secure = u.scheme == 'https'
    if expires is not None:
        # Note: strftime("%s") is a platform-specific (glibc) extension that
        # formats seconds since the epoch in local time; it is not portable.
        expires = expires.strftime("%s")
    return http_cookiejar.Cookie(
        version=0,
        name=name,
        value=value,
        port=port,
        port_specified=port is not None,
        domain=domain,
        domain_specified=True,
        domain_initial_dot=False,
        path=u.path,
        path_specified=True,
        secure=secure,
        expires=expires,
        discard=False,
        comment=None,
        comment_url=None,
        rest=None,
        rfc2109=False,
    )
def _add_cookie(self, cookie_dict):
    # We don't use self.app.set_cookie since it has undesirable behaviour
    # with domain and value fields that causes issues.
    value = cookie_dict['value']
    if six.PY2:
        value = value.encode('utf-8')
    cookie = http_cookiejar.Cookie(
        version=0,
        name=cookie_dict['name'],
        value=value,
        port=None,
        port_specified=False,
        domain='localhost.local',
        domain_specified=True,
        domain_initial_dot=False,
        path='/',
        path_specified=True,
        secure=False,
        expires=None,
        discard=False,
        comment=None,
        comment_url=None,
        rest=None)
    self.app.cookiejar.set_cookie(cookie)
def set_cookie(cookiejar, kaka):
    """Places a cookie (a cookielib.Cookie based on a set-cookie header
    line) in the cookie jar. Always chooses the shortest expires time.

    :param cookiejar:
    :param kaka: Cookie
    """

    # default rfc2109=False
    # max-age, httponly
    for cookie_name, morsel in kaka.items():
        std_attr = ATTRS.copy()
        std_attr["name"] = cookie_name
        _tmp = morsel.coded_value
        if _tmp.startswith('"') and _tmp.endswith('"'):
            std_attr["value"] = _tmp[1:-1]
        else:
            std_attr["value"] = _tmp

        std_attr["version"] = 0
        attr = ""
        # copy attributes that have values
        try:
            for attr in morsel.keys():
                if attr in ATTRS:
                    if morsel[attr]:
                        if attr == "expires":
                            std_attr[attr] = http2time(morsel[attr])
                        else:
                            std_attr[attr] = morsel[attr]
                elif attr == "max-age":
                    if morsel[attr]:
                        std_attr["expires"] = http2time(morsel[attr])
        except TimeFormatError:
            # Ignore cookie
            logger.info(
                "Time format error on %s parameter in received cookie" % (
                    attr, ))
            continue

        for att, spec in PAIRS.items():
            if std_attr[att]:
                std_attr[spec] = True

        if std_attr["domain"] and std_attr["domain"].startswith("."):
            std_attr["domain_initial_dot"] = True

        # Morsel values are strings, so compare against "0" (the original
        # identity check `is 0` could never match).
        if morsel["max-age"] == "0":
            try:
                cookiejar.clear(domain=std_attr["domain"],
                                path=std_attr["path"],
                                name=std_attr["name"])
            except ValueError:
                pass
        else:
            # Fix for Microsoft cookie error
            if "version" in std_attr:
                try:
                    std_attr["version"] = std_attr["version"].split(",")[0]
                except (TypeError, AttributeError):
                    pass

            new_cookie = cookielib.Cookie(**std_attr)
            cookiejar.set_cookie(new_cookie)
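# The kaka argument is a SimpleCookie parsed from a Set-Cookie header. A
# sketch of feeding one in (ATTRS and PAIRS come from the surrounding
# module; the header values below are illustrative):
from six.moves import http_cookiejar
from six.moves.http_cookies import SimpleCookie

jar = http_cookiejar.CookieJar()
kaka = SimpleCookie('sid=abc; Path=/; Domain=.example.com')
set_cookie(jar, kaka)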
def set_cookie(self, kaka, request):
    """Stores a http_cookiejar.Cookie, built from a set-cookie header
    line, in the cookie jar."""
    if not kaka:
        return

    part = urlparse(request.url)
    _domain = part.hostname
    logger.debug("%s: '%s'", _domain, kaka)

    for cookie_name, morsel in kaka.items():
        std_attr = ATTRS.copy()
        std_attr["name"] = cookie_name
        _tmp = morsel.coded_value
        if _tmp.startswith('"') and _tmp.endswith('"'):
            std_attr["value"] = _tmp[1:-1]
        else:
            std_attr["value"] = _tmp

        std_attr["version"] = 0
        # copy attributes that have values
        for attr in morsel.keys():
            if attr in ATTRS:
                if morsel[attr]:
                    if attr == "expires":
                        std_attr[attr] = _since_epoch(morsel[attr])
                    elif attr == "path":
                        # Strip a trailing comma some servers append.
                        if morsel[attr].endswith(","):
                            std_attr[attr] = morsel[attr][:-1]
                        else:
                            std_attr[attr] = morsel[attr]
                    else:
                        std_attr[attr] = morsel[attr]
            elif attr == "max-age":
                if morsel["max-age"]:
                    std_attr["expires"] = time.time() + int(morsel["max-age"])

        for att, item in PAIRS.items():
            if std_attr[att]:
                std_attr[item] = True

        if std_attr["domain"]:
            if std_attr["domain"].startswith("."):
                std_attr["domain_initial_dot"] = True
        else:
            # No domain attribute: fall back to the request's hostname.
            std_attr["domain"] = _domain
            std_attr["domain_specified"] = True

        # Morsel values are strings, so compare against "0".
        if morsel["max-age"] == "0":
            try:
                self.cookiejar.clear(domain=std_attr["domain"],
                                     path=std_attr["path"],
                                     name=std_attr["name"])
            except ValueError:
                pass
        elif std_attr["expires"] and std_attr["expires"] < utc_now():
            # Already expired; remove any stored copy instead of adding it.
            try:
                self.cookiejar.clear(domain=std_attr["domain"],
                                     path=std_attr["path"],
                                     name=std_attr["name"])
            except ValueError:
                pass
        else:
            new_cookie = http_cookiejar.Cookie(**std_attr)
            self.cookiejar.set_cookie(new_cookie)
def getRegexParsed(regexs, url, cookieJar=None, forCookieJarOnly=False,
                   recursiveCall=False, cachedPages={}, rawPost=False,
                   cookie_jar_file=None):
    # 0,1,2 = URL, regexOnly, CookieJarOnly
    # Note: the mutable default for cachedPages doubles as a page cache
    # shared across top-level calls.
    doRegexs = re.compile(r'\$doregex\[([^\]]*)\]').findall(url)
    setresolved = True
    for k in doRegexs:
        if k in regexs:
            m = regexs[k]
            cookieJarParam = False
            if 'cookiejar' in m:  # so either create or reuse existing jar
                cookieJarParam = m['cookiejar']
                if '$doregex' in cookieJarParam:
                    cookieJar = getRegexParsed(regexs, m['cookiejar'],
                                               cookieJar, True, True,
                                               cachedPages)
                cookieJarParam = True
            if cookieJarParam:
                if cookieJar is None:
                    cookie_jar_file = None
                    if 'open[' in m['cookiejar']:
                        cookie_jar_file = m['cookiejar'].split(
                            'open[')[1].split(']')[0]
                    cookieJar = getCookieJar(cookie_jar_file)
                    if cookie_jar_file:
                        saveCookieJar(cookieJar, cookie_jar_file)
                elif 'save[' in m['cookiejar']:
                    cookie_jar_file = m['cookiejar'].split(
                        'save[')[1].split(']')[0]
                    complete_path = os.path.join(profile, cookie_jar_file)
                    saveCookieJar(cookieJar, cookie_jar_file)

            if m['page'] and '$doregex' in m['page']:
                pg = getRegexParsed(regexs, m['page'], cookieJar,
                                    recursiveCall=True,
                                    cachedPages=cachedPages)
                if len(pg) == 0:
                    pg = 'http://regexfailed'
                m['page'] = pg

            if 'setcookie' in m and m['setcookie'] and '$doregex' in m['setcookie']:
                m['setcookie'] = getRegexParsed(regexs, m['setcookie'],
                                                cookieJar, recursiveCall=True,
                                                cachedPages=cachedPages)
            if 'appendcookie' in m and m['appendcookie'] and '$doregex' in m['appendcookie']:
                m['appendcookie'] = getRegexParsed(regexs, m['appendcookie'],
                                                   cookieJar,
                                                   recursiveCall=True,
                                                   cachedPages=cachedPages)
            if 'post' in m and '$doregex' in m['post']:
                m['post'] = getRegexParsed(regexs, m['post'], cookieJar,
                                           recursiveCall=True,
                                           cachedPages=cachedPages)
            if 'rawpost' in m and '$doregex' in m['rawpost']:
                m['rawpost'] = getRegexParsed(regexs, m['rawpost'], cookieJar,
                                              recursiveCall=True,
                                              cachedPages=cachedPages,
                                              rawPost=True)
            if 'rawpost' in m and '$epoctime$' in m['rawpost']:
                m['rawpost'] = m['rawpost'].replace('$epoctime$', getEpocTime())
            if 'rawpost' in m and '$epoctime2$' in m['rawpost']:
                m['rawpost'] = m['rawpost'].replace('$epoctime2$', getEpocTime2())

            link = ''
            if m['page'] and m['page'] in cachedPages and 'ignorecache' not in m and not forCookieJarOnly:
                link = cachedPages[m['page']]
            else:
                if m['page'] and m['page'].startswith('http'):
                    if '$epoctime$' in m['page']:
                        m['page'] = m['page'].replace('$epoctime$', getEpocTime())
                    if '$epoctime2$' in m['page']:
                        m['page'] = m['page'].replace('$epoctime2$', getEpocTime2())
                    page_split = m['page'].split('|')
                    pageUrl = page_split[0]
                    header_in_page = None
                    if len(page_split) > 1:
                        header_in_page = page_split[1]

                    # Remember the ambient proxies so they can be restored
                    # after a per-request proxy override.
                    current_proxies = urllib_request.ProxyHandler(
                        urllib_request.getproxies())

                    req = urllib_request.Request(pageUrl)
                    if 'proxy' in m:
                        proxytouse = m['proxy']
                        if pageUrl[:5] == "https":
                            proxy = urllib_request.ProxyHandler(
                                {'https': proxytouse})
                        else:
                            proxy = urllib_request.ProxyHandler(
                                {'http': proxytouse})
                        opener = urllib_request.build_opener(proxy)
                        urllib_request.install_opener(opener)

                    req.add_header(
                        'User-Agent',
                        'Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/14.0.1'
                    )
                    proxytouse = None

                    if 'referer' in m:
                        req.add_header('Referer', m['referer'])
                    if 'accept' in m:
                        req.add_header('Accept', m['accept'])
                    if 'agent' in m:
                        req.add_header('User-agent', m['agent'])
                    if 'x-req' in m:
                        req.add_header('X-Requested-With', m['x-req'])
                    if 'x-addr' in m:
                        req.add_header('x-addr', m['x-addr'])
                    if 'x-forward' in m:
                        req.add_header('X-Forwarded-For', m['x-forward'])
                    if 'setcookie' in m:
                        req.add_header('Cookie', m['setcookie'])
                    if 'appendcookie' in m:
                        # Parse "domain:name=value" pairs and append them to
                        # the cookie jar.
                        cookiestoApend = m['appendcookie'].split(';')
                        for h in cookiestoApend:
                            n, v = h.split('=')
                            w, n = n.split(':')
                            ck = http_cookiejar.Cookie(
                                version=0, name=n, value=v, port=None,
                                port_specified=False, domain=w,
                                domain_specified=False,
                                domain_initial_dot=False, path='/',
                                path_specified=True, secure=False,
                                expires=None, discard=True, comment=None,
                                comment_url=None, rest={'HttpOnly': None},
                                rfc2109=False)
                            cookieJar.set_cookie(ck)
                    if 'origin' in m:
                        req.add_header('Origin', m['origin'])
                    if header_in_page:
                        header_in_page = header_in_page.split('&')
                        for h in header_in_page:
                            n, v = h.split('=')
                            req.add_header(n, v)

                    if cookieJar is not None:
                        cookie_handler = urllib_request.HTTPCookieProcessor(
                            cookieJar)
                        opener = urllib_request.build_opener(
                            cookie_handler,
                            urllib_request.HTTPBasicAuthHandler(),
                            urllib_request.HTTPHandler())
                        opener = urllib_request.install_opener(opener)
                        if 'noredirect' in m:
                            opener = urllib_request.build_opener(
                                cookie_handler, NoRedirection,
                                urllib_request.HTTPBasicAuthHandler(),
                                urllib_request.HTTPHandler())
                            opener = urllib_request.install_opener(opener)
                    elif 'noredirect' in m:
                        opener = urllib_request.build_opener(
                            NoRedirection,
                            urllib_request.HTTPBasicAuthHandler(),
                            urllib_request.HTTPHandler())
                        opener = urllib_request.install_opener(opener)

                    if 'connection' in m:
                        from keepalive import HTTPHandler
                        keepalive_handler = HTTPHandler()
                        opener = urllib_request.build_opener(keepalive_handler)
                        urllib_request.install_opener(opener)

                    post = None
                    if 'post' in m:
                        # "name:value,name:value" pairs become form data.
                        postData = m['post']
                        splitpost = postData.split(',')
                        post = {}
                        for p in splitpost:
                            n = p.split(':')[0]
                            v = p.split(':')[1]
                            post[n] = v
                        post = urllib_parse.urlencode(post)
                    if 'rawpost' in m:
                        post = m['rawpost']

                    link = ''
                    try:
                        if post:
                            response = urllib_request.urlopen(req, post)
                        else:
                            response = urllib_request.urlopen(req)
                        if response.info().get('Content-Encoding') == 'gzip':
                            import gzip
                            buf = six.BytesIO(response.read())
                            f = gzip.GzipFile(fileobj=buf)
                            link = f.read()
                        else:
                            link = response.read()
                        if 'proxy' in m and current_proxies is not None:
                            # Restore the ambient proxy configuration.
                            urllib_request.install_opener(
                                urllib_request.build_opener(current_proxies))
                        link = javascriptUnEscape(link)
                        if 'includeheaders' in m:
                            link += '$$HEADERS_START$$:'
                            for b in response.headers:
                                link += b + ':' + response.headers.get(b) + '\n'
                            link += '$$HEADERS_END$$:'
                        response.close()
                    except:
                        pass
                    cachedPages[m['page']] = link
                    if forCookieJarOnly:
                        return cookieJar  # do nothing
                elif m['page'] and not m['page'].startswith('http'):
                    if m['page'].startswith('$pyFunction:'):
                        val = doEval(m['page'].split('$pyFunction:')[1], '',
                                     cookieJar, m)
                        if forCookieJarOnly:
                            return cookieJar  # do nothing
                        link = val
                        link = javascriptUnEscape(link)
                    else:
                        link = m['page']

            if '$doregex' in m['expres']:
                m['expres'] = getRegexParsed(regexs, m['expres'], cookieJar,
                                             recursiveCall=True,
                                             cachedPages=cachedPages)
            if not m['expres'] == '':
                if '$LiveStreamCaptcha' in m['expres']:
                    val = askCaptcha(m, link, cookieJar)
                    url = url.replace("$doregex[" + k + "]", val)
                elif m['expres'].startswith('$pyFunction:') or '#$pyFunction' in m['expres']:
                    val = ''
                    if m['expres'].startswith('$pyFunction:'):
                        val = doEval(m['expres'].split('$pyFunction:')[1],
                                     link, cookieJar, m)
                    else:
                        val = doEvalFunction(m['expres'], link, cookieJar, m)
                    if 'ActivateWindow' in m['expres']:
                        return
                    if forCookieJarOnly:
                        return cookieJar  # do nothing
                    if 'listrepeat' in m:
                        listrepeat = m['listrepeat']
                        return listrepeat, eval(val), m, regexs, cookieJar
                    try:
                        url = url.replace(u"$doregex[" + k + "]", val)
                    except:
                        url = url.replace("$doregex[" + k + "]",
                                          six.ensure_text(val))
                else:
                    if 'listrepeat' in m:
                        listrepeat = m['listrepeat']
                        ret = re.findall(m['expres'], link)
                        return listrepeat, ret, m, regexs
                    val = ''
                    if not link == '':
                        reg = re.compile(m['expres']).search(link)
                        try:
                            val = reg.group(1).strip()
                        except:
                            traceback.print_exc()
                    elif m['page'] == '' or m['page'] is None:
                        val = m['expres']
                    if rawPost:
                        val = urllib_parse.quote_plus(val)
                    if 'htmlunescape' in m:
                        import HTMLParser
                        val = HTMLParser.HTMLParser().unescape(val)
                    try:
                        url = url.replace("$doregex[" + k + "]", val)
                    except:
                        url = url.replace("$doregex[" + k + "]",
                                          six.ensure_text(val))
            else:
                url = url.replace("$doregex[" + k + "]", '')
    if '$epoctime$' in url:
        url = url.replace('$epoctime$', getEpocTime())
    if '$epoctime2$' in url:
        url = url.replace('$epoctime2$', getEpocTime2())
    if '$GUID$' in url:
        import uuid
        url = url.replace('$GUID$', str(uuid.uuid1()).upper())
    if '$get_cookies$' in url:
        url = url.replace('$get_cookies$', getCookiesString(cookieJar))
    if recursiveCall:
        return url
    if url == "":
        return
    else:
        return url, setresolved
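# Illustrative invocation of getRegexParsed; the regex-definition dict
# mirrors the '$doregex[name]' placeholder convention the function resolves,
# and the URLs and pattern below are placeholders:
regexs = {
    'token': {
        'page': 'http://example.com/player',
        'expres': r"token='([^']+)'",
    },
}
result = getRegexParsed(regexs, 'http://example.com/stream?t=$doregex[token]')
# On success, a non-recursive call returns (resolved_url, setresolved).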