def download_torrent(self, url):
    """Download the .torrent behind *url* into a temporary file, setting the
    tracker's bb_dl cookie first; exceptions are appended to self.exc_log."""
    try:
        self._sign_in()
        cookie = cookielib.Cookie(version=0, name='bb_dl', value=url.split('=')[-1],
                                  port=None, port_specified=False,
                                  domain=self.domain, domain_specified=True, domain_initial_dot=True,
                                  path='/forum/', path_specified=True,
                                  secure=False, expires=int(time()) + 5 * 60,
                                  discard=False, comment=None, comment_url=None,
                                  rest={'http_only': None})
        self._cj.set_cookie(cookie)
        data = self._opener.open(url).read()
        with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
            tmpfile.write(data)
            name = tmpfile.name
        print name, url
    except Exception:
        try:
            with open(self.exc_log, 'a') as fo:
                fo.write(traceback.format_exc())
        except Exception:
            pass

def login(self):
    '''Log in to GoogleReader.

    You must call identify before calling this. You must call this before
    anything else that accesses GoogleReader data.'''
    if self._login is None or self._passwd is None:
        return
    data = {
        'service': 'reader',
        'Email': self._login,
        'Passwd': self._passwd,
        'source': CONST.AGENT,
        'continue': 'http://www.google.com/',
    }
    sidinfo = self._web.get(CONST.URI_LOGIN, data)
    self._sid = None
    SID_ID = 'SID='
    if SID_ID in sidinfo:
        pos_beg = sidinfo.find(SID_ID)
        pos_end = sidinfo.find('\n', pos_beg)
        self._sid = sidinfo[pos_beg + len(SID_ID):pos_end]
    if self._sid is not None:
        cookie = cookielib.Cookie(version=0, name='SID', value=self._sid,
                                  port=None, port_specified=False,
                                  domain='.google.com', domain_specified=True, domain_initial_dot=True,
                                  path='/', path_specified=True,
                                  secure=False, expires=1600000000,
                                  discard=False, comment=None, comment_url=None, rest={})
        self._web.cookies().set_cookie(cookie)
        return True

def restore_cookie(settings, cookie_name):
    cookie_str = settings.getSetting(get_setting_name(cookie_name))
    log("restore_cookie %s from %s: %s" % (get_setting_name(cookie_name), settings, cookie_str))
    if cookie_str:
        cookie_values = json.loads(cookie_str)
        result = dict(
            version=0,
            name=cookie_name,
            value=None,
            port=None,
            domain='',
            path='/',
            secure=False,
            expires=None,
            discard=True,
            comment=None,
            comment_url=None,
            rest={'HttpOnly': None},
            rfc2109=False,
        )
        for idx, field_name in enumerate(cookie_fields):
            val = cookie_values[idx]
            result[field_name] = val
        result['port_specified'] = bool(result['port'])
        result['domain_specified'] = bool(result['domain'])
        result['domain_initial_dot'] = result['domain'].startswith('.')
        result['path_specified'] = bool(result['path'])
        return cookielib.Cookie(**result)
    else:
        return None

def login(username, password):
    cj = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    #req = urllib2.Request('https://www.coursera.org/account/signin')
    #req.add_header('Host', "www.coursera.org")
    #opener.open(req)
    csrftoken = makeCSRFToken()
    #print "%s=%s" % (CSRFT_TOKEN_COOKIE_NAME, csrftoken)
    c = cookielib.Cookie(None, CSRFT_TOKEN_COOKIE_NAME, csrftoken, None, None,
                         "", None, None, "/", None, True, None, None, None,
                         None, None, None)
    cj.set_cookie(c)
    req = makeLoginRequest(username, password, csrftoken)
    response = opener.open(req)
    logIn_resp = response.read()
    logIn_resp_dict = json.loads(logIn_resp)
    #print logIn_resp_dict
    external_id = str(logIn_resp_dict["external_id"])
    public_id = str(logIn_resp_dict["id"])
    cj.clear("", "/", CSRFT_TOKEN_COOKIE_NAME)
    #cj.clear("www.coursera.org", "/", "maestro_login")
    #cj.clear("www.coursera.org", "/", "sessionid")
    return external_id, public_id, saveCJ(cj)

def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    result = dict(
        version=0,
        name=name,
        value=value,
        port=None,
        domain='',
        path='/',
        secure=False,
        expires=None,
        discard=True,
        comment=None,
        comment_url=None,
        rest={'HttpOnly': None},
        rfc2109=False,
    )
    badargs = set(kwargs) - set(result)
    if badargs:
        err = 'create_cookie() got unexpected keyword arguments: %s'
        raise TypeError(err % list(badargs))
    result.update(kwargs)
    result['port_specified'] = bool(result['port'])
    result['domain_specified'] = bool(result['domain'])
    result['domain_initial_dot'] = result['domain'].startswith('.')
    result['path_specified'] = bool(result['path'])
    return cookielib.Cookie(**result)

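# A minimal usage sketch (illustrative names/values, not from the original source):
# create_cookie() above defaults to a domain-less "supercookie"; passing domain/path
# narrows where the cookie will be sent.
def _example_create_cookie_usage():
    jar = cookielib.CookieJar()
    jar.set_cookie(create_cookie('sessionid', 'abc123', domain='.example.com', path='/'))
    return jar
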
def scrapVideoLink(self, url):
    #xbmc.log( logmsg + url )
    req = urllib2.Request(url)
    req.add_header('User-Agent', USER_AGENT)
    # NOTE: cj and opener are expected to exist at module scope in the original add-on.
    ck = cookielib.Cookie(version=0, name='hq', value='1',
                          port=None, port_specified=False,
                          domain='m.mtvkatsomo.fi', domain_specified=False,
                          domain_initial_dot=False, path='/', path_specified=True,
                          secure=False, expires=None, discard=True,
                          comment=None, comment_url=None,
                          rest={'HttpOnly': None}, rfc2109=False)
    cj.set_cookie(ck)
    response = opener.open(req)
    ret = common.parseDOM(response.read(), "source", {'type': 'video/mp4'}, ret="src")
    if len(ret) > 0:
        return ret[0]
    else:
        return None

def _set_cookies(self, base_url, cookies):
    domain = urlparse.urlsplit(base_url).hostname
    cj = cookielib.LWPCookieJar(COOKIEFILE)
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    urllib2.install_opener(opener)
    for key in cookies:
        c = cookielib.Cookie(0, key, cookies[key],
                             port=None, port_specified=False,
                             domain=domain, domain_specified=True, domain_initial_dot=False,
                             path='/', path_specified=True, secure=False,
                             expires=None, discard=False, comment=None,
                             comment_url=None, rest={})
        cj.set_cookie(c)
    try:
        cj.load(ignore_discard=True)
    except:
        pass
    return cj

def get(self, facid, classnumber, date):
    # The incoming date appears to be dd-mm-yyyy; rebuild it as yyyy-mm-dd.
    year = date[6:]
    month = date[3:5]
    d = date[:2]
    date = year + '-' + month + '-' + d
    cookienamekey = facid + "name"
    self.response.headers['Access-Control-Allow-Origin'] = '*'
    cookievaluekey = facid + "value"
    if not memcache.get(cookienamekey):
        self.response.write("Timed Out")
    else:
        thevalue = memcache.get(cookievaluekey)
        thecookiename = memcache.get(cookienamekey)
        if memcache.get(cookienamekey):
            br1 = mechanize.Browser()
            ck = cookielib.Cookie(version=0, name=thecookiename, value=thevalue,
                                  port=None, port_specified=False,
                                  domain='prerithelp.net76.net', domain_specified=False,
                                  domain_initial_dot=False, path='/', path_specified=True,
                                  secure=False, expires=None, discard=True,
                                  comment=None, comment_url=None,
                                  rest={'HttpOnly': None}, rfc2109=False)
            newcj = cookielib.CookieJar()
            newcj.set_cookie(ck)
            br1.set_cookiejar(newcj)
            br1.set_handle_equiv(True)
            br1.set_handle_redirect(True)
            br1.set_handle_referer(True)
            br1.set_handle_robots(False)
            ttUrl = br1.open('http://prerithelp.net76.net/facademics_test/attendjson.php?classnumber=' + classnumber + '&date=' + date)
            if ttUrl.geturl() == "http://prerithelp.net76.net/facademics_test/attendjson.php?classnumber=" + classnumber + '&date=' + date:
                self.response.write(ttUrl.read())
            else:
                self.response.write('nosession')

def build_opener_with_chrome_cookies(domain=None):
    # This approach also failed.
    cookie_file_path = os.path.join(os.environ['LOCALAPPDATA'],
                                    r'Google\Chrome\User Data\Default\Cookies')
    if not os.path.exists(cookie_file_path):
        raise Exception('Cookies file does not exist!')
    conn = sqlite3.connect(cookie_file_path)
    sql = 'select host_key, name, value, path from cookies'
    if domain:
        sql += ' where host_key like "%{}%"'.format(domain)
    cookiejar = cookielib.CookieJar()  # No cookies stored yet
    for row in conn.execute(sql):
        print(row)
        cookie_item = cookielib.Cookie(
            version=0, name=row[1], value=row[2],
            port=None, port_specified=None,
            domain=row[0], domain_specified=None, domain_initial_dot=None,
            path=row[3], path_specified=None,
            secure=None, expires=None, discard=None,
            comment=None, comment_url=None,
            rest=None, rfc2109=False,
        )
        cookiejar.set_cookie(cookie_item)  # Apply each cookie_item to cookiejar
    conn.close()
    return urllib2.build_opener(urllib2.HTTPCookieProcessor(cookiejar))  # Return opener

def setup_opener(cookie=None, user=None, passwd=None):
    cj = cookielib.CookieJar()
    if cookie:
        sck = Cookie.SimpleCookie(cookie)
        for name in sck:
            ck = cookielib.Cookie(version=0, name=name, value=sck[name].value,
                                  port=None, port_specified=False,
                                  domain='.baidu.com', domain_specified=False,
                                  domain_initial_dot=False, path='/', path_specified=True,
                                  secure=False, expires=None, discard=True,
                                  comment=None, comment_url=None,
                                  rest={'HttpOnly': None}, rfc2109=False)
            cj.set_cookie(ck)
    elif user and passwd:
        raise NotImplementedError('login with username/password is not implemented')
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    urllib2.install_opener(opener)

def get(self, facid, classnumber, date, values):
    date = date.lower()
    # The incoming date looks like dd-april-yyyy or dd-may-yyyy; rebuild it as yyyy-mm-dd.
    month = date[3:4]
    if month == 'a':
        year = date[9:13]
        month = '04'
    else:
        month = '05'
        year = date[7:11]
    d = date[:2]
    date = year + '-' + month + '-' + d
    cookienamekey = facid + "name"
    self.response.headers['Access-Control-Allow-Origin'] = '*'
    cookievaluekey = facid + "value"
    if not memcache.get(cookienamekey):
        self.response.write("Timed Out")
    else:
        thevalue = memcache.get(cookievaluekey)
        thecookiename = memcache.get(cookienamekey)
        if memcache.get(cookienamekey):
            br1 = mechanize.Browser()
            ck = cookielib.Cookie(version=0, name=thecookiename, value=thevalue,
                                  port=None, port_specified=False,
                                  domain='prerithelp.net76.net', domain_specified=False,
                                  domain_initial_dot=False, path='/', path_specified=True,
                                  secure=False, expires=None, discard=True,
                                  comment=None, comment_url=None,
                                  rest={'HttpOnly': None}, rfc2109=False)
            newcj = cookielib.CookieJar()
            newcj.set_cookie(ck)
            br1.set_cookiejar(newcj)
            br1.set_handle_equiv(True)
            br1.set_handle_redirect(True)
            br1.set_handle_referer(True)
            br1.set_handle_robots(False)
            homeUrl = "http://prerithelp.net76.net/facademics_test/welcome_user.php"
            attUrl = "http://prerithelp.net76.net/facademics_test/attend.php?classnumber=" + classnumber + "&date=" + date
            opened = br1.open(attUrl)
            if opened.geturl() == attUrl:
                br1.select_form('tek_attend')
                postData = list(values)
                key = ''
                value = ''
                pD = ''
                # The loop below accumulates a control (select list) name character
                # by character; a 'p' or 'a' marker ends the name and sets that
                # control to "present" or "absent".
                for p in postData:
                    if p == 'p':
                        key = key.upper()
                        br1.find_control(name=key).value = ["present"]
                        key = ''
                    elif p == 'a':
                        key = key.upper()
                        br1.find_control(name=key).value = ["absent"]
                        key = ''
                    else:
                        key = key + p
                home = br1.submit()
                if home.geturl() == homeUrl:
                    self.response.write('s')
                else:
                    self.response.write('f')
            else:
                self.response.write('nosession')

def get_torrent(self, url, savelocation):
    torrent_id = dict(
        [part.split('=') for part in urlparse(url)[4].split('&')])['t']
    self.cookiejar.set_cookie(
        cookielib.Cookie(version=0, name='bb_dl', value=torrent_id,
                         port=None, port_specified=False,
                         domain='.rutracker.org', domain_specified=False,
                         domain_initial_dot=False, path='/', path_specified=True,
                         secure=False, expires=None, discard=True,
                         comment=None, comment_url=None,
                         rest={'HttpOnly': None}, rfc2109=False))
    downloadurl = 'http://dl.rutracker.org/forum/dl.php?t=' + torrent_id
    torrent_name = torrent_id + '.torrent'
    download_path = os.path.join(savelocation, torrent_name)
    try:
        page = self.opener.open(downloadurl)
        torrent = page.read()
        fp = open(download_path, 'wb')
        fp.write(torrent)
        fp.close()
    except Exception, e:
        logger.error('Error getting torrent: %s' % e)
        return False

def login(email, passwd):
    '''Log in via Google ClientLogin.'''
    post_data = {'service': "reader", 'Email': email, 'Passwd': passwd}
    ur = 'https://www.google.com/accounts/ClientLogin'
    responseData = opener.open(ur, urllib.urlencode(post_data)).read()
    if not responseData:
        return False

    def getArgs(data, name):
        name = name if name[-1] == '=' else name + '='
        b = data.find(name)
        e = data.find('\n', b)
        return data[b + len(name):e]

    sid = getArgs(responseData, 'SID=')    # responseData[0].split('=')[1]
    auth = getArgs(responseData, 'Auth=')  # responseData[2].split('=')[1]
    headers['Authorization'] = 'GoogleLogin auth=%s' % auth
    sidCookie = cookielib.Cookie(version=0, name='SID', value=sid,
                                 port=None, port_specified=False,
                                 domain='.google.com', domain_specified=False,
                                 domain_initial_dot=False, path='/', path_specified=True,
                                 secure=False, expires=1600000000, discard=True,
                                 comment=None, comment_url=None,
                                 rest={'HttpOnly': None}, rfc2109=False)
    cj.set_cookie(sidCookie)
    return True

def set_cookie(self, kaka, request):
    """Translate a Set-Cookie header (parsed into a SimpleCookie) into
    cookielib.Cookie instances and store them in the cookie jar."""
    # default rfc2109=False
    # max-age, httponly
    for cookie_name, morsel in kaka.items():
        std_attr = ATTRS.copy()
        std_attr["name"] = cookie_name
        _tmp = morsel.coded_value
        if _tmp.startswith('"') and _tmp.endswith('"'):
            std_attr["value"] = _tmp[1:-1]
        else:
            std_attr["value"] = _tmp
        std_attr["version"] = 0
        attr = ""
        # copy attributes that have values
        try:
            for attr in morsel.keys():
                if attr in ATTRS:
                    if morsel[attr]:
                        if attr == "expires":
                            std_attr[attr] = _since_epoch(morsel[attr])
                        else:
                            std_attr[attr] = morsel[attr]
                elif attr == "max-age":
                    if morsel[attr]:
                        std_attr["expires"] = _since_epoch(morsel[attr])
        except TimeFormatError:
            # Ignore cookie
            logger.info(
                "Time format error on %s parameter in received cookie" % (attr,))
            continue
        for att, spec in PAIRS.items():
            if std_attr[att]:
                std_attr[spec] = True
        if std_attr["domain"] and std_attr["domain"].startswith("."):
            std_attr["domain_initial_dot"] = True
        if morsel["max-age"] in (0, "0"):
            try:
                self.cookiejar.clear(domain=std_attr["domain"],
                                     path=std_attr["path"],
                                     name=std_attr["name"])
            except ValueError:
                pass
        else:
            # Fix for Microsoft cookie error
            if "version" in std_attr:
                try:
                    std_attr["version"] = std_attr["version"].split(",")[0]
                except (TypeError, AttributeError):
                    pass
            new_cookie = cookielib.Cookie(**std_attr)
            self.cookiejar.set_cookie(new_cookie)

def extractChapterUrlsAndMetadata(self):
    url = self.url
    logger.debug("URL: " + url)

    # portkey screws around with using a different URL to set the
    # cookie and it's a pain. So... cheat!
    if self.is_adult or self.getConfig("is_adult"):
        cookie = cl.Cookie(version=0, name='verify17', value='1',
                           port=None, port_specified=False,
                           domain=self.getSiteDomain(), domain_specified=False,
                           domain_initial_dot=False, path='/', path_specified=True,
                           secure=False, expires=time.time() + 10000,
                           discard=False, comment=None, comment_url=None,
                           rest={'HttpOnly': None}, rfc2109=False)
        self.get_cookiejar().set_cookie(cookie)

    try:
        data = self._fetchUrl(url)
    except urllib2.HTTPError, e:
        if e.code == 404:
            raise exceptions.StoryDoesNotExist(self.url)
        else:
            raise e

def do_login(form_data):
    login_url = 'https://cs.midea.com/c-css/signin'
    login_page = session.post(login_url, data=form_data, headers=headers)
    # print('login_page:%s' % login_page.text)
    # errorReport(str(login_page.text) + time.asctime(time.localtime(time.time())))
    # print(session.cookies)
    if login_page.json()['status']:
        token_cookie = cookielib.Cookie(
            name='loginToken',
            value=login_page.json()['content']['loginToken'],
            expires=None, secure=False, version=None,
            port=None, port_specified=False,
            domain='cs.midea.com', domain_specified=None, domain_initial_dot=None,
            path='/c-css/', path_specified=None,
            discard=None, comment=None, comment_url=None, rest=None)
        session.cookies.set_cookie(token_cookie)
        print('testtesttest')
        return session.cookies, login_page.text
    else:
        return False, login_page.text

def __init__(self):
    cj = cookielib.LWPCookieJar()
    self.br = mechanize.Browser()
    self.br.set_cookiejar(cj)
    ck = cookielib.Cookie(version=0, name='screen', value='1920-1080-1920-1040-1-20.74',
                          port=None, port_specified=False,
                          domain='m.tuenti.com', domain_specified=False, domain_initial_dot=False,
                          path='/', path_specified=True, secure=False, expires=None,
                          discard=True, comment=None, comment_url=None,
                          rest={'HttpOnly': None}, rfc2109=False)
    cj.set_cookie(ck)
    self.br.set_handle_equiv(True)
    self.br.set_handle_redirect(True)
    self.br.set_handle_referer(True)
    self.br.set_handle_robots(False)
    self.br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
    # self.br.addheaders = [('User-agent', 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0')]
    # self.br.addheaders = [('User-agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.2.8) Gecko/20100722 Firefox/3.6.8 GTB7.1 (.NET CLR 3.5.30729)')]
    self.id = ''

def __setup_sg_auth_and_proxy(sg):
    """
    Borrowed from the Shotgun Python API, setup urllib2 with a cookie for
    authentication on Shotgun instance.

    Looks up session token and sets that in a cookie in the :mod:`urllib2` handler.
    This is used internally for downloading attachments from the Shotgun server.

    :param sg: Shotgun API instance
    """
    # Importing this module locally to reduce clutter and facilitate clean up when/if this
    # functionality gets ported back into the Shotgun API.
    import cookielib

    sid = sg.get_session_token()
    cj = cookielib.LWPCookieJar()
    c = cookielib.Cookie('0', '_session_id', sid, None, False,
                         sg.config.server, False, False, "/", True,
                         False, None, True, None, None, {})
    cj.set_cookie(c)
    cookie_handler = urllib2.HTTPCookieProcessor(cj)
    if sg.config.proxy_handler:
        opener = urllib2.build_opener(sg.config.proxy_handler, cookie_handler)
    else:
        opener = urllib2.build_opener(cookie_handler)
    urllib2.install_opener(opener)

def load_ff_sessions(session_filename):
    cj = cookielib.CookieJar()
    if os.path.exists(session_filename):
        try:
            json_data = json.loads(open(session_filename, 'rb').read())
        except ValueError as e:
            print 'Error parsing session JSON:', str(e)
        else:
            for window in json_data.get('windows', []):
                for cookie in window.get('cookies', []):
                    import pprint
                    pprint.pprint(cookie)
                    c = cookielib.Cookie(
                        0, cookie.get('name', ''), cookie.get('value', ''),
                        None, False,
                        cookie.get('host', ''),
                        cookie.get('host', '').startswith('.'),
                        cookie.get('host', '').startswith('.'),
                        cookie.get('path', ''), False,
                        False,
                        str(int(time.time()) + 3600 * 24 * 7),
                        False, None, None, {})
                    cj.set_cookie(c)
    else:
        print 'Session filename does not exist:', session_filename
    return cj

def sflyCookie(name, data):
    """Build a .shutterfly.com cookie named *name*; dict values are flattened
    into a key:value&key:value string."""
    if type(data) == dict:
        value = ''
        for item in data:
            value += item + ':' + str(data[item]) + '&'
        value = value[:-1]
    else:
        value = data
    return cookielib.Cookie(version=0, name=name, value=value,
                            port=None, port_specified=False,
                            domain='.shutterfly.com', domain_specified=True, domain_initial_dot=True,
                            path='/', path_specified=True, secure=False,
                            expires=None, discard=True, comment=None,
                            comment_url=None, rest={}, rfc2109=False)

def __setitem__(self, name, value):
    if value is None:
        value = ''
    current_webtest = LooseVersion(
        pkg_resources.get_distribution('webtest').version)
    if current_webtest >= LooseVersion('2.0.16'):
        self.app.set_cookie(name, value)
    elif current_webtest >= LooseVersion('2.0.0'):
        # webtest 2.0.0 to 2.0.15 don't have a cookie setter interface
        # This cookie setting code is taken from webtest 2.0.16
        cookie = cookielib.Cookie(version=0, name=name, value=value,
                                  port=None, port_specified=False,
                                  domain='.localhost', domain_specified=True,
                                  domain_initial_dot=False, path='/', path_specified=True,
                                  secure=False, expires=None, discard=False,
                                  comment=None, comment_url=None, rest=None)
        self.app.cookiejar.set_cookie(cookie)
    else:
        self.app.cookies[name] = value

def getPage(self, baseUrl, addParams={}, post_data=None):
    tries = 0
    cUrl = ''
    while tries < 4:
        tries += 1
        if addParams == {}:
            addParams = dict(self.defaultParams)
        sts, data = self.cm.getPage(baseUrl, addParams, post_data)
        if not sts:
            return sts, data
        cUrl = self.cm.meta['url']
        if 'DDoS' in data:
            if tries == 1:
                rm(self.COOKIE_FILE)
                continue
            timestamp = time.time() * 1000
            jscode = ''
            tmp = ph.findall(data, ('<script', '>'), '</script>', flags=0)
            for item in tmp:
                if 'xhr.open' in item:
                    jscode = item
                    break
            js_params = [{'path': GetJSScriptFile('cinemaxx1.byte')}]
            js_params.append({'code': jscode})
            ret = js_execute_ext(js_params)
            if ret['sts'] and 0 == ret['code']:
                try:
                    tmp = ret['data'].split('\n', 1)
                    sleep_time = int(float(tmp[1]))
                    tmp = json_loads(tmp[0])
                    url = self.getFullUrl(tmp['1'], cUrl)
                    params = dict(addParams)
                    params['header'] = MergeDicts(self.HTTP_HEADER, {'Referer': cUrl})
                    sts2, data2 = self.cm.getPage(url, params)
                    if not sts2:
                        break
                    js_params = [{'path': GetJSScriptFile('cinemaxx2.byte')}]
                    js_params.append({'code': data2 + 'print(JSON.stringify(e2iobj));'})
                    ret = js_execute_ext(js_params)
                    if ret['sts'] and 0 == ret['code']:
                        cj = self.cm.getCookie(self.COOKIE_FILE)
                        for item in json_loads(ret['data'])['cookies']:
                            for cookieKey, cookieValue in item.iteritems():
                                cookieItem = cookielib.Cookie(version=0, name=cookieKey, value=cookieValue,
                                                              port=None, port_specified=False,
                                                              domain='.' + self.cm.getBaseUrl(cUrl, True),
                                                              domain_specified=True, domain_initial_dot=True,
                                                              path='/', path_specified=True, secure=False,
                                                              expires=time.time() + 3600 * 48, discard=True,
                                                              comment=None, comment_url=None,
                                                              rest={'HttpOnly': None}, rfc2109=False)
                                cj.set_cookie(cookieItem)
                        cj.save(self.COOKIE_FILE, ignore_discard=True)
                        sleep_time -= time.time() * 1000 - timestamp
                        if sleep_time > 0:
                            GetIPTVSleep().Sleep(int(math.ceil(sleep_time / 1000.0)))
                        continue
                    else:
                        break
                except Exception:
                    printExc()
        else:
            break
    if sts and cUrl:
        self.cm.meta['url'] = cUrl
    return sts, data

def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    """
    for k, v in cookie_dict.items():
        cookie = cookielib.Cookie(version=0, name=k, value=v,
                                  port=None, port_specified=False,
                                  domain='', domain_specified=False, domain_initial_dot=False,
                                  path='/', path_specified=True, secure=False,
                                  expires=None, discard=True, comment=None,
                                  comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
        # add cookie to cookiejar
        cj.set_cookie(cookie)
    return cj

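# A minimal usage sketch (illustrative names/values, not from the original source):
# seed a fresh CookieJar from a plain dict with add_dict_to_cookiejar() above and
# attach it to an urllib2 opener so the cookies ride along on later requests.
def _example_add_dict_to_cookiejar_usage():
    jar = add_dict_to_cookiejar(cookielib.CookieJar(),
                                {'csrftoken': 'abc123', 'sessionid': 'deadbeef'})
    return urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
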
def build_cookie(cookie, domain=""):
    if isinstance(cookie, basestring):
        cookie_s = cookie
    else:
        cookie_s = "; ".join(["%s=%s" % (k, v) for k, v in cookie.iteritems()])
    ck = Cookie.SimpleCookie(cookie_s)
    result = []
    for k in ck:
        v = ck[k]  # v is a Morsel object
        cookie_obj = cookielib.Cookie(name=v.key, value=v.value, version=0,
                                      port=None, port_specified=False,
                                      domain=domain, domain_specified=True,
                                      domain_initial_dot=True,
                                      path="/", path_specified=True,
                                      secure=False, expires=None, discard=False,
                                      comment=None, comment_url=None,
                                      rest={"HttpOnly": None}, rfc2109=False,
                                      )
        result.append(cookie_obj)
    return result

def set_cookie(app, key, value):
    """
    Sets a cookie on the TestApp 'app'.
    The WebTest API changed with version 2.0.16.

    :param app: A webtest.TestApp object
    """
    current_webtest = LooseVersion(
        pkg_resources.get_distribution('webtest').version)
    if current_webtest >= LooseVersion('2.0.16'):
        app.set_cookie(key, value)
    elif current_webtest >= LooseVersion('2.0.0'):
        # webtest 2.0.0 to 2.0.15 don't have a cookie setter interface
        # This cookie setting code is taken from webtest 2.0.16
        cookie = cookielib.Cookie(version=0, name=key, value=value,
                                  port=None, port_specified=False,
                                  domain='.localhost', domain_specified=True,
                                  domain_initial_dot=False, path='/', path_specified=True,
                                  secure=False, expires=None, discard=False,
                                  comment=None, comment_url=None, rest=None)
        app.cookiejar.set_cookie(cookie)
    else:
        app.cookies[key] = value

def _set_cookies(base_url, cookies):
    cookie_file = os.path.join(control.cookieDir, '%s_cookies.lwp' % shrink_host(base_url))
    #cookie_file = os.path.join('/home/mrknow/.kodi/userdata/addon_data/plugin.video.specto/Cookies', '%s_cookies.lwp' % shrink_host(base_url))
    #control.log('control.cookieDir: %s' % (control.cookieDir))
    cj = cookielib.LWPCookieJar(cookie_file)
    try:
        cj.load(ignore_discard=True)
    except:
        pass
    #control.log('Before Cookies: %s - %s' % (base_url, cookies_as_str(cj)))
    domain = urlparse.urlsplit(base_url).hostname
    for key in cookies:
        c = cookielib.Cookie(0, key, str(cookies[key]),
                             port=None, port_specified=False,
                             domain=domain, domain_specified=True, domain_initial_dot=False,
                             path='/', path_specified=True, secure=False,
                             expires=None, discard=False, comment=None,
                             comment_url=None, rest={})
        cj.set_cookie(c)
    cj.save(ignore_discard=True)
    #log_utils.log('After Cookies: %s - %s' % (self, scraper_utils.cookies_as_str(cj)), log_utils.LOGDEBUG)
    return cj

def set_cookie(self, key, value, domain=None, path=None):
    """Saves a Cookie instance into the underlying CookieJar instance. It will
    be submitted (as appropriate, based on the request's domain, path,
    security, etc...) along with any further requests.

    You can specify the domain and path the cookie should be valid for. These
    need to be specified if there have been no requests made yet with the
    current BeautifulScraper instance. Otherwise, they are optional.

    The cookies set this way never expire, and are never "SecureOnly".
    """
    if self._last_request is None and (domain is None or path is None):
        raise ValueError("You must specify a domain and path for a new cookie "
                         "if you haven't made a request (self.go()) yet.")
    if not domain:
        domain = ".%s" % urlparse(self._last_request.get_full_url())[1]
    if not path:
        path = urlparse(self._last_request.get_full_url())[2]
    self._cookiejar.set_cookie(cookielib.Cookie(
        0, key, value, None, False,
        domain, True, bool(domain[0] == '.'),
        path, True,
        secure=False, expires=None, discard=False,
        comment=None, comment_url=None, rest={}
    ))

def _set_cookies(base_url, cookies):
    cj = cookielib.LWPCookieJar(cookie_file)
    try:
        cj.load(ignore_discard=True)
    except:
        pass
    if kodi.get_setting('debug') == "true":
        print 'Before Cookies: %s' % (cookies_as_str(cj))
    domain = urlparse.urlsplit(base_url).hostname
    for key in cookies:
        c = cookielib.Cookie(0, key, str(cookies[key]),
                             port=None, port_specified=False,
                             domain=domain, domain_specified=True, domain_initial_dot=False,
                             path='/', path_specified=True, secure=False,
                             expires=None, discard=False, comment=None,
                             comment_url=None, rest={})
        cj.set_cookie(c)
    cj.save(ignore_discard=True)
    if kodi.get_setting('debug') == "true":
        print 'After Cookies: %s' % (cookies_as_str(cj))
    return cj

def get_cookie_opener(gs_username, gs_token):
    """ Create a GenomeSpace cookie opener """
    cj = cookielib.CookieJar()
    for cookie_name, cookie_value in [('gs-token', gs_token), ('gs-username', gs_username)]:
        # create a super-cookie, valid for all domains
        cookie = cookielib.Cookie(version=0, name=cookie_name, value=cookie_value,
                                  port=None, port_specified=False,
                                  domain='', domain_specified=False, domain_initial_dot=False,
                                  path='/', path_specified=True, secure=False,
                                  expires=None, discard=True, comment=None,
                                  comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
        cj.set_cookie(cookie)
    cookie_opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    return cookie_opener

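# A minimal usage sketch (assumed caller, not from the original module): the opener
# returned by get_cookie_opener() sends the gs-token / gs-username cookies with every
# request; url is whatever GenomeSpace endpoint the caller wants to hit.
def _example_get_cookie_opener_usage(gs_username, gs_token, url):
    opener = get_cookie_opener(gs_username, gs_token)
    return opener.open(url).read()
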
def validate_session(self):
    """Validate an API session."""
    # We need to store our access token as the openx3_access_token cookie.
    # This cookie will be passed to all future API requests.
    cookie = cookielib.Cookie(
        version=0,
        name='openx3_access_token',
        value=self._token.key,
        port=None, port_specified=False,
        domain=self.domain, domain_specified=True, domain_initial_dot=False,
        path='/', path_specified=True,
        secure=False, expires=None, discard=False,
        comment=None, comment_url=None, rest={})
    self._cookie_jar.set_cookie(cookie)

    url = '%s://%s%s/a/session/validate' % (self.scheme, self.domain, self.api_path)
    res = self.request(url=url, method='PUT')
    return res.read()