def login(self, login_callback, check_login_callback):
    """Check/establish the login session for this account.

    Installs a process-wide urllib2 opener (only once) that carries this
    account's cookie jar, then tries to load previously saved cookies from
    ``self.cookiefile``.

    Note: ``login_callback`` and ``check_login_callback`` are not used in
    the visible part of this method — presumably consumed later; verify.

    Raises:
        Exception: when offline, or when username/password are unset.
    """
    global DEBUG_HTTP
    if connection.offline:
        raise Exception("Can't connect in offline mode.")
    if self.username == '' or self.password == '':
        raise Exception(
            "Please configure your username/password and restart the application"
        )
    logger.info("Checking Login status")
    from cookielib import LWPCookieJar
    cj = LWPCookieJar(self.cookiefile)
    if not self.opener_installed:
        from urllib2 import build_opener, install_opener, HTTPCookieProcessor, HTTPHandler
        # With HTTP debugging on, add a handler that dumps raw traffic.
        if DEBUG_HTTP:
            opener = build_opener(HTTPHandler(debuglevel=1), HTTPCookieProcessor(cj))
        else:
            opener = build_opener(HTTPCookieProcessor(cj))
        # Installed globally so plain urllib2.urlopen() also uses the jar.
        install_opener(opener)
        self.opener_installed = True
    try:
        cj.load()
        logger.info("Loaded cookie file")
    except IOError, e:  # Python 2 syntax; missing cookie file is fine.
        logger.info("Couldn't load cookie file")
def __init__(self, default_cookiejar=None):
    """Cookie processor that can keep one cookie jar per browser session.

    :param default_cookiejar: jar used when no session-specific jar is
        requested; a fresh ``MozillaCookieJar`` is created when omitted.
    """
    HTTPCookieProcessor.__init__(self, None)
    # Maps a session key to its private cookie jar; plugins may request
    # separate browser sessions, each backed by its own jar.
    self.jars = {}
    self.default_cookiejar = (MozillaCookieJar()
                              if default_cookiejar is None
                              else default_cookiejar)
def submit(project, file):
    """Log in, upload *file* for *project*, then poll the queue until the
    submission is tested and print the result page.

    Exits the process with status 1 on bad credentials.
    """
    cookies = CookieJar()
    opener = build_opener(HTTPCookieProcessor(cookies))
    install_opener(opener)
    # NOTE(review): the following span appears scrubbed by a credential
    # filter ('******'); it is not valid Python as committed. It presumably
    # read username/password and POSTed them to the login endpoint —
    # reconstruct from version control before running.
    username = raw_input('Username: '******'Password: '******'username': username, 'password': password, }))
    url = 'http://%s/projects/%i/submit/' % (SITE, project)
    # Rebuild the opener with multipart support for the file upload.
    opener = build_opener(HTTPCookieProcessor(cookies), MultipartPostHandler)
    response = opener.open(url, {'file': open(file, 'rb')})
    try:
        submission = int(response.read())
    except ValueError:
        sys.stderr.write('Invalid username and password\n')
        sys.exit(1)
    started = False
    count = -1
    # Poll the status endpoint: 0 = done, 1 = testing, >1 = queue position+1.
    while True:
        response = urlopen('http://%s/projects/status/%i/' % (SITE, submission))
        status = int(response.read())
        if status == 0:
            break
        elif status == 1:
            if not started:
                started = True
                if count > 0:
                    sys.stderr.write('\n')
                sys.stderr.write('Testing started')
        elif count <= 0:
            if count == 0:
                sys.stderr.write('\n')
            sys.stderr.write('At position %i in queue' % (status - 1))
            count = 10  # re-print the queue position every ~10 polls
        count -= 1
        sys.stderr.write('.')
        sleep(1)
    sys.stderr.write('\n')
    response = urlopen('http://%s/projects/result/%i/' % (SITE, submission))
    print response.read()
def getCookies(url):
    """Fetch *url* (teledunet.com) and return its cookies as plain text.

    The jar's ``repr()`` is string-mangled into "name=value" text instead of
    being read through the cookielib API.
    """
    #Create a CookieJar object to hold the cookies
    cj = cookielib.CookieJar()
    print "this is oookie " + str(url)
    #Create an opener to open pages using the http protocol and to process cookies.
    opener = build_opener(HTTPCookieProcessor(cj), HTTPHandler())
    #create a request object to be used to get the page.
    req = urllib2.Request(url)
    req.add_header('Host', 'www.teledunet.com')
    req.add_header(
        'User-Agent',
        'Mozilla/5.0 (Windows NT 6.1; rv:26.0) Gecko/20100101 Firefox/26.0')
    req.add_header(
        'Accept',
        'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8')
    req.add_header('Accept-Encoding', 'gzip, deflate')
    req.add_header('Referer', 'http://www.teledunet.com/')
    req.add_header('Connection', 'keep-alive')
    f = opener.open(req)
    #see the first few lines of the page
    # NOTE(review): parsing str(cj) is fragile — iterating the jar and
    # joining "name=value" pairs would be more robust.
    cj = str(cj).replace('<cookielib.CookieJar[<Cookie', '').replace('/>]>', '').replace('for www.teledunet.com', '')
    cj = str(cj).strip()
    return cj
def getUrl(
        url,
        header=None,
        agent='Mozilla/5.0 (Windows NT 10.0; WOW64; rv:60.0) Gecko/20100101 Firefox/60.0'
):
    """Fetch *url* through the module-level cookie jar ``cj`` and return the
    decoded body (gzip-inflated when needed).

    :param header: full replacement header list; when given it overrides the
        default User-Agent/Accept-Encoding pair.
    """
    global cj
    opener = build_opener(HTTPCookieProcessor(cj))
    opener.addheaders = [('User-Agent', agent),
                         ('Accept-Encoding', 'gzip, deflate')]
    try:
        if header:
            opener.addheaders = header
        response = opener.open(url, timeout=40)
        # Transparently inflate gzip-compressed responses.
        if response.info().get('Content-Encoding') == 'gzip':
            content = py3_dec(
                gzip.GzipFile(fileobj=io.BytesIO(response.read())).read())
        else:
            content = py3_dec(response.read())
    except Exception as e:
        failure = str(e)
        failing(
            "(getUrl) ERROR - ERROR - ERROR : ########## {0} === {1} ##########"
            .format(url, failure))
        content = ""
        # NOTE(review): sys.exit(0) raises SystemExit (exit code 0!) and
        # aborts the caller — `content` is never returned on this path.
        # Confirm whether `return content` was intended.
        return sys.exit(0)
    opener.close()
    # Best-effort persistence of the jar; failures are deliberately ignored.
    try:
        cj.save(cookie, ignore_discard=True, ignore_expires=True)
    except:
        pass
    return content
def __call__(self, ssl_ca_certs, url, username, password, insecure=False,
             manage_cookies=False):
    """Return a urllib2 opener configured for the given TLS/cookie options.

    Openers (and their shared password manager) are cached per
    ``(ssl_ca_certs, insecure, manage_cookies)`` tuple; credentials for
    *url* are (re-)registered on every call when fully specified.
    """
    key = (ssl_ca_certs, insecure, manage_cookies)
    try:
        opener, passman = self._opener[key]
    except KeyError:
        handler_list = []
        https_handler = build_https_handler(ssl_ca_certs, insecure)
        if https_handler:
            handler_list.append(https_handler)
        # Basic and Digest auth share one password manager.
        passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
        handler_list.append(urllib2.HTTPBasicAuthHandler(passman))
        handler_list.append(urllib2.HTTPDigestAuthHandler(passman))
        if manage_cookies:
            handler_list.append(HTTPCookieProcessor(CookieJar()))
        opener = urllib2.build_opener(*handler_list)
        opener.addheaders = [('User-agent', 'MapProxy-%s' % (version, ))]
        self._opener[key] = (opener, passman)
    if url is not None and username is not None and password is not None:
        passman.add_password(None, url, username, password)
    return opener
def getUrl(url, header=None):
    """Fetch *url* via the module-level cookie jar ``cj``; return the
    decoded (and gzip-inflated when needed) body.
    """
    global cj
    opener = build_opener(HTTPCookieProcessor(cj))
    try:
        if header:
            opener.addheaders = header
        else:
            opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.162 Safari/537.36')]
            # NOTE(review): this REPLACES the list assigned just above, so
            # the User-Agent header is dropped; `+=` was probably intended.
            opener.addheaders = [('Accept-Encoding', 'gzip, deflate')]
        response = opener.open(url, timeout=30)
        if response.info().get('Content-Encoding') == 'gzip':
            content = py3_dec(gzip.GzipFile(fileobj=io.BytesIO(response.read())).read())
        else:
            content = py3_dec(response.read())
    except Exception as e:
        failure = str(e)
        # Both branches log identically; kept for parity with HTTPError
        # (has .code) vs URLError (has .reason).
        if hasattr(e, 'code'):
            failing("(getUrl) ERROR - ERROR - ERROR : ########## {0} === {1} ##########".format(url, failure))
        elif hasattr(e, 'reason'):
            failing("(getUrl) ERROR - ERROR - ERROR : ########## {0} === {1} ##########".format(url, failure))
        content = ""
        # NOTE(review): sys.exit(0) aborts the caller with SystemExit and
        # `content` is never returned on the error path — confirm intended.
        return sys.exit(0)
    opener.close()
    try:
        cj.save(cookie, ignore_discard=True, ignore_expires=True)
    except:
        pass
    return content
def nse_opener(self):
    """Build a urllib opener backed by a fresh, empty cookie jar.

    :return: opener object with cookie handling enabled
    """
    jar = CookieJar()
    cookie_handler = HTTPCookieProcessor(jar)
    return build_opener(cookie_handler)
def getUrl(url, header=None):
    """Fetch *url* via the module-level cookie jar ``cj`` and return the
    decoded body; logs every cookie in the jar before the request.
    """
    global cj
    # Trace the current session cookies for debugging.
    for cook in cj:
        debug("(getUrl) Cookie : " + str(cook))
    opener = build_opener(HTTPCookieProcessor(cj))
    try:
        if header:
            opener.addheaders = header
        else:
            opener.addheaders = [(
                'User-Agent',
                'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.162 Safari/537.36'
            )]
        response = opener.open(url, timeout=30)
        content = py3_dec(response.read())
    except Exception as e:
        failure = str(e)
        if hasattr(e, 'code'):
            failing(
                "(getUrl) ERROR - ERROR - ERROR : ########## {0} === {1} ##########"
                .format(url, failure))
        elif hasattr(e, 'reason'):
            failing(
                "(getUrl) ERROR - ERROR - ERROR : ########## {0} === {1} ##########"
                .format(url, failure))
        content = ""
        # NOTE(review): sys.exit(0) aborts the caller with SystemExit and
        # `content` is never returned on the error path — confirm intended.
        return sys.exit(0)
    opener.close()
    # Best-effort cookie persistence; failures deliberately ignored.
    try:
        cj.save(cookie, ignore_discard=True, ignore_expires=True)
    except:
        pass
    return content
def getUrl(url, header=None, agent='Dalvik/2.1.0 (Linux; U; Android 7.1.2;)'):
    """Fetch *url* through the module-level cookie jar ``cj`` pretending to
    be the Android client; return the decoded (gzip-aware) body.
    """
    global cj
    for cook in cj:
        debug("(getUrl) Cookie : {0}".format(str(cook)))
    opener = build_opener(HTTPCookieProcessor(cj))
    # NOTE(review): the DeviceId below is hard-coded — presumably a shared
    # API credential; verify it should not be configurable.
    opener.addheaders = [('User-Agent', agent),
                         ('Accept-Encoding', 'gzip, deflate'),
                         ('Accept-Language', siteVersion),
                         ('Authorization', 'DeviceId 427502496159111')]
    try:
        if header:
            opener.addheaders = header
        response = opener.open(url, timeout=30)
        if response.info().get('Content-Encoding') == 'gzip':
            content = py3_dec(
                gzip.GzipFile(fileobj=io.BytesIO(response.read())).read())
        else:
            content = py3_dec(response.read())
    except Exception as e:
        failure = str(e)
        failing(
            "(getUrl) ERROR - ERROR - ERROR : ########## {0} === {1} ##########"
            .format(url, failure))
        #xbmcgui.Dialog().notification(translation(30521).format('URL'), "ERROR = [COLOR red]{0}[/COLOR]".format(failure), icon, 15000)
        content = ""
        # NOTE(review): sys.exit(0) aborts the caller with SystemExit and
        # `content` is never returned on the error path — confirm intended.
        return sys.exit(0)
    opener.close()
    try:
        cj.save(cookie, ignore_discard=True, ignore_expires=True)
    except:
        pass
    return content
def stage4(username, password, push_dict):
    """Test and fix _push_list: install a cookie-aware opener, log in with
    the given credentials, and push the two entries in *push_dict*.

    Exits the process with status 1 on login or push failure.
    """
    print '----------------'
    print 'starting stage 4'
    print '----------------'
    # NOTE(review): typo 'atepmting' in a user-facing string (left as-is).
    print 'atepmting to push 2 changes:', push_dict.keys()
    from cookielib import LWPCookieJar
    from urllib2 import build_opener, install_opener, \
        HTTPCookieProcessor
    # setup cookie handler (installed globally so _login/_push_list share it)
    opener = build_opener(HTTPCookieProcessor(LWPCookieJar()))
    install_opener(opener)
    if _login(username, password):
        if _push_list(push_dict):
            print 'enries pushed'
            print 'don\'t forget to reset the list ^^'
            print 'stage 4 completed'
        else:
            print 'PUSHING FAILED'
            _exit(1)
    else:
        print 'LOGIN FAILED, CHECK CREDENTIALS!'
        _exit(1)
def __init__(self, args):
    """Set up the client session: cookie jar, optional TLS-verification
    bypass, saved-cookie loading, and a cookie-aware opener.

    :param args: parsed CLI options (skip_cert, cookiefile, file).
    """
    self.modulus = None
    self.exponent = None
    self.args = args
    jar = LWPCookieJar()
    self.jar = jar
    if args.skip_cert:
        # Legacy interpreters without the private hook already skip HTTPS
        # verification; otherwise install the unverified context globally.
        unverified = getattr(ssl, '_create_unverified_context', None)
        if unverified is not None:
            ssl._create_default_https_context = unverified
    self.has_cookies = False
    if args.cookiefile:
        self.has_cookies = True
        try:
            jar.load(args.cookiefile, ignore_discard=True)
        except IOError:
            # Cookie file missing/unreadable — fall back to a fresh session.
            self.has_cookies = False
    self.opener = build_opener(HTTPCookieProcessor(jar))
    self.nextfile = args.file
def custom_handler(follow=None, delay=None, encoding=None):
    """Build an opener with the project's full handler chain.

    :param follow: when set, rewrite responses to the given MIMETYPE key.
    :param delay: minimum cache age forwarded to CacheHandler.
    :param encoding: charset hint for EncodingFixHandler.
    """
    # as per urllib2 source code, these Handelers are added first
    # *unless* one of the custom handlers inherits from one of them
    #
    # [ProxyHandler, UnknownHandler, HTTPHandler,
    # HTTPDefaultErrorHandler, HTTPRedirectHandler,
    # FTPHandler, FileHandler, HTTPErrorProcessor]
    # & HTTPSHandler
    #handlers.append(DebugHandler())
    handlers = [
        SizeLimitHandler(100 * 1024),  # 100KiB
        HTTPCookieProcessor(),
        GZIPHandler(),
        HTTPEquivHandler(),
        HTTPRefreshHandler(),
        UAHandler(random.choice(DEFAULT_UAS)),
        BrowserlyHeaderHandler(),
        EncodingFixHandler(encoding),
    ]
    if follow:
        handlers.append(AlternateHandler(MIMETYPE[follow]))
    handlers.append(CacheHandler(force_min=delay))
    return build_opener(*handlers)
def index(request):
    """Django view: render the rango index page, bumping a 'visits' cookie.

    NOTE(review): this view also performs a blocking outbound HTTP fetch of
    twitter.com on every request (apparently demo/debug code) — consider
    removing it from the request path.
    """
    # check whether cookie exits
    visits = (int(request.COOKIES.get('visits', 1)))
    #Create a CookieJar object to hold the cookies
    cj = cookielib.CookieJar()
    #Create an opener to open pages using the http protocol and to process cookies.
    opener = build_opener(HTTPCookieProcessor(cj), HTTPHandler())
    #create a request object to be used to get the page.
    req = Request("http://www.twitter.com")
    f = opener.open(req)
    #see the first few lines of the page
    html = f.read()
    print html[:50]
    #Check out the cookies
    print "the cookies are: "
    for cookie in cj:
        print cookie
    # Top categories/pages for the template context.
    context_catogaries = Category.objects.order_by('-likes')[:18]
    context_pages = Page.objects.order_by('-views')[:18]
    context_dict = {'categories': context_catogaries, 'pages': context_pages}
    response = render(request, 'rango/index.html', context_dict)
    visits += 1
    response.set_cookie('visits', visits)
    print "visit is " + str(visits)
    return response
def login(url, uname, passwd):
    """Log in to a vBulletin forum at *url* with MD5-hashed password.

    NOTE(review): this function appears truncated — it ends with an opening
    triple-quote, and the prepared `data`/`loginurl` are never POSTed in the
    visible code. Also `opener.addheader` is not a urllib2 attribute (the
    real one is `addheaders`), so the headers are silently unused.
    """
    loginurl = url + '/login.php?do=login'
    # vBulletin expects the MD5 of the password, not the plaintext.
    md5 = hashlib.md5(passwd);md5 = md5.hexdigest()
    cj = cookielib.CookieJar()
    opener = build_opener(HTTPCookieProcessor(cj), HTTPHandler())
    req = Request(url)
    f = opener.open(req)
    html = f.read()
    global headers
    headers = {
        'User-Agent':'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0',
    }
    opts = {
        'vb_login_username':uname,
        'vb_login_password':passwd,
        's': '',
        'securitytoken':'guest',
        'do': 'login',
        'vb_login_md5password': md5,
        'vb_login_md5password_utf': md5
    }
    data = urllib.urlencode(opts)
    jar = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
    opener.addheader = headers
    """
def getCookiesARC(url):
    """Fetch *url* (hdarabic.com) and return its cookies as plain text.

    Like getCookies(), the jar's repr() is string-mangled rather than read
    through the cookielib API.
    """
    #Create a CookieJar object to hold the cookies
    cj = cookielib.CookieJar()
    #Create an opener to open pages using the http protocol and to process cookies.
    opener = build_opener(HTTPCookieProcessor(cj), HTTPHandler())
    #create a request object to be used to get the page.
    req = Request(url)
    req.add_header('Host', 'www.hdarabic.com')
    req.add_header('Cache-Control', 'max-age=0')
    req.add_header(
        'Accept',
        ' text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
    )
    req.add_header(
        'User-Agent',
        ' Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.69 Safari/537.36'
    )
    req.add_header('Accept-Encoding', 'gzip,deflate,sdch')
    req.add_header('Referer', 'http://www.hdarabic.com/')
    req.add_header('Accept-Language', 'sv,en-US;q=0.8,en;q=0.6,en-GB;q=0.4')
    f = opener.open(req)
    #see the first few lines of the page
    # NOTE(review): fragile repr() parsing — see getCookies().
    cj = str(cj).replace('<cookielib.CookieJar[<Cookie', '').replace('/>]>', '')
    cj = str(cj).strip()
    return cj
def __init__(self): """Initialize Pornolab search engine, signing in using given credentials.""" # Initialize various objects. self.cj = cookielib.CookieJar() self.opener = build_opener(HTTPCookieProcessor(self.cj)) self.url = 'https://pornolab.net' # Override url with the actual URL to be used (in case official URL isn't accessible) self.credentials = credentials # Add submit button additional POST param. self.credentials['login'] = u'Вход' try: logging.info("Trying to connect using given credentials.") response = self.opener.open( self.login_url, urlencode(dict_encode(self.credentials)).encode()) # Check if response status is OK. if response.getcode() != 200: raise HTTPError( response.geturl(), response.getcode(), "HTTP request to {} failed with status: {}".format( self.login_url, response.getcode()), response.info(), None) # Check if login was successful using cookies. if not 'bb_data' in [cookie.name for cookie in self.cj]: logging.debug(self.cj) raise ValueError("Unable to connect using given credentials.") else: logging.info("Login successful.") except (URLError, HTTPError, ValueError) as e: logging.error(e)
def getUrl(url, header=None):
    """Fetch *url* via the module-level cookie jar ``cj``; return the
    decoded (gzip-aware) body. Shows a Kodi notification on failure.
    """
    global cj
    opener = build_opener(HTTPCookieProcessor(cj))
    try:
        if header:
            opener.addheaders = header
        else:
            opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:60.0) Gecko/20100101 Firefox/60.0')]
            # NOTE(review): this REPLACES the list assigned just above, so
            # the User-Agent header is dropped; `+=` was probably intended.
            opener.addheaders = [('Accept-Encoding', 'gzip, deflate')]
        response = opener.open(url, timeout=30)
        if response.info().get('Content-Encoding') == 'gzip':
            content = py3_dec(gzip.GzipFile(fileobj=io.BytesIO(response.read())).read())
        else:
            content = py3_dec(response.read())
    except Exception as e:
        failure = str(e)
        # HTTPError carries .code, URLError carries .reason; both paths log
        # and notify identically.
        if hasattr(e, 'code'):
            failing("(getUrl) ERROR - ERROR - ERROR : ########## {0} === {1} ##########".format(url, failure))
            xbmcgui.Dialog().notification((translation(30521).format("URL")), "ERROR = [COLOR red]{0}[/COLOR]".format(failure), icon, 12000)
        elif hasattr(e, 'reason'):
            failing("(getUrl) ERROR - ERROR - ERROR : ########## {0} === {1} ##########".format(url, failure))
            xbmcgui.Dialog().notification((translation(30521).format("URL")), "ERROR = [COLOR red]{0}[/COLOR]".format(failure), icon, 12000)
        content = ""
        # NOTE(review): sys.exit(0) aborts the caller with SystemExit and
        # `content` is never returned on the error path — confirm intended.
        return sys.exit(0)
    opener.close()
    try:
        cj.save(cookie, ignore_discard=True, ignore_expires=True)
    except:
        pass
    return content
def __init__(self):
    """
    Logs the user in the backend.

    args.username and args.password will be used to login to the backend.
    Make sure that this user and password combination is correct, otherwise
    the GUI will fail to load. The normal login redirects the user to the
    profile page, but instead we ask the backend to redirect the user to
    the root URL, and we simply ignore it.
    """
    cjar = CookieJar()
    login_url = args.django_http_url + 'accounts/login/?next=/'
    self.opener = build_opener(HTTPCookieProcessor(cjar))
    # First GET obtains the CSRF cookie needed for the POST below.
    self.opener.open(login_url)
    # fetch csrftoken and sessionid
    csrf_token = ''
    for cookie in cjar:
        if cookie.name == 'csrftoken':
            csrf_token = cookie.value
    if csrf_token == '':
        raise Exception('No CSRF token found. Check backend URL.')
    login_data = urlencode({
        'username': args.username,
        'password': args.password,
        'csrfmiddlewaretoken': csrf_token
    })
    request = self.opener.open(login_url, login_data)
    # NOTE(review): assert is stripped under `python -O`; an explicit check
    # with a raised exception would be safer.
    assert (request.getcode() == 200)  # check if successful
def get_num_results(search_term, start_date, end_date): """ Helper method, sends HTTP request and returns response payload. """ # Open website and read html user_agent = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.109 Safari/537.36' query_params = { 'q' : search_term, 'as_ylo' : start_date, 'as_yhi' : end_date} url = "https://scholar.google.com/scholar?as_vis=1&hl=en&as_sdt=1,5&" + urllib.urlencode(query_params) opener = build_opener(HTTPCookieProcessor(cookies)) request = Request(url=url, headers={'User-Agent': user_agent}) handler = opener.open(request) html = handler.read() # Create soup for parsing HTML and extracting the relevant information soup = BeautifulSoup(html, 'html.parser') div_results = soup.find("div", {"id": "gs_ab_md"}) # find line 'About x results (y sec) if div_results != None: res = re.findall(r'(\d+),?(\d+)?,?(\d+)?\s', div_results.text) # extract number of search results if not res: num_results = '0' else: num_results = ''.join(res[0]) # convert string to number success = True else: success = False num_results = 0 return num_results, success
def __init__(self): """Initialize rutracker search engine, signing in using given credentials.""" # Initialize cookie handler. self.cj = cookielib.CookieJar() self.opener = build_opener(HTTPCookieProcessor(self.cj)) self.credentials = credentials # Add submit button additional POST param. self.credentials['login'] = u'Вход' # Send POST information and sign in. try: logging.info("Trying to connect using given credentials.") response = self.opener.open( self.login_url, urlencode(dict_encode(self.credentials)).encode()) # Check if response status is OK. if response.getcode() != 200: raise HTTPError( response.geturl(), response.getcode(), "HTTP request to {} failed with status: {}".format( self.login_url, response.getcode()), response.info(), None) # Check if login was successful using cookies. if not 'bb_data' in [cookie.name for cookie in self.cj]: raise ValueError("Unable to connect using given credentials.") else: logging.info("Login successful.") except (URLError, HTTPError, ValueError) as e: logging.error(e)
def _open_source(source, head, etag=None, last_modified=None, timeout=None,
                 user_agent="Mozilla/5.0"):
    """Open anything: a file-like object, '-' for stdin, an http(s) URL
    (with conditional-GET headers), a local file path, or — as a last
    resort — the literal text wrapped in a StringIO.

    :param head: issue a HEAD request instead of GET for URLs.
    :param etag/last_modified: sent as If-None-Match/If-Modified-Since.
    """
    if hasattr(source, 'read'):
        return source
    if source == '-':
        return sys.stdin
    # Matches both 'http' and 'https' schemes.
    if urlparse.urlparse(source)[0][:4] == 'http':
        request = urllib2.Request(source)
        if head:
            request.get_method = lambda: 'HEAD'
        request.add_header('User-Agent', user_agent)
        if etag:
            request.add_header('If-None-Match', etag)
        if last_modified:
            request.add_header('If-Modified-Since', last_modified)
        request.add_header('Accept-encoding', 'gzip')
        jar = cookielib.MozillaCookieJar()
        jar.set_policy(
            cookielib.DefaultCookiePolicy(rfc2965=True,
                                          strict_rfc2965_unverifiable=False))
        opener = urllib2.build_opener(SmartRedirectHandler(),
                                      HTTPCookieProcessor(jar),
                                      DefaultErrorHandler())
        return opener.open(request, None, timeout)
    try:
        return open(source)
    except (IOError, OSError):
        pass
    # Fall back to treating the argument itself as document content.
    return StringIO(str(source))
def __init__(self, mobile, password=None, status='0',
             cachefile='Fetion.cache', cookiesfile=''):
    '''Login status codes: online 400, invisible 0, busy 600, away 100.

    Creates (if needed) and loads a per-mobile Mozilla cookie file, builds
    a cookie-aware opener, logs in when the saved session is dead, saves
    cookies, and finally switches to *status*.
    '''
    if cachefile:
        self.cache = Cache(cachefile)
    if not cookiesfile:
        # Default to one cookie file per mobile number.
        cookiesfile = '%s.cookies' % mobile
    cookiejar = MozillaCookieJar(filename=cookiesfile)
    if not os.path.isfile(cookiesfile):
        # MozillaCookieJar refuses to load a file without the magic header.
        open(cookiesfile, 'w').write(MozillaCookieJar.header)
    cookiejar.load(filename=cookiesfile)
    cookie_processor = HTTPCookieProcessor(cookiejar)
    self.opener = build_opener(cookie_processor, HTTPHandler)
    self.mobile, self.password = mobile, password
    # Re-login only when the saved session is no longer alive.
    if not self.alive():
        self._login()
        cookiejar.save()
    self.changestatus(status)
def __init__(self, uri, cookiejar=None, use_datetime=0):
    """XML-RPC transport supporting HTTP Basic auth and cookie handling.

    Credentials embedded in *uri* as "scheme://user:passwd@host/path" are
    stripped from the stored URI and registered with a Basic-auth handler.

    :param cookiejar: optional jar; when given, cookies are processed.
    """
    Transport.__init__(self, use_datetime=use_datetime)
    self.opener = build_opener()
    # Parse auth (user:passwd) from the uri
    scheme, remainder = splittype(uri)
    netloc, remainder = splithost(remainder)
    credentials, netloc = splituser(netloc)
    self.uri = '%s://%s%s' % (scheme, netloc, remainder)
    # Handle HTTP Basic authentication
    if credentials is not None:
        user, passwd = splitpasswd(credentials)
        manager = HTTPPasswordMgrWithDefaultRealm()
        manager.add_password(realm=None, uri=self.uri, user=user,
                             passwd=passwd)
        self.opener.add_handler(HTTPBasicAuthHandler(manager))
    # Handle HTTP Cookies
    if cookiejar is not None:
        self.opener.add_handler(HTTPCookieProcessor(cookiejar))
def start(self):
    """Main send loop: restore or acquire session cookies, start the
    background saver thread, and keep searching/sending until finished or
    interrupted.
    """
    self.enter_msg()
    self.cookie.set_cookie(self.acc_token)
    have_load = False
    try:
        if os.path.exists(("baihe.cookie")):
            self.cookie.load("baihe.cookie", True, True)
            have_load = True
        opener = build_opener(HTTPCookieProcessor(self.cookie))
        # No saved cookies — authenticate from scratch.
        if not have_load:
            self.get_auth_cookies(opener)
            self.get_search_cookies(opener)
        # 有时意外不正常关闭cookie和send_list无法保存,所以启动一个进程来做这件事。
        # (Sometimes an abnormal shutdown prevents the cookies and send_list
        # from being saved, so run a background thread that keeps saving them
        # — note: a Thread, despite the original comment saying "process".)
        Thread(target=self.saveing).start()
        while True:
            try:
                if self.search(opener) == "finished":
                    self.logger.info("No more girls to send. ")
                    break
            except Exception, e:  # Python 2 syntax
                # On any search error, back off briefly and refresh cookies.
                time.sleep(1)
                self.logger.error(e)
                self.get_auth_cookies(opener)
                self.get_search_cookies(opener)
    except KeyboardInterrupt:
        self.logger.info("Closing...")
        self.alive = False
    finally:
        # Always persist state and signal the saver thread to stop.
        self.save()
        self.alive = False
def call_handler(self, handler, postdata=None, **kwargs):
    """Call a server endpoint and return ``(response, body)``.

    *kwargs* become the query string; *postdata* (when given) makes this a
    POST. The body is JSON-decoded when possible, returned raw otherwise.
    Returns ``(None, None)`` on any non-HTTPError failure; HTTPError is
    re-raised to the caller.
    """
    url = '%s%s' % (self.get_serv_addr(), handler)
    params = urlencode(kwargs)
    url = '%s?%s' % (url, params)
    logging.debug("Request units url: %s" % url)
    cj = CookieJar()
    opener = build_opener(HTTPCookieProcessor(cj))
    # Reuse the authenticated session cookie when we have one.
    if self.__session is not None:
        opener.addheaders.append(('Cookie', 'session=%s' % self.__session))
    ##try: response = urlopen(url, postdata)
    try:
        response = opener.open(url, postdata)
    except HTTPError as err:
        raise (err)
    except:
        return None, None
    ## Reading data:
    try:
        body_ = response.read()
    except:
        return None, None
    ## Decoding to JSON:
    try:
        return response, json.loads(body_)
    except:
        # Not JSON — hand back the raw bytes.
        return response, body_
def start(args): """Login and session handler """ # create cookiejar args._cj = LWPCookieJar() # lets urllib handle cookies opener = build_opener(HTTPCookieProcessor(args._cj)) opener.addheaders = [( "User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36" ), ("Accept-Encoding", "identity"), ("Accept-Charset", "utf-8"), ("DNT", "1")] install_opener(opener) # load cookies try: args._cj.load(getCookiePath(args), ignore_discard=True) except IOError: # cookie file does not exist pass args._cj.set_cookie( Cookie(0, "timezoneoffset", str(timezone // 60), None, False, "www.wakanim.tv", False, False, "/", True, False, None, False, None, None, {"HttpOnly": None}, False))
def bugzilla(self):
    """
    Return Bugzilla instance.

    TODO: check whether we should use cookies or not

    Lazily creates (and caches) the Bugz client; anonymous access is tried
    first when no credentials are configured.

    Raises:
        apport.crashdb.NeedsCredentials: when auth fails or is required.
    """
    if self._bugzilla is not None:
        return self._bugzilla
    self._baseurl = self.options.get('baseurl')
    self._bugzilla = Bugz(self._baseurl)
    # Swap in an LWP cookie jar scoped to this Bugzilla host.
    cj = CookiePot().make_lwp_cookiejar(self._bugzilla.cookiejar.filename,
                                        self._bugzilla.host)
    self._bugzilla.cookiejar = cj
    self._bugzilla.opener = build_opener(HTTPCookieProcessor(cj))
    if self.username is None or self.password is None:
        if not self._bugzilla.try_auth():
            self._bugzilla = None
            raise apport.crashdb.NeedsCredentials, self.distro
    else:
        self._bugzilla = Bugz(self._baseurl, self.username, self.password)
        try:
            self._bugzilla.auth()
        except RuntimeError:
            # Happens when the username/password pair is invalid.
            raise apport.crashdb.NeedsCredentials, self.distro
    return self._bugzilla
def login(self):
    """POST the configured login/password and persist the site's 'svid1'
    session cookie into the addon settings.
    """
    print "*** Login"
    login = self.addon.getSetting('login')
    if login:
        password = self.addon.getSetting('password')
        headers = {
            "Host": self.url.split("://")[1],
            "Referer": self.url + '/',
            "Origin": self.url,
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36"
        }
        values = {"login": login, "password": password}
        cj = cookielib.CookieJar()
        opener = build_opener(HTTPCookieProcessor(cj), HTTPHandler())
        req = Request(self.url + "/?mod=login", urllib.urlencode(values), headers)
        f = opener.open(req)
        # Scrape the svid1 value out of each cookie's repr().
        for cookie in cj:
            cookie = str(cookie).split('svid1=')[-1].split(' ')[0].strip()
            # NOTE(review): `cookie > ""` is redundant with the truthiness
            # check and relies on py2 string comparison semantics.
            if cookie and (cookie > ""):
                self.addon.setSetting('cookie', cookie)
def test_cookie_redirect(self):
    """Cookies set for one host must not leak into a request that a 302
    redirect sends to a different host.
    """
    class MockHTTPHandler(urllib2.HTTPHandler):
        # First open() answers 302 -> cracker.com, second records the
        # redirected request so the test can inspect its headers.
        def __init__(self):
            self._count = 0

        def http_open(self, req):
            import mimetools
            from StringIO import StringIO
            if self._count == 0:
                self._count = self._count + 1
                msg = mimetools.Message(
                    StringIO("Location: http://www.cracker.com/\r\n\r\n"))
                return self.parent.error("http", req, MockFile(), 302,
                                         "Found", msg)
            else:
                self.req = req
                msg = mimetools.Message(StringIO("\r\n\r\n"))
                return MockResponse(200, "OK", msg, "", req.get_full_url())

    # cookies shouldn't leak into redirected requests
    from cookielib import CookieJar
    from urllib2 import build_opener, HTTPHandler, HTTPError, \
        HTTPCookieProcessor
    from test_cookielib import interact_netscape
    cj = CookieJar()
    # Plant a cookie for example.com only.
    interact_netscape(cj, "http://www.example.com/", "spam=eggs")
    hh = MockHTTPHandler()
    cp = HTTPCookieProcessor(cj)
    o = build_opener(hh, cp)
    o.open("http://www.example.com/")
    # The redirected (cracker.com) request must carry no Cookie header.
    self.assert_(not hh.req.has_header("Cookie"))
def __init__(self, cookiejar=None, **kwargs):
    # Thin wrapper: defer entirely to the stdlib HTTPCookieProcessor;
    # exists so subclasses/callers have a single local construction point.
    HTTPCookieProcessor.__init__(self, cookiejar, **kwargs)