def extract_cookiejar(self): jar = CookieJar() # self._respose could be None # if this method is called from custom preapre response # function called from spider cache backend if self._response and self._request: jar.extract_cookies( MockResponse(self._response._original_response.msg), MockRequest(self._request), ) return jar
def test_cookiejar(self):
    """Cookies lack value equality; jars keep one entry per domain."""
    first = create_cookie('foo', 'bar', self.server.address)
    second = create_cookie('foo', 'bar', self.server.address)
    # Cookie does not define __eq__, so identical arguments still give
    # two unequal objects.
    self.assertFalse(first == second)
    dotted = create_cookie('foo', 'bar', domain='.dumpz.org')
    self.assertEqual(dotted.domain, '.dumpz.org')
    jar = CookieJar()
    for domain in ('foo.com', 'bar.com'):
        jar.set_cookie(create_cookie('foo', 'bar', domain=domain))
    self.assertEqual(len(jar), 2)
def test_cookiejar(self):
    """Basic CookieJar behaviour: equality, domains, per-domain storage."""
    c1 = create_cookie('foo', 'bar')
    c2 = create_cookie('foo', 'bar')
    # Cookie does not implement __eq__, so equal args give unequal objects.
    self.assertFalse(c1 == c2)
    c = create_cookie('foo', 'bar', domain='.dumpz.org')
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(c.domain, '.dumpz.org')
    cj = CookieJar()
    cj.set_cookie(create_cookie('foo', 'bar', domain='foo.com'))
    cj.set_cookie(create_cookie('foo', 'bar', domain='bar.com'))
    self.assertEqual(len(cj), 2)
def extract_cookiejar(self): jar = CookieJar() # self._respose could be None # if this method is called from custom preapre response if self._response and self._request: jar.extract_cookies( # pylint: disable=protected-access MockResponse(self._response._original_response.msg), # pylint: enable=protected-access MockRequest(self._request), ) return jar
def test_cookiejar(self):
    """CookieJar stores cookies per domain; cookies lack value equality."""
    c1 = create_cookie('foo', 'bar', self.server.address)
    c2 = create_cookie('foo', 'bar', self.server.address)
    # Cookie does not implement __eq__, so equal args give unequal objects.
    self.assertFalse(c1 == c2)
    c = create_cookie('foo', 'bar', domain='.dumpz.org')
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(c.domain, '.dumpz.org')
    cj = CookieJar()
    cj.set_cookie(create_cookie('foo', 'bar', domain='foo.com'))
    cj.set_cookie(create_cookie('foo', 'bar', domain='bar.com'))
    self.assertEqual(len(cj), 2)
def extract_cookiejar(self): jar = CookieJar() # self._respose could be None # if this method is called from custom preapre response # function called from spider cache backend if self._response and self._request: jar.extract_cookies( # pylint: disable=protected-access MockResponse(self._response._original_response.msg), # pylint: enable=protected-access MockRequest(self._request), ) return jar
def send_cookies(self, connection, cookie_request):
    """Copy applicable Cookie headers from cookie_request onto connection."""
    if self.cookiejar is None:
        # Lazily create the jar on first use; nothing to send yet.
        self.cookiejar = CookieJar()
    elif self.cookiejar:
        # A non-empty jar decides which cookies match this request.
        self.cookiejar.add_cookie_header(cookie_request)
    # Collect every Cookie* header the jar attached to the request...
    cookie_headers = [
        [name, value]
        for name, value in cookie_request.header_items()
        if name.startswith('Cookie')
    ]
    # ...and replay them on the outgoing connection.
    for name, value in cookie_headers:
        connection.putheader(name, value)
def srtm_login_or_skip(monkeypatch):
    """Point cartopy's urlopen at an opener that can log in to NASA Earthdata.

    The calling test is skipped unless both SRTM_USERNAME and
    SRTM_PASSWORD environment variables are set.
    """
    import os
    srtm_username = os.environ.get('SRTM_USERNAME')
    if srtm_username is None:
        pytest.skip('SRTM_USERNAME environment variable is unset.')
    srtm_password = os.environ.get('SRTM_PASSWORD')
    if srtm_password is None:
        pytest.skip('SRTM_PASSWORD environment variable is unset.')

    from six.moves.http_cookiejar import CookieJar
    from six.moves.urllib.request import (HTTPBasicAuthHandler,
                                          HTTPCookieProcessor,
                                          HTTPPasswordMgrWithDefaultRealm,
                                          build_opener)

    password_manager = HTTPPasswordMgrWithDefaultRealm()
    password_manager.add_password(
        None, "https://urs.earthdata.nasa.gov", srtm_username, srtm_password)
    # Cookie support keeps the Earthdata session alive across redirects.
    cookie_jar = CookieJar()
    opener = build_opener(HTTPBasicAuthHandler(password_manager),
                          HTTPCookieProcessor(cookie_jar))
    monkeypatch.setattr(cartopy.io, 'urlopen', opener.open)
def __init__(self, **kwargs):
    """Initialize the transport and apply keyword options.

    @param kwargs: Keyword arguments.
        - B{proxy} - An http proxy to be specified on requests.
            The proxy is defined as {protocol:proxy,}
            - type: I{dict}
            - default: {}
        - B{timeout} - Set the url open timeout (seconds).
            - type: I{float}
            - default: 90
    """
    Transport.__init__(self)
    Unskin(self.options).update(kwargs)
    # Per-instance state: lazily built opener, proxy map, cookie storage.
    self.urlopener = None
    self.proxy = {}
    self.cookiejar = CookieJar()
def __setstate__(self, state):
    """Restore pickled state, rebuilding the (unpicklable) CookieJar."""
    jar = CookieJar()
    # __getstate__ stored the cookies as a plain list; put them back.
    for cookie in state.pop('_cookiejar_cookies'):
        jar.set_cookie(cookie)
    state['cookiejar'] = jar
    for name, value in state.items():
        setattr(self, name, value)
def extract_cookiejar(self): """ Extract cookies that pycurl instance knows. Returns `CookieJar` object. """ # Example of line: # www.google.com\tFALSE\t/accounts/\tFALSE\t0' # \tGoogleAccountsLocale_session\ten # Fields: # * domain # * whether or not all machines under that domain can # read the cookie's information. # * path # * Secure Flag: whether or not a secure connection (HTTPS) # is required to read the cookie. # * exp. timestamp # * name # * value cookiejar = CookieJar() for line in self.curl.getinfo(pycurl.INFO_COOKIELIST): values = line.split("\t") domain = values[0].lower() if domain.startswith("#httponly_"): domain = domain.replace("#httponly_", "") httponly = True else: httponly = False # old # cookies[values[-2]] = values[-1] # new cookie = create_cookie( name=values[5], value=values[6], domain=domain, path=values[2], secure=values[3] == "TRUE", expires=int(values[4]) if values[4] else None, httponly=httponly, ) cookiejar.set_cookie(cookie) return cookiejar
def extract_cookiejar(self):
    """
    Extract cookies that pycurl instance knows.

    Returns `CookieJar` object.
    """
    jar = CookieJar()
    # Each INFO_COOKIELIST entry is a tab-separated Netscape cookie line:
    # domain, subdomain-flag, path, secure-flag, expires, name, value.
    for raw_line in self.curl.getinfo(pycurl.INFO_COOKIELIST):
        fields = raw_line.split('\t')
        domain = fields[0].lower()
        # A "#httponly_" domain marker flags http-only cookies.
        httponly = domain.startswith('#httponly_')
        if httponly:
            domain = domain.replace('#httponly_', '')
        jar.set_cookie(create_cookie(
            name=fields[5],
            value=fields[6],
            domain=domain,
            path=fields[2],
            secure=fields[3] == "TRUE",
            expires=int(fields[4]) if fields[4] else None,
            httponly=httponly,
        ))
    return jar
def __init__(self, client, method):
    """Bind a client/method pair, sharing options but owning cookies.

    @param client: A suds client.
    @type client: L{Client}
    @param method: A target method.
    @type method: L{Method}
    """
    # Keep references to both collaborators and share the client options.
    self.method = method
    self.client = client
    self.options = client.options
    # Cookies are tracked per invocation context, not on the client.
    self.cookiejar = CookieJar()
def create_session(self):
    """Create an HTTP session and the cookie jar that backs it.

    Returns a ``(session, cookie_jar)`` tuple.  With ``self.use_requests``
    the session is a ``requests.Session`` configured with this client's
    SSL settings; otherwise it is a urllib opener built with an explicit
    empty ProxyHandler so qt4s controls proxying itself.
    """
    if self.use_requests:
        session = requests.Session()
        session.verify = self._ssl_verify
        session.cert = self._ssl_cert
        session.trust_env = False  # let qt4s handle proxying
        cookie_jar = session.cookies
    else:
        from six.moves.urllib import request
        cookie_jar = CookieJar()
        cookie_processor = request.HTTPCookieProcessor(cookie_jar)
        session = request.build_opener(request.ProxyHandler({}),
                                       cookie_processor)
    return session, cookie_jar
def __init__(self, server, receiver, oauth_header=None):
    """Initialize the message sender.

    Args:
        server: The bayeux server to send messages to
        receiver: The message receiver to pass the responses to
        oauth_header: optional OAuth header value; stored on the
            instance (use at send time is defined by callers -- not
            shown here).
    """
    # CookieAgent wraps a pooled Agent so the server's session cookies
    # are replayed automatically on every request.
    self.cookie_jar = CookieJar()
    self.agent = CookieAgent(
        Agent(reactor, pool=HTTPConnectionPool(reactor)), self.cookie_jar)
    self.client_id = -1  # Will be set upon receipt of the handshake response
    self.msg_id = 0
    self.server = server
    self.receiver = receiver
    self.oauth_header = oauth_header
def _make_opener(self, realm, base_url, username, password):
    """HTTP Basic Auth and cookie support for token verification.

    Builds and returns a urllib opener carrying both a basic-auth
    handler (preloaded with the given realm/credentials) and a cookie
    processor.
    """
    auth_handler = HTTPBasicAuthHandler()
    auth_handler.add_password(realm=realm,
                              uri=base_url,
                              user=username,
                              passwd=password)
    opener = build_opener(auth_handler)
    # NOTE(review): this installs a *global* opener that has auth but no
    # cookie handling, then builds a second, fuller opener below that is
    # only returned -- confirm whether install_opener here is intended.
    install_opener(opener)
    cookie_jar = CookieJar()
    cookie_handler = HTTPCookieProcessor(cookie_jar)
    handlers = [auth_handler, cookie_handler]
    opener = build_opener(*handlers)
    return opener
def __init__(self, url):
    """Connect to the XML-RPC server over https using GSSAPI auth."""
    # GSSAPI credentials must never travel over clear-text HTTP.
    if url.startswith('http://'):
        raise NitrateError(
            "Encrypted https communication required for "
            "GSSAPI authentication.\nURL provided: {0}".format(url))
    if not url.startswith('https://'):
        raise NitrateError("Unrecognized URL scheme: {0}".format(url))
    self._transport = GSSAPITransport()
    self._transport.cookiejar = CookieJar()
    self.server = xmlrpclib.ServerProxy(url, transport=self._transport,
                                        verbose=VERBOSE, allow_none=1)
    # Logging in primes the cookie jar with the session cookie.
    login_dict = self.do_command("Auth.login_krbv", [])
def __init__(self, username, password, url, use_mod_auth_kerb=False):
    """Log in to the XML-RPC service and keep the session cookie.

    Picks a cookie-aware transport matching the URL scheme (TLS or
    plain HTTP), then performs Auth.login with the given credentials.

    NOTE(review): ``use_mod_auth_kerb`` is accepted but never used in
    this constructor -- confirm whether it is consumed elsewhere.
    """
    if url.startswith('https://'):
        self._transport = SafeCookieTransport()
    elif url.startswith('http://'):
        self._transport = CookieTransport()
    else:
        raise NitrateError("Unrecognized URL scheme")
    self._transport.cookiejar = CookieJar()
    # print("COOKIES:", self._transport.cookiejar._cookies)
    self.server = xmlrpclib.ServerProxy(url, transport=self._transport,
                                        verbose=VERBOSE, allow_none=1)
    # Login, get a cookie into our cookie jar:
    login_dict = self.do_command(
        "Auth.login", [dict(
            username=username,
            password=password,
        )])
def _get_initial_token(url):
    """
    Create initial connection to get authentication token for future
    requests.

    Returns a string to be used in subsequent connections with the
    X-CSRFToken header or the empty string if we didn't find any token
    in the cookies.
    """
    logging.info('Getting initial CSRF token.')

    cookiejar = CookieJar()
    opener = build_opener(HTTPCookieProcessor(cookiejar))
    install_opener(opener)
    opener.open(url)

    for cookie in cookiejar:
        if cookie.name == 'csrftoken':
            logging.info('Found CSRF token.')
            return cookie.value

    # logging.warn() is a deprecated alias of warning() (removed in 3.13).
    logging.warning('Did not find the CSRF token.')
    return ''
def __init__(self, name, host=None, username=None, password=None):
    """Constructor.

    Falls back to the app-wide torrent settings for any argument that
    is not supplied.

    :param name: name of this client instance
    :type name: string
    :param host: torrent host URL (defaults to app.TORRENT_HOST)
    :type host: string
    :param username: auth username (defaults to app.TORRENT_USERNAME)
    :type username: string
    :param password: auth password (defaults to app.TORRENT_PASSWORD)
    :type password: string
    """
    self.name = name
    self.username = app.TORRENT_USERNAME if username is None else username
    self.password = app.TORRENT_PASSWORD if password is None else password
    self.host = app.TORRENT_HOST if host is None else host
    self.rpcurl = app.TORRENT_RPCURL
    self.url = None
    self.response = None
    self.auth = None
    self.last_time = time.time()
    # Session with basic auth preset and a fresh cookie jar.
    self.session = helpers.make_session()
    self.session.auth = (self.username, self.password)
    self.session.cookies = CookieJar()
def url_downloader(url, data=None, path=None, cookie=None, timeout=5,
                   retry=1, retry_ivl=5, agent=None, proxy=None):
    """Download URL link

    url: url to download
    data: post data
    path: download to local file
    timeout: socket timeout
    retry: retry times to download url
    retry_ivl: interval time when retry
    agent: http user agent
    proxy: socks5://127.0.0.1:1080

    Returns a dict with 'mime', 'path', 'data', 'url', 'cookie' and
    'error' keys.  'error' is 'Ok' on success, the exception text
    otherwise (in which case mime/data/url are None).
    """
    # Bind response before the try block so the except path can reference
    # it even when the failure happens during request construction
    # (previously a NameError when urlencode/Request raised).
    response = None
    while True:
        try:
            if isinstance(data, dict):
                data = urlencode(data)
            request = Request(url, data=data)
            request.add_header('User-Agent', agent or get_user_agent())
            if data:
                request.add_header(
                    'Content-Type',
                    'application/x-www-form-urlencoded;charset=utf-8')
            handlers = []
            if proxy:
                # proxy format: scheme://host:port, e.g. socks5://127.0.0.1:1080
                scheme, host, port = proxy.split(':')
                host = host.strip('/')
                proxy_handler = SocksiPyHandler(
                    socks.PROXY_TYPES[scheme.upper()], host, int(port)
                )
                handlers.append(proxy_handler)
            if cookie is None:
                cookie = CookieJar()
            cookie_handler = HTTPCookieProcessor(cookie)
            handlers.append(cookie_handler)
            opener = build_opener(*handlers)
            response = opener.open(request, timeout=timeout)
            content_encoding = response.info().get('content-encoding')
            if content_encoding:
                r_data = gzip.decompress(response.read())
            else:
                r_data = response.read()
            if path:
                # Persist to disk instead of returning the payload.
                with open(path, 'wb') as f:
                    f.write(r_data)
                r_data = None
            response.close()
            mime = response.info().get('content-type')
            real_url = response.geturl()
            err_msg = 'Ok'
            break
        except Exception as err:
            # URLError and socket.error are Exception subclasses, so a
            # single except clause covers the original tuple.
            if response is not None:
                response.close()
                response = None
            retry -= 1
            err_msg = str(err)
            if retry > 0:
                time.sleep(retry_ivl)
                # Back off: double the wait and the timeout on each retry.
                retry_ivl += retry_ivl
                timeout += timeout
            else:
                mime = r_data = real_url = None
                break
    return {
        'mime': mime,
        'path': path,
        'data': r_data,
        'url': real_url,
        'cookie': cookie,
        'error': err_msg,
    }
RATING_NUKED = 2 CODEC_UNKNOWN = 0 CODEC_XVID = 1 CODEC_H264 = 2 CODEC_H265 = 3 CODEC_MP3 = 4 CODEC_AAC = 5 CODEC_AC3 = 6 CODEC_DTS = 7 CODEC_DTSHD = 8 CODEC_DTSHDMA = 9 USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.66 Safari/537.36" COOKIE_JAR = CookieJar() urllib_request.install_opener(urllib_request.build_opener(urllib_request.HTTPCookieProcessor(COOKIE_JAR))) class closing(object): def __init__(self, thing): self.thing = thing def __enter__(self): return self.thing def __exit__(self, *exc_info): self.thing.close() def parse_json(data): try:
def from_cookie_list(cls, clist):
    """Alternate constructor: wrap the given cookies in a fresh CookieJar."""
    jar = CookieJar()
    for cookie_obj in clist:
        jar.set_cookie(cookie_obj)
    return cls(jar)
def __init__(self, cookiejar=None):
    """Use the supplied cookiejar, or start with an empty CookieJar."""
    self.cookiejar = CookieJar() if cookiejar is None else cookiejar
class CookieManager(object):
    """
    Each Grab instance has `cookies` attribute that is instance of
    `CookieManager` class.

    That class contains helpful methods to create, load, save cookies
    from/to different places.
    """

    __slots__ = ('cookiejar',)

    def __init__(self, cookiejar=None):
        # Use the caller's jar when given, otherwise start empty.
        if cookiejar is not None:
            self.cookiejar = cookiejar
        else:
            self.cookiejar = CookieJar()
        # self.disable_cookiejar_lock(self.cookiejar)

    # def disable_cookiejar_lock(self, cj):
    #     cj._cookies_lock = dummy_threading.RLock()

    def set(self, name, value, domain, **kwargs):
        """Add new cookie or replace existing cookie
        with same parameters.

        :param name: name of cookie
        :param value: value of cookie
        :param kwargs: extra attributes of cookie
        """
        # The cookiejar machinery cannot match a bare "localhost" domain.
        if domain == 'localhost':
            domain = ''
        self.cookiejar.set_cookie(create_cookie(name, value, domain,
                                                **kwargs))

    def update(self, cookies):
        """Merge cookies from another CookieJar or CookieManager."""
        if isinstance(cookies, CookieJar):
            for cookie in cookies:
                self.cookiejar.set_cookie(cookie)
        elif isinstance(cookies, CookieManager):
            for cookie in cookies.cookiejar:
                self.cookiejar.set_cookie(cookie)
        else:
            raise GrabMisuseError('Unknown type of cookies argument: %s'
                                  % type(cookies))

    @classmethod
    def from_cookie_list(cls, clist):
        """Build a manager whose jar holds every cookie in `clist`."""
        cj = CookieJar()
        for cookie in clist:
            cj.set_cookie(cookie)
        return cls(cj)

    def clear(self):
        """Drop all cookies by replacing the jar with an empty one."""
        self.cookiejar = CookieJar()

    def __getstate__(self):
        # CookieJar instances are not picklable; serialize the plain
        # cookie list instead and drop the jar from the state dict.
        state = {}
        for cls in type(self).mro():
            cls_slots = getattr(cls, '__slots__', ())
            for slot in cls_slots:
                if slot != '__weakref__':
                    if hasattr(self, slot):
                        state[slot] = getattr(self, slot)
        state['_cookiejar_cookies'] = list(self.cookiejar)
        del state['cookiejar']
        return state

    def __setstate__(self, state):
        # Rebuild the jar from the cookie list saved by __getstate__.
        state['cookiejar'] = CookieJar()
        for cookie in state['_cookiejar_cookies']:
            state['cookiejar'].set_cookie(cookie)
        del state['_cookiejar_cookies']
        for slot, value in state.items():
            setattr(self, slot, value)

    def __getitem__(self, key):
        # Return the value of the first cookie named `key`.
        for cookie in self.cookiejar:
            if cookie.name == key:
                return cookie.value
        raise KeyError

    def items(self):
        """Return cookies as a list of (name, value) pairs."""
        res = []
        for cookie in self.cookiejar:
            res.append((cookie.name, cookie.value))
        return res

    def load_from_file(self, path):
        """
        Load cookies from the file.

        Content of file should be a JSON-serialized list of dicts.
        """
        with open(path) as inf:
            data = inf.read()
            if data:
                items = json.loads(data)
            else:
                items = {}
        for item in items:
            # Everything besides name/value/domain is forwarded as an
            # extra cookie attribute.
            extra = dict((x, y) for x, y in item.items()
                         if x not in ['name', 'value', 'domain'])
            self.set(item['name'], item['value'], item['domain'], **extra)

    def get_dict(self):
        """Serialize each cookie to a dict of its COOKIE_ATTRS fields."""
        res = []
        for cookie in self.cookiejar:
            res.append(dict((x, getattr(cookie, x)) for x in COOKIE_ATTRS))
        return res

    def save_to_file(self, path):
        """
        Dump all cookies to file.

        Cookies are dumped as JSON-serialized dict of keys and values.
        """
        with open(path, 'w') as out:
            out.write(json.dumps(self.get_dict()))
def clear(self):
    """Discard every stored cookie by swapping in a brand-new jar."""
    empty_jar = CookieJar()
    self.cookiejar = empty_jar
class HttpTransport(Transport):
    """
    HTTP transport using urllib2.  Provided basic http transport
    that provides for cookies, proxies but no authentication.
    """

    def __init__(self, **kwargs):
        """
        @param kwargs: Keyword arguments.
            - B{proxy} - An http proxy to be specified on requests.
                The proxy is defined as {protocol:proxy,}
                - type: I{dict}
                - default: {}
            - B{timeout} - Set the url open timeout (seconds).
                - type: I{float}
                - default: 90
        """
        Transport.__init__(self)
        Unskin(self.options).update(kwargs)
        self.cookiejar = CookieJar()
        self.proxy = {}
        self.urlopener = None

    def open(self, request):
        # GET the url, translating HTTP errors into TransportError.
        try:
            url = request.url
            log.debug('opening (%s)', url)
            u2request = urllib.request.Request(url)
            self.proxy = self.options.proxy
            return self.u2open(u2request)
        except urllib.error.HTTPError as e:
            raise TransportError(str(e), e.code, e.fp)

    def send(self, request):
        # POST the message; 202/204 responses carry no reply body, any
        # other HTTP error becomes a TransportError.
        result = None
        url = request.url
        msg = request.message
        headers = request.headers
        try:
            u2request = urllib.request.Request(url, msg, headers)
            self.addcookies(u2request)
            self.proxy = self.options.proxy
            request.headers.update(u2request.headers)
            log.debug('sending:\n%s', request)
            fp = self.u2open(u2request)
            self.getcookies(fp, u2request)
            result = Reply(200, fp.headers.__dict__, fp.read())
            log.debug('received:\n%s', result)
        except urllib.error.HTTPError as e:
            if e.code in (202, 204):
                result = None
            else:
                raise TransportError(e.msg, e.code, e.fp)
        return result

    def addcookies(self, u2request):
        """
        Add cookies in the cookiejar to the request.
        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Request.
        """
        self.cookiejar.add_cookie_header(u2request)

    def getcookies(self, fp, u2request):
        """
        Add cookies in the request to the cookiejar.
        @param u2request: A urllib2 request.
        @rtype: u2request: urllib2.Request.
        """
        self.cookiejar.extract_cookies(fp, u2request)

    def u2open(self, u2request):
        """
        Open a connection.
        @param u2request: A urllib2 request.
        @type u2request: urllib2.Request.
        @return: The opened file-like urllib2 object.
        @rtype: fp
        """
        tm = self.options.timeout
        url = self.u2opener()
        return url.open(u2request, timeout=tm)

    def u2opener(self):
        """
        Create a urllib opener.
        @return: An opener.
        @rtype: I{OpenerDirector}
        """
        # A caller-provided opener takes precedence over a built one.
        if self.urlopener is None:
            return urllib.request.build_opener(*self.u2handlers())
        else:
            return self.urlopener

    def u2handlers(self):
        """
        Get a collection of urllib handlers.
        @return: A list of handlers to be installed in the opener.
        @rtype: [Handler,...]
        """
        handlers = []
        handlers.append(urllib.request.ProxyHandler(self.proxy))
        return handlers

    def __deepcopy__(self, memo={}):
        # Clone carries over the options only (not cookies/opener).
        # NOTE(review): mutable default argument `memo={}`; harmless here
        # since memo is never touched, but worth confirming before reuse.
        clone = self.__class__()
        p = Unskin(self.options)
        cp = Unskin(clone.options)
        cp.update(p)
        return clone
def __init__(self, url, cookie_file=None, username=None, password=None,
             api_token=None, agent=None, session=None, disable_proxy=False,
             auth_callback=None, otp_token_callback=None, verify_ssl=True,
             save_cookies=True, ext_auth_cookies=None):
    """Set up the API connection: cookie storage, auth handlers, opener.

    Cookies persist to disk only when ``save_cookies`` is true; an
    optional ``session`` value seeds the jar with a session cookie, and
    ``ext_auth_cookies`` names an extra cookie file to load.  The built
    urllib opener is installed globally via install_opener().
    """
    if not url.endswith('/'):
        url += '/'

    self.url = url + 'api/'

    self.save_cookies = save_cookies
    self.ext_auth_cookies = ext_auth_cookies

    if self.save_cookies:
        # Persisted jar on disk; a missing/unreadable file is ignored.
        self.cookie_jar, self.cookie_file = create_cookie_jar(
            cookie_file=cookie_file)

        try:
            self.cookie_jar.load(ignore_expires=True)
        except IOError:
            pass
    else:
        # In-memory only; nothing is written back.
        self.cookie_jar = CookieJar()
        self.cookie_file = None

    if self.ext_auth_cookies:
        try:
            self.cookie_jar.load(ext_auth_cookies, ignore_expires=True)
        except IOError as e:
            logging.critical(
                'There was an error while loading a '
                'cookie file: %s', e)
            pass

    # Get the cookie domain from the url. If the domain
    # does not contain a '.' (e.g. 'localhost'), we assume
    # it is a local domain and suffix it (See RFC 2109).
    parsed_url = urlparse(url)
    self.domain = parsed_url[1].partition(':')[0]  # Remove Port.

    if self.domain.count('.') < 1:
        self.domain = '%s.local' % self.domain

    if session:
        # Seed the jar with an explicit session cookie.
        cookie = Cookie(version=0,
                        name=RB_COOKIE_NAME,
                        value=session,
                        port=None,
                        port_specified=False,
                        domain=self.domain,
                        domain_specified=True,
                        domain_initial_dot=True,
                        path=parsed_url[2],
                        path_specified=True,
                        secure=False,
                        expires=None,
                        discard=False,
                        comment=None,
                        comment_url=None,
                        rest={'HttpOnly': None})
        self.cookie_jar.set_cookie(cookie)

        if self.save_cookies:
            self.cookie_jar.save()

    if username:
        # If the username parameter is given, we have to clear the session
        # cookie manually or it will override the username:password
        # combination retrieved from the authentication callback.
        try:
            self.cookie_jar.clear(self.domain, parsed_url[2],
                                  RB_COOKIE_NAME)
        except KeyError:
            pass

    # Set up the HTTP libraries to support all of the features we need.
    password_mgr = ReviewBoardHTTPPasswordMgr(self.url,
                                              username,
                                              password,
                                              api_token,
                                              auth_callback,
                                              otp_token_callback)
    self.preset_auth_handler = PresetHTTPAuthHandler(
        self.url, password_mgr)

    handlers = []

    if not verify_ssl:
        # NOTE(review): disables certificate verification entirely.
        context = ssl._create_unverified_context()
        handlers.append(HTTPSHandler(context=context))

    if disable_proxy:
        handlers.append(ProxyHandler({}))

    handlers += [
        HTTPCookieProcessor(self.cookie_jar),
        ReviewBoardHTTPBasicAuthHandler(password_mgr),
        HTTPDigestAuthHandler(password_mgr),
        self.preset_auth_handler,
        ReviewBoardHTTPErrorProcessor(),
    ]

    if agent:
        self.agent = agent
    else:
        self.agent = ('RBTools/' + get_package_version()).encode('utf-8')

    opener = build_opener(*handlers)
    opener.addheaders = [
        (str('User-agent'), str(self.agent)),
    ]
    install_opener(opener)

    self._cache = None
    self._urlopen = urlopen
class ReviewBoardServer(object):
    """Represents a Review Board server we are communicating with.

    Provides methods for executing HTTP requests on a Review Board
    server's Web API.

    The ``auth_callback`` parameter can be used to specify a callable
    which will be called when authentication fails. This callable will be
    passed the realm, and url of the Review Board server and should
    return a 2-tuple of username, password. The user can be prompted for
    their credentials using this mechanism.
    """

    def __init__(self, url, cookie_file=None, username=None, password=None,
                 api_token=None, agent=None, session=None,
                 disable_proxy=False, auth_callback=None,
                 otp_token_callback=None, verify_ssl=True, save_cookies=True,
                 ext_auth_cookies=None):
        """Set up cookies, auth handlers and a globally installed opener."""
        if not url.endswith('/'):
            url += '/'

        self.url = url + 'api/'

        self.save_cookies = save_cookies
        self.ext_auth_cookies = ext_auth_cookies

        if self.save_cookies:
            # Persisted jar on disk; a missing/unreadable file is ignored.
            self.cookie_jar, self.cookie_file = create_cookie_jar(
                cookie_file=cookie_file)

            try:
                self.cookie_jar.load(ignore_expires=True)
            except IOError:
                pass
        else:
            # In-memory only; nothing is written back.
            self.cookie_jar = CookieJar()
            self.cookie_file = None

        if self.ext_auth_cookies:
            try:
                self.cookie_jar.load(ext_auth_cookies, ignore_expires=True)
            except IOError as e:
                logging.critical(
                    'There was an error while loading a '
                    'cookie file: %s', e)
                pass

        # Get the cookie domain from the url. If the domain
        # does not contain a '.' (e.g. 'localhost'), we assume
        # it is a local domain and suffix it (See RFC 2109).
        parsed_url = urlparse(url)
        self.domain = parsed_url[1].partition(':')[0]  # Remove Port.

        if self.domain.count('.') < 1:
            self.domain = '%s.local' % self.domain

        if session:
            # Seed the jar with an explicit session cookie.
            cookie = Cookie(version=0,
                            name=RB_COOKIE_NAME,
                            value=session,
                            port=None,
                            port_specified=False,
                            domain=self.domain,
                            domain_specified=True,
                            domain_initial_dot=True,
                            path=parsed_url[2],
                            path_specified=True,
                            secure=False,
                            expires=None,
                            discard=False,
                            comment=None,
                            comment_url=None,
                            rest={'HttpOnly': None})
            self.cookie_jar.set_cookie(cookie)

            if self.save_cookies:
                self.cookie_jar.save()

        if username:
            # If the username parameter is given, we have to clear the
            # session cookie manually or it will override the
            # username:password combination retrieved from the
            # authentication callback.
            try:
                self.cookie_jar.clear(self.domain, parsed_url[2],
                                      RB_COOKIE_NAME)
            except KeyError:
                pass

        # Set up the HTTP libraries to support all of the features we need.
        password_mgr = ReviewBoardHTTPPasswordMgr(self.url,
                                                  username,
                                                  password,
                                                  api_token,
                                                  auth_callback,
                                                  otp_token_callback)
        self.preset_auth_handler = PresetHTTPAuthHandler(
            self.url, password_mgr)

        handlers = []

        if not verify_ssl:
            # NOTE(review): disables certificate verification entirely.
            context = ssl._create_unverified_context()
            handlers.append(HTTPSHandler(context=context))

        if disable_proxy:
            handlers.append(ProxyHandler({}))

        handlers += [
            HTTPCookieProcessor(self.cookie_jar),
            ReviewBoardHTTPBasicAuthHandler(password_mgr),
            HTTPDigestAuthHandler(password_mgr),
            self.preset_auth_handler,
            ReviewBoardHTTPErrorProcessor(),
        ]

        if agent:
            self.agent = agent
        else:
            self.agent = ('RBTools/' + get_package_version()).encode('utf-8')

        opener = build_opener(*handlers)
        opener.addheaders = [
            (str('User-agent'), str(self.agent)),
        ]
        install_opener(opener)

        self._cache = None
        self._urlopen = urlopen

    def enable_cache(self, cache_location=None, in_memory=False):
        """Enable caching for all future HTTP requests.

        The cache will be created at the default location if none is
        provided.

        If the in_memory parameter is True, the cache will be created in
        memory instead of on disk. This overrides the cache_location
        parameter.
        """
        if not self._cache:
            self._cache = APICache(create_db_in_memory=in_memory,
                                   db_location=cache_location)

            self._urlopen = self._cache.make_request

    def login(self, username, password):
        """Reset the user information"""
        self.preset_auth_handler.reset(username, password)

    def logout(self):
        """Logs the user out of the session."""
        self.preset_auth_handler.reset(None, None)
        self.make_request(HttpRequest('%ssession/' % self.url,
                                      method='DELETE'))
        self.cookie_jar.clear(self.domain)

        if self.save_cookies:
            self.cookie_jar.save()

    def process_error(self, http_status, data):
        """Processes an error, raising an APIError with the information."""
        # In Python 3, the data can be bytes, not str, and json.loads
        # explicitly requires decoded strings.
        data = force_unicode(data)

        try:
            rsp = json_loads(data)

            assert rsp['stat'] == 'fail'

            logging.debug('Got API Error %d (HTTP code %d): %s',
                          rsp['err']['code'], http_status,
                          rsp['err']['msg'])
            logging.debug('Error data: %r', rsp)

            raise create_api_error(http_status, rsp['err']['code'], rsp,
                                   rsp['err']['msg'])
        except ValueError:
            # The body was not JSON at all; surface a generic APIError.
            logging.debug('Got HTTP error: %s: %s', http_status, data)
            raise APIError(http_status, None, None, data)

    def make_request(self, request):
        """Perform an http request.

        The request argument should be an instance of
        'rbtools.api.request.HttpRequest'.
        """
        try:
            content_type, body = request.encode_multipart_formdata()
            headers = request.headers

            if body:
                headers.update({
                    'Content-Type': content_type,
                    'Content-Length': str(len(body)),
                })
            else:
                headers['Content-Length'] = '0'

            rsp = self._urlopen(
                Request(request.url, body, headers, request.method))
        except HTTPError as e:
            self.process_error(e.code, e.read())
        except URLError as e:
            raise ServerInterfaceError('%s' % e.reason)

        if self.save_cookies:
            try:
                self.cookie_jar.save()
            except IOError:
                pass

        return rsp
class CookieManager(object):
    """
    Each Grab instance has `cookies` attribute that is instance of
    `CookieManager` class.

    That class contains helpful methods to create, load, save cookies
    from/to different places.
    """

    __slots__ = ('cookiejar', )

    def __init__(self, cookiejar=None):
        # Use the caller's jar when given, otherwise start empty.
        if cookiejar is not None:
            self.cookiejar = cookiejar
        else:
            self.cookiejar = CookieJar()
        # self.disable_cookiejar_lock(self.cookiejar)

    # def disable_cookiejar_lock(self, cj):
    #     cj._cookies_lock = dummy_threading.RLock()

    def set(self, name, value, domain, **kwargs):
        """Add new cookie or replace existing cookie
        with same parameters.

        :param name: name of cookie
        :param value: value of cookie
        :param kwargs: extra attributes of cookie
        """
        # The cookiejar machinery cannot match a bare "localhost" domain.
        if domain == 'localhost':
            domain = ''
        self.cookiejar.set_cookie(create_cookie(name, value, domain,
                                                **kwargs))

    def update(self, cookies):
        """Merge cookies from another CookieJar or CookieManager."""
        if isinstance(cookies, CookieJar):
            for cookie in cookies:
                self.cookiejar.set_cookie(cookie)
        elif isinstance(cookies, CookieManager):
            for cookie in cookies.cookiejar:
                self.cookiejar.set_cookie(cookie)
        else:
            raise GrabMisuseError('Unknown type of cookies argument: %s'
                                  % type(cookies))

    @classmethod
    def from_cookie_list(cls, clist):
        """Build a manager whose jar holds every cookie in `clist`."""
        jar = CookieJar()
        for cookie in clist:
            jar.set_cookie(cookie)
        return cls(jar)

    def clear(self):
        """Drop all cookies by replacing the jar with an empty one."""
        self.cookiejar = CookieJar()

    def __getstate__(self):
        # CookieJar instances are not picklable; serialize the plain
        # cookie list instead and drop the jar from the state dict.
        state = {}
        for cls in type(self).mro():
            cls_slots = getattr(cls, '__slots__', ())
            for slot in cls_slots:
                if slot != '__weakref__':
                    if hasattr(self, slot):
                        state[slot] = getattr(self, slot)
        state['_cookiejar_cookies'] = list(self.cookiejar)
        del state['cookiejar']
        return state

    def __setstate__(self, state):
        # Rebuild the jar from the cookie list saved by __getstate__.
        state['cookiejar'] = CookieJar()
        for cookie in state['_cookiejar_cookies']:
            state['cookiejar'].set_cookie(cookie)
        del state['_cookiejar_cookies']
        for slot, value in state.items():
            setattr(self, slot, value)

    def __getitem__(self, key):
        # Return the value of the first cookie named `key`.
        for cookie in self.cookiejar:
            if cookie.name == key:
                return cookie.value
        raise KeyError

    def items(self):
        """Return cookies as a list of (name, value) pairs."""
        res = []
        for cookie in self.cookiejar:
            res.append((cookie.name, cookie.value))
        return res

    def load_from_file(self, path):
        """
        Load cookies from the file.

        Content of file should be a JSON-serialized list of dicts.
        """
        with open(path) as inf:
            data = inf.read()
            if data:
                items = json.loads(data)
            else:
                items = {}
        for item in items:
            # Everything besides name/value/domain is forwarded as an
            # extra cookie attribute.
            extra = dict((x, y) for x, y in item.items()
                         if x not in ['name', 'value', 'domain'])
            self.set(item['name'], item['value'], item['domain'], **extra)

    def get_dict(self):
        """Serialize each cookie to a dict of its COOKIE_ATTRS fields."""
        res = []
        for cookie in self.cookiejar:
            res.append(dict((x, getattr(cookie, x)) for x in COOKIE_ATTRS))
        return res

    def save_to_file(self, path):
        """
        Dump all cookies to file.

        Cookies are dumped as JSON-serialized dict of keys and values.
        """
        with open(path, 'w') as out:
            out.write(json.dumps(self.get_dict()))

    def get_cookie_header(self, req):
        """Return the Cookie header value the jar would send for `req`.

        :param req: object with httplib.Request interface

            Actually, it have to have `url` and `headers` attributes
        """
        mocked_req = MockRequest(req)
        self.cookiejar.add_cookie_header(mocked_req)
        return mocked_req.get_new_headers().get('Cookie')
def __init__(self, url, cookie_file=None, username=None, password=None,
             api_token=None, agent=None, session=None, disable_proxy=False,
             auth_callback=None, otp_token_callback=None, verify_ssl=True,
             save_cookies=True):
    """Set up the API connection: cookie storage, auth handlers, opener.

    Cookies persist to disk only when ``save_cookies`` is true; an
    optional ``session`` value seeds the jar with a session cookie.  The
    built urllib opener is installed globally via install_opener().
    """
    if not url.endswith('/'):
        url += '/'

    self.url = url + 'api/'

    self.save_cookies = save_cookies

    if self.save_cookies:
        # Persisted jar on disk; a missing/unreadable file is ignored.
        self.cookie_jar, self.cookie_file = create_cookie_jar(
            cookie_file=cookie_file)

        try:
            self.cookie_jar.load(ignore_expires=True)
        except IOError:
            pass
    else:
        # In-memory only; nothing is written back.
        self.cookie_jar = CookieJar()
        self.cookie_file = None

    # Get the cookie domain from the url. If the domain
    # does not contain a '.' (e.g. 'localhost'), we assume
    # it is a local domain and suffix it (See RFC 2109).
    parsed_url = urlparse(url)
    self.domain = parsed_url[1].partition(':')[0]  # Remove Port.

    if self.domain.count('.') < 1:
        self.domain = '%s.local' % self.domain

    if session:
        # Seed the jar with an explicit session cookie.
        cookie = Cookie(
            version=0,
            name=RB_COOKIE_NAME,
            value=session,
            port=None,
            port_specified=False,
            domain=self.domain,
            domain_specified=True,
            domain_initial_dot=True,
            path=parsed_url[2],
            path_specified=True,
            secure=False,
            expires=None,
            discard=False,
            comment=None,
            comment_url=None,
            rest={'HttpOnly': None})
        self.cookie_jar.set_cookie(cookie)

        if self.save_cookies:
            self.cookie_jar.save()

    if username:
        # If the username parameter is given, we have to clear the session
        # cookie manually or it will override the username:password
        # combination retrieved from the authentication callback.
        try:
            self.cookie_jar.clear(self.domain, parsed_url[2],
                                  RB_COOKIE_NAME)
        except KeyError:
            pass

    # Set up the HTTP libraries to support all of the features we need.
    password_mgr = ReviewBoardHTTPPasswordMgr(self.url,
                                              username,
                                              password,
                                              api_token,
                                              auth_callback,
                                              otp_token_callback)
    self.preset_auth_handler = PresetHTTPAuthHandler(self.url,
                                                     password_mgr)

    handlers = []

    if not verify_ssl:
        # NOTE(review): disables certificate verification entirely.
        context = ssl._create_unverified_context()
        handlers.append(HTTPSHandler(context=context))

    if disable_proxy:
        handlers.append(ProxyHandler({}))

    handlers += [
        HTTPCookieProcessor(self.cookie_jar),
        ReviewBoardHTTPBasicAuthHandler(password_mgr),
        HTTPDigestAuthHandler(password_mgr),
        self.preset_auth_handler,
        ReviewBoardHTTPErrorProcessor(),
    ]

    if agent:
        self.agent = agent
    else:
        self.agent = ('RBTools/' + get_package_version()).encode('utf-8')

    opener = build_opener(*handlers)
    opener.addheaders = [
        (b'User-agent', self.agent),
    ]
    install_opener(opener)

    self._cache = None
    self._urlopen = urlopen
class ReviewBoardServer(object):
    """Represents a Review Board server we are communicating with.

    Provides methods for executing HTTP requests on a Review Board
    server's Web API.

    The ``auth_callback`` parameter can be used to specify a callable
    which will be called when authentication fails. This callable will
    be passed the realm, and url of the Review Board server and should
    return a 2-tuple of username, password. The user can be prompted
    for their credentials using this mechanism.
    """
    def __init__(self, url, cookie_file=None, username=None, password=None,
                 api_token=None, agent=None, session=None, disable_proxy=False,
                 auth_callback=None, otp_token_callback=None, verify_ssl=True,
                 save_cookies=True):
        if not url.endswith('/'):
            url += '/'

        self.url = url + 'api/'

        self.save_cookies = save_cookies

        if self.save_cookies:
            self.cookie_jar, self.cookie_file = create_cookie_jar(
                cookie_file=cookie_file)

            try:
                # Best-effort load; a missing cookie file is not an error.
                self.cookie_jar.load(ignore_expires=True)
            except IOError:
                pass
        else:
            # In-memory jar only; nothing is persisted.
            self.cookie_jar = CookieJar()
            self.cookie_file = None

        # Get the cookie domain from the url. If the domain
        # does not contain a '.' (e.g. 'localhost'), we assume
        # it is a local domain and suffix it (See RFC 2109).
        parsed_url = urlparse(url)
        self.domain = parsed_url[1].partition(':')[0]  # Remove Port.

        if self.domain.count('.') < 1:
            self.domain = '%s.local' % self.domain

        if session:
            # Seed the jar with the caller-supplied session cookie so
            # the first request is already authenticated.
            cookie = Cookie(
                version=0,
                name=RB_COOKIE_NAME,
                value=session,
                port=None,
                port_specified=False,
                domain=self.domain,
                domain_specified=True,
                domain_initial_dot=True,
                path=parsed_url[2],
                path_specified=True,
                secure=False,
                expires=None,
                discard=False,
                comment=None,
                comment_url=None,
                rest={'HttpOnly': None})
            self.cookie_jar.set_cookie(cookie)

            if self.save_cookies:
                self.cookie_jar.save()

        if username:
            # If the username parameter is given, we have to clear the session
            # cookie manually or it will override the username:password
            # combination retrieved from the authentication callback.
            try:
                self.cookie_jar.clear(self.domain, parsed_url[2],
                                      RB_COOKIE_NAME)
            except KeyError:
                # No matching cookie existed; nothing to clear.
                pass

        # Set up the HTTP libraries to support all of the features we need.
        password_mgr = ReviewBoardHTTPPasswordMgr(self.url,
                                                  username,
                                                  password,
                                                  api_token,
                                                  auth_callback,
                                                  otp_token_callback)
        self.preset_auth_handler = PresetHTTPAuthHandler(self.url,
                                                         password_mgr)

        handlers = []

        if not verify_ssl:
            # NOTE(review): ssl._create_unverified_context is a private
            # stdlib API; it disables certificate verification entirely.
            context = ssl._create_unverified_context()
            handlers.append(HTTPSHandler(context=context))

        if disable_proxy:
            # An empty ProxyHandler overrides environment proxy settings.
            handlers.append(ProxyHandler({}))

        handlers += [
            HTTPCookieProcessor(self.cookie_jar),
            ReviewBoardHTTPBasicAuthHandler(password_mgr),
            HTTPDigestAuthHandler(password_mgr),
            self.preset_auth_handler,
            ReviewBoardHTTPErrorProcessor(),
        ]

        if agent:
            self.agent = agent
        else:
            self.agent = ('RBTools/' + get_package_version()).encode('utf-8')

        opener = build_opener(*handlers)
        opener.addheaders = [
            (b'User-agent', self.agent),
        ]
        # NOTE: installs the opener process-wide for all urlopen callers.
        install_opener(opener)

        self._cache = None
        # Indirection point so enable_cache() can swap in a caching opener.
        self._urlopen = urlopen

    def enable_cache(self, cache_location=None, in_memory=False):
        """Enable caching for all future HTTP requests.

        The cache will be created at the default location if none is
        provided.

        If the in_memory parameter is True, the cache will be created in
        memory instead of on disk. This overrides the cache_location
        parameter.
        """
        # Idempotent: a second call keeps the first cache instance.
        if not self._cache:
            self._cache = APICache(create_db_in_memory=in_memory,
                                   db_location=cache_location)

            self._urlopen = self._cache.make_request

    def login(self, username, password):
        """Reset the user information"""
        self.preset_auth_handler.reset(username, password)

    def logout(self):
        """Logs the user out of the session."""
        # Drop credentials, delete the server-side session, then clear
        # and (optionally) persist the now-empty cookie state.
        self.preset_auth_handler.reset(None, None)
        self.make_request(HttpRequest('%ssession/' % self.url,
                                      method='DELETE'))
        self.cookie_jar.clear(self.domain)

        if self.save_cookies:
            self.cookie_jar.save()

    def process_error(self, http_status, data):
        """Processes an error, raising an APIError with the information.

        This never returns normally: it raises a specific API error when
        ``data`` is a JSON failure payload, and a generic APIError when
        the payload is not valid JSON.
        """
        try:
            rsp = json_loads(data)
            assert rsp['stat'] == 'fail'

            logging.debug('Got API Error %d (HTTP code %d): %s' %
                          (rsp['err']['code'], http_status,
                           rsp['err']['msg']))
            logging.debug('Error data: %r' % rsp)

            raise create_api_error(http_status, rsp['err']['code'], rsp,
                                   rsp['err']['msg'])
        except ValueError:
            # Non-JSON body (e.g. an HTML error page from a proxy).
            logging.debug('Got HTTP error: %s: %s' % (http_status, data))
            raise APIError(http_status, None, None, data)

    def make_request(self, request):
        """Perform an http request.

        The request argument should be an instance of
        'rbtools.api.request.HttpRequest'.
        """
        try:
            content_type, body = request.encode_multipart_formdata()
            headers = request.headers

            if body:
                headers.update({
                    b'Content-Type': content_type,
                    b'Content-Length': str(len(body)),
                })
            else:
                headers[b'Content-Length'] = '0'

            r = Request(request.url.encode('utf-8'), body, headers,
                        request.method.encode('utf-8'))
            rsp = self._urlopen(r)
        except HTTPError as e:
            # process_error always raises, so rsp is never read unbound
            # on this path.
            self.process_error(e.code, e.read())
        except URLError as e:
            raise ServerInterfaceError('%s' % e.reason)

        if self.save_cookies:
            try:
                # Best-effort persistence; ignore an unwritable cookie file.
                self.cookie_jar.save()
            except IOError:
                pass

        return rsp
def setUp(self):
    """Build a full in-process IdP test environment.

    Starts a temporary Redis instance, builds the IdP and cherrypy
    session configuration, creates the IdP application with a dummy
    pending action for the test user, and mounts everything in a
    webtest.TestApp so requests never touch a real HTTP server.

    NOTE: mutates module-global cherrypy state (config, request, tree,
    server), so tests using this fixture cannot run concurrently in one
    process.
    """
    MongoTestCase.setUp(self)
    datadir = pkg_resources.resource_filename(__name__, 'data')
    staticdir = pkg_resources.resource_filename(__name__, 'static/en')
    self.redis_instance = RedisTemporaryInstance.get_instance()
    # load the IdP configuration
    _test_config = {
        'debug': True,
        'insecure_cookies': False,
        'mongo_uri': self.tmp_db.uri,
        'environment': 'test_suite',
        'pysaml2_config': os.path.join(datadir, 'test_SSO_conf.py'),
        'static_dir': staticdir,
        'tou_version': 'mock-version',
        'shared_session_secret_key': 'shared-session-secret-key',
        'shared_session_ttl': 30,
        'redis_host': 'localhost',
        # the temporary Redis instance picks its own free port
        'redis_port': str(self.redis_instance.port),
        'redis_db': '0',
        'listen_addr': 'unittest-idp.example.edu',
        'listen_port': 443,
        'base_url': 'https://unittest-idp.example.edu/',
        'content_packages': [('eduid_idp', 'tests/static')],
        'action_plugins': ['tou', 'mfa'],
        'default_eppn_scope': 'example.edu',
    }
    # cherrypy session-tool settings; extended below with the IdP config
    cherry_conf = {
        'tools.sessions.on': True,
        'tools.sessions.storage_class': EduidSession,
        'tools.sessions.name': 'sessid',
        'tools.sessions.domain': 'unittest-idp.example.edu',
        'tools.sessions.secure': True,
        'tools.sessions.persistent': True,
        'tools.sessions.httponly': False,
    }
    self.config = IdPConfig.init_config(test_config=_test_config,
                                        debug=False)
    cherry_conf.update(self.config.to_dict())
    cherrypy.config.update(cherry_conf)
    cherrypy.config.logger = logger
    # reset any session-init marker left behind by a previous test run
    if hasattr(cherrypy.request, '_session_init_flag'):
        del cherrypy.request._session_init_flag
    init(storage_class=EduidSession, path='/', name='sessid',
         domain="unittest-idp.example.edu")

    # Create the IdP app
    self.idp_app = IdPApplication(logger, self.config)

    self.actions = self.idp_app.context.actions_db

    # setup some test data
    _email = '*****@*****.**'
    self.test_user = self.amdb.get_user_by_mail(_email)
    # a pending 'dummy' action for the test user, so action-plugin
    # handling can be exercised
    self.test_action = self.actions.add_action(self.test_user.eppn,
                                               action_type='dummy',
                                               preference=100,
                                               params={})

    # prevent the HTTP server from ever starting
    cherrypy.server.unsubscribe()

    # mount the IdP app in the cherrypy app server
    cherrypy.tree.mount(self.idp_app, '', {'/': cherry_conf})

    # create a webtest testing environment
    from six.moves.http_cookiejar import CookieJar
    self.http = webtest.TestApp(cherrypy.tree,
                                extra_environ={'wsgi.url_scheme': 'https'},
                                cookiejar=CookieJar())
def from_cookie_list(cls, clist):
    """Build an instance whose jar holds every cookie in ``clist``.

    Each element of ``clist`` is stored in a fresh CookieJar, which is
    then handed to the class constructor.
    """
    jar = CookieJar()
    for item in clist:
        jar.set_cookie(item)
    return cls(jar)
def extract_cookiejar(self, resp, req):
    """Return a CookieJar holding the cookies that ``resp`` set for ``req``.

    The response/request pair is wrapped in the Mock* adapters so the
    stdlib jar can read the Set-Cookie headers from the underlying
    urllib3 response message.
    """
    mock_rsp = MockResponse(resp._original_response.msg)
    mock_req = MockRequest(req)
    jar = CookieJar()
    jar.extract_cookies(mock_rsp, mock_req)
    return jar
class CookieTransport(xmlrpclib.Transport):
    '''A subclass of xmlrpclib.Transport that supports cookies.'''

    # Shared jar; lazily created by send_cookies() on first use.
    cookiejar = None
    # URL scheme used when reconstructing the request URL for the jar.
    scheme = 'http'

    # Cribbed from xmlrpclib.Transport.send_user_agent
    def send_cookies(self, connection, cookie_request):
        """Write the jar's applicable Cookie headers onto ``connection``.

        Creates the jar on first call (in which case it is empty and no
        headers are sent).
        '''cookie_request'' is a urllib2.Request used only so the jar
        can decide which cookies apply.
        """
        if self.cookiejar is None:
            self.cookiejar = CookieJar()
        elif self.cookiejar:
            # Let the cookiejar figure out what cookies are appropriate
            self.cookiejar.add_cookie_header(cookie_request)
            # Pull the cookie headers out of the request object...
            cookielist = list()
            for h, v in cookie_request.header_items():
                if h.startswith('Cookie'):
                    cookielist.append([h, v])
            # ...and put them over the connection
            for h, v in cookielist:
                connection.putheader(h, v)

    # This is just python 2.7's xmlrpclib.Transport.single_request, with
    # send additions noted below to send cookies along with the request
    def single_request_with_cookies(self, host, handler, request_body,
                                    verbose=0):
        """Issue one XML-RPC request, sending and capturing cookies."""
        # ADDED: construct the URL and Request object for proper cookie
        # handling
        request_url = "%s://%s%s" % (self.scheme, host, handler)
        #log.debug("request_url is %s" % request_url)
        cookie_request = urllib2.Request(request_url)

        try:
            if six.PY2:
                h = self.make_connection(host)

                if verbose:
                    h.set_debuglevel(1)

                self.send_request(h, handler, request_body)
                self.send_host(h, host)
                self.send_cookies(h, cookie_request)  # ADDED. creates cookiejar if None.
                self.send_user_agent(h)
                self.send_content(h, request_body)
            else:
                # Python 3 xmlrpc.client.Transport makes its own connection
                # NOTE(review): on this path send_cookies() is never
                # called, so self.cookiejar may still be None when
                # extract_cookies() runs below — confirm how PY3 callers
                # initialize the jar.
                h = self.send_request(host, handler, request_body, verbose)

            response = h.getresponse()

            # ADDED: parse headers and get cookies here
            cookie_response = CookieResponse(response.msg)
            # Okay, extract the cookies from the headers
            self.cookiejar.extract_cookies(cookie_response, cookie_request)
            #log.debug("cookiejar now contains: %s" % self.cookiejar._cookies)

            # And write back any changes (only file-backed jars, e.g.
            # LWPCookieJar, have a save() method)
            if hasattr(self.cookiejar, 'save'):
                try:
                    self.cookiejar.save(self.cookiejar.filename)
                except Exception as e:
                    # NOTE(review): re-raises instead of logging; the
                    # intended best-effort logging is commented out.
                    raise
                    #log.error("Couldn't write cookiefile %s: %s" % \
                    #        (self.cookiejar.filename,str(e)))

            if response.status == 200:
                self.verbose = verbose
                return self.parse_response(response)

            # Non-200: drain the body (if any) before raising so the
            # connection is left in a clean state.
            if (response.getheader("content-length", 0)):
                response.read()

            raise xmlrpclib.ProtocolError(
                host + handler,
                response.status, response.reason,
                response.msg,
                )
        except xmlrpclib.Fault:
            raise
        finally:
            try:
                h.close()
            except NameError:  # h not initialized yet
                pass

    # Override the appropriate request method
    single_request = single_request_with_cookies  # python 2.7+