def init_http_head(self):
    """Reset HTTP session state: a fresh cookie jar, no URL, JSON headers."""
    # A new jar guarantees no stale session cookies survive a reset.
    self.cookie = http_cookiejar.CookieJar()
    self.url = None
    # Default headers for JSON API calls over a persistent connection.
    self.headers = {
        "Connection": "keep-alive",
        "Content-Type": "application/json",
    }
def get_cookie_opener(gs_username, gs_token, gs_toolname=None):
    """ Create a GenomeSpace cookie opener """
    jar = http_cookiejar.CookieJar()
    session_cookies = (('gs-token', gs_token), ('gs-username', gs_username))
    for name, value in session_cookies:
        # Super-cookie: leaving the domain empty/unspecified makes it
        # valid for all domains.
        jar.set_cookie(http_cookiejar.Cookie(
            version=0,
            name=name,
            value=value,
            port=None,
            port_specified=False,
            domain='',
            domain_specified=False,
            domain_initial_dot=False,
            path='/',
            path_specified=True,
            secure=False,
            expires=None,
            discard=True,
            comment=None,
            comment_url=None,
            rest={'HttpOnly': None},
            rfc2109=False))
    opener = build_opener(HTTPCookieProcessor(jar))
    # Identify the calling tool to GenomeSpace on every request.
    toolname = gs_toolname or DEFAULT_GENOMESPACE_TOOLNAME
    opener.addheaders.append(('gs-toolname', toolname))
    return opener
def __init__(self, app, extra_environ=None, relative_to=None,
             use_unicode=True, cookiejar=None, parser_features=None):
    """Wrap *app* for testing; string specs are resolved to real apps."""
    # Environment override lets the suite target a live deployment instead.
    app = os.environ.get('WEBTEST_TARGET_URL', app)
    if isinstance(app, string_types):
        if not app.startswith('http'):
            from paste.deploy import loadapp
            # @@: Should pick up relative_to from calling module's
            # __file__
            app = loadapp(app, relative_to=relative_to)
        else:
            from wsgiproxy import HostProxy
            # Default transport is httplib unless one is named after '#'.
            if '#' not in app:
                app += '#httplib'
            target, transport = app.split('#', 1)
            app = HostProxy(target, client=transport)
    self.app = app
    self.relative_to = relative_to
    self.extra_environ = {} if extra_environ is None else extra_environ
    self.use_unicode = use_unicode
    self.cookiejar = cookiejar or http_cookiejar.CookieJar()
    if parser_features:
        self.RequestClass.ResponseClass.parser_features = parser_features
def createCookie(
        url,
        cj=None,
        agent='Mozilla/5.0 (Windows NT 6.1; rv:32.0) Gecko/20100101 Firefox/32.0'):
    """Fetch *url* with a cookie-aware requests session, solving a Sucuri
    anti-bot challenge when one is served.

    :param url: page to fetch
    :param cj: optional CookieJar to reuse; a fresh one is created if None
    :param agent: User-Agent header value sent with the requests
    :returns: page body as text, or '' (best-effort) on failure
    """
    urlData = ''
    try:
        if cj is None:
            cj = http_cookiejar.CookieJar()
        import requests
        session = requests.session()
        session.cookies = cj
        headers = {"User-Agent": agent}
        urlData = session.get(url, headers=headers).text
        # A Sucuri challenge page embeds the cookie we must replay; if one
        # is present, install it and retry the fetch.
        isCookie = _get_sucuri_cookie(urlData)
        if isCookie:
            session.cookies = _make_cookies(url, isCookie, cj)
            urlData = session.get(url, headers=headers).text
        return urlData
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; keep the best-effort contract but let those
        # propagate.
        traceback.print_exc(file=sys.stdout)
        return urlData
def _set_cookies(self, src):
    '''
    function that returns a urllib2 opener for retrieving data from *src*

    input:
        *src* : 'asos' or 'wunderground' or 'wunder_nonairport'
    '''
    jar = http_cookiejar.CookieJar()
    opener = request.build_opener(request.HTTPCookieProcessor(jar))
    source = src.lower()
    try:
        if source == 'wunderground':
            # Three hits: the station's history page, a preference URL that
            # switches METAR display on, then the history page again with
            # that preference applied and CSV output requested.
            url1 = 'http://www.wunderground.com/history/airport/%s/2011/12/4/DailyHistory.html?' % self.sta_id
            url2 = 'http://www.wunderground.com/cgi-bin/findweather/getForecast?setpref=SHOWMETAR&value=1'
            url3 = 'http://www.wunderground.com/history/airport/%s/2011/12/4/DailyHistory.html?&&theprefset=SHOWMETAR&theprefvalue=1&format=1' % self.sta_id
            for url in (url1, url2, url3):
                opener.open(url)
        elif source == 'asos':
            opener.open('ftp://ftp.ncdc.noaa.gov/pub/data/asos-fivemin/')
        elif source == 'wunder_nonairport':
            opener.open('http://www.wunderground.com/weatherstation/WXDailyHistory.asp?ID=MEGKO3&day=1&year=2013&month=1&graphspan=day&format=1')
    except error.URLError:
        # Offline: callers fall back to locally cached data.
        print(('connection to %s not available. working locally' % src))
    return opener
def test_cookie_policy(self):
    """A strict cookie policy must reject a secure .example.org cookie."""
    from six.moves import http_cookiejar

    def cookie_app(environ, start_response):
        # Minimal WSGI app that attempts to set a cross-domain cookie.
        body = 'Cookie.'
        start_response(to_bytes("200 OK"), [
            ('Content-Type', 'text/plain'),
            ('Content-Length', str(len(body))),
            ('Set-Cookie', 'spam=eggs; secure; Domain=.example.org;'),
        ])
        return [to_bytes(body)]

    # Tighten domain matching so the cookie above cannot be accepted.
    policy = webtest.app.CookiePolicy()
    policy.strict_ns_domain |= (policy.DomainStrictNoDots
                                | policy.DomainRFC2965Match
                                | policy.DomainStrictNonDomain)
    jar = http_cookiejar.CookieJar(policy=policy)
    app = webtest.TestApp(cookie_app,
                          cookiejar=jar,
                          extra_environ={'HTTP_HOST': 'example.org'})
    res = app.get('/')
    res = app.get('/')
    self.assertFalse(app.cookies, 'Response should not have set cookies')
    self.assertNotIn('HTTP_COOKIE', res.request.environ)
    self.assertEqual(dict(res.request.cookies), {})
def __init__(self, cache_enabled=False):
    """Create an unauthenticated client; optionally enable the cache."""
    self.authenticated = False
    # InfoQ requires cookies to be logged in. Use a dedicated urllib opener.
    cookie_support = urllib.request.HTTPCookieProcessor(
        http_cookiejar.CookieJar())
    self.opener = urllib.request.build_opener(cookie_support)
    self.cache = None
    if cache_enabled:
        self.enable_cache()
def __init__(self, configuration):
    """Store *configuration* and initialise a fresh HTTP session state."""
    self.configuration = configuration
    # Fresh jar so each connector instance has an isolated session.
    self.cookie = http_cookiejar.CookieJar()
    self.url = None
    # Default headers for JSON API calls over a persistent connection.
    self.headers = {
        "Connection": "keep-alive",
        "Content-Type": "application/json",
    }
def __init__(self, host, scheme='http', relative_to=None):
    """Bind the client to *host*, open a *scheme* connection, reset state."""
    self.host = host
    self.relative_to = relative_to
    self.conn = {}
    self._load_conn(scheme)
    self.extra_environ = {'wsgi.url_scheme': scheme}
    # Cookies persist across requests for the lifetime of this instance.
    self.cookiejar = http_cookiejar.CookieJar()
    self.reset()
def login(username, password):
    """Log in to the subtitle site.

    :param username: account name
    :param password: account password
    :returns: 1 on success (an existing session counts), 0 otherwise
    """
    log(__name__, " Logging in with username '%s' ..." % (username))
    content = geturl(main_url + 'index.php')
    if content is None:
        # Robustness: previously fell off the end returning None; 0 keeps
        # the documented 0/1 contract (both are falsy for callers).
        return 0
    # The logout icon is only rendered for an authenticated session.
    if re.search('logouticon.png', content, re.IGNORECASE | re.DOTALL):
        return 1
    match = re.search(unique_pattern, content, re.IGNORECASE | re.DOTALL)
    if not match:
        return 0
    return_value = match.group(1)
    unique_name = match.group(2)
    unique_value = match.group(3)
    # Fix: the original dict literal listed 'remember' twice; one entry kept.
    # Fix: Request data must be bytes on Python 3, so encode the form body.
    login_postdata = urllib.parse.urlencode({
        'username': username,
        'passwd': password,
        'remember': 'yes',
        'Submit': 'Login',
        'option': 'com_user',
        'task': 'login',
        'silent': 'true',
        'return': return_value,
        unique_name: unique_value,
    }).encode('utf-8')
    cj = http_cookiejar.CookieJar()
    my_opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(cj))
    my_opener.addheaders = [('Referer', main_url)]
    urllib.request.install_opener(my_opener)
    req = urllib.request.Request(main_url + 'index.php', login_postdata)
    # Fix: urlopen().read() returns bytes; decode before matching with a
    # str regex pattern (re.search on bytes with a str pattern raises).
    response = urllib.request.urlopen(req).read().decode('utf-8', 'ignore')
    if re.search('logouticon.png', response, re.IGNORECASE | re.DOTALL):
        return 1
    return 0
def _check_cookie_session_persistence(self):
    """Check cookie persistence types by injecting cookies in requests."""
    # First request: let the server hand us its session cookie.
    jar = http_cookiejar.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
    opener.open("http://{0}/".format(self.vip_ip))
    bodies = []
    # Replay that cookie on 10 follow-up requests; with cookie-based
    # session persistence all responses must come from one backend.
    for _ in range(10):
        req = urllib2.Request("http://{0}/".format(self.vip_ip))
        jar.add_cookie_header(req)
        bodies.append(urllib2.urlopen(req).read())
    self.assertEqual(len(set(bodies)), 1, message=bodies)
def __init__(self, configuration, debug=True):
    """Set up an authenticated XML API connection to the Celerra server."""
    super(XMLAPIConnector, self).__init__()
    self.storage_ip = configuration.emc_nas_server
    self.username = configuration.emc_nas_login
    self.password = configuration.emc_nas_password
    self.debug = debug
    self.auth_url = 'https://' + self.storage_ip + '/Login'
    self._url = ('https://' + self.storage_ip
                 + '/servlets/CelerraManagementServices')
    # One opener for the session: HTTPS plus a cookie jar so the login
    # cookie is replayed on subsequent management requests.
    self.url_opener = url_request.build_opener(
        url_request.HTTPSHandler(),
        url_request.HTTPCookieProcessor(http_cookiejar.CookieJar()))
    self._do_setup()
def __init__(self, verify=True, ca_bundle=None, key_file=None,
             cert_file=None):
    """Prepare per-request arguments for TLS verification and client certs."""
    self.request_args = {"allow_redirects": False, "verify": verify}
    self.cookiejar = http_cookiejar.CookieJar()
    if verify:
        if ca_bundle:
            # Verify against a specific CA bundle instead of the default.
            self.request_args["verify"] = ca_bundle
        if key_file:
            # requests expects (cert, key) as a tuple.
            self.request_args["cert"] = (cert_file, key_file)
    self.sec = None
    self.user = None
    self.passwd = None
def _processHandler(self, securityHandler, param_dict):
    """proceses the handler and returns the cookiejar"""
    handler = None
    cj = None
    if securityHandler is None:
        # Anonymous access still gets a jar so session cookies persist.
        cj = cookiejar.CookieJar()
        return param_dict, handler, cj
    method = securityHandler.method.lower()
    if method == "token":
        param_dict['token'] = securityHandler.token
        # Token handlers only optionally expose a jar/handler.
        if hasattr(securityHandler, 'cookiejar'):
            cj = securityHandler.cookiejar
        if hasattr(securityHandler, 'handler'):
            handler = securityHandler.handler
    elif method == "handler":
        handler = securityHandler.handler
        cj = securityHandler.cookiejar
    return param_dict, handler, cj
def __init__(self, app, extra_environ=None, relative_to=None,
             use_unicode=True, cookiejar=None, parser_features=None,
             json_encoder=None, lint=True):
    """Wrap *app* for testing; string specs are resolved to real apps."""
    # Environment override lets the suite target a live deployment instead.
    app = os.environ.get('WEBTEST_TARGET_URL', app)
    if isinstance(app, string_types):
        if app.startswith('http'):
            try:
                from wsgiproxy import HostProxy
            except ImportError:  # pragma: no cover
                raise ImportError(
                    ('Using webtest with a real url requires WSGIProxy2. '
                     'Please install it with: '
                     'pip install WSGIProxy2'))
            # Default transport is httplib unless one is named after '#'.
            if '#' not in app:
                app += '#httplib'
            target, transport = app.split('#', 1)
            app = HostProxy(target, client=transport)
        else:
            from paste.deploy import loadapp
            # @@: Should pick up relative_to from calling module's
            # __file__
            app = loadapp(app, relative_to=relative_to)
    self.app = app
    self.lint = lint
    self.relative_to = relative_to
    self.extra_environ = {} if extra_environ is None else extra_environ
    self.use_unicode = use_unicode
    self.cookiejar = (cookiejar if cookiejar is not None
                      else http_cookiejar.CookieJar(policy=CookiePolicy()))
    self.RequestClass.ResponseClass.parser_features = (
        'html.parser' if parser_features is None else parser_features)
    self.JSONEncoder = (json.JSONEncoder if json_encoder is None
                        else json_encoder)
def __init__(self, *args, **kwargs):
    """Transport with proxy, timeout and (optionally file-backed) cookies."""
    cookiejar = kwargs.pop("cookiejar", None)
    self.timeout = kwargs.pop("timeout", 0)
    self.proxy_config = self._get_proxy(**kwargs)
    self.no_proxy = os.environ.get("no_proxy", "").lower().split(',')
    self.context = kwargs.pop('context', None)
    # Old-style base classes may lack __init__ entirely; guard the call.
    if hasattr(xmlrpclib.Transport, "__init__"):
        xmlrpclib.Transport.__init__(self, *args, **kwargs)
    jar = cookiejar or cookielib.CookieJar()
    self.cookiejar = jar
    if hasattr(jar, "load"):
        # File-backed jar: ensure the backing file exists, then load it.
        # NOTE(review): assumes a jar with `load` always has `filename`
        # set — confirm against callers.
        if not os.path.exists(jar.filename) and hasattr(jar, "save"):
            jar.save(jar.filename)
        jar.load(jar.filename)
def __init__(self, user_agent, site_name=None):
    """
    Specify the user agent for the application and optionally a site_name.

    If site_name is None, then the site name will be looked for in the
    environment variable REDDIT_SITE. It if is not found there, the default
    site name `reddit` will be used.
    """
    if not user_agent or not isinstance(user_agent, six.string_types):
        raise TypeError('User agent must be a non-empty string.')
    self.DEFAULT_HEADERS['User-agent'] = UA_STRING % user_agent
    site = site_name or os.getenv('REDDIT_SITE') or 'reddit'
    self.config = Config(site)
    # Cookie-aware opener so the login session survives across API calls.
    _cookie_jar = http_cookiejar.CookieJar()
    self._opener = build_opener(HTTPCookieProcessor(_cookie_jar))
    self.modhash = self.user = None
def __init__(self, configuration, debug=True):
    """Open an HTTPS connection to the Celerra management service, using a
    configured SSL context when one is available, and run initial setup."""
    super(XMLAPIConnector, self).__init__()
    self.storage_ip = enas_utils.convert_ipv6_format_if_needed(
        configuration.emc_nas_server)
    self.username = configuration.emc_nas_login
    self.password = configuration.emc_nas_password
    self.debug = debug
    self.auth_url = 'https://' + self.storage_ip + '/Login'
    self._url = 'https://{}/servlets/CelerraManagementServices'.format(
        self.storage_ip)
    # Honor a configured SSL context when present; plain handler otherwise.
    context = enas_utils.create_ssl_context(configuration)
    https_handler = (url_request.HTTPSHandler(context=context)
                     if context else url_request.HTTPSHandler())
    cookie_handler = url_request.HTTPCookieProcessor(
        http_cookiejar.CookieJar())
    self.url_opener = url_request.build_opener(https_handler,
                                               cookie_handler)
    self._do_setup()
def get_tweets(search_params, receive_buffer=None, buffer_length=100):
    """Scrape tweets matching *search_params* from Twitter's search endpoint.

    Pages through the JSON search results, parsing each rendered tweet out
    of the returned HTML fragment.  When *receive_buffer* is given it is
    called with batches of up to *buffer_length* Tweet objects as they are
    collected.  Returns the full list of Tweet objects.
    """
    refresh_cursor = ''  # pagination cursor returned by the previous page
    results = []
    results_aux = []  # batch accumulator flushed to receive_buffer
    cookie_jar = cookiejar.CookieJar()
    active = True
    counter = 0
    while active:
        json_response = Scraper.get_json_response(search_params,
                                                  refresh_cursor, cookie_jar)
        # An empty items_html fragment means there are no more results.
        if len(json_response['items_html'].strip()) == 0:
            break
        refresh_cursor = json_response['min_position']
        tweets = PyQuery(
            json_response['items_html'])('div.js-stream-tweet')
        if len(tweets) == 0:
            break
        for tweetHTML in tweets:
            tweet_pq = PyQuery(tweetHTML)
            tweet_object = tweet.Tweet()
            try:
                username_tweet = tweet_pq(
                    "span.username.js-action-profile-name b").text()
                # Collapse whitespace and undo the space the renderer
                # inserts after '#' and '@' in the tweet text.
                txt = re.sub(
                    r"\s+", " ",
                    tweet_pq("p.js-tweet-text").text().replace(
                        '# ', '#').replace('@ ', '@'))
                retweets = int(
                    tweet_pq(
                        "span.ProfileTweet-action--retweet span.ProfileTweet-actionCount"
                    ).attr("data-tweet-stat-count").replace(",", ""))
                favorites = int(
                    tweet_pq(
                        "span.ProfileTweet-action--favorite span.ProfileTweet-actionCount"
                    ).attr("data-tweet-stat-count").replace(",", ""))
                date_sec = int(
                    tweet_pq("small.time span.js-short-timestamp").attr(
                        "data-time"))
                tweet_id = tweet_pq.attr("data-tweet-id")
                permalink = tweet_pq.attr("data-permalink-path")
                user_id = int(
                    tweet_pq("a.js-user-profile-link").attr(
                        "data-user-id"))
                geo = ''
                geo_span = tweet_pq('span.Tweet-geo')
                if len(geo_span) > 0:
                    geo = geo_span.attr('title')
                # Collect expanded URLs from anchors that carry them.
                urls = []
                for link in tweet_pq("a"):
                    try:
                        urls.append((link.attrib["data-expanded-url"]))
                    except KeyError:
                        pass
                tweet_object.id = tweet_id
                tweet_object.permalink = 'https://twitter.com' + permalink
                tweet_object.username = username_tweet
                tweet_object.text = txt
                # NOTE(review): fromtimestamp() uses the local timezone even
                # though the formatted string claims +0000 — confirm intent.
                tweet_object.date = datetime.datetime.fromtimestamp(
                    date_sec)
                tweet_object.formatted_date = datetime.datetime.fromtimestamp(
                    date_sec).strftime("%a %b %d %X +0000 %Y")
                tweet_object.retweets = retweets
                tweet_object.favorites = favorites
                tweet_object.mentions = " ".join(
                    re.compile('(@\\w*)').findall(tweet_object.text))
                tweet_object.hashtags = " ".join(
                    re.compile('(#\\w*)').findall(tweet_object.text))
                tweet_object.geo = geo
                tweet_object.urls = ",".join(urls)
                tweet_object.author_id = user_id
                counter += 1
                sys.stdout.write("Total Tweets: %d \r" % counter)
                sys.stdout.flush()
                results.append(tweet_object)
                results_aux.append(tweet_object)
                # Flush a full batch to the caller-supplied callback.
                if receive_buffer and len(results_aux) >= buffer_length:
                    receive_buffer(results_aux)
                    results_aux = []
                # Stop once the requested maximum tweet count is reached.
                if 0 < search_params.maxTweets <= len(results):
                    active = False
                    break
            except Exception:
                # NOTE(review): broad swallow — tweets whose markup does not
                # match are silently skipped; consider logging instead.
                pass
    # Flush any remaining partial batch before returning.
    if receive_buffer and len(results_aux) > 0:
        receive_buffer(results_aux)
    return results
from six.moves import urllib from rime.basic import codes as basic_codes from rime.basic import consts import rime.basic.targets.problem import rime.basic.targets.project import rime.basic.targets.solution import rime.basic.targets.testset # NOQA from rime.core import targets from rime.core import taskgraph from rime.plugins.plus import commands as plus_commands from rime.util import files # opener with cookiejar cookiejar = http_cookiejar.CookieJar() opener = urllib.request.build_opener( urllib.request.HTTPCookieProcessor(cookiejar)) class Project(targets.registry.Project): def PreLoad(self, ui): super(Project, self).PreLoad(ui) self.atcoder_config_defined = False def _atcoder_config(upload_script, contest_url, username, password, lang_ids): self.atcoder_config_defined = True self.atcoder_upload_script = upload_script self.atcoder_contest_url = contest_url self.atcoder_username = username
def init_opener(self):
    """Create a cookie-aware opener; the jar is kept on ``self.cj``."""
    jar = http_cookiejar.CookieJar()
    self.cj = jar
    return build_opener(HTTPCookieProcessor(jar))
def cookies(self):
    """CookieJar object that will be used for cookies in this request."""
    # Lazily create the jar on first access; reuse it afterwards.
    jar = self._cookies
    if jar is None:
        jar = cookielib.CookieJar()
        self._cookies = jar
    return jar
def get_handlers(self):
    """Build the list of urllib handlers for this connection.

    Assembles, in order: an insecure-HTTPS handler (when certificate
    verification is disabled), a redirect handler, basic/digest auth
    handlers, Windows IWA handlers, client-certificate handlers, an
    optional proxy handler, and finally a cookie processor.
    """
    handlers = []
    # Verification disabled: accept any certificate and hostname.
    if self._verify_cert == False:
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        handler = request.HTTPSHandler(context=ctx)
        handlers.append(handler)
    from urllib.request import HTTPRedirectHandler
    # Allow long redirect chains (default limits are 10/4).
    redirect_handler = HTTPRedirectHandler()
    redirect_handler.max_redirections = 30
    redirect_handler.max_repeats = 30
    handlers.append(redirect_handler)
    if self._username and self._password:
        # Register the same credentials for both basic and digest auth.
        passman = request.HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, self._parsed_org_url, self._username,
                             self._password)
        handlers.append(request.HTTPBasicAuthHandler(passman))
        passman = request.HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, self._parsed_org_url, self._username,
                             self._password)
        handlers.append(request.HTTPDigestAuthHandler(passman))
    if os.name == 'nt':
        try:
            from arcgis._impl.common._iwa import NtlmSspiAuthHandler, KerberosSspiAuthHandler
            auth_krb = KerberosSspiAuthHandler()
            handlers.append(auth_krb)
            try:
                # NTLM is optional; Kerberos alone may suffice.
                auth_NTLM = NtlmSspiAuthHandler()
                handlers.append(auth_NTLM)
            except:
                pass
        # NOTE(review): 'Error' is not a builtin; unless it is defined at
        # module level this clause itself raises NameError when the import
        # fails — likely meant ImportError.  TODO confirm.
        except Error as err:
            _log.error(
                "winkerberos packages is required for IWA authentication (NTLM and Kerberos)."
            )
            _log.error(
                "Please install it:\n\tconda install winkerberos")
            _log.error(str(err))
    else:
        _log.error(
            'The GIS uses Integrated Windows Authentication which is currently only supported on the Windows platform'
        )
    # Client-certificate (PKI) auth: explicit files win over the ones
    # inherited from the portal connection.
    if self._auth == "PKI" or \
            (self.cert_file is not None and self.key_file is not None):
        handlers.append(
            HTTPSClientAuthHandler(self.key_file, self.cert_file))
    elif self._portal_connection and \
            self._portal_connection.cert_file is not None and \
            self._portal_connection.key_file is not None:
        handlers.append(
            HTTPSClientAuthHandler(self._portal_connection.key_file,
                                   self._portal_connection.cert_file))
    cj = cookiejar.CookieJar()
    if self.proxy_host:
        # Simple Proxy Support
        from urllib.request import ProxyHandler
        if self.proxy_port is None:
            self.proxy_port = 80
        proxies = {
            "http": "http://%s:%s" % (self.proxy_host, self.proxy_port),
            "https": "https://%s:%s" % (self.proxy_host, self.proxy_port)
        }
        proxy_support = ProxyHandler(proxies)
        handlers.append(proxy_support)
    handlers.append(request.HTTPCookieProcessor(cj))
    return handlers
def __init__(self, encoding='utf-8'):
    """Cookie-aware fetcher; *encoding* is used when decoding responses."""
    jar = cookielib.CookieJar()
    self.cj = jar
    # All requests through this opener share the same cookie jar.
    self.opener = request.build_opener(request.HTTPCookieProcessor(jar))
    self.encoding = encoding
def __init__(self, policy=None, check_expired_frequency=10000):
    """Cookie jar wrapper with a no-op lock and periodic expiry sweeps."""
    self.policy = policy or http_cookiejar.DefaultCookiePolicy()
    self.jar = http_cookiejar.CookieJar(self.policy)
    # Replace the jar's real lock; this jar is used single-threaded.
    self.jar._cookies_lock = _DummyLock()
    self.check_expired_frequency = check_expired_frequency
    # Counter of processed responses, used to trigger expiry checks.
    self.processed = 0