def __init__(self, ca_file):
    """Run the parent handler's initializer and remember the CA bundle path."""
    # pylint: disable=super-on-old-class
    if sys.version_info[0] < 3:
        # On Python 2, HTTPSHandler's ancestor BaseHandler is an
        # old-style class, so super() cannot be used; call the parent
        # initializer explicitly instead.
        HTTPSHandler.__init__(self)
    else:
        super(HTTPSAuthHandler, self).__init__()
    self.__ca_file = ca_file
def __init__(self, debuglevel=0, context=None):
    """Initialize the parent handler and the connection bookkeeping.

    The parent initializer is invoked in the old explicit form for
    Python 2 compatibility.
    """
    HTTPSHandler.__init__(self, debuglevel, context)
    # Serializes access to the two connection sets below.
    self.lock = threading.Lock()
    self.pool = set()
    self.free = set()
def u2handlers(self):
    """Build the urllib handler chain: proxy, basic auth, then HTTPS."""
    # python ssl Context support - PEP 0466: older interpreters lack
    # _create_unverified_context, so fall back to a plain handler there.
    # NOTE(review): the unverified context disables certificate checks,
    # which is insecure against untrusted endpoints.
    if hasattr(ssl, '_create_unverified_context'):
        https_handler = HTTPSHandler(context=ssl._create_unverified_context())
    else:
        https_handler = HTTPSHandler()
    return [ProxyHandler(self.proxy), HTTPBasicAuthHandler(), https_handler]
def build_opener_with_context(context=None, *handlers):
    """Return an OpenerDirector whose HTTPS handler uses *context*.

    The `context` keyword of HTTPSHandler appeared in Python 2.7.9 and
    3.4.3; on older interpreters we fall back to a context-less handler
    and warn the caller that verification may not happen.
    """
    if not _URLLIB_SUPPORTS_SSL_CONTEXT:
        warnings.warn(
            ("SSL context is not supported in your environment for urllib "
             "calls. Perhaps your Python version is obsolete? "
             "This probably means that TLS verification doesn't happen, "
             "which is insecure. Please consider upgrading your Python "
             "interpreter version."), UserWarning)
        https_handler = HTTPSHandler()
    else:
        https_handler = HTTPSHandler(context=context)
    return build_opener(https_handler, *handlers)
def __init__(self, writing=WRITING_NATIVE, opener=None, retry_times=4,
             executor=_g_executor, timeout=4,
             service_urls=('http://translate.google.com', ), debug=False):
    """Set up the translation client and its default HTTP opener."""
    self._DEBUG = debug
    self._MIN_TASKS_FOR_CONCURRENT = 2
    self._RETRY_TIMES = retry_times
    self._TIMEOUT = timeout
    self._executor = executor
    self._writing = writing
    self._languages = None
    self._opener = opener
    if not self._opener:
        # Mirror the debug flag into urllib's debuglevel (1 = verbose wire log).
        level = 1 if self._DEBUG else 0
        self._opener = build_opener(HTTPHandler(debuglevel=level),
                                    HTTPSHandler(debuglevel=level))
    # Accept either a single URL or a sequence of URLs.
    if _is_sequence(service_urls):
        self._service_urls = service_urls
    else:
        self._service_urls = (service_urls, )
def http_download(download_url, outfile, proxy_url=None, proxy_port=None):
    """Download *download_url* into the local file *outfile*.

    :param download_url: URL to fetch.
    :param outfile: path of the local file to write.
    :param proxy_url: optional proxy host; combined with *proxy_port*.
    :param proxy_port: port of the proxy, used only when *proxy_url* is set.
    """
    if proxy_url:
        proxy = "{}:{}".format(proxy_url, proxy_port)
        mainlog.info("Using a proxy : {}".format(proxy))
        urlopener = build_opener(ProxyHandler({
            'https': proxy,
            'http': proxy
        }), HTTPRedirectHandler())
    else:
        mainlog.info("Not using a proxy")
        urlopener = build_opener(HTTPHandler(), HTTPSHandler(),
                                 HTTPRedirectHandler())
    urlopener.addheaders = [(
        'User-agent',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:32.0) Gecko/20100101 Firefox/32.0'
    )]
    datasource = urlopener.open(download_url)
    try:
        # BUG FIX: the output file and the HTTP connection used to leak
        # when a read/write failed part-way; `with` + `finally` close
        # both on every path.
        with open(outfile, 'wb') as out:
            while True:
                chunk = datasource.read(8192)
                if not chunk:
                    break
                out.write(chunk)
            out.flush()
    finally:
        datasource.close()
def _setup_url_opener_if_necessary(self):
    """Lazily build the shared urllib opener (TLS policy + basic auth)."""
    if self._url_opener is not None:
        return
    # Optionally relax certificate checking, but only when at least one
    # configured server actually speaks https.
    https_kwargs = {}
    if not self._verify_ssl_certificates:
        any_https = any(
            server.url.startswith('https') for server in self._servers)
        if any_https:
            context = ssl.create_default_context()
            context.check_hostname = False
            context.verify_mode = ssl.CERT_NONE
            https_kwargs['context'] = context
    # setup URL openers - add pre-emptive basic authentication
    password_manager = HTTPPasswordMgrWithDefaultRealm()
    for server in self._servers:
        if server.username:
            password_manager.add_password(None, server.url, server.username,
                                          server.password)
    auth_handlers = []
    if password_manager.passwd:
        auth_handlers.append(PreemptiveBasicAuthHandler(password_manager))
    self._url_opener = build_opener(HTTPHandler(),
                                    HTTPSHandler(**https_kwargs),
                                    *auth_handlers)
def handle():
    """Send the prepared request to the pushgateway.

    Raises IOError when the gateway answers with an HTTP status >= 400.
    """
    if username is not None and password is not None:
        auth_value = '{0}:{1}'.format(username, password).encode('utf-8')
        auth_token = base64.b64encode(auth_value)
        auth_header = b'Basic ' + auth_token
        headers.append(['Authorization', auth_header])
    url_components = urlparse(url)
    if url_components.scheme != 'https':
        # Also covers an empty scheme: anything that is not explicitly
        # https goes through the plain HTTP handler.
        url_handler = HTTPHandler
    else:
        if verify:
            # BUG FIX: a bare ssl.SSLContext() has no trust anchors, so
            # CERT_REQUIRED rejected every certificate.  The default
            # context loads the system CA store and checks hostnames.
            context = ssl.create_default_context()
        else:
            context = ssl.SSLContext()
            context.verify_mode = ssl.CERT_NONE
        url_handler = HTTPSHandler(context=context)
    request = Request(url, data=data)
    request.get_method = lambda: method
    for k, v in headers:
        request.add_header(k, v)
    resp = build_opener(url_handler).open(request, timeout=timeout)
    if resp.code >= 400:
        raise IOError("error talking to pushgateway: {0} {1}".format(
            resp.code, resp.msg))
def getMediaStreamInfo(self, url):
    """Probe *url* and classify it as playlist, direct stream, or error.

    Returns a UrlInfo describing the stream, or None on unrecoverable
    errors (every failure is logged, never raised).
    """
    # Normalize to text; byte URLs are assumed to be UTF-8 encoded.
    # (type(u"") keeps this working on both Python 2 and 3.)
    if not isinstance(url, type(u"")):
        url = url.decode('utf-8')
    if not url.startswith("http"):
        self.log.info('Not an HTTP url. Maybe direct stream...')
        return UrlInfo(url, False, None)
    self.log.info('Requesting stream... %s' % url)
    req = UrlRequest(url)
    req.add_header('User-Agent', USER_AGENT)
    try:
        opener = urlBuild_opener(
            DummyMMSHandler(),
            HTTPSHandler(context=my_ssl_create_unverified_context()))
        f = opener.open(req, timeout=float(self.url_timeout))
    except HTTPError as e:
        self.log.warn('HTTP Error for %s: %s' % (url, e))
        return None
    except URLError as e:
        self.log.info('URLError for %s: %s ' % (url, e))
        # MMS servers answer with a pseudo-redirect that urllib reports
        # as an error; recover the real URL from the reason text.
        if str(e.reason).startswith('MMS REDIRECT'):
            newurl = e.reason.split("MMS REDIRECT:", 1)[1]
            self.log.info('Found mms redirect for: %s' % newurl)
            return UrlInfo(newurl, False, None)
        else:
            return None
    except BadStatusLine as e:
        # SHOUTcast servers reply "ICY 200 OK", which http.client rejects.
        if str(e).startswith('ICY 200'):
            self.log.info('Found ICY stream')
            return UrlInfo(url, False, None)
        else:
            return None
    except Exception as e:
        print('%s: for %s: %s' % (type(e), url, e), file=sys.stderr)
        self.log.warn('%s: for %s: %s' % (type(e), url, e))
        return None
    metadata = f.info()
    firstbytes = f.read(500)
    f.close()
    try:
        contentType = metadata["content-type"]
        self.log.info('Content-Type: %s' % contentType)
    except Exception:
        self.log.info("Couldn't read content-type. Maybe direct stream...")
        return UrlInfo(url, False, None)
    for decoder in self.decoders:
        self.log.info('Checking decoder')
        if decoder.isStreamValid(contentType, firstbytes):
            return UrlInfo(url, True, contentType, decoder)
    # no playlist decoder found. Maybe a direct stream
    self.log.info(
        'No playlist decoder could handle the stream. Maybe direct stream...'
    )
    return UrlInfo(url, False, contentType)
def _urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
             cafile=None, capath=None, cadefault=False):
    """urlopen() work-alike that can verify TLS against a custom CA set."""
    def _pass_through(request, response):
        # Hand https responses back untouched (no error translation).
        return response

    error_processor = HTTPErrorProcessor()
    error_processor.https_response = _pass_through
    wants_verification = cafile or capath or cadefault
    if _have_sslcontext and wants_verification:
        # PROTOCOL_SSLv23 negotiates the best mutually supported
        # SSL/TLS version; SSLv2 is explicitly disabled.
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        context.verify_mode = ssl.CERT_REQUIRED
        if cafile or capath:
            context.load_verify_locations(cafile, capath)
        else:
            context.set_default_verify_paths()
        opener = build_opener(
            HTTPSHandler(context=context, check_hostname=True),
            error_processor)
    else:
        opener = build_opener(error_processor)
    return opener.open(url, data, timeout)
def __init__(self, args):
    """Load cookies from disk (best effort) and build an opener that
    skips TLS certificate verification."""
    self.cjar = MozillaCookieJar()
    self.debug = args.debug
    self.user_agent = args.user_agent
    self.cookie_file = args.cookie_file
    if os.path.exists(self.cookie_file):
        try:
            self.cjar.load(self.cookie_file, ignore_discard=True)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # Unreadable/corrupt jar: start over with an empty one.
            self.cjar = MozillaCookieJar()
            if args.debug:
                traceback.print_exc()
    # Fix from:
    # https://stackoverflow.com/questions/19268548/python-ignore-certificate-validation-urllib2
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    self.opener = build_opener(HTTPSHandler(context=ctx),
                               HTTPCookieProcessor(self.cjar))
def _perform_http_request_internal(self, url: str, req: Request):
    """Open *req* (optionally through a proxy) and wrap the reply."""
    # Only http(s) schemes are allowed — urllib would happily open
    # ftp:// or file:// otherwise (BAN-B310).
    if not url.lower().startswith("http"):
        raise SlackRequestError(f"Invalid URL detected: {url}")
    opener: Optional[OpenerDirector] = None
    if self.proxy is not None:
        if not isinstance(self.proxy, str):
            raise SlackRequestError(
                f"Invalid proxy detected: {self.proxy} must be a str value"
            )
        opener = urllib.request.build_opener(
            ProxyHandler({"http": self.proxy, "https": self.proxy}),
            HTTPSHandler(context=self.ssl),
        )
    # NOTE: BAN-B310 is already checked above
    http_resp: Optional[HTTPResponse] = None
    if opener:
        http_resp = opener.open(req, timeout=self.timeout)  # skipcq: BAN-B310
    else:
        http_resp = urlopen(  # skipcq: BAN-B310
            req, context=self.ssl, timeout=self.timeout
        )
    # Decode using the advertised charset; fall back to UTF-8.
    charset: str = http_resp.headers.get_content_charset() or "utf-8"
    response_body: str = http_resp.read().decode(charset)
    resp = WebhookResponse(
        url=url,
        status_code=http_resp.status,
        body=response_body,
        headers=http_resp.headers,
    )
    _debug_log_response(self.logger, resp)
    return resp
def authReq(cookiejar):
    """POST the LinkedIn login form and return the updated cookie jar."""
    if (ssl_validation):
        opener = build_opener(HTTPCookieProcessor(cookiejar), HTTPHandler())
    else:
        opener = build_opener(HTTPCookieProcessor(cookiejar), HTTPHandler(),
                              HTTPSHandler(context=sslvalidation()))
    lcsrf = logincsrf(cookiejar)
    if (lcsrf is None):
        print(bcolors.NONERED + '[-] Failed to pull CSRF token' +
              bcolors.ENDLINE)
    data = urlencode({
        "session_key": linkedin_username,
        "session_password": linkedin_password,
        "isJsEnabled": "false",
        "loginCsrfParam": lcsrf
    }).encode("utf-8")
    headers = {
        "Host": "www.linkedin.com",
        "User-Agent": user_agent,
        "Content-type": "application/x-www-form-urlencoded",
        "Accept":
        "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Cookie": getcookie(cookiejar),
        "X-IsAJAXForm": "1",
    }
    # BUG FIX: Request's second positional argument is `data`, not
    # `headers` — the header dict was previously sent as the request
    # body and the real headers were never transmitted.
    req = Request("https://www.linkedin.com/uas/login-submit",
                  headers=headers)
    opener.open(req, timeout=timeout, data=data)
    return cookiejar
def pullid():
    """Search LinkedIn for `company` and let the user pick its numeric ID."""
    global company
    global companyid
    cookiejar = authReq(initialReq())
    if (ssl_validation):
        opener = build_opener(HTTPCookieProcessor(cookiejar), HTTPHandler())
    else:
        opener = build_opener(HTTPCookieProcessor(cookiejar), HTTPHandler(),
                              HTTPSHandler(context=sslvalidation()))
    query = ("count=10&filters=List(resultType-%3ECOMPANIES)&"
             + urlencode({"keywords": company})
             + "&origin=SWITCH_SEARCH_VERTICAL&q=all&queryContext=List(spellCorrectionEnabled-%3Etrue,relatedSearchesEnabled-%3Efalse)&start=0")
    headers = {
        "Host": "www.linkedin.com",
        "User-Agent": user_agent,
        "Accept": "application/vnd.linkedin.normalized+json+2.1",
        "x-restli-protocol-version": "2.0.0",
        "Cookie": getcookie(cookiejar),
        "Csrf-Token": ajaxtoken(cookiejar),
    }
    req = Request(
        "https://www.linkedin.com/voyager/api/search/blended?" + query, None,
        headers)
    content = json.loads(opener.open(req, timeout=timeout).read())
    # Show every matched company with the numeric ID parsed from its URN.
    for entry in content["included"]:
        urn_parts = entry["entityUrn"].split(":")
        print("{:.<40}: {:s}".format(entry["name"] + " :", urn_parts[3]))
    companyid = input("\nSelect company ID value: ")
def _init_urllib(self):
    """Install a global opener with cookie support and our TLS policy."""
    # Initialize a SSL context for all HTTPS calls
    if self.verify_tls:
        # Strict mode: pin TLS 1.1 and verify against the system CAs.
        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_1)
        context.verify_mode = ssl.CERT_REQUIRED
        context.check_hostname = True
        context.load_default_certs()
    else:
        # Relaxed mode: default protocol settings, verification off.
        context = ssl.create_default_context(
        )  # Should we enforce TLS 1.1 here?
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
    # Cookie Jar
    self.cj = http.cookiejar.CookieJar()
    # Debugging
    debuglevel = 1 if self.debug else 0
    install_opener(
        build_opener(HTTPSHandler(debuglevel=debuglevel, context=context),
                     HTTPCookieProcessor(self.cj), NoRedirection))
def _install_opener(self):
    """Build and globally install the urllib opener for this connection.

    Combines optional HTTP basic auth (when a username is configured)
    with an HTTPS handler whose context depends on the CA-file setting.
    """
    self.opener = None
    if self.username != "":
        # Pre-register the credentials for the auth endpoint.
        self.password_mgr = HTTPPasswordMgrWithDefaultRealm()
        self.password_mgr.add_password(None, self.baseurlauth,
                                       self.username, self.password)
        self.auth_handler = HTTPBasicAuthHandler(self.password_mgr)
    if self.ssl is True:
        if self.cafile == "":
            # No CA bundle given: accept any certificate.
            self.context = ssl.create_default_context()
            self.context.check_hostname = False
            self.context.verify_mode = ssl.CERT_NONE
        else:
            # Verify the server against the supplied CA bundle.
            self.context = ssl.create_default_context()
            self.context.load_verify_locations(cafile=self.cafile)
            self.context.verify_mode = ssl.CERT_REQUIRED
        self.https_handler = HTTPSHandler(context=self.context)
        if self.username != "":
            self.opener = build_opener(self.https_handler, self.auth_handler)
        else:
            self.opener = build_opener(self.https_handler)
    elif self.username != "":
        self.opener = build_opener(self.auth_handler)
    if self.opener is not None:
        self.log.debug("Setting up opener on: {}".format(self.baseurlauth))
        install_opener(self.opener)
def get_asf_cookie(user, password):
    """Log in to ASF via Earthdata OAuth and persist the session cookie.

    :param user: Earthdata username.
    :param password: Earthdata password.
    :returns: True when the cookie jar ends up logged in, False otherwise.
    """
    logging.info("logging into asf")
    login_url = "https://urs.earthdata.nasa.gov/oauth/authorize"
    client_id = "BO_n7nTIlMljdvU6kRRB3g"
    redirect_url = "https://auth.asf.alaska.edu/login"
    user_pass = base64.b64encode(bytes(user + ":" + password, "utf-8"))
    user_pass = user_pass.decode("utf-8")
    auth_cookie_url = f"{login_url}?client_id={client_id}&redirect_uri={redirect_url}&response_type=code&state="
    # Extra keyword arguments for HTTPSHandler (none by default).
    handler_kwargs = {}
    opener = build_opener(HTTPCookieProcessor(cookie_jar), HTTPHandler(),
                          HTTPSHandler(**handler_kwargs))
    request = Request(auth_cookie_url,
                      headers={"Authorization": "Basic {0}".format(user_pass)})
    try:
        opener.open(request)
    except HTTPError as e:
        if e.code == 401:
            logging.error("invalid username and password")
            return False
        else:
            # If an error happens here, the user most likely has not confirmed EULA.
            # BUG FIX: HTTPError has no `.response` attribute (that line
            # raised AttributeError); `.reason` carries the message.
            logging.error(f"Could not log in. {e.code} {e.reason}")
            return False
    if check_cookie_is_logged_in(cookie_jar):
        # COOKIE SUCCESS!
        cookie_jar.save(cookie_jar_path)
        logging.info("successfully logged into asf")
        return True
    logging.info("failed logging into asf")
    return False
def api(self, command, args=None):
    """
    Main API function.

    Returns:
        returns 'False' if invalid command or if no APIKey or Secret is
        specified (if command is "private")
        returns {"error":"<error message>"} if API error
    """
    logging.info("Poloniex command: {}, args: {}".format(command, args))
    args = args or {}
    if command not in PUBLIC_COMMANDS:
        return False
    url = 'https://poloniex.com/public?'
    args['command'] = command
    # prevent urllib from complaining when using a proxy
    context = ssl.create_default_context()
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    if constants.PROXY_ADDR == "" or constants.PROXY_PORT is None:
        opener = build_opener(HTTPSHandler(context=context))
    else:
        opener = build_opener(
            SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, constants.PROXY_ADDR,
                            constants.PROXY_PORT, True, context=context))
    raw = opener.open(Request(url + urlencode(args)))
    return json.loads(raw.read().decode(encoding='UTF-8'))
def _send_request(self, url):
    ''' Performs a GET against the given url. '''
    # Returns the parsed JSON payload, or None when the request or the
    # JSON decoding fails (all errors are logged, never raised).
    LOGGER.debug('Making an api call to {0}'.format(url))
    data = None
    try:
        # TLS client-certificate authentication; hostname checking is
        # deliberately disabled while peer verification stays enabled.
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        # NOTE(review): ssl_certs is read both as a mapping
        # (['ca_cert']) and via attributes (.ca_cert) below — confirm
        # the object really supports both access styles.
        if self.ssl_certs['ca_cert']:
            ctx.load_verify_locations(cafile=self.ssl_certs.ca_cert)
        ctx.load_cert_chain(certfile=self.ssl_certs.client_cert,
                            keyfile=self.ssl_certs.client_key)
        opener = urllib.request.build_opener(HTTPSHandler(context=ctx))
        # Replace the default headers with this client's own set.
        opener.addheaders = []
        for k, v in self.headers.items():
            opener.addheaders.append((k, v))
        response = opener.open(url)
        data = response.read()
        data = json.loads(data)
    except urllib.error.HTTPError as e:
        LOGGER.error('HTTPError - status code: {0}, '
                     'received from {1}'.format(e.code, url))
    except urllib.error.URLError as e:
        LOGGER.error('URLError - {0} {1}'.format(url, e.reason))
    except ValueError as e:
        LOGGER.error('Error parsing JSON for url {0}. {1}'.format(url, e))
    return data
def _perform_urllib_http_request_internal(
    self,
    url: str,
    req: Request,
) -> Dict[str, Any]:
    """Execute *req* with urllib and return status/headers/body as a dict."""
    # urllib not only opens http:// or https:// URLs, but also ftp:// and file://.
    # With this it might be possible to open local files on the executing machine
    # which might be a security risk if the URL to open can be manipulated by an external user.
    # (BAN-B310)
    if url.lower().startswith("http"):
        opener: Optional[OpenerDirector] = None
        if self.proxy is not None:
            if isinstance(self.proxy, str):
                opener = urllib.request.build_opener(
                    ProxyHandler({
                        "http": self.proxy,
                        "https": self.proxy
                    }),
                    HTTPSHandler(context=self.ssl),
                )
            else:
                raise SlackRequestError(
                    f"Invalid proxy detected: {self.proxy} must be a str value"
                )
        # NOTE: BAN-B310 is already checked above
        resp: Optional[HTTPResponse] = None
        if opener:
            resp = opener.open(req, timeout=self.timeout)  # skipcq: BAN-B310
        else:
            resp = urlopen(  # skipcq: BAN-B310
                req, context=self.ssl, timeout=self.timeout)
        if resp.headers.get_content_type() == "application/gzip":
            # admin.analytics.getFile — return the raw gzip bytes
            # without attempting to decode them as text.
            body: bytes = resp.read()
            if self._logger.level <= logging.DEBUG:
                self._logger.debug("Received the following response - "
                                   f"status: {resp.code}, "
                                   f"headers: {dict(resp.headers)}, "
                                   f"body: (binary)")
            return {
                "status": resp.code,
                "headers": resp.headers,
                "body": body
            }
        # Text response: decode with the advertised charset (UTF-8 default).
        charset = resp.headers.get_content_charset() or "utf-8"
        body: str = resp.read().decode(
            charset)  # read the response body here
        if self._logger.level <= logging.DEBUG:
            self._logger.debug("Received the following response - "
                               f"status: {resp.code}, "
                               f"headers: {dict(resp.headers)}, "
                               f"body: {body}")
        return {"status": resp.code, "headers": resp.headers, "body": body}
    raise SlackRequestError(f"Invalid URL detected: {url}")
def u2handlers(self):
    """Get a collection of urllib handlers.
    """
    handlers = suds.transport.http.HttpTransport.u2handlers(self)
    if self.ssl_context:
        try:
            handlers.append(HTTPSHandler(context=self.ssl_context,
                                         check_hostname=self.verify))
        except TypeError:
            # Python 2.7.9's HTTPSHandler rejects the check_hostname
            # keyword argument.  (Even older interpreters would also
            # reject `context`, but those lack SSLContext entirely, so
            # this code path is never reached on them.)
            handlers.append(HTTPSHandler(context=self.ssl_context))
    return handlers
def get_cert(cls, url):
    """Fetch *url* using the test client certificate; return the body text."""
    cert_path = cls.ssl_file('client-certificate.pem')
    key_path = cls.ssl_file('client-private-key.pem')
    ctx = ssl.create_default_context()
    ctx.load_cert_chain(cert_path, key_path, 'client-password')
    ctx.load_verify_locations(cls.ssl_file('ca-certificate.pem'))
    response = build_opener(HTTPSHandler(context=ctx)).open(url)
    return response.read().decode('utf-8')
def setup_method(self, method):
    """Start the application under test and prepare a cookie-aware opener."""
    self.cookies = CookieJar()
    handlers = [
        HTTPRedirectHandler(),
        HTTPHandler(debuglevel=0),
        HTTPSHandler(debuglevel=0),
        HTTPCookieProcessor(self.cookies),
    ]
    self.opener = build_opener(*handlers)
    self.application_process = Process(target=main)
    self.application_process.start()
def __open(self, url, headers={}, data=None, baseurl=""):
    """Raw urlopen command"""
    # Retries the request up to self.tries times and returns the parsed
    # JSON body of the first successful response.
    # NOTE(review): the mutable default `headers={}` is shared across
    # calls; safe only as long as callers never mutate it.
    if not baseurl:
        baseurl = self.baseurl
    self._user_agent()
    # Starts out as the exception *class*; replaced by the actual
    # exception instance after the first failure.
    last_except = Exception
    for count in range(self.tries):
        try:
            req = Request("%s%s" % (baseurl, url), headers=headers)
            # Attach the form-encoded body, trying the Python 3 API
            # first and falling back to the Python 2 one; when `data`
            # cannot be encoded the request simply stays body-less.
            try:
                req.data = urlencode(data).encode('utf-8')  # Python 3
            except:
                try:
                    req.add_data(urlencode(data))  # Python 2
                except:
                    pass
            # Proxy support
            if self.proxy_url:
                if self.proxy_user:
                    proxy = ProxyHandler({
                        'https':
                        'https://%s:%s@%s' % (self.proxy_user,
                                              self.proxy_password,
                                              self.proxy_url)
                    })
                    auth = HTTPBasicAuthHandler()
                    opener = build_opener(proxy, auth, HTTPHandler)
                else:
                    handler = ProxyHandler({'https': self.proxy_url})
                    opener = build_opener(handler)
            else:
                opener = build_opener(
                    HTTPSHandler(debuglevel=self.debuglevel))
            resp = opener.open(req, timeout=5)
            charset = resp.info().get('charset', 'utf-8')
            break
        except (HTTPError, URLError, SSLError, socket.timeout) as e:
            import sys
            last_except = e
            # NOTE(review): `type` here is the builtin, not the request
            # type — this looks like a leftover placeholder in the
            # message format string.
            print('# %d Timed out or other error for %s: %s\n' %
                  (time.time(), type, str(e)), file=sys.stderr)
            if self.debug:
                print('# %d Timed out or other error for %s: %s\n' %
                      (time.time(), type, str(e)))
            # `count` is advanced manually so the comparison below tests
            # the *next* attempt number against the limit; the for-loop
            # reassigns it on the next iteration anyway.
            count += 1
            if count != self.tries:
                time.sleep(count * self.retry_delay)
            else:
                print("# %d %d exceptions: %s" %
                      (time.time(), self.tries, str(last_except)))
                raise last_except
    return json.loads(resp.read().decode(charset))
def __init__(self, server_ip, auth=None, cert=None, output_mode='json',
             version=None):
    """Configure the API client, optionally trusting a provided certificate.

    :param server_ip: host used as both server address and base URI.
    :param auth: credentials object; required.
    :param cert: optional PEM certificate text to load into the context.
    :param output_mode: response format requested from the server.
    :param version: optional API version sent in the Version header.
    :raises Exception: when *auth* is missing.
    """
    self.headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Accept': 'application/json'
    }
    # TODO: check version functionality
    if version is not None:
        self.headers['Version'] = version
    if auth is None:
        raise Exception('No valid credentials found in configuration.')
    self.server_ip = server_ip
    self.base_uri = server_ip
    self.output_mode = output_mode
    self.auth = auth
    # Create a secure SSLContext
    # PROTOCOL_SSLv23 is misleading. PROTOCOL_SSLv23 will use the highest
    # version of SSL or TLS that both the client and server supports.
    context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    # Disable ssl check
    context.verify_mode = ssl.CERT_NONE
    context.check_hostname = False
    check_hostname = False
    if cert is not None:
        cert_file_name = "cert.pem"
        # BUG FIX: the certificate used to be written to cert.pem twice
        # (once via open/close, once via `with`); a single managed write
        # is enough and cannot leak the handle.
        try:
            with open(cert_file_name, 'w+') as f:
                f.write(cert)
        except IOError:
            print('Failed to setup certificate')
        try:
            context.load_default_certs()
            context.load_cert_chain(certfile=cert_file_name)
        except Exception:
            # BUG FIX: narrowed from a bare `except:` so that
            # KeyboardInterrupt/SystemExit are no longer swallowed.
            print('Failed to load certificate')
        os.remove(cert_file_name)
    install_opener(
        build_opener(
            HTTPSHandler(context=context, check_hostname=check_hostname)))
    self.set_auth_token()  # set authorization token in header
def load_from_uri(uri, timeout=3, method='GET'):
    """Fetch *uri* and return its body via the module's reader helper."""
    request = Request(uri, method=method)
    # NOTE(review): a bare SSLContext() is passed with no verification
    # configuration; despite the handler's name nothing pins SSLv3 here.
    https_sslv3_handler = HTTPSHandler(context=ssl.SSLContext())
    opener = build_opener(https_sslv3_handler)
    # Also installed globally so later urlopen() calls share the handler.
    install_opener(opener)
    resource = opener.open(request, timeout=timeout)
    return _read_python3x(resource)
def instanciate_template(tpl_id):
    """Ask the download site to instantiate template *tpl_id*.

    Returns the id of the newly created document.
    """
    opener = build_opener(HTTPHandler(), HTTPSHandler())
    base_url = configuration.get("DownloadSite", "base_url")
    response = opener.open(
        base_url + "/instanciate_template?tpl_id={}".format(tpl_id))
    doc_id = int(response.read().decode())
    response.close()
    return doc_id
def get(url, fileobj, progress_hook=None):
    """
    Perform a GET request against a URL writing the contents into the
    provided file like object.

    :param url: The URL to fetch
    :param fileobj: The fileobj to write the content to
    :param progress_hook: The function to call with progress updates
    :returns: Returns a tuple containing the number of bytes read and
              the result of the info() function from urllib2.urlopen().
    :raises: Exceptions from urllib2.urlopen() and writing to the provided
             fileobj may occur.
    """
    user_agent = build_user_agent()
    try:
        # Wrap in a try as Python versions prior to 2.7.9 don't have
        # create_default_context, but some distros have backported it.
        ssl_context = ssl.create_default_context()
        if config.get("no-check-certificate"):
            logger.debug("Disabling SSL/TLS certificate verification.")
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
        opener = build_opener(HTTPSHandler(context=ssl_context))
    except Exception:
        # BUG FIX: the bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit; only real errors should trigger
        # the fallback opener.
        opener = build_opener()
    if user_agent:
        logger.debug("Setting HTTP User-Agent to %s", user_agent)
        opener.addheaders = [
            ("User-Agent", user_agent),
        ]
    else:
        opener.addheaders = [(header, value)
                             for header, value in opener.addheaders
                             if header.lower() != "user-agent"]
    remote = opener.open(url)
    info = remote.info()
    try:
        content_length = int(info["content-length"])
    except (KeyError, TypeError, ValueError):
        # Missing or malformed Content-Length header.
        content_length = 0
    bytes_read = 0
    try:
        while True:
            buf = remote.read(GET_BLOCK_SIZE)
            if not buf:
                # EOF
                break
            bytes_read += len(buf)
            fileobj.write(buf)
            if progress_hook:
                progress_hook(content_length, bytes_read)
    finally:
        # BUG FIX: release the connection even when reading or writing
        # fails part-way through the transfer.
        remote.close()
    fileobj.flush()
    return bytes_read, info
def __init__(self, elem=None, parent=None, method=lambda: None):
    """Record the node wiring and lazily create the class-wide opener."""
    self.elem = elem
    self.parent = parent
    self.method = method
    # The opener is shared across all EndPoint instances; build it only
    # on the first construction.
    if EndPoint.opener is None:
        shared = OpenerDirector()
        shared.add_handler(HTTPHandler())
        shared.add_handler(HTTPSHandler(context=CTX))
        EndPoint.opener = shared
def remove_documents(doc_ids):
    """Ask the server to delete every document in *doc_ids*."""
    mainlog.debug("Deleting document {} from server".format(str(doc_ids)))
    opener = build_opener(HTTPHandler(), HTTPSHandler())
    for doc_id in doc_ids:
        mainlog.debug("Deleting document {} from server".format(doc_id))
        target = configuration.get(
            "DownloadSite",
            "base_url") + "/remove_file?file_id={}".format(doc_id)
        opener.open(target)
def __init__(self, **kwargs):
    """Constructor. Any keyword args are passed to the httpclient handler."""
    # Run the stock HTTPSHandler setup first, then stash the extra
    # arguments for later connection construction.
    HTTPSHandler.__init__(self)
    self._connection_args = kwargs
def __init__(self, key, cert):
    """Remember the client key/cert pair after standard handler setup."""
    HTTPSHandler.__init__(self)
    self.key = key
    self.cert = cert