def getcurrent_filename_and_hash(proxy_handl, ssl_cont, url):
    """Fetch a remote INI file over HTTPS and return (filename, md5, filepath).

    Reads the options FileName, MD5 and FilePath from the [AVV-ZIP] section
    (option names are case-insensitive in configparser).

    :param proxy_handl: urllib proxy handler chained into the opener
    :param ssl_cont: ssl.SSLContext used for the HTTPS connection
    :param url: URL of the remote INI file
    :return: (filename, md5, filepath) on success, ('', 0, '') otherwise
    """
    # Dummy return values in case getting the actual values fails.
    returntuple = '', 0, ''
    https_handler = ur.HTTPSHandler(context=ssl_cont)
    # Prepare wrapper for our https connection.
    opener = ur.build_opener(proxy_handl, https_handler)
    ur.install_opener(opener)
    # Open https connection.
    try:
        resp = opener.open(url)
    except Exception as e:
        # Best-effort: report and fall through to the dummy return value.
        print(e)
    else:
        config = ConfigParser(interpolation=None)
        # Read ini from https connection.  resp.read() yields bytes; decode
        # directly (the former ''.join(...) around the decoded string was a
        # no-op and has been removed).
        try:
            config.read_string(resp.read().decode('utf-8'))
        except ParsingError:
            pass
        # Return values for options FileName, MD5 and FilePath from section
        # AVV-ZIP (options are not case-sensitive).
        if (config.has_option('AVV-ZIP', 'filename')
                and config.has_option('AVV-ZIP', 'md5')
                and config.has_option('AVV-ZIP', 'filepath')):
            returntuple = (config.get('AVV-ZIP', 'filename'),
                           config.get('AVV-ZIP', 'md5'),
                           config.get('AVV-ZIP', 'filepath'))
    return returntuple
def _init_https_handler(self):
    """Build an HTTPSHandler whose SSL context honours ``self.verify``.

    When verification is disabled an unverified context is used, otherwise
    a verified one.
    """
    make_context = (self.create_verified_context if self.verify
                    else self.create_unverified_context)
    return request.HTTPSHandler(context=make_context())
def loadPage(self):
    """Open the xicidaili listing page and return the raw response."""
    target = self.dSites["xicidaili"]
    opener = ulb.build_opener(ulb.HTTPSHandler())
    page_request = ulb.Request(target, headers=self.headers)
    return opener.open(page_request)
def check_proxy(q):
    """ check proxy for and append to working proxies

    :param q: queue of "host:port" proxy candidates
    """
    if q.empty():
        return
    proxy = q.get(False).replace("\r", "").replace("\n", "")
    try:
        opener = rq.build_opener(
            rq.ProxyHandler({'https': 'https://' + proxy}),
            rq.HTTPHandler(),
            rq.HTTPSHandler())
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        rq.install_opener(opener)
        req = rq.Request('https://api.ipify.org/')
        # The echo service must report the proxy's own address; otherwise
        # the proxy did not actually carry the request.
        if rq.urlopen(req).read().decode() == proxy.partition(':')[0]:
            proxys_working_list.update({proxy: proxy})
            if _verbose:
                print(c + "[+]" + w + " Successfully connected with " + proxy)
        else:
            if _verbose:
                print(r + "[!]" + w + " Failed to connect with " + proxy)
    except Exception as err:
        if _verbose:
            print(r + "[!]" + w + " Failed to connect with " + proxy)
        if _debug:
            logger.error(err)
def login(self):
    """POST credentials to the /login endpoint and capture the auth cookie.

    Sets ``self.authtok`` and ``self.authenticated`` when an "authtok"
    cookie comes back; returns ``self.authenticated``.
    """
    url = "%s://%s/login" % (self.scheme, self.endpoint)
    handlers = [
        request.HTTPHandler(debuglevel=self.debug),
        request.HTTPCookieProcessor(self.cookie_jar),
    ]
    if self.scheme == "https":
        if self.insecure:
            context = ssl._create_unverified_context()
        else:
            context = ssl.create_default_context()
        handlers.append(
            request.HTTPSHandler(debuglevel=self.debug, context=context))
    opener = request.build_opener(*handlers)
    # NOTE(review): this just overwrites an "append" attribute on the opener
    # each iteration and never affects the request; it looks like it was
    # meant to add the cookies somewhere -- confirm the original intent.
    for k, v in self.cookies.items():
        opener.append = (k, v)
    data = {"username": self.username, "password": self.password}
    req = request.Request(url, data=urlencoder.urlencode(data).encode())
    opener.open(req)
    for cookie in self.cookie_jar:
        if cookie.name == "authtok":
            self.authtok = cookie.value
            self.authenticated = True
    return self.authenticated
def _make_connection(self, url, method="GET", postData=None,
                     send_headers_dict=None, timeout=None):
    """Open *url* and yield ``(connection, status_code, headers)``.

    Connection failures are reported on stderr and yield
    ``(None, None, None)``; the connection, if any, is always closed when
    the consumer is done.
    """
    conn_object = http_code = received_headers = None
    req_object = request.Request(url, data=postData,
                                 headers=send_headers_dict, method=method)
    opener_args = [request.ProxyHandler(self.proxies)]
    if self.ssl_unverified:
        # Certificate verification explicitly disabled on request.
        opener_args.append(
            request.HTTPSHandler(context=ssl._create_unverified_context()))
    effective_timeout = (timeout if type(timeout) == int
                         else socket._GLOBAL_DEFAULT_TIMEOUT)
    try:
        conn_object = request.build_opener(*opener_args).open(
            req_object, timeout=effective_timeout)
        http_code = conn_object.status
        received_headers = conn_object.getheaders()
    except Exception as exception_object:
        print(f" Connection error! {exception_object} ( {url} )",
              file=sys.stderr)
    try:
        yield (conn_object, http_code, received_headers)
    except Exception as exception_object:
        raise ParserError(
            f"Parser error! {exception_object} ( {self.url} )")
    finally:
        if conn_object:
            conn_object.close()
def request(url, post=None, headers=None, redirect=True, timeout=30):
    """Fetch *url* and return the body (or the redirect target).

    :param url: URL to fetch
    :param post: optional POST payload; a dict is url-encoded first
    :param headers: optional extra request headers.  FIX: the old
        ``headers={}`` mutable default was mutated (``headers.update``/
        ``del``) on every call, leaking state across calls and into
        callers' dicts; it is now copied.
    :param redirect: when False, redirects are suppressed and the Location
        header is returned instead of the body
    :param timeout: socket timeout in seconds
    """
    # Copy instead of mutating the caller's dict / a shared default.
    headers = dict(headers) if headers else {}
    handlers = []
    # Python 2.7.9-2.7.11 verify certificates by default; fall back to an
    # unverified context on those versions.
    if (2, 7, 8) < sys.version_info < (2, 7, 12):
        try:
            import ssl
            ssl_context = ssl.create_default_context()
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
            handlers += [urllib2.HTTPSHandler(context=ssl_context)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        except Exception:
            # Narrowed from a bare except: never swallow SystemExit etc.
            pass
    headers.update({
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:75.0) Gecko/20100101 Firefox/75.0',
        'Referer': 'https://www.rtlmost.hu/',
        'x-customer-name': 'rtlhu',
        'Accept-Language': 'hu-HU,hu;q=0.8,en-US;q=0.5,en;q=0.3'
    })
    if isinstance(post, dict):
        post = urlencode(post)
    if redirect == False:
        class NoRedirectHandler(urllib2.HTTPRedirectHandler):
            # Swallow the redirect and hand back the original response.
            def http_error_302(self, req, fp, code, msg, headers):
                infourl = addinfourl(fp, headers, req.get_full_url())
                infourl.code = code
                return infourl
            http_error_300 = http_error_302
            http_error_301 = http_error_302
            http_error_303 = http_error_302
            http_error_307 = http_error_302
        opener = urllib2.build_opener(NoRedirectHandler())
        urllib2.install_opener(opener)
        # Drop the Referer when we only want the Location header.
        headers.pop('Referer', None)
    request = urllib2.Request(url, data=post, headers=headers)
    response = urllib2.urlopen(request, timeout=timeout)
    if redirect == False:
        result = response.headers.get('location')
    else:
        result = response.read(5242880)
    response.close()
    if sys.version_info[0] == 3 and not isinstance(result, str):
        return result.decode('utf-8')
    else:
        return result
def check_up(url, ssl_ca=None, ssl_client_cert=None, ssl_client_key=None):
    """Check that a url responds with a non-error code

    For use in exec_python_in_pod, which means imports need to be in the
    function

    Uses stdlib only because requests isn't always available in the target pod
    """
    from urllib import request
    import ssl

    if ssl_ca:
        context = ssl.create_default_context(
            purpose=ssl.Purpose.SERVER_AUTH, cafile=ssl_ca)
        if ssl_client_cert:
            context.load_cert_chain(certfile=ssl_client_cert,
                                    keyfile=ssl_client_key)
    else:
        context = None

    # Disable redirects (this would be easier if we ran exec in an image
    # with requests).
    class NoRedirect(request.HTTPRedirectHandler):
        def redirect_request(self, req, fp, code, msg, headers, newurl):
            # Returning None makes the redirect surface as an HTTPError.
            return None

    opener = request.build_opener(
        NoRedirect, request.HTTPSHandler(context=context))
    try:
        u = opener.open(url)
    except request.HTTPError as e:
        # 4xx/5xx is a real failure; a suppressed 3xx is acceptable.
        if e.status >= 400:
            raise
        u = e
    print(u.status)
def setup(self):
    """Prepare an isolated server fixture: temp storage, a free port, a
    serving thread, and an opener that skips TLS verification and
    redirects."""
    self.configuration = config.load()
    self.colpath = tempfile.mkdtemp()
    self.shutdown_socket, shutdown_socket_out = socket.socketpair()
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        # Find an available port by binding to port 0.
        sock.bind(("127.0.0.1", 0))
        self.sockname = sock.getsockname()
    overrides = {
        "storage": {
            "filesystem_folder": self.colpath,
            # Disable syncing to disk for better performance.
            "_filesystem_fsync": "False"
        },
        "server": {
            "hosts": "[%s]:%d" % self.sockname
        },
        # Enable debugging for new processes.
        "logging": {
            "level": "debug"
        }
    }
    self.configuration.update(overrides, "test", privileged=True)
    self.thread = threading.Thread(
        target=server.serve,
        args=(self.configuration, shutdown_socket_out))
    ssl_context = ssl.create_default_context()
    ssl_context.check_hostname = False
    ssl_context.verify_mode = ssl.CERT_NONE
    self.opener = request.build_opener(
        request.HTTPSHandler(context=ssl_context), DisabledRedirectHandler)
def makeRequest(cam):
    """Download the current image for camera *cam* into the cache and
    refresh the corresponding "latest" copy (dry-run under SAFE_MODE)."""
    starttime = datetime.utcnow()
    timestamp = starttime.strftime("%Y%m%d%H%M%S")
    # For improper ssl certificates: ignore certificate validation.
    context = ssl.create_default_context()
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    proxy = req.ProxyHandler({})
    opener = req.build_opener(proxy, req.HTTPSHandler(context=context))
    req.install_opener(opener)
    fn = cachepath + cam + "/{}_{}.jpg".format(cam, timestamp)
    fn_latest = latest + cam + '_latest.jpg'
    if SAFE_MODE:
        print("Would retrieve {} to {}".format(urls[cam] + url_suffix, fn))
        print("Would copy {} to {}".format(fn, fn_latest))
        return
    req.urlretrieve(urls[cam] + url_suffix, fn)
    chmod(fn, 0o755)  # set the permission
    system("cp {} {}".format(fn, fn_latest))
    chmod(fn_latest, 0o755)  # set the permission
def videomega_download(url, info_only=False, **kwargs):
    # Download (or, with info_only, just probe) a videomega.tv video.
    # The flow: install a cookie-carrying opener, resolve the view.php URL,
    # then de-obfuscate the packed player script to recover the media URL.
    # Hot-plug cookie handler (TLSv1-only context).
    ssl_context = request.HTTPSHandler(
        context=ssl.SSLContext(ssl.PROTOCOL_TLSv1))
    cookie_handler = request.HTTPCookieProcessor()
    opener = request.build_opener(ssl_context, cookie_handler)
    opener.addheaders = [('Referer', url), ('Cookie', 'noadvtday=0')]
    request.install_opener(opener)
    if re.search(r'view\.php', url):
        php_url = url
    else:
        # Extract the player parameters from the wrapper page and build the
        # canonical view.php URL from them.
        content = get_content(url)
        m = re.search(r'ref="([^"]*)";\s*width="([^"]*)";\s*height="([^"]*)"',
                      content)
        ref = m.group(1)
        width, height = m.group(2), m.group(3)
        php_url = (
            'http://videomega.tv/view.php?ref={}&width={}&height={}'.format(
                ref, width, height))
    content = get_content(php_url)
    title = match1(content, r'<title>(.*)</title>')
    # De-obfuscate the eval-packed script: the template string uses base-36
    # digit letters ('a'..'z' -> 10..35) as placeholders, which are filled
    # in from the '|'-separated word list.
    js = match1(content, r'(eval.*)')
    t = match1(js, r'\$\("\w+"\)\.\w+\("\w+","([^"]+)"\)')
    t = re.sub(r'(\w)', r'{\1}', t)
    t = t.translate({87 + i: str(i) for i in range(10, 36)})
    s = match1(js, r"'([^']+)'\.split").split('|')
    src = t.format(*s)
    _type, ext, size = url_info(src, faker=True)
    print_info(site_info, title, _type, size)
    if not info_only:
        download_urls([src], title, ext, size, **kwargs)
def fetch(self, server):
    """
    This function gets your IP from a specific server.
    """
    url = None
    jar = cjar.CookieJar()
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    handlers = (urllib.HTTPCookieProcessor(jar),
                urllib.HTTPSHandler(context=ctx))
    opener = urllib.build_opener(*handlers)
    opener.addheaders = [
        ('User-agent', "Mozilla/5.0 (X11; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0"),
        ('Accept', "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
        ('Accept-Language', "en-US,en;q=0.5"),
    ]
    try:
        url = opener.open(server, timeout=4)
        content = url.read()
        # Didn't want to import chardet. Prefered to stick to stdlib
        if PY3K:
            try:
                content = content.decode('UTF-8')
            except UnicodeDecodeError:
                content = content.decode('ISO-8859-1')
        # Pull the first dotted-quad IPv4 address out of the response body.
        m = re.search(
            '(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)',
            content)
        myip = m.group(0)
        return myip if len(myip) > 0 else ''
    except Exception:
        return ''
    finally:
        if url:
            url.close()
def get_csrf():
    """ get CSRF token from login page to use in POST requests """
    global csrf_token
    print(bcolors.WARNING + "[+] Getting CSRF Token: " + bcolors.ENDC)
    try:
        opener = rq.build_opener(rq.HTTPHandler(), rq.HTTPSHandler())
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        rq.install_opener(opener)
        request = rq.Request('https://www.instagram.com/')
        try:
            # Python 2: the message object exposes a headers list.
            headers = rq.urlopen(request).info().headers
        except Exception:
            # Python 3: use get_all on the message object instead.
            headers = rq.urlopen(request).info().get_all('Set-Cookie')
        # The token is the value part of the "csrftoken=..." cookie.
        for header in headers:
            if header.find('csrftoken') != -1:
                csrf_token = header.partition(';')[0].partition('=')[2]
                print(bcolors.OKGREEN + "[+] CSRF Token :", csrf_token,
                      "\n" + bcolors.ENDC)
    except Exception as err:
        print(bcolors.FAIL +
              "[!] Can't get CSRF token , please use -d for debug" +
              bcolors.ENDC)
        if _debug:
            logger.error(err)
        print(bcolors.FAIL + "[!] Exiting..." + bcolors.ENDC)
        exit(3)
def get_csrf():
    """ get CSRF token from login page to use in POST requests """
    global csrf_token
    print(y + "[+]" + w + " Trying to get CSRF token ...")
    try:
        opener = rq.build_opener(rq.HTTPHandler(), rq.HTTPSHandler())
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        rq.install_opener(opener)
        request = rq.Request('https://www.instagram.com/')
        try:
            # Python 2: the message object exposes a headers list.
            headers = rq.urlopen(request).info().headers
        except Exception:
            # Python 3: use get_all on the message object instead.
            headers = rq.urlopen(request).info().get_all('Set-Cookie')
        # The token is the value part of the "csrftoken=..." cookie.
        for header in headers:
            if header.find('csrftoken') != -1:
                csrf_token = header.partition(';')[0].partition('=')[2]
                print(c + "[+]" + w + " CSRF Token : " + csrf_token)
    except Exception as err:
        print(r + "[!]" + w + " Oops, cant get CSRF token, please try again")
        if _debug:
            logger.error(err)
        print("[!]" " Exiting ...")
        exit(3)
def api_request_native(url, data=None, method=None):
    """Issue an authenticated JSON API request and return the decoded
    payload (None for 204 No Content); raises SimpleHTTPError on HTTP
    failures."""
    request = urllib.Request(url)
    if method:
        request.get_method = lambda: method
    request.add_header('Authorization', 'token ' + token_auth_string())
    request.add_header('Accept', 'application/json')
    request.add_header('Content-Type', 'application/json')
    if data is not None:
        # add_data expects bytes on Python 3.
        request.add_data(bytes(data, 'utf8') if PY3 else data)
    if settings.get('https_proxy'):
        opener = urllib.build_opener(
            urllib.HTTPHandler(), urllib.HTTPSHandler(),
            urllib.ProxyHandler({'https': settings.get('https_proxy')}))
        urllib.install_opener(opener)
    try:
        with contextlib.closing(urllib.urlopen(request)) as response:
            if response.code == 204:  # No Content
                return None
            return json.loads(response.read().decode('utf8'))
    except urllib.HTTPError as err:
        with contextlib.closing(err):
            raise SimpleHTTPError(err.code, err.read())
def check_proxy(q):
    """
    check proxy for and append to working proxies
    :param q: queue of "host:port" proxy candidates
    """
    if q.empty():
        return
    proxy = q.get(False).replace("\r", "").replace("\n", "")
    try:
        opener = rq.build_opener(
            rq.ProxyHandler({'https': 'https://' + proxy}),
            rq.HTTPHandler(),
            rq.HTTPSHandler())
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        rq.install_opener(opener)
        req = rq.Request('https://api.ipify.org/')
        # The echo service must report the proxy's own address; otherwise
        # the proxy did not actually carry the request.
        echoed = rq.urlopen(req).read().decode()
        if echoed == proxy.partition(':')[0]:
            proxys_working_list.update({proxy: proxy})
            if _verbose:
                print(bcolors.OKGREEN + " --[+] ", proxy,
                      " | PASS" + bcolors.ENDC)
        elif _verbose:
            print(" --[!] ", proxy, " | FAILED")
    except Exception as err:
        if _verbose:
            print(" --[!] ", proxy, " | FAILED")
        if _debug:
            logger.error(err)
def nicovideo_download(url, info_only=False, **kwargs):
    """Download (or, with info_only, just probe) a nicovideo.jp video.

    Credentials are taken from ~/.netrc (machine "nicovideo") when present,
    otherwise prompted for interactively.
    """
    import ssl
    ssl_context = request.HTTPSHandler(
        context=ssl.SSLContext(ssl.PROTOCOL_TLSv1))
    cookie_handler = request.HTTPCookieProcessor()
    opener = request.build_opener(ssl_context, cookie_handler)
    request.install_opener(opener)

    import netrc
    import getpass
    try:
        info = netrc.netrc().authenticators('nicovideo')
    except Exception:
        info = None
    if info is None:
        # FIX: this section of the source was garbled (credential-scrubbed
        # to '******'); reconstructed as the obvious interactive prompt.
        user = input('User: ')
        password = getpass.getpass('Password: ')
    else:
        user, password = info[0], info[2]
    print('Logging in...')
    nicovideo_login(user, password)

    html = get_content(url)  # necessary!
    title = match1(html, r'<title>(.+?)</title>')
    vid = url.split('/')[-1].split('?')[0]
    api_html = get_content(
        'http://flapi.nicovideo.jp/api/getflv?v={}'.format(vid))
    real_url = parse.unquote(match1(api_html, r'url=([^&]+)&'))

    _type, ext, size = url_info(real_url)
    print_info(site_info, title, _type, size)
    if not info_only:
        download_urls([real_url], title, ext, size, **kwargs)
def urlopen(url):
    """Opens an url with urllib2.

    Applies the proxy and SSL settings from the Addons preferences and
    returns the open response object, or None on any error.
    """
    timeout = 5
    # Proxy and ssl configuration.
    pref = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Addons")
    # FIX: default to "no proxy" up front -- previously `proxies` was left
    # unbound (NameError at ProxyHandler(...)) when NoProxyCheck was False
    # but neither SystemProxyCheck nor UserProxyCheck was set.
    proxies = {}
    if not pref.GetBool("NoProxyCheck", True):
        if pref.GetBool("SystemProxyCheck", False):
            proxy = urllib2.getproxies()
            proxies = {"http": proxy.get('http'), "https": proxy.get('http')}
        elif pref.GetBool("UserProxyCheck", False):
            proxy = pref.GetString("ProxyUrl", "")
            proxies = {"http": proxy, "https": proxy}
    handlers = [urllib2.ProxyHandler(proxies)]
    if ssl_ctx:
        handlers.append(urllib2.HTTPSHandler(context=ssl_ctx))
    # FIX: previously an empty dict was used as the "no handler" fallback
    # and passed to build_opener, which only accepts handler instances.
    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)
    # Url opening.
    req = urllib2.Request(url, headers={'User-Agent': "Magic Browser"})
    try:
        return urllib2.urlopen(req, timeout=timeout)
    except Exception:
        return None
def api_request(url, data=None, token=None, https_proxy=None, method=None):
    """Perform a Gist API call and return the decoded JSON payload (None
    for 204 No Content); raises SimpleHTTPError on HTTP failures."""
    settings = sublime.load_settings('Gist.sublime-settings')
    request = urllib.Request(url)
    if method:
        request.get_method = lambda: method
    auth_token = token if token != None else token_auth_string()
    request.add_header('Authorization', 'token ' + auth_token)
    request.add_header('Accept', 'application/json')
    request.add_header('Content-Type', 'application/json')
    if data is not None:
        request.add_data(bytes(data.encode('utf8')))
    proxy = https_proxy if https_proxy != None else settings.get(
        'https_proxy')
    if proxy:
        handlers = (urllib.HTTPHandler(), urllib.HTTPSHandler(),
                    urllib.ProxyHandler({'https': proxy}))
        urllib.install_opener(urllib.build_opener(*handlers))
    try:
        with contextlib.closing(urllib.urlopen(request)) as response:
            if response.code == 204:  # no content
                return None
            return json.loads(response.read().decode('utf8', 'ignore'))
    except urllib.HTTPError as err:
        with contextlib.closing(err):
            raise SimpleHTTPError('{}: {}'.format(err.code, err.read()))
def opener_creat():
    """Build an opener that tracks cookies for both HTTP and HTTPS."""
    jar = cookiejar.CookieJar()
    handlers = (
        ur.HTTPCookieProcessor(jar),  # cookie management
        ur.HTTPHandler(),             # plain http
        ur.HTTPSHandler(),            # https
    )
    return ur.build_opener(*handlers)
def init_no_ssl(cls):
    """Install an opener on EtherPadEditor that skips certificate checks."""
    from ssl import create_default_context
    from _ssl import CERT_NONE
    insecure_ctx = create_default_context()
    insecure_ctx.check_hostname = False
    insecure_ctx.verify_mode = CERT_NONE
    EtherPadEditor.OPENER = request.build_opener(
        request.HTTPSHandler(context=insecure_ctx))
def open_url(url, proxy, resp_code, sslContext):
    # Open *url* through a hand-assembled OpenerDirector and validate the
    # response code, translating each failure mode into
    # FileRetrieveException.
    #
    # url: str or request.Request
    # proxy: proxy URL string; any falsy value means "no proxy"
    # resp_code: the HTTP status code considered success
    # sslContext: currently unused -- see the commented context= notes below
    if isinstance(url, request.Request):
        logging.info('opener: opening "{0}"'.format(url.full_url))
    else:
        logging.info('opener: opening "{0}"'.format(url))
    opener = request.OpenerDirector()
    if proxy:
        # not doing 'is not None', so empty strings don't try and proxy
        # have a proxy option to take it from the envrionment vars
        opener.add_handler(
            request.ProxyHandler({
                'http': proxy,
                'https': proxy
            }))
    else:
        opener.add_handler(request.ProxyHandler({}))
    opener.add_handler(request.HTTPHandler())
    opener.add_handler(PackratHandler())
    if hasattr(http.client, 'HTTPSConnection'):
        opener.add_handler(request.HTTPSHandler())  # context=sslContext
        opener.add_handler(PackratsHandler())  # context=sslContext
    opener.add_handler(request.FileHandler())
    opener.add_handler(request.FTPHandler())
    opener.add_handler(request.UnknownHandler())
    try:
        resp = opener.open(url, timeout=WEB_HANDLE_TIMEOUT)
    except request.HTTPError as e:
        raise FileRetrieveException('HTTPError "{0}"'.format(e))
    except request.URLError as e:
        if isinstance(e.reason, socket.timeout):
            raise FileRetrieveException(
                'Request Timeout after {0} seconds'.format(WEB_HANDLE_TIMEOUT))
        # NOTE(review): url.full_url raises AttributeError when *url* is a
        # plain string -- confirm callers always pass a Request here, or
        # guard this message.
        raise FileRetrieveException(
            'URLError "{0}" for "{1}" via "{2}"'.format(
                e, url.full_url, proxy))
    except socket.timeout:
        raise FileRetrieveException(
            'Request Timeout after {0} seconds'.format(WEB_HANDLE_TIMEOUT))
    except socket.error as e:
        raise FileRetrieveException('Socket Error "{0}"'.format(e))
    if resp.code is not None:
        # FileHandler, FTPHandler do not have a response code
        if resp.code == 404:
            raise FileRetrieveException('File "{0}" not Found'.format(url))
        if resp.code != resp_code:
            raise FileRetrieveException('Invalid Response code "{0}"'.format(
                resp.code))
    return resp
def __init__(self, host, root_path, proxy=None, verify_ssl=True):
    """Create a CInP client bound to *host*.

    :param host: scheme+hostname (must start with http(s): and must not
        end with '/')
    :param root_path: API root path used to build the URI helper
    :param proxy: proxy URL; None/'' disables proxying entirely
    :param verify_ssl: when False, HTTPS certificate checks are skipped
    """
    super().__init__()
    if not host.startswith(('http:', 'https:')):
        raise ValueError('hostname must start with http(s):')
    if host[-1] == '/':
        raise ValueError('hostname must not end with "/"')
    self.proxy = proxy
    self.host = host
    logging.debug(
        'cinp: new client host: "{0}", root_path: "{1}", via: "{2}"'.
        format(self.host, root_path, self.proxy))
    self.uri = URI(root_path)
    self.opener = request.OpenerDirector()
    if self.proxy:
        # Not doing 'is not None', so empty strings don't try and proxy;
        # there is a separate option to take it from the environment vars.
        self.opener.add_handler(
            request.ProxyHandler({
                'http': self.proxy,
                'https': self.proxy
            }))
    else:
        self.opener.add_handler(request.ProxyHandler({}))
    self.opener.add_handler(request.HTTPHandler())
    if hasattr(http.client, 'HTTPSConnection'):
        if verify_ssl:
            self.opener.add_handler(request.HTTPSHandler())
        else:
            self.opener.add_handler(
                request.HTTPSHandler(
                    context=ssl._create_unverified_context()))
    self.opener.add_handler(request.UnknownHandler())
    self.opener.addheaders = [
        ('User-Agent',
         'python CInP client {0}'.format(__CLIENT_VERSION__)),
        ('Accepts', 'application/json'),
        ('Accept-Charset', 'utf-8'),
        ('CInP-Version', __CINP_VERSION__)
    ]
def init():
    """Install a global opener with cookie support and request tracing
    (debuglevel=1). No-op when already initialised."""
    if is_init:
        return
    # NOTE(review): nothing visible here ever sets ``is_init`` -- confirm it
    # is flipped elsewhere, otherwise this guard never latches.
    http_debug = request.HTTPHandler(debuglevel=1)
    https_debug = request.HTTPSHandler(debuglevel=1)
    cookies = request.HTTPCookieProcessor(http.cookiejar.CookieJar())
    request.install_opener(
        request.build_opener(http_debug, https_debug, cookies))
def __init__(self):
    """Set up an opener wired with an unverified-SSL handler and a
    Mozilla-format cookie jar."""
    self.cookie_value = ""
    self.log = getLogger(__name__)
    # Certificate validation is intentionally disabled.
    self.context = ssl._create_unverified_context()
    self.cookie = cookiejar.MozillaCookieJar()
    self.ssl_handler = request.HTTPSHandler(context=self.context)
    self.cookie_handler = request.HTTPCookieProcessor(self.cookie)
    self.handlers = [self.ssl_handler, self.cookie_handler]
    self.opener = request.build_opener(*self.handlers)
def resetProxies(self, httpProxyTuple):
    # Rebuild the proxy/auth handler chain and the shared opener from
    # *httpProxyTuple*.  NTLM authentication is attempted first (via a
    # plugin-provided handler or the site-packages ntlm module) when a
    # username/password is supplied; otherwise plain basic-auth handlers
    # are used.  for ntlm user and password are required
    self.hasNTLM = False
    # save for resetting in noCertificateCheck setter
    self._httpProxyTuple = httpProxyTuple
    if isinstance(httpProxyTuple, (tuple, list)) and len(httpProxyTuple) == 5:
        useOsProxy, _urlAddr, _urlPort, user, password = httpProxyTuple
        _proxyDirFmt = proxyDirFmt(httpProxyTuple)
        # only try ntlm if user and password are provided because passman
        # is needed
        if user and not useOsProxy:
            for pluginXbrlMethod in pluginClassMethods(
                    "Proxy.HTTPAuthenticate"):
                pluginXbrlMethod(self.cntlr)
            for pluginXbrlMethod in pluginClassMethods(
                    "Proxy.HTTPNtlmAuthHandler"):
                HTTPNtlmAuthHandler = pluginXbrlMethod()
                if HTTPNtlmAuthHandler is not None:
                    self.hasNTLM = True
            if not self.hasNTLM:  # try for python site-packages ntlm
                try:
                    from ntlm import HTTPNtlmAuthHandler
                    self.hasNTLM = True
                except ImportError:
                    pass
        if self.hasNTLM:
            pwrdmgr = proxyhandlers.HTTPPasswordMgrWithDefaultRealm()
            pwrdmgr.add_password(None, _proxyDirFmt["http"], user, password)
            self.proxy_handler = proxyhandlers.ProxyHandler({})
            self.proxy_auth_handler = proxyhandlers.ProxyBasicAuthHandler(
                pwrdmgr)
            self.http_auth_handler = proxyhandlers.HTTPBasicAuthHandler(
                pwrdmgr)
            self.ntlm_auth_handler = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(
                pwrdmgr)
            proxyHandlers = [
                self.proxy_handler, self.ntlm_auth_handler,
                self.proxy_auth_handler, self.http_auth_handler
            ]
    if not self.hasNTLM:
        self.proxy_handler = proxyhandlers.ProxyHandler(
            proxyDirFmt(httpProxyTuple))
        self.proxy_auth_handler = proxyhandlers.ProxyBasicAuthHandler()
        self.http_auth_handler = proxyhandlers.HTTPBasicAuthHandler()
        proxyHandlers = [
            self.proxy_handler, self.proxy_auth_handler,
            self.http_auth_handler
        ]
    if ssl and self.noCertificateCheck:
        # this is required in some Akamai environments, such as sec.gov
        context = ssl.create_default_context()
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
        proxyHandlers.append(proxyhandlers.HTTPSHandler(context=context))
    self.opener = proxyhandlers.build_opener(*proxyHandlers)
    self.opener.addheaders = [('User-Agent', self.httpUserAgent),
                              ('Accept-Encoding', 'gzip, deflate')]
def send_request(req, timeout=5, context=None):
    """Open *req* through an HTTPSHandler-based opener.

    :param req: urllib request object
    :param timeout: request timeout in seconds
    :param context: optional ssl.SSLContext (e.g. one that skips
        certificate verification); when omitted, the handler runs with
        debuglevel=1 so the exchange is traced
    :return: the response object from ``opener.open``
    """
    if context:
        handler = request.HTTPSHandler(context=context)
    else:
        # debuglevel defaults to 0; 1 turns on request/response tracing.
        handler = request.HTTPSHandler(debuglevel=1)
    # FIX: the opener construction and the open() call were duplicated in
    # both branches; only the handler differs, so build/open once.
    opener = request.build_opener(handler)
    return opener.open(req, timeout=timeout)
def __init__(self):
    """Hold the scrape targets and default headers for the proxy sites."""
    self.dSites = {
        "kuaidaili": "https://www.kuaidaili.com/free/inha/",
        "xicidaili": "https://www.xicidaili.com/",
        "89ip": "http://www.89ip.cn/",
    }
    self.headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36",
        "Connection": "keep-alive",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
        "Accept-Language": "zh-CN,zh;q=0.9",
    }
    # NOTE(review): this opener is built but never stored or installed --
    # confirm whether it was meant to be kept on self.
    http_handler = ulb.HTTPSHandler()
    opener = ulb.build_opener(http_handler)
def __init__(self):
    """Prepare a cookie-aware urllib agent plus the login state fields."""
    self.__cookie = cookiejar.CookieJar()  # cookie storage
    # Cookie processor so the session survives across requests.
    self.__cookie_handler = request.HTTPCookieProcessor(self.__cookie)
    self.__http_handler = request.HTTPHandler()    # http handler
    self.__https_handler = request.HTTPSHandler()  # https handler
    self.__agent = request.build_opener(
        self.__cookie_handler, self.__http_handler, self.__https_handler)
    self.__login_res = None
    # Presumably student id (xh) and password (mm) for the login form --
    # confirm against the login code.
    self.__xh = None
    self.__mm = None
def send_request(req, timeout=100, content=None):
    """Issue *req* through a purpose-built HTTPSHandler/opener pair.

    :param req: urllib request object
    :param timeout: request timeout in seconds
    :param content: optional ssl context (used to skip certificate checks)
    :return: the response from ``opener.open``
    """
    # debuglevel defaults to 0; setting it to 1 enables debug mode, which
    # traces each request so every exchange can be inspected.
    if content:
        handler = request.HTTPSHandler(context=content, debuglevel=1)
    else:
        handler = request.HTTPSHandler(debuglevel=1)
    return request.build_opener(handler).open(req, timeout=timeout)