def linkedin():
    """Authenticate against LinkedIn (Python 2 urllib2 stack).

    Builds a cookie-aware ``opener`` (stored in a module global), optionally
    routed through the first proxy in ``config['proxylist']``, scrapes the
    login CSRF token from the landing page, then submits the credentials
    from ``config``.  Prints the ``li_at`` session cookie on success, or
    "error" when it is absent.
    """
    global opener
    cookie_filename = "cookies.txt"
    # Simulate browser with cookies enabled
    cj = cookielib.MozillaCookieJar(cookie_filename)
    if os.access(cookie_filename, os.F_OK):
        # Reuse cookies from a previous run if the file exists.
        cj.load()
    # Load Proxy settings
    if len(config['proxylist']) > 0:
        proxy_handler = urllib2.ProxyHandler(
            {'https': config['proxylist'][0]})
        opener = urllib2.build_opener(
            proxy_handler,
            urllib2.HTTPRedirectHandler(),
            urllib2.HTTPHandler(debuglevel=0),
            urllib2.HTTPSHandler(debuglevel=0),
            urllib2.HTTPCookieProcessor(cj)
        )
    else:
        opener = urllib2.build_opener(
            urllib2.HTTPRedirectHandler(),
            urllib2.HTTPHandler(debuglevel=0),
            urllib2.HTTPSHandler(debuglevel=0),
            urllib2.HTTPCookieProcessor(cj)
        )
    user_agent = config['cookie']['User-Agent']
    opener.addheaders = [('User-Agent', user_agent)]
    # Get CSRF Token from the login form on the landing page.
    html = load_page("https://www.linkedin.com/")
    soup = BeautifulSoup(html, "html.parser")
    csrf = soup.find(id="loginCsrfParam-login")['value']
    # Authenticate by POSTing the form-encoded credentials.
    login_data = urllib.urlencode({
        'session_key': config['username'],
        'session_password': config['password'],
        'loginCsrfParam': csrf,
    })
    html = load_page("https://www.linkedin.com/uas/login-submit", login_data)
    soup = BeautifulSoup(html, "html.parser")
    try:
        # li_at is LinkedIn's session cookie; reaching into the private
        # _cookies mapping raises KeyError when login failed.
        print(cj._cookies['.www.linkedin.com']['/']['li_at'].value)
    except Exception:
        print("error")
    # NOTE(review): the jar is saved and the file immediately deleted, so
    # nothing persists across runs — confirm this is intentional.
    cj.save()
    os.remove(cookie_filename)
def __get_csrf_cookie(self):
    """Fetch the CSRF cookie from the server.

    :return: the ``csrftoken`` cookie object
    :raises IOError: when the server did not set a csrf cookie
    """
    handler = request.HTTPCookieProcessor()
    opener = request.build_opener(request.HTTPHandler(), handler)
    request.install_opener(opener)
    opener.open('{}'.format(self.__server_addr))
    # Scan the jar for the token the server just set.
    csrf_cookie = next(
        (c for c in handler.cookiejar if c.name == 'csrftoken'), None)
    if not csrf_cookie:
        raise IOError('No csrf cookie found')
    self.__log_writer.log('csrf_cookie: {}'.format(csrf_cookie),
                          level=LogLevel.DEBUG)
    return csrf_cookie
def urlrequest(stream, url, headers, write_lock, debug=0):
    """URL request function: GET *url*, decode the payload, and write any
    dict result to *stream* under *write_lock*."""
    if debug:
        print("Input for urlrequest", url, headers, debug)
    req = UrlRequest('GET', url=url, headers=headers)
    # Verbose HTTP handler only when debugging is requested.
    if debug:
        opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=1))
    else:
        opener = urllib2.build_opener()
    start = time.time()
    fdesc = opener.open(req)
    payload = fdesc.read()
    elapsed = time.time() - start
    fdesc.close()
    # just use elapsed time if we use html format
    if headers['Accept'] == 'text/html':
        response = {'ctime': str(elapsed)}
    else:
        response = JSONDecoder().decode(payload)
    if isinstance(response, dict):
        write_lock.acquire()
        stream.write(str(response) + '\n')
        stream.flush()
        write_lock.release()
def check_proxy(q):
    """Validate one proxy pulled from queue *q*; record it in
    proxys_working_list when the egress IP matches.

    :param q: queue of "host:port" proxy strings
    """
    if q.empty():
        return
    proxy = q.get(False).replace("\r", "").replace("\n", "")
    try:
        opener = rq.build_opener(
            rq.ProxyHandler({'https': 'https://' + proxy}),
            rq.HTTPHandler(),
            rq.HTTPSHandler())
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        rq.install_opener(opener)
        req = rq.Request('https://api.ipify.org/')
        # The proxy works when the echo service reports the proxy's own host.
        if rq.urlopen(req).read().decode() == proxy.partition(':')[0]:
            proxys_working_list.update({proxy: proxy})
            if _verbose:
                print(c + "[+]" + w + " Successfully connected with " + proxy)
        else:
            if _verbose:
                print(r + "[!]" + w + " Failed to connect with " + proxy)
    except Exception as err:
        if _verbose:
            print(r + "[!]" + w + " Failed to connect with " + proxy)
        if _debug:
            logger.error(err)
def api_request(url, data=None, token=None, https_proxy=None, method=None):
    """Send an authenticated JSON API request and return the parsed body.

    Fix: `!= None` comparisons replaced with the idiomatic `is not None`
    (PEP 8 — singletons are compared by identity).

    :param url: full API endpoint URL
    :param data: optional request body (str, utf-8 encoded before sending)
    :param token: auth token; falls back to token_auth_string()
    :param https_proxy: proxy URL; falls back to the plugin settings
    :param method: optional HTTP verb override (e.g. 'PATCH', 'DELETE')
    :return: decoded JSON response, or None on HTTP 204
    :raises SimpleHTTPError: on any HTTPError from the server
    """
    settings = sublime.load_settings('Gist.sublime-settings')
    request = urllib.Request(url)
    if method:
        request.get_method = lambda: method
    # Fall back to the configured token when none was supplied.
    token = token if token is not None else token_auth_string()
    request.add_header('Authorization', 'token ' + token)
    request.add_header('Accept', 'application/json')
    request.add_header('Content-Type', 'application/json')
    if data is not None:
        request.add_data(bytes(data.encode('utf8')))
    https_proxy = https_proxy if https_proxy is not None else settings.get(
        'https_proxy')
    if https_proxy:
        opener = urllib.build_opener(
            urllib.HTTPHandler(), urllib.HTTPSHandler(),
            urllib.ProxyHandler({'https': https_proxy}))
        urllib.install_opener(opener)
    try:
        with contextlib.closing(urllib.urlopen(request)) as response:
            if response.code == 204:  # no content
                return None
            return json.loads(response.read().decode('utf8', 'ignore'))
    except urllib.HTTPError as err:
        with contextlib.closing(err):
            raise SimpleHTTPError('{}: {}'.format(err.code, err.read()))
def __init__(self, cookieName):
    """Build an opener with verbose HTTP(S) handlers and an LWP cookie
    jar persisted under *cookieName*."""
    self.cookieName = cookieName
    # debuglevel=1 echoes every request/response for debugging.
    http_handler = request.HTTPHandler(debuglevel=1)
    https_handler = request.HTTPSHandler(debuglevel=1)
    self.ckojb = cookiejar.LWPCookieJar(self.cookieName)
    self.opener = request.build_opener(
        http_handler, https_handler,
        request.HTTPCookieProcessor(self.ckojb))
def __init__(self, proto, host, user=None, passwd=None, token=None,
             debug=False, disable_certificate_verification=False):
    """Set up an HTTP(S) agent for *proto*://*host*.

    Fix: membership test on ``ssl.__dict__.keys()`` replaced with the
    idiomatic ``hasattr`` check.

    :param proto: 'http' or 'https'
    :param host: server hostname
    :param user: optional username
    :param passwd: optional password
    :param token: authentication token
    :param debug: stored for later use by callers
    :param disable_certificate_verification: skip TLS cert checks when
        the running Python supports unverified contexts
    """
    self.url = "%s://%s" % (proto, host)
    self.user = user
    self.passwd = passwd
    self.token = token  # authentication token
    self.debug = debug
    context = None
    if disable_certificate_verification:
        import ssl
        # Only newer interpreters expose _create_unverified_context.
        if hasattr(ssl, "_create_unverified_context"):
            context = ssl._create_unverified_context()
    self.opener = urllib_request.OpenerDirector()
    self.opener.add_handler(urllib_request.HTTPHandler())
    try:
        self.opener.add_handler(
            urllib_request.HTTPSHandler(context=context))
    except TypeError:
        # Python < 2.7.9: HTTPSHandler takes no `context` argument.
        self.opener.add_handler(urllib_request.HTTPSHandler())
    # Record this instance as the class-wide default agent.
    Agent.defaultInst = self
def get_csrf():
    """Fetch Instagram's login page and stash its csrftoken cookie value
    in the module-global csrf_token; exit(3) on failure."""
    global csrf_token
    print(bcolors.WARNING + "[+] Getting CSRF Token: " + bcolors.ENDC)
    try:
        opener = rq.build_opener(rq.HTTPHandler(), rq.HTTPSHandler())
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        rq.install_opener(opener)
        request = rq.Request('https://www.instagram.com/')
        try:
            # python 2: raw header lines
            headers = rq.urlopen(request).info().headers
        except Exception:
            # python 3: Set-Cookie headers via get_all
            headers = rq.urlopen(request).info().get_all('Set-Cookie')
        for header in headers:
            if 'csrftoken' in header:
                # "csrftoken=VALUE; ..." -> VALUE
                csrf_token = header.partition(';')[0].partition('=')[2]
                print(bcolors.OKGREEN + "[+] CSRF Token :", csrf_token,
                      "\n" + bcolors.ENDC)
    except Exception as err:
        print(bcolors.FAIL +
              "[!] Can't get CSRF token , please use -d for debug" +
              bcolors.ENDC)
        if _debug:
            logger.error(err)
        print(bcolors.FAIL + "[!] Exiting..." + bcolors.ENDC)
        exit(3)
def get_csrf():
    """Grab Instagram's csrftoken cookie from the login page into the
    module-global csrf_token; exit(3) on failure."""
    global csrf_token
    print(y+"[+]"+w+" Trying to get CSRF token ...")
    try:
        opener = rq.build_opener(rq.HTTPHandler(), rq.HTTPSHandler())
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        rq.install_opener(opener)
        request = rq.Request('https://www.instagram.com/')
        try:
            # python 2: raw header lines
            headers = rq.urlopen(request).info().headers
        except Exception:
            # python 3: Set-Cookie headers via get_all
            headers = rq.urlopen(request).info().get_all('Set-Cookie')
        for header in headers:
            if 'csrftoken' in header:
                # "csrftoken=VALUE; ..." -> VALUE
                csrf_token = header.partition(';')[0].partition('=')[2]
                print(c+"[+]"+w+" CSRF Token : "+csrf_token)
    except Exception as err:
        print(r+"[!]"+w+" Oops, cant get CSRF token, please try again")
        if _debug:
            logger.error(err)
        print("[!] Exiting ...")
        exit(3)
def getdata(url, headers, encoded_data, ckey, cert, capath,
            verbose=None, method='POST'):
    """Place the given URL call to the server on behalf of the
    predict/upload APIs; returns JSON text when the URL ends in 'json',
    otherwise the raw body. Exits the process on HTTPError."""
    debug = 1 if verbose else 0
    req = urllib2.Request(url=url, headers=headers, data=encoded_data)
    if method == 'DELETE':
        req.get_method = lambda: 'DELETE'
    # Pick a transport: client-certificate auth when credentials are
    # supplied, otherwise a plain HTTP handler.
    if ckey and cert:
        handler = HTTPSClientAuthHandler(
            fullpath(ckey), fullpath(cert), capath, debug)
    elif cert and capath:
        handler = HTTPSClientAuthHandler(ckey, fullpath(cert), capath, debug)
    else:
        handler = urllib2.HTTPHandler(debuglevel=debug)
    jar = cookielib.CookieJar()
    data = {}
    try:
        opener = urllib2.build_opener(
            handler, urllib2.ProxyHandler({}),
            urllib2.HTTPCookieProcessor(jar))
        fdesc = opener.open(req)
        if url.endswith('json'):
            data = json.load(fdesc)
        else:
            data = fdesc.read()
        fdesc.close()
    except urllib2.HTTPError as error:
        print(error.read())
        sys.exit(1)
    if url.endswith('json'):
        return json.dumps(data)
    return data
def api_request_native(url, data=None, method=None):
    """Issue an authenticated JSON API request and return the parsed
    response, or None on HTTP 204.

    :raises SimpleHTTPError: on any HTTPError from the server
    """
    req = urllib.Request(url)
    if method:
        req.get_method = lambda: method
    req.add_header('Authorization', 'token ' + token_auth_string())
    req.add_header('Accept', 'application/json')
    req.add_header('Content-Type', 'application/json')
    if data is not None:
        # Python 3 wants bytes; Python 2 takes the string as-is.
        req.add_data(bytes(data, 'utf8') if PY3 else data)
    if settings.get('https_proxy'):
        urllib.install_opener(urllib.build_opener(
            urllib.HTTPHandler(), urllib.HTTPSHandler(),
            urllib.ProxyHandler({'https': settings.get('https_proxy')})))
    try:
        with contextlib.closing(urllib.urlopen(req)) as response:
            if response.code == 204:  # No Content
                return None
            else:
                return json.loads(response.read().decode('utf8'))
    except urllib.HTTPError as err:
        with contextlib.closing(err):
            raise SimpleHTTPError(err.code, err.read())
def login(self):
    """POST credentials to <scheme>://<endpoint>/login and capture the
    'authtok' cookie into self.authtok.

    Returns True when the cookie was found (self.authenticated), else the
    prior value of self.authenticated.
    """
    handlers = []
    url = "%s://%s/login" % (self.scheme, self.endpoint)
    handlers.append(request.HTTPHandler(debuglevel=self.debug))
    handlers.append(request.HTTPCookieProcessor(self.cookie_jar))
    data = {"username": self.username, "password": self.password}
    if self.scheme == "https":
        # insecure mode skips certificate verification entirely.
        if self.insecure:
            context = ssl._create_unverified_context()
        else:
            context = ssl.create_default_context()
        handlers.append(request.HTTPSHandler(debuglevel=self.debug,
                                             context=context))
    opener = request.build_opener(*handlers)
    for k, v in self.cookies.items():
        # NOTE(review): this rebinds an `append` attribute on the opener
        # rather than registering the cookie/header anywhere the request
        # machinery reads — looks like a bug (addheaders?); confirm intent.
        opener.append = (k, v)
    req = request.Request(url, data=urlencoder.urlencode(data).encode())
    opener.open(req)
    # Any authtok cookie set by the server marks us authenticated.
    for cookie in self.cookie_jar:
        if cookie.name == "authtok":
            self.authtok = cookie.value
            self.authenticated = True
    return self.authenticated
def check_proxy(q):
    """Validate one proxy pulled from queue *q*; record it in
    proxys_working_list when the egress IP matches.

    :param q: queue of "host:port" proxy strings
    """
    if q.empty():
        return
    proxy = q.get(False).replace("\r", "").replace("\n", "")
    try:
        opener = rq.build_opener(
            rq.ProxyHandler({'https': 'https://' + proxy}),
            rq.HTTPHandler(),
            rq.HTTPSHandler())
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        rq.install_opener(opener)
        req = rq.Request('https://api.ipify.org/')
        # The proxy works when the echo service reports the proxy's own host.
        if rq.urlopen(req).read().decode() == proxy.partition(':')[0]:
            proxys_working_list.update({proxy: proxy})
            if _verbose:
                print(bcolors.OKGREEN + " --[+] ", proxy,
                      " | PASS" + bcolors.ENDC)
        else:
            if _verbose:
                print(" --(!) ", proxy, " | FAILED")
    except Exception as err:
        if _verbose:
            print(" --(!) ", proxy, " | FAILED")
        if _debug:
            logger.error(err)
def _getFirstLoginInfo(self):
    """Request the login URL and pull token/success/apptempid out of the
    redirect target's query string.

    :return: dict with keys token, success, apptempid; None on failure
    """
    pattern = re.compile(
        '.+?token=(?P<token>.+?)&success=(?P<success>.+?)&apptempid=(?P<apptempid>.+?)$'
    )
    opener = request.build_opener(request.HTTPHandler())
    req = request.Request(self.loginUrl)
    try:
        response = opener.open(req)
    except Exception as e:
        print(e)
        return
    if response.getcode() != 200:
        print('[ERROR] 请求失败: {}'.format(self.loginUrl))
        return
    # The server redirects; the final URL carries the login parameters.
    self.loginUrl = response.url
    match = pattern.match(self.loginUrl.strip())
    if match:
        return match.groupdict()
    print('[ERROR] 解析url失败: {}'.format(self.loginUrl))
    return
def opener_creat():
    """Return an opener wired with a fresh in-memory cookie jar plus
    explicit HTTP and HTTPS handlers."""
    jar = cookiejar.CookieJar()
    return ur.build_opener(
        ur.HTTPCookieProcessor(jar),  # cookie management
        ur.HTTPHandler(),
        ur.HTTPSHandler())
def go_to_endpoint(context, endpoint):
    """GET <context.apiUrl>/<endpoint> (URL-quoted) and stash the raw
    response and its utf-8 body on the behave context."""
    target = "{}/{}".format(context.apiUrl, parse.quote(endpoint))
    opener = request.OpenerDirector()
    opener.add_handler(request.HTTPHandler())
    context.endpointResponse = opener.open(target)
    context.responseBodyString = context.endpointResponse.read().decode("utf8")
def handler_opener(url):
    """Open *url* through a hand-built opener and print the body.

    urlopen alone offers no hook for extras such as proxies, hence the
    explicit handler/opener construction (HTTP port 80 / HTTPS port 443).
    """
    opener = request.build_opener(request.HTTPHandler())
    print(opener.open(url).read())
def open_url(url, proxy, resp_code, sslContext):
    """Open *url* (string or Request) through an explicitly assembled
    opener, optionally via *proxy*, and return the response object.

    Raises FileRetrieveException on HTTP errors, timeouts, socket errors,
    404s, or when the response code differs from *resp_code*.
    NOTE(review): sslContext is currently unused — the handler context
    arguments are commented out; confirm whether that is intentional.
    """
    if isinstance(url, request.Request):
        logging.info('opener: opening "{0}"'.format(url.full_url))
    else:
        logging.info('opener: opening "{0}"'.format(url))
    opener = request.OpenerDirector()
    if proxy:  # not doing 'is not None', so empty strings don't try and proxy
        # have a proxy option to take it from the envrionment vars
        opener.add_handler(
            request.ProxyHandler({
                'http': proxy,
                'https': proxy
            }))
    else:
        # Empty mapping also disables any environment-configured proxies.
        opener.add_handler(request.ProxyHandler({}))
    opener.add_handler(request.HTTPHandler())
    # NOTE(review): PackratHandler/PackratsHandler presumably serve
    # packrat(s):// URLs — confirm against their definitions.
    opener.add_handler(PackratHandler())
    if hasattr(http.client, 'HTTPSConnection'):
        opener.add_handler(request.HTTPSHandler())  # context=sslContext
        opener.add_handler(PackratsHandler())  # context=sslContext
    opener.add_handler(request.FileHandler())
    opener.add_handler(request.FTPHandler())
    opener.add_handler(request.UnknownHandler())
    try:
        resp = opener.open(url, timeout=WEB_HANDLE_TIMEOUT)
    except request.HTTPError as e:
        raise FileRetrieveException('HTTPError "{0}"'.format(e))
    except request.URLError as e:
        if isinstance(e.reason, socket.timeout):
            raise FileRetrieveException(
                'Request Timeout after {0} seconds'.format(WEB_HANDLE_TIMEOUT))
        raise FileRetrieveException(
            'URLError "{0}" for "{1}" via "{2}"'.format(
                e, url.full_url, proxy))
    except socket.timeout:
        raise FileRetrieveException(
            'Request Timeout after {0} seconds'.format(WEB_HANDLE_TIMEOUT))
    except socket.error as e:
        raise FileRetrieveException('Socket Error "{0}"'.format(e))
    if resp.code is not None:  # FileHandler, FTPHandler do not have a response code
        if resp.code == 404:
            raise FileRetrieveException('File "{0}" not Found'.format(url))
        if resp.code != resp_code:
            raise FileRetrieveException('Invalid Response code "{0}"'.format(
                resp.code))
    return resp
def send(webhook, payload):
    """POST *payload* (JSON-wrapped, form-encoded) to the Slack *webhook*
    URL and return the decoded response text."""
    opener = urlrequest.build_opener(urlrequest.HTTPHandler())
    body = urlencode({"payload": json.dumps(payload)})
    req = urlrequest.Request(webhook)
    response = opener.open(req, body.encode('utf-8')).read()
    return response.decode('utf-8')
def opener_demo():
    """Demonstrate sending a request through a hand-built opener with
    HTTP traffic debugging switched on."""
    # Handler with debuglevel=1 echoes the raw HTTP exchange to stdout.
    debug_handler = request.HTTPHandler(debuglevel=1)
    # build_opener wraps the handler into a usable opener object.
    opener = request.build_opener(debug_handler)
    req = request.Request("http://www.baidu.com")
    # Use the custom opener's open() instead of plain urlopen().
    response = opener.open(req)
def init():
    """Install a process-wide opener with verbose HTTP(S) handlers and an
    in-memory cookie jar (intended as one-time setup)."""
    # NOTE(review): is_init is only read here; nothing in view ever sets
    # it, so the guard may never fire — confirm where it is assigned.
    if is_init:
        return
    httphd = request.HTTPHandler(debuglevel=1)
    httpshd = request.HTTPSHandler(debuglevel=1)
    cookie = request.HTTPCookieProcessor(http.cookiejar.CookieJar())
    opener = request.build_opener(httphd, httpshd, cookie)
    # All subsequent urlopen() calls go through this opener.
    request.install_opener(opener)
def __init__(self):
    """Set up cookie-aware HTTP/HTTPS machinery for talking to the SCU
    course-system login endpoint."""
    self.__login_res = None
    self.__url = 'http://zhjw.scu.edu.cn/loginAction.do'
    self.__xh = None  # student number
    self.__mm = None  # password
    # Cookie jar + processors so the session survives across requests.
    self.__cookie = cookiejar.CookieJar()
    self.__cookie_handler = request.HTTPCookieProcessor(self.__cookie)
    self.__http_handler = request.HTTPHandler()
    self.__https_handler = request.HTTPSHandler()
    self.__agent = request.build_opener(
        self.__cookie_handler, self.__http_handler, self.__https_handler)
def post(payload, slack_post_url):
    """POST *payload* to the Slack webhook and return the decoded
    response body."""
    body = parse.urlencode({"payload": json.dumps(payload)})
    req = request.Request(slack_post_url)
    opener = request.build_opener(request.HTTPHandler())
    response = opener.open(req, body.encode('utf-8'))
    return response.read().decode('utf-8')
def download_file(url, proxy, dest=None, verbose=False):
    """
    Download and save a file specified by url to dest directory,
    routing all HTTP traffic through *proxy* ("host:port").
    Returns the local filename written.
    """
    proxies = {"http": "http://%s" % proxy}
    proxy_support = urllib2.ProxyHandler(proxies)
    opener = urllib2.build_opener(proxy_support,
                                  urllib2.HTTPHandler(debuglevel=1))
    urllib2.install_opener(opener)
    req = urllib2.Request(url)
    # unredirected: header is not propagated on redirects.
    req.add_unredirected_header('User-Agent', 'Mozilla/5.0')
    u = urllib2.urlopen(req)
    scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
    # Derive the local filename from the URL path.
    filename = os.path.basename(path)
    if not filename:
        filename = 'downloaded.file'
    if dest:
        filename = os.path.join(dest, filename)
    with open(filename, 'wb') as f:
        meta = u.info()
        # Python 2 vs 3 header access (getheaders vs get_all).
        meta_func = meta.getheaders if hasattr(
            meta, 'getheaders') else meta.get_all
        meta_length = meta_func("Content-Length")
        file_size = None
        if meta_length:
            file_size = int(meta_length[0])
        if verbose:
            print("... Downloading: {0} (Bytes: {1})".format(url, file_size))
        file_size_dl = 0
        block_sz = 8192
        # Stream the body in 8 KiB chunks, printing progress when verbose.
        while True:
            buffer = u.read(block_sz)
            if not buffer:
                break
            file_size_dl += len(buffer)
            f.write(buffer)
            status = "{0:16}".format(file_size_dl)
            if file_size:
                status += " [{0:6.2f}%]".format(file_size_dl * 100 / file_size)
            # chr(13) = carriage return: rewrite the progress line in place.
            status += chr(13)
            if verbose:
                print(status, end="")
        if verbose:
            print()
    return filename
def __init__(self, config, auth, handler=False, container=False):
    """Create an opener that reuses the authorization cookie.

    A fresh opener is required because the one used to authenticate
    offers no way to strip its auth data afterwards.

    config: instance of the Config class with configuration options
    auth: instance of the MicexAuth class with authentication info
    handler: user's handler class inherited from MicexISSDataHandler
    container: user's container class
    """
    cookie_processor = urllib2.HTTPCookieProcessor(auth.cookie_jar)
    http_handler = urllib2.HTTPHandler(debuglevel=config.debug_level)
    if config.proxy_url:
        self.opener = urllib2.build_opener(
            urllib2.ProxyHandler({"http": config.proxy_url}),
            cookie_processor,
            http_handler)
    else:
        self.opener = urllib2.build_opener(cookie_processor, http_handler)
    urllib2.install_opener(self.opener)
    if handler:
        self.handler = handler(container)
def send(self, message):
    """Post *message* to the configured Slack hook as the configured bot
    user and log the server's reply."""
    # NOTE: attribute name 'hool_url' (sic) matches the rest of the class.
    payload_json = json.dumps({'username': self.bot_name, 'text': message})
    data = urlencode({"payload": payload_json})
    req = urlrequest.Request(self.hool_url)
    opener = urlrequest.build_opener(urlrequest.HTTPHandler())
    with opener.open(req, data.encode('utf-8')) as res:
        self.logging(res.read().decode('utf-8'))
def __init__(
        self, host, root_path, proxy=None, verify_ssl=True, retry_event=None
):
    """CInP client transport setup.

    host: base URL, must start with http:/https: and not end with '/'
    root_path: API root, wrapped in a URI helper
    proxy: optional proxy URL; empty string explicitly disables proxying
    verify_ssl: when False, TLS certificates are not verified
    retry_event: threading-style Event used to cancel retry loops
    """
    # retry_event should be an Event Object, use to cancel retry loops, if the
    # event get's set the retry loop will throw the most recent Exception it
    # ignored
    super().__init__()
    self.retry_event = retry_event
    if not host.startswith(('http:', 'https:')):
        raise ValueError('hostname must start with http(s):')
    if host[-1] == '/':
        raise ValueError('hostname must not end with "/"')
    self.proxy = proxy
    self.host = host
    logging.debug(
        'cinp: new client host: "{0}", root_path: "{1}", via: "{2}"'.
        format(self.host, root_path, self.proxy))
    self.uri = URI(root_path)
    self.opener = request.OpenerDirector()
    if self.proxy:  # not doing 'is not None', so empty strings don't try and proxy
        # have a proxy option to take it from the envrionment vars
        self.opener.add_handler(
            request.ProxyHandler({
                'http': self.proxy,
                'https': self.proxy
            }))
    else:
        # Empty mapping also disables environment-configured proxies.
        self.opener.add_handler(request.ProxyHandler({}))
    self.opener.add_handler(request.HTTPHandler())
    if hasattr(http.client, 'HTTPSConnection'):
        if not verify_ssl:
            self.opener.add_handler(
                request.HTTPSHandler(
                    context=ssl._create_unverified_context()))
        else:
            self.opener.add_handler(request.HTTPSHandler())
    self.opener.add_handler(request.UnknownHandler())
    # Headers sent with every request: agent/version + JSON negotiation.
    self.opener.addheaders = [
        ('User-Agent',
         'python CInP client {0}'.format(__CLIENT_VERSION__)),
        ('Accepts', 'application/json'),
        ('Accept-Charset', 'utf-8'),
        ('CInP-Version', __CINP_VERSION__)
    ]
def iniOpener():
    """Initialize and return an opener backed by a cookie jar, so it can
    replace urlopen while sharing session cookies across requests."""
    jar = cookiejar.CookieJar()
    handlers = (
        request.HTTPCookieProcessor(jar),  # cookie management
        request.HTTPHandler(),             # plain HTTP
        request.HTTPSHandler(),            # HTTPS
    )
    return request.build_opener(*handlers)
def request(self, path, method="GET", data=None):
    """Send an authenticated JSON request to <scheme>://<endpoint><path>.

    Logs in first when a username is set but the session is not yet
    authenticated. Returns parsed JSON when the server replies with
    application/json, otherwise the raw bytes.
    Raises BadRequest (with the server's error body) on HTTPError,
    logging the session out first.
    """
    if self.username and not self.auth.authenticated:
        self.auth.login()
    handlers = []
    url = "%s://%s%s" % (self.scheme, self.endpoint, path)
    handlers.append(request.HTTPHandler(debuglevel=self.debug))
    handlers.append(request.HTTPCookieProcessor(self.auth.cookie_jar))
    if self.scheme == "https":
        # insecure mode skips certificate verification entirely.
        if self.insecure:
            context = ssl._create_unverified_context()
        else:
            context = ssl.create_default_context()
        handlers.append(request.HTTPSHandler(debuglevel=self.debug,
                                             context=context))
    if data is not None:
        encoded_data = data.encode()
    else:
        encoded_data = None
    opener = request.build_opener(*handlers)
    for k, v in self.cookies.items():
        # NOTE(review): this rebinds an `append` attribute on the opener
        # rather than registering the cookie anywhere the request machinery
        # reads — looks like a bug (addheaders?); confirm intent.
        opener.append = (k, v)
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }
    req = request.Request(url, data=encoded_data, headers=headers)
    # Force the HTTP verb regardless of whether a body is present.
    req.get_method = lambda: method
    try:
        resp = opener.open(req)
    except request.HTTPError as e:
        self.auth.logout()
        raise BadRequest(e.read())
    data = resp.read()
    # DEPRECATED: workaround for skydive < 0.17
    # See PR #941
    if method == "DELETE":
        return data
    content_type = resp.headers.get("Content-type").split(";")[0]
    if content_type == "application/json":
        return json.loads(data.decode())
    return data
def htmlcookietxt():
    """Load saved Mozilla-format cookies from cookie.txt and use them to
    fetch a renren profile page, saving the HTML to rsp2.html."""
    jar = cookiejar.MozillaCookieJar()
    # Keep session and expired entries so the saved login still applies.
    jar.load('cookie.txt', ignore_discard=True, ignore_expires=True)
    opener = request.build_opener(
        request.HTTPHandler(),
        request.HTTPSHandler(),
        request.HTTPCookieProcessor(jar))
    urlprive2 = 'http://www.renren.com/290945236/profile'
    page = opener.open(urlprive2).read().decode()
    with open('rsp2.html', 'w') as f:
        f.write(page)