def run(self):
    # Worker loop: fetch the Baidu song link, "listen" to it for 90 seconds,
    # then post the stage/vote request. `pq` is a shared ProxyQueue and `j`
    # a shared error counter.
    global pq, j
    while 1:
        for i in range(2):
            try:
                # proxy = pq.getProxy()
                # print 'get proxy ' + proxy
                # s = Loginer.Session(proxy)
                s = Loginer.Session()
                # s.open('http://play.baidu.com')
                res = Loginer.post(s, baiduSongLink[i])
                con = eval(res.read())
                link = con['data']['songList'][0]['linkinfo']['128'][
                    'songLink'].replace('\\', '')
                # print 'opening link: ' + link
                try:
                    res = s.open(link, time_out=5)
                except:
                    # Fall back to the landing page if the song link fails.
                    res = s.open('http://play.baidu.com', time_out=5)
                # print 'return code ' + res.getcode()
                # Simulate listening before reporting the play.
                time.sleep(90)
                res = Loginer.post(s, baiduStage[i])
                print 'posted ' + str(i) + ' | ' + res.read()
            except:
                j += 1
                if j % 100 == 0:
                    print 'error count: ' + str(j)
def run(self):
    # Polling worker: take a proxy from the shared queue, open the index page
    # once, then fire the poll URLs. After `th` consecutive errors the proxy
    # is dropped and a new one is fetched.
    global proxies
    global th
    global pollnum
    global count
    global lock
    global pcount, f, headers, index, pollUrl
    c = 0
    while 1:
        i = 1
        errc = 0
        try:
            proxy = proxies.getProxy()
            # print 'thread using proxy: ' + proxy
            session = Session(proxy)
            # session = Session()
        except:
            # print 'read proxy or session error'
            # exstr = traceback.format_exc()
            # print exstr
            continue
        while 1:
            try:
                # if i % (pollnum + 1) == 0:
                #     print >> f, proxy
                #     pcount += 1
                #     if pcount % 50 == 0:
                #         f.flush()
                #     break
                # session = Session()
                # res = session.open(pollUrl2 + str(int(time.time() * 1e3)), time_out=10)
                req = HttpRequest()
                req.header = headers
                req.url = index
                res = Loginer.get(session, req)
                for i in range(pollnum):
                    req.url = pollUrl + str(4 + i)
                    res = Loginer.get(session, req)
                    con = res.read()
                    print con
                    # c += 1
                    # i += 1
                    # print con
                errc = 0
                break
                # if c % 30 == 0:
                #     lock.acquire()
                #     count += c
                #     c = 0
                #     print '==========voted ' + str(count) + ' times!'
                #     lock.release()
            except:
                errc += 1
                # print 'error! count: ' + str(errc)
                if errc > th:
                    exstr = traceback.format_exc()
                    # print exstr
                    break
def run(self):
    # Voting worker for xingdiandian.com: fetch the front page to scrape the
    # CSRF token, then post one vote per start id in `arr`, honouring the
    # wait time the server sends back in the response.
    global g_header, th, g_post, g_url, proxies, f, arr
    while 1:
        i = 0
        errc = 0
        try:
            proxy = proxies.getProxy()
            session = Session(proxy)
            # session = Session()
        except:
            print 'read proxy or session error'
            exstr = traceback.format_exc()
            print exstr
        while 1:
            try:
                i += 1
                res = session.open('http://www.xingdiandian.com/', time_out=8)
                con = res.read()
                # Crude scrape: the token sits at a fixed offset before the
                # 'csrf-token' meta attribute.
                csrf = con[con.find('csrf-token') - 52:con.find('csrf-token') - 8]
                # print con
                post = Loginer.HttpRequest()
                post.header = dict(g_header)
                post.header['X-CSRF-Token'] = csrf
                post.url = g_url
                for startid in arr:
                    post.post = g_post + str(startid)
                    wait = 1
                    while wait < 10 and wait != 0:
                        if wait > 1:
                            print 'sleeping ' + str(wait) + '+1 secs before posting again---'
                            time.sleep(wait)
                        res = Loginer.post(session, post)
                        con = res.read()
                        params = con.split(';')
                        wait = int(params[4])
                        print str(startid) + '||' + con
                # print >> f, proxy
                # if i % 5 == 0:
                #     f.flush()
                session.opener.close()
                errc = 0
                break
                # if json.loads(con)['code'] == 200:
                #     print con
                # if json.loads(con)['code'] == 500:
                #     # print 'break and switch new proxy'
                #     break
            except:
                errc += 1
                # print 'error! count: ' + str(errc)
                if errc > th:
                    exstr = traceback.format_exc()
                    session.opener.close()
                    # print exstr
                    break
def run(self):
    # Netease voting worker: keep posting the `netease` vote request on one
    # session, recreating the session every 30 posts. A code 500 response
    # means the session is exhausted, so the outer loop starts a fresh one.
    global th
    while 1:
        i = 0
        errc = 0
        try:
            # proxy = proxies.getProxy()
            # session = Session(proxy)
            session = Session()
        except:
            print 'read proxy or session error'
            exstr = traceback.format_exc()
            print exstr
        while 1:
            try:
                i += 1
                if i % 30 == 0:
                    # session = Session(proxy)
                    session = Session()
                res = Loginer.post(session, netease)
                con = res.read()
                print con
                errc = 0
                if json.loads(con)['code'] == 200:
                    print con
                if json.loads(con)['code'] == 500:
                    # print 'break and switch new proxy'
                    break
            except:
                errc += 1
                # print 'error! count: ' + str(errc)
                if errc > th:
                    exstr = traceback.format_exc()
                    # print exstr
                    break
def proxy(request):
    # Pass-through proxy view: fetch the URL given in ?url= with a
    # browser-like header set and return the raw body.
    url = urllib.unquote(request.GET.get('url'))
    print url
    req = Loginer.HttpRequest()
    req.url = url
    req.header = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4,ja;q=0.2,zh-TW;q=0.2',
        'Connection': 'keep-alive',
        'Host': 'pan.baidu.com',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_90_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.111 Safari/537.36'
    }
    s = Loginer.Session()
    res = Loginer.get(s, req)
    con = res.read()
    return HttpResponse(con)
def refresh(request):
    # Pull the latest vote counts from the upstream ranking API and store one
    # PollStat row per contestant, stamped with the fetch time.
    url = 'http://rs.ewang.com/fengyun/hou.php?bangid=0001&jiangid=0010&time=1426870745&userid=000023089&sig=16cc4ac58060ca1a34531d5058b4d488'
    s = Loginer.Session()
    res = s.open(url)
    data = eval(res.read())
    now = datetime.datetime.now()   # snapshot timestamp for this refresh
    for d in data['data']:
        models.PollStat.objects.create(count=int(d['piao']), name=d['name'], time=now)
    return HttpResponse('success')
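# Sketch of the PollStat model that refresh() assumes; this is inferred only
# from the create() call above, not taken from the project's actual models.py,
# so field types and lengths are guesses.
#
# class PollStat(models.Model):
#     name = models.CharField(max_length=100)   # contestant name, d['name']
#     count = models.IntegerField()             # vote count, int(d['piao'])
#     time = models.DateTimeField()             # snapshot timestamp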
def run(self):
    # Forum-post worker: replay every prepared POST body in `arr` over one
    # session, then keep looping. Unlike the other workers there is no break
    # on success, so the posts repeat until the error threshold is hit.
    global g_header, th, g_post, g_url, proxies, f, arr
    while 1:
        i = 0
        errc = 0
        try:
            # proxy = proxies.getProxy()
            # session = Session(proxy)
            session = Session()
        except:
            print 'read proxy or session error'
            exstr = traceback.format_exc()
            print exstr
        while 1:
            try:
                req = Loginer.HttpRequest()
                req.header = g_header
                req.url = g_url
                for post in arr:
                    req.post = post
                    res = Loginer.post(session, req)
                    print res.read()
                # print >> f, proxy
                # if i % 5 == 0:
                #     f.flush()
                session.opener.close()
                # if json.loads(con)['code'] == 200:
                #     print con
                # if json.loads(con)['code'] == 500:
                #     # print 'break and switch new proxy'
                #     break
            except:
                errc += 1
                # print 'error! count: ' + str(errc)
                if errc > th:
                    exstr = traceback.format_exc()
                    session.opener.close()
                    # print exstr
                    break
    # Tail of the Tieba sign-building helper (its header and the line that
    # computes `sign` are not part of this excerpt): drop the client
    # measurement fields, set the post content, then concatenate the sorted
    # key=value pairs plus the `code` suffix.
    if r.get('m_cost'):
        del r['m_cost']
    if r.get('m_size_u'):
        del r['m_size_u']
    if r.get('m_size_d'):
        del r['m_size_d']
    r['content'] = content.decode('utf8').encode('utf8')
    keys = r.keys()
    res = ''
    keys.sort()
    for k in keys:
        res += k + '=' + urllib.unquote(r[k])
    res += code
    r['sign'] = str(sign)
    return urllib.urlencode(r).replace('%25', '%')


baidu1 = Loginer.HttpRequest()
baidu1.url = 'http://c.tieba.baidu.com/c/c/post/add'
baidu1.header = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
    'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
    'Host': 'c.tieba.baidu.com',
    'sid': '6107c188d6407cb1',
    'Connection': 'close',
    'net':
import urllib, urllib2, json, base64, time, binascii
import cookielib
import re
import hashlib
from Loginer import Session
import Loginer
import threading, traceback
import Queue
from ProxyQueue import ProxyQueue

netease = Loginer.HttpRequest()
netease.post = urllib.urlencode({'id': 16, 'type': 'geshou'})
netease.url = 'http://newyear.music.163.com/web/activity/vote/toupiao'
netease.header = {
    'Accept': 'application/json, text/javascript, */*; q=0.01',
    'Accept-Language': 'en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4,ja;q=0.2,zh-TW;q=0.2',
    'Connection': 'keep-alive',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Host': 'newyear.music.163.com',
    'Origin': 'http://newyear.music.163.com',
    'Referer': 'http://newyear.music.163.com/activity/vote/w/singer/2',
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_90_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.111 Safari/537.36',
    'X-Requested-With': 'XMLHttpRequest'
}

proxies = ProxyQueue()
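# Launcher sketch, not part of the original source: the run() methods above
# appear to be threading.Thread.run overrides that share these module globals.
# The helper name, its parameters, and the thread count are assumptions made
# only to illustrate how netease/proxies/th would be consumed.
def _start_voters(thread_cls, n=5):
    # thread_cls is assumed to be a threading.Thread subclass whose run()
    # is one of the voting loops above.
    workers = []
    for _ in range(n):
        t = thread_cls()
        t.setDaemon(True)   # let the process exit even if voters hang
        t.start()
        workers.append(t)
    return workers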
import urllib, urllib2, json, base64, rsa, time, binascii, random
import cookielib
import re, threading
import hashlib
import Loginer
import math, traceback
import ProxyQueue

# Prepared songlink requests; the four differ only in their songIds value.
baiduSongLink4 = Loginer.HttpRequest()
baiduSongLink4.url = 'http://play.baidu.com/data/music/songlink'
baiduSongLink4.post = 'songIds=65720837&hq=1&type=m4a%2Cmp3&rate=&pt=0&flag=-1&s2p=-1&prerate=-1&bwt=-1&dur=-1&bat=-1&bp=-1&pos=-1&auto=-1'

baiduSongLink1 = Loginer.HttpRequest()
baiduSongLink1.url = 'http://play.baidu.com/data/music/songlink'
baiduSongLink1.post = 'songIds=85800595&hq=1&type=m4a%2Cmp3&rate=&pt=0&flag=-1&s2p=-1&prerate=-1&bwt=-1&dur=-1&bat=-1&bp=-1&pos=-1&auto=-1'

baiduSongLink2 = Loginer.HttpRequest()
baiduSongLink2.url = 'http://play.baidu.com/data/music/songlink'
baiduSongLink2.post = 'songIds=131445055&hq=1&type=m4a%2Cmp3&rate=&pt=0&flag=-1&s2p=-1&prerate=-1&bwt=-1&dur=-1&bat=-1&bp=-1&pos=-1&auto=-1'

baiduSongLink3 = Loginer.HttpRequest()
baiduSongLink3.url = 'http://play.baidu.com/data/music/songlink'
baiduSongLink3.post = 'songIds=91009739&hq=1&type=m4a%2Cmp3&rate=&pt=0&flag=-1&s2p=-1&prerate=-1&bwt=-1&dur=-1&bat=-1&bp=-1&pos=-1&auto=-1'

baiduSongLink = [baiduSongLink1, baiduSongLink2, baiduSongLink3, baiduSongLink4]
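# Construction sketch, an assumption rather than original code: since the four
# requests above differ only in songIds, an equivalent list can be built in a
# loop. The helper name is hypothetical; the explicit objects above remain the
# ones the workers actually use.
def _build_songlink_requests(song_ids=('85800595', '131445055', '91009739', '65720837')):
    reqs = []
    for sid in song_ids:
        r = Loginer.HttpRequest()
        r.url = 'http://play.baidu.com/data/music/songlink'
        r.post = ('songIds=' + sid +
                  '&hq=1&type=m4a%2Cmp3&rate=&pt=0&flag=-1&s2p=-1&prerate=-1'
                  '&bwt=-1&dur=-1&bat=-1&bp=-1&pos=-1&auto=-1')
        reqs.append(r)
    return reqs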