def get_response(self, resource, query_string):
    """GET a response from a resource, retrying while it returns 404.

    Arguments:
        resource: string resource to GET.
        query_string: string query-string.
    Returns:
        tuple (dict request() response, dict json_response content).
    """
    response = request('GET', resource, query_string)
    # Poll for up to 50 seconds while the resource is still missing.
    retries_left = 50
    while retries_left and response['http_status'] == '404 Not Found':
        time.sleep(1)
        response = request('GET', resource, query_string)
        retries_left -= 1
    if response['http_status'] == '404 Not Found':
        self.fail('Response was 404 Not Found for GET ' + resource + \
                  '?' + query_string)
    return response, json.loads(response['content'].decode())
def bbs_task(name, teaching_task_id, course_id, forum_id, dis_cnt, qst_href):
    """Post *dis_cnt* discussion questions to the course BBS forum.

    Questions are picked at random from the candidate pages in *qst_href*
    and scraped with extract_bbs(). Returns forum_id.
    (course_id is accepted but unused here.)
    """
    # Pick dis_cnt random question pages and scrape (title, content) pairs.
    from random import choice
    hrefs = [choice(qst_href) for _ in range(dis_cnt)]
    qst = [extract_bbs(href) for href in hrefs]
    print('《{}》课程讨论问题:'.format(name))
    for i in range(dis_cnt):
        post_url = '/student/bbs/manageDiscuss.do?{}&method=toAdd&teachingTaskId={}&forumId={}&' \
            'isModerator=false'.format(date(), teaching_task_id, forum_id)
        request(post_url)  # fetch the "new discussion" page first (primes the session)
        title, content = qst[i][0], qst[i][1].replace('\n', '')
        data = {
            'forumId': forum_id,
            'teachingTaskId': teaching_task_id,
            'isModerator': 'false',
            'topic': title,
            'content': content,
        }
        dispatch = '/student/bbs/manageDiscuss.do?{}&method=add'.format(date())
        request(dispatch, data)  # this endpoint answers with a 302 redirect; posting alone suffices
        print('\t标题: {}\t内容: {}'.format(title, content))
    return forum_id
def login(self):
    """Fetch the main page (root directory) of the tkkc website, then log in
    so the JSESSIONID cookie is set on login_header and captcha_header.

    Retries up to 5 times on a wrong captcha; exits the process when the
    account does not exist or the auth server is unreachable.
    """
    texts = request('').text
    login_cnt = 5
    while login_cnt > 0:
        # The hidden "announce" field name and the form action URL are
        # regenerated on every page load, so re-scrape them each attempt.
        annonuce = re.findall(
            '<input type="hidden" name="(.*?)" value="announce"', texts,
            re.S)[0]
        login_url = re.findall('action="(.*?)"', texts, re.S)[0]
        captcha_code = request_captcha()
        data = {
            annonuce: 'announce',
            'loginMethod': annonuce + 'button',
            'logname': self.number,
            'password': self.pwd,
            'randomCode': captcha_code,
        }
        texts = request(login_url, data).text  # response carries the loginMsg cookie
        login_cnt -= 1
        if texts.find('验证码错误') != -1:
            # Wrong captcha: just retry with a fresh one.
            continue
        elif texts.find('密码不正确') != -1:
            # Wrong password: re-prompt for credentials.
            self.collate('pwd')
        elif texts.find('服务器未建立连接') != -1:
            print('用户不存在或与身份验证服务器未建立连接!')
            exit()
        else:
            print("login success! Let's set sail!")
            break
def __LoadUserOrders__(self, cur, skchecked=False):
    #
    # __LoadUserOrders__ : download all of the user's orders for one currency.
    #
    # On success the orders land in User.Book.Orders[$currency$] (also exposed
    # via self.Return); the method's return value is an MCXNOW_ERROR code.
    # skchecked=True skips re-validating the session's secret key.
    #
    self.Return=None
    if self.Trading:
        if not(skchecked):
            # Re-fetch the account page to confirm the secret key is still valid.
            result=request(self.Session, 'get', MCXNOW_DOMAIN+MCXNOW_ACTION["useraccount"])
            if result==0:
                return MCXNOW_ERROR['HTTP Error']
            else:
                texthtml=result.text
                if self.__CheckSecretKey__(texthtml):
                    skOk=True
                else:
                    skOk=False
        else:
            skOk=True
        if skOk:
            if cur in MCXNOW_TRADEDCURRENCY:
                result=request(self.Session, 'get', MCXNOW_DOMAIN+MCXNOW_ACTION["info"]+"cur="+cur)
                if result==0:
                    return MCXNOW_ERROR['HTTP Error']
                else:
                    # The endpoint answers with ad-hoc XML; fields are pulled
                    # out by raw string splitting, so tag order matters.
                    textxml=result.text
                    if ('<orders>')in textxml:
                        orderslist=textxml.split('<orders>')[1].split('</orders>')[0]
                        orders=orderslist.split('<o>')
                        self.User.Book.ClearCurrencyAllOrders(cur)
                        # orders[0] is the text before the first <o>; skip it.
                        for i in range(1,len(orders)):
                            order_id=int(orders[i].split('<id>')[1].split('</id>')[0])
                            order_confirm=int(orders[i].split('<e>')[1].split('</e>')[0])
                            order_time=int(orders[i].split('<t>')[1].split('</t>')[0])  # unix timestamp
                            order_type=int(orders[i].split('<b>')[1].split('</b>')[0])
                            order_amount=float(orders[i].split('<a1>')[1].split('</a1>')[0])
                            order_price=float(orders[i].split('<p>')[1].split('</p>')[0])
                            self.User.Book.AddOrder(cur, order_id, order_type,order_time, order_confirm, order_amount, order_price)
                    else:
                        return MCXNOW_ERROR['HTTP Error']
                    if ('<base_bal>')in textxml:
                        base_bal=float(textxml.split('<base_bal>')[1].split('</base_bal>')[0])
                        self.User.Book.AddBaseBalance(cur, base_bal)
                    else:
                        return MCXNOW_ERROR['HTTP Error']
                    if ('<cur_bal>')in textxml:
                        cur_bal=float(textxml.split('<cur_bal>')[1].split('</cur_bal>')[0])
                        self.User.Book.AddCurrencyBalance(cur, cur_bal)
                    else:
                        return MCXNOW_ERROR['HTTP Error']
                    self.Return=self.User.Book.Orders[cur]
                    return MCXNOW_ERROR['Ok']
            else:
                return MCXNOW_ERROR['Unknown Currency']
        else:
            # Secret-key check failed: the session has expired.
            self.Trading=False
            return MCXNOW_ERROR['Session ended']
    else:
        return MCXNOW_ERROR['Anonymous connexion']
def ready_steup(url):
    """Yield {'area', 'url'} dicts for every crawlable listing area under *url*.

    Areas whose total result count is <= 3000 are yielded directly; larger
    areas are expanded one level into their sub-district pages (the site
    caps visible results, so big areas must be split).
    """
    group = []  # NOTE(review): never used — kept for byte-compatibility
    html = request(url)
    if html:
        soup = BeautifulSoup(html, 'lxml')
        # Second <dl> under div.position holds the district links.
        for item in soup.find('div', class_='position').find_all('dl')[1].find_all(
                'div')[1].find_all('a'):
            area = item.text
            second_url = head_url + item['href']
            second_html = request(second_url)
            if second_html:
                second_soup = BeautifulSoup(second_html, 'lxml')
                total_room = second_soup.find(
                    'div', class_='resultDes').find('h2').find('span').text
                if int(total_room) <= 3000:
                    goal_url = {
                        'area': area,
                        'url': second_url,
                    }
                    yield goal_url
                else:
                    # Too many results: descend into the sub-area links.
                    for a in second_soup.find('div', class_='position').find_all(
                            'dl')[1].find_all('div')[2].find_all('a'):
                        goal_url = {
                            'area': area,
                            'url': head_url + a['href'],
                        }
                        yield goal_url
def download_css_files(self):
    """Download and parse the SVG-text CSS files referenced by the saved page.

    Reads self.file_path, finds every <link type="text/css"> whose href
    contains 'svgtextcss', downloads it into ./css/ and parses it.
    """
    # BUGFIX: the original opened the file 'rb+' and never closed it; a
    # context manager (and plain read-only mode) fixes the handle leak.
    with open(self.file_path, 'rb') as source:
        text = source.read().decode('utf-8')
    soup = BeautifulSoup(text, "lxml")
    # find css files
    css_list = soup.find_all(attrs={"type": "text/css"})
    for css in css_list:
        if 'svgtextcss' in css.get('href'):
            url = 'http:' + css.get('href')
            request.request(url, './css/', 'css')
            self.parse_css_file('./css/' + utils.get_file_name(url))
    print("Css files have been downloaded and parsed.")
def test_content(self):
    """After the crawl is aborted, /result must stop growing."""
    self.get.wait_for_passing_content('/status', self.query_string,
                                      self._mk_response_test(['Aborted']))

    def result_count():
        # Number of entries currently reported by /result.
        resp = request('GET', '/result', self.query_string)
        return len(json.loads(resp['content'].decode()))

    # Test that total results do not increase over 3 seconds.
    count_before = result_count()
    time.sleep(3)
    self.assertEqual(count_before, result_count())
def __init__(self, path):
    """Parse the problem file at *path* and greedily pre-fill the caches.

    File layout (Hash Code 'streaming videos' style): header "V E R C X",
    one line of video sizes, E endpoint blocks, then R request lines.
    """
    f = open(path)
    self.V, self.E, self.R, self.C, self.X = f.readline().split(' ')
    self.V, self.E, self.R, self.C, self.X = int(self.V), int(self.E), int(
        self.R), int(self.C), int(self.X)
    self.videos = []
    self.vidreq = [(0, 0)] * self.V  # (id vid, nbreq)
    self.endPoints = []
    self.requests = []
    self.caches = []
    # C caches, each with capacity X.
    for i in range(self.C):
        self.caches.append(cache.cache(self.X))
    vi = f.readline().split(' ')
    for i in range(len(vi)):
        self.videos.append(video.video(int(vi[i]), i, 0))
    for i in range(self.E):
        # Each endpoint: datacenter latency Ld and K connected caches.
        Ld, K = f.readline().split(' ')
        Ld, K = int(Ld), int(K)
        e = endPoint.endPoint(Ld)
        for j in range(K):
            c, Lc = f.readline().split(' ')
            e.addCacheLat(self.caches[int(c)], int(Lc))
        self.endPoints.append(e)
    for i in range(self.R):
        Rv, Re, Rn = f.readline().split(' ')
        self.requests.append(request.request(int(Rv), int(Re), int(Rn)))
    # Aggregate request counts onto each video.
    for r in self.requests:
        self.videos[r.idVid].req += r.nbCall
    # Drop videos nobody requested (manual while-loop because we pop in place).
    Length = len(self.videos)
    k = 0
    while k < Length:
        if self.videos[k].req == 0:
            self.videos.pop(k)
            Length -= 1
        else:
            k += 1
    # Best value first: smallest size-per-request ratio.
    self.videos = sorted(self.videos, key=lambda v: v.size / v.req)
    for e in self.endPoints:
        e.savingLat()
    # Greedy fill: give each connected cache the cheapest videos that fit.
    for e in self.endPoints:
        for c in e.list:
            k = 0
            while k < Length:
                if c[0].capacity >= c[0].size + self.videos[k].size:
                    c[0].addVideo(self.videos.pop(k))
                    Length -= 1
                else:
                    k += 1
    f.close()
def __init__(self, parent, main):
    # Inventory widget (Python 2 / PyQt4): fetch the full inventory from the
    # backend and display it in a three-column tree.
    QtGui.QWidget.__init__(self)
    mainLayout = QtGui.QVBoxLayout()
    mainLayout.setContentsMargins(0, 0, 0, 0)
    mainLayout.setSpacing(0)
    self.setLayout(mainLayout)
    self.list = QtGui.QTreeWidget(self)
    self.list.setHeaderLabels(['Name', 'Description', 'Quantity'])
    # Query the backend; response is a list of row dicts.
    params = urllib.urlencode({'type_of_query': 'total_inventory'})
    data = request.request(params)
    print data
    for i in range(len(data)):
        item = QtGui.QTreeWidgetItem(
            [data[i]['name'], data[i]['description'], data[i]['quantity']])
        self.list.addTopLevelItem(item)
    fileBox = QtGui.QHBoxLayout()
    mainLayout.addLayout(fileBox, 0)
    mainLayout.addWidget(self.list, 200)
def convXYtoNode(logFile):
    """Convert XY-coordinate benchmark lines from *logFile* into node-id
    request objects.

    Each line holds whitespace-separated numbers; fields 0-3 are
    (srcBoard, srcCol, dstBoard, dstCol). Board 1 numbers columns 0-7
    left-to-right (nodes 0-7); board 2 numbers them right-to-left
    (nodes 8-15). Field 6 is scaled by config.EccToOcc.
    Returns the list of request objects.
    """
    benchmark = []
    with open(logFile, "r") as bmread:
        for line in bmread:
            benchmark.append([int(float(i)) for i in line.split()])

    newBenchmark = []
    # Node map:
    # (1 0)=0; (1 1)=1; ... (1 7)=7
    # (2 7)=8; (2 6)=9; ... (2 0)=15
    for row in benchmark:
        # Rows that reference board 0 on either side are skipped entirely.
        if row[0] != 0 and row[2] != 0:
            # NOTE(review): a board id other than 1/2 would leave newRow
            # unbound — assumed the log only ever contains boards 0-2.
            if row[0] == 1 and row[2] == 1:
                newRow = [(row[0] - 1) * 8 + row[1], (row[2] - 1) * 8 + row[3]]
            elif row[0] == 2 and row[2] == 2:
                newRow = [(row[0] - 1) * 8 + (7 - row[1]), (row[2] - 1) * 8 + (7 - row[3])]
            elif row[0] == 1 and row[2] == 2:
                newRow = [(row[0] - 1) * 8 + row[1], (row[2] - 1) * 8 + (7 - row[3])]
            elif row[0] == 2 and row[2] == 1:
                newRow = [(row[0] - 1) * 8 + (7 - row[1]), (row[2] - 1) * 8 + row[3]]
            newRow.extend(row[4:6])
            newRow.append(row[6] * config.EccToOcc)
            newBenchmark.append(request(newRow))
    # FIX: removed dead locals (`newConfiguration`, `requestsHolder`) and
    # replaced bitwise `&` between comparisons with boolean `and`.
    return newBenchmark
def search(x,y):
    # Battleship helper: probe the four orthogonal neighbours of (x, y) on a
    # 1-indexed 10x10 board. `lovit` tracks cells already shot, `xoy` holds
    # the four direction offsets. Recurses along every HIT to chase the ship.
    global t      # total shots that hit
    global aux
    global m      # hits in the current chase
    for i in range(0, 4):
        cx, cy = xoy[i]
        newx = x + cx
        newy = y + cy
        if (newx > 0) and (newx < 11) and (newy > 0) and (newy < 11) and ((newx, newy) not in lovit):
            print(newx,newy)
            lovit.append((newx, newy))
            e=request.request(newx,newy)
            if e=="HIT":
                t+=1
                m+=1
                hit(newx, newy)
                search(newx, newy)
            elif e=="DESTROYED":
                t+=1
                print('DESTROYED')
                hit(newx, newy)
                return
def SendBuyOrder(self, cur=None, amt=0, price=0, confirm=0):
    #
    # SendBuyOrder : send a buy order.
    # Returns 1 on success, 0 on error; on error self.ErrorCode holds the code.
    #
    # if confirm=0: self.Return=[$id$,1,$time$,$amt$,$price$]
    # else :        self.Return=[None,1,$time$,$amt$,$price$]
    #               (no id because this order can be executed already)
    #
    if self.Trading:
        # Re-validate the session's secret key before placing the order.
        result = request(self.Session, 'get',
                         MCXNOW_DOMAIN + MCXNOW_ACTION["useraccount"])
        if result == 0:
            self.ErrorCode = MCXNOW_ERROR['HTTP Error']
            return 0
        else:
            texthtml = result.text
            if self.__CheckSecretKey__(texthtml):
                # Type 1 = buy; delegate the actual submission.
                self.ErrorCode = self.__SendAOrder__(
                    cur, 1, amt, price, confirm, True)
                if self.ErrorCode == 1:
                    return 1
                else:
                    return 0
            else:
                self.ErrorCode = MCXNOW_ERROR['Session ended']
                return 0
    else:
        self.ErrorCode = MCXNOW_ERROR['Anonymous connexion']
        return 0
def accept_handler(event: accept):
    # Paxos acceptor: handle an incoming ACCEPT message.
    global pid
    global acceptNum
    global acceptVal
    # Accept only proposals for the *next* block whose ballot is at least
    # as high as what we already promised.
    if event.ballot.depth == block_chain.depth + 1 and event.ballot >= ballotNum:
        acceptNum = event.ballot
        acceptVal = event.myVal
        sender = threading.Thread(
            target=send_event,
            args=[accepted(event.ballot, event.myVal), event.ballot.proposer])
        sender.daemon = True
        sender.start()
    elif event.ballot.depth <= block_chain.depth:
        # Proposer is behind us: send our chain back so it can catch up.
        logger.info("sender was out of date, sending them a stale message")
        sender = threading.Thread(
            target=send_event,
            args=[stale(ballotNum, block_chain), event.ballot.proposer])
        sender.daemon = True
        sender.start()
    elif event.ballot.depth > block_chain.depth + 1:
        # We are behind: ask the proposer for the missing blocks.
        logger.info("out of date: sending request")
        requester = threading.Thread(
            target=send_event,
            args=[request(ballotNum, pid), event.ballot.proposer])
        requester.daemon = True
        requester.start()
def __init__(self, rName, rType=sortType.HOT, rLimit=25):
    """Fetch a subreddit listing.

    rName: subreddit name ("" means the front page); rType: sort order;
    rLimit: number of entries, sent as a query parameter when not 25.
    """
    rTypeTxt = typeStr(rType)
    lg.debug("listing::__init__ " + rTypeTxt + " " + str(rLimit))
    self.rName = rName
    self.rType = rType
    if self.rName == "":
        self.rName = "front"
    self.currLine = 0
    rLimitTxt = None
    # BUGFIX: the original used `rLimit is not 25` / `rName is ""`; identity
    # comparison with literals only works by CPython interning accident
    # (and is a SyntaxWarning since 3.8). Compare by value.
    if rLimit != 25:
        rLimitTxt = "limit=" + str(rLimit)
    # TODO add other options
    if rName == "":
        rTxt = "/"
    else:
        rTxt = "/r/" + rName + "/" + rTypeTxt + "/"
    params = rLimitTxt  # add other options to params
    self.r = request(rTxt, params)
    # I should check if request went well
    self.ok = self.r.ok
    self.status = self.r.status
    if self.ok:
        self.json = self.r.json
        self.__fetchLinks__()
def run():
    """Refresh the `champions` table from the Riot static-data API.

    Truncates the table, then re-inserts every champion with its icon,
    loading-screen and splash image URLs built from the ddragon CDN.
    """
    database = get_connection(DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME,
                              DATABASE_PASSWORD, DATABASE_NAME)
    insert_values = []
    response = request(API_URL_CHAMPIONS, 'global')
    champions_dict = response['data']
    version = response['version']  # ddragon CDN version for the icon path
    for champion in champions_dict.values():
        insert_values.append(u"({}, {}, {}, {}, {})".format(
            champion['id'],
            database.escape(champion['name']),
            database.escape('http://ddragon.leagueoflegends.com/cdn/{}/img/champion/{}'.format(version, champion['image']['full'])),
            database.escape('http://ddragon.leagueoflegends.com/cdn/img/champion/loading/' + champion['image']['full'].replace('.png', '_0.jpg')),
            database.escape('http://ddragon.leagueoflegends.com/cdn/img/champion/splash/' + champion['image']['full'].replace('.png', '_0.jpg')),
        ))
    insert_query = u'''
        INSERT INTO champions (id, name, image_icon_url, image_loading_url, image_splash_url)
        VALUES {}
    '''.format(u','.join(insert_values))
    database.execute('TRUNCATE TABLE champions')
    database.execute(insert_query)
def proposal_handler(event: proposal):
    # Paxos acceptor: handle an incoming PREPARE/PROPOSAL message.
    global ballotNum
    global pid
    logger.info("received proposal with following value: %s",
                str(event.ballot))
    # Promise only for the next block and only if the ballot is >= the
    # highest one we have already promised.
    if event.ballot.depth == block_chain.depth + 1 and event.ballot >= ballotNum:
        ballotNum = event.ballot
        sender = threading.Thread(target=send_event, args=[
            promise(event.ballot, acceptNum, acceptVal),
            event.ballot.proposer
        ])
        sender.daemon = True
        sender.start()
    elif event.ballot.depth <= block_chain.depth:
        # Proposer is behind us: send our chain back so it can catch up.
        logger.info("sender was out of date, sending them a stale message")
        sender = threading.Thread(
            target=send_event,
            args=[stale(ballotNum, block_chain), event.ballot.proposer])
        sender.daemon = True
        sender.start()
    elif event.ballot.depth > block_chain.depth + 1:
        # We are behind: ask the proposer for the missing blocks.
        logger.info("out of date: sending request")
        requester = threading.Thread(
            target=send_event,
            args=[request(ballotNum, pid), event.ballot.proposer])
        requester.daemon = True
        requester.start()
def __LoadUserDetails__(self):
    #
    # __LoadUserDetails__ : download all info about the user's account.
    # Account info lands in User.Details; returns an MCXNOW_ERROR code.
    #
    self.Return=None
    if self.Trading:
        result=request(self.Session, 'get', MCXNOW_DOMAIN+MCXNOW_ACTION["useraccount"])
        if result==0:
            return MCXNOW_ERROR['HTTP Error']
        else:
            texthtml=result.text.encode('utf-8')
            if self.__CheckSecretKey__(texthtml):
                for cur in MCXNOW_ALLCURRENCY:
                    if cur == 'PPC':
                        self.User.Details.Funds[cur].Balance=float(texthtml\
                            .split("PPC</tla><balavail>")[1].split("</baltotal><btctotal>")[0].split("<")[0])
                    elif cur == 'BTC':
                        # NOTE(review): this branch splits on the *PPC* markers,
                        # identical to the branch above — almost certainly a
                        # copy-paste bug; the BTC balance is probably wrong.
                        # Preserved as written; confirm against the page markup.
                        self.User.Details.Funds[cur].Balance=float(texthtml\
                            .split("PPC</tla><balavail>")[1].split("</baltotal><btctotal>")[0].split("<")[0])
                    # Fields the page does not provide are zeroed for every
                    # currency.
                    self.User.Details.Funds[cur].DepositAddress=''
                    self.User.Details.Funds[cur].MinimumDeposit=0.0
                    self.User.Details.Funds[cur].WithdrawFee=0.0
                    self.User.Details.Funds[cur].DepositConfirmations=0.0
                    self.User.Details.Funds[cur].Incoming=0.0
                self.Return=self.User.Details
                return MCXNOW_ERROR['Ok']
            else:
                self.Trading=False
                return MCXNOW_ERROR['Session ended']
    else:
        return MCXNOW_ERROR['Anonymous connexion']
def ui(self):
    """Interactive admin loop.

    Menu: 1 = load a "userid password" file into the credential dict,
    2 = print current connections, 3 = print known user ids,
    4 = leave the loop (start the server).
    """
    global reqt
    reqt = request()
    while (1):
        _ = input(
            "Enter 1 to update, 2 to read list of connections,3 to get list of user id's,4 to start server"
        )
        if (_ == '1'):
            filename = input("input file name:")
            print("Updating list...")
            with open(filename, "r+") as f:
                for n in f:
                    # Split each line on the *last* space so user ids may
                    # themselves contain spaces: "userid password\n".
                    for i in range(len(n) - 1, 0, -1):
                        if n[i] == ' ':
                            usid = n[:i]
                            psw = n[i + 1:len(n) - 1]
                            self.update_dict(usid, psw)
                            break
                f.close()  # redundant inside `with`; kept as-is
            # NOTE(review): hard-coded test credential.
            self.update_dict('123', '123')
        elif (_ == '2'):
            print(self.list_of_conns)
        elif (_ == '3'):
            print(self.uid)
        elif (_ == '4'):
            break
def get_market_cap(ticker):
    """Return the market cap (equities) or net assets (funds) for *ticker*.

    Scrapes the Yahoo Finance quote page. Returns 0 when neither field is
    present or the value is 'N/A'; otherwise a float in dollars, expanding
    magnitude suffixes (e.g. 'B') via CONVERTER.
    """
    url = YAHOO_QUOTE.format(TICKER=ticker)
    page = request(CONFIG, url)
    page = BeautifulSoup(page.content, features=FEATURES)
    # FIX: each label was looked up twice (once in the condition, once in
    # the body); look up once and fall through.
    span = page.find("span", text="Net Assets") or page.find("span", text="Market Cap")
    if span is None:
        return 0
    value = span.findNext("span").text
    if value[-1] in NUMBERS:
        # Plain number like "123,456" — no magnitude suffix.
        value = value.replace(',', '')
        return float(value)
    if value == 'N/A':
        return 0
    # Trailing suffix (e.g. "M"/"B"/"T") scales the number.
    return float(value[:-1]) * CONVERTER[value[-1]]
def __init__(self, uName):
    """Fetch the overview listing for the reddit user *uName*."""
    lg.debug("user::__init__ " + uName)
    self.uName = uName
    # Overview endpoint for this user.
    self.rOverview = request('user/' + uName + '/overview/')
def __LoadChat__(self):
    #
    # __LoadChat__ : download the whole chat.
    # Messages are stored in Public.Chat; returns an MCXNOW_ERROR code.
    #
    self.Return=None
    result=request(self.Session, 'get', MCXNOW_DOMAIN+MCXNOW_ACTION["chat"])
    if result==0:
        return MCXNOW_ERROR['HTTP Error']
    else:
        textxml=result.text
        if '<doc>' in textxml:
            # Messages: one <c> element per entry carrying
            # <n>user</n>, <i>id</i> and <t>text</t>.
            self.Public.Chat.ClearChat()
            messages=textxml.split('<c>')
            # messages[0] is the text before the first <c>; skip it.
            for i in range(1, len(messages)):
                message_user=messages[i].split('<n>')[1].split('</n>')[0]
                message_id=messages[i].split('<i>')[1].split('</i>')[0]
                message_text=messages[i].split('<t>')[1].split('</t>')[0]
                self.Public.Chat.AddMessage(message_id, message_user, message_text)
            self.Return=self.Public
            return MCXNOW_ERROR['Ok']
        else:
            return MCXNOW_ERROR['Unknown']
def SendBuyOrder(self, cur=None, amt=0, price=0 , confirm=0):
    #
    # SendBuyOrder : send a buy order.
    # Returns 1 on success, 0 on error; on error self.ErrorCode holds the code.
    #
    # if confirm=0: self.Return=[$id$,1,$time$,$amt$,$price$]
    # else :        self.Return=[None,1,$time$,$amt$,$price$]
    #               (no id because this order can be executed already)
    #
    if self.Trading:
        # Re-validate the session's secret key before placing the order.
        result=request(self.Session, 'get', MCXNOW_DOMAIN+MCXNOW_ACTION["useraccount"])
        if result==0:
            self.ErrorCode=MCXNOW_ERROR['HTTP Error']
            return 0
        else:
            texthtml=result.text
            if self.__CheckSecretKey__(texthtml):
                # Type 1 = buy; delegate the actual submission.
                self.ErrorCode=self.__SendAOrder__(cur, 1, amt, price, confirm, True)
                if self.ErrorCode==1:
                    return 1
                else:
                    return 0
            else:
                self.ErrorCode=MCXNOW_ERROR['Session ended']
                return 0
    else:
        self.ErrorCode=MCXNOW_ERROR['Anonymous connexion']
        return 0
def parse_page(area, url):
    """Parse one Lianjia second-hand-housing listing page and persist every
    house on it to MongoDB via save_to_mongo()."""
    html = request(url)
    if html:
        doc = pq(html)
        items = doc('body > div.content > div.leftContent > ul > li').items()
        for item in items:
            # houseInfo text: "community | layout | size | ... | elevator".
            all_conflg = item.find('.houseInfo').text().split(' | ')
            if all_conflg[1].find('别墅') != -1:
                # Villa rows carry an extra field; drop it so the remaining
                # indexes line up with normal listings.
                del (all_conflg[1])
            # Normalise the elevator field to a single character.
            if all_conflg[-1].find('有电梯') != -1:
                all_conflg[-1] = '有'
            else:
                all_conflg[-1] = '无'
            page_url = item.find('a.img').attr('href')
            title = item.find('.title').text()
            total_price = item.find('.totalPrice span').text()
            unit_price = item.find('.unitPrice').text()
            loaction = item.find('.positionInfo a').text()
            data = {
                '标题': title,
                'URL': page_url,
                '所在区': area,
                '所在地': loaction,
                '所在小区': all_conflg[0],
                '规格': all_conflg[1],
                '面积': all_conflg[2],
                '电梯': all_conflg[-1],
                '总价': total_price,
                '单价': unit_price,
                '查重url': url,
            }
            save_to_mongo(data)
            # Throttle to avoid being banned.
            # NOTE(review): placement inside the item loop is a best guess —
            # the original file's indentation was lost.
            time.sleep(3)
def __SendChatMsg__(self, msg, skchecked=False):
    #
    # __SendChatMsg__ : send the message $msg$ to the chat.
    # skchecked=True skips re-validating the session's secret key first.
    # Returns an MCXNOW_ERROR code.
    #
    self.Return=None
    if self.Trading:
        if not(skchecked):
            # Re-fetch the account page to confirm the secret key is valid.
            result=request(self.Session, 'get', MCXNOW_DOMAIN+MCXNOW_ACTION["useraccount"])
            if result==0:
                return MCXNOW_ERROR['HTTP Error']
            else:
                texthtml=result.text
                if self.__CheckSecretKey__(texthtml):
                    skOk=True
                else:
                    skOk=False
        else:
            skOk=True
        if skOk:
            # Chat endpoint takes the secret key and text as query params.
            self.Session.get(MCXNOW_DOMAIN+MCXNOW_ACTION["sendchat"]+"&sk="+self.User.SecretKey+"&t="+str(msg))
            return MCXNOW_ERROR['Ok']
        else:
            # Secret-key check failed: the session has expired.
            self.Trading=False
            return MCXNOW_ERROR['Session ended']
    else:
        return MCXNOW_ERROR['Anonymous connexion']
def __init__(self, rLink, rType=sortType.HOT, rLimit=25):
    """Fetch the comments listing at *rLink*.

    rType: sort order; rLimit: number of entries, sent as a query
    parameter when not 25.
    """
    rTypeTxt = typeStr(rType)
    lg.debug("comments::__init__ " + rLink + "" + rTypeTxt + " " + str(rLimit))
    # TODO strip rlink to show in history
    self.rLink = rLink
    self.rType = rType
    self.currComment = 0
    rLimitTxt = None
    # BUGFIX: `rLimit is not 25` relied on CPython small-int interning
    # (SyntaxWarning since 3.8); compare by value instead.
    if rLimit != 25:
        rLimitTxt = "limit=" + str(rLimit)
    params = rLimitTxt
    self.r = request(rLink, params)
    self.ok = self.r.ok
    self.status = self.r.status
    if self.ok:
        self.json = self.r.json
        self.__fetchComments__()
def get_table():
    # NOTE(review): this function appears corrupted/redacted — the "******"
    # in the middle of the call has replaced the password prompt and the
    # intermediate login/scraping steps, and `selected_term` is never
    # assigned. Preserved verbatim; reconstruct from the original source
    # before use.
    username = input("Username: "******"twu_website.html")
    make_table(selected_term)
def __SendChatMsg__(self, msg, skchecked=False):
    #
    # __SendChatMsg__ : send the message $msg$ to the chat.
    # skchecked=True skips re-validating the session's secret key first.
    # Returns an MCXNOW_ERROR code.
    #
    self.Return = None
    if self.Trading:
        if not (skchecked):
            # Re-fetch the account page to confirm the secret key is valid.
            result = request(self.Session, 'get',
                             MCXNOW_DOMAIN + MCXNOW_ACTION["useraccount"])
            if result == 0:
                return MCXNOW_ERROR['HTTP Error']
            else:
                texthtml = result.text
                if self.__CheckSecretKey__(texthtml):
                    skOk = True
                else:
                    skOk = False
        else:
            skOk = True
        if skOk:
            # Chat endpoint takes the secret key and text as query params.
            self.Session.get(MCXNOW_DOMAIN + MCXNOW_ACTION["sendchat"] +
                             "&sk=" + self.User.SecretKey + "&t=" + str(msg))
            return MCXNOW_ERROR['Ok']
        else:
            # Secret-key check failed: the session has expired.
            self.Trading = False
            return MCXNOW_ERROR['Session ended']
    else:
        return MCXNOW_ERROR['Anonymous connexion']
def __LoadChat__(self):
    #
    # __LoadChat__ : download the whole chat.
    # Messages are stored in Public.Chat; returns an MCXNOW_ERROR code.
    #
    self.Return = None
    result = request(self.Session, 'get',
                     MCXNOW_DOMAIN + MCXNOW_ACTION["chat"])
    if result == 0:
        return MCXNOW_ERROR['HTTP Error']
    else:
        textxml = result.text
        if '<doc>' in textxml:
            # Messages: one <c> element per entry carrying
            # <n>user</n>, <i>id</i> and <t>text</t>.
            self.Public.Chat.ClearChat()
            messages = textxml.split('<c>')
            # messages[0] is the text before the first <c>; skip it.
            for i in range(1, len(messages)):
                message_user = messages[i].split('<n>')[1].split('</n>')[0]
                message_id = messages[i].split('<i>')[1].split('</i>')[0]
                message_text = messages[i].split('<t>')[1].split('</t>')[0]
                self.Public.Chat.AddMessage(message_id, message_user,
                                            message_text)
            self.Return = self.Public
            return MCXNOW_ERROR['Ok']
        else:
            return MCXNOW_ERROR['Unknown']
def get_sector_and_industry(ticker):
    """Scrape the Yahoo Finance profile page for *ticker*.

    Returns (sector, industry, kind): kind is "ETF" for funds (category and
    fund family stand in for sector/industry), "Equity" for stocks, or
    (None, None, None) when neither page layout is recognised.
    """
    url = YAHOO_PROFILE.format(TICKER=ticker)
    page = request(CONFIG, url)
    page = BeautifulSoup(page.content, features=FEATURES)
    if page.find("span", text="Fund Overview"):
        # Fund layout: the value follows the label span directly.
        sector = page.find("span", text="Category").next_sibling.text
        industry = page.find("span", text="Fund Family").next_sibling.text
        return sector, industry, "ETF"
    elif page.find("span", text="Industry"):
        # Equity layout: label text varies ("Sector" vs "Sector(s)").
        span = page.find("span", text=["Sector", "Sector(s)"])
        sibs = span.fetchNextSiblings()[0]
        sector = sibs.text
        span = page.find("span", text="Industry")
        sibs = span.fetchNextSiblings()[0]
        industry = sibs.text
        return sector, industry, "Equity"
    else:
        return None, None, None
def api_pull_match_history(region, summoner, begin_index):
    """Pull up to 15 matches of *summoner*'s history from the Riot API and
    record any new current-season matches in the database (Python 2 code:
    note the ``except Exception, e`` syntax near the end)."""
    database = get_connection(DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME,
                              DATABASE_PASSWORD, DATABASE_NAME)
    # always try getting 15 matches (max) at a time
    end_index = begin_index + 15
    # fetch matches from the api
    logger.warning('[api_pull_match_history] adding request for match history of {}'.format(summoner['id']))
    response = request(API_URL_MATCH_LIST, region, summonerId=summoner['id'],
                       beginIndex=begin_index, endIndex=end_index)
    if response:
        logger.warning('[api_pull_match_history] got {} matches: [{}]'.format(
            len(response.get('matches', [])),
            [str(match['matchId']) for match in response.get('matches', [])]))
        matches = response.get('matches', [])
        if matches:
            # see which matches we already have recorded
            sql = u"""
                SELECT match_id
                FROM matches
                WHERE 1
                    AND match_region = {region}
                    AND summoner_id = {summoner_id}
                    AND match_id IN ({match_ids})
                    {missing_item_temp_fix_sql}
            """.format(
                region=database.escape(region),
                summoner_id=summoner['id'],
                match_ids=','.join(str(match['matchId']) for match in matches),
                missing_item_temp_fix_sql=MISSING_ITEM_TEMP_FIX_SQL,
            )
            recorded_match_ids = database.fetch_all_value(sql)
            logger.warning('[api_pull_match_history] sql: {}'.format(sql))
            logger.warning('[api_pull_match_history] recorded match ids: {}'.format(recorded_match_ids))
            match_stats = []
            for match in matches:
                # if the match is not already recorded and in this season, then record it
                if match['matchId'] not in recorded_match_ids and match['season'] == SEASON_NAME:
                    logger.warning('[api_pull_match_history] getting stats for match {}'.format(match['matchId']))
                    # Fetch each match's stats concurrently.
                    thread = SimpleThread(match_helper.get_stats,
                                          matchId=match['matchId'],
                                          region=region, detailed=False)
                    match_stats.append(thread)
            if match_stats:
                # Join the threads and collect their results.
                match_stats = [stats.result() for stats in match_stats]
                logger.warning('[api_pull_match_history] doing player_helper.get_stats()')
                player_stats = player_helper.get_stats(match_stats, database)
                logger.warning('[api_pull_match_history] inserting player {}'.format(summoner['id']))
                player_helper.insert(player_stats, database, summoner['id'])
                for match_stat in match_stats:
                    try:
                        logger.warning('[api_pull_match_history] inserting match stats for match {}'.format(match_stat['match']['id']))
                        match_helper.insert(match_stat, player_stats, database, detailed=False)
                    except Exception, e:
                        # Best-effort: a single bad match must not abort the batch.
                        logger.warning('[api_pull_match_history] FAILED inserting match stats for match {}: {}'.format(match_stat['match']['id'], e))
                        pass
def getrequest(url):
    """Record *url* in the global ``goods`` list when it answers HTTP 200.

    Failures are deliberately ignored (best-effort availability probe).
    """
    try:
        # FIX: the local used to shadow the function's own name.
        response = request.request(url)
        if response.status_code == 200 and url not in goods:
            goods.append(url)
    except Exception:
        # FIX: was `except BaseException: pass`, which also swallowed
        # KeyboardInterrupt/SystemExit; keep the best-effort behaviour but
        # only for ordinary errors.
        pass
def main():
    """CLI entry point: fetch sys.argv[1].

    'suwen' URLs return the raw request result; otherwise the result is
    searched, parsed, printed and returned. Returns an error message
    string when no link is given or when request() reports an error.
    """
    # Guard clause instead of the original nested if/else.
    if len(sys.argv) != 2:
        print('please provide link')
        return ('please provide link')
    url = sys.argv[1]
    # FIX: both branches fetched the same URL; hoist the single request.
    req = request(url)
    if 'suwen' in url:
        return req
    # FIX(idiom): was `type(req) == str`. request() signals errors by
    # returning a message string.
    if isinstance(req, str):
        return req
    sear = search(req)
    cleaner_data = parse(sear)
    print(cleaner_data)
    return cleaner_data
def __init__(self):
    """Set up the database and request helpers and the hosts-file path."""
    super().__init__()
    # Collaborators created up front; the hosts file path is fixed.
    self.db = db()
    self.req = request()
    self.hosts_file = "/etc/hosts"
def ok(x,y):
    # Fire one shot at (x, y): count it in the global shot counter `t`,
    # remember the cell in `lovit`, and report whether it struck a ship
    # (the server answers "HIT" or "DESTROYED").
    global t
    t+=1
    lovit.append((x, y))
    if request.request(x,y) in ["HIT", "DESTROYED"]:
        return True
    else:
        return False
def validate(input): if (len(input) == 3): # covers location, time scenario #print "length 2" destination = input[1].title() time = input[2] if (locations.valid_location(destination) and day.valid_time(time)): #valid location #print "valid location" req = request.request(destination, time) return req elif(len(input) == 4): # covers location, day, time scenario print "length 3" destination = input[1].title() time = input[3] day_specified = input[2].title() if (locations.valid_location(destination)): #valid location print "valid location" if (day.valid_time(time)): print "valid time" if (day.valid_day(day_specified)): print "valid day" req = request.request(destination, time) day_of_week = day.get_specified(day_of_week) today = day.get_day(datetime.datetime.today().weekday()) if (today != day_of_week): req.set_future() # set n days from return req elif(day.valid_specifier(day_specified)): print "valid day specifier" req = request.request(destination, time) return req # check valid day # check valid time elif(len(input) > 4): # covers location, n days from, time print "length > 3" destination = input[1].title() if (locations.valid_location(destination)): #valid location print "valid location" # check valid n days from # check valid time
def get_data(start, period):
    """Fetch one DailyData record per day, for *period* days from *start*."""
    results = []
    current = start
    for _ in range(period):
        # Query the BPS endpoint for this day and parse the response.
        reply = request(BPS, {'date': current.strftime(START)})
        results.append(DailyData(parse(reply.text)))
        current += ONE
    return results
def __Login__(self):
    # Log in to mcxNOW with the stored credentials, then scrape the session's
    # secret key ("sk=...") from the account page. Sets self.Trading on
    # success; returns an MCXNOW_ERROR code.
    self.Return=None
    logindata={'user': self.User.UserName, 'pass': self.User.Password}
    result=request(self.Session, 'post', MCXNOW_DOMAIN+MCXNOW_ACTION["login"], data=logindata)
    if result==0:
        return MCXNOW_ERROR['HTTP Error']
    else:
        if result.status_code==200:
            # Fetch the account page; the secret key is embedded in its HTML.
            result=request(self.Session, 'get', MCXNOW_DOMAIN+MCXNOW_ACTION["useraccount"])
            texthtml=result.text
            if 'sk' in texthtml :
                self.User.SecretKey=texthtml.split('sk=')[1].split("'")[0]
                self.Trading=True
                return MCXNOW_ERROR['Ok']
            else :
                return MCXNOW_ERROR['Unknown']
        else:
            return MCXNOW_ERROR['HTTP Error']
def wiki(term):
    """Print the top Wikipedia opensearch result for *term* with its link."""
    params = {
        'action' : 'opensearch',
        'search' : term,
        'limit' : 1,
        'format' : 'json'
    }
    raw = request('http://en.wikipedia.org/w/api.php/', data=params).read()
    # opensearch answers [query, [titles], [descriptions], [links]].
    resp = json.loads(raw.decode())
    print("{title} : {description}".format(title=resp[1][0], description=resp[2][0]))
    print("Read more at {link}".format(link=resp[3][0]))
def scrape_category(url):
    """Collect the book entries from *url* and all of its paginated pages."""
    soup = BeautifulSoup(request(url), "html.parser")
    books = list(book_list(soup))
    # A full first page (20 entries) means there may be more pages.
    if len(books) == 20:
        page_number = 1
        while soup.find("ul", "pager").find("li", "next"):
            page_number += 1
            page_url = url.replace("index.html", f"page-{page_number}.html")
            soup = BeautifulSoup(request(page_url), "html.parser")
            books.extend(book_list(soup))
    return books
def search(x,y):
    # Battleship probe (letter-coordinate protocol): try the four orthogonal
    # neighbours of (x, y) on a 0..10 board and chase hits recursively.
    print("Am pornit")   # Romanian: "I started"
    print(x,y)
    for i in range(0,4):
        m,n=xoy[i]
        # Bitwise `&` is safe here only because every comparison is
        # parenthesised.
        if(x+m<11)&(y+n<11)&(x+m>-1)&(y+n>-1):
            # NOTE(review): records the *origin* cell, not the probed
            # neighbour — looks suspicious, preserved as written.
            lovit.append((x, y))
            # Column letter is chr(col-1+'A'); row is sent 1-based.
            if (request.request(chr(x+m-1+ord('A')),y+n+1)in["HIT","DESTROYED"]) & ((x+m,y+n) not in lovit):
                search(x+m,y+n)
                hit(m+x,n+y)
def face_celebrity(file):
    """Print the look-alike celebrity the CFR API detects in *file*."""
    response = request(URLS.CFR.celebrity, file)
    if response.status_code != 200:
        # API error: report the status code and bail out.
        print('유명인사 인식 에러({})'.format(response.status_code))
        return
    face = ujson.loads(response.text)['faces'][0]
    percent = int(float(face['celebrity']['confidence']) * 100)
    print('닯은 연예인: {} ({}%)'.format(face['celebrity']['value'], percent))
def face_recognize(file):
    """Print age, gender and emotion the CFR API detects in *file*."""
    response = request(URLS.CFR.face, file)
    if response.status_code != 200:
        # API error: report the status code and bail out.
        print('얼굴 인식 에러({})'.format(response.status_code))
        return
    face = ujson.loads(response.text)['faces'][0]
    print('나이: {}세, 성별: {}, 표정: {}'.format(face['age']['value'],
                                          face['gender']['value'],
                                          face['emotion']['value']))
def scrape(url):
    """Scrape every category listed in the side navigation of *url*."""
    soup = BeautifulSoup(request(url), "html.parser")
    # Category links live in the nested <ul> of the side nav list.
    nav_items = soup.find("ul", {"class": "nav nav-list"}).find("ul").findAll("li")
    category_links = [BASE_DIR + li.find("a")["href"] for li in nav_items]
    for link in tqdm(category_links):
        scrape_all_book_category(link)
def excel_dict(xlsx_url):
    """Download the zip/rar archive behind *xlsx_url* and extract its .xls
    sheet entirely in memory (no file written to local disk).

    Returns the dict produced by xls2dict(), or None when the archive
    cannot be opened as zip or rar.
    """
    download_page = request(xlsx_url).text
    # Scrape the resource id and the POST parameters from the page markup.
    resourceId_pattern = '"id":(.*?),"linkType"'
    resource = re.search(resourceId_pattern, download_page)
    resourceDownloadPermeters = 'data: "(.*?)&.*&taskId=(.*?)&iscomplete'
    postPermeters = re.search(resourceDownloadPermeters, download_page)
    download_url = '/checkResourceDownload.do'
    data = {
        'MIME Type': 'application/x-www-form-urlencoded',
        postPermeters[1]: '',
        'resourceId': resource.group(1),
        'downloadFrom': 1,
        'isMobile': 'false',
        'taskId': postPermeters[2],
        'iscomplete': 'false',
        'history': 'false'
    }
    xls_resource = request(download_url, data)
    # 'indirect' status means the bytes must be fetched from the preview
    # servlet instead.
    if 'status' in xls_resource.json() and xls_resource.json(
    )['status'] == 'indirect':
        download_url = '/filePreviewServlet?indirect=true&resourceId={}'.format(
            resource.group(1))
        xls_resource = request(download_url)
    import rarfile, zipfile
    # Try zip first, fall back to rar; both open from the in-memory bytes.
    try:
        zf = zipfile.ZipFile(io.BytesIO(xls_resource.content))
        archive = zf
    except zipfile.error as ze:
        try:
            rf = rarfile.RarFile(io.BytesIO(xls_resource.content))
            archive = rf
        except rarfile.error as rfe:
            return None
    # Return the first .xls member, parsed.
    for file in archive.infolist():
        if file.filename.endswith('.xls'):
            with archive.open(file) as fd:
                import xlrd
                xls = xlrd.open_workbook(file_contents=fd.read())
                return xls2dict(xls)
def render():
    """Render an HTML page for every city listed in cities.txt.

    Reads a comma-separated city list, fetches data for each non-empty
    entry, and hands the results to generate_html().
    """
    # BUGFIX: the original leaked the file handle when request() raised;
    # a context manager closes it deterministically.
    with open(path + "cities.txt", "r") as file:
        l = file.read().split(',')
    cities = []
    print(l)
    for city in l:
        if (city != ""):
            cities.append(request(city))
    generate_html(cities, path)
def extract_bbs(href):
    """Return (title, content) scraped from the BBS question page at *href*.

    When the content <span>s are empty, the title doubles as the content.
    """
    tree = html.fromstring(request(href).text)
    title = tree.xpath('//div[@class="assignment-head"]/text()')
    spans = tree.xpath('//div[@class="BbsContent"]/span//text()')
    if spans:
        body = '\n'.join(stm.strip() for stm in spans)
    else:
        body = title[0].strip()
    return (title[0].strip(), body)
def xhr_question(date_time, reply_id, student_exercise_id, exercise_id):
    """Fetch one exam exercise's info via the XHR endpoint; returns JSON."""
    payload = {
        # Timestamp key acts as a cache-buster; its value is empty.
        str(date_time): '',
        'method': 'getExerciseInfo',
        'examReplyId': reply_id,
        'exerciseId': exercise_id,
        'examStudentExerciseId': student_exercise_id,
    }
    return request('/student/exam/manageExam.do', payload).json()
def search(x,y):
    # Battleship probe: shoot at every unvisited orthogonal neighbour of
    # (x, y) on a 1-indexed 10x10 board and chase hits recursively.
    for i in range(0, 4):
        cx, cy = xoy[i]
        newx = x + cx
        newy = y + cy
        if (newx > 0) and (newx < 11) and (newy > 0) and (newy < 11) and (newx, newy) not in lovit:
            lovit.append((newx, newy))
            hit2(newx, newy)
            # NOTE(review): the request sends row `y`, not `newy` — looks
            # like a bug, but preserved as written; confirm against the
            # game protocol.
            if request.request(chr(newx + ord('A')-1), y) in ["HIT", "DESTROYED"]:
                hit(newx, newy)
                search(newx, newy)
def grab_portland_crime_data(size=1000):
    """Return Portland crime rows, caching the raw service response on disk.

    The cache file is `portland_crime_data/<size>`; on a miss the web service
    is queried (credentials come from module-level wsUser/wsPass) and the
    response is stored before being returned.
    """
    import os
    cache_path = os.path.join('portland_crime_data', str(size))
    try:
        # Cache hit: reuse the stored service response.
        with open(cache_path, 'r') as cached:
            resp = json.load(cached)
    except IOError:
        # Cache miss: query the service and persist the raw response.
        resp = request('select * from crime_data.portland_crime',
                       wsUser, wsPass, rowLimit=size)
        with open(cache_path, 'w') as sink:
            json.dump(resp, sink)
    return resp['results'][0]['data']
def hit(x, y):
    """Recursively chase a battleship hit at (x, y) across its neighbours.

    Relies on module-level state: `s` (the board, "1" marks a ship cell),
    `xoy` (direction offsets) and `lovit` (cells already handled).
    Diagnostic prints are in Romanian ("Am pornit" = started,
    "lovit" = hit).
    """
    print("Am pornit")
    print(x, y)
    if (s[x-1][y-1]=="1") :
        print("lovit")
        for i in range(0, 4):
            m, n = xoy[i]
            # NOTE(review): `&` is the bitwise operator; on bools it works,
            # but unlike `and` it never short-circuits — request.request()
            # below fires even when (x+m, y+n) is already in `lovit`.
            if(x+m<11)&(y+n<11)&(x+m>-1)&(y+n>-1):
                # NOTE(review): column/row encoding `chr(x+m-1+ord('A')), y+n+1`
                # disagrees with search()'s `chr(newx + ord('A')-1), y` — at
                # least one of the two is off; verify against the protocol.
                if (request.request(chr(x+m-1+ord('A')), y+n+1) in ["HIT", "DESTROYED"]) & ((x+m, y+n) not in lovit):
                    hit(x+m, y+n)
                    lovit.append((m+x, n+y))
def add_request(self, req_json=None):
    """Build a request object from req_json, transfer this object's data into
    it, and return the new request's JSON form.

    Arguments:
        req_json: dict used to initialise the new request; defaults to an
            empty dict. (A None default replaces the original mutable
            `req_json={}`, which is shared across calls in Python.)

    Returns:
        dict from req.json(), or {} when the request module cannot be
        imported or an illegal attribute name is encountered.
    """
    if req_json is None:
        req_json = {}
    try:
        from request import request
        req = request(json_input=req_json)
    except ImportError as ex:
        self.logger.error('Could not import \'request\' module. Reason: %s' % (ex))
        return {}
    # NOTE(review): catching the *result of calling* IllegalAttributeName()
    # is unusual — presumably the call returns the exception class; confirm.
    except self.IllegalAttributeName() as ex:
        return {}
    req.transfer_from(self)
    return req.json()
def setUp(self):
    """Start a crawl, stop it, and delete its results so the test runs clean."""
    self.get = Get()
    urls, response = initiate_crawl()
    body = json.loads(response['content'].decode())
    job_id = body['job_id']
    self.query_string = 'job_id=' + str(job_id)
    # Block until the freshly initiated crawl has actually started.
    self.get.wait_for_passing_content('/status', self.query_string,
                                      self._mk_response_test(['Running', 'Complete']))
    # Ask the service to stop the job and check it acknowledged the request.
    response = request('POST', '/stop', self.query_string)
    self.assertEqual(response['http_status'], '202 Accepted')
    self.get.wait_for_passing_content('/status', self.query_string,
                                      self._mk_response_test(['Aborted']))
    # Clear out any results the aborted crawl left behind.
    for url in urls:
        self.response = request('DELETE', '/result', 'url=' + parse.quote(url))
def setUp(self):
    """Start a crawl, wait for it to run, then issue the stop request."""
    self.get = Get()
    urls, response = initiate_crawl()
    body = json.loads(response['content'].decode())
    job_id = body['job_id']
    self.query_string = 'job_id=' + str(job_id)
    # Block until the freshly initiated crawl has actually started.
    self.get.wait_for_passing_content('/status', self.query_string,
                                      self._mk_response_test(['Running', 'Complete']))
    # Record the stop response for the test body to inspect.
    self.response = request('POST', '/stop', self.query_string)
def makeRequest(self):
    """Authenticate against the remote; on success allocate a fresh coin
    address via the Savoir API and advance the UI to the vote page."""
    print("AUTHPAGE EROM", self.controller.getHash())
    rr = request()
    res = rr.auth(self.controller.getHash(), self.controller.getRemote())
    if res.status == 200:
        # Authentication succeeded: grant an address and ask for a coin.
        try:
            api = self.controller.getSavoir()
            o = str(api.getnewaddress())
            self.controller.setAddress(str(o))
            print(o)
            self.controller.show_frame(votepage)
        except Exception:
            # Bare `raise` re-raises with the original traceback intact;
            # the old Python-2 `except Exception, e: raise e` discarded it
            # (and was a syntax error under Python 3).
            raise
def __Logout__(self):
    """Log the current user out of mcxNOW and reset the stored session user.

    Returns:
        One of the MCXNOW_ERROR codes describing the outcome.
    """
    self.Return = None
    result = request(self.Session, 'get',
                     MCXNOW_DOMAIN + MCXNOW_ACTION["logout"] + "&sk=" + self.User.SecretKey)
    # The project request() helper signals transport failure with 0.
    if result == 0:
        return MCXNOW_ERROR['HTTP Error']
    if result.status_code != 200:
        return MCXNOW_ERROR['HTTP Error']
    # A secret key still present in the page means the logout did not take.
    if 'sk' in result.text:
        return MCXNOW_ERROR['Unknown']
    self.User = McxNowUser()
    return MCXNOW_ERROR['Ok']
def solve():
    """Drive the battleship game: fire random shots until 10 ships are found,
    then finish off each found ship and report the total shot count.

    Mutates module-level state: `c` (ships found), `t` (finishing shots),
    `aux` (cells of the ship just found), `m` (apparently the ship's size,
    set by hit()/search() — confirm), and `lovit` (cells already tried).
    """
    global c
    global t
    global aux
    global m
    global lovit
    c = 0
    t = 0
    lovit = []
    # NOTE(review): this 11x11 grid is immediately discarded — `aux` is reset
    # to [] before every hit() call below; the initialisation looks vestigial.
    aux = [[0 for x in range(11)] for y in range(11)]
    while c<10:
        x = randint(1, 10)
        y = randint(1, 10)
        if ((x, y) not in lovit) and ( ok(x,y) ):
            c += 1
            m = 1
            aux = []
            hit(x, y)
            search(x, y)
            dele(aux)
            print(m)
            if m == 3:
                # NOTE(review): `m` is clobbered here by the unpack — any
                # later read of `m` sees a coordinate, not the size. Verify.
                l, m = aux[1]
                t += 1
                request.request(l, m)
            elif m > 3:
                # NOTE(review): this loop shadows the outer x/y shot
                # coordinates — intentional only if nothing below reads them.
                for x, y in aux:
                    t += 1
                    if request.request(x, y) == "DESTROYED":
                        break
            if c==10:
                print(t)
                c = 11  # force the while loop to terminate
def approve_request(self, prepid=''): while True: ob = self.get_request(prepid) if not ob: return False rt = request('Nikolaos', request_json=ob) try: rt.approve('Nikolaos') except Exception as ex: pass try: self.db.update([rt.json()]) return True except couchdb.ResourceConflict: print 'collision!', self.getName() time.sleep(1) continue
def add_new_gen_parameters(self, prepid=''): while True: ob = self.get_request(prepid) if not ob: return False rt = request('Nikolaos', request_json=ob) try: rt.update_generator_parameters(generator_parameters('Nikolaos').build()) except Exception as ex: pass try: self.db.update([rt.json()]) return True except couchdb.ResourceConflict: print 'collision!', self.getName() time.sleep(1) continue