def put(self, article_id):
    """Publish the article identified by *article_id* via the auth service.

    Requires an ``Authorization: Bearer <token>`` request header; answers
    400 when it is absent.
    """
    logging.info(self.request)
    logging.info("got article_id %r from uri", article_id)
    try:
        access_token = self.request.headers['Authorization']
    except KeyError:
        # BUG FIX: was a bare except around the whole header handling,
        # which would also swallow unrelated errors. Only a missing
        # Authorization header is a client error.
        logging.info("got access_token null")
        self.set_status(400)  # Bad Request
        self.write('Bad Request')
        self.finish()
        return
    access_token = access_token.replace('Bearer ', '')
    logging.info("got access_token %r", access_token)
    # Forward the publish request to the blog backend.
    url = "http://" + AUTH_HOST + "/blog/articles/" + article_id + "/pub"
    http_client = HTTPClient()
    body_data = {'article_id': article_id}
    logging.info("post body %r", body_data)
    _json = json_encode(body_data)
    response = http_client.fetch(url, method="PUT", body=_json,
                                 headers={"Authorization": "Bearer " + access_token})
    logging.info("got response %r", response.body)
    self.finish()
def run(self):
    """Poll the school RSS feed once an hour and store items not yet seen.

    ``maxid`` is the newest stored item id; items are assumed
    newest-first, so the scan stops at the first already-stored id.
    """
    while True:
        try:
            maxid = self.db.news_list()[0]['id']
        except (IndexError, KeyError, TypeError):
            # BUG FIX: was a bare except. Empty table (or unexpected row
            # shape) -> start from id 1.
            maxid = 1
        print(maxid)
        client = HTTPClient()
        response = client.fetch('http://cs.hust.edu.cn/rss')
        result = response.body.decode("utf-8", errors='ignore')
        soup = BeautifulStoneSoup(result)
        items = soup.find_all('item')
        for item in items:
            title = item.title.text
            link = item.link.text
            desc = item.description.text
            linkid = self.link_id(link)
            if linkid > maxid:
                result = self.db.add_news(linkid, title, desc, link)
                if result:
                    result = self.get_article(link)
            else:
                # Reached an item we already stored; the rest are older.
                break
        time.sleep(3600)
def get(self, account_id):
    """List recent articles of *account_id*, paging backwards from ``last``.

    Responds with a JSON array; each article gets a human-friendly
    ``publish_time`` plus the raw value kept as ``timestamp`` for
    pagination.
    """
    logging.info(self.request)
    logging.info("got account_id %r from uri", account_id)
    _timestamp = self.get_argument("last", 0)  # datetime as timestamp
    logging.info("got last %r", _timestamp)
    _timestamp = int(_timestamp)
    if _timestamp == 0:
        # No pagination cursor supplied: page from "now".
        _timestamp = int(time.time())
    logging.info("got _timestamp %r", _timestamp)
    try:
        params = {"before": _timestamp, "limit": 2, "status": "all"}
        url = url_concat("http://" + AUTH_HOST + "/blog/accounts/"
                         + account_id + "/articles", params)
        http_client = HTTPClient()
        response = http_client.fetch(url, method="GET")
        logging.info("got response %r", response.body)
        _articles = json_decode(response.body)
        for _article in _articles:
            # Convert publish_time to a friendly form (e.g. "10 minutes
            # ago"); keep the raw value as "timestamp" for pagination.
            _article["timestamp"] = _article["publish_time"]
            _article["publish_time"] = time_span(_article["publish_time"])
        self.finish(JSON.dumps(_articles))
    except Exception:
        # BUG FIX: was a bare except, and only the 404 branch called
        # finish() -- any other error left the client request hanging.
        err_title = str(sys.exc_info()[0])
        err_detail = str(sys.exc_info()[1])
        logging.error("error: %r info: %r", err_title, err_detail)
        if err_detail == 'HTTP 404: Not Found':
            self.finish()
        else:
            self.set_status(500)
            self.finish()
def authApi(username, password):
    """Log in to the SEU mobile IDS service.

    Returns {'code': ..., 'content': ...}: code 200 on success (content
    holds the combined SSO cookie string), 400 on bad credentials or a
    missing cookie, 500 on unexpected errors.
    """
    data = {
        'username': username,
        'password': password
    }
    result = {'code': 200, 'content': ''}
    try:
        client = HTTPClient()
        request = HTTPRequest(
            "https://mobile4.seu.edu.cn/_ids_mobile/login18_9",
            method='POST',
            body=urllib.urlencode(data),
            validate_cert=False,
            request_timeout=TIME_OUT)
        response = client.fetch(request)
        header = response.headers
        if 'Ssocookie' in header.keys():
            # Combine the SSO cookie with the first session cookie from
            # Set-Cookie.
            headertemp = json.loads(header['Ssocookie'])
            cookie = headertemp[0]['cookieName'] + "=" + headertemp[0]['cookieValue']
            cookie += ";" + header['Set-Cookie'].split(";")[0]
            result['content'] = cookie
        else:
            result['code'] = 400
    except HTTPError:
        result['code'] = 400
    except Exception:
        result['code'] = 500
    # BUG FIX: the result dict was built but never returned.
    return result
def getAccessToken(appId, appSecret, code):
    """Exchange a WeChat OAuth *code* for an access-token dict."""
    url = ("https://api.weixin.qq.com/sns/oauth2/access_token"
           "?appid={0}&secret={1}&code={2}&grant_type=authorization_code"
           .format(appId, appSecret, code))
    client = HTTPClient()
    resp = client.fetch(url, method="GET")
    logging.info("got response %r", resp.body)
    return json_decode(resp.body)
def get(self):
    """Handle the OAuth2 redirect: exchange the ``?code=`` for a token."""
    code = self.get_argument("code", False)
    if not code:
        raise web.HTTPError(400, "oauth callback made without a token")
    http_client = HTTPClient()
    # Client credentials go in a Basic auth header (RFC 6749 sec. 2.3.1).
    authorization = "Basic %s" % str(
        base64.b64encode(bytes(APP_Key + ":" + APP_Secret, encoding='utf-8')),
        encoding='utf-8')
    req = HTTPRequest("https://open.hs.net/oauth2/oauth2/token",
                      method="POST",
                      headers={
                          "Authorization": authorization,
                          "Content-Type": "application/x-www-form-urlencoded"
                      },
                      body=urllib.parse.urlencode({
                          "grant_type": "authorization_code",
                          "code": code
                      })
                      )
    resp = http_client.fetch(req)
    resp_json = json.loads(resp.body.decode('utf8', 'replace'))
    # NOTE(review): the token response is only printed, never stored or
    # written back to the client -- looks like debug/demo code; confirm.
    print('===============================')
    print(resp_json)
def http_request(url, server, port=80, timeout=20.0):
    """Fetch *url* through the HTTP proxy *server*:*port* and, when the
    reply came from Twitter's front end (``Server: tfe``), extract the
    rate-limit headers.

    Returns ``(code, data, rhits, rtime)``; ``code`` stays 999 when no
    usable response was obtained.
    """

    def check_twitter_response(response):
        # True only for responses served by Twitter's front end ("tfe").
        return (response is not None and len(response.headers.get_list('Server')) > 0 \
                and response.headers.get_list('Server')[0] == 'tfe')

    def get_rate_limits(headers):
        # Pull X-RateLimit-Reset / X-RateLimit-Remaining; None when absent.
        rtime = headers.get_list('X-RateLimit-Reset')
        rhits = headers.get_list('X-RateLimit-Remaining')
        if len(rtime) > 0:
            rtime = int(rtime[0])
        else:
            rtime = None
        if len(rhits) > 0:
            rhits = int(rhits[0])
        else:
            rhits = None
        return rhits, rtime

    # NOTE(review): configures the *async* client class but then uses the
    # synchronous HTTPClient (which wraps an async client internally) --
    # presumably to force the curl backend; confirm.
    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
    http_client = HTTPClient()
    code, data, rtime, rhits = 999, None, None, None
    try:
        response = http_client.fetch(url, proxy_host=server, proxy_port=port,
                                     connect_timeout=timeout, request_timeout=timeout)
        response.rethrow()
        if check_twitter_response(response):
            code, data = response.code, response.body
            rhits, rtime = get_rate_limits(response.headers)
    except HTTPError as e:
        # HTTP-level errors still carry rate-limit headers worth reading.
        if check_twitter_response(e.response):
            code, data = e.code, None
            rhits, rtime = get_rate_limits(e.response.headers)
    return code, data, rhits, rtime
def getUserInfo(token, openid):
    """Fetch a WeChat user's profile (zh_CN locale) with an OAuth token."""
    url = ("https://api.weixin.qq.com/sns/userinfo"
           "?access_token={0}&openid={1}&lang=zh_CN".format(token, openid))
    client = HTTPClient()
    resp = client.fetch(url, method="GET")
    logging.info("got response %r", resp.body)
    return json_decode(resp.body)
def getCookie(cardnum,card_pwd):
    # Log in to the campus SSO with card number + password; returns
    # (True, cookie-string) on success or (False, reason) on failure.
    # Python 2 code (urllib.urlencode, "except Exception,e" syntax).
    # print "refresh"
    data = {
        'username':cardnum,
        'password':card_pwd
    }
    try:
        client = HTTPClient()
        request = HTTPRequest(
            LOGIN_URL,
            method='POST',
            body=urllib.urlencode(data),
            validate_cert=False,
            request_timeout=4)
        response = client.fetch(request)
        header = response.headers
        if 'Ssocookie' in header.keys():
            # Combine the SSO cookie with the first session cookie from
            # the Set-Cookie header.
            headertemp = json.loads(header['Ssocookie'])
            cookie = headertemp[0]['cookieName']+"="+headertemp[0]['cookieValue']
            cookie += ";"+header['Set-Cookie'].split(";")[0]
            return True,cookie
        else:
            return False,"No cookie"
    except Exception,e:
        # print str(e)
        return False,str(e)
def get_motion_detection(camera_id):
    # Query the local motion daemon's HTTP interface for the
    # motion-detection status of *camera_id*.  Returns True/False, or
    # None when the status could not be determined.  Python 2 code
    # (unicode()).
    thread_id = camera_id_to_thread_id(camera_id)
    if thread_id is None:
        # logging.error returns None, so this also returns None.
        return logging.error('could not find thread id for camera with id %s' % camera_id)
    url = 'http://127.0.0.1:7999/%(id)s/detection/status' % {'id': thread_id}
    request = HTTPRequest(url, connect_timeout=5, request_timeout=5)
    http_client = HTTPClient()
    try:
        response = http_client.fetch(request)
        if response.error:
            raise response.error
    except Exception as e:
        logging.error('failed to get motion detection status for camera with id %(id)s: %(msg)s' % {
            'id': camera_id, 'msg': unicode(e)})
        return None
    # The daemon's status page contains the word "ACTIVE" when detection
    # is on (case-insensitive match).
    enabled = bool(response.body.lower().count('active'))
    logging.debug('motion detection is %(what)s for camera with id %(id)s' % {
        'what': ['disabled', 'enabled'][enabled],
        'id': camera_id})
    return enabled
def get_api_return(api_name, user, data=None, timeout=TIME_OUT):
    """POST *data* (plus the user's uuid) to SERVICE+api_name and
    normalise the reply into {'code': ..., 'content': ...}.

    BUG FIXES: the previous version used a mutable default (``data={}``)
    that it then mutated, leaking the uuid between calls, and the error
    branches fell off the end of the function returning None instead of
    ``ret`` (the sibling overload below returns ``ret``).
    """
    ret = {}
    client = HTTPClient()
    data = {} if data is None else data  # no shared mutable default
    data['uuid'] = user.uuid
    params = urllib.urlencode(data)
    request = HTTPRequest(SERVICE + api_name, method='POST', body=params,
                          request_timeout=timeout)
    try:
        response = client.fetch(request)
        ret = json.loads(response.body)
        if 200 <= ret['code'] < 300:
            return ret
        elif ret['code'] == 401:
            ret['content'] = error_map[401] % (LOCAL, user.openid)
        else:
            ret['content'] = error_map[ret['code']]
    except HTTPError as e:
        ret['code'] = e.code
        if ret['code'] == 401:
            ret['content'] = error_map[401] % (LOCAL, user.openid)
        else:
            ret['content'] = error_map[ret['code']]
    except Exception as e:
        # Unexpected failure: append details to a local error log.
        with open('api_error.log', 'a+') as f:
            f.write(strftime('%Y%m%d %H:%M:%S in [get_api_return]', localtime(time()))
                    + '\n' + str(e) + '\n[' + api_name + ']\t' + str(user.cardnum)
                    + '\nString:' + str(ret) + '\n\n')
        ret['code'] = 500
        ret['content'] = u'=。= 服务器未能及时回应请求,不如再试试'
    return ret
def get_current_user(self):
    """Return a valid session token, refreshing it when it has expired.

    Reads session/refresh tokens from secure cookies; when the stored
    token is past its expiry it is exchanged at the auth service and the
    cookies are updated.
    """
    session_token = self.get_secure_cookie("session_token")
    logging.info("got session_token %r", session_token)
    expires_at = self.get_secure_cookie("expires_at")
    if expires_at is None or expires_at == "":
        expires_at = 0
    refresh_token = self.get_secure_cookie("refresh_token")
    _timestamp = int(time.time())
    if _timestamp < int(expires_at):
        # BUG FIX: the comparison was inverted (">"), which returned the
        # *expired* token and needlessly refreshed still-valid ones.
        return session_token
    # Token expired (or no expiry recorded): refresh it.
    url = "http://" + AUTH_HOST + "/auth/refresh-token"
    http_client = HTTPClient()
    response = http_client.fetch(url, method="GET",
                                 headers={"Authorization": "Bearer " + refresh_token})
    logging.info("got refresh-token response %r", response.body)
    token = json_decode(response.body)
    expires_at = _timestamp + token['expires_in']
    session_token = token['access_token']
    self.set_secure_cookie("session_token", session_token)
    self.set_secure_cookie("expires_at", str(expires_at))
    self.set_secure_cookie("refresh_token", token['refresh_token'])
    self.set_secure_cookie("account_id", token['account_id'])
    return session_token
def do_post_with_cert(url, params=None, headers=None, client_key=None, client_cert=None):
    """POST *params* to *url*, optionally with a TLS client certificate.

    *params* may be a pre-encoded string or a dict (which is
    form-encoded).  Returns the raw response body.

    BUG FIX: ``params={}`` / ``headers={}`` were mutable default
    arguments; replaced with the None-sentinel idiom.
    """
    params = {} if params is None else params
    headers = {} if headers is None else headers
    body = params if isinstance(params, str) else urllib.urlencode(params)
    http_request = HTTPRequest(url, 'POST', body=body, headers=headers,
                               validate_cert=False, client_key=client_key,
                               client_cert=client_cert)
    http_client = HTTPClient()
    fetch_result = http_client.fetch(http_request)
    return fetch_result.body
def get_steam_user(db, steamid):
    """Coroutine: build a user document for *steamid* from the Steam Web
    API (GetPlayerSummaries).  The API key is read from the local mongo
    ``server`` collection.  Returns None when the HTTP call fails.
    """
    user = None
    key = yield Op(db['server'].find_one, {'key': 'apikey'})
    url = url_concat('http://api.steampowered.com/ISteamUser/GetPlayerSummaries/v0002/',
                     {'key': key['value'], 'steamids': steamid})
    client = HTTPClient()
    try:
        response = client.fetch(url)
        # The API wraps players in response.players; we asked for one id.
        get_user = json_decode(response.body)['response']['players'][0]
        user = {'steamid': get_user['steamid'],
                'steamid32': converter(steamid),
                'personaname': get_user['personaname'],
                'profileurl': get_user['profileurl'],
                'avatar': get_user['avatarfull'],
                'registration': datetime.now(),
                'bookmarks': [],
                'favorites': [],
                # next profile refresh allowed one minute from now
                'update': datetime.now() + timedelta(minutes=1),
                'dota_count': 0}
        # realname is optional in Steam profiles.
        if 'realname' in get_user.keys():
            user['realname'] = get_user['realname']
        else:
            user['realname'] = None
    except HTTPError as e:
        logging.error('Error: %s' % e)
    client.close()
    return user
def get_search(content):
    """Scan the blog catalog page for article ids whose title matches
    *content*.

    Returns ``{"blog_id": [...]}`` (empty list when nothing matches or
    the fetch fails).

    BUG FIX: the original called ``IOLoop.current().start()`` at the top,
    which blocks the calling thread -- the synchronous HTTPClient needs
    no running IOLoop, so the start/stop pair is removed.
    """
    blog_id = []
    try:
        rc = '<h2><a href="/blog/(.+?)/" target="_blank">.*' + str(content) + ".*?</a></h2>"
        ra = re.compile(rc, re.IGNORECASE)
        # url = "http://{0}/catalog".format(config()["pyworm_blog"]["url"])
        url = "http://www.pyworm.com/catalog/"
        http_client = HTTPClient()
        response = None
        try:
            response = http_client.fetch(url, request_timeout=5)
        except Exception as e:
            error_log.error(e)
        if response and response.code == 200:
            html = response.body.decode('utf-8')
            try:
                blog_id = re.findall(ra, html)
            except Exception as e:
                error_log.error(e)
    except Exception as e:
        error_log.error(e)
    return {"blog_id": blog_id}
def submit_batch(base_url, batch):
    """POST *batch* (a list of JSON-serialisable docs) to the _bulk
    endpoint and raise on an HTTP error."""
    print("Submitting a batch")
    http = HTTPClient()
    url = base_url + '_bulk'
    # BUG FIX: the Elasticsearch bulk API requires newline-delimited JSON
    # terminated by a final newline; without it the last line is rejected.
    body = '\n'.join(json.dumps(doc) for doc in batch) + '\n'
    resp = http.fetch(url, method='POST', body=body)
    resp.rethrow()
def call(self, method, params, okay=None, fail=None):
    """Make an asynchronous JSON-RPC method call.

    With neither *okay* nor *fail* given the call is performed
    synchronously and the decoded result dict is returned (None on any
    HTTP or decode error); otherwise the request is dispatched on the
    async client and the callbacks receive the outcome.
    """
    body = tornado.escape.json_encode({
        'jsonrpc': '2.0',
        'method': method,
        'params': params,
        'id': uuid.uuid4().hex,
    });
    logging.info("JSON-RPC: call '%s' method on %s" % (method, self.url))
    headers = HTTPHeaders({'Content-Type': 'application/json'})
    # request_timeout=0 disables tornado's request timeout entirely.
    request = HTTPRequest(self.url, method='POST', body=body,
                          headers=headers, request_timeout=0)
    if okay is None and fail is None:
        # Synchronous mode: block on the response.
        client = HTTPClient()
        response = client.fetch(request)
        if response.code != 200 or not response.body:
            return None
        try:
            data = tornado.escape.json_decode(response.body)
        except ValueError:
            return None
        else:
            return data
    else:
        # Asynchronous mode: hand the outcome to the okay/fail callbacks.
        client = AsyncHTTPClient()
        client.fetch(request, functools.partial(self._on_response, okay, fail))
def get_schoolnum_name(self, number):
    # Query the SEU curriculum service for the student with the given id.
    # Returns (name, student_number, card_number) on success, or the
    # string "-1" on any failure.  Python 2 code ("except Exception,e").
    try:
        CURR_URL = 'http://xk.urp.seu.edu.cn/jw_service/service/stuCurriculum.action'
        term = "16-17-1"  # hard-coded academic term used for the lookup
        params = urllib.urlencode({
            'queryStudentId': number,
            'queryAcademicYear': term})
        client = HTTPClient()
        request = HTTPRequest(
            CURR_URL,
            method='POST',
            body=params,
            request_timeout=TIME_OUT)
        response = client.fetch(request)
        body = response.body
        if not body:
            return "-1"
        else:
            # Fixed cell offsets in the returned HTML table; the [n:]
            # slices strip the field labels from the cell text.
            soup = BeautifulSoup(body)
            number = soup.findAll('td', align='left')[2].text[3:]
            name = soup.findAll('td', align='left')[4].text[3:]
            cardnum = soup.findAll('td', align='left')[3].text[5:]
            return name, number, cardnum
    except Exception,e:
        return "-1"
def main(url,downloadto):
    # Download every full-size image of an imagefap gallery into the
    # *downloadto* directory.  Python 2 code (print statements).
    # NOTE(review): appends to a module-level "gallery_list" that is not
    # defined in this function -- confirm it exists (and starts empty) at
    # module scope.
    #url = "http://www.imagefap.com/pictures/4148883/Waiting-for-Daddy?gid=4148883&view=2"
    client = HTTPClient()
    print "Gathering links from Gallery"
    gallery_response = client.fetch(url)
    gallery_pool = BeautifulSoup(gallery_response.body)
    gallery_links = gallery_pool.findAll("a")
    for gallery_link in gallery_links:
        # Photo-page links all contain "/photo".
        if "/photo" in gallery_link["href"]:
            photo_page_url = "".join(["http://imagefap.com",gallery_link["href"]])
            gallery_list.append(photo_page_url)
    print "Parsing individual pages for actual image"
    for link in gallery_list:
        photo_response = client.fetch(link)
        photo_pool = BeautifulSoup(photo_response.body)
        photo_images = photo_pool.findAll("img",src=True)
        for image in photo_images:
            # Full-size images are hosted on fap.to.
            if image["src"].startswith("http://fap.to"):
                image_src = image["src"]
                filename = image_src.split("/")[-1:][0]
                image_response = client.fetch(image_src)
                print "Downloading %s" % filename
                dest = "".join([downloadto,"/",filename])
                with open(dest,"wb") as f:
                    f.write(image_response.body)
def post_api(path):
    """POST command to remote API and return the decoded JSON reply."""
    client = HTTPClient()
    response = client.fetch(get_url(path), method="POST", body='')
    return json_decode(response.body)
def post_context_feedback(self, context_id: str, user_id: str, application_id: str,
                          session_id: str, product_id: str, _type: str,
                          meta_data: dict = None):
    """POST a feedback event for *context_id* and return the new ``_rev``
    response header.

    Raises HTTPError (after logging the url) when the service rejects the
    request.
    """
    self.logger.debug(
        "context_id=%s,user_id=%s,application_id=%s,session_id=%s,product_id=%s,"
        "_type=%s,meta_data=%s",
        context_id, user_id, application_id, session_id, product_id, _type, meta_data
    )
    try:
        url = "%s/%s/feedback/?application_id=%s&session_id=%s&product_id=%s&type=%s" % (
            CONTEXT_URL, context_id, application_id, session_id, product_id, _type
        )
        # BUG FIX: the original appended the literal "&user_id=%s" -- the
        # placeholder was never interpolated with the actual user_id.
        url += "&user_id=%s" % user_id if user_id is not None else ""
        request_body = {}
        if meta_data is not None:
            request_body["meta_data"] = meta_data
        http_client = HTTPClient()
        try:
            response = http_client.fetch(
                HTTPRequest(url=url, body=dumps(request_body), method="POST"))
        finally:
            # Close the client even when fetch raises (it leaked before).
            http_client.close()
        return response.headers["_rev"]
    except HTTPError:
        self.logger.error("post_context_feedback,url=%s", url)
        raise
def gen_msg_token(phone):
    # Send a 4-digit verification code to *phone* through the ihuyi SMS
    # gateway; on failure fall back to the Yuntongxun template SMS
    # service (sendTemplateSMS).  Returns True on success, or the error
    # message string on failure.
    s = DBSession()  # NOTE(review): session created but never used/closed
    code = "".join(random.sample("123456789",4))
    flag = False
    url = "http://106.ihuyi.cn/webservice/sms.php?method=Submit&account={account}&password={password}&mobile={phone}&content={content}".format(account=account,password=password,phone=phone,content=url_escape(content.format(code=code)))
    h = HTTPClient()
    try:
        res = h.fetch(url,connect_timeout = 5.0)
    except:
        # Gateway unreachable -> fall back to the template SMS service.
        # NOTE(review): {code} builds a one-element set -- confirm the
        # fallback API really expects a set here (same usage below).
        flag,msg = sendTemplateSMS(phone,{code},32417)
        if flag:
            update_code(phone,code)
            return True
        else:
            return msg
    h.close()  # NOTE(review): skipped when the except branch returns above
    root = ElementTree.fromstring(res.body.decode())
    # The gateway answers XML; first element "2" means accepted.
    if not root[0].text == '2':
        # print("[VerifyMsg]Send error:",root[0].text,root[1].text)
        # If sending failed, switch to the Yuntongxun fallback.
        flag,msg = sendTemplateSMS(phone,{code},32417)
        if flag:
            update_code(phone,code)
            return True
        else:
            return msg
    else:
        update_code(phone,code)
        return True
def getData(self,url,method,data,cookie):
    # Perform an authenticated request (cookie header) and return the
    # JSON-decoded body, or str(exception) on any failure.  *data* is a
    # sequence of (key, value) pairs.  Python 2 code ("except Exception,e").
    try:
        client = HTTPClient()
        request = HTTPRequest(
            url,
            method=method,
            headers={
                'Cookie':cookie
            }
        )
        if data and method=="GET":
            url = url_concat(url,data)
            # url_concat encodes spaces as "+"; the service wants "%20".
            url = url.replace("+","%20")
            request.url = url
        elif data and method=="POST":
            # Flatten the (key, value) pairs into a dict for form encoding.
            realData = {}
            for i in data:
                realData[i[0]] = i[1]
            data = urllib.urlencode(realData)
            request.body = data
        response = client.fetch(request)
        return json.loads(response.body)
    except Exception,e:
        # print str(e)
        #traceback.print_exc()
        return str(e)
def post(self, article_id):
    """Convert submitted HTML paragraphs to Markdown and save them,
    then redirect back to the user's article list."""
    logging.info(self.request)
    logging.info("got article_id %r from uri", article_id)
    paragraphs = self.get_argument("paragraphs", "")
    logging.info("got paragraphs %r", paragraphs)
    # Convert the page content to Markdown with html2text.
    converter = html2text.HTML2Text()
    converter.ignore_links = False
    paragraphs = converter.handle(paragraphs)
    logging.info("got paragraphs %r", paragraphs)
    random = random_x(8)
    logging.info("got random %r", random)
    session_token = self.get_secure_cookie("session_token")
    logging.info("got session_token %r from cookie", session_token)
    # Update the article's paragraphs through the blog backend.
    payload = {'paragraphs': paragraphs}
    logging.info("put body %r", payload)
    client = HTTPClient()
    reply = client.fetch(
        "http://" + AUTH_HOST + "/blog/articles/" + article_id + "/paragraphs",
        method="PUT",
        body=json_encode(payload),
        headers={"Authorization": "Bearer " + session_token})
    logging.info("got token response %r", reply.body)
    self.redirect('/blog/articles/mine?random=' + random)
def post(self):
    """Create a new blog article from the submitted form fields and
    redirect to the user's article list."""
    logging.info(self.request)
    random = random_x(8)
    logging.info("got random %r", random)
    image = self.get_argument("filename", "")
    logging.info("got image %r", image)
    title = self.get_argument("article_title", "")
    logging.info("got article_title %r", title)
    desc = self.get_argument("article_desc", "")
    logging.info("got article_desc %r", desc)
    session_token = self.get_secure_cookie("session_token")
    logging.info("got session_token %r from cookie", session_token)
    # Create the article through the blog backend service.
    payload = {'type': 'blog', 'image': image, 'title': title, 'desc': desc}
    logging.info("post body %r", payload)
    client = HTTPClient()
    reply = client.fetch("http://" + AUTH_HOST + "/blog/articles",
                         method="POST",
                         body=json_encode(payload),
                         headers={"Authorization": "Bearer " + session_token})
    logging.info("got token response %r", reply.body)
    self.redirect('/blog/articles/mine?random=' + random)
def post(self):
    """Register a new account with the auth service.

    On success re-renders the login page; on a 409 conflict (phone
    already registered) re-renders the register page with an explanation.
    """
    logging.info(self.request)
    phone = self.get_argument("registerPhone", "")
    md5pwd = self.get_argument("registerPwd", "")
    logging.info("phone %r", phone)
    try:
        url = "http://" + AUTH_HOST + "/auth/account"
        body_data = {"appid": APPID, "app_secret": APP_SECRET,
                     "login": phone, "pwd": md5pwd}
        logging.info("post body %r", body_data)
        _json = json_encode(body_data)
        http_client = HTTPClient()
        response = http_client.fetch(url, method="POST", body=_json)
        logging.info("got token response %r", response.body)
        _err_msg = _("You have already register an account, please login.")
        self.render('auth/login.html', err_msg=_err_msg)
    except Exception:
        # BUG FIX: was a bare except (would also trap SystemExit /
        # KeyboardInterrupt).
        err_title = str(sys.exc_info()[0])
        err_detail = str(sys.exc_info()[1])
        logging.error("error: %r info: %r", err_title, err_detail)
        if err_detail == 'HTTP 409: Conflict':
            _err_msg = _("This phone already exist, please enter a new one.")
        else:
            _err_msg = _(err_detail)
        self.render('auth/register.html', err_msg=_err_msg)
def parser(self):
    """Scrape the JWC news page into {'code': ..., 'content': {...}};
    code 400 when the fetch or parse fails."""
    retjson = {'code': 200, 'content': ''}
    header = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate, sdch',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Host': 'jwc.seu.edu.cn',
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1'
    }
    try:
        client = HTTPClient()
        request = HTTPRequest(JWC_URL, method='GET', headers=header,
                              request_timeout=TIME_OUT)
        response = client.fetch(request)
        html = response.body
        soup = BeautifulSoup(html)
        items = soup.findAll('table', {'width': "100%"})
        # Fixed table offsets matching the current JWC page layout
        # (brittle by nature; revisit when the page changes).
        info = {
            '最新动态': self.abstract(items[12:17], 1),
            '教务信息': self.abstract(items[34:34+7], 0),
            '学籍管理': self.abstract(items[45:45+7], 0),
            '实践教学': self.abstract(items[56:56+7], 0),
            '合作办学': self.abstract(items[67:67+4], 0),
        }
        retjson = {'code': 200, 'content': info}
    except Exception:
        # BUG FIX: was a bare except (would also trap SystemExit).
        retjson['code'] = 400
        # print traceback.print_exc()
    return retjson
def getData(self,url,method,data,cookie):
    # Perform an authenticated request (cookie header) and return the
    # raw response body, or None on any failure.  *data* is a
    # JSON-encoded string of query/form parameters.  Python 2 code
    # (print statement, "except Exception,e").
    try:
        client = HTTPClient()
        request = HTTPRequest(
            url,
            method=method,
            headers={
                'Cookie':cookie
            }
        )
        if data and method=="GET":
            data = json.loads(data)
            url = url_concat(url,data)
            request.url = url
        elif data and method=="POST":
            data = json.loads(data)
            print data
            data = urllib.urlencode(data)
            request.body = data
        # print request.url
        response = client.fetch(request)
        return response.body
    except Exception,e:
        # print str(e)
        return None
def sina_ip(ip):
    # Look up the geographic attribution of *ip* via Sina's iplookup
    # service and return a display string like "网友183.*.*.171[city]".
    attribution = ""
    if ip == "127.0.0.1":
        # Local requests get a fixed sample address for testing.
        ip = '183.208.22.171'
    http_client = HTTPClient()
    response = None
    url = "http://int.dpool.sina.com.cn/iplookup/iplookup.php?format=js&ip={0}".format(ip)
    try:
        response = http_client.fetch(url, method='GET', request_timeout=120)
    except Exception as e:
        request_log.info(e)
    finally:
        http_client.close()
    if response and response.code == 200:
        # SECURITY NOTE(review): eval() on a remote response body.  The
        # [21:-1] slice presumably strips a JS assignment wrapper around
        # the payload; this should become json.loads on the sliced text
        # once the payload format is confirmed.
        response_body = eval(response.body.decode('utf8')[21:-1])
        try:
            province = response_body['province']
            city = response_body['city']
            attribution = city #+province
        except Exception as e:
            error_log.error(e)
    # Mask the middle octets of the ip in the display string.
    ip_piece = ip.split(".")
    ip_piece[1] = '*'
    ip_piece[2] = '*'
    ip_attribution = '网友' + '.'.join(ip_piece) + '[' + attribution + ']'
    request_log.info(ip_attribution)
    return ip_attribution
class SyncHTTPClientTest(unittest.TestCase):
    """Exercises the blocking HTTPClient against a real HTTPServer that
    runs on its own IOLoop in a background thread."""

    def setUp(self):
        # The server gets a private IOLoop on a background thread; the
        # event makes setUp wait until it is actually listening.
        self.server_ioloop = IOLoop()
        event = threading.Event()

        @gen.coroutine
        def init_server():
            sock, self.port = bind_unused_port()
            app = Application([("/", HelloWorldHandler)])
            self.server = HTTPServer(app)
            self.server.add_socket(sock)
            event.set()

        def start():
            self.server_ioloop.run_sync(init_server)
            self.server_ioloop.start()

        self.server_thread = threading.Thread(target=start)
        self.server_thread.start()
        event.wait()
        self.http_client = HTTPClient()

    def tearDown(self):
        def stop_server():
            self.server.stop()
            # Delay the shutdown of the IOLoop by several iterations because
            # the server may still have some cleanup work left when
            # the client finishes with the response (this is noticeable
            # with http/2, which leaves a Future with an unexamined
            # StreamClosedError on the loop).

            @gen.coroutine
            def slow_stop():
                # The number of iterations is difficult to predict. Typically,
                # one is sufficient, although sometimes it needs more.
                for i in range(5):
                    yield
                self.server_ioloop.stop()

            self.server_ioloop.add_callback(slow_stop)

        self.server_ioloop.add_callback(stop_server)
        self.server_thread.join()
        self.http_client.close()
        self.server_ioloop.close(all_fds=True)

    def get_url(self, path):
        # Absolute URL for *path* on the test server.
        return "http://127.0.0.1:%d%s" % (self.port, path)

    def test_sync_client(self):
        response = self.http_client.fetch(self.get_url("/"))
        self.assertEqual(b"Hello world!", response.body)

    def test_sync_client_error(self):
        # Synchronous HTTPClient raises errors directly; no need for
        # response.rethrow()
        with self.assertRaises(HTTPError) as assertion:
            self.http_client.fetch(self.get_url("/notfound"))
        self.assertEqual(assertion.exception.code, 404)
def shutdown_all(self, now=False):
    """Shutdown all kernels."""
    # Note: We have to make this sync because the NotebookApp does not wait for async.
    deleted = []
    kwargs = {'method': 'DELETE'}
    kwargs = GatewayClient.instance().load_connection_args(**kwargs)
    client = HTTPClient()
    for kernel_id in self._kernels.keys():
        kernel_url = self._get_kernel_endpoint_url(kernel_id)
        self.log.debug("Request delete kernel at: %s", kernel_url)
        try:
            response = client.fetch(kernel_url, **kwargs)
        except HTTPError:
            pass
        else:
            self.log.debug("Delete kernel response: %d %s",
                           response.code, response.reason)
            deleted.append(kernel_id)  # avoid changing dict size during iteration
    client.close()
    for kernel_id in deleted:
        self.remove_kernel(kernel_id)
def _on_auth(self, user):
    """OAuth callback: verify the user against the flower user endpoint
    and set the admin login cookie.

    Raises HTTPError 500 when upstream authentication failed and 403 when
    the user is not an admin.
    """
    if not user:
        raise HTTPError(500, 'OAuth authentication failed')
    access_token = user['access_token']
    req = HTTPRequest(os.environ['fossir_FLOWER_USER_URL'],
                      headers={'Authorization': 'Bearer ' + access_token,
                               'User-agent': 'Tornado auth'},
                      validate_cert=False)
    response = HTTPClient().fetch(req)
    payload = json.loads(response.body.decode('utf-8'))
    # BUG FIX: payload['admin'] raised KeyError when the key was absent;
    # a payload without "admin" must be denied access instead.
    if not payload or not payload.get('admin'):
        raise HTTPError(403, 'Access denied')
    self.set_secure_cookie('user', 'fossir Admin')
    self.redirect(self.get_argument('next', '/'))
def setUp(self):
    # Skip loop implementations that cannot host a second IOLoop on a
    # background thread -- the synchronous client needs its own loop.
    if IOLoop.configured_class().__name__ in ('TwistedIOLoop',
                                              'AsyncIOMainLoop'):
        # TwistedIOLoop only supports the global reactor, so we can't have
        # separate IOLoops for client and server threads.
        # AsyncIOMainLoop doesn't work with the default policy
        # (although it could with some tweaks to this test and a
        # policy that created loops for non-main threads).
        raise unittest.SkipTest(
            'Sync HTTPClient not compatible with TwistedIOLoop or '
            'AsyncIOMainLoop')
    self.server_ioloop = IOLoop()
    sock, self.port = bind_unused_port()
    app = Application([('/', HelloWorldHandler)])
    self.server = HTTPServer(app, io_loop=self.server_ioloop)
    self.server.add_socket(sock)
    # The server loop runs on a background thread for the whole test.
    self.server_thread = threading.Thread(target=self.server_ioloop.start)
    self.server_thread.start()
    self.http_client = HTTPClient()
def getData(self, url, method, data, cookie):
    # Perform an authenticated request (cookie header) and return the
    # JSON-decoded body, or str(exception) on any failure.  *data* is a
    # sequence of (key, value) pairs.  Python 2 code ("except Exception, e").
    try:
        client = HTTPClient()
        request = HTTPRequest(url, method=method, headers={'Cookie': cookie})
        if data and method == "GET":
            url = url_concat(url, data)
            # url_concat encodes spaces as "+"; the service wants "%20".
            url = url.replace("+", "%20")
            request.url = url
        elif data and method == "POST":
            # Flatten the (key, value) pairs into a dict for form encoding.
            realData = {}
            for i in data:
                realData[i[0]] = i[1]
            data = urllib.urlencode(realData)
            request.body = data
        response = client.fetch(request)
        return json.loads(response.body)
    except Exception, e:
        # print str(e)
        #traceback.print_exc()
        return str(e)
def get_api_return(api_name, user):
    """POST the user's uuid to SERVICE+api_name and normalise the reply
    into {'code': ..., 'content': ...}; always returns the dict."""
    ret = {}
    client = HTTPClient()
    params = urllib.urlencode({'uuid': user.uuid})
    request = HTTPRequest(SERVICE + api_name, method='POST', body=params,
                          request_timeout=TIME_OUT)
    try:
        response = client.fetch(request)
        ret = json.loads(response.body)
        if ret['code'] == 200:
            return ret
        ret['content'] = error_map[ret['code']]
    except HTTPError as e:
        ret['code'] = e.code
        if ret['code'] == 401:
            ret['content'] = error_map[401] % (LOCAL, user.openid)
        else:
            ret['content'] = error_map[ret['code']]
    except Exception:
        # BUG FIX: was a bare except (would also trap KeyboardInterrupt).
        ret['code'] = 500
        ret['content'] = u'=。= 未知的错误'
    return ret
def get(self, vendor_id):
    """List the applies of one club/vendor, with display-friendly fields."""
    logging.info("got vendor_id %r in uri", vendor_id)
    product_type = self.get_argument("product_type", "all")
    page = self.get_argument("page", 1)
    logging.debug("get page=[%r] from argument", page)
    limit = self.get_argument("limit", 20)
    logging.debug("get limit=[%r] from argument", limit)
    access_token = self.get_access_token()
    query = url_concat(API_DOMAIN + "/api/applies", {
        "filter": "club",
        "club_id": vendor_id,
        "page": page,
        "limit": limit,
        "product_type": product_type,
    })
    client = HTTPClient()
    response = client.fetch(query, method="GET",
                            headers={"Authorization": "Bearer " + access_token})
    logging.info("got response.body %r", response.body)
    rs = json_decode(response.body)['rs']
    applies = rs['data']
    for _apply in applies:
        # Order time: timestamp -> "%m月%d 星期%w" style display string.
        _apply['create_time'] = timestamp_datetime(float(_apply['create_time']))
        _apply['gender'] = u'男' if _apply['gender'] == 'male' else u'女'
    _json = json_encode(applies)
    logging.info("got _json %r", _json)
    self.write(JSON.dumps(rs, default=json_util.default))
    self.finish()
def post(self, article_id):
    """Save the submitted paragraphs for *article_id* and redirect back
    to the user's article list."""
    logging.info(self.request)
    logging.info("got article_id %r from uri", article_id)
    paragraphs = self.get_argument("paragraphs", "")
    logging.info("got paragraphs %r", paragraphs)
    random = random_x(8)
    logging.info("got random %r", random)
    session_token = self.get_secure_cookie("session_token")
    logging.info("got session_token %r from cookie", session_token)
    # Update the article's paragraph content through the backend.
    payload = {'paragraphs': paragraphs}
    logging.info("put body %r", payload)
    client = HTTPClient()
    reply = client.fetch(
        "http://" + AUTH_HOST + "/blog/articles/" + article_id + "/paragraphs",
        method="PUT",
        body=json_encode(payload),
        headers={"Authorization": "Bearer " + session_token})
    logging.info("got token response %r", reply.body)
    self.redirect('/blog/articles/mine?random=' + random)
def mdsShake(self, do, path, data=None):
    # Perform a "handshake" call against the MDS host.  When threading is
    # enabled the request is actually sent and the decoded reply dict is
    # returned; otherwise a JSON string {"status": 0} is returned.
    # NOTE(review): the two branches return different types (dict vs str)
    # -- confirm callers handle both.
    status = 0
    if self.threading:
        http_client = HTTPClient()
        http_request = HTTPRequest(url=self.mdsHost,
                                   method="POST",
                                   body=json.dumps({
                                       "code": self.mdsCode,
                                       "do": do,
                                       "path": path,
                                       "data": data
                                   }))
        response = http_client.fetch(http_request)
        over = json.loads(response.body)
        if "status" in over:
            status = over["status"]
        # NOTE(review): assert is stripped under "python -O"; an explicit
        # raise would make this status check unconditional.
        assert_result = "mds - ERROR {}\n{} on {}".format(status, do, path)
        assert status == 200, assert_result
        return over
    else:
        status = {"status": status}
        return json.dumps(status)
def get_session(self):
    """Create an ILO4 REST session and return its Location header and
    X-Auth-Token, keyed by the class constants."""
    fetch_url = 'https://%s/%s' % (self.ilo4_host, self.auth_url)
    post_data = json.dumps({"UserName": self.user_name,
                            "Password": self.user_password})
    headers = HTTPHeaders()
    headers.add('Content-Type', 'application/json')
    client = HTTPClient()
    try:
        response = client.fetch(fetch_url,
                                method='POST',
                                headers=headers,
                                validate_cert=False,
                                body=post_data)
    except HTTPError as error:
        gen_log.error('ILO4 %s session error:%s' % (self.ilo4_host, error))
        raise error
    else:
        return {
            self.SESSTION_LOCATION: response.headers['Location'],
            self.SESSION_X_AUTH_TOKEN: response.headers['X-Auth-Token'],
        }
def _fetch(self, path, **kwargs):
    """Fetch *path* from the backend, validate its checkpoint, and return
    the decoded JSON payload.

    Raises BackendConnectionError / BackendIntegrityError on transport or
    payload problems, BackendMaxDriftError when the checkpoint is stale,
    and TimeTravelNotAllowed when the checkpoint went backwards.
    """
    http_client = HTTPClient()
    server = self.backends.server
    url = HTTPRequest("http://{}:{}{}".format(server.hostname, server.port, path),
                      connect_timeout=self.timeout,
                      request_timeout=self.timeout, **kwargs)
    try:
        out = json.loads(http_client.fetch(url).body)
    except HTTPError as err:
        if err.code == 599:
            # 599 is tornado's synthetic "connection failed" code.
            raise exc.BackendConnectionError(err.message, server)
        try:
            out = json.loads(err.response.body)
            if "status" not in out:
                raise exc.BackendIntegrityError(err.message, server)
        except (ValueError, TypeError):
            raise exc.BackendIntegrityError(err.message, server)
    now = time.time()
    drift = now - out["checkpoint_time"]
    # BUG FIX: the comparison was inverted (max_drift > abs(drift)) and
    # raised when the drift was SMALL; we must complain when the observed
    # drift exceeds the allowed maximum.
    if self.max_drift is not None and abs(drift) > self.max_drift:
        raise exc.BackendMaxDriftError(
            "Backend last checkpoint stale by {} seconds.".format(drift),
            server)
    with self._lock:
        new_checkpoint = Checkpoint(out["checkpoint"], out["checkpoint_time"])
        old_checkpoint = self.checkpoint
        if not _checkpoint_is_greater(new_checkpoint, old_checkpoint) and \
                not self.allow_time_travel:
            raise exc.TimeTravelNotAllowed(
                "Received checkpoint of {} when previously {}".format(
                    new_checkpoint, old_checkpoint),
                server)
        self.checkpoint = new_checkpoint
    return out
def get(self, vendor_id, activity_id):
    """List the orders of one activity, with display-friendly fields."""
    logging.info("got vendor_id %r in uri", vendor_id)
    logging.info("got activity_id %r in uri", activity_id)
    product_type = self.get_argument("product_type", "all")
    page = self.get_argument("page", 1)
    logging.debug("get page=[%r] from argument", page)
    limit = self.get_argument("limit", 20)
    logging.debug("get limit=[%r] from argument", limit)
    access_token = self.get_access_token()
    query = url_concat(API_DOMAIN + "/api/orders", {
        "filter": "item",
        "item_id": activity_id,
        "page": page,
        "limit": limit,
        "product_type": product_type,
    })
    client = HTTPClient()
    response = client.fetch(query, method="GET",
                            headers={"Authorization": "Bearer " + access_token})
    logging.info("got response.body %r", response.body)
    rs = json_decode(response.body)['rs']
    for order in rs['data']:
        # Order/booking time: timestamp -> "%m月%d 星期%w" display string.
        order['create_time'] = timestamp_datetime(float(order['create_time']))
        order['booking_time'] = timestamp_datetime(float(order['booking_time']))
        # Amounts are stored in cents; convert to currency units.
        order['amount'] = float(order['amount']) / 100
        order['actual_payment'] = float(order['actual_payment']) / 100
    self.write(JSON.dumps(rs, default=json_util.default))
    self.finish()
def sync_fuzz(self, requests, delay=0, follow_cookies=True):
    '''This is the synchronous fuzzing engine. Useful for fuzzing with
    delays and fuzzing that follows cookies'''
    self.reset()
    http_client = HTTPClient()
    cookie = None
    for request in requests:
        response = None
        try:
            if follow_cookies and cookie:
                # BUG FIX: the source attribute is "headers", not
                # "header" (AttributeError at runtime before).
                request.headers = HTTPHelper.add_header_param(
                    request.headers, "Cookie", cookie)
            response = http_client.fetch(request)
        except HTTPError as e:
            if e.response:
                response = e.response
        # BUG FIX: when e.response was None, the previous iteration's
        # response (or an unbound name on the first request) was appended;
        # only record real responses.
        if response is not None:
            self.responses.append(response)
            if follow_cookies and "Set-Cookie" in response.headers:
                cookie = response.headers["Set-Cookie"]
        if delay:
            sleep(delay)
    return self.responses
def send_request(self, method, url, data=None, data_type="json"):
    # Send an HTTP request to the controller, honouring dry-run mode and the
    # fail_silently flag. Returns the HTTPResponse on success, a fake JSON
    # string in dry-run mode, or None when a request error is swallowed.
    method = method.upper()
    # POST/PUT carry a body; CUD = any mutating verb (POST/PUT/DELETE).
    has_payload = method == self.POST or method == self.PUT
    is_CUD = has_payload or method == self.DELETE
    full_url = urlparse.urlunparse((controller_address['scheme'], controller_address['host'] + ':' + str(controller_address['port']), url, None, None, None))
    headers = {
        'Content-Type': HttpClient.set_content_type(data_type)
    }
    request = HTTPRequest(url=full_url, method=method, headers=headers,
                          auth_username=controller_auth['username'],
                          auth_password=controller_auth['password'],
                          # timeouts come from settings with 3s/10s fallbacks
                          connect_timeout=http_client_settings.get("timeouts", {}).get("connect", 3),
                          request_timeout=http_client_settings.get("timeouts", {}).get("request", 10))
    if has_payload:
        if data_type == "json":
            request.body = json.dumps(data)
    if is_CUD:
        # Mutating requests are logged before being sent (or skipped).
        if self.dry_run:
            logger.info("\nDRY RUN")
        logger.debug("\n\nSending {} request.\nUrl: {}\nBody: {}\n".format(method, full_url, request.body))
    if is_CUD and self.dry_run:
        # Dry run: never hit the network, fabricate an "ok" response body.
        response = json.dumps({
            "status": "ok",
            "msg": "dry_run"
        })
    else:
        try:
            response = HTTPClient().fetch(request)
            # Non-2xx codes become exceptions unless fail_silently is set.
            if not self.fail_silently and not self.is_ok(response.code):
                raise HttpClientException(response)
            logger.debug("\n\nResponse ({}).\nUrl: {}\nBody: {}\n".format(response.code, full_url, response.body))
            return response
        except HTTPError as e:
            # NOTE(review): e.message is Tornado<6 / py2-era API — confirm
            # the pinned tornado version still provides it.
            logger.debug("HttpClient error: {}".format(e.message))
            if not self.fail_silently:
                raise HttpClientException(e)
            return None
    # Only reached on the dry-run path; returns the fabricated JSON string.
    return response
def make_order(openid, title, order_no, fee, remote_ip):
    """Create a WeChat JSAPI unified order and return the signed pay params.

    Returns a dict of JSAPI payment parameters on success, an empty dict when
    the gateway declines or the response signature fails verification, and
    None when an exception occurs (only logged).
    """
    # log.info('================openid===========')
    # log.info(openid)
    # Unified-order request fields per the WeChat Pay API.
    params = {
        'appid': YOUCAI_WXPAY_CONF['appid'],
        'mch_id': YOUCAI_WXPAY_CONF['mchid'],
        'nonce_str': uuid.uuid4().hex,
        'body': title,
        'detail': '公众号扫码订单',
        'out_trade_no': order_no,
        'total_fee': fee,  # amount in cents, per WeChat Pay convention -- TODO confirm
        'spbill_create_ip': remote_ip,
        'notify_url': YOUCAI_WXPAY_CONF['notify'],
        'trade_type': 'JSAPI',
        'openid': openid
    }
    # log.info('================params===========')
    # log.info(params)
    # Sign over all fields, then attach the signature itself.
    params.update({'sign': wxpay_sign(params)})
    try:
        # WeChat Pay speaks XML; full_document=False drops the XML prolog.
        xml = xmltodict.unparse({'xml': params}, full_document=False)
        resp = HTTPClient().fetch(YOUCAI_WXPAY_CONF['url'] + '/pay/unifiedorder',
                                  method='POST', body=xml)
        ret = xmltodict.parse(resp.body.decode())['xml']
        pay_params = {}
        if ret['return_code'] == 'SUCCESS' and ret['result_code'] == 'SUCCESS':
            # Verify the gateway's signature before trusting prepay_id.
            sign = ret.pop('sign')
            if sign == wxpay_sign(ret):
                pay_params = {
                    'appId': YOUCAI_WXPAY_CONF['appid'],
                    'timeStamp': round(time.time()),
                    'nonceStr': uuid.uuid4().hex,
                    'package': 'prepay_id={prepay_id}'.format(prepay_id=ret['prepay_id']),
                    'signType': 'MD5'
                }
                ret_sign = wxpay_sign(pay_params)
                pay_params.update({'paySign': ret_sign})
        else:
            # Gateway declined the order; keep the full response for triage.
            log.error(ret)
        return pay_params
    except Exception as e:
        # Best-effort: swallow and log; caller receives None.
        log.error(e)
class SyncHTTPClientTest(unittest.TestCase):
    """Exercise the blocking HTTPClient against a server on its own IOLoop
    running in a background thread."""

    def setUp(self):
        if IOLoop.configured_class().__name__ == 'TwistedIOLoop':
            # TwistedIOLoop only supports the global reactor, so we can't have
            # separate IOLoops for client and server threads.
            raise unittest.SkipTest(
                'Sync HTTPClient not compatible with TwistedIOLoop')
        # Server side: dedicated IOLoop + HTTPServer driven by a thread.
        self.server_ioloop = IOLoop()
        sock, self.port = bind_unused_port()
        app = Application([('/', HelloWorldHandler)])
        server = HTTPServer(app, io_loop=self.server_ioloop)
        server.add_socket(sock)
        self.server_thread = threading.Thread(target=self.server_ioloop.start)
        self.server_thread.start()
        # Client side: blocking HTTPClient exercised from the test thread.
        self.http_client = HTTPClient()

    def tearDown(self):
        # Stop the server loop from its own thread, then join and clean up.
        self.server_ioloop.add_callback(self.server_ioloop.stop)
        self.server_thread.join()
        self.http_client.close()
        self.server_ioloop.close(all_fds=True)

    def get_url(self, path):
        """Build an absolute URL for *path* on the test server."""
        return 'http://localhost:%d%s' % (self.port, path)

    def test_sync_client(self):
        response = self.http_client.fetch(self.get_url('/'))
        self.assertEqual(b'Hello world!', response.body)

    def test_sync_client_error(self):
        # Synchronous HTTPClient raises errors directly; no need for
        # response.rethrow()
        with self.assertRaises(HTTPError) as assertion:
            self.http_client.fetch(self.get_url('/notfound'))
        self.assertEqual(assertion.exception.code, 404)
def get(self, article_id):
    """Fetch one article, render its markdown body to HTML, rewrite inline
    images, and return the article as JSON."""
    logging.info(self.request)
    logging.info("got article_id %r from uri", article_id)
    url = "http://" + AUTH_HOST + "/blog/articles/" + article_id
    http_client = HTTPClient()
    response = http_client.fetch(url, method="GET")
    logging.info("got article response %r", response.body)
    article = json_decode(response.body)
    article["publish_time"] = time_span(article["publish_time"])
    # BUG FIX: dict.has_key() was removed in Python 3; use the `in` operator
    # (equivalent on Python 2 as well).
    if 'paragraphs' in article:
        html = markdown.markdown(article['paragraphs'])
        logging.info("got article paragraphs %r", html)
        # Prepare data for image lazy-loading, e.g.:
        # <img alt="" src="http://bighorn.b0.upaiyun.com/blog/2016/11/2/758f7478-d406-4f2e-9566-306a963fb979" />
        # <img data-original="real image" src="placeholder image">
        ptn = "(<img alt=\"\" src=\"http[s]*://[\w\.\/\-]+\" />)"
        img_ptn = re.compile(ptn)
        imgs = img_ptn.findall(html)
        for img in imgs:
            logging.info("got img %r", img)
            ptn = "<img alt=\"\" src=\"(http[s]*://[\w\.\/\-]+)\" />"
            url_ptn = re.compile(ptn)
            urls = url_ptn.findall(html)
            url = urls[0]
            logging.info("got url %r", url)
            # lazy load
            #html = html.replace(img, "<img class=\"lazy\" width=\"100%\" data-original=\""+url+"\" />")
            # not lazy load
            html = html.replace(
                img,
                "<img class=\"lazy\" width=\"100%\" src=\"" + url + "\" />")
            logging.info("got html %r", html)
        article['paragraphs'] = html
    self.finish(JSON.dumps(article))
def run_register():
    """Interactively register this node with the server and write the
    resulting node id/key into conf/node.conf."""
    key = input('Please input node key:')
    key_secret = input('Please input node key_secret:')
    config = AgentConfig()
    server_base = config.get('server')
    # If the configured server has no scheme, derive one from the ports:
    # prefer HTTPS when an https port is configured.
    if urlparse(server_base).scheme == '':
        if config.getint('server_https_port'):
            server_base = 'https://%s:%d' % (
                server_base, config.getint('server_https_port'))
        else:
            server_base = 'http://%s:%d' % (server_base,
                                            config.getint('server_port'))
    register_url = urljoin(server_base, '/nodes/register')
    post_data = {'node_key': key}
    request = authenticated_request(url=register_url, method="POST",
                                    app_key=key, app_secret=key_secret,
                                    body=urlencode(post_data))
    client = HTTPClient()
    try:
        response = client.fetch(request)
        response_data = json.loads(response.body)
        if not os.path.exists('conf'):
            os.makedirs('conf')
        with open('conf/node.conf', 'w') as f:
            cp = SafeConfigParser()
            cp.add_section('agent')
            cp.set('agent', 'node_id', str(response_data['id']))
            cp.set('agent', 'node_key', key)
            cp.set('agent', 'secret_key', key_secret)
            cp.write(f)
        print('Register succeed!')
    except HTTPError as e:
        print("Error when registering.")
        # BUG FIX: Exception.message does not exist on Python 3 / Tornado 6;
        # printing the exception itself works everywhere.
        print(e)
def sendItemOrderPayedToOpsMessage(access_token, wx_notify_domain, openid, order):
    """POST a WeChat template message notifying ops that an item order was
    paid. `openid` is the shop assistant's openid."""
    result_url = (wx_notify_domain + "/bf/wx/vendors/" + order['club_id'] +
                  "/items/order/" + order['_id'] + "/result")
    fields = {
        "first": {
            "value": u"有用户下单并支付成功; 来自系统: " + wx_notify_domain,
            "color": "#173177"
        },
        "orderMoneySum": {
            "value": str(float(order['actual_payment']) / 100) + "元",
            "color": "#173177"
        },
        "orderProductName": {
            "value": order['item_name'],
            "color": "#173177"
        },
        "Remark": {
            "value": u"下单时间: " + timestamp_datetime(order['create_time']),
            "color": "#173177"
        },
    }
    payload = json_encode({
        "touser": openid,
        "template_id": "ewhVRnCh7bkGwOlfPXh13EDT1V1AhdoWJvr76r4BTjo",
        "url": result_url,
        "data": fields,
    })
    send_url = ("https://api.weixin.qq.com/cgi-bin/message/template/send"
                "?access_token=" + access_token)
    client = HTTPClient()
    resp = client.fetch(send_url, method="POST", body=payload)
    logging.info("got sendItemOrderPayedToOpsMessage response %r", resp.body)
def product_query():
    """Query the upstream quotation API and print one line per offer:
    carrier name, data size, discounted price."""
    OP_MAP = {
        0: '移动',
        1: '联通',
        2: '电信'
    }
    http_client = HTTPClient()
    t = time.localtime()
    tsp = int(time.mktime(t))
    user = '******'
    sign = signature(user, str(tsp), 'RtGpAPFoXiSY8BgDWz85V7GPPFJeWvoh')
    url = 'http://122.224.212.160:8980/common/quotation.action?cpUser={user}&time={tsp}&sign={sign}'.format(
        user=user, tsp=tsp, sign=sign)
    print(url)
    try:
        response = http_client.fetch(url, method='GET')
        if response.code == 200:
            body = response.body.decode()
            resp = json.loads(body)
            # BUG FIX: guard against a missing/None "data" key, which made
            # the original raise TypeError when iterating.
            for obj in resp.get('data') or []:
                op_code = obj['opCode']
                size = obj['amount']
                price = obj['discountPrice']
                print('%s,%s,%s' % (OP_MAP.get(op_code), size, price))
    finally:
        # Always release the client, even when fetch raises.
        http_client.close()
def import_data():
    """Rebuild the Elasticsearch courses index from Postgres.

    Drops the index named by ES_INDEX (if present), recreates it, then bulk
    loads course rows joined with aggregated section data in BATCH_SIZE
    chunks. Raises when ES_INDEX is unset.
    """
    pg = pg_sync()
    es_index = os.getenv('ES_INDEX')
    if not es_index:
        raise Exception("ES_INDEX variable not set")
    es_host = os.getenv('ES_HOST', 'localhost')
    es_port = os.getenv('ES_PORT', '9200')
    es_type = 'courses'
    base_url = 'http://' + es_host + ':' + es_port + '/'
    http = HTTPClient()
    # Best-effort delete: a 404 (index absent) is expected on first run.
    try:
        resp = http.fetch(base_url + es_index, method='DELETE')
    except HTTPError:
        pass
    # Recreate the index with default settings (empty body).
    resp = http.fetch(base_url + es_index, method='PUT', body='')
    resp.rethrow()
    batch = []
    # One row per course, with its sections' terms/instructors/callnumbers
    # collapsed into arrays.
    query = ('SELECT %s, array_agg(DISTINCT s.term) AS \"term\", '
             'array_agg(DISTINCT s.instructor1name) as \"instructor\", '
             'array_agg(DISTINCT s.callnumber) as \"callnumber\" '
             'FROM courses_v2_t c JOIN sections_v2_t s '
             'ON c.course = s.course GROUP BY c.course') % ', '.join(
                 'c.' + colname for colname in COURSE_COLUMNS)
    cursor = pg.cursor()
    cursor.execute(query)
    for row in cursor:
        add_bulk_item(batch, row, es_index, es_type)
        if len(batch) == BATCH_SIZE:
            submit_batch(base_url, batch)
            del batch[:]  # empty out batch so we can add more to it
    # if there are any more items in the batch, submit them
    if len(batch) > 0:
        submit_batch(base_url, batch)
def send(self, data, headers):
    """POST *data* to the configured URL; goes async when an IOLoop is
    already running, otherwise falls back to a blocking fetch."""
    options = {
        'method': 'POST',
        'headers': headers,
        'body': data,
        'validate_cert': self.verify_ssl,
        'connect_timeout': self.timeout,
        'ca_certs': self.ca_certs,
    }
    # only use async if ioloop is running, otherwise it will never send
    if ioloop.IOLoop.initialized():
        client = AsyncHTTPClient()
        options['callback'] = None
    else:
        client = HTTPClient()
    client.fetch(self._url, **options)
def sendStageRequest(patientId, timestamp, station):
    """PUT the area-entering time for *station* onto the patient record.

    Best-effort: failures are printed, not raised.
    """
    # Add area entering time
    body = {station: str(datetime.fromtimestamp(timestamp))}
    client = HTTPClient()
    try:
        headers = {'Content-Type': 'application/json; charset=UTF-8'}
        url = "http://" + serverIp + ":5000/api/patient/" + patientId.rstrip()
        print(url)
        from pprint import pprint
        pprint(body)
        request = HTTPRequest(url, method="PUT", body=json.dumps(body),
                              headers=headers)
        response = client.fetch(request)
        print(response.body)
    except Exception as e:
        print("error in add stage request")
        print("Error: " + str(e))
    finally:
        # BUG FIX: the client used to be closed only on the success path,
        # leaking it whenever fetch raised; always close.
        client.close()
    return
def shutdown_all(self):
    """Shutdown all kernels."""
    # TODO: Is it appropriate to do this? Is this notebook server the
    # only client of the kernel gateway?
    # TODO: We also have to make this sync because the NotebookApp does not
    # wait for async.
    client = HTTPClient()
    for kernel_id in self._kernels.keys():
        kernel_url = url_path_join(KG_URL, self._kernel_id_to_url(kernel_id))
        self.log.info("Request delete kernel at: %s", kernel_url)
        try:
            response = client.fetch(kernel_url,
                                    headers=KG_HEADERS,
                                    method='DELETE',
                                    validate_cert=VALIDATE_KG_CERT,
                                    auth_username=KG_HTTP_USER,
                                    auth_password=KG_HTTP_PASS)
        except HTTPError:
            # BUG FIX: the original fell through and logged `response`
            # after swallowing the error, which raised NameError on the
            # first failure (or logged a stale response). Log and move on.
            self.log.info("Delete kernel failed at: %s", kernel_url)
            continue
        self.log.info("Delete kernel response: %d %s",
                      response.code, response.reason)
    client.close()
def inner(*args, **kwargs):
    """Delete every non-special database on the CouchDB server, then invoke
    the wrapped function with a clean slate."""
    global baseurl
    cli = HTTPClient()
    # Delete all old databases
    response = cli.fetch('%s_all_dbs' % baseurl)
    try:
        dbs = json.loads(response.body)
    except ValueError:
        # BUG FIX: the original interpolated an undefined name `db_string`
        # (NameError) and used the py2-only `print >>` syntax; report the
        # actual body that failed to parse.
        print("CouchDB's response was invalid JSON: %s" % response.body,
              file=sys.stderr)
        sys.exit(2)
    for database in dbs:
        if database.startswith('_'):
            # Skip special databases like _users
            continue
        cli.fetch(
            '%s%s' % (baseurl, database),
            method='DELETE',
        )
    return func(baseurl, *args, **kwargs)
def start_lab(self, url_root, cwd):
    """Spawn a lab server subprocess and poll it over HTTP until it answers
    (up to ~5s); returns the Popen handle, or None if it never responds."""
    lab = subprocess.Popen(self.lab_args(), cwd=cwd)
    attempts = 10
    while attempts:
        probe = "{}?token={}".format(url_root, self.token)
        try:
            HTTPClient().fetch(probe, method="GET")
        except Exception:
            attempts -= 1
            time.sleep(0.5)
        else:
            return lab
def get(self, article_id):
    """Fetch one article, convert its markdown body to HTML, and render the
    paragraphs-edit page."""
    logging.info(self.request)
    logging.info("got article_id %r from uri", article_id)
    random = random_x(8)
    logging.info("got random %r", random)
    url = "http://" + AUTH_HOST + "/blog/articles/" + article_id
    http_client = HTTPClient()
    response = http_client.fetch(url, method="GET")
    logging.info("got response %r", response.body)
    article = json_decode(response.body)
    # Convert the article body from markdown to HTML.
    # BUG FIX: dict.has_key() was removed in Python 3; use the `in` operator
    # (equivalent on Python 2 as well).
    if 'paragraphs' in article:
        html = markdown.markdown(article['paragraphs'])
        article['paragraphs'] = html
    else:
        article['paragraphs'] = ''
    self.render('blog/paragraphs-edit.html',
                random=random,
                article=article)
def get(self, league_id):
    """Fetch one franchise of a league and render the supplier page."""
    logging.info("GET %r", self.request.uri)
    access_token = self.get_access_token()
    club_id = self.get_argument("club_id", "")
    logging.info("got club_id %r", club_id)
    url = API_DOMAIN + "/api/leagues/" + league_id + "/franchises/" + club_id
    http_client = HTTPClient()
    headers = {"Authorization": "Bearer " + access_token}
    response = http_client.fetch(url, method="GET", headers=headers)
    logging.info("got response %r", response.body)
    data = json_decode(response.body)
    franchise = data['rs']
    franchise['create_time'] = timestamp_datetime(franchise['create_time'])
    # BUG FIX: dict.has_key() was removed in Python 3; use the `in` operator
    # (equivalent on Python 2 as well). Ensure the template always sees an
    # 'img' key on the club.
    if 'img' not in franchise['club']:
        franchise['club']['img'] = ''
    self.render('resale/supplier.html',
                api_domain=API_DOMAIN,
                access_token=access_token,
                league_id=league_id,
                franchise=franchise)
def _request_fetch(request):
    """Fetch *request* from the target machine; return the stripped response
    body on success, False when the response carries an error, and raise
    CommonException when no response came back at all."""
    #access to the target ip machine to retrieve the dict,then modify the config
    http_client = HTTPClient()
    response = None
    try:
        response = http_client.fetch(request)
    finally:
        # Release the client whether or not the fetch succeeded.
        http_client.close()
    if response is None:
        raise CommonException('response is None!')
    if not response.error:
        return response.body.strip()
    message = "remote access,the key:%s,error message:%s" % (
        request, response.error)
    logging.error(message)
    return False
def test_handler_resource(provider: Provider, http_client: HTTPClient) -> None:
    """A handler-backed resource re-invokes its handler on every fetch."""
    class CountingHandler:
        def __init__(self) -> None:
            self.count = 0

        def __call__(self) -> str:
            self.count += 1
            return f"Testing handler resource {self.count}\n"

    resource = provider.create(handler=CountingHandler(), extension="txt")
    assert isinstance(resource, Resource)
    # Each fetch should see a freshly incremented counter.
    for call_no in (1, 2):
        body = http_client.fetch(resource.url).body.decode()
        assert body == f"Testing handler resource {call_no}\n"
def post(self, app, url):
    """Proxy the request body to the backend for *app*/*url* and relay the
    JSON response; on backend HTTP errors, mirror the status code."""
    backend_url = url_join(self.backend, app, url)
    try:
        response = HTTPClient().fetch(
            HTTPRequest(url=backend_url,
                        method='POST',
                        body=self.request.body))
        self.set_header("Content-Type", "application/json")
        self.write(response.body)
    except HTTPError as e:
        self.set_status(e.code)
        # BUG FIX: HTTPError.message does not exist on Python 3 / Tornado 6
        # (AttributeError inside the handler); write the string form instead.
        self.write(str(e))