class SyncHTTPClientTest(unittest.TestCase):
    """Exercises the synchronous HTTPClient against an HTTPServer that
    runs on its own IOLoop in a background thread."""

    def setUp(self):
        # The server gets a private IOLoop on a worker thread; the event
        # lets us block until the server socket is actually bound.
        self.server_ioloop = IOLoop()
        event = threading.Event()

        @gen.coroutine
        def init_server():
            sock, self.port = bind_unused_port()
            app = Application([("/", HelloWorldHandler)])
            self.server = HTTPServer(app)
            self.server.add_socket(sock)
            event.set()

        def start():
            # Bind the server on the loop's own thread, then run the loop.
            self.server_ioloop.run_sync(init_server)
            self.server_ioloop.start()

        self.server_thread = threading.Thread(target=start)
        self.server_thread.start()
        event.wait()
        self.http_client = HTTPClient()

    def tearDown(self):
        def stop_server():
            self.server.stop()
            # Delay the shutdown of the IOLoop by several iterations because
            # the server may still have some cleanup work left when
            # the client finishes with the response (this is noticeable
            # with http/2, which leaves a Future with an unexamined
            # StreamClosedError on the loop).

            @gen.coroutine
            def slow_stop():
                # The number of iterations is difficult to predict. Typically,
                # one is sufficient, although sometimes it needs more.
                for i in range(5):
                    yield
                self.server_ioloop.stop()

            self.server_ioloop.add_callback(slow_stop)

        self.server_ioloop.add_callback(stop_server)
        self.server_thread.join()
        self.http_client.close()
        self.server_ioloop.close(all_fds=True)

    def get_url(self, path):
        # Absolute URL pointing at the background test server.
        return "http://127.0.0.1:%d%s" % (self.port, path)

    def test_sync_client(self):
        response = self.http_client.fetch(self.get_url("/"))
        self.assertEqual(b"Hello world!", response.body)

    def test_sync_client_error(self):
        # Synchronous HTTPClient raises errors directly; no need for
        # response.rethrow()
        with self.assertRaises(HTTPError) as assertion:
            self.http_client.fetch(self.get_url("/notfound"))
        self.assertEqual(assertion.exception.code, 404)
def reset_results(self, access, secret, group):
    """Best-effort: ask every instance in *group* to reset its results
    via its HTTP ``/reset`` endpoint on port 8888.

    Per-instance failures are ignored so one unreachable instance cannot
    abort the loop.  The client is now closed (it was leaked before),
    and the bare ``except:`` is narrowed to ``Exception``.
    """
    for instance in group.instances:
        http = None
        try:
            http = HTTPClient()
            http.fetch('http://' + instance.public_dns_name + ':8888/reset')
        except Exception:
            # deliberate best-effort: swallow per-instance errors
            pass
        finally:
            if http is not None:
                http.close()
def newAuthApi(username, password):
    """Portal login: fetch the captcha image, OCR it, then POST the
    credentials plus captcha.  ``result['code']``: 200 ok, 400 HTTP
    error, 500 anything else.

    NOTE(review): ``result`` is built but never returned — confirm
    whether a trailing ``return result`` was lost.
    """
    data = {
        'Login.Token1': username,
        'Login.Token2': password
    }
    result = {'code': 200, 'content': ''}
    try:
        client = HTTPClient()
        # First request retrieves the captcha image and the session cookie.
        request = HTTPRequest(url=URL, method='GET')
        response = client.fetch(request)
        img = Image.open(io.BytesIO(response.body))
        # Keep only the cookie fragments the login endpoint needs.
        cookie = response.headers['Set-Cookie']
        cookieTemp = cookie.split(";")
        cookie = cookieTemp[0] + ";" + cookieTemp[1].split(",")[1]
        vercode = recognize(img)  # OCR the captcha image
        data['captcha'] = vercode
        request = HTTPRequest(
            url=LOGIN_URL,
            method='POST',
            body=urllib.urlencode(data),
            headers={'Cookie': cookie},
            request_timeout=8
        )
        response = client.fetch(request)
        # The login session cookie is the useful payload.
        result['content'] = response.headers['Set-Cookie']
    except HTTPError as e:
        result['code'] = 400
    except Exception, e:  # Python 2 syntax; catches everything else
        result['code'] = 500
def main(url, downloadto):
    """Scrape an imagefap gallery: collect the photo-page links, then
    download each full-size image (hosted on fap.to) into *downloadto*.

    NOTE(review): appends to a module-level ``gallery_list`` — confirm it
    is defined (and empty) before each call.
    """
    #url = "http://www.imagefap.com/pictures/4148883/Waiting-for-Daddy?gid=4148883&view=2"
    client = HTTPClient()
    print "Gathering links from Gallery"
    gallery_response = client.fetch(url)
    gallery_pool = BeautifulSoup(gallery_response.body)
    gallery_links = gallery_pool.findAll("a")
    for gallery_link in gallery_links:
        # photo-page links look like "/photo..."
        if "/photo" in gallery_link["href"]:
            photo_page_url = "".join(["http://imagefap.com", gallery_link["href"]])
            gallery_list.append(photo_page_url)
    print "Parsing individual pages for actual image"
    for link in gallery_list:
        photo_response = client.fetch(link)
        photo_pool = BeautifulSoup(photo_response.body)
        photo_images = photo_pool.findAll("img", src=True)
        for image in photo_images:
            # full-size images are served from fap.to
            if image["src"].startswith("http://fap.to"):
                image_src = image["src"]
                filename = image_src.split("/")[-1:][0]
                image_response = client.fetch(image_src)
                print "Downloading %s" % filename
                dest = "".join([downloadto, "/", filename])
                with open(dest, "wb") as f:
                    f.write(image_response.body)
def call(self, method, params, okay=None, fail=None):
    """Make a JSON-RPC method call.

    When neither *okay* nor *fail* is given the call is synchronous and
    returns the decoded response dict (or ``None`` on a non-200 status,
    empty body, or undecodable JSON).  Otherwise an asynchronous request
    is fired and the result is dispatched to the callbacks.

    Fix: the synchronous HTTPClient is now closed (it was leaked);
    stray trailing semicolon removed.
    """
    body = tornado.escape.json_encode({
        'jsonrpc': '2.0',
        'method': method,
        'params': params,
        'id': uuid.uuid4().hex,
    })
    logging.info("JSON-RPC: call '%s' method on %s" % (method, self.url))
    headers = HTTPHeaders({'Content-Type': 'application/json'})
    # request_timeout=0 disables the client-side timeout.
    request = HTTPRequest(self.url, method='POST', body=body,
                          headers=headers, request_timeout=0)
    if okay is None and fail is None:
        client = HTTPClient()
        try:
            response = client.fetch(request)
        finally:
            client.close()  # release the client's IOLoop
        if response.code != 200 or not response.body:
            return None
        try:
            data = tornado.escape.json_decode(response.body)
        except ValueError:
            return None
        else:
            return data
    else:
        client = AsyncHTTPClient()
        client.fetch(request, functools.partial(self._on_response, okay, fail))
def get(self, article_id):
    """Render the article-edit page: fetch the article record and its
    paragraphs from the blog service.

    Fixes: reuse a single HTTPClient and close it (two clients were
    created and leaked); replace the bare try/except around the nickname
    lookup with ``setdefault``.
    """
    logging.info("got article_id %r in uri", article_id)
    logging.info(self.request)
    http_client = HTTPClient()
    try:
        url = "http://" + STP + "/blogs/articles/" + article_id
        response = http_client.fetch(url, method="GET")
        logging.info("got _article response %r", response.body)
        _article = json_decode(response.body)
        _timestamp = _article["timestamp"]
        # service reports milliseconds; convert to a display datetime
        _article["timestamp"] = timestamp_datetime(_timestamp / 1000)
        # fall back to a placeholder when the author has no nickname
        _article.setdefault('accountNickname', "anonymous")
        url = "http://" + STP + "/blogs/my-articles/" + article_id + "/paragraphs"
        response = http_client.fetch(url, method="GET")
        logging.info("got _paragraphs response %r", response.body)
        _paragraphs = json_decode(response.body)
    finally:
        http_client.close()
    self.render('blog/article-edit.html', article=_article, paragraphs=_paragraphs)
def get(self):
    """Proxy a feed request for a named service (?name=atlas or issuu).

    Refactor: the duplicated fetch/relay/error logic for the two
    services is extracted into ``_proxy_fetch``; redundant manual
    ``f.close()`` inside the ``with`` block removed.
    """
    name = self.get_argument('name', '')
    if name == "atlas":
        with open("/var/www/atlas/access.token", 'r') as f:
            token = f.read()
        token = token.rstrip('\n')
        # TODO: Make this asynchronous and move access.token to aswwu/databases git repo
        self._proxy_fetch("https://api.instagram.com/v1/users/self/media/recent/?access_token=" + token)
    elif name == "issuu":
        self._proxy_fetch("http://search.issuu.com/api/2_0/document?username=aswwucollegian&pageSize=1&responseParams=title,description&sortBy=epoch")
    else:
        self.write("Something went wrong.")

def _proxy_fetch(self, url):
    # Fetch *url* synchronously and relay the body, or a JSON-ish error blob.
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url)
        self.write(response.body)
    except Exception as e:
        self.write("{error: '" + str(e) + "'}")
    http_client.close()
def change_status(self, data):
    """POST a status-change payload for this user to the control server.
    Best-effort: any failure is printed, not raised."""
    req = HTTPClient()
    try:
        req.fetch('http://%s:%d/change_status/' % (SERVER_ADDRESS, SERVER_PORT),
                  method='POST',
                  body=json_dumps({'username': self.username, 'data': data}))
    except Exception as E:
        print traceback.format_exc()  # Python 2 print statement
class TargetHandler(RequestHandler):
    """Relays a 'fire' command to a configured target's service."""

    def initialize(self, targets):
        # Mapping: target name -> (base url, (x, y) coordinates).
        self.targets = targets
        self.http_client = HTTPClient()

    def post(self, target):
        base_url, coords = self.targets[target]
        x, y = coords
        print("Firing on {} at {} (coords {}/{})".format(target, base_url, x, y))
        fire_url = "{}/fire_at/{}/{}".format(base_url, x, y)
        self.http_client.fetch(fire_url, method="POST", body="A Rocket")
class SyncHTTPClientTest(unittest.TestCase):
    """Exercises the synchronous HTTPClient against an HTTPServer running
    on a dedicated IOLoop in a background thread (older tornado API with
    the explicit ``io_loop`` argument)."""

    def setUp(self):
        if IOLoop.configured_class().__name__ in ('TwistedIOLoop',
                                                  'AsyncIOMainLoop'):
            # TwistedIOLoop only supports the global reactor, so we can't have
            # separate IOLoops for client and server threads.
            # AsyncIOMainLoop doesn't work with the default policy
            # (although it could with some tweaks to this test and a
            # policy that created loops for non-main threads).
            raise unittest.SkipTest(
                'Sync HTTPClient not compatible with TwistedIOLoop or '
                'AsyncIOMainLoop')
        self.server_ioloop = IOLoop()

        sock, self.port = bind_unused_port()
        app = Application([('/', HelloWorldHandler)])
        self.server = HTTPServer(app, io_loop=self.server_ioloop)
        self.server.add_socket(sock)

        self.server_thread = threading.Thread(target=self.server_ioloop.start)
        self.server_thread.start()

        self.http_client = HTTPClient()

    def tearDown(self):
        def stop_server():
            self.server.stop()
            # Delay the shutdown of the IOLoop by one iteration because
            # the server may still have some cleanup work left when
            # the client finishes with the response (this is noticable
            # with http/2, which leaves a Future with an unexamined
            # StreamClosedError on the loop).
            self.server_ioloop.add_callback(self.server_ioloop.stop)

        self.server_ioloop.add_callback(stop_server)
        self.server_thread.join()
        self.http_client.close()
        self.server_ioloop.close(all_fds=True)

    def get_url(self, path):
        # Absolute URL pointing at the background test server.
        return 'http://127.0.0.1:%d%s' % (self.port, path)

    def test_sync_client(self):
        response = self.http_client.fetch(self.get_url('/'))
        self.assertEqual(b'Hello world!', response.body)

    def test_sync_client_error(self):
        # Synchronous HTTPClient raises errors directly; no need for
        # response.rethrow()
        with self.assertRaises(HTTPError) as assertion:
            self.http_client.fetch(self.get_url('/notfound'))
        self.assertEqual(assertion.exception.code, 404)
def delete(self, *args, **kwargs):
    """Clear the local product cache plus the remote suggest and detect
    caches.

    Fix: the synchronous HTTPClient is now closed (it was leaked).
    """
    self.product_cache.clear()
    client = HTTPClient()
    try:
        url_suggest_clear = "%s/cache" % SUGGEST_URL
        self.logger.debug("clear suggest_cache,url=%s", url_suggest_clear)
        suggest_request = HTTPRequest(url_suggest_clear, method="DELETE")
        client.fetch(suggest_request)
        url_detect_clear = "%s/refresh" % DETECT_URL
        self.logger.debug("clear detect_cache,url=%s", url_detect_clear)
        detect_request = HTTPRequest(url_detect_clear, method="GET")
        client.fetch(detect_request)
    finally:
        client.close()
    self.logger.debug("clear cache completed")
    self.finish()
def sina_ip(ip):
    """Look up an IP's geographic attribution via Sina's iplookup service
    and return a masked display string like ``网友183.*.*.171[city]``."""
    attribution = ""
    if ip == "127.0.0.1":
        # local requests get a fixed placeholder address
        ip = '183.208.22.171'
    http_client = HTTPClient()
    response = None
    url = "http://int.dpool.sina.com.cn/iplookup/iplookup.php?format=js&ip={0}".format(ip)
    try:
        response = http_client.fetch(url, method='GET', request_timeout=120)
    except Exception as e:
        request_log.info(e)
    finally:
        http_client.close()
    if response and response.code == 200:
        # The service returns JSONP; [21:-1] strips the JS wrapper.
        # SECURITY NOTE(review): eval() on a remote response body executes
        # whatever the service sends — consider json.loads instead.
        response_body = eval(response.body.decode('utf8')[21:-1])
        try:
            province = response_body['province']
            city = response_body['city']
            attribution = city #+province
        except Exception as e:
            error_log.error(e)
    # Mask the middle octets before display.
    ip_piece = ip.split(".")
    ip_piece[1] = '*'
    ip_piece[2] = '*'
    ip_attribution = '网友' + '.'.join(ip_piece) + '[' + attribution + ']'
    request_log.info(ip_attribution)
    return ip_attribution
def post(self, article_id):
    """Convert the submitted HTML paragraphs to Markdown and PUT them to
    the auth service, then redirect back to the user's article list.

    Fix: the synchronous HTTPClient is now closed (it was leaked).
    """
    logging.info(self.request)
    logging.info("got article_id %r from uri", article_id)
    paragraphs = self.get_argument("paragraphs", "")
    logging.info("got paragraphs %r", paragraphs)
    # Use html2text to convert the HTML fragment to Markdown, keeping links.
    h = html2text.HTML2Text()
    h.ignore_links = False
    paragraphs = h.handle(paragraphs)
    logging.info("got paragraphs %r", paragraphs)
    random = random_x(8)  # cache-busting token for the redirect
    logging.info("got random %r", random)
    session_token = self.get_secure_cookie("session_token")
    logging.info("got session_token %r from cookie", session_token)
    url = "http://" + AUTH_HOST + "/blog/articles/" + article_id + "/paragraphs"
    body_data = {'paragraphs': paragraphs}
    logging.info("put body %r", body_data)
    _json = json_encode(body_data)
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url, method="PUT", body=_json,
                                     headers={"Authorization": "Bearer " + session_token})
    finally:
        http_client.close()
    logging.info("got token response %r", response.body)
    self.redirect('/blog/articles/mine?random=' + random)
def getUserInfo(token, openid):
    """Fetch a WeChat user's profile via the sns/userinfo API.

    Returns the decoded JSON dict.  Fix: the synchronous HTTPClient is
    now closed (it was leaked).
    """
    url = "https://api.weixin.qq.com/sns/userinfo?access_token="+token+"&openid="+openid+"&lang=zh_CN"
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url, method="GET")
    finally:
        http_client.close()
    logging.info("got response %r", response.body)
    userInfo = json_decode(response.body)
    return userInfo
def get_current_user(self):
    """Return the session token, refreshing it via the auth service and
    re-setting the secure cookies when the refresh branch is taken.

    Fix: the synchronous HTTPClient is now closed (it was leaked).
    NOTE(review): the ``_timestamp > int(expires_at)`` branch returns the
    stored token once the expiry time has *passed* and refreshes
    otherwise — confirm this inversion is intentional.
    """
    session_token = self.get_secure_cookie("session_token")
    logging.info("got session_token %r", session_token)
    expires_at = self.get_secure_cookie("expires_at")
    if expires_at is None or expires_at == "":
        expires_at = 0
    refresh_token = self.get_secure_cookie("refresh_token")
    _timestamp = int(time.time())
    if _timestamp > int(expires_at):
        return session_token
    url = "http://" + AUTH_HOST + "/auth/refresh-token"
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url, method="GET",
                                     headers={"Authorization": "Bearer " + refresh_token})
    finally:
        http_client.close()
    logging.info("got refresh-token response %r", response.body)
    token = json_decode(response.body)
    expires_at = _timestamp + token['expires_in']
    session_token = token['access_token']
    self.set_secure_cookie("session_token", session_token)
    self.set_secure_cookie("expires_at", str(expires_at))
    self.set_secure_cookie("refresh_token", token['refresh_token'])
    self.set_secure_cookie("account_id", token['account_id'])
    return session_token
def post_api(path):
    """POST command to remote API.

    Returns the decoded JSON response body.  Fix: the synchronous
    HTTPClient is now closed (it was leaked).
    """
    http = HTTPClient()
    try:
        resp = http.fetch(get_url(path), method="POST", body='')
    finally:
        http.close()
    return json_decode(resp.body)
def getData(self, url, method, data, cookie):
    """Fetch *url* with the given cookie; *data* is a JSON string used as
    query parameters (GET) or an urlencoded body (POST).  Returns the
    raw response body, or ``None`` on any failure."""
    try:
        client = HTTPClient()
        request = HTTPRequest(
            url,
            method=method,
            headers={
                'Cookie': cookie
            }
        )
        if data and method == "GET":
            # JSON string -> dict -> query string appended to the URL
            data = json.loads(data)
            url = url_concat(url, data)
            request.url = url
        elif data and method == "POST":
            data = json.loads(data)
            print data  # Python 2 debug print
            data = urllib.urlencode(data)
            request.body = data
        # print request.url
        response = client.fetch(request)
        return response.body
    except Exception, e:  # Python 2 syntax; swallow and signal failure
        # print str(e)
        return None
def get_search(content):
    """Search the blog catalog page for article ids whose title matches
    *content*; returns ``{"blog_id": [...]}``.

    NOTE(review): ``io_loop.start()`` before synchronous work looks
    suspicious — ``start()`` normally blocks until ``stop()`` is called;
    confirm the intended IOLoop lifecycle here.
    """
    io_loop = tornado.ioloop.IOLoop.current()
    io_loop.start()
    blog_id = []
    try:
        html = None
        # Match <h2><a href="/blog/<id>/"...>...content...</a></h2>
        rc = '<h2><a href="/blog/(.+?)/" target="_blank">.*' + str(content) + ".*?</a></h2>"
        ra = re.compile(rc, re.IGNORECASE)
        # url = "http://{0}/catalog".format(config()["pyworm_blog"]["url"])
        url = "http://www.pyworm.com/catalog/"
        http_client = HTTPClient()
        response = None
        try:
            response = http_client.fetch(url, request_timeout=5)
        except Exception as e:
            error_log.error(e)
        if response and response.code == 200:
            html = response.body.decode('utf-8')
            try:
                blog_id = re.findall(ra, html)
            except Exception as e:
                error_log.error(e)
    except Exception as e:
        error_log.error(e)
    finally:
        io_loop.stop()
    return {"blog_id": blog_id}
def parser(self):
    """Scrape the JWC news page and return ``{'code': ..., 'content': ...}``
    where content maps section names to abstracted item lists.
    Codes: 200 ok, 400 on any fetch/parse failure."""
    retjson = {'code': 200, 'content': ''}
    header = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate, sdch',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Host': 'jwc.seu.edu.cn',
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1'
    }
    try:
        client = HTTPClient()
        request = HTTPRequest(JWC_URL, method='GET', headers=header, request_timeout=TIME_OUT)
        response = client.fetch(request)
        html = response.body
        soup = BeautifulSoup(html)
        items = soup.findAll('table', {'width': "100%"})
        # Hard-coded table offsets for each page section — fragile; breaks
        # whenever the remote page layout changes.
        info = {
            '最新动态': self.abstract(items[12:17], 1),
            '教务信息': self.abstract(items[34:34 + 7], 0),
            '学籍管理': self.abstract(items[45:45 + 7], 0),
            '实践教学': self.abstract(items[56:56 + 7], 0),
            '合作办学': self.abstract(items[67:67 + 4], 0),
        }
        retjson = {'code': 200, 'content': info}
    except:
        # any scraping/parsing failure maps to code 400
        retjson['code'] = 400
        # print traceback.print_exc()
    return retjson
def post(self):
    """Create a new blog article via the auth service, then redirect to
    the user's article list.

    Fix: the synchronous HTTPClient is now closed (it was leaked).
    """
    logging.info(self.request)
    random = random_x(8)  # cache-busting token for the redirect
    logging.info("got random %r", random)
    image = self.get_argument("filename", "")
    logging.info("got image %r", image)
    title = self.get_argument("article_title", "")
    logging.info("got article_title %r", title)
    desc = self.get_argument("article_desc", "")
    logging.info("got article_desc %r", desc)
    session_token = self.get_secure_cookie("session_token")
    logging.info("got session_token %r from cookie", session_token)
    url = "http://" + AUTH_HOST + "/blog/articles"
    body_data = {'type': 'blog', 'image': image, 'title': title, 'desc': desc}
    logging.info("post body %r", body_data)
    _json = json_encode(body_data)
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url, method="POST", body=_json,
                                     headers={"Authorization": "Bearer " + session_token})
    finally:
        http_client.close()
    logging.info("got token response %r", response.body)
    self.redirect('/blog/articles/mine?random=' + random)
def getData(self, url, method, data, cookie):
    """Fetch *url* with the given cookie.  For GET, *data* is appended as
    a query string (with ``+`` re-escaped to ``%20``); for POST, *data*
    is an iterable of (key, value) pairs urlencoded into the body.
    Returns the decoded JSON response, or the error string on failure."""
    try:
        client = HTTPClient()
        request = HTTPRequest(
            url,
            method=method,
            headers={
                'Cookie': cookie
            }
        )
        if data and method == "GET":
            url = url_concat(url, data)
            # the target server expects %20 rather than '+' for spaces
            url = url.replace("+", "%20")
            request.url = url
        elif data and method == "POST":
            realData = {}
            for i in data:
                realData[i[0]] = i[1]
            data = urllib.urlencode(realData)
            request.body = data
        response = client.fetch(request)
        return json.loads(response.body)
    except Exception, e:  # Python 2 syntax
        # print str(e)
        #traceback.print_exc()
        return str(e)
def run(self):
    """Poll the CS news RSS feed hourly and store items newer than the
    newest one already in the database.

    Fixes: the HTTPClient is now closed each cycle (it was re-created
    and leaked once per iteration); the bare ``except:`` around the
    max-id query is narrowed to ``Exception``.
    """
    while True:
        try:
            maxid = self.db.news_list()[0]['id']
        except Exception:
            # empty table (or query failure): treat everything as new
            maxid = 1
        print(maxid)
        client = HTTPClient()
        try:
            response = client.fetch('http://cs.hust.edu.cn/rss')
        finally:
            client.close()
        result = response.body.decode("utf-8", errors='ignore')
        soup = BeautifulStoneSoup(result)
        items = soup.find_all('item')
        for item in items:
            title = item.title.text
            link = item.link.text
            desc = item.description.text
            linkid = self.link_id(link)
            if linkid > maxid:
                result = self.db.add_news(linkid, title, desc, link)
                if result:
                    result = self.get_article(link)
            else:
                # feed is newest-first: stop at the first known item
                break
        time.sleep(3600)
def getCookie(cardnum, card_pwd):
    """Log in with a card number/password and assemble the SSO cookie.
    Returns ``(True, cookie)`` on success or ``(False, reason)``."""
    # print "refresh"
    data = {
        'username': cardnum,
        'password': card_pwd
    }
    try:
        client = HTTPClient()
        request = HTTPRequest(
            LOGIN_URL,
            method='POST',
            body=urllib.urlencode(data),
            validate_cert=False,
            request_timeout=4)
        response = client.fetch(request)
        header = response.headers
        if 'Ssocookie' in header.keys():
            # Combine the SSO cookie with the first Set-Cookie fragment.
            headertemp = json.loads(header['Ssocookie'])
            cookie = headertemp[0]['cookieName'] + "=" + headertemp[0]['cookieValue']
            cookie += ";" + header['Set-Cookie'].split(";")[0]
            return True, cookie
        else:
            return False, "No cookie"
    except Exception, e:  # Python 2 syntax
        # print str(e)
        return False, str(e)
def post_context_feedback(self, context_id: str, user_id: str, application_id: str,
                          session_id: str, product_id: str, _type: str,
                          meta_data: dict = None):
    """POST a feedback event for *context_id* to the context service.

    Returns the ``_rev`` response header; logs and re-raises HTTPError.

    BUG FIX: the original appended a literal ``"&user_id=%s"`` without
    interpolating *user_id*; the value is now formatted in.
    """
    self.logger.debug(
        "context_id=%s,user_id=%s,application_id=%s,session_id=%s,product_id=%s,"
        "_type=%s,meta_data=%s",
        context_id, user_id, application_id, session_id, product_id, _type, meta_data
    )
    url = "%s/%s/feedback/?application_id=%s&session_id=%s&product_id=%s&type=%s" % (
        CONTEXT_URL, context_id, application_id, session_id, product_id, _type
    )
    if user_id is not None:
        url += "&user_id=%s" % user_id
    request_body = {}
    if meta_data is not None:
        request_body["meta_data"] = meta_data
    try:
        http_client = HTTPClient()
        response = http_client.fetch(HTTPRequest(url=url, body=dumps(request_body), method="POST"))
        http_client.close()
        return response.headers["_rev"]
    except HTTPError:
        self.logger.error("post_context_feedback,url=%s", url)
        raise
def post(self):
    """Register a new account via the auth service; on success render the
    login page, on conflict (phone already registered) re-render the
    register page with a message."""
    logging.info(self.request)
    phone = self.get_argument("registerPhone", "")
    md5pwd = self.get_argument("registerPwd", "")
    logging.info("phone %r", phone)
    try:
        url = "http://" + AUTH_HOST + "/auth/account"
        body_data = {"appid": APPID, "app_secret": APP_SECRET,
                     "login": phone, "pwd": md5pwd}
        logging.info("post body %r", body_data)
        _json = json_encode(body_data)
        http_client = HTTPClient()
        response = http_client.fetch(url, method="POST", body=_json)
        logging.info("got token response %r", response.body)
        _err_msg = _("You have already register an account, please login.")
        self.render('auth/login.html', err_msg=_err_msg)
    except:
        # Dispatch on the stringified exception detail; fragile, but the
        # HTTP status code is not otherwise surfaced here.
        err_title = str(sys.exc_info()[0]);
        err_detail = str(sys.exc_info()[1]);
        logging.error("error: %r info: %r", err_title, err_detail)
        if err_detail == 'HTTP 409: Conflict':
            _err_msg = _("This phone already exist, please enter a new one.")
            self.render('auth/register.html', err_msg=_err_msg)
            return
        else:
            _err_msg = _(err_detail)
            self.render('auth/register.html', err_msg=_err_msg)
            return
def submit_batch(base_url, batch):
    """POST *batch* (a list of JSON-serializable docs) to the ``_bulk``
    endpoint, one JSON doc per line.

    Fix: the synchronous HTTPClient is now closed (it was leaked).
    """
    print("Submitting a batch")
    http = HTTPClient()
    try:
        url = base_url + '_bulk'
        body = '\n'.join(json.dumps(doc) for doc in batch)
        resp = http.fetch(url, method='POST', body=body)
        resp.rethrow()
    finally:
        http.close()
def gen_msg_token(phone):
    """Send a 4-digit verification-code SMS to *phone* via ihuyi, falling
    back to the template-SMS provider on failure.  Returns ``True`` on
    success or the provider's error message.

    NOTE(review): ``{code}`` in ``sendTemplateSMS(phone, {code}, 32417)``
    builds a *set* containing the code — confirm the API expects that.
    NOTE(review): ``s = DBSession()`` is created but never used here.
    """
    s = DBSession()
    code = "".join(random.sample("123456789", 4))
    flag = False
    url = "http://106.ihuyi.cn/webservice/sms.php?method=Submit&account={account}&password={password}&mobile={phone}&content={content}".format(account=account, password=password, phone=phone, content=url_escape(content.format(code=code)))
    h = HTTPClient()
    try:
        res = h.fetch(url, connect_timeout=5.0)
    except:
        # primary gateway unreachable: fall back immediately
        flag, msg = sendTemplateSMS(phone, {code}, 32417)
        if flag:
            update_code(phone, code)
            return True
        else:
            return msg
    h.close()
    root = ElementTree.fromstring(res.body.decode())
    if not root[0].text == '2':
        # print("[VerifyMsg]Send error:",root[0].text,root[1].text)
        # Gateway rejected the message: fall back to the cloud SMS provider.
        flag, msg = sendTemplateSMS(phone, {code}, 32417)
        if flag:
            update_code(phone, code)
            return True
        else:
            return msg
    else:
        update_code(phone, code)
        return True
class HTTPHelper(object):
    """HTTP请求 使用tornado.httpclient库.

    Thin wrapper around tornado's synchronous HTTPClient: url-encodes a
    dict body for POST requests and remembers the last request/response.
    """

    def __init__(self, url, method="POST", body=None):
        self._url = url
        self._method = method
        self._body = body or dict()
        self.client = HTTPClient()
        self._response = None

    def fetch(self, url=None, validate_cert=False, **kwargs):
        """Perform the request; *url* overrides the one given at init.
        Returns the HTTPResponse (also stored on the instance)."""
        url = url or self._url
        # Only POST requests carry a (urlencoded) body.
        body = self._method == "POST" and urllib.urlencode(self._body) or None
        self._request = HTTPRequest(
            url=url, method=self._method, body=body,
            validate_cert=validate_cert, **kwargs)
        self._response = self.client.fetch(request=self._request)
        return self._response

    def close(self):
        """Release the underlying HTTPClient.

        New method: previously the owned client could never be closed
        and its IOLoop leaked.
        """
        self.client.close()

    @property
    def body(self):
        return self._response.body

    @property
    def response(self):
        return self._response
def authApi(username, password):
    """Authenticate against the SEU mobile IDS endpoint and store the
    assembled SSO cookie in ``result['content']``.  Codes: 200 ok,
    400 HTTP error or missing cookie, 500 anything else.

    NOTE(review): ``result`` is built but never returned — confirm
    whether a trailing ``return result`` was lost.
    """
    data = {
        'username': username,
        'password': password
    }
    result = {'code': 200, 'content': ''}
    try:
        client = HTTPClient()
        request = HTTPRequest(
            "https://mobile4.seu.edu.cn/_ids_mobile/login18_9",
            method='POST',
            body=urllib.urlencode(data),
            validate_cert=False,
            request_timeout=TIME_OUT)
        response = client.fetch(request)
        header = response.headers
        if 'Ssocookie' in header.keys():
            # Assemble the SSO cookie plus the first Set-Cookie fragment.
            headertemp = json.loads(header['Ssocookie'])
            cookie = headertemp[0]['cookieName'] + "=" + headertemp[0]['cookieValue']
            cookie += ";" + header['Set-Cookie'].split(";")[0]
            result['content'] = cookie
        else:
            result['code'] = 400
    except HTTPError as e:
        result['code'] = 400
    except Exception, e:  # Python 2 syntax
        result['code'] = 500
def getAccessToken(appId, appSecret, code):
    """Exchange a WeChat OAuth *code* for an access token.

    Returns the decoded JSON dict.  Fix: the synchronous HTTPClient is
    now closed (it was leaked).
    """
    url = "https://api.weixin.qq.com/sns/oauth2/access_token?appid="+appId+"&secret="+appSecret+"&code="+code+"&grant_type=authorization_code"
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url, method="GET")
    finally:
        http_client.close()
    logging.info("got response %r", response.body)
    accessToken = json_decode(response.body)
    return accessToken
def newAuthApi(username, password):
    """Portal login: fetch captcha, OCR it, POST credentials + captcha.
    ``result['code']``: 200 ok, 400 HTTP error, 500 anything else.

    NOTE(review): ``result`` is never returned — confirm a trailing
    ``return result`` was not lost.
    """
    data = {'Login.Token1': username, 'Login.Token2': password}
    result = {'code': 200, 'content': ''}
    try:
        client = HTTPClient()
        request = HTTPRequest(url=URL, method='GET')
        response = client.fetch(request)
        img = Image.open(io.BytesIO(response.body))
        # keep only the cookie fragments needed by the login endpoint
        cookie = response.headers['Set-Cookie']
        cookieTemp = cookie.split(";")
        cookie = cookieTemp[0] + ";" + cookieTemp[1].split(",")[1]
        vercode = recognize(img)  # OCR the captcha image
        data['captcha'] = vercode
        request = HTTPRequest(url=LOGIN_URL, method='POST',
                              body=urllib.urlencode(data),
                              headers={'Cookie': cookie},
                              request_timeout=8)
        response = client.fetch(request)
        result['content'] = response.headers['Set-Cookie']
    except HTTPError as e:
        result['code'] = 400
    except Exception, e:  # Python 2 syntax
        result['code'] = 500
def get(self, *args, **kwargs):
    """Download the resource at ``?url=...`` and save it under
    static/downloads as ``?filename=...`` (default ``index.html``).

    Fixes: ``'static\\downloads'`` hard-coded a Windows backslash
    separator (and ``\\d`` is an invalid escape going forward) — the path
    is now built portably; the HTTPClient is closed (it was leaked).
    """
    url = self.get_query_argument('url')
    filename = self.get_query_argument('filename', default='index.html')
    client = HTTPClient()
    try:
        # validate_cert=False: skip SSL certificate verification
        response: HTTPResponse = client.fetch(url, validate_cert=False)
    finally:
        client.close()
    print(response.body)
    # 保存到static/downloads
    from app import BASE_DIR
    dir = os.path.join(BASE_DIR, 'static', 'downloads')
    with open(os.path.join(dir, filename), 'wb') as f:
        f.write(response.body)
    self.write('下载成功')
def getUnifiedOrder(remote_ip, wx_app_id, store_id, product_description, wx_notify_domain, wx_mch_id, wx_mch_key, openid, order_id, actual_payment, timestamp):
    """Call WeChat Pay's unifiedorder API.

    Builds the signed XML request, POSTs it, and returns a dict holding
    ``prepay_id`` plus the client-side pay-sign fields (``pay_sign``,
    ``timestamp``, ``app_id``, ``nonce_str``).
    """
    key = wx_mch_key
    nonceA = getNonceStr()  # random nonce for the order signature
    logging.info("got nonceA %r", nonceA)
    total_fee = str(actual_payment)
    logging.info("got total_fee %r", total_fee)
    notify_url = wx_notify_domain + '/bf/wx/orders/notify'
    logging.info("got notify_url %r", notify_url)
    signA = getOrderSign(remote_ip, notify_url, wx_app_id, wx_mch_id, nonceA, openid, key, store_id, order_id, product_description, total_fee)
    logging.info("got signA %r", signA)
    # Assemble the unifiedorder XML payload.
    _xml = '<xml>' \
        + '<appid>' + wx_app_id + '</appid>' \
        + '<attach>' + store_id + '</attach>' \
        + '<body>' + product_description + '</body>' \
        + '<mch_id>' + wx_mch_id + '</mch_id>' \
        + '<nonce_str>' + nonceA + '</nonce_str>' \
        + '<notify_url>' + notify_url + '</notify_url>' \
        + '<openid>' + openid + '</openid>' \
        + '<out_trade_no>' + order_id + '</out_trade_no>' \
        + '<spbill_create_ip>' + remote_ip + '</spbill_create_ip>' \
        + '<total_fee>' + str(actual_payment) + '</total_fee>' \
        + '<trade_type>JSAPI</trade_type>' \
        + '<sign>' + signA + '</sign>' \
        + '</xml>'
    url = "https://api.mch.weixin.qq.com/pay/unifiedorder"
    http_client = HTTPClient()
    response = http_client.fetch(url, method="POST", body=_xml)
    logging.info("got response %r", response.body)
    order_return = parseWxOrderReturn(response.body)
    # Default missing fields so the later signing step cannot KeyError.
    if not order_return.has_key('prepay_id'):  # Python 2 dict.has_key
        order_return['prepay_id'] = ""
    logging.info("got prepayId %r", order_return['prepay_id'])
    if not order_return.has_key('nonce_str'):
        order_return['nonce_str'] = ''
    # Second signature: used by the WeChat JS client to invoke payment.
    signB = getPaySign(timestamp, wx_app_id, order_return['nonce_str'], order_return['prepay_id'], key)
    logging.info("got signB %r", signB)
    order_return['pay_sign'] = signB
    order_return['timestamp'] = timestamp
    order_return['app_id'] = wx_app_id
    return order_return
def post(self):
    """Reset a lost password via the auth service; map specific HTTP
    error details to user-facing messages on the lost-pwd page."""
    logging.info(self.request)
    phone = self.get_argument("lostPhone", "")
    verify_code = self.get_argument("lostVerifyCode", "")
    md5pwd = self.get_argument("lostPwd", "")
    logging.info("phone %r", phone)
    logging.info("verify_code %r", verify_code)
    try:
        url = "http://" + AUTH_HOST + "/auth/pwd"
        body_data = {
            "appid": APPID,
            "app_secret": APP_SECRET,
            "login": phone,
            "verify_code": verify_code,
            "pwd": md5pwd
        }
        logging.info("post body %r", body_data)
        _json = json_encode(body_data)
        http_client = HTTPClient()
        response = http_client.fetch(url, method="POST", body=_json)
        logging.info("got lost-pwd response %r", response.body)
        _err_msg = _("Password already updated, please login.")
        self.render("auth/login.html", err_msg=_err_msg)
    except:
        # Dispatch on the stringified HTTP error detail; fragile, but the
        # status code is not otherwise available here.
        err_title = str(sys.exc_info()[0])
        err_detail = str(sys.exc_info()[1])
        logging.error("error: %r info: %r", err_title, err_detail)
        if err_detail == 'HTTP 404: Not Found':
            _err_msg = _(
                "This phone not exist in system, please register first.")
            self.render('auth/lost-pwd.html', err_msg=_err_msg)
            return
        elif err_detail == 'HTTP 401: Unauthorized':
            _err_msg = _(
                "This verify code not pair for phone, please retype it.")
            self.render('auth/lost-pwd.html', err_msg=_err_msg)
            return
        elif err_detail == 'HTTP 408: Request Timeout':
            _err_msg = _(
                "This verify code is timeout, please request new one.")
            self.render('auth/lost-pwd.html', err_msg=_err_msg)
            return
        else:
            _err_msg = _(err_detail)
            self.render('auth/lost-pwd.html', err_msg=_err_msg)
            return
def get_with_retries_std(self, url, results, index, as_file=False):
    """GET *url* with up to 3 retries.

    On 429 waits for the server's Retry-After (default 30s); on
    400/409/5xx waits 10s.  Other HTTP errors abort immediately.
    Stores ``[Result, True]`` or ``[exception, False]`` into
    ``results[index]`` (thread-style out-parameter).
    """
    retries = 0
    result = None
    while retries <= 3:
        try:
            request = self.simple_request(url)
            http_client = HTTPClient()
            response = http_client.fetch(request)
            result = Result(response, as_file)
            break
        except HTTPError as e:
            retries += 1
            msg = "HTTP Exception get_with_retries_std:{0}".format(e)
            self.printf("{0} {1}".format(e.code, msg))
            try:
                self.printf("TrackingId:{0}".format(
                    e.response.headers.get("Trackingid")))
            except Exception as te:
                self.printf("No TrackingId.")
            try:
                # Try to surface the error payload; fall back to raw text.
                try:
                    msg = json.loads(e.response.body.decode('utf8'))
                except Exception as ex:
                    msg = e.response.body.decode('utf8')
            except Exception as exx:
                pass  # probably a 599 timeout
            if e.code in [400, 409, 429] or e.code >= 500:
                if e.code == 429:
                    # rate limited: honor the server's Retry-After if present
                    retry_after = None
                    try:
                        retry_after = e.response.headers.get("Retry-After")
                    except Exception as e:
                        pass
                    if retry_after == None:
                        retry_after = 30
                else:
                    retry_after = 10
                msg = "{0} hit, waiting for {1} seconds and then retrying...".format(
                    e.code, retry_after)
                self.printf(msg)
                time.sleep(int(retry_after))
            else:
                print("Not handling HTTPError:{0}. url:{1}".format(
                    e, url))
                results[index] = [e, False]
                return
    results[index] = [result, True]
    return
def init_select_data(self, cookie):
    """Fetch the course-selection page and stash its ASP.NET
    ``__VIEWSTATE`` / ``__EVENTVALIDATION`` fields into the shared
    ``selectData`` dict.  Best-effort: failures are silently ignored.

    NOTE(review): the *cookie* parameter is never used — confirm whether
    it should be injected into the request headers.
    """
    try:
        client = HTTPClient()
        request = HTTPRequest(SELECTURL, method="GET", headers=header, request_timeout=TIME_OUT)
        response = client.fetch(request)
        content = BeautifulSoup(response.body)
        viewstate_value = content.find('input', {'id': '__VIEWSTATE'})
        EVENTVALIDATION = content.find('input', {'id': '__EVENTVALIDATION'})
        selectData['__VIEWSTATE'] = viewstate_value['value']
        selectData['__EVENTVALIDATION'] = EVENTVALIDATION['value']
    except Exception, e:  # Python 2 syntax; best-effort
        pass
def handle_online(remote_http):
    """Record *remote_http* as online, probing new servers with a
    GET /hello/ before registering them.

    Fix: the synchronous HTTPClient is now closed (it was leaked).
    """
    if not g_Online_Server.get(remote_http):
        # First sighting: verify the server answers before registering it.
        url = '%s/hello/' % remote_http
        req = HTTPRequest(url=url, method='GET', connect_timeout=2, request_timeout=2)
        http_client = HTTPClient()
        try:
            rsp = http_client.fetch(req)
        finally:
            http_client.close()
        if rsp.code == 200:
            g_Online_Server[remote_http] = time.time()
            g_Online_Server_deque.append(remote_http)
    else:
        # Known server: just refresh its last-seen timestamp.
        g_Online_Server[remote_http] = time.time()
    handle_expire()
def put(self, url, body):
    ''' Perform a PUT request.

    Prefixes *url* with ``self.root_url`` and attaches ``self.cookie``
    when those attributes are present.

    NOTE(review): calls ``HTTPClient.fetch(self, request)`` unbound —
    this method appears to live on an HTTPClient subclass; confirm.
    '''
    if hasattr(self, "root_url") and self.root_url:
        url = self.root_url + url
    request = HTTPRequest(url, method="PUT", body=body, validate_cert=False)
    if hasattr(self, "cookie") and self.cookie:
        request.headers["Cookie"] = self.cookie
    return HTTPClient.fetch(self, request)
def get(self, article_id):
    """Fetch one article from the auth service and render the
    paragraphs-append page.

    Fix: the synchronous HTTPClient is now closed (it was leaked).
    """
    logging.info(self.request)
    logging.info("got article_id %r from uri", article_id)
    random = random_x(8)  # cache-busting token for the template
    logging.info("got random %r", random)
    url = "http://" + AUTH_HOST + "/blog/articles/" + article_id
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url, method="GET")
    finally:
        http_client.close()
    logging.info("got response %r", response.body)
    article = json_decode(response.body)
    self.render('blog/paragraphs-append.html', random=random, article=article)
def sync_fetch(request, method, default_headers=None, httpclient=None, **kwargs):
    """
    fetch resource using the synchronous HTTPClient

    :param request: HTTPRequest object or a url
    :param method: HTTP method in string format, e.g. GET, POST
    :param default_headers: headers merged into the request
    :param httpclient: optional pre-built HTTPClient to reuse
    :param kwargs: query string entities or POST data

    Fix: a client created internally is now closed after the fetch (it
    was leaked); a caller-supplied client is left open for reuse.
    """
    updated_request = make_request(request, method, default_headers, **kwargs)
    own_client = httpclient is None
    if own_client:
        httpclient = HTTPClient()
    try:
        rsp = httpclient.fetch(updated_request)
    finally:
        # only close a client we created ourselves; callers keep theirs
        if own_client:
            httpclient.close()
    return parse_response(rsp)
def get_combined_services():
    """ Merge list of services from Monit and ServiceManager.

    Returns:
      A dictionary mapping service name to service state.

    Fix: the synchronous HTTPClient is now closed (it was leaked).
    """
    http_client = HTTPClient()
    try:
        status_url = '{}/_status?format=xml'.format(MonitOperator.LOCATION)
        response = http_client.fetch(status_url)
    finally:
        http_client.close()
    servers = parse_entries(response.body)
    # ServiceManager entries are keyed "<type>-<port>".
    servers.update({
        '-'.join([server.type, str(server.port)]): server.state
        for server in ServiceManager.get_state()})
    return servers
def clean_cache():
    """
    调用nebula web清理缓存API

    Calls the nebula web clean-cache endpoint for the offline_serial
    stats URL and logs success/failure based on the JSON status field.
    Fix: the synchronous HTTPClient is now closed (it was leaked).
    """
    logger.info("开始清除web API缓存")
    auth_code = get_auth_code()
    client = HTTPClient()
    try:
        url = 'http://{}:{}/platform/stats/clean_cache?auth={}&url=/platform/stats/offline_serial&method=GET'.format(
            global_settings.WebUI_Address, global_settings.WebUI_Port, auth_code)
        res = client.fetch(url, method='GET')
    finally:
        client.close()
    body = json.loads(res.body)
    if body.get('status') == 0:
        logger.info('清除web API缓存完成')
    else:
        logger.error('清除web API缓存失败')
def shutdown_all(self):
    """Shutdown all kernels."""
    # Note: We have to make this sync because the NotebookApp does not wait for async.
    kwargs = {'method': 'DELETE'}
    kwargs = load_connection_args(**kwargs)
    client = HTTPClient()
    for kernel_id in self._kernels.keys():
        kernel_url = url_path_join(KG_URL, self._kernel_id_to_url(kernel_id))
        self.log.debug("Request delete kernel at: %s", kernel_url)
        try:
            response = client.fetch(kernel_url, **kwargs)
        except HTTPError:
            # BUG FIX: the original fell through and logged `response`,
            # which is unbound (or stale) when the fetch fails — skip the
            # response log for failed deletes instead.
            continue
        self.log.debug("Delete kernel response: %d %s",
                       response.code, response.reason)
    client.close()
def sync_request(url, token, body, method):
    """Issue a synchronous JSON request and return the decoded response
    body.

    *body* may be ``None`` (no request body) or a JSON-serializable
    object.  Fix: the synchronous HTTPClient is now closed (it was
    leaked).
    """
    hdr = __gen_header(method, token)
    str_body = None if body is None else json.dumps(body)
    req = HTTPRequest(url=url, method=method, headers=hdr, body=str_body,
                      connect_timeout=200, request_timeout=600,
                      validate_cert=False)
    cli = HTTPClient()
    try:
        rep = cli.fetch(req)
    finally:
        cli.close()
    return json.loads(rep.body)
def do_post_with_cert(url, params={}, headers={}, client_key=None, client_cert=None):
    """POST *params* to *url*, optionally using a client certificate.

    *params* may be a pre-encoded string or a dict (urlencoded here).
    Returns the response body.  Fix: the synchronous HTTPClient is now
    closed (it was leaked).  The mutable default arguments are never
    mutated, so they are kept for interface stability.
    """
    body = params if isinstance(params, str) else urllib.urlencode(params)
    http_request = HTTPRequest(url, 'POST', body=body, headers=headers,
                               validate_cert=False, client_key=client_key,
                               client_cert=client_cert)
    http_client = HTTPClient()
    try:
        fetch_result = http_client.fetch(http_request)
    finally:
        http_client.close()
    return fetch_result.body
def signal(host, port, task_type, post_data):
    """ sync version of signal, which should used in thread

    Returns the HTTP response on success, or None if the request failed
    (the traceback is printed but never propagated to the caller thread).
    """
    #logging.info("SIGNAL with - type:%s, data:%s." % (task_type, str(post_data)))
    http_client = HTTPClient()
    try:
        return http_client.fetch(
            _request(host, port, task_type, post_data, request_timeout=1))
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; best-effort semantics are otherwise preserved.
        traceback.print_exc()
        return None
    finally:
        http_client.close()
def get(self, club_id, order_id):
    """Render the order detail page, with the applies attached to the order."""
    logging.info("GET %r", self.request.uri)
    access_token = DEFAULT_USER_ID

    order = self.get_order_index(order_id)
    order['create_time'] = timestamp_datetime(order['create_time'])
    # Amounts are stored in cents; convert to yuan for display.
    order['amount'] = float(order['amount']) / 100
    order['actual_payment'] = float(order['actual_payment']) / 100

    # Translate numeric status codes into display labels.
    pay_labels = {30: u"支付成功", 31: u"支付失败", 21: u"下单失败", 20: u"未支付"}
    if order['pay_status'] in pay_labels:
        order['pay_status'] = pay_labels[order['pay_status']]
    fill_labels = {0: u"未填报", 50: u"填报成功"}
    if order['_status'] in fill_labels:
        order['_status'] = fill_labels[order['_status']]

    activity = self.get_activity(order['item_id'])

    query = {"filter": "order", "order_id": order_id, "page": 1, "limit": 20}
    url = url_concat(API_DOMAIN + "/api/applies", query)
    http_client = HTTPClient()
    response = http_client.fetch(
        url, method="GET",
        headers={"Authorization": "Bearer " + access_token})
    logging.info("got response.body %r", response.body)
    data = json_decode(response.body)
    applies = data['rs']['data']
    for entry in applies:
        # Apply creation time: timestamp -> formatted date string.
        entry['create_time'] = timestamp_datetime(float(entry['create_time']))
        entry['gender'] = u'男' if entry['gender'] == 'male' else u'女'

    self.render('order/order.html', activity=activity, applies=applies, order=order)
def send_profile_to_contacts(self):
    '''
    External methods to not send too much times the changed profile.
    A timer is set to wait for other modifications before running this
    function that sends modification requests to every contacts.
    '''
    client = HTTPClient()
    self.sending_data = False

    user = UserManager.getUser()
    jsonbody = user.toJson()

    activity = Activity(authorKey=user.key, author=user.name,
                        verb="modifies", docType="profile",
                        method="PUT", docId="none", isMine=True)
    activity.save()

    def record_failure(failed_contact):
        # Single place for the failure bookkeeping that was duplicated
        # in both error branches of the original.
        logger.error("""
            Profile sending to a contact failed, error infos are stored
            inside activity.
        """)
        activity.add_error(failed_contact)
        activity.save()

    for contact in ContactManager.getTrustedContacts():
        try:
            request = HTTPRequest("%scontacts/update-profile/" % contact.url,
                                  method="PUT",
                                  body=jsonbody,
                                  validate_cert=False)
            response = client.fetch(request)
            if response.error:
                record_failure(contact)
        except Exception:
            # Narrowed from bare `except:`; a single unreachable contact
            # must not abort the broadcast to the remaining contacts.
            record_failure(contact)
    logger.info("Profile update sent to all contacts.")
def message_post(mobile, content):
    """Send an SMS ``content`` to ``mobile`` via the SendMT gateway.

    NOTE(review): ``result`` tracks success/failure but the function
    returns None; presumably callers rely only on the printed output —
    confirm before adding a return value.
    """
    result = False
    resultStr = "未知的内部错误"
    # print("准备发送 mobile = {mobile} data_plan={data_plan} expiry_time={expiry_time}".format(mobile=mobile, data_plan=data_plan,expiry_time=expiry_time))
    http_client = HTTPClient()
    try:
        # Credentials come from the module-level `config` mapping.
        body_template = "UserName={username}&UserPass={userpass}&Subid={subid}&Mobile={mobile}&Content={content}"
        body = body_template.format(username=config['username'],
                                    userpass=config['password'],
                                    subid="",
                                    mobile=mobile,
                                    content=content)
        response = http_client.fetch(
            "http://114.215.130.61:8082/SendMT/SendMessage",
            method="POST",
            body=body)
        if response.code == 200:
            # Gateway replies with a comma-separated line: "<code>,<message id>".
            responselist = response.body.decode().strip().split(',')
            result_code = responselist[0]
            if result_code == success_code:
                result = True
                resultStr = "发送成功 mobile = {mobile} id = {messageid}".format(
                    mobile=mobile, messageid=responselist[1])
                print(resultStr)
            else:
                result = False
                # Map known gateway error codes to human-readable text;
                # unknown codes are appended verbatim.
                if result_code in return_values:
                    resultStr = return_values[result_code]
                else:
                    resultStr += ("(" + result_code + ")")
                print("发送失败 mobile = {mobile} errcode={code} errstr={errstr}".
                      format(mobile=mobile, code=result_code, errstr=resultStr))
        else:
            resultStr += (",http返回码({0})".format(response.code))
            print("Send message get a unknown http response code = {0}".format(
                response.code))
    except Exception as e:
        # Broad catch keeps a gateway failure from crashing the caller.
        resultStr += " 出现了异常"
        print("Send message catch a exception:", e)
    finally:
        http_client.close()
def pre_spawn_start(self, user, spawner):
    """Register the user's OAuth tokens with the OAuth manager and seed the
    notebook environment/files before the single-user server spawns.

    Raises:
        web.HTTPError: 400 when neither access nor refresh token is set.
    """
    # No sense spawning anything if the access/refresh tokens aren't provided
    if self.access_token == '' and self.refresh_token == '':
        raise web.HTTPError(
            400,
            "The spawner can't load the necessary access_token and refresh_token parameters...try logging out of JupyterHub, then logging back in. If the error persists, inform the ODR group about it."
        )

    # Set additional environment variables for the soon-to-be-spawned notebook
    oauth_session_token = secrets.token_hex(32)
    spawner.env.update({
        'ODR_BASEURL': self.odr_baseurl,
        'OAUTH_SESSION_TOKEN': oauth_session_token,
        'OAUTH_MANAGER_PORT': self.manager_port,
    })

    # Build an API request so the OAuth_manager can store the user's data
    params = json.dumps(dict(
        api_auth_token=self.manager_token,
        access_token=self.access_token,
        refresh_token=self.refresh_token,
        username=user.name,
        user_session_token=oauth_session_token,
    ))
    req = HTTPRequest(
        'http://127.0.0.1:' + self.manager_port + '/services/odr_oauth_manager/create_user',
        method="POST",
        body=params,
    )

    # POST this data to the OAuth_manager; close the blocking client so its
    # socket is released even if the request fails.
    http_client = HTTPClient()
    try:
        resp = http_client.fetch(req)
    finally:
        http_client.close()

    # Copy a readme and a utility python file into the user's directory
    # (copyfile for contents, copystat for permissions/timestamps).
    for fname in ('odr_env.py', 'odr_env_readme.md'):
        src = '/root/' + fname
        dst = '/home/' + user.name + '/' + fname
        shutil.copyfile(src, dst)
        shutil.copystat(src, dst)
def post():
    """Submit one TOTP-authenticated admin book operation and print the reply."""
    token = otp.get_totp('UWFR72OSH6B4CZCE')
    payload = {
        'user_id': 100001,
        'operator': 'wangtao',
        'value': 1,
        'notes': 'хКа',
        'token': token,
    }
    client = HTTPClient()
    response = client.fetch('http://localhost:8899/admin/book',
                            method='POST',
                            body=urllib.parse.urlencode(payload))
    print('OUTPUT=' + response.body.decode('utf8'))
    client.close()
def cull_idle(url, api_token, timeout, async_):
    """cull idle single-user servers

    NOTE(review): the last parameter was originally named ``async``, a
    reserved keyword since Python 3.7 that prevents the module from even
    parsing; renamed to ``async_`` per PEP 8. Positional callers are
    unaffected.
    """
    auth_header = {
        'Authorization': 'token %s' % api_token
    }
    req = HTTPRequest(url=url + '/api/users',
                      headers=auth_header,
                      )
    now = datetime.datetime.utcnow()
    cull_limit = now - datetime.timedelta(seconds=timeout)
    client = AsyncHTTPClient()
    resp = yield client.fetch(req)
    users = json.loads(resp.body.decode('utf8', 'replace'))
    blocking_client = HTTPClient()
    futures = []
    req_type = {True: "non-blocking", False: "blocking"}[async_]
    for user in users:
        last_activity = parse_date(user['last_activity'])
        if user['server'] and last_activity < cull_limit:
            name = user['name']
            app_log.info("Request %s cull for %s (inactive since %s)",
                         req_type, name, last_activity)
            req = HTTPRequest(url=url + '/api/users/%s/server' % name,
                              method='DELETE',
                              headers=auth_header,
                              )
            if async_:
                futures.append((name, client.fetch(req)))
            else:
                try:
                    resp = blocking_client.fetch(req)
                except Exception as e:
                    app_log.info("Could not cull %s", name)
                    app_log.info(str(e))
                else:
                    app_log.info("Culled %s", name)
        else:
            app_log.debug("Not culling %s (active since %s)",
                          user['name'], last_activity)
    for (name, f) in futures:
        try:
            yield f
        except Exception as e:
            app_log.info("Could not cull %s", name)
            app_log.info(str(e))
        else:
            app_log.info("Culled %s", name)
    # Release the blocking client's socket once all culls are done.
    blocking_client.close()
def get(self):
    """Download the resource at ``?url=`` and save it under static/downloads.

    Query args:
        url: the resource to fetch (required).
        filename: name to save as (default 'index.html').
    """
    # 获取参数的url(下载资源的路径)
    url = self.get_query_argument("url")
    filename = self.get_query_argument('filename', 'index.html')

    # Blocking fetch; close the client so the socket is released even on error.
    client = HTTPClient()
    try:
        resp: HTTPResponse = client.fetch(url, validate_cert=False)
    finally:
        client.close()

    # 保存到static目录下static/downloads
    from app import BASE_DIR, os
    # SECURITY: `filename` is untrusted query input; strip any directory
    # components so values like "../../etc/passwd" cannot escape the
    # downloads directory.
    filename = os.path.basename(filename)
    download_dir = os.path.join(BASE_DIR, "static/downloads")
    with open(os.path.join(download_dir, filename), "wb") as f:
        f.write(resp.body)
    self.write("下载成功!!")
def get_points_log(self, _account_id, activity_id, _filter):
    """Fetch the first points-log entry page for an account/activity pair.

    Returns:
        The ``rs.data`` list from the points API response.
    """
    headers = {"Authorization": "Bearer " + DEFAULT_USER_ID}
    params = {
        "filter": _filter,
        "account_id": _account_id,
        "item_id": activity_id,
        "page": 1,
        "limit": 1
    }
    url = url_concat(API_DOMAIN + "/api/points", params)
    # Close the blocking client so its socket is released on all paths.
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url, method="GET", headers=headers)
    finally:
        http_client.close()
    logging.info("got points_log response.body=[%r]", response.body)
    data = json_decode(response.body)
    rs = data['rs']
    return rs['data']
def post(self, league_id):
    """Register a new franchise (distributor) under the given league."""
    logging.info("GET league_id %r", league_id)
    access_token = self.get_access_token()
    logging.info("GET access_token %r", access_token)

    # Registration form fields.
    name = self.get_argument("reg_name", "")
    phone = self.get_argument("reg_phone", "")
    email = self.get_argument("reg_email", "")
    city = self.get_argument("reg_city", "")
    intro = self.get_argument("reg_intro", "")

    url = API_DOMAIN + "/api/leagues/" + league_id + "/franchises"
    http_client = HTTPClient()
    headers = {"Authorization": "Bearer " + access_token}
    data = {
        "name": name,
        "phone": phone,
        "email": email,
        "franchise_type": "分销商",
        "province": city,
        "city": city,
        "img": "http://tripc2c-club-title.b0.upaiyun.com/default/banner4.png",
        "introduction": intro
    }
    _json = json_encode(data)
    logging.info("request %r body %r", url, _json)
    # Close the blocking client even when the API call fails.
    try:
        response = http_client.fetch(url, method="POST", headers=headers, body=_json)
    finally:
        http_client.close()
    logging.info("got response %r", response.body)

    # (Removed a large block of commented-out "accept application" code.)
    err_msg = "注册成功!"
    self.render('resale/register-success.html', err_msg=err_msg)
def post(self):
    """Log out by revoking the auth token, then redirect to the login page."""
    logging.info(self.request)
    ticket = self.get_session_ticket()
    auth_url = "http://" + AUTH_HOST + "/auth/token"
    client = HTTPClient()
    revoke_headers = {"Authorization": "Bearer " + ticket['access_token']}
    response = client.fetch(auth_url, method="DELETE", headers=revoke_headers)
    logging.info("got logout response %r", response.body)
    self.redirect("/login")
def get(self, vendor_id):
    """Render the logged-in account's order list for a vendor's WeChat page."""
    logging.info("got vendor_id %r in uri", vendor_id)
    _tab = self.get_argument("tab", "")
    logging.info("got _tab %r", _tab)
    access_token = self.get_access_token()
    account_id = self.get_secure_cookie("account_id")
    logging.info("got account_id=[%r] from cookie", account_id)

    params = {"filter": "account", "account_id": account_id, "page": 1, "limit": 20}
    url = url_concat(API_DOMAIN + "/api/orders", params)
    http_client = HTTPClient()
    headers = {"Authorization": "Bearer " + access_token}
    # Close the blocking client even when the API call fails.
    try:
        response = http_client.fetch(url, method="GET", headers=headers)
    finally:
        http_client.close()
    logging.info("got response.body %r", response.body)
    data = json_decode(response.body)
    rs = data['rs']
    orders = rs['data']
    for order in orders:
        # Order time: timestamp -> "%m月%d 星期%w" style display string.
        order['create_time'] = timestamp_datetime(float(order['create_time']))
        # Amounts are stored in cents; convert to yuan.
        order['amount'] = float(order['amount']) / 100
        order['actual_payment'] = float(order['actual_payment']) / 100

    # (Removed a large block of commented-out legacy DAO pagination code.)
    self.render('wx/my-orders.html', vendor_id=vendor_id, orders=orders, tab=int(_tab))
def do_post(url, args):
    """POST ``args`` as a JSON body to ``url`` and pretty-print the reply.

    Args:
        url: Target URL.
        args: JSON-serializable payload.
    """
    http_client = HTTPClient()
    # Parenthesized print works identically on Python 2 and 3 for a single
    # argument; the original used Py2 print statements.
    print('Post request: %s' % url)
    body = json.dumps(args)
    print('Post arguments: %s' % body)
    request = HTTPRequest(url=url, method='POST', body=body,
                          headers={
                              "Content-Type": "application/json",
                              # Header values must be strings; the original
                              # passed the raw int from len().
                              "Content-Length": str(len(body))
                          })
    # Close the blocking client on all paths.
    try:
        response = http_client.fetch(request)
    finally:
        http_client.close()
    result = json.loads(response.body)
    do_print(result)
def find_length(owtf, http_helper, lsig, url, method, detection_struct, ch, headers, body=None):
    """This function finds the length of the fuzzing placeholder.

    Doubles the payload size until one of the detection methods fires, then
    hands off to ``binary_search`` between the last two sizes. Returns None
    if nothing is detected within 15 doublings.
    """
    size = 8192
    minv = 0
    http_client = HTTPClient()
    new_url = url
    new_body = body
    new_headers = headers
    payload = ""
    try:
        for loop in range(0, 15):  # used to avoid potential deadloops
            payload = size * ch
            # Substitute the length signature wherever it appears.
            if lsig in url:
                new_url = url.replace(lsig, payload)
            elif body is not None and lsig in body:
                new_body = body.replace(lsig, payload)
            elif headers is not None and lsig in str(headers):
                raw_val = str(headers)
                raw_val = raw_val.replace(lsig, payload)
                new_headers = ast.literal_eval(str(raw_val))
            else:
                Error(owtf, "Length signature not found!")
            request = http_helper.create_http_request(method, new_url, new_body, new_headers)
            try:
                response = http_client.fetch(request)
            except HTTPError as e:
                if e.response is None:
                    # BUG FIX: the original fell through with `response`
                    # unbound (first iteration) or stale when the error had
                    # no response (e.g. timeout); skip detection this round.
                    minv = size
                    size *= 2
                    continue
                response = e.response
            for struct in detection_struct:
                if struct["method"](response, struct["arguments"]):
                    return binary_search(http_helper, lsig, minv, size, url,
                                         method, detection_struct, ch, headers, body)
            minv = size
            size *= 2
        # All 15 doublings exhausted without a detection hit.
        return None
    finally:
        # Close on every exit path (the original leaked the client when no
        # detection ever fired).
        http_client.close()
def register_volume(self, volume):
    """Register ``volume`` globally and announce its token to the Jupyter server.

    Raises:
        RuntimeError: if the token registration request fails.
    """
    # globally register volume
    global volumes
    volumes[volume.token] = volume

    # globally register kernel client for this volume in the Jupyter server
    cf = url_escape(find_connection_file())
    http_client = HTTPClient()
    try:
        response = http_client.fetch(self.get_server_url() + '/register_token/' +
                                     volume.token.decode('utf8') + '/' + cf)
    except Exception as e:
        raise RuntimeError("could not register token: " + str(e))
    finally:
        # The original only closed on success, leaking the client on failure.
        http_client.close()