class SyncHTTPClientTest(unittest.TestCase):
    """Exercises the blocking ``HTTPClient`` against a real ``HTTPServer``
    that runs on its own ``IOLoop`` in a background thread."""

    def setUp(self):
        # The server gets a private IOLoop in its own thread so the
        # synchronous client on the test thread can block without
        # deadlocking the server.
        self.server_ioloop = IOLoop()
        event = threading.Event()

        @gen.coroutine
        def init_server():
            # Runs on the server loop: bind a free port and start serving.
            sock, self.port = bind_unused_port()
            app = Application([("/", HelloWorldHandler)])
            self.server = HTTPServer(app)
            self.server.add_socket(sock)
            event.set()

        def start():
            # Finish server setup on the loop's own thread, then run forever.
            self.server_ioloop.run_sync(init_server)
            self.server_ioloop.start()

        self.server_thread = threading.Thread(target=start)
        self.server_thread.start()
        # Don't hand out URLs until the server is actually listening.
        event.wait()
        self.http_client = HTTPClient()

    def tearDown(self):
        def stop_server():
            self.server.stop()

            # Delay the shutdown of the IOLoop by several iterations because
            # the server may still have some cleanup work left when
            # the client finishes with the response (this is noticeable
            # with http/2, which leaves a Future with an unexamined
            # StreamClosedError on the loop).
            @gen.coroutine
            def slow_stop():
                # The number of iterations is difficult to predict. Typically,
                # one is sufficient, although sometimes it needs more.
                for i in range(5):
                    yield
                self.server_ioloop.stop()

            self.server_ioloop.add_callback(slow_stop)

        self.server_ioloop.add_callback(stop_server)
        self.server_thread.join()
        self.http_client.close()
        self.server_ioloop.close(all_fds=True)

    def get_url(self, path):
        # Absolute URL on the loopback server bound in setUp.
        return "http://127.0.0.1:%d%s" % (self.port, path)

    def test_sync_client(self):
        response = self.http_client.fetch(self.get_url("/"))
        self.assertEqual(b"Hello world!", response.body)

    def test_sync_client_error(self):
        # Synchronous HTTPClient raises errors directly; no need for
        # response.rethrow()
        with self.assertRaises(HTTPError) as assertion:
            self.http_client.fetch(self.get_url("/notfound"))
        self.assertEqual(assertion.exception.code, 404)
class PrometheusEndpoint(MetricsContext):
    """Integration test: metrics recorded through the context must appear
    on the Prometheus scrape endpoint (assumed to listen on port 8001 —
    TODO confirm against the fixture config)."""

    def setUp(self):
        super().setUp()
        # Blocking client used to scrape the metrics endpoint.
        self.http_client = HTTPClient()

    def tearDown(self):
        super().tearDown()
        self.http_client.close()

    def test_should_present_metrics(self):
        # Record a mix of counter and timer samples...
        self.context.metrics.incr('test.counter')
        self.context.metrics.incr('test.counter', 5)
        self.context.metrics.timing('test.timer', 150)
        self.context.metrics.timing('test.timer', 350)
        self.context.metrics.incr('response.status.200', 1)
        # ...then scrape and verify the aggregated exposition output
        # (counter total 1+5=6, timer count 2, timer sum 150+350=500).
        response = self.http_client.fetch('http://localhost:8001')
        expect(response.body).Not.to_be_null()
        body = str(response.body)
        expect(body).to_include('thumbor_test_counter_total 6')
        expect(body).to_include('thumbor_test_timer_count 2')
        expect(body).to_include('thumbor_test_timer_sum 500')
        expect(body).to_include(
            'thumbor_response_status_total{statuscode="200"} 1')
def get(self):
    """Proxy feed data for the named service into the response body.

    ``name=atlas`` proxies the Instagram recent-media feed (using an
    access token read from disk); ``name=issuu`` proxies an Issuu search.
    Any fetch failure is written as a best-effort error envelope.
    """
    name = self.get_argument('name', '')
    if name == "atlas":
        # TODO: Make this asynchronous and move access.token to
        # aswwu/databases git repo
        # The `with` block closes the file; the original also called
        # f.close() redundantly inside it.
        with open("/var/www/atlas/access.token", 'r') as f:
            token = f.read()
        token = token.rstrip('\n')
        http_client = HTTPClient()
        try:
            response = http_client.fetch(
                "https://api.instagram.com/v1/users/self/media/recent/?access_token=" + token)
            self.write(response.body)
        except Exception as e:
            # NOTE: not valid JSON if the message contains quotes.
            self.write("{error: '" + str(e) + "'}")
        finally:
            # Always release the client, even if self.write raises.
            http_client.close()
    elif name == "issuu":
        http_client = HTTPClient()
        try:
            response = http_client.fetch(
                "http://search.issuu.com/api/2_0/document?username=aswwucollegian&pageSize=1&responseParams=title,description&sortBy=epoch")
            self.write(response.body)
        except Exception as e:
            self.write("{error: '" + str(e) + "'}")
        finally:
            http_client.close()
    else:
        self.write("Something went wrong.")
def product_query():
    """Query the upstream ``getdp`` endpoint with a signed timestamp.

    Prints the request URL and the response status/decoded body. Always
    closes the HTTP client, even when the fetch raises.
    """
    key = ''
    http_client = HTTPClient()
    t = time.localtime()
    tsp = int(time.mktime(t))
    # Signature input is "<timestamp><key>" per the upstream convention.
    x = '' + str(tsp) + key
    userkey = signature(x)
    print(tsp, userkey, x)
    # BUG FIX(review): the original URL contained "did=1687×tamp=" —
    # "&times" had been mangled into the HTML entity '×'; restored to
    # the intended "&timestamp=" query parameter.
    url = ('http://cz.umeol.com:6090/dm/v/cz/getdp.do'
           '?spid=3&did=1687&timestamp={tsp}&userkey={userkey}').format(
        tsp=tsp, userkey=userkey)
    print(url)
    try:
        response = http_client.fetch(url, method='GET')
        print(response.code, response.body.decode())
    finally:
        http_client.close()
def post_context(self, user_id: str, application_id: str,
                 session_id: str, locale: str) -> tuple:
    """Create a new context on the context service.

    Returns the ``("_id", "_rev")`` response headers of the created
    context (the original annotation said ``dict`` but a tuple is what
    is actually returned). Tornado's ``HTTPError`` propagates on failure;
    the original wrapped it in a no-op ``except/raise``.
    """
    self.logger.debug(
        "user_id=%s,application_id=%s,session_id=%s,locale=%s",
        user_id, application_id, session_id, locale)
    # Body is currently empty; detection data now travels at message level.
    request_body = {}
    url = "%s?session_id=%s&application_id=%s&locale=%s" % (
        CONTEXT_URL, session_id, application_id, locale)
    if user_id is not None:
        url += "&user_id=%s" % user_id
    http_client = HTTPClient()
    try:
        response = http_client.fetch(
            HTTPRequest(url=url, body=json_encode(request_body),
                        method="POST"))
    finally:
        # Close even when fetch raises (the original leaked on error).
        http_client.close()
    return response.headers["_id"], response.headers["_rev"]
def post_context_feedback(self, context_id: str, user_id: str,
                          application_id: str, session_id: str,
                          product_id: str, _type: str,
                          meta_data: dict = None):
    """Post feedback for an existing context to the context service.

    Returns the ``"_rev"`` response header. Logs and re-raises
    ``HTTPError`` on failure.
    """
    self.logger.debug(
        "context_id=%s,user_id=%s,application_id=%s,session_id=%s,product_id=%s,"
        "_type=%s,meta_data=%s",
        context_id, user_id, application_id, session_id, product_id,
        _type, meta_data)
    try:
        url = "%s/%s/feedback/?application_id=%s&session_id=%s&product_id=%s&type=%s" % (
            CONTEXT_URL, context_id, application_id, session_id,
            product_id, _type)
        # BUG FIX: the original appended the literal string "&user_id=%s"
        # without interpolating user_id.
        url += "&user_id=%s" % user_id if user_id is not None else ""
        request_body = {}
        if meta_data is not None:
            request_body["meta_data"] = meta_data
        http_client = HTTPClient()
        try:
            response = http_client.fetch(
                HTTPRequest(url=url, body=dumps(request_body),
                            method="POST"))
        finally:
            # Close on fetch errors too (the original leaked then).
            http_client.close()
        return response.headers["_rev"]
    except HTTPError:
        self.logger.error("post_context_feedback,url=%s", url)
        raise
def typeset(latex, id_base):
    """Render a TeX snippet to SVG via a local MathJax server.

    Returns the cleaned SVG markup, or a placeholder ``<text>`` element
    when the server returns an error (ERROR 1) or is unreachable
    (ERROR 2).
    """
    # Strip surrounding $...$ delimiters; the server expects bare TeX.
    if latex.startswith('$') and latex.endswith('$'):
        latex = latex[1:-1]
    mathjax = json.dumps({
        'format': 'TeX',
        'math': latex,
        'svg': True,
        'width': 10000,
        'linebreaks': False,
    })
    headers = {'Content-Type': 'application/json'}
    request = HTTPRequest('http://localhost:8003/', method='POST',
                          headers=headers, body=mathjax)
    http_client = HTTPClient()
    try:
        response = http_client.fetch(request)
        svg = clean_svg(response.body, id_base)
    except HTTPError:
        # HTTPError is raised for non-200 responses; the response
        # can be found in e.response.
        svg = '<text>ERROR 1</text>'
    except IOError:
        # Connection-level failures (server down, timeout, ...).
        svg = '<text>ERROR 2</text>'
    finally:
        # Close even if clean_svg raises (the original leaked then).
        http_client.close()
    return svg
def run_query(http_client, path, num, seed, query_type, query_num,
              person_ids=[], message_ids=[]):
    """Run one benchmark query type against the endpoint list and print
    per-request and average response times.

    NOTE(review): person_ids/message_ids are unused mutable default
    arguments — confirm whether they can be dropped. The http_client
    parameter is shadowed by a fresh HTTPClient per request below.
    """
    response_recv = 0
    response_time = 0
    has_error = None
    # Map the query-type code to its display name.
    print("- {} {}:".format(
        IS_NAME if query_type == "is"
        else II_NAME if query_type == "ii"
        else ID_NAME if query_type == "id"
        else IC_NAME if query_type == "ic"
        else BI_NAME, query_num))
    urls = []
    urls = get_endpoints(path, query_type, query_num)
    # By rwang: is/ii/id queries process 100 different urls (different
    # parameters); ic & bi queries repeat the same url $num times.
    if (query_type == "ic" or query_type == "bi") and len(urls) == 1:
        url = urls[0]
        urls = []
        for i in range(num):
            urls.append(url)
    request_sent = len(urls)
    try:
        for i in range(len(urls)):
            # A fresh client per request; generous timeouts for long queries.
            http_client = HTTPClient()
            response = http_client.fetch(urls[i], method="GET",
                                         connect_timeout=3600,
                                         request_timeout=3600)
            t, has_error = handle_response(response)
            time.sleep(3)
            http_client.close()
            if not has_error:
                # delete the first time result
                print("-- Each Response Time: {}, {} sec\n".format(i, t))
                # By rwang: response time excludes the first (warm-up) request.
                if i != 0:
                    response_time += t
                    response_recv += 1
            else:
                break
        if not has_error:
            print("\n-- # {}: {}".format(
                "Seeds" if not seed else "Iterations", request_sent))
            if response_recv > 0:
                print("-- Average Response Time: {} sec\n".format(
                    round((response_time / response_recv), 10)))
    except HTTPClientError as e:
        print("-- Bad Response: HTTP {} {}".format(e.response.code,
                                                   e.response.reason))
    except Exception as e:
        print("-- Unexpected Error:\n{}".format(repr(e)))
def query_balance():
    """Query the FBSY channel-account balance and print it in yuan."""
    client = HTTPClient()
    api_url = 'http://111.20.150.45:7001/FBSY/FBSYQueryBalance.do'
    # The API wants a YYYYMMDDHHMMSS timestamp plus a lowercase signature.
    stamp = time.strftime("%Y%m%d%H%M%S", time.localtime(time.time()))
    sign = signature('100006', stamp, 'q59ka440')
    payload = '&'.join([
        'channelId=' + '100006',
        'timeStamp=' + stamp,
        'sign=' + sign.lower(),
    ])
    try:
        reply = client.fetch(api_url, method='POST', headers=h,
                             body=payload, request_timeout=120)
        parsed = json.loads(reply.body.decode())
        print(parsed)
        # Balance arrives in cents; render as a two-decimal yuan string.
        cents = int(parsed.get('result').get('balance'))
        cents = '%.02f' % (cents / 100)
        print('BALANCE=%s' % cents)
    finally:
        client.close()
def sina_ip(ip):
    """Look up *ip*'s geographic attribution via the Sina iplookup service
    and return a masked display string like '网友183.*.*.171[city]'."""
    attribution = ""
    # Localhost has no public attribution; substitute a sample address.
    if ip == "127.0.0.1":
        ip = '183.208.22.171'
    http_client = HTTPClient()
    response = None
    url = "http://int.dpool.sina.com.cn/iplookup/iplookup.php?format=js&ip={0}".format(ip)
    try:
        response = http_client.fetch(url, method='GET', request_timeout=120)
    except Exception as e:
        request_log.info(e)
    finally:
        http_client.close()
    if response and response.code == 200:
        # Slices a fixed 21-char JS prefix and a trailing ';' — presumably
        # "var remote_ip_info = {...};" — TODO confirm against a live reply.
        # SECURITY NOTE(review): eval() on a remote response executes
        # arbitrary code if the service is compromised; consider
        # ast.literal_eval or a JSON endpoint instead.
        response_body = eval(response.body.decode('utf8')[21:-1])
        try:
            province = response_body['province']
            city = response_body['city']
            attribution = city  # +province
        except Exception as e:
            error_log.error(e)
    # Mask the middle octets before showing the address to other users.
    ip_piece = ip.split(".")
    ip_piece[1] = '*'
    ip_piece[2] = '*'
    ip_attribution = '网友' + '.'.join(ip_piece) + '[' + attribution + ']'
    request_log.info(ip_attribution)
    return ip_attribution
def sina_ip(ip):
    """Resolve *ip* to a city attribution via Sina's iplookup service;
    returns a masked display string ('网友<masked-ip>[city]')."""
    attribution = ""
    # Substitute a routable sample address for localhost.
    if ip == "127.0.0.1":
        ip = '183.208.22.171'
    http_client = HTTPClient()
    response = None
    url = "http://int.dpool.sina.com.cn/iplookup/iplookup.php?format=js&ip={0}".format(
        ip)
    try:
        response = http_client.fetch(url, method='GET', request_timeout=120)
    except Exception as e:
        request_log.info(e)
    finally:
        http_client.close()
    if response and response.code == 200:
        # Strips a fixed 21-char wrapper — presumably the JS assignment
        # prefix — TODO confirm the exact prefix length.
        # SECURITY NOTE(review): eval() on remote data is dangerous;
        # prefer ast.literal_eval or a JSON-format endpoint.
        response_body = eval(response.body.decode('utf8')[21:-1])
        try:
            province = response_body['province']
            city = response_body['city']
            attribution = city  # +province
        except Exception as e:
            error_log.error(e)
    # Mask the two middle octets before display.
    ip_piece = ip.split(".")
    ip_piece[1] = '*'
    ip_piece[2] = '*'
    ip_attribution = '网友' + '.'.join(ip_piece) + '[' + attribution + ']'
    return ip_attribution
def connect(self, event):
    """Handle a cluster-connect event: build the peer service URL from
    the event payload, fetch it, and dispatch the decoded reply.

    ``event.data`` layout: ``((service_addr, broad_cast_port),
    service_port, should_request_connect)``. Errors are dispatched as
    LOG events rather than raised.
    """
    service_addr, broad_cast_port = event.data[0]
    service_port = event.data[1]
    should_request_connect = event.data[2]
    context = Context()
    context = self.set_dependencies(context, config)
    reciever_info = {
        'service_name': context.references.config[0].service_name,
        'host_address': service_addr,
        'port': service_port,
        'service_type': context.references.config[0].service_type,
    }
    url = self.get_service_address_from_request(reciever_info)
    url = url + '?authentication=' + self.module.cluster_authentication
    if should_request_connect:
        # Advertise our own port so the peer can connect back.
        url = url + '&port=' + str(self.module.port)
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url)
        if response.body is not None:  # was `!= None`
            response = self.load_message(response.body)
            self.module.dispatch_event(self.obtained_event, response)
    except Exception as e:
        # HTTPError and any other failure were handled identically in the
        # original two except clauses; one clause covers both.
        self.module.dispatch_event('LOG', (1, e, config['service_name']))
    finally:
        # Close on every path (the original leaked if dispatch raised).
        http_client.close()
def gen_msg_token(phone):
    """Generate a 4-digit verification code and SMS it to *phone*.

    Tries the ihuyi HTTP gateway first and falls back to the Yuntongxun
    template SMS (``sendTemplateSMS``) when the gateway is unreachable or
    rejects the message (root[0].text != '2'). Returns True on success,
    otherwise the fallback provider's error message.
    """
    s = DBSession()
    # Four distinct digits from 1-9 (sample() never repeats).
    code = "".join(random.sample("123456789", 4))
    flag = False
    url = ("http://106.ihuyi.cn/webservice/sms.php?method=Submit"
           "&account={account}&password={password}&mobile={phone}"
           "&content={content}").format(
        account=account, password=password, phone=phone,
        content=url_escape(content.format(code=code)))
    h = HTTPClient()
    try:
        try:
            res = h.fetch(url, connect_timeout=5.0)
        except Exception:
            # Narrowed from a bare `except:`. Gateway unreachable — fall
            # back to the secondary provider.
            # NOTE(review): {code} passes a set literal; confirm
            # sendTemplateSMS expects an iterable of datas, not a string.
            flag, msg = sendTemplateSMS(phone, {code}, 32417)
            if flag:
                update_code(phone, code)
                return True
            return msg
    finally:
        # Close on every path (the original skipped close() on the
        # fallback returns).
        h.close()
    root = ElementTree.fromstring(res.body.decode())
    if not root[0].text == '2':
        # Gateway refused the message — fall back to Yuntongxun.
        flag, msg = sendTemplateSMS(phone, {code}, 32417)
        if flag:
            update_code(phone, code)
            return True
        return msg
    update_code(phone, code)
    return True
class SyncHTTPClientTest(unittest.TestCase):
    """Tests the blocking ``HTTPClient`` against a server running on a
    dedicated ``IOLoop`` in a background thread."""

    def setUp(self):
        if IOLoop.configured_class().__name__ == 'TwistedIOLoop':
            # TwistedIOLoop only supports the global reactor, so we can't have
            # separate IOLoops for client and server threads.
            raise unittest.SkipTest(
                'Sync HTTPClient not compatible with TwistedIOLoop')
        self.server_ioloop = IOLoop()

        @gen.coroutine
        def init_server():
            # Runs on the server loop: bind a free port and start serving.
            sock, self.port = bind_unused_port()
            app = Application([('/', HelloWorldHandler)])
            self.server = HTTPServer(app)
            self.server.add_socket(sock)
        # Bind synchronously before the loop thread starts serving.
        self.server_ioloop.run_sync(init_server)

        self.server_thread = threading.Thread(target=self.server_ioloop.start)
        self.server_thread.start()
        self.http_client = HTTPClient()

    def tearDown(self):
        def stop_server():
            self.server.stop()

            # Delay the shutdown of the IOLoop by several iterations because
            # the server may still have some cleanup work left when
            # the client finishes with the response (this is noticeable
            # with http/2, which leaves a Future with an unexamined
            # StreamClosedError on the loop).
            @gen.coroutine
            def slow_stop():
                # The number of iterations is difficult to predict. Typically,
                # one is sufficient, although sometimes it needs more.
                for i in range(5):
                    yield
                self.server_ioloop.stop()

            self.server_ioloop.add_callback(slow_stop)

        self.server_ioloop.add_callback(stop_server)
        self.server_thread.join()
        self.http_client.close()
        self.server_ioloop.close(all_fds=True)

    def get_url(self, path):
        # Absolute URL on the loopback server bound in setUp.
        return 'http://127.0.0.1:%d%s' % (self.port, path)

    def test_sync_client(self):
        response = self.http_client.fetch(self.get_url('/'))
        self.assertEqual(b'Hello world!', response.body)

    def test_sync_client_error(self):
        # Synchronous HTTPClient raises errors directly; no need for
        # response.rethrow()
        with self.assertRaises(HTTPError) as assertion:
            self.http_client.fetch(self.get_url('/notfound'))
        self.assertEqual(assertion.exception.code, 404)
def get(self):
    """Proxy feed data for the named service into the response body.

    ``name=atlas`` proxies the Instagram recent-media feed (access token
    read from disk); ``name=issuu`` proxies an Issuu search. Fetch
    failures are written as a best-effort error envelope.
    """
    name = self.get_argument('name', '')
    if name == "atlas":
        # TODO: Make this asynchronous and move access.token to
        # aswwu/databases git repo
        # `with` closes the file; the original also called f.close()
        # redundantly inside the block.
        with open("/var/www/atlas/access.token", 'r') as f:
            token = f.read()
        token = token.rstrip('\n')
        http_client = HTTPClient()
        try:
            response = http_client.fetch(
                "https://api.instagram.com/v1/users/self/media/recent/?access_token=" + token)
            self.write(response.body)
        except Exception as e:
            # NOTE: not valid JSON if the message contains quotes.
            self.write("{error: '" + str(e) + "'}")
        finally:
            # Always release the client, even if self.write raises.
            http_client.close()
    elif name == "issuu":
        http_client = HTTPClient()
        try:
            response = http_client.fetch(
                "http://search.issuu.com/api/2_0/document?username=aswwucollegian&pageSize"
                "=1&responseParams=title,description&sortBy=epoch")
            self.write(response.body)
        except Exception as e:
            self.write("{error: '" + str(e) + "'}")
        finally:
            http_client.close()
    else:
        self.write("Something went wrong.")
def read_bearer_token(*args, **kwargs):
    """Fetch a per-customer bearer token from the legacy payments service.

    Returns ``"Bearer <token>"`` on success. On failure returns a dict
    with 'error' and 'status' keys (matching the legacy error contract).
    Requires ``customer_id`` in kwargs.
    """
    customer_id = kwargs['customer_id']
    client = HTTPClient()
    base = provider.get_value('service.thirstie_legacy.base_url')
    version = 'v0'
    token_url = '{}/{}/payments/user/{}'.format(base, version, customer_id)
    # Read the token from configuration (the original read it twice).
    token = provider.get_value('jwt.token_basic')
    # Set the headers to communicate
    headers = {"Content-Type": "application/json", "Authorization": token}
    try:
        response = client.fetch(token_url, method='GET', headers=headers)
        result = json.loads(response.body.decode('utf-8'))
    except HTTPError as e:
        # Service answered with an error status; propagate its body.
        cart_error = json.loads(e.response.body)
        return {'error': cart_error, 'status': e.code}
    except Exception as e:
        return {'error': "General error intercepted - {} ".format(e),
                'status': 500}
    finally:
        # The original leaked the client on every error return.
        client.close()
    return "Bearer " + result['token']
def read_legacy_cart(th_customer_id, logistic_order_id):
    """Fetch a customer's cart from the legacy payments service.

    Returns the decoded cart JSON on success; on failure returns a dict
    with 'error' and 'status' keys (matching the legacy error contract).
    """
    client = HTTPClient()
    base = provider.get_value('service.thirstie_legacy.base_url')
    version = 'v0'
    cart_url = '{}/{}/payments/user/{}/carts/{}'.format(
        base, version, th_customer_id, logistic_order_id)
    # Read the token from configuration
    token = provider.get_value('jwt.token_basic')
    # Set the headers to communicate
    headers = {"Content-Type": "application/json", "Authorization": token}
    try:
        response = client.fetch(cart_url, method='GET', headers=headers)
        result = json.loads(response.body.decode('utf-8'))
    except HTTPError as e:
        # Service answered with an error status; propagate its body.
        payment_user_error = json.loads(e.response.body)
        return {'error': payment_user_error, 'status': e.code}
    except Exception as e:
        return {'error': "General error intercepted - {} ".format(e),
                'status': 500}
    finally:
        # The original leaked the client on every error return.
        client.close()
    return result
def get_context(self, user_id, application_id, session_id, locale,
                detection_response, context_id, skip_mongodb_log):
    """Create or fetch a context from the context service.

    POSTs a new context when no context_id is given (or when a fresh
    detection_response must be attached); otherwise GETs the existing
    one. Returns the JSON-decoded response body.
    """
    http_client = HTTPClient()
    try:
        if context_id is None or detection_response is not None:
            request_body = {}
            if detection_response is not None:
                request_body["detection_response"] = detection_response
            url = "%s?session_id=%s&application_id=%s&locale=%s" % (
                CONTEXT_URL, session_id, application_id, locale)
            if user_id is not None:
                url += "&user_id=%s" % user_id
            if skip_mongodb_log:
                # Bare flag parameter — presence alone disables logging.
                url += "&skip_mongodb_log"
            response = http_client.fetch(
                HTTPRequest(url=url, body=json_encode(request_body),
                            method="POST"))
        else:
            url = "%s?context_id=%s&session_id=%s" % (CONTEXT_URL,
                                                      context_id,
                                                      session_id)
            if user_id is not None:
                url += "&user_id=%s" % user_id
            response = http_client.fetch(HTTPRequest(url=url, method="GET"))
    finally:
        # Single shared client, closed on all paths including fetch
        # errors (the original leaked when fetch raised).
        http_client.close()
    return json_decode(response.body)
def post(self, *args, **kwargs):
    """Relay a 'prod' data query to the URL named in the request body
    and reply with the upstream result as {'ret': <pretty JSON or None>}."""
    payload = json.loads(self.request.body.decode())
    req_type = payload['type']
    partner = payload['user_id']
    url = payload['url']
    ret = None
    if req_type == 'prod':
        query = {'partner_no': partner,
                 'request_no': "R20140512123022",
                 'contract_id': '100001'}
        # call & wait — synchronous client blocks until upstream answers.
        http_client = HTTPClient()
        try:
            response = http_client.fetch(url + '/data/prod',
                                         method='POST',
                                         body=json.dumps(query))
            decoded = response.body.decode('utf8')
            ret = json.dumps(json.loads(decoded), indent=4)
        except Exception as e:
            print(e)
        finally:
            http_client.close()
    self.finish(json.dumps({'ret': ret}))
def get_steam_user(db, steamid):
    """Coroutine: look up a Steam profile and build a new user document.

    Reads the Steam API key from the 'server' collection, calls
    GetPlayerSummaries, and returns the assembled user dict — or None
    when the Steam API call fails with an HTTPError.
    """
    user = None
    key = yield Op(db['server'].find_one, {'key': 'apikey'})
    url = url_concat(
        'http://api.steampowered.com/ISteamUser/GetPlayerSummaries/v0002/',
        {'key': key['value'], 'steamids': steamid})
    client = HTTPClient()
    try:
        response = client.fetch(url)
        get_user = json_decode(response.body)['response']['players'][0]
        user = {'steamid': get_user['steamid'],
                'steamid32': converter(steamid),
                'personaname': get_user['personaname'],
                'profileurl': get_user['profileurl'],
                'avatar': get_user['avatarfull'],
                'registration': datetime.now(),
                'bookmarks': [],
                'favorites': [],
                # Next profile refresh allowed one minute from now.
                'update': datetime.now() + timedelta(minutes=1),
                'dota_count': 0}
        # 'realname' is optional on Steam profiles; default to None.
        user['realname'] = get_user.get('realname')
    except HTTPError as e:
        logging.error('Error: %s' % e)
    finally:
        # Close even if decoding raises (the original leaked then).
        client.close()
    return user
def __fetch(self, method, path, content_type=None, data=None):
    """Issue a synchronous HTTP request against the service base URI.

    For PUT/POST without an explicit content_type, *data* is
    JSON-encoded (empty string when None). With an explicit
    content_type, *data* is sent through untouched. Returns the
    HTTPResponse on success, or the HTTPError object for non-2xx
    replies (callers inspect the result).
    """
    assert path[0] == '/'
    headers = {}
    body = None
    if method == 'PUT' or method == 'POST':
        if content_type is None:
            headers['Content-Type'] = 'application/json'
            # The original had a dead `str(data)` statement here whose
            # result was discarded; removed.
            body = '' if data is None else json.dumps(data)
        else:
            headers['Content-Type'] = content_type
            body = data
    request = HTTPRequest(self.__base_uri + path, method,
                          headers=headers, body=body)
    http_client = HTTPClient()
    try:
        return http_client.fetch(request)
    except HTTPError as error:
        return error
    finally:
        http_client.close()
def post_context_feedback(self, context_id: str, user_id: str,
                          application_id: str, session_id: str,
                          product_id: str, _type: str,
                          meta_data: dict = None):
    """Post feedback for an existing context to the context service.

    Returns the "_rev" response header; logs the URL and re-raises
    HTTPError on failure.
    """
    self.logger.debug(
        "context_id=%s,user_id=%s,application_id=%s,session_id=%s,product_id=%s,"
        "_type=%s,meta_data=%s",
        context_id, user_id, application_id, session_id, product_id,
        _type, meta_data
    )
    try:
        url = "%s/%s/feedback/?application_id=%s&session_id=%s&product_id=%s&type=%s" % (
            CONTEXT_URL, context_id, application_id, session_id,
            product_id, _type
        )
        # BUG FIX: the original appended the literal "&user_id=%s"
        # without interpolating user_id.
        url += "&user_id=%s" % user_id if user_id is not None else ""
        request_body = {}
        if meta_data is not None:
            request_body["meta_data"] = meta_data
        http_client = HTTPClient()
        try:
            response = http_client.fetch(
                HTTPRequest(url=url, body=dumps(request_body),
                            method="POST"))
        finally:
            # Close on fetch errors too (the original leaked then).
            http_client.close()
        return response.headers["_rev"]
    except HTTPError:
        self.logger.error("post_context_feedback,url=%s", url)
        raise
def _get_from_service(self, _id):
    """Fetch product-detail JSON for *_id* and map it to the local shape.

    Returns the mapped dict, or None (after logging) on any failure —
    this is best-effort by design.
    """
    try:
        url = "%s/product_detail/%s.json" % (CONTENT_URL, _id)
        http_client = HTTPClient()
        try:
            response = http_client.fetch(url)
            data = json_decode(response.body)
        finally:
            # Close even when fetch/decode raises (original leaked then).
            http_client.close()
        return {
            "_id": data["_id"],
            "sequence": data.get("sequence"),
            "title": data["title"],
            # Drop the size-bucket pseudo-attributes.
            "attributes": [
                x for x in data["attributes"]
                if "key" not in x["_id"]
                or x["_id"]["key"] not in ["small sizes", "large sizes"]
            ],
            "images": data["images"],
            "brand": data["brand"],
            "prices": data["prices"],
            # Missing timestamps default to an epoch-ish sentinel.
            "updated": data["updated"] if "updated" in data
            else datetime(2015, 1, 1).isoformat(),
        }
    except Exception:
        # Narrowed from a bare `except:`; still swallows by design.
        app_log.error("get_from_service,_id=%s", _id)
        return None
def get_html(url):
    """Return the raw response body for *url*, or None on HTTP errors.

    Redirects are followed. The client is always closed — the original
    placed close() after the returns, so it never executed.
    """
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url, follow_redirects=True)
        return response.body
    except HTTPError:
        # BUG FIX(review): the original caught ``http_client.HTTPError``;
        # HTTPClient instances have no such attribute, so the except
        # clause itself would raise AttributeError. Uses the module-level
        # HTTPError instead — confirm it is imported at file scope.
        return None
    finally:
        http_client.close()
def get_html(url):
    """Return the response body for *url* (following redirects), or None
    on any error."""
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url, follow_redirects=True)
        return response.body
    except Exception:
        return None
    finally:
        # The original called close() after the returns, so it never ran.
        http_client.close()
def binary_search(http_helper, lsig, minv, maxv, url, method,
                  detection_struct, ch, headers, body=None):
    """Recursively binary-search the payload length in [minv, maxv].

    Replaces the length signature *lsig* (in the URL, body, or headers)
    with ``ch * mid`` and probes the target. When any detection method
    fires, the limit lies at or below mid, so recurse into the lower
    half; otherwise recurse into the upper half. Returns maxv once the
    range is exhausted.
    """
    mid = mid_value(minv, maxv)
    new_url = url
    new_body = body
    new_headers = headers
    # Range exhausted: maxv is the largest non-detected length.
    if minv > maxv:
        return maxv
    http_client = HTTPClient()
    payload = ch * mid
    if lsig in url:
        new_url = url.replace(lsig, payload)  # warning urlencode and etc
    elif body is not None and lsig in body:
        new_body = body.replace(lsig, payload)
    elif headers is not None and lsig in headers:
        # Substitute inside the stringified header dict, then parse the
        # literal back into a dict.
        raw_val = str(headers)
        raw_val = raw_val.replace(lsig, payload)
        new_headers = ast.literal_eval(str(raw_val))
    request = http_helper.create_http_request(method, new_url, new_body,
                                              new_headers)
    try:
        response = http_client.fetch(request)
    except HTTPError as e:
        # Non-2xx still carries a usable response for detection.
        response = e.response
    for struct in detection_struct:
        if struct["method"](response, struct["arguments"]):
            http_client.close()
            return binary_search(http_helper, lsig, minv, mid - 1, url,
                                 method, detection_struct, ch, headers,
                                 body)
    http_client.close()
    return binary_search(http_helper, lsig, mid + 1, maxv, url, method,
                         detection_struct, ch, headers, body)
class TornadoDownloader(object):
    """Blocking downloader built on tornado's synchronous HTTPClient."""

    def __init__(self):
        # NOTE(review): attribute name carries a typo ("httpclint") —
        # kept as-is since external code may reference it.
        self.httpclint = HTTPClient()

    def fetch(self, request):
        # Translate the project request into a tornado HTTPRequest,
        # perform the blocking fetch, and wrap the reply in the project
        # Response type.
        tornado_request = HTTPRequest(request.url_with_query,
                                      method=request.method.upper(),
                                      headers=request.headers)
        tornado_response = self.httpclint.fetch(tornado_request)
        return Response(request=request,
                        status_code=tornado_response.code,
                        url=tornado_response.effective_url,
                        headers=tornado_response.headers,
                        body=tornado_response.buffer.read())

    def __del__(self):
        # Best-effort cleanup when the downloader is garbage-collected.
        self.httpclint.close()
def kodi_rpc(request):
    """Send a JSON-RPC request to the Kodi host and return the raw
    response body. Credentials are redacted placeholders in source."""
    print('[XBMC] Calling kodi rpc')
    query = urllib.urlencode({'request': request})
    http_client = HTTPClient()
    try:
        resp = http_client.fetch(u'{}/jsonrpc?{}'.format(KODI_HOST, query),
                                 auth_username='******',
                                 auth_password='******',
                                 connect_timeout=60.0)
    finally:
        # Close even when the fetch raises (the original leaked then).
        http_client.close()
    return resp.body
def http_fetch(url):
    """
    Perform an HTTP request.
    """
    from tornado.httpclient import HTTPClient

    client = HTTPClient()
    try:
        reply = client.fetch(url)
    except Exception as err:
        # Wrap every failure in the project's FetchError.
        raise FetchError('http fetch failed: %s' % str(err))
    finally:
        client.close()
    return reply.body.decode()
def get_detection(self, user_id, application_id, session_id, locale,
                  query, context):
    """POST a detection query to the detection service and return the
    JSON-decoded response body.

    NOTE: *context* is currently unused (a commented-out escape of it
    existed in the original); kept in the signature for compatibility.
    """
    url = "%s?application_id=%s&session_id=%s&locale=%s&q=%s" % (
        DETECT_URL, application_id, session_id, locale, url_escape(query))
    if user_id is not None:
        url += "&user_id=%s" % user_id
    http_client = HTTPClient()
    try:
        response = http_client.fetch(HTTPRequest(url=url))
    finally:
        # Close even when the fetch raises (the original leaked then).
        http_client.close()
    return json_decode(response.body)
def synchronous_fetch(url):
    """Blocking fetch of *url* after a fixed 5-second delay.

    Returns the response body on success; on failure returns the error
    string (matching the original contract, odd as it is).
    """
    print("synchronous_fetch")
    http_client = HTTPClient()
    try:
        # Deliberate delay retained from the original (demo/throttle?).
        time.sleep(5)
        response = http_client.fetch(url)
        print(response.body)
        return response.body
    except Exception as e:
        print("Error: " + str(e))
        return str(e)
    finally:
        # The original skipped close() entirely on the error path.
        http_client.close()
class SyncHTTPClientTest(unittest.TestCase):
    """Tests the blocking ``HTTPClient`` against a server running on a
    dedicated ``IOLoop`` in a background thread (tornado 4.x style,
    with the explicit io_loop argument)."""

    def setUp(self):
        if IOLoop.configured_class().__name__ in ('TwistedIOLoop',
                                                  'AsyncIOMainLoop'):
            # TwistedIOLoop only supports the global reactor, so we can't have
            # separate IOLoops for client and server threads.
            # AsyncIOMainLoop doesn't work with the default policy
            # (although it could with some tweaks to this test and a
            # policy that created loops for non-main threads).
            raise unittest.SkipTest(
                'Sync HTTPClient not compatible with TwistedIOLoop or '
                'AsyncIOMainLoop')
        self.server_ioloop = IOLoop()

        sock, self.port = bind_unused_port()
        app = Application([('/', HelloWorldHandler)])
        self.server = HTTPServer(app, io_loop=self.server_ioloop)
        self.server.add_socket(sock)

        self.server_thread = threading.Thread(target=self.server_ioloop.start)
        self.server_thread.start()

        self.http_client = HTTPClient()

    def tearDown(self):
        def stop_server():
            self.server.stop()
            # Delay the shutdown of the IOLoop by one iteration because
            # the server may still have some cleanup work left when
            # the client finishes with the response (this is noticeable
            # with http/2, which leaves a Future with an unexamined
            # StreamClosedError on the loop).
            self.server_ioloop.add_callback(self.server_ioloop.stop)
        self.server_ioloop.add_callback(stop_server)
        self.server_thread.join()
        self.http_client.close()
        self.server_ioloop.close(all_fds=True)

    def get_url(self, path):
        # Absolute URL on the loopback server bound in setUp.
        return 'http://127.0.0.1:%d%s' % (self.port, path)

    def test_sync_client(self):
        response = self.http_client.fetch(self.get_url('/'))
        self.assertEqual(b'Hello world!', response.body)

    def test_sync_client_error(self):
        # Synchronous HTTPClient raises errors directly; no need for
        # response.rethrow()
        with self.assertRaises(HTTPError) as assertion:
            self.http_client.fetch(self.get_url('/notfound'))
        self.assertEqual(assertion.exception.code, 404)
def get(self):
    """Fetch the stored result for the current request from WebDAV.

    Returns the response body, or None when the GET fails.
    """
    normalized_path = self.normalize_path(self.context.request.url)
    uri = self.context.config.get('RESULT_STORAGE_WEBDAV_URI') + normalized_path
    logger.debug("[RESULT_STORAGE] Making GET request to: %s", uri)
    http_client = HTTPClient()
    result = None
    try:
        response = http_client.fetch(uri)
        result = response.body
    except HTTPError as e:
        logger.debug("[RESULT_STORAGE] Error on GET request: %s", e)
    finally:
        # Close even on non-HTTPError failures (the original leaked then).
        http_client.close()
    return result
def __call__(self, message):
    """POST *message* to the configured URL.

    Returns the HTTPResponse on success, or the ('', None) placeholder
    when the request fails; HTTP errors are logged and reported via
    handle_exception."""
    client = HTTPClient()
    result = ('', None)
    try:
        result = client.fetch(
            self.url,
            body=message,
            method='POST',
            connect_timeout=self.connection_timeout,
            request_timeout=self.connection_timeout,
        )
    except HTTPError as exc:
        error("HTTP Error: %s, payload size (bytes): %s", exc, len(message))
        handle_exception(ErrorMessage.from_exception(exc, address=self.url))
    finally:
        client.close()
    return result
def get_html(url):
    """Fetch *url*, following redirects.

    Returns str(body) re-encoded to UTF-8 bytes (note: str() of a bytes
    body yields the "b'...'" repr — behavior preserved from the
    original), or None on tornado HTTP errors."""
    client = HTTPClient()
    try:
        reply = client.fetch(url, follow_redirects=True)
        print("body: {}".format(reply.body))
        raw = reply.body
        return str(raw).encode(encoding="utf-8")
    except httpclient.HTTPError as e:
        return None
    finally:
        client.close()
def _fetch_rates(self):
    """Download the ECB 90-day reference-rate XML and parse it into a
    ConversionMap."""
    self.logger.info("Fetching rates from ECB")
    client = HTTPClient()
    try:
        reply = client.fetch(
            "https://www.ecb.europa.eu/stats/eurofxref/eurofxref-hist-90d.xml"
        )
        self.logger.debug(reply.body)
    finally:
        client.close()
    self.logger.info("New rates fetched successfully!")
    return ConversionMap.from_xml_string(reply.body, self.logger)
def get(self):
    """Fetch the stored result for the current request from WebDAV.

    Returns the response body, or None when the GET fails.
    """
    normalized_path = self.normalize_path(self.context.request.url)
    uri = self.context.config.get(
        'RESULT_STORAGE_WEBDAV_URI') + normalized_path
    logger.debug("[RESULT_STORAGE] Making GET request to: %s", uri)
    http_client = HTTPClient()
    result = None
    try:
        response = http_client.fetch(uri)
        result = response.body
    except HTTPError as e:
        logger.debug("[RESULT_STORAGE] Error on GET request: %s", e)
    finally:
        # Close even on non-HTTPError failures (the original leaked then).
        http_client.close()
    return result
def __init__(self):
    """Preload every species page from the STAPI REST endpoint into
    self.species (100 entries per page)."""
    self.species = []
    http_client = HTTPClient()
    try:
        pageNumber = 0
        while True:
            response = http_client.fetch(
                "http://stapi.co/api/v1/rest/species/search?pageSize=100&pageNumber="
                + str(pageNumber))
            data = json.loads(response.body)
            self.species.extend(data['species'])
            pageNumber += 1
            # Stop once every page reported by the API has been read.
            if pageNumber >= int(data['page']['totalPages']):
                break
    finally:
        # Close even when a page fetch fails (the original leaked then).
        http_client.close()
def register_volume(self, volume):
    """Globally register *volume* and announce its token (plus this
    kernel's connection file) to the Jupyter server extension.

    Raises RuntimeError when the registration request fails.
    """
    # globally register volume
    global volumes
    volumes[volume.token] = volume
    # globally register kernel client for this volume in the Jupyter server
    cf = url_escape(find_connection_file())
    http_client = HTTPClient()
    try:
        http_client.fetch(self.get_server_url() + '/register_token/'
                          + volume.token.decode('utf8') + '/' + cf)
    except Exception as e:
        raise RuntimeError("could not register token: " + str(e))
    finally:
        # The original skipped close() when registration failed, because
        # the re-raise jumped over it.
        http_client.close()
def find_length(owtf, http_helper, lsig, url, method, detection_struct, ch,
                headers, body=None):
    """This function finds the length of the fuzzing placeholder"""
    # Start at 8 KiB and double each round until a detection fires, then
    # binary-search the exact limit between the last two sizes.
    size = 8192
    minv = 0
    http_client = HTTPClient()
    new_url = url
    new_body = body
    new_headers = headers
    payload = ""
    for loop in range(0, 15):  # used to avoid potential deadloops
        payload = size * ch
        if lsig in url:
            new_url = url.replace(lsig, payload)
        elif body is not None and lsig in body:
            new_body = body.replace(lsig, payload)
        elif headers is not None and lsig in str(headers):
            # Substitute inside the stringified header dict, then parse
            # the literal back into a dict.
            raw_val = str(headers)
            raw_val = raw_val.replace(lsig, payload)
            new_headers = ast.literal_eval(str(raw_val))
        else:
            Error(owtf, "Length signature not found!")
        request = http_helper.create_http_request(method, new_url, new_body,
                                                  new_headers)
        try:
            response = http_client.fetch(request)
        except HTTPError as e:
            # NOTE(review): if e.response is falsy here on the first
            # iteration, `response` is unbound below — confirm upstream
            # guarantees a response object.
            if e.response:
                response = e.response
        for struct in detection_struct:
            if struct["method"](response, struct["arguments"]):
                http_client.close()
                return binary_search(
                    http_helper, lsig, minv, size, url, method,
                    detection_struct, ch, headers, body)
        # No detection at this size: raise the floor and double the probe.
        # NOTE(review): if the loop exhausts without detection, the
        # function returns None and http_client is never closed.
        minv = size
        size *= 2
def _work(self): #self.write('this job is just for test') headers = dict(self.request.headers) #if headers.has_key('url'): # headers['url']='download.sword?ctrl=CX302ZxcxCtrl_exequery&sjymc=ysctycx_hnlthxcx_g' url = 'http://ysctycx.hnds.tax.cn:7001/download.sword?ctrl=CX302ZxcxCtrl_exequery&sjymc=ysctycx_hnlthxcx_g' body=self.request.body req = HTTPRequest(url=url,method='POST',headers=headers,body=body) clt = HTTPClient() txt = '' try: txt = clt.fetch(req) print txt.body self.write(txt.body) except Exception as e: print str(e) clt.close()
def get_detection(self, user_id, application_id, session_id, locale,
                  query, context):
    """POST a detection query to the detection service and return the
    JSON-decoded response body.

    NOTE: *context* is currently unused (a commented-out escape of it
    existed in the original); kept in the signature for compatibility.
    """
    url = "%s?application_id=%s&session_id=%s&locale=%s&q=%s" % (
        DETECT_URL, application_id, session_id, locale, url_escape(query))
    if user_id is not None:
        url += "&user_id=%s" % user_id
    http_client = HTTPClient()
    try:
        response = http_client.fetch(HTTPRequest(url=url))
    finally:
        # Close even when the fetch raises (the original leaked then).
        http_client.close()
    return json_decode(response.body)
class SyncHTTPClientTest(unittest.TestCase):
    """Tests the blocking ``HTTPClient`` against a server running on a
    dedicated ``IOLoop`` in a background thread (early variant with a
    simple, immediate loop shutdown)."""

    def setUp(self):
        if IOLoop.configured_class().__name__ in ("TwistedIOLoop",
                                                  "AsyncIOMainLoop"):
            # TwistedIOLoop only supports the global reactor, so we can't have
            # separate IOLoops for client and server threads.
            # AsyncIOMainLoop doesn't work with the default policy
            # (although it could with some tweaks to this test and a
            # policy that created loops for non-main threads).
            raise unittest.SkipTest("Sync HTTPClient not compatible with TwistedIOLoop or "
                                    "AsyncIOMainLoop")
        self.server_ioloop = IOLoop()

        sock, self.port = bind_unused_port()
        app = Application([("/", HelloWorldHandler)])
        self.server = HTTPServer(app, io_loop=self.server_ioloop)
        self.server.add_socket(sock)

        self.server_thread = threading.Thread(target=self.server_ioloop.start)
        self.server_thread.start()

        self.http_client = HTTPClient()

    def tearDown(self):
        def stop_server():
            # Stop the server, then the loop, from the loop's own thread.
            self.server.stop()
            self.server_ioloop.stop()
        self.server_ioloop.add_callback(stop_server)
        self.server_thread.join()
        self.http_client.close()
        self.server_ioloop.close(all_fds=True)

    def get_url(self, path):
        # Absolute URL on the loopback server bound in setUp.
        return "http://127.0.0.1:%d%s" % (self.port, path)

    def test_sync_client(self):
        response = self.http_client.fetch(self.get_url("/"))
        self.assertEqual(b"Hello world!", response.body)

    def test_sync_client_error(self):
        # Synchronous HTTPClient raises errors directly; no need for
        # response.rethrow()
        with self.assertRaises(HTTPError) as assertion:
            self.http_client.fetch(self.get_url("/notfound"))
        self.assertEqual(assertion.exception.code, 404)
def get(self, identity):
    """Relay a pairwise-association query for a gene to the cancer regulome
    explorer endpoint and write the raw response back to the client.

    Expects ``gene``, ``source`` and ``cancer`` request arguments; ``cancer``
    must be a key of ``cancer_mappings`` (currently only "brca").
    """
    logging.info("uri=%s [%s] [%s]" % (self.request.uri, identity, self.request.arguments))

    args = self.request.arguments
    # Maps the public cancer code to the backing dataset name.
    cancer_mappings = {"brca": "brca_pw_manuscript"}

    gene = args["gene"][0]
    source = args["source"][0]
    dataset = cancer_mappings[args["cancer"][0]]
    logging.info("query=[%s][%s][%s]" % (gene, source, dataset))

    cli = HTTPClient()
    try:
        response = cli.fetch("http://explorer.cancerregulome.org/data/distributed_select/?q=%2Bf1source%3A%22" + source + "%22%2Bf1label%3A(%22" + gene + "%22)%20%2Blogged_pvalue%3A%5B6%20TO%20*%5D&sort=logged_pvalue%20desc&rows=200&fl=alias1%2Calias2%2Cf1qtinfo%2Cf2qtinfo%2Clink_distance%2Clogged_pvalue%2Ccorrelation%2Cnum_nonna&wt=json&fq=%2Bdataset%3A" + dataset)
    finally:
        # Fix: original leaked the client when fetch() raised.
        cli.close()

    self.write(response.body)
    self.set_status(200)
def get_weather():
    """Fetch tomorrow's Nanjing forecast from the Baidu telematics API.

    :returns: a single display string combining tomorrow's date, the
        API-reported date, city, temperature, conditions, wind and PM2.5.
    """
    url = "http://api.map.baidu.com/telematics/v3/weather?location=%E5%8D%97%E4%BA%AC&output=XML&ak=FK9mkfdQsloEngodbFl4FeY3"
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url, method="GET", request_timeout=120)
    finally:
        # Fix: original only closed the client on the success path.
        http_client.close()

    root = ET.fromstring(response.body.decode())
    # Index [1] selects tomorrow's entry (index 0 is today) — assumes the
    # API always returns at least two weather_data elements; TODO confirm.
    date = root.findall("results/weather_data/date")[1].text
    weather = root.findall("results/weather_data/weather")[1].text
    wind = root.findall("results/weather_data/wind")[1].text
    temperature = root.findall("results/weather_data/temperature")[1].text
    pm = root.find("results/pm25").text

    tomorrow = datetime.date.today() + datetime.timedelta(days=1)
    weather = (
        str(tomorrow) + " " + date + " " + "南京" + " " + temperature
        + " " + weather + " " + wind + " " + "PM2.5:" + pm
    )
    print(weather)
    return weather
def sync_fetch(self, url, method=HTTP_METHOD_GET, params=None):
    """Synchronously fetch ``url`` and return the response body.

    :param url: target URL.
    :param method: HTTP method constant (defaults to ``HTTP_METHOD_GET``).
    :param params: request parameters handed to ``_pack_data``.
    :returns: ``response.body`` on success, or the falsy placeholder when
        no response was produced.
    :raises HttpClientFetchError: on any fetch failure.
    """
    # Build the HTTPRequest from url/method/params.
    request = self._pack_data(url, method, params)

    http_client = HTTPClient()
    response = {}
    try:
        response = http_client.fetch(request)
    except Exception as error:
        # Fix: original used Python 2 `raise X, y` syntax (SyntaxError on
        # py3) and had two except clauses doing the identical thing.
        raise HttpClientFetchError(str(error))
    finally:
        http_client.close()

    if not response:
        return response
    return response.body
def shutdown_all(self):
    """Shutdown all kernels by issuing DELETE requests to the gateway."""
    # TODO: Is it appropriate to do this? Is this notebook server the
    # only client of the kernel gateway?
    # TODO: We also have to make this sync because the NotebookApp does not
    # wait for async.
    client = HTTPClient()
    try:
        for kernel_id in self._kernels.keys():
            kernel_url = url_path_join(KG_URL, self._kernel_id_to_url(kernel_id))
            self.log.info("Request delete kernel at: %s", kernel_url)
            try:
                response = client.fetch(kernel_url,
                                        headers=KG_HEADERS,
                                        method='DELETE'
                                        )
            except HTTPError:
                # Kernel may already be gone; best-effort cleanup.
                pass
            else:
                # Fix: original logged `response` even after the except
                # branch, raising UnboundLocalError on the first failed
                # fetch. Log only on success (matching the sibling
                # gateway implementation).
                self.log.info("Delete kernel response: %d %s",
                              response.code, response.reason)
    finally:
        # Fix: close the client even if an unexpected error escapes.
        client.close()
def shutdown_all(self, now=False):
    """Shutdown all kernels."""
    # Note: We have to make this sync because the NotebookApp does not wait for async.
    kwargs = {'method': 'DELETE'}
    kwargs = GatewayClient.instance().load_connection_args(**kwargs)

    client = HTTPClient()
    # Collect successfully deleted ids first; removing while iterating
    # would change the dict size mid-loop.
    deleted = []
    for kid in self._kernels.keys():
        endpoint = self._get_kernel_endpoint_url(kid)
        self.log.debug("Request delete kernel at: %s", endpoint)
        try:
            reply = client.fetch(endpoint, **kwargs)
        except HTTPError:
            # Gateway-side failure: leave the kernel entry in place.
            pass
        else:
            self.log.debug("Delete kernel response: %d %s",
                           reply.code, reply.reason)
            deleted.append(kid)
    client.close()

    for kid in deleted:
        self.remove_kernel(kid)
def get_compound_id(smiles):
    """Return the KEGG compound ID whose structure matches ``smiles``.

    Queries the genome.jp subcomp service for candidate KEGG IDs, maps each
    candidate to a ChEBI SMILES, canonicalizes everything with Indigo, and
    returns the candidate whose canonical SMILES equals the query's.

    :raises RuntimeError: when the service call fails or no candidate
        matches the query structure.
    """
    indigo = Indigo()
    # Canonicalize the query SMILES so string equality is meaningful.
    mol = indigo.loadMolecule(smiles)
    mol.aromatize()
    moi_smiles = mol.canonicalSmiles()

    # Get list of possible kegg IDs
    url = "http://rest.genome.jp/subcomp/?smiles=%s&cutoff=1.0" % smiles
    http_client = HTTPClient()
    try:
        response = http_client.fetch(url).body
    except HTTPError as e:
        # Fix: original raised RuntimeError("Error:", str(e)) — a
        # two-argument exception with an unhelpful message.
        raise RuntimeError("Error fetching %s: %s" % (url, e))
    finally:
        # Fix: original leaked the client on HTTPError.
        http_client.close()

    subcomp_results = response.split("\n")
    subcomp_results.pop()  # drop trailing empty line
    subcomp_results = [i.split('\t')[0] for i in subcomp_results]

    # get smiles for all compound IDs found
    uni = UniChem()
    mapping = uni.get_mapping("kegg_ligand", "chebi")
    ch = ChEBI()
    all_smiles = [ch.getCompleteEntity(mapping[x]).smiles
                  for x in subcomp_results]

    # convert smiles to a standard format
    for pos, m_smiles in enumerate(all_smiles):
        m = indigo.loadMolecule(m_smiles)
        m.aromatize()
        all_smiles[pos] = m.canonicalSmiles()

    # check if smiles matches given and, if so, use that compound ID
    try:
        index = all_smiles.index(moi_smiles)
    except ValueError:
        # Fix: original used a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit.
        raise RuntimeError("SMILES unmatchable to: %s" % str(all_smiles))
    return subcomp_results[index]
class SyncHTTPClientTest(unittest.TestCase):
    """Exercises the blocking ``HTTPClient`` against a Tornado server that
    runs on its own ``IOLoop`` in a background thread.
    """

    def setUp(self):
        if IOLoop.configured_class().__name__ == 'TwistedIOLoop':
            # TwistedIOLoop only supports the global reactor, so we can't have
            # separate IOLoops for client and server threads.
            raise unittest.SkipTest(
                'Sync HTTPClient not compatible with TwistedIOLoop')
        # Dedicated loop for the server; the sync client spins its own.
        self.server_ioloop = IOLoop()

        sock, self.port = bind_unused_port()
        app = Application([('/', HelloWorldHandler)])
        self.server = HTTPServer(app, io_loop=self.server_ioloop)
        self.server.add_socket(sock)

        # Run the server loop in a background thread so the synchronous
        # client calls below do not deadlock against it.
        self.server_thread = threading.Thread(target=self.server_ioloop.start)
        self.server_thread.start()

        self.http_client = HTTPClient()

    def tearDown(self):
        # Stop both server and loop from inside the server thread, then
        # join it before releasing client and loop resources.
        def stop_server():
            self.server.stop()
            self.server_ioloop.stop()
        self.server_ioloop.add_callback(stop_server)
        self.server_thread.join()
        self.http_client.close()
        self.server_ioloop.close(all_fds=True)

    def get_url(self, path):
        # Absolute URL for the ephemeral port bound in setUp.
        return 'http://localhost:%d%s' % (self.port, path)

    def test_sync_client(self):
        response = self.http_client.fetch(self.get_url('/'))
        self.assertEqual(b'Hello world!', response.body)

    def test_sync_client_error(self):
        # Synchronous HTTPClient raises errors directly; no need for
        # response.rethrow()
        with self.assertRaises(HTTPError) as assertion:
            self.http_client.fetch(self.get_url('/notfound'))
        self.assertEqual(assertion.exception.code, 404)
def _get_from_service(self, _id):
    """Fetch product detail ``_id`` from the content service.

    :returns: a trimmed dict (id, sequence, title, filtered attributes,
        images, brand, prices, updated) or ``None`` on any failure.
    """
    try:
        url = "%s/product_detail/%s.json" % (CONTENT_URL, _id)
        http_client = HTTPClient()
        try:
            response = http_client.fetch(url)
            data = json_decode(response.body)
        finally:
            # Fix: original leaked the client when fetch/decode raised.
            http_client.close()
        return {
            "_id": data["_id"],
            # dict.get replaces the original's `x if k in d else None` form.
            "sequence": data.get("sequence"),
            "title": data["title"],
            # Drop size-bucket attributes; everything else passes through.
            "attributes": [x for x in data["attributes"]
                           if "key" not in x["_id"]
                           or x["_id"]["key"] not in ["small sizes", "large sizes"]],
            "images": data["images"],
            "brand": data["brand"],
            "prices": data["prices"],
            # Default marker date for records missing an update timestamp.
            "updated": data.get("updated", datetime(2015, 1, 1).isoformat())
        }
    except Exception:
        # Fix: original bare `except:` also swallowed KeyboardInterrupt/
        # SystemExit. Best-effort: log and degrade to None.
        app_log.error("get_from_service,_id=%s", _id)
        return None
def add_sitemap_urls(self, parent_page):
    """Fetch the sitemap and enqueue every not-yet-seen URL for processing.

    :param parent_page: page recorded as the parent of each sitemap entry.
    """
    logger.debug("Adding sitemap urls as well for processing")
    http_client = HTTPClient()
    try:
        response = http_client.fetch(self.sitemap_url)
        val = bytes(response.body)
        root = objectify.fromstring(val)
        for url_element in root.url:
            page = _get_client_page(decode_to_unicode(url_element.loc.text),
                                    parent_page, self.base_site,
                                    self.base_domain, DOMAINS_TO_BE_SKIPPED)
            # Enqueue only URLs in none of the seen/pending sets.
            if page not in self.visited_urls and page not in self.non_visited_urls \
                    and page not in self.intermediate_urls:
                print(u"Added {}".format(url_element.loc))
                self.non_visited_urls.add(page)
                self.added_count += 1
                self.page_queue.put(page)
    except Exception as e:
        # Fix: original bound `e` but never logged it, dropping the
        # actual failure reason.
        logger.error(u"Error adding sitemap urls from %s : %s" % (self.sitemap_url, e))
    finally:
        http_client.close()
def add_sitemap_urls(self):
    """Fetch the sitemap and register every not-yet-seen URL for crawling."""
    logger.debug("Adding sitemap urls as well for processing")
    http_client = HTTPClient()
    try:
        response = http_client.fetch(self.sitemap_url)
        val = bytes(response.body)
        root = objectify.fromstring(val)
    except HTTPError as e:
        # Fix: original used a Python 2 `print` statement (SyntaxError on
        # py3) and then fell through to the loop below with `root`
        # unbound, raising NameError. Bail out early instead.
        logger.error("Error fetching sitemap %s: %s", self.sitemap_url, e)
        return
    finally:
        # Fix: original closed the client only on the error path.
        http_client.close()

    for url in root.url:
        page = _get_client_page(bytes(url.loc), next(iter(self.visited_urls)),
                                self.base_site, self.base_domain,
                                DOMAINS_TO_BE_SKIPPED)
        # Register only URLs in none of the seen/pending sets.
        if page not in self.visited_urls and page not in self.non_visited_urls \
                and page not in self.intermediate_urls:
            print("Added {}".format(url.loc))
            self.non_visited_urls.add(page)
            self.added_count += 1
def post_context(self, user_id: str, application_id: str, session_id: str,
                 locale: str) -> tuple:
    """Create a context on the context service with an empty body.

    :returns: the ``(_id, _rev)`` pair from the response headers.
        (Fix: the original annotation said ``-> dict`` but the method
        returns a 2-tuple.)
    :raises HTTPError: propagated from the underlying fetch.
    """
    self.logger.debug(
        "user_id=%s,application_id=%s,session_id=%s,locale=%s",
        user_id, application_id, session_id, locale
    )
    # # this now goes at message level
    # # if detection_response is not None:
    # #     request_body["detection_response"] = detection_response
    request_body = {}
    url = "%s?session_id=%s&application_id=%s&locale=%s" % (
        CONTEXT_URL, session_id, application_id, locale
    )
    if user_id is not None:
        url += "&user_id=%s" % user_id
    http_client = HTTPClient()
    try:
        # Original wrapped this in `except HTTPError: raise`, which is a
        # no-op; the error still propagates here.
        response = http_client.fetch(
            HTTPRequest(url=url, body=json_encode(request_body), method="POST"))
    finally:
        # Fix: original leaked the client when fetch() raised.
        http_client.close()
    return response.headers["_id"], response.headers["_rev"]
def get_context(self, user_id, application_id, session_id, locale,
                detection_response, context_id, skip_mongodb_log):
    """Create or retrieve a context on the context service.

    POSTs a (possibly detection-bearing) body when no ``context_id`` is
    given or a fresh ``detection_response`` must be recorded; otherwise
    GETs the existing context by id.

    :returns: the decoded JSON context from the service.
    """
    # Fix: original created a separate HTTPClient per branch and leaked
    # it whenever fetch() raised; one client, closed in `finally`.
    http_client = HTTPClient()
    try:
        if context_id is None or detection_response is not None:
            request_body = {}
            if detection_response is not None:
                request_body["detection_response"] = detection_response
            url = "%s?session_id=%s&application_id=%s&locale=%s" % (
                CONTEXT_URL, session_id, application_id, locale
            )
            if user_id is not None:
                url += "&user_id=%s" % user_id
            if skip_mongodb_log:
                # Flag-style parameter: presence alone is significant.
                url += "&skip_mongodb_log"
            response = http_client.fetch(
                HTTPRequest(
                    url=url,
                    body=json_encode(request_body),
                    method="POST"
                )
            )
        else:
            url = "%s?context_id=%s&session_id=%s" % (CONTEXT_URL, context_id, session_id)
            if user_id is not None:
                url += "&user_id=%s" % user_id
            response = http_client.fetch(
                HTTPRequest(
                    url=url,
                    method="GET"
                )
            )
        return json_decode(response.body)
    finally:
        http_client.close()