def testStream_15(self):
    """Round-trip test: utf-8 + gzip encode on one Stream, pipe it with
    copyTo() into a second Stream that reverses the encoding, and check
    the reassembled text."""
    out_stream = Stream(encoders=[str_encoder('utf-8'), gzip_encoder()])
    in_stream = Stream((str is not bytes),
                       encoders=[gzip_decoder(), str_decoder('utf-8')])
    collected = []
    routine = self.rc

    def producer():
        # Write three chunks; the final write carries the EOF flag so the
        # stream is closed after it.
        for chunk, is_eof in (('abcde', False), ('defgh', False), ('ijklm', True)):
            if is_eof:
                for m in out_stream.write(chunk, routine, True):
                    yield m
            else:
                for m in out_stream.write(chunk, routine):
                    yield m

    def consumer():
        # Drain the decoding stream completely, then record the decoded text.
        for m in in_stream.read(routine):
            yield m
        collected.append(routine.data)

    routine.subroutine(producer())
    routine.subroutine(consumer())
    routine.subroutine(out_stream.copyTo(in_stream, routine))
    self.server.serve()
    self.assertEqual(collected, ['abcdedefghijklm'])
def call_docker_api(path, data=None, method=None):
    """Generator coroutine: issue one request against the Docker HTTP API
    and store the JSON-decoded body in ``self.apiroutine.retvalue``.

    :param path: URL path of the API endpoint (converted with ``_bytes``)
    :param data: optional object; when given it is JSON-serialized and sent
                 as the request body (and the default method becomes POST)
    :param method: HTTP method as bytes; defaults to GET without data,
                   POST with data
    :raises IOError: if the connection to the endpoint cannot be established
    :raises ValueError: if the API answers with a non-2xx status
    """
    # Lazily (re)connect: if there is no live connection, start one and wait
    # for either a connected or a not-connected state event.
    if self._docker_conn is None or not self._docker_conn.connected:
        _create_docker_conn()
        conn_up = HttpConnectionStateEvent.createMatcher(
            HttpConnectionStateEvent.CLIENT_CONNECTED)
        conn_noconn = HttpConnectionStateEvent.createMatcher(
            HttpConnectionStateEvent.CLIENT_NOTCONNECTED)
        # Yielding the matchers suspends this routine until one event fires.
        yield (conn_up, conn_noconn)
        if self.apiroutine.matcher is conn_noconn:
            # NOTE(review): ``host`` is a closure variable from the enclosing
            # scope (not visible in this block) — presumably the endpoint
            # address; confirm in the surrounding method.
            raise IOError('Cannot connect to docker API endpoint: ' + repr(host))
    # Default method: GET for a bodyless request, POST when data is sent.
    if method is None:
        if data is None:
            method = b'GET'
        else:
            method = b'POST'
    if data is None:
        for m in http_protocol.requestwithresponse(
                self.apiroutine, self._docker_conn, b'docker', _bytes(path), method,
                [(b'Accept-Encoding', b'gzip, deflate')]):
            yield m
    else:
        # JSON-encode the payload and stream it as the request body.
        for m in http_protocol.requestwithresponse(
                self.apiroutine, self._docker_conn, b'docker', _bytes(path), method,
                [(b'Content-Type', b'application/json;charset=utf-8'),
                 (b'Accept-Encoding', b'gzip, deflate')],
                MemoryStream(_bytes(json.dumps(data)))):
            yield m
    final_resp = self.apiroutine.http_finalresponse
    output_stream = final_resp.stream
    try:
        if final_resp.statuscode >= 200 and final_resp.statuscode < 300:
            # Install a decoder matching the response's Content-Encoding so
            # the body is transparently decompressed while reading.
            if output_stream is not None and b'content-encoding' in final_resp.headerdict:
                ce = final_resp.headerdict.get(b'content-encoding')
                if ce.lower() == b'gzip' or ce.lower() == b'x-gzip':
                    output_stream.getEncoderList().append(
                        encoders.gzip_decoder())
                elif ce.lower() == b'deflate':
                    output_stream.getEncoderList().append(
                        encoders.deflate_decoder())
            if output_stream is None:
                # No body at all: report an empty result.
                self.apiroutine.retvalue = {}
            else:
                # Read the whole body, then JSON-decode it.
                for m in output_stream.read(self.apiroutine):
                    yield m
                self.apiroutine.retvalue = json.loads(
                    self.apiroutine.data.decode('utf-8'))
        else:
            raise ValueError('Docker API returns error status: '
                             + repr(final_resp.status))
    finally:
        # Always release the response stream, even on error.
        if output_stream is not None:
            output_stream.close(self.scheduler)
async def open(self, container, request, ignorewebexception=False, timeout=None,
               datagen=None, cafile=None, key=None, certificate=None,
               followredirect=True, autodecompress=False, allowcookies=None):
    '''
    Open http request with a Request object

    :param container: a routine container hosting this routine

    :param request: vlcp.utils.webclient.Request object

    :param ignorewebexception: Do not raise exception on Web errors (4xx, 5xx),
                               return a response normally

    :param timeout: timeout on connection and single http request. When following
                    redirect, new request does not share the old timeout, which
                    means if timeout=2:
                    connect to host: (2s)
                    wait for response: (2s)
                    response is 302, redirect
                    connect to redirected host: (2s)
                    wait for response: (2s)
                    ...

    :param datagen: if the request use a stream as the data parameter, you may
                    provide a routine to generate data for the stream. If the
                    request failed early, this routine is automatically terminated.

    :param cafile: provide a CA file for SSL certification check. If not provided,
                   the SSL connection is NOT verified.

    :param key: provide a key file, for client certification (usually not necessary)

    :param certificate: provide a certificate file, for client certification
                        (usually not necessary)

    :param followredirect: if True (default), automatically follow 3xx redirections

    :param autodecompress: if True, automatically detect Content-Encoding header
                           and decode the body

    :param allowcookies: override default settings to disable the cookies
    '''
    # Fall back to instance-level defaults when not given explicitly.
    if cafile is None:
        cafile = self.cafile
    if allowcookies is None:
        allowcookies = self.allowcookies
    forcecreate = False
    datagen_routine = None
    if autodecompress:
        # Advertise compression support unless the caller already set it.
        if not request.has_header('Accept-Encoding'):
            request.add_header('Accept-Encoding', 'gzip, deflate')
    # Loop: re-entered on redirects and on a retry with a fresh connection.
    while True:
        # Find or create a connection
        conn, created = await self._getconnection(
            container, request.host, request.path,
            request.get_type() == 'https', forcecreate, cafile, key,
            certificate, timeout)
        # Send request on conn and wait for reply
        try:
            if allowcookies:
                self.cookiejar.add_cookie_header(request)
            # Wrap raw bytes in a MemoryStream; otherwise request.data is
            # assumed to already be a stream object.
            if isinstance(request.data, bytes):
                stream = MemoryStream(request.data)
            else:
                stream = request.data
            # Start the data generator only once, on the first attempt.
            if datagen and datagen_routine is None:
                datagen_routine = container.subroutine(datagen)
            else:
                datagen_routine = None
            timeout_, result = await container.execute_with_timeout(
                timeout,
                self._protocol.request_with_response(
                    container, conn, _bytes(request.host),
                    _bytes(request.path), _bytes(request.method),
                    [(_bytes(k), _bytes(v)) for k, v in request.header_items()],
                    stream))
            if timeout_:
                # Timed out: stop the generator and discard the connection
                # (True = do not return it to the pool).
                if datagen_routine:
                    container.terminate(datagen_routine)
                container.subroutine(
                    self._releaseconnection(conn, request.host, request.path,
                                            request.get_type() == 'https',
                                            True), False)
                raise WebException('HTTP request timeout')
            finalresp, _ = result
            resp = Response(request.get_full_url(), finalresp, container.scheduler)
            if allowcookies:
                self.cookiejar.extract_cookies(resp, request)
            if resp.iserror and not ignorewebexception:
                try:
                    exc = WebException(resp.fullstatus)
                    # Decode the (possibly compressed) error body so the
                    # exception carries readable content.
                    if autodecompress and resp.stream:
                        ce = resp.get_header('Content-Encoding', '')
                        if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                            resp.stream.getEncoderList().append(
                                encoders.gzip_decoder())
                        elif ce.lower() == 'deflate':
                            resp.stream.getEncoderList().append(
                                encoders.deflate_decoder())
                    # Only the first 4096 bytes of the error body are kept.
                    data = await resp.stream.read(container, 4096)
                    exc.response = resp
                    exc.body = data
                    if datagen_routine:
                        container.terminate(datagen_routine)
                    await resp.shutdown()
                    container.subroutine(
                        self._releaseconnection(
                            conn, request.host, request.path,
                            request.get_type() == 'https', True), False)
                    raise exc
                finally:
                    resp.close()
            else:
                try:
                    # Success: give the connection back (False = reusable),
                    # passing finalresp so keep-alive state can be inspected.
                    container.subroutine(
                        self._releaseconnection(
                            conn, request.host, request.path,
                            request.get_type() == 'https', False, finalresp),
                        False)
                    if followredirect and resp.status in (300, 301, 302, 303, 307, 308):
                        # Mutate the request toward the new location and loop.
                        request.redirect(
                            resp, ignorewebexception=ignorewebexception,
                            timeout=timeout, cafile=cafile, key=key,
                            certificate=certificate,
                            followredirect=followredirect,
                            autodecompress=autodecompress,
                            allowcookies=allowcookies)
                        resp.close()
                        continue
                    if autodecompress and resp.stream:
                        ce = resp.get_header('Content-Encoding', '')
                        if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                            resp.stream.getEncoderList().append(
                                encoders.gzip_decoder())
                        elif ce.lower() == 'deflate':
                            resp.stream.getEncoderList().append(
                                encoders.deflate_decoder())
                    return resp
                except:
                    resp.close()
                    raise
        except HttpConnectionClosedException:
            # Connection died under us; releasing with False keeps pool state
            # consistent, then retry once on a forcibly new connection.
            await self._releaseconnection(conn, request.host, request.path,
                                          request.get_type() == 'https', False)
            if not created:
                # Retry on a newly created connection
                forcecreate = True
                continue
            else:
                # Already a fresh connection — give up.
                if datagen_routine:
                    container.terminate(datagen_routine)
                raise
        except Exception as exc:
            # Unknown failure: discard the connection (True) and re-raise.
            await self._releaseconnection(conn, request.host, request.path,
                                          request.get_type() == 'https', True)
            raise exc
        break
def _open(self, container, request, ignorewebexception=False, timeout=None,
          datagen=None, cafile=None, key=None, certificate=None,
          followredirect=True, autodecompress=False, allowcookies=None):
    """Generator-coroutine form of the HTTP open routine: send *request* on a
    pooled (or new) connection and leave the Response in ``container.retvalue``.

    Parameters mirror the async ``open`` variant: ``ignorewebexception``
    suppresses raising on 4xx/5xx, ``timeout`` bounds each attempt,
    ``datagen`` optionally feeds a stream body, ``cafile``/``key``/
    ``certificate`` configure SSL, ``followredirect`` chases 3xx, and
    ``autodecompress`` installs gzip/deflate decoders based on
    Content-Encoding.

    :raises WebException: on timeout, or on 4xx/5xx unless ignorewebexception
    """
    # Fall back to instance-level defaults when not given explicitly.
    if cafile is None:
        cafile = self.cafile
    if allowcookies is None:
        allowcookies = self.allowcookies
    forcecreate = False
    datagen_routine = None
    if autodecompress:
        # Advertise compression support unless the caller already set it.
        if not request.has_header('Accept-Encoding'):
            request.add_header('Accept-Encoding', 'gzip, deflate')
    # Loop: re-entered on redirects and on a retry with a fresh connection.
    while True:
        # Find or create a connection
        for m in self._getconnection(container, request.host, request.path,
                                     request.get_type() == 'https',
                                     forcecreate, cafile, key, certificate,
                                     timeout):
            yield m
        (conn, created) = container.retvalue
        # Send request on conn and wait for reply
        try:
            if allowcookies:
                self.cookiejar.add_cookie_header(request)
            # Raw bytes bodies are wrapped; anything else is assumed to be
            # a stream object already.
            if isinstance(request.data, bytes):
                stream = MemoryStream(request.data)
            else:
                stream = request.data
            # Start the data generator only once, on the first attempt.
            if datagen and datagen_routine is None:
                datagen_routine = container.subroutine(datagen)
            else:
                datagen_routine = None
            for m in container.executeWithTimeout(
                    timeout,
                    self._protocol.requestwithresponse(
                        container, conn, _bytes(request.host),
                        _bytes(request.path), _bytes(request.method),
                        [(_bytes(k), _bytes(v)) for k, v in request.header_items()],
                        stream)):
                yield m
            if container.timeout:
                # Timed out: stop the generator and discard the connection
                # (True = do not return it to the pool).
                if datagen_routine:
                    container.terminate(datagen_routine)
                container.subroutine(
                    self._releaseconnection(conn, request.host, request.path,
                                            request.get_type() == 'https',
                                            True), False)
                raise WebException('HTTP request timeout')
            finalresp = container.http_finalresponse
            resp = Response(request.get_full_url(), finalresp, container.scheduler)
            if allowcookies:
                self.cookiejar.extract_cookies(resp, request)
            if resp.iserror and not ignorewebexception:
                try:
                    exc = WebException(resp.fullstatus)
                    # Decode the (possibly compressed) error body so the
                    # exception carries readable content.
                    if autodecompress and resp.stream:
                        ce = resp.get_header('Content-Encoding', '')
                        if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                            resp.stream.getEncoderList().append(
                                encoders.gzip_decoder())
                        elif ce.lower() == 'deflate':
                            resp.stream.getEncoderList().append(
                                encoders.deflate_decoder())
                    # Only the first 4096 bytes of the error body are kept.
                    for m in resp.stream.read(container, 4096):
                        yield m
                    exc.response = resp
                    exc.body = container.data
                    if datagen_routine:
                        container.terminate(datagen_routine)
                    for m in resp.shutdown():
                        yield m
                    container.subroutine(
                        self._releaseconnection(
                            conn, request.host, request.path,
                            request.get_type() == 'https', True), False)
                    raise exc
                finally:
                    resp.close()
            else:
                try:
                    # Success: give the connection back (False = reusable),
                    # passing finalresp so keep-alive state can be inspected.
                    container.subroutine(
                        self._releaseconnection(
                            conn, request.host, request.path,
                            request.get_type() == 'https', False, finalresp),
                        False)
                    if followredirect and resp.status in (300, 301, 302, 303, 307, 308):
                        # Mutate the request toward the new location and loop.
                        request.redirect(
                            resp, ignorewebexception=ignorewebexception,
                            timeout=timeout, cafile=cafile, key=key,
                            certificate=certificate,
                            followredirect=followredirect,
                            autodecompress=autodecompress,
                            allowcookies=allowcookies)
                        resp.close()
                        continue
                    if autodecompress and resp.stream:
                        ce = resp.get_header('Content-Encoding', '')
                        if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                            resp.stream.getEncoderList().append(
                                encoders.gzip_decoder())
                        elif ce.lower() == 'deflate':
                            resp.stream.getEncoderList().append(
                                encoders.deflate_decoder())
                    container.retvalue = resp
                except:
                    resp.close()
                    raise
        except HttpConnectionClosedException:
            # Connection died under us; releasing with False keeps pool state
            # consistent, then retry once on a forcibly new connection.
            for m in self._releaseconnection(conn, request.host, request.path,
                                             request.get_type() == 'https',
                                             False):
                yield m
            if not created:
                # Retry on a newly created connection
                forcecreate = True
                continue
            else:
                # Already a fresh connection — give up.
                if datagen_routine:
                    container.terminate(datagen_routine)
                raise
        except Exception as exc:
            # Unknown failure: discard the connection (True) and re-raise.
            for m in self._releaseconnection(conn, request.host, request.path,
                                             request.get_type() == 'https',
                                             True):
                yield m
            raise exc
        break
def _open(self, container, request, ignorewebexception = False, timeout = None,
          datagen = None, cafile = None, key = None, certificate = None,
          followredirect = True, autodecompress = False, allowcookies = None):
    """Generator-coroutine HTTP open routine: send *request* on a pooled (or
    new) connection and leave the Response in ``container.retvalue``.

    Fix applied: previously, when the server answered 4xx/5xx with
    ``autodecompress=True``, the error body was read WITHOUT installing the
    gzip/deflate decoder (the success branch did install it), so
    ``exc.body`` could contain compressed bytes. The error branch now
    applies the same Content-Encoding handling as the success branch.

    :param container: routine container hosting this routine
    :param request: Request object describing the HTTP request
    :param ignorewebexception: when True, 4xx/5xx return a normal Response
    :param timeout: per-attempt timeout for connect + request
    :param datagen: optional routine generating data for a stream body;
                    terminated automatically on early failure
    :param cafile: CA file for SSL verification (unverified when None)
    :param key: client key file (rarely needed)
    :param certificate: client certificate file (rarely needed)
    :param followredirect: when True (default), follow 3xx redirects
    :param autodecompress: when True, decode gzip/deflate bodies
    :param allowcookies: override default cookie handling
    :raises WebException: on timeout, or on 4xx/5xx unless ignorewebexception
    """
    # Fall back to instance-level defaults when not given explicitly.
    if cafile is None:
        cafile = self.cafile
    if allowcookies is None:
        allowcookies = self.allowcookies
    forcecreate = False
    datagen_routine = None
    if autodecompress:
        # Advertise compression support unless the caller already set it.
        if not request.has_header('Accept-Encoding'):
            request.add_header('Accept-Encoding', 'gzip, deflate')
    # Loop: re-entered on redirects and on a retry with a fresh connection.
    while True:
        # Find or create a connection
        for m in self._getconnection(container, request.host, request.path,
                                     request.get_type() == 'https',
                                     forcecreate, cafile, key, certificate,
                                     timeout):
            yield m
        (conn, created) = container.retvalue
        # Send request on conn and wait for reply
        try:
            if allowcookies:
                self.cookiejar.add_cookie_header(request)
            # Raw bytes bodies are wrapped; anything else is assumed to be
            # a stream object already.
            if isinstance(request.data, bytes):
                stream = MemoryStream(request.data)
            else:
                stream = request.data
            # Start the data generator only once, on the first attempt.
            if datagen and datagen_routine is None:
                datagen_routine = container.subroutine(datagen)
            else:
                datagen_routine = None
            for m in container.executeWithTimeout(
                    timeout,
                    self._protocol.requestwithresponse(
                        container, conn, _bytes(request.host),
                        _bytes(request.path), _bytes(request.method),
                        [(_bytes(k), _bytes(v)) for k, v in request.header_items()],
                        stream)):
                yield m
            if container.timeout:
                # Timed out: stop the generator and discard the connection
                # (True = do not return it to the pool).
                if datagen_routine:
                    container.terminate(datagen_routine)
                container.subroutine(
                    self._releaseconnection(conn, request.host, request.path,
                                            request.get_type() == 'https',
                                            True), False)
                raise WebException('HTTP request timeout')
            finalresp = container.http_finalresponse
            resp = Response(request.get_full_url(), finalresp, container.scheduler)
            if allowcookies:
                self.cookiejar.extract_cookies(resp, request)
            if resp.iserror and not ignorewebexception:
                try:
                    exc = WebException(resp.fullstatus)
                    # FIX: decode the (possibly compressed) error body so
                    # exc.body carries readable content — mirrors the
                    # handling in the success branch below.
                    if autodecompress and resp.stream:
                        ce = resp.get_header('Content-Encoding', '')
                        if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                            resp.stream.getEncoderList().append(
                                encoders.gzip_decoder())
                        elif ce.lower() == 'deflate':
                            resp.stream.getEncoderList().append(
                                encoders.deflate_decoder())
                    # Only the first 4096 bytes of the error body are kept.
                    for m in resp.stream.read(container, 4096):
                        yield m
                    exc.response = resp
                    exc.body = container.data
                    if datagen_routine:
                        container.terminate(datagen_routine)
                    for m in resp.shutdown():
                        yield m
                    container.subroutine(
                        self._releaseconnection(conn, request.host,
                                                request.path,
                                                request.get_type() == 'https',
                                                True), False)
                    raise exc
                finally:
                    resp.close()
            else:
                try:
                    # Success: give the connection back (False = reusable),
                    # passing finalresp so keep-alive state can be inspected.
                    container.subroutine(
                        self._releaseconnection(conn, request.host,
                                                request.path,
                                                request.get_type() == 'https',
                                                False, finalresp), False)
                    if followredirect and resp.status in (300, 301, 302, 303, 307, 308):
                        # Mutate the request toward the new location and loop.
                        request.redirect(resp,
                                         ignorewebexception = ignorewebexception,
                                         timeout = timeout, cafile = cafile,
                                         key = key, certificate = certificate,
                                         followredirect = followredirect,
                                         autodecompress = autodecompress,
                                         allowcookies = allowcookies)
                        resp.close()
                        continue
                    if autodecompress and resp.stream:
                        ce = resp.get_header('Content-Encoding', '')
                        if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                            resp.stream.getEncoderList().append(
                                encoders.gzip_decoder())
                        elif ce.lower() == 'deflate':
                            resp.stream.getEncoderList().append(
                                encoders.deflate_decoder())
                    container.retvalue = resp
                except:
                    resp.close()
                    raise
        except HttpConnectionClosedException:
            # Connection died under us; releasing with False keeps pool state
            # consistent, then retry once on a forcibly new connection.
            for m in self._releaseconnection(conn, request.host, request.path,
                                             request.get_type() == 'https',
                                             False):
                yield m
            if not created:
                # Retry on a newly created connection
                forcecreate = True
                continue
            else:
                # Already a fresh connection — give up.
                if datagen_routine:
                    container.terminate(datagen_routine)
                raise
        except Exception as exc:
            # Unknown failure: discard the connection (True) and re-raise.
            for m in self._releaseconnection(conn, request.host, request.path,
                                             request.get_type() == 'https',
                                             True):
                yield m
            raise exc
        break