def outputdata(self, data):
    """Send *data* as a fixed-length output stream.

    Non-bytes values are stringified and encoded with ``self.encoding``
    before being wrapped in a :class:`MemoryStream`.
    """
    if isinstance(data, bytes):
        payload = data
    else:
        payload = str(data).encode(self.encoding)
    self.output(MemoryStream(payload))
def call_docker_api(path, data=None, method=None):
    # Closure helper (old-style VLCP coroutine, driven by ``yield``):
    # issue one request against the docker API endpoint over the shared
    # ``self._docker_conn`` connection and store the decoded JSON result
    # in ``self.apiroutine.retvalue``.
    #
    # ``self``, ``_create_docker_conn`` and ``host`` are free variables
    # from the enclosing scope.
    # NOTE(review): ``host`` is not defined in this function — presumably
    # bound in the enclosing method; verify against the outer scope.
    if self._docker_conn is None or not self._docker_conn.connected:
        # No usable connection: reconnect and wait for either a
        # CONNECTED or NOTCONNECTED state event.
        _create_docker_conn()
        conn_up = HttpConnectionStateEvent.createMatcher(
            HttpConnectionStateEvent.CLIENT_CONNECTED)
        conn_noconn = HttpConnectionStateEvent.createMatcher(
            HttpConnectionStateEvent.CLIENT_NOTCONNECTED)
        yield (conn_up, conn_noconn)
        if self.apiroutine.matcher is conn_noconn:
            raise IOError('Cannot connect to docker API endpoint: '
                          + repr(host))
    if method is None:
        # Default method: GET without a body, POST with one.
        if data is None:
            method = b'GET'
        else:
            method = b'POST'
    if data is None:
        # Body-less request; advertise compressed transfer encodings.
        for m in http_protocol.requestwithresponse(
                self.apiroutine, self._docker_conn, b'docker',
                _bytes(path), method,
                [(b'Accept-Encoding', b'gzip, deflate')]):
            yield m
    else:
        # JSON-encode the payload and send it as the request body.
        for m in http_protocol.requestwithresponse(
                self.apiroutine, self._docker_conn, b'docker',
                _bytes(path), method,
                [(b'Content-Type', b'application/json;charset=utf-8'),
                 (b'Accept-Encoding', b'gzip, deflate')],
                MemoryStream(_bytes(json.dumps(data)))):
            yield m
    final_resp = self.apiroutine.http_finalresponse
    output_stream = final_resp.stream
    try:
        if final_resp.statuscode >= 200 and final_resp.statuscode < 300:
            # Success: attach a decoder matching the response's
            # Content-Encoding before reading the body.
            if output_stream is not None \
                    and b'content-encoding' in final_resp.headerdict:
                ce = final_resp.headerdict.get(b'content-encoding')
                if ce.lower() == b'gzip' or ce.lower() == b'x-gzip':
                    output_stream.getEncoderList().append(
                        encoders.gzip_decoder())
                elif ce.lower() == b'deflate':
                    output_stream.getEncoderList().append(
                        encoders.deflate_decoder())
            if output_stream is None:
                # No body: return an empty dict as the result.
                self.apiroutine.retvalue = {}
            else:
                # Read the whole body and decode it as UTF-8 JSON.
                for m in output_stream.read(self.apiroutine):
                    yield m
                self.apiroutine.retvalue = json.loads(
                    self.apiroutine.data.decode('utf-8'))
        else:
            raise ValueError('Docker API returns error status: '
                             + repr(final_resp.status))
    finally:
        # Always release the response stream, even on error paths.
        if output_stream is not None:
            output_stream.close(self.scheduler)
def testStream_17(self):
    """readline() should return one line; readonce() the buffered rest."""
    stream = MemoryStream(b'abcdefg\nhijklmn\nopqrst\n')
    collected = []
    rc = self.rc

    async def reader():
        collected.append(await stream.readline(rc))
        collected.append(stream.readonce())

    rc.subroutine(reader())
    self.server.serve()
    self.assertEqual(collected, [b'abcdefg\n', b'hijklmn\nopqrst\n'])
def outputdata(self, data):
    """Send fixed-length output, encoding non-bytes input with self.encoding."""
    raw = data if isinstance(data, bytes) else str(data).encode(self.encoding)
    self.output(MemoryStream(raw))
async def open(self, container, request, ignorewebexception=False,
               timeout=None, datagen=None, cafile=None, key=None,
               certificate=None, followredirect=True, autodecompress=False,
               allowcookies=None):
    '''
    Open http request with a Request object

    :param container: a routine container hosting this routine

    :param request: vlcp.utils.webclient.Request object

    :param ignorewebexception: Do not raise exception on Web errors
                               (4xx, 5xx), return a response normally

    :param timeout: timeout on connection and single http request. When
                    following redirect, new request does not share the old
                    timeout, which means if timeout=2:

                    connect to host: (2s)

                    wait for response: (2s)

                    response is 302, redirect

                    connect to redirected host: (2s)

                    wait for response: (2s)

                    ...

    :param datagen: if the request use a stream as the data parameter, you
                    may provide a routine to generate data for the stream.
                    If the request failed early, this routine is
                    automatically terminated.

    :param cafile: provide a CA file for SSL certification check. If not
                   provided, the SSL connection is NOT verified.

    :param key: provide a key file, for client certification (usually
                not necessary)

    :param certificate: provide a certificate file, for client
                        certification (usually not necessary)

    :param followredirect: if True (default), automatically follow 3xx
                           redirections

    :param autodecompress: if True, automatically detect Content-Encoding
                           header and decode the body

    :param allowcookies: override default settings to disable the cookies
    '''
    # Fall back to instance-level defaults when not given explicitly.
    if cafile is None:
        cafile = self.cafile
    if allowcookies is None:
        allowcookies = self.allowcookies
    forcecreate = False
    datagen_routine = None
    if autodecompress:
        # Ask the server for compressed bodies only if the caller has not
        # already set an Accept-Encoding header.
        if not request.has_header('Accept-Encoding'):
            request.add_header('Accept-Encoding', 'gzip, deflate')
    # Loop: each iteration sends one request; redirects and retries on a
    # closed pooled connection `continue`, everything else breaks/returns.
    while True:
        # Find or create a connection
        conn, created = await self._getconnection(
            container, request.host, request.path,
            request.get_type() == 'https', forcecreate, cafile, key,
            certificate, timeout)
        # Send request on conn and wait for reply
        try:
            if allowcookies:
                self.cookiejar.add_cookie_header(request)
            if isinstance(request.data, bytes):
                stream = MemoryStream(request.data)
            else:
                # Caller supplied a stream; datagen (if any) feeds it.
                stream = request.data
            if datagen and datagen_routine is None:
                datagen_routine = container.subroutine(datagen)
            else:
                datagen_routine = None
            timeout_, result = await container.execute_with_timeout(
                timeout,
                self._protocol.request_with_response(
                    container, conn, _bytes(request.host),
                    _bytes(request.path), _bytes(request.method),
                    [(_bytes(k), _bytes(v))
                     for k, v in request.header_items()], stream))
            if timeout_:
                # Request timed out: stop the data generator and discard
                # the (possibly half-used) connection.
                if datagen_routine:
                    container.terminate(datagen_routine)
                container.subroutine(
                    self._releaseconnection(conn, request.host,
                                            request.path,
                                            request.get_type() == 'https',
                                            True), False)
                raise WebException('HTTP request timeout')
            finalresp, _ = result
            resp = Response(request.get_full_url(), finalresp,
                            container.scheduler)
            if allowcookies:
                self.cookiejar.extract_cookies(resp, request)
            if resp.iserror and not ignorewebexception:
                # 4xx/5xx: read up to 4096 bytes of the body into the
                # exception, then shut the response down and raise.
                try:
                    exc = WebException(resp.fullstatus)
                    if autodecompress and resp.stream:
                        ce = resp.get_header('Content-Encoding', '')
                        if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                            resp.stream.getEncoderList().append(
                                encoders.gzip_decoder())
                        elif ce.lower() == 'deflate':
                            resp.stream.getEncoderList().append(
                                encoders.deflate_decoder())
                    data = await resp.stream.read(container, 4096)
                    exc.response = resp
                    exc.body = data
                    if datagen_routine:
                        container.terminate(datagen_routine)
                    await resp.shutdown()
                    container.subroutine(
                        self._releaseconnection(
                            conn, request.host, request.path,
                            request.get_type() == 'https', True), False)
                    raise exc
                finally:
                    resp.close()
            else:
                try:
                    # Return the connection to the pool for reuse.
                    container.subroutine(
                        self._releaseconnection(
                            conn, request.host, request.path,
                            request.get_type() == 'https', False,
                            finalresp), False)
                    if followredirect and resp.status in (300, 301, 302,
                                                          303, 307, 308):
                        # Rewrite `request` in place, then retry the loop.
                        request.redirect(
                            resp, ignorewebexception=ignorewebexception,
                            timeout=timeout, cafile=cafile, key=key,
                            certificate=certificate,
                            followredirect=followredirect,
                            autodecompress=autodecompress,
                            allowcookies=allowcookies)
                        resp.close()
                        continue
                    if autodecompress and resp.stream:
                        ce = resp.get_header('Content-Encoding', '')
                        if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                            resp.stream.getEncoderList().append(
                                encoders.gzip_decoder())
                        elif ce.lower() == 'deflate':
                            resp.stream.getEncoderList().append(
                                encoders.deflate_decoder())
                    return resp
                except:
                    resp.close()
                    raise
        except HttpConnectionClosedException:
            # Pooled connection was already closed by the peer.
            await self._releaseconnection(conn, request.host, request.path,
                                          request.get_type() == 'https',
                                          False)
            if not created:
                # Retry on a newly created connection
                forcecreate = True
                continue
            else:
                # A fresh connection also failed: give up.
                if datagen_routine:
                    container.terminate(datagen_routine)
                raise
        except Exception as exc:
            # Any other failure: drop the connection and re-raise.
            await self._releaseconnection(conn, request.host, request.path,
                                          request.get_type() == 'https',
                                          True)
            raise exc
        break
async def handler(env):
    """Serve a static file (or error page) for the request in *env*.

    Closure over the enclosing configuration scope: ``rewriteonly``,
    ``errorpage``, ``checkreferer``, ``expand``, ``relativeroot``,
    ``xsendfile``/``xlighttpdsendfile``/``xaccelredirect``, ``gzip``,
    ``etag``, ``memorycache``, ``maxage``, ``allowrange``,
    ``contenttype``, ``contentdisposition``, ``extraheaders``, etc.

    Fix vs. original: ``rng`` is now initialized to ``None`` before the
    final (uncached, large-file) range check — previously, when
    ``errorpage`` was truthy or ``allowrange`` falsy on that path,
    ``if rng is not None:`` raised UnboundLocalError.  This matches the
    ``rng = None`` initialization every other serving path already uses.
    """
    currenttime = time()
    if rewriteonly:
        # Only reachable through an internal rewrite, not directly.
        if not env.rewritefrom:
            await env.error(404)
            return
    if not errorpage and checkreferer:
        # Anti-hotlinking: referer must be local or whitelisted.
        try:
            referer = env.headerdict.get(b'referer')
            if referer is None:
                referer_host = None
            else:
                referer_host = urlsplit(referer).netloc
            if not ((refererallowlocal and referer_host == env.host)
                    or referer_host in refererallows):
                await env.error(403, showerror=False)
                return
        except Exception:
            await env.error(403, showerror=False)
            return
    localpath = env.path_match.expand(expand)
    realpath = env.getrealpath(relativeroot, localpath)
    filename = os.path.basename(realpath)
    if xsendfile or xlighttpdsendfile or xaccelredirect:
        # Delegate the actual file transfer to the front-end web server
        # (Apache X-Sendfile / nginx X-Accel-Redirect / lighttpd).
        env.start_response(200)
        if contenttype:
            env.header('Content-Type', contenttype)
        else:
            mime = self.mimetypedatabase.guess_type(filename, mimestrict)
            if mime[1]:
                # There should not be a content-encoding here; maybe the
                # file itself is compressed — use application/octet-stream
                mime_type = 'application/octet-stream'
            elif not mime[0]:
                mime_type = 'application/octet-stream'
            else:
                mime_type = mime[0]
            env.header('Content-Type', mime_type, False)
        if not errorpage and contentdisposition:
            env.header('Content-Disposition',
                       contentdisposition + '; filename=' + quote(filename))
        if xsendfile:
            env.header('X-Sendfile', realpath)
        if xaccelredirect:
            env.header(b'X-Accel-Redirect',
                       urljoin(xaccelredirect_root,
                               self.dispatcher.expand(env.path_match,
                                                      expand)))
        if xlighttpdsendfile:
            env.header(b'X-LIGHTTPD-send-file', realpath)
        return
    use_gzip = False
    if gzip:
        if realpath.endswith('.gz'):
            # GZIP files are preserved for gzip encoding
            # NOTE(review): no `return` after this error — processing
            # continues; confirm env.error() raises or this is intended.
            await env.error(403, showerror=False)
        encodings = _parseacceptencodings(env)
        if b'gzip' in encodings or b'x-gzip' in encodings:
            use_gzip = True
    use_etag = etag and not errorpage
    # First memory-cache check (before touching the filesystem).
    if memorycache:
        # Cache data: (data, headers, cachedtime, etag)
        cv = self._cache.get((realpath, use_gzip))
        if cv and cv[2] + max(0 if maxage is None else maxage,
                              3) > currenttime:
            # Cache is valid
            if use_etag:
                if _checketag(env, cv[3]):
                    env.start_response(304, cv[1])
                    return
            size = len(cv[0])
            rng = None
            if not errorpage and allowrange:
                rng = _checkrange(env, cv[3], size)
            if rng is not None:
                env.start_response(206, cv[1])
                _generaterange(env, rng, size)
                env.output(MemoryStream(cv[0][rng[0]:rng[1]]), use_gzip)
            else:
                if errorpage:
                    # Error pages are named by status code (e.g. 404.html).
                    m = statusname.match(filename)
                    if m:
                        env.start_response(int(m.group()), cv[1])
                    else:
                        # Showing 200-OK is better than 500
                        env.start_response(200, cv[1])
                else:
                    env.start_response(200, cv[1])
                env.output(MemoryStream(cv[0]), use_gzip)
            return
    # Test the file on disk; prefer a pre-compressed .gz variant.
    if use_gzip:
        try:
            stat_info = os.stat(realpath + '.gz')
            if not stat.S_ISREG(stat_info.st_mode):
                raise ValueError('Not regular file')
            realpath += '.gz'
        except Exception:
            # Fall back to the uncompressed file.
            try:
                stat_info = os.stat(realpath)
                if not stat.S_ISREG(stat_info.st_mode):
                    raise ValueError('Not regular file')
                use_gzip = False
            except Exception:
                await env.error(404, showerror=False)
                return
    else:
        try:
            stat_info = os.stat(realpath)
            if not stat.S_ISREG(stat_info.st_mode):
                raise ValueError('Not regular file')
            use_gzip = False
        except Exception:
            await env.error(404, showerror=False)
            return
    newetag = _createetag(stat_info)
    # Second memory-cache test (realpath / use_gzip may have changed).
    if memorycache:
        cv = self._cache.get((realpath, use_gzip))
        if cv and cv[3] == newetag:
            # Cache is valid; refresh its timestamp.
            if use_etag:
                if _checketag(env, cv[3]):
                    env.start_response(304, cv[1])
                    return
            self._cache[(realpath, use_gzip)] = (cv[0], cv[1],
                                                 currenttime, newetag)
            size = len(cv[0])
            rng = None
            if not errorpage and allowrange:
                rng = _checkrange(env, cv[3], size)
            if rng is not None:
                env.start_response(206, cv[1])
                _generaterange(env, rng, size)
                env.output(MemoryStream(cv[0][rng[0]:rng[1]]), use_gzip)
            else:
                if errorpage:
                    m = statusname.match(filename)
                    if m:
                        env.start_response(int(m.group()), cv[1])
                    else:
                        # Showing 200-OK is better than 500
                        env.start_response(200, cv[1])
                else:
                    env.start_response(200, cv[1])
                env.output(MemoryStream(cv[0]), use_gzip)
            return
        elif cv:
            # Cache is invalid; remove it to prevent another hit
            del self._cache[(realpath, use_gzip)]
    # No cache available: serve the local file. Build headers first.
    if contenttype:
        env.header('Content-Type', contenttype)
    else:
        mime = self.mimetypedatabase.guess_type(filename, mimestrict)
        if mime[1]:
            # Compressed-on-disk file: use application/octet-stream
            mime_type = 'application/octet-stream'
        elif not mime[0]:
            mime_type = 'application/octet-stream'
        else:
            mime_type = mime[0]
        env.header('Content-Type', mime_type, False)
    if use_etag:
        env.header(b'ETag', b'"' + newetag + b'"', False)
    if maxage is not None:
        env.header('Cache-Control', 'max-age=' + str(maxage), False)
    if use_gzip:
        env.header(b'Content-Encoding', b'gzip', False)
    if not errorpage and contentdisposition:
        env.header('Content-Disposition',
                   contentdisposition + '; filename=' + quote(filename))
    if allowrange:
        env.header(b'Accept-Ranges', b'bytes')
    if extraheaders:
        env.sent_headers.extend(extraheaders)
    if use_etag:
        if _checketag(env, newetag):
            env.start_response(304, clearheaders=False)
            return
    if memorycache and stat_info.st_size <= memorycachelimit:
        # Small enough to cache in memory; evict if the cache is full.
        cache = True
        if len(self._cache) >= self.memorycacheitemlimit:
            if not self._clearcache(currenttime):
                cache = False
        if cache:
            with open(realpath, 'rb') as fobj:
                data = fobj.read()
            self._cache[(realpath, use_gzip)] = (data,
                                                 env.sent_headers[:],
                                                 currenttime, newetag)
            size = len(data)
            rng = None
            if not errorpage and allowrange:
                rng = _checkrange(env, newetag, size)
            if rng is not None:
                env.start_response(206, clearheaders=False)
                _generaterange(env, rng, size)
                env.output(MemoryStream(data[rng[0]:rng[1]]), use_gzip)
            else:
                if errorpage:
                    m = statusname.match(filename)
                    if m:
                        env.start_response(int(m.group()),
                                           clearheaders=False)
                    else:
                        # Showing 200-OK is better than 500
                        env.start_response(200, clearheaders=False)
                else:
                    env.start_response(200, clearheaders=False)
                env.output(MemoryStream(data), use_gzip)
            return
    # Large (or uncacheable) file: stream straight from disk.
    size = stat_info.st_size
    rng = None  # FIX: was unset when errorpage/no-allowrange on this path
    if not errorpage and allowrange:
        rng = _checkrange(env, newetag, size)
    if rng is not None:
        env.start_response(206, clearheaders=False)
        _generaterange(env, rng, size)
        fobj = open(realpath, 'rb')
        try:
            fobj.seek(rng[0])
        except Exception:
            # Seek failed: don't leak the file object.
            fobj.close()
            raise
        else:
            env.output(FileStream(fobj, isunicode=False,
                                  size=rng[1] - rng[0]), use_gzip)
    else:
        if errorpage:
            m = statusname.match(filename)
            if m:
                env.start_response(int(m.group()), clearheaders=False)
            else:
                # Showing 200-OK is better than 500
                env.start_response(200, clearheaders=False)
        else:
            env.start_response(200, clearheaders=False)
        env.output(FileStream(open(realpath, 'rb'), isunicode=False),
                   use_gzip)
def _open(self, container, request, ignorewebexception=False, timeout=None,
          datagen=None, cafile=None, key=None, certificate=None,
          followredirect=True, autodecompress=False, allowcookies=None):
    # Legacy generator-style variant of ``open``: same retry/redirect loop,
    # but driven with ``yield m`` and returning the Response through
    # ``container.retvalue`` instead of ``return``/``await``.
    if cafile is None:
        cafile = self.cafile
    if allowcookies is None:
        allowcookies = self.allowcookies
    forcecreate = False
    datagen_routine = None
    if autodecompress:
        # Only add Accept-Encoding if the caller has not set it.
        if not request.has_header('Accept-Encoding'):
            request.add_header('Accept-Encoding', 'gzip, deflate')
    while True:
        # Find or create a connection
        for m in self._getconnection(container, request.host, request.path,
                                     request.get_type() == 'https',
                                     forcecreate, cafile, key, certificate,
                                     timeout):
            yield m
        (conn, created) = container.retvalue
        # Send request on conn and wait for reply
        try:
            if allowcookies:
                self.cookiejar.add_cookie_header(request)
            if isinstance(request.data, bytes):
                stream = MemoryStream(request.data)
            else:
                stream = request.data
            if datagen and datagen_routine is None:
                # Start the routine that feeds the request body stream.
                datagen_routine = container.subroutine(datagen)
            else:
                datagen_routine = None
            for m in container.executeWithTimeout(
                    timeout,
                    self._protocol.requestwithresponse(
                        container, conn, _bytes(request.host),
                        _bytes(request.path), _bytes(request.method),
                        [(_bytes(k), _bytes(v))
                         for k, v in request.header_items()], stream)):
                yield m
            if container.timeout:
                # Timed out: kill the body generator, drop the connection.
                if datagen_routine:
                    container.terminate(datagen_routine)
                container.subroutine(
                    self._releaseconnection(conn, request.host,
                                            request.path,
                                            request.get_type() == 'https',
                                            True), False)
                raise WebException('HTTP request timeout')
            finalresp = container.http_finalresponse
            resp = Response(request.get_full_url(), finalresp,
                            container.scheduler)
            if allowcookies:
                self.cookiejar.extract_cookies(resp, request)
            if resp.iserror and not ignorewebexception:
                # 4xx/5xx: capture up to 4096 bytes of body in the
                # exception, then shut down and raise.
                try:
                    exc = WebException(resp.fullstatus)
                    if autodecompress and resp.stream:
                        ce = resp.get_header('Content-Encoding', '')
                        if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                            resp.stream.getEncoderList().append(
                                encoders.gzip_decoder())
                        elif ce.lower() == 'deflate':
                            resp.stream.getEncoderList().append(
                                encoders.deflate_decoder())
                    for m in resp.stream.read(container, 4096):
                        yield m
                    exc.response = resp
                    exc.body = container.data
                    if datagen_routine:
                        container.terminate(datagen_routine)
                    for m in resp.shutdown():
                        yield m
                    container.subroutine(
                        self._releaseconnection(
                            conn, request.host, request.path,
                            request.get_type() == 'https', True), False)
                    raise exc
                finally:
                    resp.close()
            else:
                try:
                    # Return the connection to the pool.
                    container.subroutine(
                        self._releaseconnection(
                            conn, request.host, request.path,
                            request.get_type() == 'https', False,
                            finalresp), False)
                    if followredirect and resp.status in (300, 301, 302,
                                                          303, 307, 308):
                        # Mutate `request` for the redirect target, retry.
                        request.redirect(
                            resp, ignorewebexception=ignorewebexception,
                            timeout=timeout, cafile=cafile, key=key,
                            certificate=certificate,
                            followredirect=followredirect,
                            autodecompress=autodecompress,
                            allowcookies=allowcookies)
                        resp.close()
                        continue
                    if autodecompress and resp.stream:
                        ce = resp.get_header('Content-Encoding', '')
                        if ce.lower() == 'gzip' or ce.lower() == 'x-gzip':
                            resp.stream.getEncoderList().append(
                                encoders.gzip_decoder())
                        elif ce.lower() == 'deflate':
                            resp.stream.getEncoderList().append(
                                encoders.deflate_decoder())
                    # Result is delivered via retvalue (generator style).
                    container.retvalue = resp
                except:
                    resp.close()
                    raise
        except HttpConnectionClosedException:
            # Pooled connection was closed by the peer.
            for m in self._releaseconnection(conn, request.host,
                                             request.path,
                                             request.get_type() == 'https',
                                             False):
                yield m
            if not created:
                # Retry on a newly created connection
                forcecreate = True
                continue
            else:
                # Fresh connection also failed: give up.
                if datagen_routine:
                    container.terminate(datagen_routine)
                raise
        except Exception as exc:
            # Any other failure: discard the connection, re-raise.
            for m in self._releaseconnection(conn, request.host,
                                             request.path,
                                             request.get_type() == 'https',
                                             True):
                yield m
            raise exc
        break
def options(env):
    """Answer an OPTIONS request with an empty body (generator form)."""
    env.output(MemoryStream(b''))
    # Unreachable yield keeps this a generator function, which the
    # dispatcher drives like any other handler routine.
    return
    yield
async def options(env):
    """Answer an OPTIONS request with an empty body."""
    empty_body = MemoryStream(b'')
    env.output(empty_body)