def _runFoodTruck(self, ctx):
    # See `_run_sse_check` in `piecrust.serving.wrappers` for an explanation
    # of this check.
    if (ctx.args.monitor_assets and (
            not ctx.args.debug or
            os.environ.get('WERKZEUG_RUN_MAIN') == 'true')):
        from piecrust.app import PieCrustFactory
        from piecrust.serving.procloop import ProcessingLoop
        appfactory = PieCrustFactory(
                ctx.app.root_dir,
                cache=ctx.app.cache.enabled,
                cache_key=ctx.app.cache_key,
                config_variant=ctx.config_variant,
                config_values=ctx.config_values,
                debug=ctx.app.debug,
                theme_site=ctx.app.theme_site)
        out_dir = os.path.join(
                ctx.app.root_dir, CACHE_DIR, 'foodtruck', 'server')
        proc_loop = ProcessingLoop(appfactory, out_dir)
        proc_loop.start()

    from foodtruck import settings
    settings.FOODTRUCK_CMDLINE_MODE = True
    settings.FOODTRUCK_ROOT = ctx.app.root_dir
    from foodtruck.main import run_foodtruck
    run_foodtruck(
            host=ctx.args.address,
            port=ctx.args.port,
            debug=ctx.args.debug)
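# The `WERKZEUG_RUN_MAIN` test above exists because Werkzeug's reloader runs
# the server in a forked child process and only sets that variable there; the
# asset pipeline loop should only be started in that child, not in the parent
# watcher process. A minimal sketch of such a check as a standalone helper
# (the real `_run_sse_check` in `piecrust.serving.wrappers` may differ):
import os


def _example_run_sse_check(debug=False):
    # Without code reloading there is only one process, so starting
    # background work is always safe. With the reloader, only the child
    # process (marked by WERKZEUG_RUN_MAIN) should do it.
    if not debug:
        return True
    return os.environ.get('WERKZEUG_RUN_MAIN') == 'true'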
def __init__(self, app, root_dir,
             debug=False, sub_cache_dir=None, run_sse_check=None):
    self.app = app
    self.root_dir = root_dir
    self.debug = debug
    self.sub_cache_dir = sub_cache_dir
    self.run_sse_check = run_sse_check
    self._proc_loop = None
    self._out_dir = os.path.join(root_dir, CACHE_DIR, 'server')
    if sub_cache_dir:
        self._out_dir = os.path.join(sub_cache_dir, 'server')
    self._handlers = {
            'debug_info': self._getDebugInfo,
            'werkzeug_shutdown': self._shutdownWerkzeug,
            'pipeline_status': self._startSSEProvider}

    if not self.run_sse_check or self.run_sse_check():
        # When using a server with code reloading, some implementations
        # use process forking and we end up going here twice. We only want
        # to start the pipeline loop in the inner process most of the
        # time so we let the implementation tell us if this is OK.
        from piecrust.serving.procloop import ProcessingLoop
        self._proc_loop = ProcessingLoop(
                root_dir, self._out_dir,
                sub_cache_dir=sub_cache_dir, debug=debug)
        self._proc_loop.start()
def getWsgiApp(self):
    # Bake all the assets so we know what we have, and so we can serve
    # them to the client. We need a temp app for this.
    app = PieCrust(root_dir=self.root_dir, debug=self.debug)
    app._useSubCacheDir(self.sub_cache_dir)
    self._out_dir = os.path.join(app.sub_cache_dir, 'server')
    self._page_record = ServeRecord()

    if not self.run_sse_check or self.run_sse_check():
        # When using a server with code reloading, some implementations
        # use process forking and we end up going here twice. We only want
        # to start the pipeline loop in the inner process most of the
        # time so we let the implementation tell us if this is OK.
        from piecrust.processing.base import ProcessorPipeline
        from piecrust.serving.procloop import ProcessingLoop
        pipeline = ProcessorPipeline(app, self._out_dir)
        self._proc_loop = ProcessingLoop(pipeline)
        self._proc_loop.start()

    # Run the WSGI app.
    wsgi_wrapper = WsgiServerWrapper(self)
    return wsgi_wrapper
def getWsgiApp(self):
    # Bake all the assets so we know what we have, and so we can serve
    # them to the client. We need a temp app for this.
    app = PieCrust(root_dir=self.root_dir, debug=self.debug)
    if self.sub_cache_dir:
        app._useSubCacheDir(self.sub_cache_dir)
        self._out_dir = os.path.join(app.sub_cache_dir, 'server')

    if not self.run_sse_check or self.run_sse_check():
        # When using a server with code reloading, some implementations
        # use process forking and we end up going here twice. We only want
        # to start the pipeline loop in the inner process most of the
        # time so we let the implementation tell us if this is OK.
        from piecrust.processing.pipeline import ProcessorPipeline
        from piecrust.serving.procloop import ProcessingLoop
        pipeline = ProcessorPipeline(app, self._out_dir)
        self._proc_loop = ProcessingLoop(pipeline)
        self._proc_loop.start()

    # Run the WSGI app.
    wsgi_wrapper = WsgiServerWrapper(self)
    return wsgi_wrapper
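# Hedged usage sketch: how the Server shown in this file might be wired to
# Werkzeug's development server. The constructor arguments follow the
# `Server.__init__` signature below; the site path and port are placeholders.
import os

from werkzeug.serving import run_simple

server = Server(
        '/path/to/website', debug=True,
        run_sse_check=lambda: os.environ.get('WERKZEUG_RUN_MAIN') == 'true')
wsgi_app = server.getWsgiApp()
run_simple('localhost', 8080, wsgi_app, use_debugger=True, use_reloader=True)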
class Server(object):
    def __init__(self, root_dir,
                 debug=False, sub_cache_dir=None, enable_debug_info=True,
                 static_preview=True, run_sse_check=None):
        self.root_dir = root_dir
        self.debug = debug
        self.sub_cache_dir = sub_cache_dir
        self.enable_debug_info = enable_debug_info
        self.run_sse_check = run_sse_check
        self.static_preview = static_preview
        self._out_dir = None
        self._page_record = None
        self._proc_loop = None
        self._mimetype_map = load_mimetype_map()

    def getWsgiApp(self):
        # Bake all the assets so we know what we have, and so we can serve
        # them to the client. We need a temp app for this.
        app = PieCrust(root_dir=self.root_dir, debug=self.debug)
        app._useSubCacheDir(self.sub_cache_dir)
        self._out_dir = os.path.join(app.sub_cache_dir, 'server')
        self._page_record = ServeRecord()

        if not self.run_sse_check or self.run_sse_check():
            # When using a server with code reloading, some implementations
            # use process forking and we end up going here twice. We only want
            # to start the pipeline loop in the inner process most of the
            # time so we let the implementation tell us if this is OK.
            from piecrust.processing.base import ProcessorPipeline
            from piecrust.serving.procloop import ProcessingLoop
            pipeline = ProcessorPipeline(app, self._out_dir)
            self._proc_loop = ProcessingLoop(pipeline)
            self._proc_loop.start()

        # Run the WSGI app.
        wsgi_wrapper = WsgiServerWrapper(self)
        return wsgi_wrapper

    def _run_request(self, environ, start_response):
        try:
            return self._try_run_request(environ, start_response)
        except Exception as ex:
            if self.debug:
                raise
            return self._handle_error(ex, environ, start_response)

    def _try_run_request(self, environ, start_response):
        request = Request(environ)

        # We don't support anything else than GET requests since we're
        # previewing something that will be static later.
        if self.static_preview and request.method != 'GET':
            logger.error("Only GET requests are allowed, got %s" %
                         request.method)
            raise MethodNotAllowed()

        # Handle special requests right away.
        response = self._try_special_request(environ, request)
        if response is not None:
            return response(environ, start_response)

        # Also handle requests to a pipeline-built asset right away.
        response = self._try_serve_asset(environ, request)
        if response is not None:
            return response(environ, start_response)

        # Create the app for this request.
        app = PieCrust(root_dir=self.root_dir, debug=self.debug)
        app._useSubCacheDir(self.sub_cache_dir)
        app.config.set('site/root', '/')
        app.config.set('server/is_serving', True)
        if (app.config.get('site/enable_debug_info') and
                self.enable_debug_info and
                '!debug' in request.args):
            app.config.set('site/show_debug_info', True)

        # We'll serve page assets directly from where they are.
        app.env.base_asset_url_format = '/_asset/%path%'

        # Let's see if it can be a page asset.
        response = self._try_serve_page_asset(app, environ, request)
        if response is not None:
            return response(environ, start_response)

        # Nope. Let's see if it's an actual page.
        try:
            response = self._try_serve_page(app, environ, request)
            return response(environ, start_response)
        except (RouteNotFoundError, SourceNotFoundError) as ex:
            raise NotFound(str(ex)) from ex
        except HTTPException:
            raise
        except Exception as ex:
            if app.debug:
                logger.exception(ex)
                raise
            msg = str(ex)
            logger.error(msg)
            raise InternalServerError(msg) from ex

    def _try_special_request(self, environ, request):
        static_mount = '/__piecrust_static/'
        if request.path.startswith(static_mount):
            rel_req_path = request.path[len(static_mount):]
            mount = os.path.join(os.path.dirname(__file__),
                                 'resources', 'server')
            full_path = os.path.join(mount, rel_req_path)
            try:
                response = self._make_wrapped_file_response(
                        environ, request, full_path)
                return response
            except OSError:
                pass

        debug_mount = '/__piecrust_debug/'
        if request.path.startswith(debug_mount):
            rel_req_path = request.path[len(debug_mount):]
            if rel_req_path == 'pipeline_status':
                from piecrust.serving.procloop import (
                        PipelineStatusServerSideEventProducer)
                provider = PipelineStatusServerSideEventProducer(
                        self._proc_loop.status_queue)
                it = ClosingIterator(provider.run(), [provider.close])
                response = Response(it)
                response.headers['Cache-Control'] = 'no-cache'
                if 'text/event-stream' in request.accept_mimetypes:
                    response.mimetype = 'text/event-stream'
                response.direct_passthrough = True
                response.implicit_sequence_conversion = False
                return response

        return None

    def _try_serve_asset(self, environ, request):
        rel_req_path = request.path.lstrip('/').replace('/', os.sep)
        if request.path.startswith('/_cache/'):
            # Some stuff needs to be served directly from the cache directory,
            # like LESS CSS map files.
            full_path = os.path.join(self.root_dir, rel_req_path)
        else:
            full_path = os.path.join(self._out_dir, rel_req_path)

        try:
            response = self._make_wrapped_file_response(
                    environ, request, full_path)
            return response
        except OSError:
            pass
        return None

    def _try_serve_page_asset(self, app, environ, request):
        if not request.path.startswith('/_asset/'):
            return None

        full_path = os.path.join(app.root_dir, request.path[len('/_asset/'):])
        if not os.path.isfile(full_path):
            return None

        return self._make_wrapped_file_response(environ, request, full_path)

    def _try_serve_page(self, app, environ, request):
        # Try to find what matches the requested URL.
        req_path, page_num = split_sub_uri(app, request.path)

        routes = find_routes(app.routes, req_path)
        if len(routes) == 0:
            raise RouteNotFoundError("Can't find route for: %s" % req_path)

        rendered_page = None
        first_not_found = None
        for route, route_metadata in routes:
            try:
                logger.debug("Trying to render match from source '%s'." %
                             route.source_name)
                rendered_page = self._try_render_page(app, route,
                                                      route_metadata,
                                                      page_num, req_path)
                if rendered_page is not None:
                    break
            except NotFound as nfe:
                if first_not_found is None:
                    first_not_found = nfe
        else:
            raise SourceNotFoundError(
                    "Can't find path for: %s (looked in: %s)" %
                    (req_path, [r.source_name for r, _ in routes]))

        # If we haven't found any good match, raise whatever exception we
        # first got. Otherwise, raise a generic exception.
        if rendered_page is None:
            first_not_found = first_not_found or NotFound(
                    "This page couldn't be found.")
            raise first_not_found

        # Start doing stuff.
        page = rendered_page.page
        rp_content = rendered_page.content

        # Profiling.
        if app.config.get('site/show_debug_info'):
            now_time = time.clock()
            timing_info = ('%8.1f ms' %
                           ((now_time - app.env.start_time) * 1000.0))
            rp_content = rp_content.replace('__PIECRUST_TIMING_INFORMATION__',
                                            timing_info)

        # Build the response.
        response = Response()

        etag = hashlib.md5(rp_content.encode('utf8')).hexdigest()
        if not app.debug and etag in request.if_none_match:
            response.status_code = 304
            return response

        response.set_etag(etag)
        response.content_md5 = etag

        cache_control = response.cache_control
        if app.debug:
            cache_control.no_cache = True
            cache_control.must_revalidate = True
        else:
            cache_time = (page.config.get('cache_time') or
                          app.config.get('site/cache_time'))
            if cache_time:
                cache_control.public = True
                cache_control.max_age = cache_time

        content_type = page.config.get('content_type')
        if content_type and '/' not in content_type:
            mimetype = content_type_map.get(content_type, content_type)
        else:
            mimetype = content_type
        if mimetype:
            response.mimetype = mimetype

        if ('gzip' in request.accept_encodings and
                app.config.get('site/enable_gzip')):
            try:
                with io.BytesIO() as gzip_buffer:
                    with gzip.open(gzip_buffer,
                                   mode='wt', encoding='utf8') as gzip_file:
                        gzip_file.write(rp_content)
                    rp_content = gzip_buffer.getvalue()
                    response.content_encoding = 'gzip'
            except Exception:
                logger.exception("Error compressing response, "
                                 "falling back to uncompressed.")

        response.set_data(rp_content)
        return response

    def _try_render_page(self, app, route, route_metadata, page_num,
                         req_path):
        # Match the route to an actual factory.
        taxonomy_info = None
        source = app.getSource(route.source_name)
        if route.taxonomy_name is None:
            factory = source.findPageFactory(route_metadata, MODE_PARSING)
            if factory is None:
                return None
        else:
            taxonomy = app.getTaxonomy(route.taxonomy_name)
            route_terms = route_metadata.get(taxonomy.term_name)
            if route_terms is None:
                return None

            tax_page_ref = taxonomy.getPageRef(source.name)
            factory = tax_page_ref.getFactory()
            tax_terms = route.unslugifyTaxonomyTerm(route_terms)
            route_metadata[taxonomy.term_name] = tax_terms
            taxonomy_info = (taxonomy, tax_terms)

        # Build the page.
        page = factory.buildPage()

        # We force the rendering of the page because, while the page itself
        # may not have changed, pages it includes may have.
        qp = QualifiedPage(page, route, route_metadata)
        render_ctx = PageRenderingContext(qp,
                                          page_num=page_num,
                                          force_render=True)
        if taxonomy_info is not None:
            taxonomy, tax_terms = taxonomy_info
            render_ctx.setTaxonomyFilter(taxonomy, tax_terms)

        # See if this page is known to use sources. If that's the case,
        # just don't use cached rendered segments for that page (but still
        # use them for pages that are included in it).
        uri = qp.getUri()
        assert uri == req_path
        entry = self._page_record.getEntry(uri, page_num)
        if (taxonomy_info is not None or entry is None or
                entry.used_source_names):
            cache_key = '%s:%s' % (uri, page_num)
            app.env.rendered_segments_repository.invalidate(cache_key)

        # Render the page.
        rendered_page = render_page(render_ctx)

        # Check if this page is a taxonomy page that actually doesn't match
        # anything.
        if taxonomy_info is not None:
            paginator = rendered_page.data.get('pagination')
            if (paginator and paginator.is_loaded and
                    len(paginator.items) == 0):
                taxonomy = taxonomy_info[0]
                message = ("This URL matched a route for taxonomy '%s' but "
                           "no pages have been found to have it. This page "
                           "won't be generated by a bake." % taxonomy.name)
                raise NotFound(message)

        # Remember stuff for next time.
        if entry is None:
            entry = ServeRecordPageEntry(req_path, page_num)
            self._page_record.addEntry(entry)
        for p, pinfo in render_ctx.render_passes.items():
            entry.used_source_names |= pinfo.used_source_names

        # Ok all good.
        return rendered_page

    def _make_wrapped_file_response(self, environ, request, path):
        logger.debug("Serving %s" % path)

        # Check if we can return a 304 status code.
        mtime = os.path.getmtime(path)
        etag_str = '%s$$%s' % (path, mtime)
        etag = hashlib.md5(etag_str.encode('utf8')).hexdigest()
        if etag in request.if_none_match:
            response = Response()
            response.status_code = 304
            return response

        wrapper = wrap_file(environ, open(path, 'rb'))
        response = Response(wrapper)
        _, ext = os.path.splitext(path)
        response.set_etag(etag)
        response.last_modified = datetime.datetime.fromtimestamp(mtime)
        response.mimetype = self._mimetype_map.get(ext.lstrip('.'),
                                                   'text/plain')
        return response

    def _handle_error(self, exception, environ, start_response):
        code = 500
        if isinstance(exception, HTTPException):
            code = exception.code

        path = 'error'
        if isinstance(exception, NotFound):
            path += '404'

        descriptions = self._get_exception_descriptions(exception)

        env = Environment(loader=ErrorMessageLoader())
        template = env.get_template(path)
        context = {'details': descriptions}
        response = Response(template.render(context), mimetype='text/html')
        response.status_code = code
        return response(environ, start_response)

    def _get_exception_descriptions(self, exception):
        desc = []
        while exception is not None:
            if isinstance(exception, HTTPException):
                desc.append(exception.description)
            else:
                desc.append(str(exception))

            inner_ex = exception.__cause__
            if inner_ex is None:
                inner_ex = exception.__context__
            exception = inner_ex
        return desc
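# The error page built by _handle_error lists every message in the exception
# chain. A small self-contained illustration (not PieCrust code) of the
# `__cause__` / `__context__` walk done by _get_exception_descriptions:
def _example_describe_exception_chain(exception):
    # Prefer the explicit cause (`raise ... from ex`), fall back to the
    # implicit context (an exception raised while handling another one).
    desc = []
    while exception is not None:
        desc.append(str(exception))
        exception = exception.__cause__ or exception.__context__
    return desc


try:
    try:
        open('/no/such/template')
    except OSError as ex:
        raise RuntimeError("Can't render the error page.") from ex
except RuntimeError as ex:
    print(_example_describe_exception_chain(ex))
    # -> ["Can't render the error page.",
    #     "[Errno 2] No such file or directory: '/no/such/template'"]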
class PieCrustDebugMiddleware(object):
    """ WSGI middleware that handles debugging of PieCrust stuff, and runs
        the asset pipeline in an SSE thread.
    """
    def __init__(self, app, appfactory,
                 run_sse_check=None):
        self.app = app
        self.appfactory = appfactory
        self.run_sse_check = run_sse_check
        self._proc_loop = None
        self._out_dir = os.path.join(
                appfactory.root_dir, CACHE_DIR, appfactory.cache_key, 'server')
        self._handlers = {
                'debug_info': self._getDebugInfo,
                'werkzeug_shutdown': self._shutdownWerkzeug,
                'pipeline_status': self._startSSEProvider}

        if not self.run_sse_check or self.run_sse_check():
            # When using a server with code reloading, some implementations
            # use process forking and we end up going here twice. We only want
            # to start the pipeline loop in the inner process most of the
            # time so we let the implementation tell us if this is OK.
            from piecrust.serving.procloop import ProcessingLoop
            self._proc_loop = ProcessingLoop(self.appfactory, self._out_dir)
            self._proc_loop.start()

    def __call__(self, environ, start_response):
        debug_mount = '/__piecrust_debug/'

        request = Request(environ)
        if request.path.startswith(debug_mount):
            rel_req_path = request.path[len(debug_mount):]
            handler = self._handlers.get(rel_req_path)
            if handler is not None:
                try:
                    return handler(request, start_response)
                except HTTPException as ex:
                    return ex(environ, start_response)

        return self.app(environ, start_response)

    def _getDebugInfo(self, request, start_response):
        app = get_app_for_server(self.appfactory)
        if not app.config.get('site/enable_debug_info', True):
            raise Forbidden("PieCrust debug info isn't enabled.")

        found = False
        page_path = request.args.get('page')
        try:
            req_page = get_requested_page(app, page_path)
            found = (req_page is not None)
        except (RouteNotFoundError, PageNotFoundError):
            pass
        if not found:
            raise NotFound("No such page: %s" % page_path)

        ctx = DataBuildingContext(req_page.page, sub_num=req_page.sub_num)
        data = build_page_data(ctx)

        var_path = request.args.getlist('var')
        if not var_path:
            var_path = None
        output = build_var_debug_info(data, var_path)

        response = Response(output, mimetype='text/html')
        return response(request.environ, start_response)

    def _shutdownWerkzeug(self, request, start_response):
        shutdown_func = request.environ.get('werkzeug.server.shutdown')
        if shutdown_func is None:
            raise RuntimeError('Not running with the Werkzeug Server')
        shutdown_func()
        response = Response("Server shutting down...")
        return response(request.environ, start_response)

    def _startSSEProvider(self, request, start_response):
        from piecrust.serving.procloop import (
                PipelineStatusServerSentEventProducer)
        provider = PipelineStatusServerSentEventProducer(self._proc_loop)
        it = provider.run()
        response = Response(it, mimetype='text/event-stream')
        response.headers['Cache-Control'] = 'no-cache'
        response.headers['Last-Event-ID'] = \
            self._proc_loop.last_status_id
        return ClosingIterator(
                response(request.environ, start_response),
                [provider.close])
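# Hedged wiring sketch for the middleware above: wrap an existing WSGI app so
# that /__piecrust_debug/* is answered by the middleware and everything else
# is passed through. The `PieCrustFactory` call mirrors the one seen in
# `_runFoodTruck` and assumes its remaining keyword arguments have defaults;
# the inner app here is a stand-in, not the real PieCrust serving app.
import os

from piecrust.app import PieCrustFactory


def _example_inner_app(environ, start_response):
    # Stand-in for the real PieCrust serving app.
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'Hello from the site preview.']


_example_wrapped = PieCrustDebugMiddleware(
        _example_inner_app,
        PieCrustFactory('/path/to/website', debug=True),
        run_sse_check=lambda: os.environ.get('WERKZEUG_RUN_MAIN') == 'true')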
class Server(object):
    def __init__(self, root_dir,
                 debug=False, sub_cache_dir=None, enable_debug_info=True,
                 static_preview=True, run_sse_check=None):
        self.root_dir = root_dir
        self.debug = debug
        self.sub_cache_dir = sub_cache_dir
        self.enable_debug_info = enable_debug_info
        self.run_sse_check = run_sse_check
        self.static_preview = static_preview
        self._page_record = ServeRecord()
        self._out_dir = os.path.join(root_dir, CACHE_DIR, 'server')
        self._proc_loop = None
        self._mimetype_map = load_mimetype_map()

    def getWsgiApp(self):
        # Bake all the assets so we know what we have, and so we can serve
        # them to the client. We need a temp app for this.
        app = PieCrust(root_dir=self.root_dir, debug=self.debug)
        if self.sub_cache_dir:
            app._useSubCacheDir(self.sub_cache_dir)
            self._out_dir = os.path.join(app.sub_cache_dir, 'server')

        if not self.run_sse_check or self.run_sse_check():
            # When using a server with code reloading, some implementations
            # use process forking and we end up going here twice. We only want
            # to start the pipeline loop in the inner process most of the
            # time so we let the implementation tell us if this is OK.
            from piecrust.processing.pipeline import ProcessorPipeline
            from piecrust.serving.procloop import ProcessingLoop
            pipeline = ProcessorPipeline(app, self._out_dir)
            self._proc_loop = ProcessingLoop(pipeline)
            self._proc_loop.start()

        # Run the WSGI app.
        wsgi_wrapper = WsgiServerWrapper(self)
        return wsgi_wrapper

    def _run_request(self, environ, start_response):
        try:
            return self._try_run_request(environ, start_response)
        except Exception as ex:
            if self.debug:
                if isinstance(ex, HTTPException):
                    return ex
                raise
            return self._handle_error(ex, environ, start_response)

    def _try_run_request(self, environ, start_response):
        request = Request(environ)

        # We don't support anything else than GET requests since we're
        # previewing something that will be static later.
        if self.static_preview and request.method != 'GET':
            logger.error("Only GET requests are allowed, got %s" %
                         request.method)
            raise MethodNotAllowed()

        # Handle special requests right away.
        response = self._try_special_request(environ, request)
        if response is not None:
            return response(environ, start_response)

        # Also handle requests to a pipeline-built asset right away.
        response = self._try_serve_asset(environ, request)
        if response is not None:
            return response(environ, start_response)

        # Create the app for this request.
        app = PieCrust(root_dir=self.root_dir, debug=self.debug)
        if self.sub_cache_dir:
            app._useSubCacheDir(self.sub_cache_dir)
        app.config.set('site/root', '/')
        app.config.set('server/is_serving', True)
        if (app.config.get('site/enable_debug_info') and
                self.enable_debug_info and
                '!debug' in request.args):
            app.config.set('site/show_debug_info', True)

        # We'll serve page assets directly from where they are.
        app.env.base_asset_url_format = '/_asset/%path%'

        # Let's see if it can be a page asset.
        response = self._try_serve_page_asset(app, environ, request)
        if response is not None:
            return response(environ, start_response)

        # Nope. Let's see if it's an actual page.
        try:
            response = self._try_serve_page(app, environ, request)
            return response(environ, start_response)
        except (RouteNotFoundError, SourceNotFoundError) as ex:
            raise NotFound(str(ex)) from ex
        except HTTPException:
            raise
        except Exception as ex:
            if app.debug:
                logger.exception(ex)
                raise
            msg = str(ex)
            logger.error(msg)
            raise InternalServerError(msg) from ex

    def _try_special_request(self, environ, request):
        static_mount = '/__piecrust_static/'
        if request.path.startswith(static_mount):
            rel_req_path = request.path[len(static_mount):]
            mount = os.path.join(RESOURCES_DIR, 'server')
            full_path = os.path.join(mount, rel_req_path)
            try:
                response = self._make_wrapped_file_response(
                        environ, request, full_path)
                return response
            except OSError:
                pass

        debug_mount = '/__piecrust_debug/'
        if request.path.startswith(debug_mount):
            rel_req_path = request.path[len(debug_mount):]
            if rel_req_path == 'pipeline_status':
                from piecrust.serving.procloop import (
                        PipelineStatusServerSideEventProducer)
                provider = PipelineStatusServerSideEventProducer(
                        self._proc_loop.status_queue)
                it = ClosingIterator(provider.run(), [provider.close])
                response = Response(it)
                response.headers['Cache-Control'] = 'no-cache'
                if 'text/event-stream' in request.accept_mimetypes:
                    response.mimetype = 'text/event-stream'
                response.direct_passthrough = True
                response.implicit_sequence_conversion = False
                return response

        return None

    def _try_serve_asset(self, environ, request):
        rel_req_path = request.path.lstrip('/').replace('/', os.sep)
        if request.path.startswith('/_cache/'):
            # Some stuff needs to be served directly from the cache directory,
            # like LESS CSS map files.
            full_path = os.path.join(self.root_dir, rel_req_path)
        else:
            full_path = os.path.join(self._out_dir, rel_req_path)

        try:
            response = self._make_wrapped_file_response(
                    environ, request, full_path)
            return response
        except OSError:
            pass
        return None

    def _try_serve_page_asset(self, app, environ, request):
        if not request.path.startswith('/_asset/'):
            return None

        full_path = os.path.join(app.root_dir, request.path[len('/_asset/'):])
        if not os.path.isfile(full_path):
            return None

        return self._make_wrapped_file_response(environ, request, full_path)

    def _try_serve_page(self, app, environ, request):
        # Try to find what matches the requested URL.
        req_path, page_num = split_sub_uri(app, request.path)

        routes = find_routes(app.routes, req_path)
        if len(routes) == 0:
            raise RouteNotFoundError("Can't find route for: %s" % req_path)

        rendered_page = None
        first_not_found = None
        for route, route_metadata in routes:
            try:
                logger.debug("Trying to render match from source '%s'." %
                             route.source_name)
                rendered_page = self._try_render_page(
                        app, route, route_metadata, page_num, req_path)
                if rendered_page is not None:
                    break
            except NotFound as nfe:
                if first_not_found is None:
                    first_not_found = nfe
        else:
            raise SourceNotFoundError(
                    "Can't find path for: %s (looked in: %s)" %
                    (req_path, [r.source_name for r, _ in routes]))

        # If we haven't found any good match, raise whatever exception we
        # first got. Otherwise, raise a generic exception.
        if rendered_page is None:
            first_not_found = first_not_found or NotFound(
                    "This page couldn't be found.")
            raise first_not_found

        # Start doing stuff.
        page = rendered_page.page
        rp_content = rendered_page.content

        # Profiling.
        if app.config.get('site/show_debug_info'):
            now_time = time.clock()
            timing_info = (
                    '%8.1f ms' % ((now_time - app.env.start_time) * 1000.0))
            rp_content = rp_content.replace(
                    '__PIECRUST_TIMING_INFORMATION__', timing_info)

        # Build the response.
        response = Response()

        etag = hashlib.md5(rp_content.encode('utf8')).hexdigest()
        if not app.debug and etag in request.if_none_match:
            response.status_code = 304
            return response

        response.set_etag(etag)
        response.content_md5 = etag

        cache_control = response.cache_control
        if app.debug:
            cache_control.no_cache = True
            cache_control.must_revalidate = True
        else:
            cache_time = (page.config.get('cache_time') or
                          app.config.get('site/cache_time'))
            if cache_time:
                cache_control.public = True
                cache_control.max_age = cache_time

        content_type = page.config.get('content_type')
        if content_type and '/' not in content_type:
            mimetype = content_type_map.get(content_type, content_type)
        else:
            mimetype = content_type
        if mimetype:
            response.mimetype = mimetype

        if ('gzip' in request.accept_encodings and
                app.config.get('site/enable_gzip')):
            try:
                with io.BytesIO() as gzip_buffer:
                    with gzip.open(gzip_buffer,
                                   mode='wt', encoding='utf8') as gzip_file:
                        gzip_file.write(rp_content)
                    rp_content = gzip_buffer.getvalue()
                    response.content_encoding = 'gzip'
            except Exception:
                logger.exception("Error compressing response, "
                                 "falling back to uncompressed.")

        response.set_data(rp_content)
        return response

    def _try_render_page(self, app, route, route_metadata, page_num,
                         req_path):
        # Match the route to an actual factory.
        taxonomy_info = None
        source = app.getSource(route.source_name)
        if route.taxonomy_name is None:
            factory = source.findPageFactory(route_metadata, MODE_PARSING)
            if factory is None:
                return None
        else:
            taxonomy = app.getTaxonomy(route.taxonomy_name)
            route_terms = route_metadata.get(taxonomy.term_name)
            if route_terms is None:
                return None

            tax_page_ref = taxonomy.getPageRef(source)
            factory = tax_page_ref.getFactory()
            tax_terms = route.unslugifyTaxonomyTerm(route_terms)
            route_metadata[taxonomy.term_name] = tax_terms
            taxonomy_info = (taxonomy, tax_terms)

        # Build the page.
        page = factory.buildPage()

        # We force the rendering of the page because, while the page itself
        # may not have changed, pages it includes may have.
        qp = QualifiedPage(page, route, route_metadata)
        render_ctx = PageRenderingContext(qp,
                                          page_num=page_num,
                                          force_render=True)
        if taxonomy_info is not None:
            taxonomy, tax_terms = taxonomy_info
            render_ctx.setTaxonomyFilter(taxonomy, tax_terms)

        # See if this page is known to use sources. If that's the case,
        # just don't use cached rendered segments for that page (but still
        # use them for pages that are included in it).
        uri = qp.getUri()
        entry = self._page_record.getEntry(uri, page_num)
        if (taxonomy_info is not None or entry is None or
                entry.used_source_names):
            cache_key = '%s:%s' % (uri, page_num)
            app.env.rendered_segments_repository.invalidate(cache_key)

        # Render the page.
        rendered_page = render_page(render_ctx)

        # Check if this page is a taxonomy page that actually doesn't match
        # anything.
        if taxonomy_info is not None:
            paginator = rendered_page.data.get('pagination')
            if (paginator and paginator.is_loaded and
                    len(paginator.items) == 0):
                taxonomy = taxonomy_info[0]
                message = ("This URL matched a route for taxonomy '%s' but "
                           "no pages have been found to have it. This page "
                           "won't be generated by a bake." % taxonomy.name)
                raise NotFound(message)

        # Remember stuff for next time.
        if entry is None:
            entry = ServeRecordPageEntry(req_path, page_num)
            self._page_record.addEntry(entry)
        for p, pinfo in render_ctx.render_passes.items():
            entry.used_source_names |= pinfo.used_source_names

        # Ok all good.
        return rendered_page

    def _make_wrapped_file_response(self, environ, request, path):
        logger.debug("Serving %s" % path)

        # Check if we can return a 304 status code.
        mtime = os.path.getmtime(path)
        etag_str = '%s$$%s' % (path, mtime)
        etag = hashlib.md5(etag_str.encode('utf8')).hexdigest()
        if etag in request.if_none_match:
            response = Response()
            response.status_code = 304
            return response

        wrapper = wrap_file(environ, open(path, 'rb'))
        response = Response(wrapper)
        _, ext = os.path.splitext(path)
        response.set_etag(etag)
        response.last_modified = datetime.datetime.fromtimestamp(mtime)
        response.mimetype = self._mimetype_map.get(
                ext.lstrip('.'), 'text/plain')
        return response

    def _handle_error(self, exception, environ, start_response):
        code = 500
        if isinstance(exception, HTTPException):
            code = exception.code

        path = 'error'
        if isinstance(exception, NotFound):
            path += '404'

        descriptions = self._get_exception_descriptions(exception)

        env = Environment(loader=ErrorMessageLoader())
        template = env.get_template(path)
        context = {'details': descriptions}
        response = Response(template.render(context), mimetype='text/html')
        response.status_code = code
        return response(environ, start_response)

    def _get_exception_descriptions(self, exception):
        desc = []
        while exception is not None:
            if isinstance(exception, HTTPException):
                desc.append(exception.description)
            else:
                desc.append(str(exception))

            inner_ex = exception.__cause__
            if inner_ex is None:
                inner_ex = exception.__context__
            exception = inner_ex
        return desc
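# Both the page response above and _make_wrapped_file_response implement
# conditional GETs with MD5-based ETags: hash the content (or the path and
# mtime), and answer 304 when the client's If-None-Match already carries that
# hash. An isolated sketch of the same pattern with Werkzeug primitives
# (hypothetical helper, not part of PieCrust):
import hashlib

from werkzeug.wrappers import Response


def _example_conditional_text_response(request, text):
    # Hash the body; short-circuit to 304 when the client already has it,
    # otherwise tag the full response so the next request can.
    etag = hashlib.md5(text.encode('utf8')).hexdigest()
    if etag in request.if_none_match:
        return Response(status=304)
    response = Response(text, mimetype='text/plain')
    response.set_etag(etag)
    return response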