def test_serve_category_page(category, expected_indices):
    """Render the category taxonomy page and check which posts it lists."""
    categories = ["foo", "foo", "bar", "foo", None, "bar"]

    def make_config(i):
        # Titles are 1-based; some posts deliberately have no category.
        cfg = {"title": "Post %d" % (i + 1)}
        if categories[i]:
            cfg["category"] = categories[i]
        return cfg

    fs = mock_fs()
    fs = fs.withPages(6, "posts/2015-03-{idx1:02}_post{idx1:02}.md",
                      make_config)
    fs = fs.withPage(
        "pages/_category.md",
        {"layout": "none", "format": "none"},
        "Pages in {{category}}\n"
        "{%for p in pagination.posts -%}\n"
        "{{p.title}}\n"
        "{%endfor%}",
    )
    with mock_fs_scope(fs):
        app = fs.getApp()
        page = app.getSource("pages").getPage(
            {"slug": "_category", "category": category})
        route = app.getTaxonomyRoute("categories", "posts")
        route_metadata = {"slug": "_category", "category": category}
        taxonomy = app.getTaxonomy("categories")

        qp = QualifiedPage(page, route, route_metadata)
        ctx = PageRenderingContext(qp)
        ctx.setTaxonomyFilter(taxonomy, category)
        rp = render_page(ctx)

        # The listing renders newest-first, hence the reversed order.
        expected = "Pages in %s\n" % category
        if expected_indices:
            expected += "".join("Post %d\n" % i
                                for i in reversed(expected_indices))
        assert expected == rp.content
def test_serve_category_page(category, expected_indices):
    """Check which posts show up on the category taxonomy page."""
    categories = ['foo', 'foo', 'bar', 'foo', None, 'bar']

    def make_config(i):
        # Titles are 1-based; entries with a None category get no key.
        conf = {'title': 'Post %d' % (i + 1)}
        if categories[i]:
            conf['category'] = categories[i]
        return conf

    fs = mock_fs()
    fs = fs.withPages(6, 'posts/2015-03-{idx1:02}_post{idx1:02}.md',
                      make_config)
    fs = fs.withPage('pages/_category.md',
                     {'layout': 'none', 'format': 'none'},
                     "Pages in {{category}}\n"
                     "{%for p in pagination.posts -%}\n"
                     "{{p.title}}\n"
                     "{%endfor%}")
    with mock_fs_scope(fs):
        app = fs.getApp()
        page = app.getSource('pages').getPage(
            {'slug': '_category', 'category': category})
        route = app.getTaxonomyRoute('categories', 'posts')
        route_metadata = {'slug': '_category', 'category': category}

        qp = QualifiedPage(page, route, route_metadata)
        ctx = PageRenderingContext(qp)
        ctx.setTaxonomyFilter(category)
        rp = render_page(ctx)

        # Posts are listed newest first.
        expected = "Pages in %s\n" % category
        if expected_indices:
            expected += ''.join("Post %d\n" % i
                                for i in reversed(expected_indices))
        assert expected == rp.content
def test_serve_tag_page(tag, expected_indices):
    """Check which posts show up on the tag taxonomy page."""
    tags = [['foo'], ['foo', 'bar'], ['bar'], ['bar', 'foo'],
            ['foo', 'whatever'], ['foo', 'bar'], ['unique'],
            ['whatever', 'bar']]

    def make_config(i):
        # Each post carries its own copy of the tag list.
        return {'title': 'Post %d' % (i + 1), 'tags': list(tags[i])}

    fs = mock_fs()
    fs = fs.withPages(8, 'posts/2015-03-{idx1:02}_post{idx1:02}.md',
                      make_config)
    fs = fs.withPage('pages/_tag.md',
                     {'layout': 'none', 'format': 'none'},
                     "Pages in {{tag}}\n"
                     "{%for p in pagination.posts -%}\n"
                     "{{p.title}}\n"
                     "{%endfor%}")
    with mock_fs_scope(fs):
        app = fs.getApp()
        page = app.getSource('pages').getPage({'slug': '_tag', 'tag': tag})
        route = app.getTaxonomyRoute('tags', 'posts')
        route_metadata = {'slug': '_tag', 'tag': tag}

        qp = QualifiedPage(page, route, route_metadata)
        ctx = PageRenderingContext(qp)
        ctx.setTaxonomyFilter(tag)
        rp = render_page(ctx)

        # Posts are listed newest first.
        expected = "Pages in %s\n" % tag
        if expected_indices:
            expected += ''.join("Post %d\n" % i
                                for i in reversed(expected_indices))
        assert expected == rp.content
def _bakeSingle(self, qualified_page, num, out_path, tax_info=None):
    """Render sub-page *num* of *qualified_page* and write it to *out_path*.

    Returns the rendered page object.
    """
    render_ctx = PageRenderingContext(qualified_page, page_num=num)
    if tax_info:
        # Restrict pagination to the taxonomy term being baked.
        render_ctx.setTaxonomyFilter(tax_info.term)

    rendered = render_page(render_ctx)

    _ensure_dir_exists(os.path.dirname(out_path))
    with codecs.open(out_path, 'w', 'utf8') as fp:
        fp.write(rendered.content)

    return rendered
def _bakeSingle(self, qualified_page, num, out_path, tax_info=None):
    """Render sub-page *num* of *qualified_page* and write it to *out_path*.

    Returns the rendered page object.
    """
    render_ctx = PageRenderingContext(qualified_page, page_num=num)
    if tax_info:
        # Look up the taxonomy object and restrict pagination to its term.
        taxonomy = self.app.getTaxonomy(tax_info.taxonomy_name)
        render_ctx.setTaxonomyFilter(taxonomy, tax_info.term)

    rendered = render_page(render_ctx)

    _ensure_dir_exists(os.path.dirname(out_path))
    with codecs.open(out_path, 'w', 'utf8') as fp:
        fp.write(rendered.content)

    return rendered
def _bakeSingle(self, qualified_page, num, out_path, taxonomy_info=None):
    """Render sub-page *num* and write it out.

    Returns a ``(context, rendered_page)`` tuple.
    """
    render_ctx = PageRenderingContext(qualified_page, page_num=num)
    if taxonomy_info:
        # taxonomy_info is a (taxonomy, term) pair.
        taxonomy, term = taxonomy_info
        render_ctx.setTaxonomyFilter(taxonomy, term)

    rendered = render_page(render_ctx)

    out_dir = os.path.dirname(out_path)
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir, 0o755)
    with codecs.open(out_path, 'w', 'utf8') as fp:
        fp.write(rendered.content)

    return render_ctx, rendered
def test_blog_provider():
    """The blog data provider should expose categories with post counts."""
    fs = mock_fs()
    fs = fs.withPage('posts/2015-03-01_one.md',
                     {'title': 'One', 'category': 'Foo'})
    fs = fs.withPage('posts/2015-03-02_two.md',
                     {'title': 'Two', 'category': 'Foo'})
    fs = fs.withPage('posts/2015-03-03_three.md',
                     {'title': 'Three', 'category': 'Bar'})
    fs = fs.withPage('pages/categories.md',
                     {'format': 'none', 'layout': 'none'},
                     "{%for c in blog.categories%}\n"
                     "{{c.name}} ({{c.post_count}})\n"
                     "{%endfor%}\n")
    with mock_fs_scope(fs):
        app = fs.getApp()
        page = app.getSource('pages').getPage({'slug': 'categories'})
        route = app.getRoute('pages', None)
        route_metadata = {'slug': 'categories'}

        qp = QualifiedPage(page, route, route_metadata)
        ctx = PageRenderingContext(qp)
        rp = render_page(ctx)

        # Two posts in Foo, one in Bar; Bar listed first.
        assert rp.content == "\nBar (1)\n\nFoo (2)\n"
def _bakeSingle(self, qualified_page, num, out_path, tax_info=None):
    """Render sub-page *num* and write it to *out_path*, timing the render
    and serialization phases separately; returns the rendered page."""
    render_ctx = PageRenderingContext(qualified_page, page_num=num)
    if tax_info:
        # Restrict pagination to the taxonomy term being baked.
        render_ctx.setTaxonomyFilter(tax_info.term)

    with self.app.env.timerScope("PageRender"):
        rendered = render_page(render_ctx)

    with self.app.env.timerScope("PageSerialize"):
        _ensure_dir_exists(os.path.dirname(out_path))
        with open(out_path, "w", encoding="utf8") as fp:
            fp.write(rendered.content)

    return rendered
def handleJob(self, job):
    """Pre-render the segments for a job's page and report the outcome.

    Returns a dict with 'path', 'aborted' and 'errors' entries for the
    master process.
    """
    # Render the segments for the first sub-page of this page.
    factory = load_factory(self.app, job)

    # These things should be OK as they're checked upstream by the baker.
    route = self.app.getRoute(factory.source.name, factory.metadata,
                              skip_taxonomies=True)
    assert route is not None

    page = factory.buildPage()
    qp = QualifiedPage(page, route, create_route_metadata(page))
    render_ctx = PageRenderingContext(qp)
    self.app.env.abort_source_use = True

    result = {'path': factory.path, 'aborted': False, 'errors': None}
    logger.debug("Preparing page: %s" % factory.ref_spec)
    try:
        render_page_segments(render_ctx)
    except AbortedSourceUseError:
        # The render tried to use a page source while that was disallowed.
        logger.debug("Page %s was aborted." % factory.ref_spec)
        result['aborted'] = True
    except Exception as ex:
        logger.debug("Got rendering error. Sending it to master.")
        result['errors'] = _get_errors(ex)
        if self.ctx.debug:
            logger.exception(ex)
    finally:
        # Always restore the flag, whatever happened above.
        self.app.env.abort_source_use = False
    return result
def test_serve_tag_page(tag, expected_indices):
    """Render the tag taxonomy page and verify the listed posts."""
    tags = [
        ["foo"], ["foo", "bar"], ["bar"], ["bar", "foo"],
        ["foo", "whatever"], ["foo", "bar"], ["unique"],
        ["whatever", "bar"],
    ]

    def make_config(i):
        # Each post carries its own copy of the tag list.
        return {"title": "Post %d" % (i + 1), "tags": list(tags[i])}

    fs = mock_fs()
    fs = fs.withPages(8, "posts/2015-03-{idx1:02}_post{idx1:02}.md",
                      make_config)
    fs = fs.withPage(
        "pages/_tag.md",
        {"layout": "none", "format": "none"},
        "Pages in {{tag}}\n"
        "{%for p in pagination.posts -%}\n"
        "{{p.title}}\n"
        "{%endfor%}",
    )
    with mock_fs_scope(fs):
        app = fs.getApp()
        page = app.getSource("pages").getPage({"slug": "_tag", "tag": tag})
        route = app.getTaxonomyRoute("tags", "posts")
        route_metadata = {"slug": "_tag", "tag": tag}
        taxonomy = app.getTaxonomy("tags")

        qp = QualifiedPage(page, route, route_metadata)
        ctx = PageRenderingContext(qp)
        ctx.setTaxonomyFilter(taxonomy, tag)
        rp = render_page(ctx)

        # Posts are listed newest first.
        expected = "Pages in %s\n" % tag
        if expected_indices:
            expected += "".join("Post %d\n" % i
                                for i in reversed(expected_indices))
        assert expected == rp.content
def test_serve_tag_page(tag, expected_indices):
    """Render the tag listing page and verify the posts it contains."""
    tags = [['foo'], ['foo', 'bar'], ['bar'], ['bar', 'foo'],
            ['foo', 'whatever'], ['foo', 'bar'], ['unique'],
            ['whatever', 'bar']]

    def build_config(i):
        return {'title': 'Post %d' % (i + 1), 'tags': list(tags[i])}

    listing_body = ("Pages in {{tag}}\n"
                    "{%for p in pagination.posts -%}\n"
                    "{{p.title}}\n"
                    "{%endfor%}")
    fs = (mock_fs()
          .withPages(8, 'posts/2015-03-{idx1:02}_post{idx1:02}.md',
                     build_config)
          .withPage('pages/_tag.md',
                    {'layout': 'none', 'format': 'none'},
                    listing_body))
    with mock_fs_scope(fs):
        app = fs.getApp()
        page = app.getSource('pages').getPage({'slug': '_tag', 'tag': tag})
        route = app.getTaxonomyRoute('tags', 'posts')
        route_metadata = {'slug': '_tag', 'tag': tag}

        qp = QualifiedPage(page, route, route_metadata)
        ctx = PageRenderingContext(qp)
        ctx.setTaxonomyFilter(tag)
        rp = render_page(ctx)

        # Build the expected listing, newest posts first.
        parts = ["Pages in %s\n" % tag]
        if expected_indices:
            for idx in reversed(expected_indices):
                parts.append("Post %d\n" % idx)
        assert ''.join(parts) == rp.content
def test_serve_category_page(category, expected_indices):
    """Render the category listing page and verify the posts it contains."""
    categories = ['foo', 'foo', 'bar', 'foo', None, 'bar']

    def build_config(i):
        conf = {'title': 'Post %d' % (i + 1)}
        if categories[i]:
            conf['category'] = categories[i]
        return conf

    listing_body = ("Pages in {{category}}\n"
                    "{%for p in pagination.posts -%}\n"
                    "{{p.title}}\n"
                    "{%endfor%}")
    fs = (mock_fs()
          .withPages(6, 'posts/2015-03-{idx1:02}_post{idx1:02}.md',
                     build_config)
          .withPage('pages/_category.md',
                    {'layout': 'none', 'format': 'none'},
                    listing_body))
    with mock_fs_scope(fs):
        app = fs.getApp()
        page = app.getSource('pages').getPage(
            {'slug': '_category', 'category': category})
        route = app.getTaxonomyRoute('categories', 'posts')
        route_metadata = {'slug': '_category', 'category': category}

        qp = QualifiedPage(page, route, route_metadata)
        ctx = PageRenderingContext(qp)
        ctx.setTaxonomyFilter(category)
        rp = render_page(ctx)

        # Build the expected listing, newest posts first.
        parts = ["Pages in %s\n" % category]
        if expected_indices:
            for idx in reversed(expected_indices):
                parts.append("Post %d\n" % idx)
        assert ''.join(parts) == rp.content
def _load_rendered_segment(self, data, name):
    """Render (or stub out) this page's content segments and return the one
    called *name*.

    If the page is already on the execution stack — i.e. we are building
    pagination data for the very page currently being rendered — rendering
    it again would recurse forever, so every declared segment is replaced
    with a placeholder string instead.
    """
    do_render = True
    eis = self._page.app.env.exec_info_stack
    if eis is not None and eis.hasPage(self._page):
        # This is the pagination data for the page that is currently
        # being rendered! Inception! But this is possible... so just
        # prevent infinite recursion.
        do_render = False
        assert self is data

    if do_render:
        uri = self._get_uri()
        try:
            from piecrust.rendering import (
                QualifiedPage, PageRenderingContext,
                render_page_segments)
            qp = QualifiedPage(self._page, self._route,
                               self._route_metadata)
            ctx = PageRenderingContext(qp)
            render_result = render_page_segments(ctx)
            segs = render_result.segments
        except Exception as e:
            raise Exception(
                "Error rendering segments for '%s'" % uri) from e
    else:
        # BUG FIX: the loop variable used to be `name`, shadowing the
        # requested segment name and making the `segs[name]` lookup at
        # the bottom resolve against the last declared segment instead.
        segs = {}
        for seg_name in self._page.config.get('segments'):
            segs[seg_name] = "<unavailable: current page>"

    # Publish all segments into this data object and un-register their
    # lazy loaders.
    for k, v in segs.items():
        self._unmapLoader(k)
        self._setValue(k, v)
    if 'content.abstract' in segs:
        # An abstract exists: expose it as 'content' and flag that more
        # content is available.
        self._setValue('content', segs['content.abstract'])
        self._setValue('has_more', True)
        if name == 'content':
            return segs['content.abstract']

    return segs[name]
def _try_serve_page(self, app, environ, request):
    """Serve a normal (non-asset) page for *request*, rendering on the fly.

    Raises MultipleNotFound when no route matches the path, and NotFound
    when a taxonomy route matches but has no pages. Returns a Werkzeug
    Response with ETag/cache-control headers and optional gzip encoding.
    """
    # Find a matching page.
    req_page = get_requested_page(app, request.path)

    # If we haven't found any good match, report all the places we didn't
    # find it at.
    qp = req_page.qualified_page
    if qp is None:
        msg = "Can't find path for '%s':" % request.path
        raise MultipleNotFound(msg, req_page.not_found_errors)

    # We have a page, let's try to render it.
    render_ctx = PageRenderingContext(qp,
                                      page_num=req_page.page_num,
                                      force_render=True)
    if qp.route.taxonomy_name is not None:
        # FIX: removed a dead `app.getTaxonomy(...)` lookup here — the
        # filter only needs the terms extracted from the route metadata.
        tax_terms = qp.route.getTaxonomyTerms(qp.route_metadata)
        render_ctx.setTaxonomyFilter(tax_terms, needs_slugifier=True)

    # See if this page is known to use sources. If that's the case,
    # just don't use cached rendered segments for that page (but still
    # use them for pages that are included in it).
    uri = qp.getUri()
    entry = self._page_record.getEntry(uri, req_page.page_num)
    if (qp.route.taxonomy_name is not None or
            entry is None or
            entry.used_source_names):
        cache_key = '%s:%s' % (uri, req_page.page_num)
        app.env.rendered_segments_repository.invalidate(cache_key)

    # Render the page.
    rendered_page = render_page(render_ctx)

    # Check if this page is a taxonomy page that actually doesn't match
    # anything.
    if qp.route.taxonomy_name is not None:
        paginator = rendered_page.data.get('pagination')
        if (paginator and paginator.is_loaded and
                len(paginator.items) == 0):
            taxonomy = app.getTaxonomy(qp.route.taxonomy_name)
            message = ("This URL matched a route for taxonomy '%s' but "
                       "no pages have been found to have it. This page "
                       "won't be generated by a bake." % taxonomy.name)
            raise NotFound(message)

    # Remember stuff for next time.
    if entry is None:
        entry = ServeRecordPageEntry(req_page.req_path, req_page.page_num)
        self._page_record.addEntry(entry)
    for p, pinfo in render_ctx.render_passes.items():
        entry.used_source_names |= pinfo.used_source_names

    # Start doing stuff.
    page = rendered_page.page
    rp_content = rendered_page.content

    # Profiling: substitute the timing placeholder with elapsed time.
    if app.config.get('site/show_debug_info'):
        now_time = time.perf_counter()
        timing_info = ('%8.1f ms' %
                       ((now_time - app.env.start_time) * 1000.0))
        rp_content = rp_content.replace('__PIECRUST_TIMING_INFORMATION__',
                                        timing_info)

    # Build the response.
    response = Response()

    # ETag / conditional-request handling (skipped while debugging so
    # edits always show up).
    etag = hashlib.md5(rp_content.encode('utf8')).hexdigest()
    if not app.debug and etag in request.if_none_match:
        response.status_code = 304
        return response

    response.set_etag(etag)
    response.content_md5 = etag

    cache_control = response.cache_control
    if app.debug:
        cache_control.no_cache = True
        cache_control.must_revalidate = True
    else:
        # Page-level cache_time overrides the site-wide setting.
        cache_time = (page.config.get('cache_time') or
                      app.config.get('site/cache_time'))
        if cache_time:
            cache_control.public = True
            cache_control.max_age = cache_time

    # A bare content type (no '/') is looked up in the short-name map.
    content_type = page.config.get('content_type')
    if content_type and '/' not in content_type:
        mimetype = content_type_map.get(content_type, content_type)
    else:
        mimetype = content_type
    if mimetype:
        response.mimetype = mimetype

    # Optionally gzip the body; fall back to plain text on any failure.
    if ('gzip' in request.accept_encodings and
            app.config.get('site/enable_gzip')):
        try:
            with io.BytesIO() as gzip_buffer:
                with gzip.open(gzip_buffer, mode='wt',
                               encoding='utf8') as gzip_file:
                    gzip_file.write(rp_content)
                rp_content = gzip_buffer.getvalue()
            response.content_encoding = 'gzip'
        except Exception:
            logger.error("Error compressing response, "
                         "falling back to uncompressed.")
    response.set_data(rp_content)

    return response
def _try_render_page(self, app, route, route_metadata, page_num, req_path):
    """Render the page matching *route* / *route_metadata*, or return None.

    For taxonomy routes, the taxonomy's listing page is used as the page
    factory and the pagination is filtered to the route's term(s). Raises
    NotFound when a taxonomy route matches but lists no pages.
    """
    # Match the route to an actual factory.
    taxonomy_info = None
    source = app.getSource(route.source_name)
    if route.taxonomy_name is None:
        factory = source.findPageFactory(route_metadata, MODE_PARSING)
        if factory is None:
            return None
    else:
        taxonomy = app.getTaxonomy(route.taxonomy_name)
        route_terms = route_metadata.get(taxonomy.term_name)
        if route_terms is None:
            return None

        # Use the taxonomy's listing page as the factory.
        tax_page_ref = taxonomy.getPageRef(source.name)
        factory = tax_page_ref.getFactory()

        # Un-slugify the term(s) back into their original form and make
        # the metadata carry that form.
        tax_terms = route.unslugifyTaxonomyTerm(route_terms)
        route_metadata[taxonomy.term_name] = tax_terms
        taxonomy_info = (taxonomy, tax_terms)

    # Build the page.
    page = factory.buildPage()

    # We force the rendering of the page because it could not have
    # changed, but include pages that did change.
    qp = QualifiedPage(page, route, route_metadata)
    render_ctx = PageRenderingContext(qp,
                                      page_num=page_num,
                                      force_render=True)
    if taxonomy_info is not None:
        taxonomy, tax_terms = taxonomy_info
        render_ctx.setTaxonomyFilter(taxonomy, tax_terms)

    # See if this page is known to use sources. If that's the case,
    # just don't use cached rendered segments for that page (but still
    # use them for pages that are included in it).
    uri = qp.getUri()
    assert uri == req_path
    entry = self._page_record.getEntry(uri, page_num)
    if (taxonomy_info is not None or
            entry is None or
            entry.used_source_names):
        cache_key = '%s:%s' % (uri, page_num)
        app.env.rendered_segments_repository.invalidate(cache_key)

    # Render the page.
    rendered_page = render_page(render_ctx)

    # Check if this page is a taxonomy page that actually doesn't match
    # anything.
    if taxonomy_info is not None:
        paginator = rendered_page.data.get('pagination')
        if (paginator and paginator.is_loaded and
                len(paginator.items) == 0):
            taxonomy = taxonomy_info[0]
            message = ("This URL matched a route for taxonomy '%s' but "
                       "no pages have been found to have it. This page "
                       "won't be generated by a bake."
                       % taxonomy.name)
            raise NotFound(message)

    # Remember stuff for next time.
    if entry is None:
        entry = ServeRecordPageEntry(req_path, page_num)
        self._page_record.addEntry(entry)
    for p, pinfo in render_ctx.render_passes.items():
        entry.used_source_names |= pinfo.used_source_names

    # Ok all good.
    return rendered_page
def _try_render_page(self, app, route, route_metadata, page_num, req_path):
    """Render the page matching *route* / *route_metadata*, or return None.

    Variant of the serve-time renderer where the taxonomy page ref is
    resolved from the source object itself (``getPageRef(source)``).
    Raises NotFound when a taxonomy route matches but lists no pages.
    """
    # Match the route to an actual factory.
    taxonomy_info = None
    source = app.getSource(route.source_name)
    if route.taxonomy_name is None:
        factory = source.findPageFactory(route_metadata, MODE_PARSING)
        if factory is None:
            return None
    else:
        taxonomy = app.getTaxonomy(route.taxonomy_name)
        route_terms = route_metadata.get(taxonomy.term_name)
        if route_terms is None:
            return None

        # Use the taxonomy's listing page as the factory.
        tax_page_ref = taxonomy.getPageRef(source)
        factory = tax_page_ref.getFactory()

        # Un-slugify the term(s) back into their original form and make
        # the metadata carry that form.
        tax_terms = route.unslugifyTaxonomyTerm(route_terms)
        route_metadata[taxonomy.term_name] = tax_terms
        taxonomy_info = (taxonomy, tax_terms)

    # Build the page.
    page = factory.buildPage()

    # We force the rendering of the page because it could not have
    # changed, but include pages that did change.
    qp = QualifiedPage(page, route, route_metadata)
    render_ctx = PageRenderingContext(qp,
                                      page_num=page_num,
                                      force_render=True)
    if taxonomy_info is not None:
        taxonomy, tax_terms = taxonomy_info
        render_ctx.setTaxonomyFilter(taxonomy, tax_terms)

    # See if this page is known to use sources. If that's the case,
    # just don't use cached rendered segments for that page (but still
    # use them for pages that are included in it).
    uri = qp.getUri()
    entry = self._page_record.getEntry(uri, page_num)
    if (taxonomy_info is not None or
            entry is None or
            entry.used_source_names):
        cache_key = '%s:%s' % (uri, page_num)
        app.env.rendered_segments_repository.invalidate(cache_key)

    # Render the page.
    rendered_page = render_page(render_ctx)

    # Check if this page is a taxonomy page that actually doesn't match
    # anything.
    if taxonomy_info is not None:
        paginator = rendered_page.data.get('pagination')
        if (paginator and paginator.is_loaded and
                len(paginator.items) == 0):
            taxonomy = taxonomy_info[0]
            message = ("This URL matched a route for taxonomy '%s' but "
                       "no pages have been found to have it. This page "
                       "won't be generated by a bake."
                       % taxonomy.name)
            raise NotFound(message)

    # Remember stuff for next time.
    if entry is None:
        entry = ServeRecordPageEntry(req_path, page_num)
        self._page_record.addEntry(entry)
    for p, pinfo in render_ctx.render_passes.items():
        entry.used_source_names |= pinfo.used_source_names

    # Ok all good.
    return rendered_page
def render_simple_page(page, route, route_metadata):
    """Render *page* through *route* and return only the final content."""
    rendering_ctx = PageRenderingContext(
        QualifiedPage(page, route, route_metadata))
    return render_page(rendering_ctx).content
def _try_serve_page(self, app, environ, request):
    """Serve a normal (non-asset) page for *request*, rendering on the fly.

    Raises MultipleNotFound when no route matches the path, and NotFound
    when a taxonomy route matches but has no pages. Returns a Werkzeug
    Response with ETag/cache-control headers and optional gzip encoding.
    """
    # Find a matching page.
    req_page = get_requested_page(app, request.path)

    # If we haven't found any good match, report all the places we didn't
    # find it at.
    qp = req_page.qualified_page
    if qp is None:
        msg = "Can't find path for '%s':" % request.path
        raise MultipleNotFound(msg, req_page.not_found_errors)

    # We have a page, let's try to render it.
    render_ctx = PageRenderingContext(qp,
                                      page_num=req_page.page_num,
                                      force_render=True)
    if qp.route.taxonomy_name is not None:
        # FIX: removed a dead `app.getTaxonomy(...)` lookup here — the
        # filter only needs the terms extracted from the route metadata.
        tax_terms = qp.route.getTaxonomyTerms(qp.route_metadata)
        render_ctx.setTaxonomyFilter(tax_terms, needs_slugifier=True)

    # See if this page is known to use sources. If that's the case,
    # just don't use cached rendered segments for that page (but still
    # use them for pages that are included in it).
    uri = qp.getUri()
    entry = self._page_record.getEntry(uri, req_page.page_num)
    if (qp.route.taxonomy_name is not None or
            entry is None or
            entry.used_source_names):
        cache_key = '%s:%s' % (uri, req_page.page_num)
        app.env.rendered_segments_repository.invalidate(cache_key)

    # Render the page.
    rendered_page = render_page(render_ctx)

    # Check if this page is a taxonomy page that actually doesn't match
    # anything.
    if qp.route.taxonomy_name is not None:
        paginator = rendered_page.data.get('pagination')
        if (paginator and paginator.is_loaded and
                len(paginator.items) == 0):
            taxonomy = app.getTaxonomy(qp.route.taxonomy_name)
            message = ("This URL matched a route for taxonomy '%s' but "
                       "no pages have been found to have it. This page "
                       "won't be generated by a bake." % taxonomy.name)
            raise NotFound(message)

    # Remember stuff for next time.
    if entry is None:
        entry = ServeRecordPageEntry(req_page.req_path, req_page.page_num)
        self._page_record.addEntry(entry)
    for p, pinfo in render_ctx.render_passes.items():
        entry.used_source_names |= pinfo.used_source_names

    # Start doing stuff.
    page = rendered_page.page
    rp_content = rendered_page.content

    # Profiling: substitute the timing placeholder with elapsed time.
    if app.config.get('site/show_debug_info'):
        now_time = time.perf_counter()
        timing_info = (
            '%8.1f ms' % ((now_time - app.env.start_time) * 1000.0))
        rp_content = rp_content.replace(
            '__PIECRUST_TIMING_INFORMATION__', timing_info)

    # Build the response.
    response = Response()

    # ETag / conditional-request handling (skipped while debugging so
    # edits always show up).
    etag = hashlib.md5(rp_content.encode('utf8')).hexdigest()
    if not app.debug and etag in request.if_none_match:
        response.status_code = 304
        return response

    response.set_etag(etag)
    response.content_md5 = etag

    cache_control = response.cache_control
    if app.debug:
        cache_control.no_cache = True
        cache_control.must_revalidate = True
    else:
        # Page-level cache_time overrides the site-wide setting.
        cache_time = (page.config.get('cache_time') or
                      app.config.get('site/cache_time'))
        if cache_time:
            cache_control.public = True
            cache_control.max_age = cache_time

    # A bare content type (no '/') is looked up in the short-name map.
    content_type = page.config.get('content_type')
    if content_type and '/' not in content_type:
        mimetype = content_type_map.get(content_type, content_type)
    else:
        mimetype = content_type
    if mimetype:
        response.mimetype = mimetype

    # Optionally gzip the body; fall back to plain text on any failure.
    if ('gzip' in request.accept_encodings and
            app.config.get('site/enable_gzip')):
        try:
            with io.BytesIO() as gzip_buffer:
                with gzip.open(gzip_buffer, mode='wt',
                               encoding='utf8') as gzip_file:
                    gzip_file.write(rp_content)
                rp_content = gzip_buffer.getvalue()
            response.content_encoding = 'gzip'
        except Exception:
            logger.error("Error compressing response, "
                         "falling back to uncompressed.")
    response.set_data(rp_content)

    return response