def _getWipData(path, site, fs_endpoints):
    """Build the work-in-progress preview data for an edited page.

    Returns a dict with the page's title, slug, edit URL and a text
    extract, or ``None`` if `path` doesn't map to a known auto-format,
    source, or route.
    """
    auto_formats = site.piecrust_app.config.get('site/auto_formats',
                                                ['html'])
    pathname, pathext = os.path.splitext(path)
    # `os.path.splitext` keeps the leading dot (e.g. '.html'), but the
    # `auto_formats` entries have none (see the default ['html']), so the
    # dot must be stripped -- otherwise the test always fails and this
    # function always returns None.
    if pathext.lstrip('.') not in auto_formats:
        return None

    # Find the source whose file-system endpoint contains this path.
    source = None
    for endpoint, s in fs_endpoints.items():
        if path.startswith(endpoint):
            source = s
            break
    if source is None:
        return None

    fac = source.buildPageFactory(os.path.join(site.root_dir, path))
    route = site.piecrust_app.getSourceRoute(source.name, fac.metadata)
    if not route:
        return None

    qp = QualifiedPage(fac.buildPage(), route, fac.metadata)
    uri = qp.getUri()
    _, slug = split_uri(site.piecrust_app, uri)

    with open(fac.path, 'r', encoding='utf8') as fp:
        raw_text = fp.read()
    header, offset = parse_config_header(raw_text)
    extract = text_preview(raw_text, offset=offset)
    return {
        'title': qp.config.get('title'),
        'slug': slug,
        'url': url_for('.edit_page', slug=slug),
        'text': extract
    }
def test_blog_provider():
    """The blog data provider exposes categories with their post counts."""
    posts = [
        ('posts/2015-03-01_one.md', 'One', 'Foo'),
        ('posts/2015-03-02_two.md', 'Two', 'Foo'),
        ('posts/2015-03-03_three.md', 'Three', 'Bar'),
    ]
    fs = mock_fs()
    for url, title, category in posts:
        fs = fs.withPage(url, {'title': title, 'category': category})
    fs = fs.withPage(
        'pages/categories.md',
        {'format': 'none', 'layout': 'none'},
        "{%for c in blog.categories%}\n"
        "{{c.name}} ({{c.post_count}})\n"
        "{%endfor%}\n")
    with mock_fs_scope(fs):
        app = fs.getApp()
        page = app.getSource('pages').getPage({'slug': 'categories'})
        qp = QualifiedPage(page, app.getRoute('pages', None),
                           {'slug': 'categories'})
        rp = render_page(PageRenderingContext(qp))
        # Categories come out alphabetically, each with its post count.
        assert rp.content == "\nBar (1)\n\nFoo (2)\n"
def test_serve_tag_page(tag, expected_indices):
    """Render the tag listing page and check which posts it lists."""
    tags = [['foo'], ['foo', 'bar'], ['bar'], ['bar', 'foo'],
            ['foo', 'whatever'], ['foo', 'bar'], ['unique'],
            ['whatever', 'bar']]

    def config_factory(i):
        # One config per post, 1-based titles, tags copied defensively.
        return {'title': 'Post %d' % (i + 1), 'tags': list(tags[i])}

    fs = mock_fs().withPages(
        8, 'posts/2015-03-{idx1:02}_post{idx1:02}.md', config_factory)
    fs = fs.withPage(
        'pages/_tag.md',
        {'layout': 'none', 'format': 'none'},
        "Pages in {{tag}}\n"
        "{%for p in pagination.posts -%}\n"
        "{{p.title}}\n"
        "{%endfor%}")
    with mock_fs_scope(fs):
        app = fs.getApp()
        page = app.getSource('pages').getPage({'slug': '_tag', 'tag': tag})
        route = app.getTaxonomyRoute('tags', 'posts')
        qp = QualifiedPage(page, route, {'slug': '_tag', 'tag': tag})
        ctx = PageRenderingContext(qp)
        ctx.setTaxonomyFilter(tag)
        rp = render_page(ctx)
        # Posts are listed newest-first, hence the reversed indices.
        parts = ["Pages in %s\n" % tag]
        for i in reversed(expected_indices or []):
            parts.append("Post %d\n" % i)
        assert "".join(parts) == rp.content
def handleJob(self, job):
    """Render the segments for the first sub-page of the given page.

    Returns a result dict with the page path, whether rendering was
    aborted (the page used other sources), and any rendering errors.
    """
    fac = load_factory(self.app, job)

    # These things should be OK as they're checked upstream by the baker.
    route = self.app.getRoute(fac.source.name, fac.metadata,
                              skip_taxonomies=True)
    assert route is not None

    page = fac.buildPage()
    qp = QualifiedPage(page, route, create_route_metadata(page))
    ctx = PageRenderingContext(qp)
    self.app.env.abort_source_use = True

    result = {
        'path': fac.path,
        'aborted': False,
        'errors': None}
    logger.debug("Preparing page: %s" % fac.ref_spec)
    try:
        render_page_segments(ctx)
    except AbortedSourceUseError:
        logger.debug("Page %s was aborted." % fac.ref_spec)
        result['aborted'] = True
    except Exception as ex:
        logger.debug("Got rendering error. Sending it to master.")
        result['errors'] = _get_errors(ex)
        if self.ctx.debug:
            logger.exception(ex)
    finally:
        # Always restore the flag so later jobs aren't affected.
        self.app.env.abort_source_use = False
    return result
def _get_requested_page_for_route(app, route, route_metadata, req_path):
    """Resolve a matched route into a `QualifiedPage` ready to render.

    Raises `PageNotFoundError` when no page backs the requested path.
    """
    source = app.getSource(route.source_name)
    if route.taxonomy_name is not None:
        # Taxonomy routes render through the taxonomy's listing page.
        # `getPageRef` raises `PageNotFoundError` naturally if missing.
        taxonomy = app.getTaxonomy(route.taxonomy_name)
        factory = taxonomy.getPageRef(source).getFactory()
    else:
        factory = source.findPageFactory(route_metadata, MODE_PARSING)
        if factory is None:
            raise PageNotFoundError(
                "No path found for '%s' in source '%s'." %
                (req_path, source.name))

    # Build the page.
    return QualifiedPage(factory.buildPage(), route, route_metadata)
def handleJob(self, job):
    """Bake the page and all its sub-pages to the output folder.

    Returns a result dict with the page path, its taxonomy info, the
    baked sub-entries, and any baking errors.
    """
    fac = load_factory(self.app, job['factory_info'])
    route_metadata = job['route_metadata']
    tax_info = job['taxonomy_info']
    if tax_info is None:
        route = self.app.getRoute(fac.source.name, route_metadata,
                                  skip_taxonomies=True)
    else:
        route = self.app.getTaxonomyRoute(tax_info.taxonomy_name,
                                          tax_info.source_name)
    assert route is not None

    qp = QualifiedPage(fac.buildPage(), route, route_metadata)
    result = {
        'path': fac.path,
        'taxonomy_info': tax_info,
        'sub_entries': None,
        'errors': None}

    # Look up this page's entry in the previous bake record, if any,
    # so the baker can skip up-to-date output.
    previous_entry = None
    if self.ctx.previous_record_index is not None:
        key = _get_transition_key(fac.path, tax_info)
        previous_entry = self.ctx.previous_record_index.get(key)

    dirty_source_names = job['dirty_source_names']
    logger.debug("Baking page: %s" % fac.ref_spec)
    try:
        result['sub_entries'] = self.page_baker.bake(
            qp, previous_entry, dirty_source_names, tax_info)
    except BakingError as ex:
        logger.debug("Got baking error. Sending it to master.")
        result['errors'] = _get_errors(ex)
        if self.ctx.debug:
            logger.exception(ex)
    return result
def _load_rendered_segment(self, data, name):
    """Lazily render the page's text segments and return segment `name`.

    If the page being paginated is the page currently being rendered,
    rendering it again would recurse forever, so placeholder text is
    substituted for its segments instead.
    """
    do_render = True
    eis = self._page.app.env.exec_info_stack
    if eis is not None and eis.hasPage(self._page):
        # This is the pagination data for the page that is currently
        # being rendered! Inception! But this is possible... so just
        # prevent infinite recursion.
        do_render = False
        assert self is data

    if do_render:
        uri = self._get_uri()
        try:
            from piecrust.rendering import (
                QualifiedPage, PageRenderingContext,
                render_page_segments)
            qp = QualifiedPage(self._page, self._route,
                               self._route_metadata)
            ctx = PageRenderingContext(qp)
            render_result = render_page_segments(ctx)
            segs = render_result.segments
        except Exception as e:
            raise Exception(
                "Error rendering segments for '%s'" % uri) from e
    else:
        segs = {}
        # BUG FIX: the loop variable used to be `name`, which shadowed
        # the `name` parameter and made the lookups below return the
        # wrong segment after this branch ran.
        for seg_name in self._page.config.get('segments'):
            segs[seg_name] = "<unavailable: current page>"

    for k, v in segs.items():
        self._unmapLoader(k)
        self._setValue(k, v)
    if 'content.abstract' in segs:
        self._setValue('content', segs['content.abstract'])
        self._setValue('has_more', True)
        if name == 'content':
            return segs['content.abstract']

    return segs[name]
def test_serve_category_page(category, expected_indices):
    """Render the category listing page and check which posts appear."""
    categories = ['foo', 'foo', 'bar', 'foo', None, 'bar']

    def config_factory(i):
        # Posts with a None category simply omit the key.
        c = {'title': 'Post %d' % (i + 1)}
        if categories[i]:
            c['category'] = categories[i]
        return c

    fs = mock_fs().withPages(
        6, 'posts/2015-03-{idx1:02}_post{idx1:02}.md', config_factory)
    fs = fs.withPage(
        'pages/_category.md',
        {'layout': 'none', 'format': 'none'},
        "Pages in {{category}}\n"
        "{%for p in pagination.posts -%}\n"
        "{{p.title}}\n"
        "{%endfor%}")
    with mock_fs_scope(fs):
        app = fs.getApp()
        page = app.getSource('pages').getPage({
            'slug': '_category',
            'category': category
        })
        route = app.getTaxonomyRoute('categories', 'posts')
        qp = QualifiedPage(page, route,
                           {'slug': '_category', 'category': category})
        ctx = PageRenderingContext(qp)
        ctx.setTaxonomyFilter(category)
        rp = render_page(ctx)
        # Posts are listed newest-first, hence the reversed indices.
        expected = "Pages in %s\n" % category
        for i in reversed(expected_indices or []):
            expected += "Post %d\n" % i
        assert expected == rp.content
def _try_render_page(self, app, route, route_metadata, page_num, req_path):
    """Try to render the page matching `route` for a serve request.

    Returns the rendered page, or `None` if no page backs the route.
    Raises `NotFound` for a taxonomy term that matches no posts.
    """
    # Match the route to an actual factory.
    taxonomy_info = None
    source = app.getSource(route.source_name)
    if route.taxonomy_name is None:
        factory = source.findPageFactory(route_metadata, MODE_PARSING)
        if factory is None:
            return None
    else:
        taxonomy = app.getTaxonomy(route.taxonomy_name)
        route_terms = route_metadata.get(taxonomy.term_name)
        if route_terms is None:
            return None

        # CONSISTENCY FIX: pass the source object itself, as the sibling
        # implementations of this lookup in this file do (this copy
        # passed `source.name` instead).
        tax_page_ref = taxonomy.getPageRef(source)
        factory = tax_page_ref.getFactory()
        tax_terms = route.unslugifyTaxonomyTerm(route_terms)
        route_metadata[taxonomy.term_name] = tax_terms
        taxonomy_info = (taxonomy, tax_terms)

    # Build the page.
    page = factory.buildPage()
    # We force the rendering of the page because it could not have
    # changed, but include pages that did change.
    qp = QualifiedPage(page, route, route_metadata)
    render_ctx = PageRenderingContext(qp, page_num=page_num,
                                      force_render=True)
    if taxonomy_info is not None:
        taxonomy, tax_terms = taxonomy_info
        render_ctx.setTaxonomyFilter(taxonomy, tax_terms)

    # See if this page is known to use sources. If that's the case,
    # just don't use cached rendered segments for that page (but still
    # use them for pages that are included in it).
    uri = qp.getUri()
    assert uri == req_path
    entry = self._page_record.getEntry(uri, page_num)
    if (taxonomy_info is not None or entry is None or
            entry.used_source_names):
        cache_key = '%s:%s' % (uri, page_num)
        app.env.rendered_segments_repository.invalidate(cache_key)

    # Render the page.
    rendered_page = render_page(render_ctx)

    # Check if this page is a taxonomy page that actually doesn't match
    # anything.
    if taxonomy_info is not None:
        paginator = rendered_page.data.get('pagination')
        if (paginator and paginator.is_loaded and
                len(paginator.items) == 0):
            taxonomy = taxonomy_info[0]
            message = ("This URL matched a route for taxonomy '%s' but "
                       "no pages have been found to have it. This page "
                       "won't be generated by a bake." % taxonomy.name)
            raise NotFound(message)

    # Remember stuff for next time.
    if entry is None:
        entry = ServeRecordPageEntry(req_path, page_num)
        self._page_record.addEntry(entry)
    for p, pinfo in render_ctx.render_passes.items():
        entry.used_source_names |= pinfo.used_source_names

    # Ok all good.
    return rendered_page
def _try_render_page(self, app, route, route_metadata, page_num, req_path):
    """Try to render the page matching `route` for a serve request.

    Returns the rendered page, or `None` if no page backs the route.
    Raises `NotFound` for a taxonomy term that matches no posts.
    """
    # Match the route to an actual factory.
    taxonomy_info = None
    source = app.getSource(route.source_name)
    if route.taxonomy_name is None:
        factory = source.findPageFactory(route_metadata, MODE_PARSING)
        if factory is None:
            return None
    else:
        taxonomy = app.getTaxonomy(route.taxonomy_name)
        route_terms = route_metadata.get(taxonomy.term_name)
        if route_terms is None:
            return None

        # Taxonomy routes render through the taxonomy's listing page;
        # the URL term is un-slugified back into the real term value.
        tax_page_ref = taxonomy.getPageRef(source)
        factory = tax_page_ref.getFactory()
        tax_terms = route.unslugifyTaxonomyTerm(route_terms)
        route_metadata[taxonomy.term_name] = tax_terms
        taxonomy_info = (taxonomy, tax_terms)

    # Build the page.
    page = factory.buildPage()
    # We force the rendering of the page because it could not have
    # changed, but include pages that did change.
    qp = QualifiedPage(page, route, route_metadata)
    render_ctx = PageRenderingContext(qp, page_num=page_num,
                                      force_render=True)
    if taxonomy_info is not None:
        taxonomy, tax_terms = taxonomy_info
        render_ctx.setTaxonomyFilter(taxonomy, tax_terms)

    # See if this page is known to use sources. If that's the case,
    # just don't use cached rendered segments for that page (but still
    # use them for pages that are included in it).
    uri = qp.getUri()
    entry = self._page_record.getEntry(uri, page_num)
    if (taxonomy_info is not None or entry is None or
            entry.used_source_names):
        cache_key = '%s:%s' % (uri, page_num)
        app.env.rendered_segments_repository.invalidate(cache_key)

    # Render the page.
    rendered_page = render_page(render_ctx)

    # Check if this page is a taxonomy page that actually doesn't match
    # anything.
    if taxonomy_info is not None:
        paginator = rendered_page.data.get('pagination')
        if (paginator and paginator.is_loaded and
                len(paginator.items) == 0):
            taxonomy = taxonomy_info[0]
            message = ("This URL matched a route for taxonomy '%s' but "
                       "no pages have been found to have it. This page "
                       "won't be generated by a bake." % taxonomy.name)
            raise NotFound(message)

    # Remember stuff for next time: record which sources this page used
    # so the next request knows whether its cache can be trusted.
    if entry is None:
        entry = ServeRecordPageEntry(req_path, page_num)
        self._page_record.addEntry(entry)
    for p, pinfo in render_ctx.render_passes.items():
        entry.used_source_names |= pinfo.used_source_names

    # Ok all good.
    return rendered_page
def render_simple_page(page, route, route_metadata):
    """Render `page` through `route` and return the final content string."""
    qualified = QualifiedPage(page, route, route_metadata)
    rendered = render_page(PageRenderingContext(qualified))
    return rendered.content
def bake(self, factory, route, record_entry):
    """Bake all sub-pages of the page from `factory` to the output dir.

    Skips up-to-date output, records everything done into
    `record_entry`, and raises `BakingError` on override conflicts or
    rendering failures.
    """
    # Get the page.
    page = factory.buildPage()
    # Deep-copy so taxonomy terms can be injected without mutating the
    # factory's own metadata.
    route_metadata = copy.deepcopy(factory.metadata)

    # Add taxonomy info in the template data and route metadata if needed.
    bake_taxonomy_info = None
    if record_entry.taxonomy_info:
        tax_name, tax_term, tax_source_name = record_entry.taxonomy_info
        taxonomy = self.app.getTaxonomy(tax_name)
        slugified_term = route.slugifyTaxonomyTerm(tax_term)
        route_metadata[taxonomy.term_name] = slugified_term
        bake_taxonomy_info = (taxonomy, tax_term)

    # Generate the URI.
    uri = route.getUri(route_metadata, provider=page)

    # See if this URL has been overriden by a previously baked page.
    # If that page is from another realm (e.g. a user page vs. a theme
    # page), we silently skip this page. If they're from the same realm,
    # we don't allow overriding and raise an error (this is probably
    # because of a misconfigured configuration that allows for ambiguous
    # URLs between 2 routes or sources).
    override = self.record.getOverrideEntry(factory, uri)
    if override is not None:
        override_source = self.app.getSource(override.source_name)
        if override_source.realm == factory.source.realm:
            # NOTE(review): there is no space between "page" and "'%s:%s'"
            # in this message -- looks like a typo; confirm before fixing
            # since tests may pin the exact text.
            raise BakingError(
                "Page '%s' maps to URL '%s' but is overriden by page"
                "'%s:%s'." % (factory.ref_spec, uri,
                              override.source_name,
                              override.rel_path))
        logger.debug("'%s' [%s] is overriden by '%s:%s'. Skipping" %
                     (factory.ref_spec, uri, override.source_name,
                      override.rel_path))
        record_entry.flags |= BakeRecordPageEntry.FLAG_OVERRIDEN
        return

    # Setup the record entry.
    record_entry.config = copy_public_page_config(page.config)

    # Start baking the sub-pages.
    cur_sub = 1
    has_more_subs = True
    force_this = self.force
    invalidate_formatting = False
    prev_record_entry = self.record.getPreviousEntry(
        factory.source.name, factory.rel_path,
        record_entry.taxonomy_info)

    logger.debug("Baking '%s'..." % uri)

    while has_more_subs:
        # Get the URL and path for this sub-page.
        sub_uri = route.getUri(route_metadata, sub_num=cur_sub,
                               provider=page)
        out_path = self.getOutputPath(sub_uri)

        # Create the sub-entry for the bake record.
        record_sub_entry = BakeRecordSubPageEntry(sub_uri, out_path)
        record_entry.subs.append(record_sub_entry)

        # Find a corresponding sub-entry in the previous bake record.
        prev_record_sub_entry = None
        if prev_record_entry:
            try:
                prev_record_sub_entry = prev_record_entry.getSub(cur_sub)
            except IndexError:
                pass

        # Figure out what to do with this page.
        if (prev_record_sub_entry and
                (prev_record_sub_entry.was_baked_successfully or
                 prev_record_sub_entry.was_clean)):
            # If the current page is known to use pages from other sources,
            # see if any of those got baked, or are going to be baked for
            # some reason. If so, we need to bake this one too.
            # (this happens for instance with the main page of a blog).
            dirty_src_names, invalidated_render_passes = (
                self._getDirtySourceNamesAndRenderPasses(
                    prev_record_sub_entry))
            if len(invalidated_render_passes) > 0:
                logger.debug(
                    "'%s' is known to use sources %s, which have "
                    "items that got (re)baked. Will force bake this "
                    "page. " % (uri, dirty_src_names))
                record_sub_entry.flags |= \
                    BakeRecordSubPageEntry.FLAG_FORCED_BY_SOURCE
                force_this = True

                if PASS_FORMATTING in invalidated_render_passes:
                    logger.debug(
                        "Will invalidate cached formatting for '%s' "
                        "since sources were using during that pass."
                        % uri)
                    invalidate_formatting = True
        elif (prev_record_sub_entry and
                prev_record_sub_entry.errors):
            # Previous bake failed. We'll have to bake it again.
            logger.debug(
                "Previous record entry indicates baking failed for "
                "'%s'. Will bake it again." % uri)
            record_sub_entry.flags |= \
                BakeRecordSubPageEntry.FLAG_FORCED_BY_PREVIOUS_ERRORS
            force_this = True
        elif not prev_record_sub_entry:
            # No previous record. We'll have to bake it.
            logger.debug("No previous record entry found for '%s'. Will "
                         "force bake it." % uri)
            record_sub_entry.flags |= \
                BakeRecordSubPageEntry.FLAG_FORCED_BY_NO_PREVIOUS
            force_this = True

        # Check for up-to-date outputs.
        do_bake = True
        if not force_this:
            try:
                in_path_time = page.path_mtime
                out_path_time = os.path.getmtime(out_path)
                if out_path_time >= in_path_time:
                    do_bake = False
            except OSError:
                # File doesn't exist, we'll need to bake.
                pass

        # If this page didn't bake because it's already up-to-date.
        # Keep trying for as many subs as we know this page has.
        if not do_bake:
            prev_record_sub_entry.collapseRenderPasses(record_sub_entry)
            record_sub_entry.flags = BakeRecordSubPageEntry.FLAG_NONE

            if prev_record_entry.num_subs >= cur_sub + 1:
                cur_sub += 1
                has_more_subs = True
                logger.debug(" %s is up to date, skipping to next "
                             "sub-page." % out_path)
                continue

            logger.debug(" %s is up to date, skipping bake." % out_path)
            break

        # All good, proceed.
        try:
            if invalidate_formatting:
                cache_key = sub_uri
                self.app.env.rendered_segments_repository.invalidate(
                    cache_key)
                record_sub_entry.flags |= \
                    BakeRecordSubPageEntry.FLAG_FORMATTING_INVALIDATED

            logger.debug(" p%d -> %s" % (cur_sub, out_path))
            qp = QualifiedPage(page, route, route_metadata)
            ctx, rp = self._bakeSingle(qp, cur_sub, out_path,
                                       bake_taxonomy_info)
        except Exception as ex:
            if self.app.debug:
                logger.exception(ex)
            page_rel_path = os.path.relpath(page.path, self.app.root_dir)
            raise BakingError("%s: error baking '%s'." %
                              (page_rel_path, uri)) from ex

        # Record what we did.
        record_sub_entry.flags |= BakeRecordSubPageEntry.FLAG_BAKED
        self.record.dirty_source_names.add(record_entry.source_name)
        for p, pinfo in ctx.render_passes.items():
            brpi = BakeRecordPassInfo()
            brpi.used_source_names = set(pinfo.used_source_names)
            brpi.used_taxonomy_terms = set(pinfo.used_taxonomy_terms)
            record_sub_entry.render_passes[p] = brpi
        if prev_record_sub_entry:
            prev_record_sub_entry.collapseRenderPasses(record_sub_entry)

        # Copy page assets.
        if (cur_sub == 1 and self.copy_assets and
                ctx.used_assets is not None):
            if self.pretty_urls:
                out_assets_dir = os.path.dirname(out_path)
            else:
                out_assets_dir, out_name = os.path.split(out_path)
                if sub_uri != self.site_root:
                    out_name_noext, _ = os.path.splitext(out_name)
                    # NOTE(review): this appends with no path separator
                    # (e.g. '/out/subfoo' from '/out/sub/foo.html') --
                    # confirm whether `os.path.join` was intended.
                    out_assets_dir += out_name_noext

            logger.debug("Copying page assets to: %s" % out_assets_dir)
            if not os.path.isdir(out_assets_dir):
                os.makedirs(out_assets_dir, 0o755)
            for ap in ctx.used_assets:
                dest_ap = os.path.join(out_assets_dir,
                                       os.path.basename(ap))
                logger.debug(" %s -> %s" % (ap, dest_ap))
                shutil.copy(ap, dest_ap)
                record_entry.assets.append(ap)

        # Figure out if we have more work.
        has_more_subs = False
        if ctx.used_pagination is not None:
            if ctx.used_pagination.has_more:
                cur_sub += 1
                has_more_subs = True