def gen_taxonomy_archives(
        stubs,
        output_path_template=None,
        taxonomies=None,
        templates=_EMPTY_TUPLE):
    """
    Creates a full archive page for each taxonomy term. One page per term.
    """
    output_path_template = output_path_template or ARCHIVE_PATH_TEMPLATE
    tax_index = index_by_taxonomy(stubs, taxonomies)
    for taxonomy, terms in tax_index.items():
        for term, term_stubs in terms.items():
            output_path = output_path_template.format(
                taxonomy=pathtools.to_slug(taxonomy),
                term=pathtools.to_slug(term)
            )
            # Most specific templates first, falling back to generic lists.
            tax_templates = (
                "taxonomy/{}/all.html".format(taxonomy),
                "taxonomy/{}/list.html".format(taxonomy),
                "taxonomy/all.html",
                "taxonomy/list.html",
                "list.html"
            )
            meta = {"stubs": term_stubs}
            now = datetime.now()
            yield doc(
                id_path=output_path,
                output_path=output_path,
                created=now,
                modified=now,
                title=term,
                section=taxonomy,
                templates=templates + tax_templates,
                meta=meta
            )

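# Usage sketch (hedged: the `stubs` value, the taxonomy names, and the
# `write` step below are hypothetical wiring, not part of this module):
#
#   archives = gen_taxonomy_archives(stubs, taxonomies=("tags",))
#   for archive_doc in archives:
#       write(archive_doc)  # hypothetical render/write step
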
def _parse_wikilink(wikilink_str):
    """
    Given a `[[WikiLink]]` or a `[[wikilink | Title]]`, return a tuple
    of `(slug, text)`. Supports both piped and non-piped forms.
    """
    inner = wikilink_str.strip('[] ')
    try:
        _slug, _text = inner.split("|")
        slug = to_slug(_slug.strip())
        text = _text.strip()
    except ValueError:
        text = inner.strip()
        slug = to_slug(text)
    return slug, text

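# Example (hedged: assumes `to_slug` lowercases and hyphenates, which
# is defined elsewhere in the package and not shown in this excerpt):
#
#   >>> _parse_wikilink("[[Wiki Link]]")
#   ('wiki-link', 'Wiki Link')
#   >>> _parse_wikilink("[[wiki-link | Title]]")
#   ('wiki-link', 'Title')
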
def index_wikilinks(docs, base_url="/"):
    """
    Reduce an iterator of docs to a slug-to-url index.
    """
    return {
        to_slug(doc.title): to_url(doc.output_path, base=base_url)
        for doc in docs
    }

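# Illustrative output shape (paths and slugs here are made up; actual
# values depend on `to_slug` and `to_url`, not shown in this excerpt):
#
#   {"my-post": "/my-post/index.html",
#    "another-post": "/another-post/index.html"}
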
def uplift_wikilinks(doc):
    """
    Find all wikilinks in doc and assign their slugs to a `wikilinks`
    property of doc's meta.
    """
    matches = re.finditer(WIKILINK, doc.content)
    wikilinks = (match.group(1) for match in matches)
    slugs = tuple(to_slug(wikilink) for wikilink in wikilinks)
    return Doc.replace_meta(doc, wikilinks=slugs)

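# `WIKILINK` is not defined in this excerpt. A plausible pattern,
# consistent with the `match.group(1)` usage above (an assumption, not
# the module's confirmed definition):
#
#   WIKILINK = r"\[\[([^\]]+)\]\]"
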
def render_inner_match(match):
    """
    Render a single wikilink regex match to HTML. Relies on
    `parse_inner`, `wikilink_index`, `link_template`, and
    `nolink_template` from the enclosing scope.
    """
    inner = match.group(1)
    text = parse_inner(inner)
    try:
        url = wikilink_index[to_slug(text)]
        return link_template.format(url=url, text=text)
    except KeyError:
        return nolink_template.format(text=text)

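# A sketch of how this closure is typically driven (the enclosing
# renderer below is an assumption about usage, not shown in this
# excerpt):
#
#   def render_wikilinks(content):
#       return re.sub(WIKILINK, render_inner_match, content)
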
def taxonomy_archives(
        docs,
        key,
        template="taxonomy.html",
        output_path_template="{taxonomy}/{term}/index.html"):
    """
    Creates an archive page for each taxonomy term. One page per term.
    """
    tax_index = index_taxonomy(docs, key)
    for term, term_docs in tax_index.items():
        output_path = output_path_template.format(
            taxonomy=pathtools.to_slug(key),
            term=pathtools.to_slug(term)
        )
        meta = {"docs": term_docs}
        now = datetime.now()
        yield Doc.create(
            id_path=output_path,
            output_path=output_path,
            created=now,
            modified=now,
            title=term,
            template=template,
            meta=meta
        )

def index_backlinks(stubs):
    """
    Index all backlinks in an iterable of docs. This assumes you have
    already uplifted wikilinks from content with `uplift_wikilinks`.
    """
    # Create an index of `title slug: stub` for stubs carrying wikilinks.
    wikilink_index = {
        to_slug(stub.title): stub
        for stub in stubs
        if "wikilinks" in stub.meta
    }
    backlink_index = {}
    for stub in wikilink_index.values():
        for slug in frozenset(stub.meta["wikilinks"]):
            try:
                to_path = wikilink_index[slug].id_path
                if to_path not in backlink_index:
                    backlink_index[to_path] = []
                backlink_index[to_path].append(stub)
            except KeyError:
                pass
    return backlink_index

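# Usage sketch: attach backlinks to each doc's meta before rendering
# (hypothetical wiring; the `docs` iterable and the `backlinks` meta
# key are assumptions, not part of this excerpt):
#
#   backlink_index = index_backlinks(stubs)
#   docs = (
#       Doc.replace_meta(doc, backlinks=backlink_index.get(doc.id_path, []))
#       for doc in docs
#   )
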
def _index_by_slug(docs):
    """
    Index docs by the slug of their title, as stubs.
    """
    return {to_slug(doc.title): Stub.from_doc(doc) for doc in docs}