def handle(self, path):
    """Resolve a request *path* to a wiki resource.

    Returns a ``(body, content_type)`` tuple, or whatever a registered
    special handler in ``self.handlers`` returns for unmatched paths.

    Raises:
        WikidNotFoundError: if the path matches no file and no handler.

    Resolution rules (as implemented below):
      * a trailing '/' is stripped;
      * direct requests for '.md' files are refused (``full_path`` is left
        ``None`` so they fall through to handlers / not-found);
      * '.html' requests are mapped back to the matching '.md' source;
      * bare 'index' maps to 'index.md';
      * anything else is looked up verbatim via ``self.find_matching_path``.
    """
    # endswith() is safe on the empty string, unlike path[-1].
    if path.endswith('/'):
        path = path[:-1]
    name, ext = os.path.splitext(os.path.basename(path))
    if ext == '.md':
        # Raw markdown is never served directly.
        full_path = None
    elif ext == '.html':
        full_path = self.find_matching_path(name + '.md')
    elif name == 'index':
        full_path = self.find_matching_path('index.md')
    else:
        full_path = self.find_matching_path(path)
    if not full_path:
        handler = self.handlers.get(path)
        if handler:
            return handler()
        else:
            raise WikidNotFoundError()
    name, ext = os.path.splitext(os.path.basename(full_path))
    if ext == '.md':
        if name == 'index':
            base_url = ''
        else:
            # NOTE(review): relpath from a *file* path looks odd — presumably
            # find_matching_path returns something directory-like; confirm.
            base_url = os.path.relpath(self.path, start=full_path) + '/'
        result = convert(full_path, extension_configs={'wikilinks': [
            ('base_url', base_url)]})
        content_type = 'text/html; charset=utf-8'
    else:
        content_type, _ = mimetypes.guess_type(full_path)
        # with-block closes the handle promptly (the original leaked it).
        with open(full_path, 'rb') as f:
            result = f.read()
    return result, content_type
def walk_wiki_files(path, dir_visitor=None, non_wiki_visitor=None,
                    wiki_visitor=None, link_gen=None):
    """Walks a directory looking for wiki files.

    The first argument is the path to walk, the rest are visitors for
    different things the walk encounters.

    dir_visitor(path)
        `path` -- a path relative to the wiki path
    non_wiki_visitor(path)
        `path` -- a path to a file without a .md extension, relative to the
        wiki path
    wiki_visitor(base, name, html)
        `base` -- the base path, relative to the wiki path
        `name` -- the name of the markdown file, minus its extension
        `html` -- the html generated from the markdown

    Additionally, the caller may wish to pass a `link_gen`. This generates
    an internal link between wiki files. It should look like this:

    link_gen(base, name, id)
        `base` -- the base path relative to the wiki path
        `name` -- the name of the wiki file sans extension
        `id` -- the id of an anchor on the wiki page (may be None)

    Returns a list of items collected by `TextCollectingExtension`.
    """
    items = []
    for base, dirs, files in os.walk(path):
        if '.page-order' in files:
            # .page-order holds whitespace-separated page names; listed pages
            # sort in that order, everything else sorts after them (10000).
            order = {}
            with open(os.path.join(base, '.page-order')) as order_file:
                for i, p in enumerate(order_file.read().split()):
                    order[p.lower() + '.md'] = i
            # key= replaces the Python-2-only cmp= argument; same ordering
            # (stable sort keeps ties in directory order, as cmp() did).
            files.sort(key=lambda f: order.get(f.lower(), 10000))
        if dir_visitor:
            for d in dirs:
                dir_visitor(os.path.relpath(os.path.join(base, d), start=path))
        for f in files:
            relbase = os.path.relpath(base, start=path)
            name, ext = os.path.splitext(f)
            if ext == '.md':
                md_text_ext = TextCollectingExtension()
                if link_gen:
                    base_url = link_gen(relbase, '', None)
                else:
                    base_url = os.path.relpath(path, start=base)
                html = convert(os.path.join(base, f),
                               extensions=[md_text_ext],
                               extension_configs={'wikilinks': [
                                   ('base_url', base_url + '/'),
                                   ('end_url', '.html')]})
                for item in md_text_ext.treeprocessor.items:
                    if link_gen:
                        item['path'] = link_gen(relbase, name, item.get('id'))
                    else:
                        item_file_path = os.path.relpath(
                            os.path.join(base, name + '.html'), start=path)
                        item['path'] = item_file_path + (
                            '#' + item['id'] if 'id' in item else '')
                    items.append(item)
                if wiki_visitor:
                    wiki_visitor(relbase, name, html)
            elif non_wiki_visitor:
                non_wiki_visitor(os.path.join(relbase, f))
    return items