class SourceUrlPlugin(BasePlugin):
    """Attach a ``source_url`` attribute to each page, pointing at the
    repository location of the page's source file."""

    config_scheme = (
        ('repos_file', config_options.Type(str)),
        ('repos_prefix', config_options.Type(str, default="")),
        ('default_url_template', config_options.Type(str, default="")),
        ('github_url_template', config_options.Type(str, default="")),
    )

    def on_config(self, config, **kwargs):
        """Parse the repos file once and cache the result in the plugin config."""
        repos_file = self.config["repos_file"]
        parsed = config_options.SourceCodeLink.parse_repos_info(repos_file)
        self.config["repos_info"] = parsed
        return config

    def on_pre_page(self, page, **kwargs):
        """Compute and attach the page's source-repository URL."""
        page.source_url = convert_path_to_source_url(
            page.file.src_path,
            repos_prefix=self.config["repos_prefix"],
            repos_info=self.config["repos_info"],
            default_url_template=self.config["default_url_template"],
            github_url_template=self.config["github_url_template"],
        )
        return page
class DoxygenPlugin(BasePlugin):
    """Run doxygen for each configured package after the MkDocs build,
    writing the generated HTML into the site directory."""

    config_scheme = (
        # Each entry maps an output directory name to a doxygen job spec.
        ("packages", ConfigItems(
            ("url", mkd.Type(str)),       # repo URL or local dir (default ".")
            ("config", mkd.Type(str)),    # doxygen config file, optional
            ("workdir", mkd.Type(str)),   # working directory, optional
        )),
        ("tryclone", mkd.Type(bool, default=False)),
        ("recursive", mkd.Type(bool, default=False)),
    )

    def on_post_build(self, config):
        """Invoke doxygen per package; failures skip that package only."""
        for pkgConf in self.config["packages"]:
            for outname, cfg in pkgConf.items():
                # Output goes under site_dir/<outname>.
                outpath = os.path.abspath(
                    os.path.join(config["site_dir"], outname))
                try:
                    basedir = cfg.get("url", ".")
                    icfg = cfg.get("config")
                    logger.info(
                        "Running doxygen for {0} with {1}, saving into {2}".
                        format((basedir if basedir != "." else "current directory"),
                               (icfg if icfg else "default config"), outpath))
                    runDoxygen(basedir,
                               cfg=icfg,
                               workdir=cfg.get("workdir"),
                               dest=outpath,
                               tryClone=self.config["tryclone"],
                               recursive=self.config["recursive"])
                except Exception as e:
                    # Best-effort: log and continue with the next package.
                    logger.error(
                        "Skipped doxygen for package {0}: {1!s}".format(
                            outname, e))
class RootPlugin(BasePlugin):
    """Build docs from markdown/image files scattered across the project root.

    Before the build, matching files are copied (preserving folder structure)
    into a temporary docs dir which replaces ``docs_dir``; after the build
    the temporary copy is removed.
    """

    config_scheme = (('ignore_folders', config_options.Type(list, default=[])),
                     ('ignore_hidden', config_options.Type(bool, default=True)))

    def __init__(self):
        # Temporary directory used as the effective docs_dir for the build.
        self.docs_dir = "docs_"

    def on_pre_build(self, config):
        # Fix: copy the configured list before extending it. The original
        # used '+=' directly on the list returned by the config option,
        # mutating the shared default=[] across successive builds.
        self.ignore_folders = list(self.config['ignore_folders'])
        self.ignore_folders += [
            config['docs_dir'], config['site_dir'], self.docs_dir
        ]
        self.ignore_hidden = self.config['ignore_hidden']

        # Update the docs_dir with our temporary one!
        self.orig_docs_dir = config['docs_dir']
        config['docs_dir'] = self.docs_dir

        # Add all md files from directory, keeping folder structure
        self.paths = self.gen_from_dir()

    def on_serve(self, server, config, **kwargs):
        """Watch the original docs dir and every copied source file."""
        builder = list(server.watcher._tasks.values())[0]['func']

        # still watch the original docs/ directory
        if os.path.exists(self.orig_docs_dir):
            server.watch(self.orig_docs_dir, builder)

        # watch all the doc files at their original locations
        for orig, _ in self.paths:
            server.watch(orig, builder)

        return server

    def on_post_build(self, config):
        # Remove the temporary docs tree created in on_pre_build.
        shutil.rmtree(self.docs_dir)

    def valid_dir(self, dir):
        """Return True when 'dir' should be descended into during the walk."""
        if self.ignore_hidden and dir[0] == ".":
            return False
        if dir in self.ignore_folders:
            return False
        return True

    def gen_from_dir(self):
        """Copy doc/image files into the temp docs dir.

        Returns a list of (original_path, copied_path) pairs.
        """
        paths = []
        for root, dirs, files in os.walk("."):
            for f in files:
                # Fix: match on the real file extension. The original used
                # substring containment ('.md' in f), which also picked up
                # names like 'notes.md.bak' or 'a.mdx'.
                if f.endswith(('.md', '.svg', '.png', '.jpg')):
                    doc_root = "./" + self.docs_dir + root[1:]
                    orig = "{}/{}".format(root, f)
                    new = "{}/{}".format(doc_root, f)
                    try:
                        os.makedirs(doc_root, exist_ok=True)
                        shutil.copy(orig, new)
                        paths.append((orig, new))
                    except Exception as e:
                        print("ERROR: {}.. skipping {}".format(e, orig))
            # Prune ignored/hidden directories from the walk in place.
            dirs[:] = [d for d in dirs if self.valid_dir(d)]
        return paths
class ErrandKunPlugin(BasePlugin):
    """Download external build artefacts (e.g. GitHub workflow artefacts)
    into ``extract_path`` before the MkDocs build starts."""

    config_scheme = (('extract_path', config_options.Type(str, required=True)),
                     ('external_resources', config_options.Type(list, default=[])),
                     ('ver', config_options.Type(int, default=0)))

    def on_pre_build(self, config):
        """Fetch every configured external resource.

        Raises:
            Exception: when a resource entry has no 'type' key.
        """
        extract_path = self.config['extract_path']
        for external_resource in self.config['external_resources']:
            # Fix: use .get() so a missing 'type' key raises the intended
            # error instead of an uninformative KeyError.
            resource_type = external_resource.get('type')
            if resource_type is None:
                raise Exception(
                    'type needs to be defined in external_resources')
            if resource_type.lower() == 'github':
                # Token is optional; anonymous access is attempted when unset.
                github = GithubArtefactSource(
                    extract_path=extract_path,
                    github_owner=external_resource['owner'],
                    github_repo=external_resource['repo'],
                    workflow_name=external_resource['workflow_name'],
                    token=os.getenv('ERRANDKUN_GITHUB_TOKEN', default=None))
                github.download_all()
        # (removed leftover debug print)
class Plugin(mkdocs.plugins.BasePlugin):
    """Render Jupyter notebooks (and .py scripts) as MkDocs pages,
    optionally executing them and copying the source next to the output."""

    config_scheme = (
        ("execute", config_options.Type(bool, default=False)),
        ("include_source", config_options.Type(bool, default=False)),
        ("kernel_name", config_options.Type(str, default="")),
    )

    def on_files(self, files, config):
        """Wrap notebook/script files in NotebookFile so they get URLs."""
        extensions = [".ipynb", ".py"]
        ret = Files([
            NotebookFile(file, **config) if os.path.splitext(
                str(file.abs_src_path))[-1] in extensions else file
            for file in files
        ])
        return ret

    def on_pre_page(self, page, config, files):
        """Override rendering for notebook pages via a per-object monkey patch."""
        extensions = [".ipynb", ".py"]
        if os.path.splitext(str(page.file.abs_src_path))[-1] in extensions:
            # Capture config values in the closure used by new_render.
            exec_nb = self.config["execute"]
            kernel_name = self.config["kernel_name"]

            def new_render(self, config, files):
                # Convert the notebook to HTML instead of markdown rendering.
                body = convert.nb2html(page.file.abs_src_path,
                                       execute=exec_nb,
                                       kernel_name=kernel_name)
                self.content = body
                self.toc = get_nb_toc(page.file.abs_src_path)

            # replace render with new_render for this object only
            page.render = new_render.__get__(page, Page)
            # Add metadata for template
            self._set_nb_url(page)
        return page

    def _set_nb_url(self, page):
        """Expose the notebook's sibling URL as page.nb_url for templates."""
        from urllib.parse import urljoin
        nb_source = page.file.abs_src_path
        nb_source_name = os.path.basename(nb_source)
        page.nb_url = urljoin(page.abs_url, nb_source_name)

    def on_post_page(self, output_content, page, config):
        """Optionally copy the notebook source next to the rendered page."""
        # Include source
        if self.config["include_source"]:
            from shutil import copyfile
            nb_source = page.file.abs_src_path
            nb_source_name = os.path.basename(nb_source)
            nb_target_dir = os.path.dirname(page.file.abs_dest_path)
            nb_target = os.path.join(nb_target_dir, nb_source_name)
            os.makedirs(nb_target_dir, exist_ok=True)
            copyfile(nb_source, nb_target)
class SearchPlugin(BasePlugin):
    """ Add a search feature to MkDocs. """

    config_scheme = (
        ('lang', LangOption()),
        ('separator', config_options.Type(str, default=r'[\s\-]+')),
        ('min_search_length', config_options.Type(int, default=3)),
        ('prebuild_index',
         config_options.Choice((False, True, 'node', 'python'), default=False)),
        ('indexing',
         config_options.Choice(('full', 'sections', 'titles'), default='full'))
    )

    def on_config(self, config, **kwargs):
        "Add plugin templates and scripts to config."
        if 'include_search_page' in config['theme'] and config['theme']['include_search_page']:
            config['theme'].static_templates.add('search.html')
        if not ('search_index_only' in config['theme'] and config['theme']['search_index_only']):
            # Theme renders search itself: ship our template + client script.
            path = os.path.join(base_path, 'templates')
            config['theme'].dirs.append(path)
            if 'search/main.js' not in config['extra_javascript']:
                config['extra_javascript'].append('search/main.js')
        if self.config['lang'] is None:
            # lang setting undefined. Set default based on theme locale
            validate = self.config_scheme[0][1].run_validation
            self.config['lang'] = validate(config['theme']['locale'].language)
        return config

    def on_pre_build(self, config, **kwargs):
        "Create search index instance for later use."
        self.search_index = SearchIndex(**self.config)

    def on_page_context(self, context, **kwargs):
        "Add page to search index."
        self.search_index.add_entry_from_context(context['page'])

    def on_post_build(self, config, **kwargs):
        "Build search index."
        output_base_path = os.path.join(config['site_dir'], 'search')
        search_index = self.search_index.generate_search_index()
        json_output_path = os.path.join(output_base_path, 'search_index.json')
        utils.write_file(search_index.encode('utf-8'), json_output_path)
        if not ('search_index_only' in config['theme'] and config['theme']['search_index_only']):
            # Include language support files in output. Copy them directly
            # so that only the needed files are included.
            files = []
            if len(self.config['lang']) > 1 or 'en' not in self.config['lang']:
                files.append('lunr.stemmer.support.js')
            if len(self.config['lang']) > 1:
                files.append('lunr.multi.js')
            if ('ja' in self.config['lang'] or 'jp' in self.config['lang']):
                # Japanese needs the TinySegmenter tokenizer.
                files.append('tinyseg.js')
            for lang in self.config['lang']:
                if (lang != 'en'):
                    files.append('lunr.{}.js'.format(lang))
            for filename in files:
                from_path = os.path.join(base_path, 'lunr-language', filename)
                to_path = os.path.join(output_base_path, filename)
                utils.copy_file(from_path, to_path)
class GitRevisionDatePlugin(BasePlugin):
    """Expose each page's last git revision date to templates and markdown.

    Fixes:
    - The "disabled" notice wrongly said "PDF export is disabled"
      (copy-pasted from another plugin); it now names this plugin.
    - With as_datetime=True, a datetime object was later passed to
      str.format / re.sub and crashed; the string form is kept for
      markdown substitution.
    """

    config_scheme = (
        ('enabled_if_env', config_options.Type(str)),
        ('modify_md', config_options.Type(bool, default=True)),
        ('as_datetime', config_options.Type(bool, default=False)),
    )

    def __init__(self):
        self.enabled = True
        self.util = Util()

    def on_config(self, config):
        # Optionally gate the plugin behind an environment variable.
        env_name = self.config['enabled_if_env']
        if env_name:
            self.enabled = environ.get(env_name) == '1'
            if not self.enabled:
                print('git-revision-date plugin is disabled '
                      '(set environment variable %s to 1 to enable)' % env_name)
        return

    def on_page_markdown(self, markdown, page, config, files):
        """Record the revision date in page.meta and substitute placeholders."""
        if not self.enabled:
            return markdown

        revision_date = self.util.get_revision_date_for_file(page.file.abs_src_path)
        if not revision_date:
            # Fall back to today for files with no git history.
            revision_date = datetime.now().date().strftime('%Y-%m-%d')
            print('WARNING - %s has no git logs, revision date defaulting to today\'s date'
                  % page.file.src_path)

        # Keep the string form for markdown substitution below, even when
        # the meta value is converted to a datetime.
        revision_str = revision_date
        if self.config['as_datetime']:
            revision_date = datetime.strptime(revision_date, '%Y-%m-%d')
        page.meta['revision_date'] = revision_date

        if not self.config['modify_md']:
            return markdown

        if 'macros' in config['plugins']:
            # macros must run after this plugin so it can see the variable.
            keys = list(config['plugins'].keys())
            vals = list(config['plugins'].values())
            if keys.index('macros') > vals.index(self):
                return '{{% set git_revision_date = \'{}\' %}}\n'.format(revision_str) + markdown
            print('WARNING - macros plugin must be placed AFTER the '
                  'git-revision-date plugin. Skipping markdown modifications')
            return markdown

        # No macros plugin: substitute the placeholders ourselves.
        markdown = re.sub(r"\{\{(\s)*git_revision_date(\s)*\}\}",
                          revision_str, markdown, flags=re.IGNORECASE)
        markdown = re.sub(r"\{\{\s*page\.meta\.git_revision_date\s*\}\}",
                          revision_str, markdown, flags=re.IGNORECASE)
        return markdown
class Plugin(BasePlugin):
    """ MkDocs Plugin class: generate API markdown from a python module. """

    # Cached collector for the configured root module.
    _root_module: Optional[ModuleCollector] = None

    # out-dir is config["docs_dir"]
    config_scheme = (
        ("module", config_options.Type(str, required=True)),
        ("out-name", config_options.Type(str, default=None)),
    )

    def root_module(self, config: Config) -> ModuleCollector:
        """Import the configured module and wrap it in a collector (cached)."""
        if not self._root_module:
            out_dir = config["docs_dir"]
            out_name = self.config["out-name"]
            root_name = self.config["module"]
            _root_module = importlib.import_module(root_name)
            self._root_module = ModuleCollector(
                _root_module, out_dir, out_name=out_name, enable_yaml_header=True
            )
        return self._root_module

    def on_config(self, config: Config, **kw: Any) -> Config:
        """Ensure the markdown extensions the generated docs rely on."""
        md_ext = config.get("markdown_extensions", [])
        if "attr_list" not in md_ext:
            md_ext.append("attr_list")
        if "meta" not in md_ext:
            md_ext.append("meta")
        # Bug fix: the key was previously misspelled "markdown_extantions",
        # so when the config had no extensions list yet, the additions were
        # stored under a key MkDocs never reads.
        config["markdown_extensions"] = md_ext
        return config

    def on_serve(
        self, server: LiveReloadServer, config: Config, builder: Callable[[], None], **kw: Any
    ) -> LiveReloadServer:
        """Rebuild docs and watch the source module directory for changes."""
        self._build(config)
        # add watching path.
        module_path = self.config["module"].replace(".", "/")
        server.watch(module_path)
        return server

    def on_pre_build(self, config: Config) -> None:
        """Build markdown docs from python modules."""
        self._build(config)

    def _build(self, config: Config) -> None:
        """Make the working directory importable, then write the docs."""
        cwd = os.getcwd()
        if cwd not in sys.path:
            sys.path.append(cwd)
        # create docs.
        self.root_module(config).write()
class SearchPlugin(BasePlugin):
    """ Add a search feature to MkDocs. """

    config_scheme = (
        ('lang', LangOption(default=['en'])),
        ('separator', config_options.Type(utils.string_types, default=r'[\s\-]+')),
        ('prebuild_index', config_options.Type(bool, default=False)),
    )

    def on_config(self, config, **kwargs):
        "Add plugin templates and scripts to config."
        if 'include_search_page' in config['theme'] and config['theme'][
                'include_search_page']:
            config['theme'].static_templates.add('search.html')
        if not ('search_index_only' in config['theme'] and config['theme']['search_index_only']):
            # Theme renders search itself: ship our template + client script.
            path = os.path.join(base_path, 'templates')
            config['theme'].dirs.append(path)
            if 'search/main.js' not in config['extra_javascript']:
                config['extra_javascript'].append('search/main.js')
        return config

    def on_pre_build(self, config, **kwargs):
        "Create search index instance for later use."
        self.search_index = SearchIndex(**self.config)

    def on_page_context(self, context, **kwargs):
        "Add page to search index."
        self.search_index.add_entry_from_context(context['page'])

    def on_post_build(self, config, **kwargs):
        "Build search index."
        output_base_path = os.path.join(config['site_dir'], 'search')
        search_index = self.search_index.generate_search_index()
        json_output_path = os.path.join(output_base_path, 'search_index.json')
        utils.write_file(search_index.encode('utf-8'), json_output_path)
        if not ('search_index_only' in config['theme'] and config['theme']['search_index_only']):
            # Include language support files in output. Copy them directly
            # so that only the needed files are included.
            files = []
            if len(self.config['lang']) > 1 or 'en' not in self.config['lang']:
                files.append('lunr.stemmer.support.js')
            if len(self.config['lang']) > 1:
                files.append('lunr.multi.js')
            for lang in self.config['lang']:
                if (lang != 'en'):
                    files.append('lunr.{}.js'.format(lang))
            for filename in files:
                from_path = os.path.join(base_path, 'lunr-language', filename)
                to_path = os.path.join(output_base_path, filename)
                utils.copy_file(from_path, to_path)
class PdfWithJS(BasePlugin):
    """Render built pages to PDF via the Printer helper and optionally
    inject a per-page download button."""

    config_scheme = (
        ('enable', config_options.Type(bool, default=True)),
        ('add_download_button', config_options.Type(bool, default=False)),
        ('display_header_footer', config_options.Type(bool, default=False)),
        ('header_template', config_options.Type(str, default='')),
        ('footer_template', config_options.Type(str, default='')),
    )

    def __init__(self):
        self.printer = Printer()

    def on_config(self, config, **kwargs):
        """Cache plugin options and configure the printer."""
        self.enabled = self.config['enable']
        self.add_download_button = self.config['add_download_button']
        self.printer.set_config(self.config['display_header_footer'],
                                self.config['header_template'],
                                self.config['footer_template'])
        return config

    def on_nav(self, nav, config, files):
        return nav

    def on_post_page(self, output_content, page, config, **kwargs):
        """Register the page for printing; optionally add the download link."""
        if not self.enabled:
            # Returning None tells MkDocs to keep the content unchanged.
            return
        page_paths = self.printer.add_page(page, config)
        if self.add_download_button:
            output_content = self.printer.add_download_link(
                output_content, page_paths)
        return output_content

    def on_post_build(self, config):
        if not self.enabled:
            return
        self.printer.print_pages()

    def on_env(self, env, config, files):
        # Expose a 'shuffle' filter to Jinja templates.
        env.filters['shuffle'] = self.do_shuffle

    def do_shuffle(self, seq):
        """Shuffle seq in place when possible; always return it.

        Fix: the original bare 'except:' also swallowed SystemExit and
        KeyboardInterrupt; only Exception subclasses are caught now.
        """
        try:
            random.shuffle(seq)
        except Exception:
            # Non-shufflable input (e.g. immutable sequence): return as-is.
            pass
        return seq
class GitLabPlugin(BasePlugin):
    """Inject the GitLab Visual Review toolbar script into built pages
    when running inside a GitLab CI merge-request pipeline."""

    config_scheme = (
        ('auth', config_options.Type(bool, default=False)),
        ('gitlab_url', config_options.Type(str, default='https://gitlab.com')),
        ('script_url',
         config_options.Type(
             str,
             default=
             'https://gitlab.com/assets/webpack/visual_review_toolbar.js')),
        ('script_id',
         config_options.Type(str, default='review-app-toolbar-script')),
    )

    def __init__(self):
        # CI environment provides the project/MR identifiers.
        self.gitlab_project_id = os.environ.get('CI_PROJECT_ID', None)
        self.gitlab_project_path = os.environ.get('CI_PROJECT_PATH', None)
        self.gitlab_mr = os.environ.get('CI_MERGE_REQUEST_IID', None)
        # Only enabled when all three identifiers are present.
        self.enabled = (self.gitlab_mr is not None
                        and self.gitlab_project_id is not None
                        and self.gitlab_project_path is not None)

    def script(self):
        """Build the <script> tag for the review toolbar."""
        template = ("<script "
                    "defer data-project-id='%s' "
                    "data-project-path='%s' "
                    "data-merge-request-id='%s' "
                    "%s"
                    "data-mr-url='%s' "
                    "id='%s' "
                    "src='%s'></script>")
        return template % (self.gitlab_project_id, self.gitlab_project_path,
                           self.gitlab_mr, self.require_auth(),
                           self.gitlab_url(), self.script_id(),
                           self.script_url())

    def on_post_page(self, output, page, config):
        """Insert the toolbar script just before </head> when enabled."""
        if not self.enabled:
            return output
        if '</head>' not in output:
            return output
        return output.replace('</head>', "%s\n</head>" % self.script())

    def require_auth(self):
        # Extra attribute when authentication is required.
        return "data-require-auth='true' " if self.config.get('auth') else ''

    def gitlab_url(self):
        return self.config.get('gitlab_url')

    def script_url(self):
        return self.config.get('script_url')

    def script_id(self):
        return self.config.get('script_id')
class AwesomePagesPlugin(BasePlugin):
    """MkDocs plugin that rewrites the navigation from '.pages' meta files."""

    # Name of the per-directory meta file that customises navigation.
    DEFAULT_META_FILENAME = '.pages'

    config_scheme = (('filename',
                      config_options.Type(mkdocs_utils.string_types,
                                          default=DEFAULT_META_FILENAME)),
                     ('collapse_single_pages',
                      config_options.Type(bool, default=False)))

    def on_nav(self, nav: MkDocsNavigation, config: Config, files: Files):
        # Rewrite the generated nav and convert back to MkDocs' structure.
        return AwesomeNavigation(nav, Options(**self.config)).to_mkdocs()
class AwesomePagesPlugin(BasePlugin):
    """Rewrite the MkDocs navigation according to '.pages' meta files."""

    # Per-directory meta file that customises navigation.
    DEFAULT_META_FILENAME = ".pages"

    config_scheme = (
        ("filename", config_options.Type(str, default=DEFAULT_META_FILENAME)),
        ("collapse_single_pages", config_options.Type(bool, default=False)),
        ("strict", config_options.Type(bool, default=True)),
    )

    def on_nav(self, nav: MkDocsNavigation, config: Config, files: Files):
        """Rebuild and return the navigation tree."""
        options = Options(**self.config)
        rewritten = AwesomeNavigation(nav, options)
        return rewritten.to_mkdocs()
class AwesomePagesPlugin(BasePlugin):
    """Rewrite the MkDocs navigation from '.pages' meta files, with limits
    on homepage file count and look-ahead days."""

    DEFAULT_META_FILENAME = '.pages'

    config_scheme = (
        ('maximum_file_homepage', config_options.Type(int, default=100)),
        ('maximum_days_ahead', config_options.Type(int, default=30)),
        ('filename', config_options.Type(str, default=DEFAULT_META_FILENAME)),
        ('collapse_single_pages', config_options.Type(bool, default=False)),
        ('strict', config_options.Type(bool, default=True)),
    )

    def on_nav(self, nav: MkDocsNavigation, config: Config, files: Files):
        """Rebuild and return the navigation tree."""
        options = Options(**self.config)
        return AwesomeNavigation(nav, options).to_mkdocs()
class GitRevisionDatePlugin(BasePlugin):
    """Store each page's last git revision date in page.meta.

    Fix: the "disabled" notice wrongly said "PDF export is disabled"
    (copy-pasted from another plugin); it now names this plugin.
    """

    config_scheme = (('enabled_if_env', config_options.Type(string_types)), )

    def __init__(self):
        self.enabled = True
        self.util = Util()

    def on_config(self, config):
        # Optionally gate the plugin behind an environment variable.
        env_name = self.config['enabled_if_env']
        if env_name:
            self.enabled = environ.get(env_name) == '1'
            if not self.enabled:
                print(
                    'git-revision-date plugin is disabled (set environment variable %s to 1 to enable)'
                    % env_name)
        return

    def on_page_markdown(self, markdown, page, config, files):
        """Attach the revision date to page.meta; markdown is unchanged."""
        if not self.enabled:
            return markdown

        revision_date = self.util.get_revision_date_for_file(
            page.file.abs_src_path)
        if not revision_date:
            # Fall back to today's date for files with no git history.
            from datetime import datetime
            revision_date = datetime.now().date()
            print(
                'WARNING - %s has no git logs, revision date defaulting to today\'s date'
                % page.file.src_path)
        page.meta['revision_date'] = revision_date
        return markdown
def test_doc_dir_in_site_dir(self):
    """A docs_dir nested inside (or equal to) site_dir must fail validation."""
    j = os.path.join
    test_configs = (
        {'docs_dir': j('site', 'docs'), 'site_dir': 'site'},
        {'docs_dir': 'docs', 'site_dir': '.'},
        {'docs_dir': '.', 'site_dir': '.'},
        {'docs_dir': 'docs', 'site_dir': ''},
        {'docs_dir': '', 'site_dir': ''},
        {'docs_dir': j('..', 'mkdocs', 'docs'), 'site_dir': 'docs'},
    )
    base_conf = {'config_file_path': j(os.path.abspath('..'), 'mkdocs.yml')}

    for case in test_configs:
        patch = {**base_conf, **case}

        # Same as the default schema, but don't verify the docs_dir exists.
        c = config.Config(schema=(
            ('docs_dir', config_options.Dir(default='docs')),
            ('site_dir', config_options.SiteDir(default='site')),
            ('config_file_path', config_options.Type(utils.string_types)),
        ))
        c.load_dict(patch)

        errors, warnings = c.validate()
        self.assertEqual(len(errors), 1)
        self.assertEqual(warnings, [])
class IncludePlugin(BasePlugin):
    """Expose an ``includex(filename)`` Jinja helper that inlines files
    from ``src_path`` into the page markdown."""

    config_scheme = (
        ('src_path', config_options.Type(str, default="docs")),
    )

    # Page currently being rendered (set in on_page_markdown).
    page = None

    def includex(self, filename, encoding='utf-8'):
        """Return the contents of *filename* (relative to src_path).

        CSV files are routed through parseCsv; everything else is read
        as text with the given encoding.
        """
        if not encoding:
            # Treat empty/None encoding as the default.
            encoding = 'utf-8'
        # !!! TODO support git+, https and other uris
        # !!! TODO support BOF, EOF markers
        # !!! TODO support line range
        # Bug fix: the requested filename was previously ignored and a
        # hard-coded placeholder path was opened instead.
        path = f'{self.config["src_path"]}/{filename}'
        suffix = os.path.splitext(path)[1]
        if suffix == ".csv":
            return parseCsv(path, encoding)
        with open(path, 'r', encoding=encoding) as f:
            return f.read()

    def on_page_markdown(self, markdown, page, config, **kwargs):
        """Render the page markdown as a Jinja template with includex()."""
        self.page = page
        md_template = Template(markdown)
        return md_template.render(includex=self.includex)
class TocSidebar(BasePlugin):
    """Move a page-provided 'sidebar' block into the theme's ToC sidebar."""

    config_scheme = (('param',
                      config_options.Type(mkdocs_utils.string_types,
                                          default='')), )

    def __init__(self):
        self.enabled = True
        self.total_time = 0

    def on_post_page(self, output_content, page, config):
        """Relocate <div class="sidebar"> into the ToC scrollwrap, if present."""
        soup = BeautifulSoup(output_content, 'html.parser')
        sidebar_block = soup.find("div", {"class": "sidebar"})
        if sidebar_block:
            toc_block = soup.find("div", {"data-md-component": "toc"})
            if toc_block:
                wrap = toc_block.findNext("div",
                                          {"class": "md-sidebar__scrollwrap"})
                if wrap:
                    wrap.insert(0, sidebar_block)
            else:
                print("WARNING: Table of Contents sidebar not found")
        return soup.prettify(soup.original_encoding)
class TocSidebar(BasePlugin):
    """Insert a page-provided 'sidebar' block above the theme's ToC."""

    config_scheme = (('param', config_options.Type(str, default='')), )

    def __init__(self):
        self.enabled = True
        self.total_time = 0

    def on_post_page(self, output_content, page, config):
        """Relocate <div class="sidebar"> into the ToC scrollwrap, if present."""
        soup = BeautifulSoup(output_content, 'html.parser')
        extra = soup.find("div", {"class": "sidebar"})
        if extra:
            toc = soup.find("div", {"data-md-type": "toc"})
            if toc is None:
                print(
                    "WARNING (ToC Sidebar): Table of Contents in sidebar not found in %s"
                    % page.file.src_path)
            else:
                wrap = toc.find("div", {"class": "md-sidebar__scrollwrap"})
                if wrap is None:
                    print(
                        "WARNING (ToC Sidebar): Insertion point not found in %s"
                        % page.file.src_path)
                else:
                    wrap.insert(0, extra)
        # Preserve the document's original encoding when one was detected.
        if soup.original_encoding:
            return soup.encode(soup.original_encoding)
        return str(soup)
class BootstrapTablesPlugin(BasePlugin):
    """Add Bootstrap table classes to every rendered <table>/<th>.

    Fixes:
    - the config-validation loop variable shadowed the 'config' parameter;
    - the if/else in on_post_page had two byte-identical branches (dead
      duplication) and was collapsed;
    - removed a leftover debug print of config options.
    """

    config_scheme = (
        ('bootstrap-theme', config_options.Type(str, default='')),
    )

    def __init__(self):
        self.enabled = True
        self.total_time = 0
        self.theme = ""

    def on_config(self, config, **kwargs):
        """Validate required options and cache the theme class name."""
        for option in ['bootstrap-theme']:
            # Abort the build when a required option is missing/empty.
            if not self.config[option]:
                sys.exit("Config '{}' is missing for {} plugin.".format(
                    option, "BtTable"))
        self.theme = self.config['bootstrap-theme']

    def on_post_page(self, output_content, page, config):
        """Rewrite plain table markup with Bootstrap classes."""
        output_content = re.sub(
            r"<table>",
            "<table class=\" table " + self.theme + "\"" + ">",
            output_content)
        output_content = re.sub(r"<th>", "<th scope=\"col\">", output_content)
        return output_content
def test_deprecated_option_with_invalid_type(self):
    """A Deprecated option delegates validation to its wrapped option_type."""
    option = config_options.Deprecated(option_type=config_options.Type(list))
    conf = {'d': 'string'}

    # Using the deprecated key must produce exactly one warning.
    option.pre_validation({'d': 'value'}, 'd')
    self.assertEqual(len(option.warnings), 1)

    # The wrapped Type(list) must reject a string value.
    with self.assertRaises(config_options.ValidationError):
        option.validate(conf['d'])
class PdfWithJS(BasePlugin):
    """Render every built page to PDF and link the result from the page."""

    config_scheme = (('enable', config_options.Type(bool, default=True)), )

    def __init__(self):
        # Printer collects pages and produces the PDFs at the end.
        self.printer = Printer()

    def on_config(self, config, **kwargs):
        self.enabled = self.config['enable']
        return config

    def on_nav(self, nav, config, files):
        return nav

    def on_post_page(self, output_content, page, config, **kwargs):
        """Register the page for printing and inject the download link."""
        if not self.enabled:
            return output_content
        paths = self.printer.add_page(page, config)
        return self.printer.add_download_link(output_content, paths)

    def on_post_build(self, config):
        """Produce all collected PDFs once the site is built."""
        if self.enabled:
            self.printer.print_pages()
def test_length(self):
    """Type(length=N) accepts exact-length values and rejects longer ones."""
    option = config_options.Type(str, length=7)
    self.assertEqual(option.validate("Testing"), "Testing")
    with self.assertRaises(config_options.ValidationError):
        option.validate("Testing Long")
def test_length(self):
    """Type(length=N) validates string length (legacy string_types variant)."""
    option = config_options.Type(utils.string_types, length=7)
    self.assertEqual(option.validate("Testing"), "Testing")
    with self.assertRaises(config_options.ValidationError):
        option.validate("Testing Long")
class GitRevisionDatePlugin(BasePlugin):
    """Expose each page's last git revision date, optionally rewriting the
    page markdown so templates/macros can use it.

    Fix: the "disabled" notice wrongly said "PDF export is disabled"
    (copy-pasted from another plugin); it now names this plugin.
    """

    config_scheme = (
        ('enabled_if_env', config_options.Type(string_types)),
        ('modify_md', config_options.Type(bool, default=True))
    )

    def __init__(self):
        self.enabled = True
        self.util = Util()

    def on_config(self, config):
        # Optionally gate the plugin behind an environment variable.
        env_name = self.config['enabled_if_env']
        if env_name:
            self.enabled = environ.get(env_name) == '1'
            if not self.enabled:
                print('git-revision-date plugin is disabled '
                      '(set environment variable %s to 1 to enable)' % env_name)
        return

    def on_page_markdown(self, markdown, page, config, files):
        """Record the revision date and inject it into the markdown."""
        if not self.enabled:
            return markdown

        revision_date = self.util.get_revision_date_for_file(page.file.abs_src_path)
        if not revision_date:
            # Fall back to today's date for files with no git history.
            from datetime import datetime
            revision_date = datetime.now().date()
            print('WARNING - %s has no git logs, revision date defaulting to today\'s date'
                  % page.file.src_path)
        page.meta['revision_date'] = revision_date

        if not self.config['modify_md']:
            return markdown

        if 'macros' in config['plugins']:
            # macros must run after this plugin so it can see the variable.
            keys = list(config['plugins'].keys())
            vals = list(config['plugins'].values())
            if keys.index('macros') > vals.index(self):
                new_markdown = '{{% set git_revision_date = \'{}\' %}}\n'.format(revision_date) + markdown
                return new_markdown
            print('WARNING - macros plugin must be placed AFTER the '
                  'git-revision-date plugin. Skipping markdown modifications')
            return markdown

        # No macros plugin: render the page markdown as a Jinja template.
        md_template = Template(markdown)
        return md_template.render({'git_revision_date': revision_date})
class RedirectPlugin(BasePlugin):
    """Generate HTML stub pages that redirect old URLs to their new homes.

    Fix: logging.Logger.warn is a deprecated alias; all call sites now use
    logging.Logger.warning.
    """

    # Any options that this plugin supplies should go here.
    config_scheme = (
        ('redirect_maps', config_options.Type(dict, default={})),  # note the trailing comma
    )

    # Build a list of redirects on file generation
    def on_files(self, files, config, **kwargs):
        self.redirects = self.config.get('redirect_maps', {})

        # SHIM! Produce a warning if the old root-level 'redirects' config is present
        if config.get('redirects'):
            log.warning(
                "The root-level 'redirects:' setting is not valid and has been changed in version 1.0! "
                "The plugin-level 'redirect-map' must be used instead. See https://git.io/fjdBN"
            )

        # Validate user-provided redirect "old files"
        for page_old in self.redirects.keys():
            if not utils.is_markdown_file(page_old):
                log.warning(
                    "redirects plugin: '%s' is not a valid markdown file!",
                    page_old)

        # Build a dict of known document pages to validate against later
        self.doc_pages = {}
        for page in files.documentation_pages():  # object type: mkdocs.structure.files.File
            self.doc_pages[page.src_path.replace('\\', '/')] = page

    # Create HTML files for redirects after site dir has been built
    def on_post_build(self, config, **kwargs):
        # Determine if 'use_directory_urls' is set
        use_directory_urls = config.get('use_directory_urls')

        # Walk through the redirect map and write their HTML files
        for page_old, page_new in self.redirects.items():
            # External redirect targets are easy, just use it as the target path
            if page_new.lower().startswith(('http://', 'https://')):
                dest_path = page_new
            elif page_new in self.doc_pages:
                dest_path = get_relative_html_path(page_old, page_new,
                                                   use_directory_urls)
            # If the redirect target isn't external or a valid internal page, throw an error
            # Note: we warn here specifically; mkdocs treats warnings specially when in strict mode
            else:
                log.warning("Redirect target '%s' does not exist!", page_new)
                continue

            # DO IT!
            write_html(config['site_dir'],
                       get_html_path(page_old, use_directory_urls),
                       dest_path)
class YourPlugin(BasePlugin):
    """Skeleton plugin: implements every MkDocs event hook as a pass-through."""

    config_scheme = (('param',
                      config_options.Type(mkdocs_utils.string_types,
                                          default='')), )

    def __init__(self):
        self.enabled = True
        self.total_time = 0

    def on_serve(self, server):
        return server

    def on_pre_build(self, config):
        return

    def on_files(self, files, config):
        return files

    def on_nav(self, nav, config, files):
        return nav

    def on_env(self, env, config, site_nav):
        return env

    def on_config(self, config):
        return config

    def on_post_build(self, config):
        return

    def on_pre_template(self, template, template_name, config):
        return template

    def on_template_context(self, context, template_name, config):
        return context

    def on_post_template(self, output_content, template_name, config):
        return output_content

    def on_pre_page(self, page, config, site_nav):
        return page

    def on_page_read_source(self, page, config):
        # NOTE(review): returning "" (not None) replaces every page's source
        # with an empty string; a real plugin should return None here unless
        # it intends to supply the source itself — confirm before reuse.
        return ""

    def on_page_markdown(self, markdown, page, config, site_nav):
        return markdown

    def on_page_content(self, html, page, config, site_nav):
        return html

    def on_page_context(self, context, page, config, nav):
        return context

    def on_post_page(self, output_content, page, config):
        return output_content
class BibTexPlugin(BasePlugin):
    """
    Allows the use of bibtex in markdown content for MKDocs.

    Options:
        bib_file (string): path to a single bibtex file for entries,
                           relative to mkdocs.yml.
        csl_file (string, optional): path to a CLS file, relative to mkdocs.yml.
        pandoc_output_format (string, optional): pandoc output format;
                           defaults to "markdown_strict".
    """

    config_scheme = [
        ("bib_file", config_options.Type(str, required=True)),  # TODO: multiple files.
        ("csl_file", config_options.Type(str, required=False)),
        ("pandoc_output_format", config_options.Type(str, required=False)),
    ]

    def on_config(self, config):
        """Get path on load of config."""
        config_path = os.path.dirname(config.config_file_path)
        self.csl_path = get_path(self.config.get("csl_file", None), config_path)
        self.bib_path = get_path(self.config["bib_file"], config_path)
        # Bug fix: MkDocs stores unset options as None, so the key is always
        # present and dict.get's default never applied; use 'or' so the
        # documented "markdown_strict" fallback actually takes effect.
        self.pandoc_output_format = (
            self.config.get("pandoc_output_format") or "markdown_strict")
        return config

    def on_page_markdown(self, markdown, page, config, files):
        """Run the page markdown through pandoc with citation processing."""
        to = self.pandoc_output_format  # "markdown_strict", "gfm", "markdown-citations".
        input_format = "md"
        extra_args = []

        # Add bibtex files.
        # TODO: multiple bib files. Pandoc supports multiple "--bibliography"
        # args, but I don't know yet how to get a list from the config.
        extra_args.extend(["--bibliography", self.bib_path])

        # Add CSL files.
        if self.csl_path is not None:
            extra_args.extend(["--csl", self.csl_path])

        # Call Pandoc.
        markdown = pypandoc.convert_text(markdown, to, input_format, extra_args)
        return str(markdown)
class Mkpdfs(BasePlugin):
    """Combine all rendered pages into a single PDF document."""

    config_scheme = (
        ('design', config_options.Type(str, default=None)),
        ('toc_title', config_options.Type(str, default="Table of Contents")),
        ('company', config_options.Type(str, default=None)),
        ('author', config_options.Type(str, default=None)),
        ('toc_position', config_options.Type(str, default="pre")),
        ('pdf_links', config_options.Type(bool, default=True)),
        ('output_path', config_options.Type(str, default="pdf/combined.pdf")),
        ('toc_level', config_options.Type(int, default=1)),
    )

    def __init__(self):
        # Generator accumulates articles and writes the combined PDF.
        self.generator = Generator()

    def on_serve(self, server, config, **kwargs):
        # TODO: Implement watcher when the user is performing design
        # print(server.watcher.__dict__)
        # # builder = build(config, True, False)
        # # server.watch(os.path.dirname(self.design), builder)
        return server

    def on_config(self, config, **kwargs):
        # Fall back to the default output path when the option is falsy.
        self.config['output_path'] = os.path.join(
            "pdf", "combined.pdf"
        ) if not self.config['output_path'] else self.config['output_path']
        self.generator.set_config(self.config, config)
        return config

    def on_nav(self, nav, config, **kwargs):
        # Hand the site navigation to the generator so articles keep order.
        self.generator.add_nav(nav)
        return nav

    def on_post_page(self, output_content, page, config, **kwargs):
        """Collect the rendered page; optionally inject a PDF link."""
        # Older MkDocs versions expose different attribute names on Page.
        try:
            abs_dest_path = page.file.abs_dest_path
            src_path = page.file.src_path
        except AttributeError:
            abs_dest_path = page.abs_output_path
            src_path = page.input_path
        path = os.path.dirname(abs_dest_path)
        os.makedirs(path, exist_ok=True)
        filename = os.path.splitext(os.path.basename(src_path))[0]
        base_url = urls.path2url(os.path.join(path, filename))
        pdf_url = self.generator.add_article(output_content, page, base_url)
        if self.config['pdf_links'] and pdf_url:
            output_content = modify_html(output_content, pdf_url)
        return output_content

    def on_post_build(self, config):
        # All pages collected; render the combined PDF now.
        self.generator.write()
class MikePlugin(BasePlugin):
    """Integrate mike's multi-version docs support into an MkDocs build."""

    config_scheme = (
        ('version_selector', config_options.Type(bool, default=True)),
        ('canonical_version', config_options.Type((str, type(None)),
                                                  default=None)),
        ('css_dir', config_options.Type(str, default='css')),
        ('javascript_dir', config_options.Type(str, default='js')),
    )

    def on_config(self, config):
        # When deploying a specific version, point site_url at that version
        # (canonical_version, when set, takes precedence over the env value).
        version = os.environ.get(docs_version_var)
        if version and config.get('site_url'):
            if self.config['canonical_version'] is not None:
                version = self.config['canonical_version']
            config['site_url'] = urljoin(config['site_url'], version)

    def on_files(self, files, config):
        """Inject the version-selector CSS/JS shipped with the theme support."""
        if not self.config['version_selector']:
            return files
        try:
            theme_dir = get_theme_dir(config['theme'].name)
        except ValueError:
            # Theme without mike support: nothing to inject.
            return files

        for path, prop in [('css', 'css'), ('js', 'javascript')]:
            cfg_value = self.config[prop + '_dir']
            srcdir = os.path.join(theme_dir, path)
            destdir = os.path.join(config['site_dir'], cfg_value)

            extra_kind = 'extra_' + prop
            norm_extras = [os.path.normpath(i) for i in config[extra_kind]]
            for f in os.listdir(srcdir):
                relative_dest = os.path.join(cfg_value, f)
                # Refuse to silently clobber a user-supplied extra file.
                if relative_dest in norm_extras:
                    raise PluginError(
                        '{!r} is already included in {!r}'.format(
                            relative_dest, extra_kind))

                files.append(File(f, srcdir, destdir, False))
                config[extra_kind].append(relative_dest)
        return files