def build_subset(app, config):
    """Restrict the Sphinx build to the documents listed (comma-separated)
    in ``config.docs_to_build``, excluding everything else.

    Also selects a master document for Sphinx: a document whose name
    contains ``index`` if one exists, otherwise the first doc in the subset.

    :raises ValueError: if the subset is empty.
    """
    # Convert to list of docs to build
    docs_to_build = config.docs_to_build.split(',')
    # get_matching_files() yields files NOT matched by the exclude matchers,
    # so matching against docs_to_build yields the complement: every
    # document that was not requested when build_docs was called.
    exclude_docs = [filename for filename in
                    get_matching_files(app.srcdir, compile_matchers(docs_to_build))]
    app.config.exclude_patterns.extend(exclude_docs)
    # Get all docs that will be built (everything not excluded).
    # NOTE(review): the original computed this list twice with identical
    # expressions; the first computation was a dead store and is removed.
    docs = [filename for filename in
            get_matching_files(app.srcdir, compile_matchers(exclude_docs))]
    if not docs:
        raise ValueError("No documents to build")
    print("Building a subset of the documents: {}".format(docs))
    # Sphinx requires a master document, if there is a document name 'index'
    # then we pick that
    index_docs = [doc for doc in docs if 'index' in doc]
    if index_docs:
        config.master_doc = index_docs[0].replace('.rst', '')
    else:
        config.master_doc = docs[0].replace('.rst', '')
def find_files(self, config, builder):
    # type: (Config, Builder) -> None
    """Scan the source directory and record every readable source
    document in self.found_docs.

    When the builder uses message catalogs, each found document is also
    registered as depending on its catalog .mo files, so an updated
    catalog forces the document to be re-read.
    """
    exclude = (config.exclude_patterns[:] +
               config.templates_path +
               builder.get_asset_paths() +
               ['**/_sources', '.#*', '**/.#*', '*.lproj/**'])
    matchers = compile_matchers(exclude)

    self.found_docs = set()
    for docname in get_matching_docs(self.srcdir, config.source_suffix,  # type: ignore
                                     exclude_matchers=matchers):
        if not os.access(self.doc2path(docname), os.R_OK):
            logger.warning("document not readable. Ignored.", location=docname)
            continue
        self.found_docs.add(docname)

    # Translated messages are currently applied during the reading phase,
    # so applying an updated message catalog requires re-processing from
    # the reading phase.  Registering the .mo files as dependencies of the
    # doc source makes that re-read happen when a catalog changes.
    # (Long term, i18n should move to the writing phase and this goes away.)
    if builder.use_message_catalog:
        # add catalog mo file dependency
        for docname in self.found_docs:
            for filename in find_catalog_files(docname,
                                               self.srcdir,
                                               self.config.locale_dirs,
                                               self.config.language,
                                               self.config.gettext_compact):
                self.dependencies[docname].add(filename)
def find_files(self, config):
    """Scan the source directory, record every readable source document
    in self.found_docs, and register catalog .mo file dependencies for
    each document found.
    """
    patterns = config.exclude_patterns[:]
    patterns += config.templates_path
    patterns += config.html_extra_path
    patterns += ['**/_sources', '.#*', '**/.#*', '*.lproj/**']
    matchers = compile_matchers(patterns)

    self.found_docs = set()
    for docname in get_matching_docs(self.srcdir, config.source_suffix,
                                     exclude_matchers=matchers):
        if os.access(self.doc2path(docname), os.R_OK):
            self.found_docs.add(docname)
        else:
            self.warn(docname, "document not readable. Ignored.")

    # add catalog mo file dependency
    for docname in self.found_docs:
        for filename in find_catalog_files(docname,
                                           self.srcdir,
                                           self.config.locale_dirs,
                                           self.config.language,
                                           self.config.gettext_compact):
            self.dependencies.setdefault(docname, set()).add(filename)
def _get_statics(self, confignode, config):
    """Returns static files, filtered through exclude_patterns.

    Walks each entry of ``html_static_path`` (relative to the directory
    containing conf.py) and collects File nodes for every non-excluded
    file; excluded directories are pruned from the walk entirely.
    """
    statics = []
    matchers = compile_matchers(config.get('exclude_patterns', []))
    for path in config.get('html_static_path', []):
        # Check _get_templates() why we use this construction.
        p = confignode.File('conf.py').rfile().dir.srcnode().get_abspath()
        p = os.path.join(p, os_path(path))
        if os.path.isfile(p):
            statics.append(confignode.File(path))
        elif os.path.isdir(p):
            node = confignode.Dir(path)
            for root, dirs, files in os.walk(p):
                relpath = os.path.relpath(root, p)
                # Prune excluded directories in place so os.walk does not
                # descend into them; iterate a filtered copy because
                # `dirs` is mutated while looping.
                for entry in [d for d in dirs
                              if self._anymatch(matchers,
                                                sphinx_path(os.path.join(relpath, d)))]:
                    dirs.remove(entry)
                # Collect the non-excluded files of this directory.
                statics += [node.File(os_path(f))
                            for f in self._exclude(matchers,
                                                   [sphinx_path(os.path.join(relpath, name))
                                                    for name in files])]
    return statics
def discover(self, exclude_paths: List[str] = []) -> Set[str]:
    """Find all document files in the source directory and put them in
    :attr:`docnames`.
    """
    self.docnames = set()
    excludes = compile_matchers(exclude_paths + EXCLUDE_PATHS)
    for filename in get_matching_files(self.srcdir, excludes):  # type: ignore
        docname = self.path2doc(filename)
        if not docname:
            continue
        if docname in self.docnames:
            # Another file maps onto an already-registered docname:
            # warn (once) and keep whichever file was picked up first.
            pattern = os.path.join(self.srcdir, docname) + '.*'
            files = [relpath(f, self.srcdir) for f in glob(pattern)]
            logger.warning(__('multiple files found for the document "%s": %r\n'
                              'Use %r for the build.'),
                           docname, files, self.doc2path(docname), once=True)
        elif os.access(os.path.join(self.srcdir, filename), os.R_OK):
            self.docnames.add(docname)
        else:
            logger.warning(__("document not readable. Ignored."),
                           location=docname)
    return self.docnames
def copy_static_files(self):
    """Populate <outdir>/_static: the pygments stylesheet, the
    translations JavaScript, theme-supplied static files, user-supplied
    static files (filtered by exclude patterns), and the configured
    logo/favicon.
    """
    # copy static files
    self.info(bold('copying static files... '), nonl=True)
    ensuredir(path.join(self.outdir, '_static'))
    # first, create pygments style file
    f = open(path.join(self.outdir, '_static', 'pygments.css'), 'w')
    f.write(self.highlighter.get_stylesheet())
    f.close()
    # then, copy translations JavaScript file
    if self.config.language is not None:
        jsfile = self._get_translations_js()
        if jsfile:
            copyfile(jsfile, path.join(self.outdir, '_static',
                                       'translations.js'))
    # add context items for search function used in searchtools.js_t
    ctx = self.globalcontext.copy()
    ctx.update(self.indexer.context_for_searchtool())
    # then, copy over theme-supplied static files
    if self.theme:
        # dirchain is reversed so entries later in the chain are copied
        # last -- presumably letting derived themes overwrite files of
        # their base theme; confirm against copy_static_entry semantics.
        themeentries = [path.join(themepath, 'static')
                        for themepath in self.theme.get_dirchain()[::-1]]
        for entry in themeentries:
            copy_static_entry(entry, path.join(self.outdir, '_static'),
                              self, ctx)
    # then, copy over all user-supplied static files
    staticentries = [path.join(self.confdir, spath)
                     for spath in self.config.html_static_path]
    matchers = compile_matchers(
        self.config.exclude_patterns +
        ['**/' + d for d in self.config.exclude_dirnames])
    for entry in staticentries:
        if not path.exists(entry):
            self.warn('html_static_path entry %r does not exist' % entry)
            continue
        copy_static_entry(entry, path.join(self.outdir, '_static'), self,
                          ctx, exclude_matchers=matchers)
    # copy logo and favicon files if not already in static path
    if self.config.html_logo:
        logobase = path.basename(self.config.html_logo)
        logotarget = path.join(self.outdir, '_static', logobase)
        if not path.isfile(logotarget):
            copyfile(path.join(self.confdir, self.config.html_logo),
                     logotarget)
    if self.config.html_favicon:
        iconbase = path.basename(self.config.html_favicon)
        icontarget = path.join(self.outdir, '_static', iconbase)
        if not path.isfile(icontarget):
            copyfile(path.join(self.confdir, self.config.html_favicon),
                     icontarget)
    self.info('done')
def copy_static_files(self):
    """Populate <outdir>/_static: pygments stylesheet, translations JS,
    theme static files, user static files (filtered by exclude patterns),
    and the logo/favicon -- warning if logo/favicon source files do not
    exist instead of failing on copy.
    """
    # copy static files
    self.info(bold('copying static files... '), nonl=True)
    ensuredir(path.join(self.outdir, '_static'))
    # first, create pygments style file
    f = open(path.join(self.outdir, '_static', 'pygments.css'), 'w')
    f.write(self.highlighter.get_stylesheet())
    f.close()
    # then, copy translations JavaScript file
    if self.config.language is not None:
        jsfile = self._get_translations_js()
        if jsfile:
            copyfile(jsfile, path.join(self.outdir, '_static',
                                       'translations.js'))
    # add context items for search function used in searchtools.js_t
    ctx = self.globalcontext.copy()
    ctx.update(self.indexer.context_for_searchtool())
    # then, copy over theme-supplied static files
    if self.theme:
        # dirchain reversed -- presumably so later (derived) theme dirs
        # overwrite earlier (base) ones; confirm with copy_static_entry.
        themeentries = [path.join(themepath, 'static')
                        for themepath in self.theme.get_dirchain()[::-1]]
        for entry in themeentries:
            copy_static_entry(entry, path.join(self.outdir, '_static'),
                              self, ctx)
    # then, copy over all user-supplied static files
    staticentries = [path.join(self.confdir, spath)
                     for spath in self.config.html_static_path]
    matchers = compile_matchers(
        self.config.exclude_patterns +
        ['**/' + d for d in self.config.exclude_dirnames]
    )
    for entry in staticentries:
        if not path.exists(entry):
            self.warn('html_static_path entry %r does not exist' % entry)
            continue
        copy_static_entry(entry, path.join(self.outdir, '_static'),
                          self, ctx, exclude_matchers=matchers)
    # copy logo and favicon files if not already in static path
    if self.config.html_logo:
        logobase = path.basename(self.config.html_logo)
        logotarget = path.join(self.outdir, '_static', logobase)
        # warn on a missing source file rather than raising from copyfile
        if not path.isfile(path.join(self.confdir, self.config.html_logo)):
            self.warn('logo file %r does not exist' % self.config.html_logo)
        elif not path.isfile(logotarget):
            copyfile(path.join(self.confdir, self.config.html_logo),
                     logotarget)
    if self.config.html_favicon:
        iconbase = path.basename(self.config.html_favicon)
        icontarget = path.join(self.outdir, '_static', iconbase)
        # warn on a missing source file rather than raising from copyfile
        if not path.isfile(path.join(self.confdir,
                                     self.config.html_favicon)):
            self.warn('favicon file %r does not exist' %
                      self.config.html_favicon)
        elif not path.isfile(icontarget):
            copyfile(path.join(self.confdir, self.config.html_favicon),
                     icontarget)
    self.info('done')
def copy_extra_files(self):
    """Copy every html_extra_path entry into the output directory,
    honouring exclude_patterns; warn about entries that do not exist."""
    # copy html_extra_path files
    self.info(bold("copying extra files... "), nonl=True)
    matchers = compile_matchers(self.config.exclude_patterns)
    for epath in self.config.html_extra_path:
        entry = path.join(self.confdir, epath)
        if path.exists(entry):
            copy_extra_entry(entry, self.outdir, matchers)
        else:
            self.warn("html_extra_path entry %r does not exist" % entry)
    self.info("done")
def copy_extra_files(self):
    """Copy the files listed in html_extra_path into the output
    directory, filtered through exclude_patterns."""
    # copy html_extra_path files
    self.info(bold('copying extra files... '), nonl=True)
    matchers = compile_matchers(self.config.exclude_patterns)
    entries = (path.join(self.confdir, p)
               for p in self.config.html_extra_path)
    for entry in entries:
        if not path.exists(entry):
            self.warn('html_extra_path entry %r does not exist' % entry)
            continue
        copy_extra_entry(entry, self.outdir, matchers)
    self.info('done')
def find_files(self, config):
    """
    Find all source files in the source dir and put them in
    self.found_docs.
    """
    # Build the exclusion list incrementally: explicit patterns,
    # excluded trees, unused docs (both suffixes), excluded dirnames
    # anywhere in the tree, and the generated _sources directories.
    patterns = list(config.exclude_patterns)
    patterns.extend(config.exclude_trees)
    patterns.extend(d + ".rst" for d in config.unused_docs)
    patterns.extend(d + ".yay" for d in config.unused_docs)
    patterns.extend('**/' + d for d in config.exclude_dirnames)
    patterns.append('**/_sources')
    matchers = compile_matchers(patterns)
    self.found_docs = set(get_matching_docs(self.srcdir,
                                            exclude_matchers=matchers))
def copy_localized_files(self):
    """Copy the '<applehelp_locale>.lproj' directory from the config dir
    into the output directory, if it exists, filtered through
    exclude_patterns."""
    source_dir = path.join(self.confdir,
                           self.config.applehelp_locale + '.lproj')
    if not path.isdir(source_dir):
        return
    self.info(bold('copying localized files... '), nonl=True)
    matchers = compile_matchers(self.config.exclude_patterns)
    copy_static_entry(source_dir, self.outdir, self,
                      self.globalcontext.copy(),
                      exclude_matchers=matchers)
    self.info('done')
def discover(self, exclude_paths=[]):
    # type: (List[str]) -> Set[str]
    """Find all document files in the source directory and put them in
    :attr:`docnames`.
    """
    self.docnames = set()
    excludes = compile_matchers(exclude_paths + EXCLUDE_PATHS)
    for filename in get_matching_files(self.srcdir, excludes):  # type: ignore
        docname = self.path2doc(filename)
        if not docname:
            continue
        if not os.access(os.path.join(self.srcdir, filename), os.R_OK):
            logger.warning(__("document not readable. Ignored."),
                           location=docname)
            continue
        self.docnames.add(docname)
    return self.docnames
def copy_static_files(self):
    """Copy every html_static_path entry into <outdir>/_static,
    filtered through exclude_patterns."""
    # copy static files
    self.info(bold("copying static files... "), nonl=True)
    static_dir = path.join(self.outdir, "_static")
    ensuredir(static_dir)
    ctx = self.globalcontext.copy()
    matchers = compile_matchers(self.config.exclude_patterns)
    # copy over all user-supplied static files
    for spath in self.config.html_static_path:
        entry = path.join(self.confdir, spath)
        if not path.exists(entry):
            self.warn("html_static_path entry %r does not exist" % entry)
            continue
        copy_static_entry(entry, static_dir, self, ctx,
                          exclude_matchers=matchers)
    self.info("done")
def _get_matching_docs(dirname, suffixes, exclude_matchers=()):
    """Get all file names (without suffixes) matching a suffix in a
    directory, recursively.

    Exclude files and dirs matching a pattern in *exclude_patterns*.
    """
    suffixpatterns = ['*' + s for s in suffixes]
    # (Added) also honour the CodeChat exclude patterns.
    exclude_matchers += compile_matchers(_config.CodeChat_excludes)
    for filename in get_matching_files(dirname, exclude_matchers):
        for pattern in suffixpatterns:
            if fnmatch.fnmatch(filename, pattern):
                # Strip the suffix: pattern length minus the leading '*'.
                yield filename[:-len(pattern) + 1]
                break
        # (Added) additionally yield supported source files, suffix kept.
        if is_supported_language(filename):
            yield filename
def copy_static_files(self):
    """Populate <outdir>/_static: pygments stylesheet, translations JS
    (located via package/share paths), theme static files, user static
    files (filtered by exclude patterns), and the logo/favicon.
    """
    # copy static files
    self.info(bold("copying static files... "), nonl=True)
    ensuredir(path.join(self.outdir, "_static"))
    # first, create pygments style file
    f = open(path.join(self.outdir, "_static", "pygments.css"), "w")
    f.write(self.highlighter.get_stylesheet())
    f.close()
    # then, copy translations JavaScript file: try the package's own
    # locale dir first, then the installed share location; first hit wins.
    if self.config.language is not None:
        jsfile_list = [
            path.join(package_dir, "locale", self.config.language,
                      "LC_MESSAGES", "sphinx.js"),
            path.join(sys.prefix, "share/sphinx/locale",
                      self.config.language, "sphinx.js"),
        ]
        for jsfile in jsfile_list:
            if path.isfile(jsfile):
                copyfile(jsfile, path.join(self.outdir, "_static",
                                           "translations.js"))
                break
    # then, copy over theme-supplied static files
    if self.theme:
        # dirchain reversed -- presumably so later (derived) theme dirs
        # overwrite earlier (base) ones; confirm with copy_static_entry.
        themeentries = [path.join(themepath, "static")
                        for themepath in self.theme.get_dirchain()[::-1]]
        for entry in themeentries:
            copy_static_entry(entry, path.join(self.outdir, "_static"),
                              self, self.globalcontext)
    # then, copy over all user-supplied static files
    staticentries = [path.join(self.confdir, spath)
                     for spath in self.config.html_static_path]
    matchers = compile_matchers(
        self.config.exclude_patterns +
        ["**/" + d for d in self.config.exclude_dirnames])
    for entry in staticentries:
        if not path.exists(entry):
            self.warn("html_static_path entry %r does not exist" % entry)
            continue
        copy_static_entry(
            entry, path.join(self.outdir, "_static"), self,
            self.globalcontext, exclude_matchers=matchers
        )
    # copy logo and favicon files if not already in static path
    if self.config.html_logo:
        logobase = path.basename(self.config.html_logo)
        logotarget = path.join(self.outdir, "_static", logobase)
        if not path.isfile(logotarget):
            copyfile(path.join(self.confdir, self.config.html_logo),
                     logotarget)
    if self.config.html_favicon:
        iconbase = path.basename(self.config.html_favicon)
        icontarget = path.join(self.outdir, "_static", iconbase)
        if not path.isfile(icontarget):
            copyfile(path.join(self.confdir, self.config.html_favicon),
                     icontarget)
    self.info("done")
def find_all_files(srcdir: str, exclude_patterns: List[str], suffixes=(".rst", )):
    """Adapted from ``sphinx.environment.BuildEnvironment.find_files``

    Return the set of resolved absolute paths of all readable files under
    *srcdir* whose name ends with one of *suffixes*, after applying
    *exclude_patterns* plus Sphinx's default exclude paths.
    """
    from sphinx.project import EXCLUDE_PATHS
    from sphinx.util import get_matching_files
    from sphinx.util.matching import compile_matchers

    # Build the combined pattern list without mutating the caller's list
    # (the original extend()ed exclude_patterns in place).
    excludes = compile_matchers(list(exclude_patterns) + list(EXCLUDE_PATHS))
    docnames = set()
    for filename in get_matching_files(srcdir, excludes):
        if not any(filename.endswith(s) for s in suffixes):
            continue
        fullpath = os.path.join(srcdir, filename)
        if os.access(fullpath, os.R_OK):
            # Resolve relative to srcdir, not the current working directory
            # (the original realpath()ed the bare srcdir-relative name,
            # which only worked when cwd == srcdir).
            docnames.add(os.path.realpath(fullpath))
    return docnames
def copy_static_files(self):
    """Copy user-supplied static files (html_static_path) into
    <outdir>/_static, filtered through exclude_patterns."""
    # copy static files
    self.info(bold('copying static files... '), nonl=True)
    target = path.join(self.outdir, '_static')
    ensuredir(target)
    ctx = self.globalcontext.copy()
    matchers = compile_matchers(self.config.exclude_patterns)
    # copy over all user-supplied static files
    for spath in self.config.html_static_path:
        entry = path.join(self.confdir, spath)
        if path.exists(entry):
            copy_static_entry(entry, target, self, ctx,
                              exclude_matchers=matchers)
        else:
            self.warn('html_static_path entry %r does not exist' % entry)
    self.info('done')
def _get_sources(self, srcnode, config):
    """Returns all source files in the project filtered through
    exclude_patterns.

    Only files ending in the configured ``source_suffix`` (default
    '.rst') are considered; excluded directories are pruned from the
    walk entirely.
    """
    suffix = config.get('source_suffix', '.rst')
    matchers = compile_matchers(config.get('exclude_patterns', []))
    srcfiles = []
    scannode = srcnode.srcnode().rdir()
    for root, dirs, files in os.walk(scannode.get_abspath()):
        relpath = os.path.relpath(root, scannode.get_abspath())
        # Prune excluded directories in place so os.walk skips them;
        # iterate a filtered copy because `dirs` is mutated in the loop.
        for entry in [d for d in dirs
                      if self._anymatch(matchers,
                                        sphinx_path(os.path.join(relpath, d)))]:
            dirs.remove(entry)
        # Keep only source-suffix files that survive the exclusions.
        srcfiles += [srcnode.File(os_path(f))
                     for f in self._exclude(matchers,
                                            [sphinx_path(os.path.join(relpath, name))
                                             for name in files
                                             if name.endswith(suffix)])]
    return srcfiles
def _get_matching_docs(dirname, suffixes, exclude_matchers=()):
    """Get all file names (without suffixes) matching a suffix in a
    directory, recursively.

    Exclude files and dirs matching a pattern in *exclude_patterns*.
    """
    suffixpatterns = ['*' + s for s in suffixes]
    # The following two lines were added.
    # Globs for every source language CodeChat can render, plus any
    # user-configured lexer globs.
    source_suffixpatterns = (
        SUPPORTED_GLOBS | set(_config.CodeChat_lexer_for_glob.keys())
    )
    exclude_matchers += compile_matchers(_config.CodeChat_excludes)
    for filename in get_matching_files(dirname, exclude_matchers):
        for suffixpattern in suffixpatterns:
            if fnmatch.fnmatch(filename, suffixpattern):
                # Strip the suffix: pattern length minus the leading '*'.
                yield filename[:-len(suffixpattern)+1]
                break
        # The following code was added.
        # Yield supported source files with their suffix kept.
        # NOTE(review): a filename matching both a suffixpattern and a
        # source glob would be yielded twice -- confirm that the two
        # pattern sets are disjoint.
        for source_suffixpattern in source_suffixpatterns:
            if fnmatch.fnmatch(filename, source_suffixpattern):
                yield filename
                break
def find_files(self, config, builder):
    # type: (Config, Builder) -> None
    """Scan the source directory and record every readable source
    document in self.found_docs; register catalog .mo dependencies when
    the builder uses message catalogs.

    Raises DocumentError if the scan fails with an OS-level error.
    """
    try:
        patterns = (config.exclude_patterns[:] +
                    config.templates_path +
                    builder.get_asset_paths() +
                    ['**/_sources', '.#*', '**/.#*', '*.lproj/**'])
        matchers = compile_matchers(patterns)
        self.found_docs = set()
        for docname in get_matching_docs(self.srcdir, config.source_suffix,  # type: ignore
                                         exclude_matchers=matchers):
            if not os.access(self.doc2path(docname), os.R_OK):
                logger.warning(__("document not readable. Ignored."),
                               location=docname)
                continue
            self.found_docs.add(docname)

        # Messages are translated during the reading phase, so a document
        # must be re-read whenever its message catalog changes; making the
        # .mo files dependencies of the doc source achieves that.  (Long
        # term, i18n should move to the writing phase and this goes away.)
        if builder.use_message_catalog:
            # add catalog mo file dependency
            for docname in self.found_docs:
                for filename in find_catalog_files(docname,
                                                   self.srcdir,
                                                   self.config.locale_dirs,
                                                   self.config.language,
                                                   self.config.gettext_compact):
                    self.dependencies[docname].add(filename)
    except EnvironmentError as exc:
        raise DocumentError(__('Failed to scan documents in %s: %r') %
                            (self.srcdir, exc))
def _get_statics(self, confignode, config):
    """Returns static files, filtered through exclude_patterns.

    Each ``html_static_path`` entry (relative to the conf.py directory)
    is either a single file or a directory that is walked recursively,
    with excluded directories pruned from the walk.
    """
    statics = []
    matchers = compile_matchers(config.get('exclude_patterns', []))
    for path in config.get('html_static_path', []):
        # Check _get_templates() why we use this construction.
        p = confignode.File('conf.py').rfile().dir.srcnode().get_abspath()
        p = os.path.join(p, os_path(path))
        if os.path.isfile(p):
            statics.append(confignode.File(path))
        elif os.path.isdir(p):
            node = confignode.Dir(path)
            for root, dirs, files in os.walk(p):
                relpath = os.path.relpath(root, p)
                # Prune excluded directories in place so os.walk skips
                # them; iterate a filtered copy since `dirs` is mutated.
                for entry in [d for d in dirs
                              if self._anymatch(matchers,
                                                sphinx_path(os.path.join(relpath, d)))]:
                    dirs.remove(entry)
                # Collect the non-excluded files of this directory.
                statics += [node.File(os_path(f))
                            for f in self._exclude(matchers,
                                                   [sphinx_path(os.path.join(relpath, name))
                                                    for name in files])]
    return statics
def copy_static_files(self):
    """Populate <outdir>/_static: pygments stylesheet, translations JS
    (package locale dir, then installed share dir), theme static files,
    user static files (filtered by exclude patterns), and logo/favicon.
    """
    # copy static files
    self.info(bold('copying static files... '), nonl=True)
    ensuredir(path.join(self.outdir, '_static'))
    # first, create pygments style file
    f = open(path.join(self.outdir, '_static', 'pygments.css'), 'w')
    f.write(self.highlighter.get_stylesheet())
    f.close()
    # then, copy translations JavaScript file: first existing candidate wins
    if self.config.language is not None:
        jsfile_list = [
            path.join(package_dir, 'locale', self.config.language,
                      'LC_MESSAGES', 'sphinx.js'),
            path.join(sys.prefix, 'share/sphinx/locale',
                      self.config.language, 'sphinx.js')
        ]
        for jsfile in jsfile_list:
            if path.isfile(jsfile):
                copyfile(jsfile, path.join(self.outdir, '_static',
                                           'translations.js'))
                break
    # then, copy over theme-supplied static files
    if self.theme:
        # dirchain reversed -- presumably so later (derived) theme dirs
        # overwrite earlier (base) ones; confirm with copy_static_entry.
        themeentries = [path.join(themepath, 'static')
                        for themepath in self.theme.get_dirchain()[::-1]]
        for entry in themeentries:
            copy_static_entry(entry, path.join(self.outdir, '_static'),
                              self, self.globalcontext)
    # then, copy over all user-supplied static files
    staticentries = [path.join(self.confdir, spath)
                     for spath in self.config.html_static_path]
    matchers = compile_matchers(
        self.config.exclude_patterns +
        ['**/' + d for d in self.config.exclude_dirnames])
    for entry in staticentries:
        if not path.exists(entry):
            self.warn('html_static_path entry %r does not exist' % entry)
            continue
        copy_static_entry(entry, path.join(self.outdir, '_static'),
                          self, self.globalcontext,
                          exclude_matchers=matchers)
    # copy logo and favicon files if not already in static path
    if self.config.html_logo:
        logobase = path.basename(self.config.html_logo)
        logotarget = path.join(self.outdir, '_static', logobase)
        if not path.isfile(logotarget):
            copyfile(path.join(self.confdir, self.config.html_logo),
                     logotarget)
    if self.config.html_favicon:
        iconbase = path.basename(self.config.html_favicon)
        icontarget = path.join(self.outdir, '_static', iconbase)
        if not path.isfile(icontarget):
            copyfile(path.join(self.confdir, self.config.html_favicon),
                     icontarget)
    self.info('done')
def test_compile_matchers():
    """Exercise compile_matchers() glob translation: exact names, the
    '*', '**' and '?' wildcards, bracket character classes (including
    the '!' negation and non-classes treated as literals).
    """
    # exact matching
    pat = compile_matchers(['hello.py']).pop()
    assert pat('hello.py')
    assert not pat('hello-py')
    assert not pat('subdir/hello.py')
    # wild card (*) -- matches within one path component only
    pat = compile_matchers(['hello.*']).pop()
    assert pat('hello.py')
    assert pat('hello.rst')
    pat = compile_matchers(['*.py']).pop()
    assert pat('hello.py')
    assert pat('world.py')
    assert not pat('subdir/hello.py')
    # wild card (**) -- matches across path separators
    pat = compile_matchers(['hello.**']).pop()
    assert pat('hello.py')
    assert pat('hello.rst')
    assert pat('hello.py/world.py')
    pat = compile_matchers(['**.py']).pop()
    assert pat('hello.py')
    assert pat('world.py')
    assert pat('subdir/hello.py')
    pat = compile_matchers(['**/hello.py']).pop()
    assert not pat('hello.py')
    assert pat('subdir/hello.py')
    assert pat('subdir/subdir/hello.py')
    # wild card (?) -- exactly one character
    pat = compile_matchers(['hello.?']).pop()
    assert pat('hello.c')
    assert not pat('hello.py')
    # pattern ([...])
    pat = compile_matchers(['hello[12\\].py']).pop()
    assert pat('hello1.py')
    assert pat('hello2.py')
    assert pat('hello\\.py')
    assert not pat('hello3.py')
    pat = compile_matchers(['hello[^12].py']).pop()
    # "^" is not negative identifier
    assert pat('hello1.py')
    assert pat('hello2.py')
    assert pat('hello^.py')
    assert not pat('hello3.py')
    # negative pattern ([!...])
    pat = compile_matchers(['hello[!12].py']).pop()
    assert not pat('hello1.py')
    assert not pat('hello2.py')
    assert not pat('hello/.py')  # negative pattern does not match to "/"
    assert pat('hello3.py')
    # non patterns -- malformed bracket expressions are taken literally
    pat = compile_matchers(['hello[.py']).pop()
    assert pat('hello[.py')
    assert not pat('hello.py')
    pat = compile_matchers(['hello[].py']).pop()
    assert pat('hello[].py')
    assert not pat('hello.py')
    pat = compile_matchers(['hello[!].py']).pop()
    assert pat('hello[!].py')
    assert not pat('hello.py')