Example No. 1
    def write(self, build_docnames, updated_docnames, method='update'):
        # type: (Iterable[str], Sequence[str], str) -> None
        if build_docnames is None or build_docnames == ['__all__']:
            # build_all
            build_docnames = self.env.found_docs
        if method == 'update':
            # build updated ones as well
            docnames = set(build_docnames) | set(updated_docnames)
        else:
            docnames = set(build_docnames)
        logger.debug(__('docnames to write: %s'), ', '.join(sorted(docnames)))

        # add all toctree-containing files that may have changed
        for docname in list(docnames):
            for tocdocname in self.env.files_to_rebuild.get(docname, set()):
                if tocdocname in self.env.found_docs:
                    docnames.add(tocdocname)
        docnames.add(self.config.master_doc)

        with progress_message(__('preparing documents')):
            self.prepare_writing(docnames)

        if self.parallel_ok:
            # number of subprocesses is parallel-1 because the main process
            # is busy loading doctrees and doing write_doc_serialized()
            self._write_parallel(sorted(docnames),
                                 nproc=self.app.parallel - 1)
        else:
            self._write_serial(sorted(docnames))
Example No. 2
    def copy_support_files(self):
        # type: () -> None
        try:
            with progress_message(__('copying Texinfo support files')):
                logger.info('Makefile ', nonl=True)
                copy_asset_file(os.path.join(template_dir, 'Makefile'), self.outdir)
        except OSError as err:
            logger.warning(__("error writing file Makefile: %s"), err)
Example No. 3
    def write(self, *ignored):
        # type: (Any) -> None
        docwriter = LaTeXWriter(self)
        docsettings = OptionParser(
            defaults=self.env.settings,
            components=(docwriter,),
            read_config_files=True).get_default_values()  # type: Any

        self.init_document_data()
        self.write_stylesheet()

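        # each document_data entry is (docname, targetname, title, author,
        # docclass[, toctree_only])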
        for entry in self.document_data:
            docname, targetname, title, author, docclass = entry[:5]
            toctree_only = False
            if len(entry) > 5:
                toctree_only = entry[5]
            destination = SphinxFileOutput(destination_path=path.join(self.outdir, targetname),
                                           encoding='utf-8', overwrite_if_changed=True)
            with progress_message(__("processing %s") % targetname):
                toctrees = self.env.get_doctree(docname).traverse(addnodes.toctree)
                if toctrees:
                    if toctrees[0].get('maxdepth') > 0:
                        tocdepth = toctrees[0].get('maxdepth')
                    else:
                        tocdepth = None
                else:
                    tocdepth = None
                doctree = self.assemble_doctree(
                    docname, toctree_only,
                    appendices=((docclass != 'howto') and self.config.latex_appendices or []))
                doctree['tocdepth'] = tocdepth
                self.apply_transforms(doctree)
                self.post_process_images(doctree)
                self.update_doc_context(title, author)

            with progress_message(__("writing")):
                docsettings.author = author
                docsettings.title = title
                docsettings.contentsname = self.get_contentsname(docname)
                docsettings.docname = docname
                docsettings.docclass = docclass

                doctree.settings = docsettings
                docwriter.write(doctree, destination)
Example No. 4
    def copy_applehelp_icon(self, resources_dir: str) -> None:
        """Copy the icon, if one is supplied."""
        if self.config.applehelp_icon:
            try:
                with progress_message(__('copying icon... ')):
                    applehelp_icon = path.join(self.srcdir, self.config.applehelp_icon)
                    copy_asset_file(applehelp_icon, resources_dir)
            except Exception as err:
                logger.warning(__('cannot copy icon file %r: %s'), applehelp_icon, err)
Example No. 5
    def render_batches(self):
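        # render the pending PlantUML diagrams in fixed-size batches, once per
        # requested image format, reporting progress for each batch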
        pending_keys = sorted(self._pending_keys)
        for fileformat in self.image_formats:
            for i in range(0, len(pending_keys), self.batch_size):
                keys = pending_keys[i:i + self.batch_size]
                with util.progress_message(
                        'rendering plantuml diagrams [%d..%d/%d]' %
                        (i, i + len(keys), len(pending_keys))):
                    self._render_files(keys, fileformat)

        del self._pending_keys[:]
Example No. 6
    def write(self, *ignored) -> None:
        docwriter = LaTeXWriter(self)
        docsettings = OptionParser(
            defaults=self.env.settings,
            components=(docwriter,),
            read_config_files=True).get_default_values()  # type: Any

        self.init_document_data()
        self.write_stylesheet()

        for entry in self.document_data:
            docname, targetname, title, author, docclass = entry[:5]
            toctree_only = False
            if len(entry) > 5:
                toctree_only = entry[5]
            destination = SphinxFileOutput(destination_path=path.join(self.outdir, targetname),
                                           encoding='utf-8', overwrite_if_changed=True)
            with progress_message(__("processing %s") % targetname):
                doctree = self.env.get_doctree(docname)
                toctree = next(iter(doctree.traverse(addnodes.toctree)), None)
                if toctree and toctree.get('maxdepth') > 0:
                    tocdepth = toctree.get('maxdepth')
                else:
                    tocdepth = None

                doctree = self.assemble_doctree(
                    docname, toctree_only,
                    appendices=(self.config.latex_appendices if docclass != 'howto' else []))
                doctree['tocdepth'] = tocdepth
                self.post_process_images(doctree)
                self.update_doc_context(title, author)

            with progress_message(__("writing")):
                docsettings.author = author
                docsettings.title = title
                docsettings.contentsname = self.get_contentsname(docname)
                docsettings.docname = docname
                docsettings.docclass = docclass

                doctree.settings = docsettings
                docwriter.write(doctree, destination)
Example No. 7
    def write(self, *ignored: Any) -> None:
        self.init_document_data()
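        # each document_data entry is (docname, targetname, title, author
        # [, direntry, description, category[, toctree_only]])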
        for entry in self.document_data:
            docname, targetname, title, author = entry[:4]
            targetname += '.texi'
            direntry = description = category = ''
            if len(entry) > 6:
                direntry, description, category = entry[4:7]
            toctree_only = False
            if len(entry) > 7:
                toctree_only = entry[7]
            destination = FileOutput(
                destination_path=path.join(self.outdir, targetname),
                encoding='utf-8')
            with progress_message(__("processing %s") % targetname):
                appendices = self.config.texinfo_appendices or []
                doctree = self.assemble_doctree(docname, toctree_only, appendices=appendices)

            with progress_message(__("writing")):
                self.post_process_images(doctree)
                docwriter = TexinfoWriter(self)
                with warnings.catch_warnings():
                    warnings.filterwarnings('ignore', category=DeprecationWarning)
                    # DeprecationWarning: The frontend.OptionParser class will be replaced
                    # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
                    settings: Any = OptionParser(
                        defaults=self.env.settings,
                        components=(docwriter,),
                        read_config_files=True).get_default_values()
                settings.author = author
                settings.title = title
                settings.texinfo_filename = targetname[:-5] + '.info'
                settings.texinfo_elements = self.config.texinfo_elements
                settings.texinfo_dir_entry = direntry or ''
                settings.texinfo_dir_category = category or ''
                settings.texinfo_dir_description = description or ''
                settings.docname = docname
                doctree.settings = settings
                docwriter.write(doctree, destination)
                self.copy_image_files(targetname[:-5])
Example No. 8
    def dump_search_index(self) -> None:
        with progress_message(__('dumping search index in %s') % self.indexer.label()):
            self.indexer.prune(self.env.all_docs)
            searchindexfn = path.join(self.outdir, self.searchindex_filename)
            # first write to a temporary file, so that if dumping fails,
            # the existing index won't be overwritten
            if self.indexer_dumps_unicode:
                with open(searchindexfn + '.tmp', 'w', encoding='utf-8') as ft:
                    self.indexer.dump(ft, self.indexer_format)
            else:
                with open(searchindexfn + '.tmp', 'wb') as fb:
                    self.indexer.dump(fb, self.indexer_format)
            movefile(searchindexfn + '.tmp', searchindexfn)
Example No. 9
    def write(self, *ignored):
        # type: (Any) -> None
        self.init_document_data()
        for entry in self.document_data:
            docname, targetname, title, author = entry[:4]
            targetname += '.texi'
            direntry = description = category = ''
            if len(entry) > 6:
                direntry, description, category = entry[4:7]
            toctree_only = False
            if len(entry) > 7:
                toctree_only = entry[7]
            destination = FileOutput(
                destination_path=path.join(self.outdir, targetname),
                encoding='utf-8')
            with progress_message(__("processing %s") % targetname):
                appendices = self.config.texinfo_appendices or []
                doctree = self.assemble_doctree(docname,
                                                toctree_only,
                                                appendices=appendices)

            with progress_message(__("writing")):
                self.post_process_images(doctree)
                docwriter = TexinfoWriter(self)
                settings = OptionParser(
                    defaults=self.env.settings,
                    components=(docwriter, ),
                    read_config_files=True).get_default_values()  # type: Any
                settings.author = author
                settings.title = title
                settings.texinfo_filename = targetname[:-5] + '.info'
                settings.texinfo_elements = self.config.texinfo_elements
                settings.texinfo_dir_entry = direntry or ''
                settings.texinfo_dir_category = category or ''
                settings.texinfo_dir_description = description or ''
                settings.docname = docname
                doctree.settings = settings
                docwriter.write(doctree, destination)
                self.copy_image_files(targetname[:-5])
Example No. 10
    def write(self, *ignored):
        # type: (Any) -> None
        self.init_document_data()
        for entry in self.document_data:
            docname, targetname, title, author = entry[:4]
            targetname += '.texi'
            direntry = description = category = ''
            if len(entry) > 6:
                direntry, description, category = entry[4:7]
            toctree_only = False
            if len(entry) > 7:
                toctree_only = entry[7]
            destination = FileOutput(
                destination_path=path.join(self.outdir, targetname),
                encoding='utf-8')
            with progress_message(__("processing %s") % targetname):
                appendices = self.config.texinfo_appendices or []
                doctree = self.assemble_doctree(docname, toctree_only, appendices=appendices)

            with progress_message(__("writing")):
                self.post_process_images(doctree)
                docwriter = TexinfoWriter(self)
                settings = OptionParser(
                    defaults=self.env.settings,
                    components=(docwriter,),
                    read_config_files=True).get_default_values()  # type: Any
                settings.author = author
                settings.title = title
                settings.texinfo_filename = targetname[:-5] + '.info'
                settings.texinfo_elements = self.config.texinfo_elements
                settings.texinfo_dir_entry = direntry or ''
                settings.texinfo_dir_category = category or ''
                settings.texinfo_dir_description = description or ''
                settings.docname = docname
                doctree.settings = settings
                docwriter.write(doctree, destination)
                self.copy_image_files(targetname[:-5])
Example No. 11
    def _init_env(self, freshenv: bool) -> None:
        filename = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
        if freshenv or not os.path.exists(filename):
            self.env = BuildEnvironment()
            self.env.setup(self)
            self.env.find_files(self.config, self.builder)
        else:
            try:
                with progress_message(__('loading pickled environment')):
                    with open(filename, 'rb') as f:
                        self.env = pickle.load(f)
                        self.env.setup(self)
            except Exception as err:
                logger.info(__('failed: %s'), err)
                self._init_env(freshenv=True)
Example No. 12
def test_progress_message(app, status, warning):
    logging.setup(app, status, warning)
    logger = logging.getLogger(__name__)

    # standard case
    with progress_message('testing'):
        logger.info('blah ', nonl=True)

    output = strip_escseq(status.getvalue())
    assert 'testing... blah done\n' in output

    # skipping case
    with progress_message('testing'):
        raise SkipProgressMessage('Reason: %s', 'error')

    output = strip_escseq(status.getvalue())
    assert 'testing... skipped\nReason: error\n' in output

    # error case
    try:
        with progress_message('testing'):
            raise
    except Exception:
        pass

    output = strip_escseq(status.getvalue())
    assert 'testing... failed\n' in output

    # decorator
    @progress_message('testing')
    def func():
        logger.info('in func ', nonl=True)

    func()
    output = strip_escseq(status.getvalue())
    assert 'testing... in func done\n' in output
Example No. 13
    def copy_static_files(self) -> None:
        super().copy_static_files()

        for f in status_iterator(self.builtin_files, 'copying builtin files', 'brown',
                                 len(self.builtin_files), self.app.verbosity,
                                 stringify_func=path.basename):
            shutil.copyfile(f, path.join(self.outdir, '_static', path.basename(f)))

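        # Tailwind support: either purge the stylesheet down to the CSS classes
        # recorded in env.rjs_css_classes during the build, or copy it verbatim.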
        if self.config.revealjs_use_tailwind:
            src = files('sphinx_revealit.res').joinpath('tailwind.css')
            dest = path.join(self.outdir, '_static', 'tailwind.css')

            if self.config.revealjs_purge_tailwind:
                with progress_message('purging tailwind.css'):
                    whitelist = set()

                    if hasattr(self.app.env, 'rjs_css_classes'):
                        whitelist = self.app.env.rjs_css_classes

                    purge = CSSPurge.from_file(src)
                    purge.purge_to_file(whitelist, dest)
            else:
                with progress_message('copying tailwind.css'):
                    shutil.copyfile(src, dest)
Example No. 14
    def copy_static_files(self) -> None:
        try:
            # copy static files
            with progress_message(__('copying static files... ')):
                ensuredir(path.join(self.outdir, '_static'))
                # first, create pygments style file
                with open(path.join(self.outdir, '_static', 'pygments.css'), 'w') as f:
                    f.write(self.highlighter.get_stylesheet())
                # then, copy translations JavaScript file
                if self.config.language is not None:
                    jsfile = self._get_translations_js()
                    if jsfile:
                        copyfile(jsfile, path.join(self.outdir, '_static',
                                                   'translations.js'))

                # copy non-minified stemmer JavaScript file
                if self.indexer is not None:
                    jsfile = self.indexer.get_js_stemmer_rawcode()
                    if jsfile:
                        copyfile(jsfile, path.join(self.outdir, '_static', '_stemmer.js'))

                ctx = self.globalcontext.copy()

                # add context items for search function used in searchtools.js_t
                if self.indexer is not None:
                    ctx.update(self.indexer.context_for_searchtool())

                # then, copy over theme-supplied static files
                if self.theme:
                    for theme_path in self.theme.get_theme_dirs()[::-1]:
                        entry = path.join(theme_path, 'static')
                        copy_asset(entry, path.join(self.outdir, '_static'), excluded=DOTFILES,
                                   context=ctx, renderer=self.templates)
                # then, copy over all user-supplied static files
                excluded = Matcher(self.config.exclude_patterns + ["**/.*"])
                for static_path in self.config.html_static_path:
                    entry = path.join(self.confdir, static_path)
                    copy_asset(entry, path.join(self.outdir, '_static'), excluded,
                               context=ctx, renderer=self.templates)
                # copy logo and favicon files if not already in static path
                if self.config.html_logo:
                    entry = path.join(self.confdir, self.config.html_logo)
                    copy_asset(entry, path.join(self.outdir, '_static'))
                if self.config.html_favicon:
                    entry = path.join(self.confdir, self.config.html_favicon)
                    copy_asset(entry, path.join(self.outdir, '_static'))
        except OSError as err:
            logger.warning(__('cannot copy static file %r'), err)
Example No. 15
    def _init_env(self, freshenv):
        # type: (bool) -> None
        filename = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
        if freshenv or not os.path.exists(filename):
            self.env = BuildEnvironment()
            self.env.setup(self)
            self.env.find_files(self.config, self.builder)
        else:
            try:
                with progress_message(__('loading pickled environment')):
                    with open(filename, 'rb') as f:
                        self.env = pickle.load(f)
                        self.env.setup(self)
            except Exception as err:
                logger.info(__('failed: %s'), err)
                self._init_env(freshenv=True)
Example No. 16
def init_ipalib_api(app, config):
    """Initialize ipalib.api

    1. Parse VERSION.m4
    2. Create fake ipapython.version module
    3. Initialize the API with mocked imports
    """
    defs = parse_version_m4()
    fake_ipaython_version(defs)

    with progress_message("initializing ipalib.api"):
        with autodoc_mock(config.autodoc_mock_imports):
            init_api(
                context=config.ipa_context,
                domain=config.ipa_domain,
                server=config.ipa_server_fqdn,
                in_server=config.ipa_in_server,
            )
Example No. 17
def _stage_and_execute(
    env: BuildEnvironment,
    exec_docnames: List[str],
    path_to_cache: str,
    timeout: Optional[int],
    allow_errors: bool,
    exec_in_temp: bool,
):
    pk_list = []
    cache_base = get_cache(path_to_cache)

    for nb in exec_docnames:
        source_path = env.doc2path(nb)
        with open(source_path, encoding="utf8") as handle:
            # here we pass an iterator, so that only the required lines are read
            converter = get_nb_converter(source_path, env,
                                         (line for line in handle))
        if converter is not None:
            stage_record = cache_base.stage_notebook_file(source_path)
            pk_list.append(stage_record.pk)

    # can leverage parallel execution implemented in jupyter-cache here
    try:
        with progress_message("executing outdated notebooks"):
            execute_staged_nb(
                cache_base,
                pk_list or None,
                timeout=timeout,
                exec_in_temp=exec_in_temp,
                allow_errors=allow_errors,
                env=env,
            )
    except OSError as err:
        # This is a 'fix' for obscure cases, such as if you
        # remove name.ipynb and add name.md (i.e. same name, different extension)
        # and then name.ipynb isn't flagged for removal.
        # Normally we want to keep the stage records available, so that we can retrieve
        # execution tracebacks at the `generate_notebook_outputs` stage,
        # but we need to flush if it becomes 'corrupted'
        LOGGER.error(
            "Execution failed in an unexpected way, clearing staged notebooks: %s",
            err)
        for record in cache_base.list_staged_records():
            cache_base.discard_staged_notebook(record.pk)
Example No. 18
def kconfig_build_resources(app: Sphinx) -> None:
    """Build the Kconfig database and install HTML resources."""

    if not app.config.kconfig_generate_db:
        return

    with progress_message("Building Kconfig database..."):
        kconfig, module_paths = kconfig_load(app)
        db = list()

        for sc in chain(kconfig.unique_defined_syms, kconfig.unique_choices):
            # skip nameless symbols
            if not sc.name:
                continue

            # store alternative defaults (from defconfig files)
            alt_defaults = list()
            for node in sc.nodes:
                if "defconfig" not in node.filename:
                    continue

                for value, cond in node.orig_defaults:
                    fmt = kconfiglib.expr_str(value, sc_fmt)
                    if cond is not sc.kconfig.y:
                        fmt += f" if {kconfiglib.expr_str(cond, sc_fmt)}"
                    alt_defaults.append([fmt, node.filename])

            # only process nodes with prompt or help
            nodes = [node for node in sc.nodes if node.prompt or node.help]

            inserted_paths = list()
            for node in nodes:
                # avoid duplicate symbols by forcing unique paths. this can
                # happen due to dependencies on 0, a trick used by some modules
                path = f"{node.filename}:{node.linenr}"
                if path in inserted_paths:
                    continue
                inserted_paths.append(path)

                dependencies = None
                if node.dep is not sc.kconfig.y:
                    dependencies = kconfiglib.expr_str(node.dep, sc_fmt)

                defaults = list()
                for value, cond in node.orig_defaults:
                    fmt = kconfiglib.expr_str(value, sc_fmt)
                    if cond is not sc.kconfig.y:
                        fmt += f" if {kconfiglib.expr_str(cond, sc_fmt)}"
                    defaults.append(fmt)

                selects = list()
                for value, cond in node.orig_selects:
                    fmt = kconfiglib.expr_str(value, sc_fmt)
                    if cond is not sc.kconfig.y:
                        fmt += f" if {kconfiglib.expr_str(cond, sc_fmt)}"
                    selects.append(fmt)

                implies = list()
                for value, cond in node.orig_implies:
                    fmt = kconfiglib.expr_str(value, sc_fmt)
                    if cond is not sc.kconfig.y:
                        fmt += f" if {kconfiglib.expr_str(cond, sc_fmt)}"
                    implies.append(fmt)

                ranges = list()
                for min, max, cond in node.orig_ranges:
                    fmt = (f"[{kconfiglib.expr_str(min, sc_fmt)}, "
                           f"{kconfiglib.expr_str(max, sc_fmt)}]")
                    if cond is not sc.kconfig.y:
                        fmt += f" if {kconfiglib.expr_str(cond, sc_fmt)}"
                    ranges.append(fmt)

                choices = list()
                if isinstance(sc, kconfiglib.Choice):
                    for sym in sc.syms:
                        choices.append(kconfiglib.expr_str(sym, sc_fmt))

                filename = node.filename
                for name, path in module_paths.items():
                    if node.filename.startswith(path):
                        filename = node.filename.replace(
                            path, f"<module:{name}>")
                        break

                db.append({
                    "name": f"CONFIG_{sc.name}",
                    "prompt": node.prompt[0] if node.prompt else None,
                    "type": kconfiglib.TYPE_TO_STR[sc.type],
                    "help": node.help,
                    "dependencies": dependencies,
                    "defaults": defaults,
                    "alt_defaults": alt_defaults,
                    "selects": selects,
                    "implies": implies,
                    "ranges": ranges,
                    "choices": choices,
                    "filename": filename,
                    "linenr": node.linenr,
                })

        app.env.kconfig_db = db  # type: ignore

        outdir = Path(app.outdir) / "kconfig"
        outdir.mkdir(exist_ok=True)

        kconfig_db_file = outdir / "kconfig.json"

        with open(kconfig_db_file, "w") as f:
            json.dump(db, f)

    app.config.html_extra_path.append(kconfig_db_file.as_posix())
    app.config.html_static_path.append(RESOURCES_DIR.as_posix())
Example No. 19
    def write(self, *ignored: Any) -> None:
        assert self.env is not None

        docwriter = LaTeXWriter(self)
        docsettings: Any = OptionParser(
            defaults=self.env.settings,
            components=(docwriter, ),
            read_config_files=True,
        ).get_default_values()

        if sphinx.version_info <= (4, 0):
            # 3rd party
            from sphinx.builders.latex import patch_settings  # type: ignore
            patch_settings(docsettings)

        self.init_document_data()
        self.write_stylesheet()

        for entry in self.document_data:
            docname, targetname, title, author, themename = entry[:5]
            theme = self.themes.get(themename)
            toctree_only = False
            if len(entry) > 5:
                toctree_only = entry[5]
            destination = SphinxFileOutput(
                destination_path=os.path.join(self.outdir, targetname),
                encoding="utf-8", overwrite_if_changed=True)
            with progress_message(__("processing %s") % targetname):
                doctree = self.env.get_doctree(docname)
                process_only_nodes(doctree, self.tags)
                toctree = next(iter(doctree.traverse(addnodes.toctree)), None)
                if toctree and toctree.get("maxdepth") > 0:
                    tocdepth = toctree.get("maxdepth")
                else:
                    tocdepth = None

                doctree = self.assemble_doctree(
                    docname,
                    toctree_only,
                    appendices=(self.config.latex_appendices
                                if theme.name != "howto" else []))
                doctree["docclass"] = theme.docclass
                doctree["contentsname"] = self.get_contentsname(docname)
                doctree["tocdepth"] = tocdepth
                self.post_process_images(doctree)
                self.update_doc_context(title, author, theme)

                if hasattr(self, "update_context"):  # pragma: no cover
                    # Only present in newer Sphinx versions
                    self.update_context()

            with progress_message(__("writing")):
                docsettings._author = author
                docsettings._title = title
                docsettings._contentsname = doctree["contentsname"]
                docsettings._docname = docname
                docsettings._docclass = theme.name

                doctree.settings = docsettings
                docwriter.theme = theme
                docwriter.write(doctree, destination)
Example No. 20
def kconfig_build_resources(app: Sphinx) -> None:
    """Build the Kconfig database and install HTML resources."""

    if not app.config.kconfig_generate_db:
        return

    with progress_message("Building Kconfig database..."):
        kconfig, module_paths = kconfig_load(app)
        db = list()

        for sc in chain(kconfig.unique_defined_syms, kconfig.unique_choices):
            # skip nameless symbols
            if not sc.name:
                continue

            # store alternative defaults (from defconfig files)
            alt_defaults = list()
            for node in sc.nodes:
                if "defconfig" not in node.filename:
                    continue

                for value, cond in node.orig_defaults:
                    fmt = kconfiglib.expr_str(value, sc_fmt)
                    if cond is not sc.kconfig.y:
                        fmt += f" if {kconfiglib.expr_str(cond, sc_fmt)}"
                    alt_defaults.append([fmt, node.filename])

            # build list of symbols that select/imply the current one
            # note: all reverse dependencies are ORed together, and conditionals
            # (e.g. select/imply A if B) turns into A && B. So we first split
            # by OR to include all entries, and we split each one by AND to just
            # take the first entry.
            selected_by = list()
            if isinstance(sc,
                          kconfiglib.Symbol) and sc.rev_dep != sc.kconfig.n:
                for select in kconfiglib.split_expr(sc.rev_dep, kconfiglib.OR):
                    sym = kconfiglib.split_expr(select, kconfiglib.AND)[0]
                    selected_by.append(f"CONFIG_{sym.name}")

            implied_by = list()
            if isinstance(
                    sc, kconfiglib.Symbol) and sc.weak_rev_dep != sc.kconfig.n:
                for select in kconfiglib.split_expr(sc.weak_rev_dep,
                                                    kconfiglib.OR):
                    sym = kconfiglib.split_expr(select, kconfiglib.AND)[0]
                    implied_by.append(f"CONFIG_{sym.name}")

            # only process nodes with prompt or help
            nodes = [node for node in sc.nodes if node.prompt or node.help]

            inserted_paths = list()
            for node in nodes:
                # avoid duplicate symbols by forcing unique paths. this can
                # happen due to dependencies on 0, a trick used by some modules
                path = f"{node.filename}:{node.linenr}"
                if path in inserted_paths:
                    continue
                inserted_paths.append(path)

                dependencies = None
                if node.dep is not sc.kconfig.y:
                    dependencies = kconfiglib.expr_str(node.dep, sc_fmt)

                defaults = list()
                for value, cond in node.orig_defaults:
                    fmt = kconfiglib.expr_str(value, sc_fmt)
                    if cond is not sc.kconfig.y:
                        fmt += f" if {kconfiglib.expr_str(cond, sc_fmt)}"
                    defaults.append(fmt)

                selects = list()
                for value, cond in node.orig_selects:
                    fmt = kconfiglib.expr_str(value, sc_fmt)
                    if cond is not sc.kconfig.y:
                        fmt += f" if {kconfiglib.expr_str(cond, sc_fmt)}"
                    selects.append(fmt)

                implies = list()
                for value, cond in node.orig_implies:
                    fmt = kconfiglib.expr_str(value, sc_fmt)
                    if cond is not sc.kconfig.y:
                        fmt += f" if {kconfiglib.expr_str(cond, sc_fmt)}"
                    implies.append(fmt)

                ranges = list()
                for min, max, cond in node.orig_ranges:
                    fmt = (f"[{kconfiglib.expr_str(min, sc_fmt)}, "
                           f"{kconfiglib.expr_str(max, sc_fmt)}]")
                    if cond is not sc.kconfig.y:
                        fmt += f" if {kconfiglib.expr_str(cond, sc_fmt)}"
                    ranges.append(fmt)

                choices = list()
                if isinstance(sc, kconfiglib.Choice):
                    for sym in sc.syms:
                        choices.append(kconfiglib.expr_str(sym, sc_fmt))

                menupath = ""
                iternode = node
                while iternode.parent is not iternode.kconfig.top_node:
                    iternode = iternode.parent
                    menupath = f" > {iternode.prompt[0]}" + menupath

                menupath = "(Top)" + menupath

                filename = node.filename
                for name, path in module_paths.items():
                    if node.filename.startswith(path):
                        filename = node.filename.replace(
                            path, f"<module:{name}>")
                        break

                db.append({
                    "name": f"CONFIG_{sc.name}",
                    "prompt": node.prompt[0] if node.prompt else None,
                    "type": kconfiglib.TYPE_TO_STR[sc.type],
                    "help": node.help,
                    "dependencies": dependencies,
                    "defaults": defaults,
                    "alt_defaults": alt_defaults,
                    "selects": selects,
                    "selected_by": selected_by,
                    "implies": implies,
                    "implied_by": implied_by,
                    "ranges": ranges,
                    "choices": choices,
                    "filename": filename,
                    "linenr": node.linenr,
                    "menupath": menupath,
                })

        app.env.kconfig_db = db  # type: ignore

        outdir = Path(app.outdir) / "kconfig"
        outdir.mkdir(exist_ok=True)

        kconfig_db_file = outdir / "kconfig.json"

        with open(kconfig_db_file, "w") as f:
            json.dump(db, f)

    app.config.html_extra_path.append(kconfig_db_file.as_posix())
    app.config.html_static_path.append(RESOURCES_DIR.as_posix())
Example No. 21
    def build(self, docnames, summary=None, method='update'):
        # type: (Iterable[str], str, str) -> None
        """Main build method.

        First updates the environment, and then calls :meth:`write`.
        """
        if summary:
            logger.info(bold(__('building [%s]') % self.name) + ': ' + summary)

        # while reading, collect all warnings from docutils
        with logging.pending_warnings():
            updated_docnames = set(self.read())

        doccount = len(updated_docnames)
        logger.info(bold(__('looking for now-outdated files... ')), nonl=True)
        for docname in self.env.check_dependents(self.app, updated_docnames):
            updated_docnames.add(docname)
        outdated = len(updated_docnames) - doccount
        if outdated:
            logger.info(__('%d found'), outdated)
        else:
            logger.info(__('none found'))

        if updated_docnames:
            # save the environment
            from sphinx.application import ENV_PICKLE_FILENAME
            with progress_message(__('pickling environment')):
                with open(path.join(self.doctreedir, ENV_PICKLE_FILENAME), 'wb') as f:
                    pickle.dump(self.env, f, pickle.HIGHEST_PROTOCOL)

            # global actions
            self.app.phase = BuildPhase.CONSISTENCY_CHECK
            with progress_message(__('checking consistency')):
                self.env.check_consistency()
        else:
            if method == 'update' and not docnames:
                logger.info(bold(__('no targets are out of date.')))
                return

        self.app.phase = BuildPhase.RESOLVING

        # filter "docnames" (list of outdated files) by the updated
        # found_docs of the environment; this will remove docs that
        # have since been removed
        if docnames and docnames != ['__all__']:
            docnames = set(docnames) & self.env.found_docs

        # determine if we can write in parallel
        if parallel_available and self.app.parallel > 1 and self.allow_parallel:
            self.parallel_ok = self.app.is_parallel_allowed('write')
        else:
            self.parallel_ok = False

        #  create a task executor to use for misc. "finish-up" tasks
        # if self.parallel_ok:
        #     self.finish_tasks = ParallelTasks(self.app.parallel)
        # else:
        # for now, just execute them serially
        self.finish_tasks = SerialTasks()

        # write all "normal" documents (or everything for some builders)
        self.write(docnames, list(updated_docnames), method)

        # finish (write static files etc.)
        self.finish()

        # wait for all tasks
        self.finish_tasks.join()
Example No. 22
    def __init__(self,
                 srcdir,
                 confdir,
                 outdir,
                 doctreedir,
                 buildername,
                 confoverrides=None,
                 status=sys.stdout,
                 warning=sys.stderr,
                 freshenv=False,
                 warningiserror=False,
                 tags=None,
                 verbosity=0,
                 parallel=0,
                 keep_going=False):
        # type: (str, str, str, str, str, Dict, IO, IO, bool, bool, List[str], int, int, bool) -> None  # NOQA
        self.phase = BuildPhase.INITIALIZATION
        self.verbosity = verbosity
        self.extensions = {}  # type: Dict[str, Extension]
        self.builder = None  # type: Builder
        self.env = None  # type: BuildEnvironment
        self.project = None  # type: Project
        self.registry = SphinxComponentRegistry()
        self.html_themes = {}  # type: Dict[str, str]

        # validate provided directories
        self.srcdir = abspath(srcdir)
        self.outdir = abspath(outdir)
        self.doctreedir = abspath(doctreedir)
        self.confdir = confdir
        if self.confdir:  # confdir is optional
            self.confdir = abspath(self.confdir)
            if not path.isfile(path.join(self.confdir, 'conf.py')):
                raise ApplicationError(
                    __("config directory doesn't contain a "
                       "conf.py file (%s)") % confdir)

        if not path.isdir(self.srcdir):
            raise ApplicationError(
                __('Cannot find source directory (%s)') % self.srcdir)

        if self.srcdir == self.outdir:
            raise ApplicationError(
                __('Source directory and destination '
                   'directory cannot be identical'))

        self.parallel = parallel

        if status is None:
            self._status = StringIO()  # type: IO
            self.quiet = True
        else:
            self._status = status
            self.quiet = False

        if warning is None:
            self._warning = StringIO()  # type: IO
        else:
            self._warning = warning
        self._warncount = 0
        self.keep_going = warningiserror and keep_going
        if self.keep_going:
            self.warningiserror = False
        else:
            self.warningiserror = warningiserror
        logging.setup(self, self._status, self._warning)

        self.events = EventManager(self)

        # keep last few messages for traceback
        # This will be filled by sphinx.util.logging.LastMessagesWriter
        self.messagelog = deque(maxlen=10)  # type: deque

        # say hello to the world
        logger.info(bold(
            __('Running Sphinx v%s') % sphinx.__display_version__))

        # notice for parallel build on macOS and py38+
        if sys.version_info > (
                3, 8) and platform.system() == 'Darwin' and parallel > 1:
            logger.info(
                bold(
                    __("For security reason, parallel mode is disabled on macOS and "
                       "python3.8 and above.  For more details, please read "
                       "https://github.com/sphinx-doc/sphinx/issues/6803")))

        # status code for command-line application
        self.statuscode = 0

        # read config
        self.tags = Tags(tags)
        if self.confdir is None:
            self.config = Config({}, confoverrides or {})
        else:
            self.config = Config.read(self.confdir, confoverrides or {},
                                      self.tags)

        # initialize some limited config variables before initialize i18n and loading
        # extensions
        self.config.pre_init_values()

        # set up translation infrastructure
        self._init_i18n()

        # check the Sphinx version if requested
        if self.config.needs_sphinx and self.config.needs_sphinx > sphinx.__display_version__:
            raise VersionRequirementError(
                __('This project needs at least Sphinx v%s and therefore cannot '
                   'be built with this version.') % self.config.needs_sphinx)

        # set confdir to srcdir if -C given (!= no confdir); a few pieces
        # of code expect a confdir to be set
        if self.confdir is None:
            self.confdir = self.srcdir

        # load all built-in extension modules
        for extension in builtin_extensions:
            self.setup_extension(extension)

        # load all user-given extension modules
        for extension in self.config.extensions:
            self.setup_extension(extension)

        # preload builder module (before init config values)
        self.preload_builder(buildername)

        if not path.isdir(outdir):
            with progress_message(__('making output directory')):
                ensuredir(outdir)

        # the config file itself can be an extension
        if self.config.setup:
            prefix = __('while setting up extension %s:') % "conf.py"
            with prefixed_warnings(prefix):
                if callable(self.config.setup):
                    self.config.setup(self)
                else:
                    raise ConfigError(
                        __("'setup' as currently defined in conf.py isn't a Python callable. "
                           "Please modify its definition to make it a callable function. "
                           "This is needed for conf.py to behave as a Sphinx extension."
                           ))

        # now that we know all config values, collect them from conf.py
        self.config.init_values()
        self.events.emit('config-inited', self.config)

        # create the project
        self.project = Project(self.srcdir, self.config.source_suffix)
        # create the builder
        self.builder = self.create_builder(buildername)
        # set up the build environment
        self._init_env(freshenv)
        # set up the builder
        self._init_builder()
Example No. 23
    def build(self, docnames: Iterable[str], summary: str = None, method: str = 'update') -> None:  # NOQA
        """Main build method.

        First updates the environment, and then calls :meth:`write`.
        """
        if summary:
            logger.info(bold(__('building [%s]: ') % self.name) + summary)

        # while reading, collect all warnings from docutils
        with logging.pending_warnings():
            updated_docnames = set(self.read())

        doccount = len(updated_docnames)
        logger.info(bold(__('looking for now-outdated files... ')), nonl=True)
        for docname in self.env.check_dependents(self.app, updated_docnames):
            updated_docnames.add(docname)
        outdated = len(updated_docnames) - doccount
        if outdated:
            logger.info(__('%d found'), outdated)
        else:
            logger.info(__('none found'))

        if updated_docnames:
            # save the environment
            from sphinx.application import ENV_PICKLE_FILENAME
            with progress_message(__('pickling environment')):
                with open(path.join(self.doctreedir, ENV_PICKLE_FILENAME), 'wb') as f:
                    pickle.dump(self.env, f, pickle.HIGHEST_PROTOCOL)

            # global actions
            self.app.phase = BuildPhase.CONSISTENCY_CHECK
            with progress_message(__('checking consistency')):
                self.env.check_consistency()
        else:
            if method == 'update' and not docnames:
                logger.info(bold(__('no targets are out of date.')))
                return

        self.app.phase = BuildPhase.RESOLVING

        # filter "docnames" (list of outdated files) by the updated
        # found_docs of the environment; this will remove docs that
        # have since been removed
        if docnames and docnames != ['__all__']:
            docnames = set(docnames) & self.env.found_docs

        # determine if we can write in parallel
        if parallel_available and self.app.parallel > 1 and self.allow_parallel:
            self.parallel_ok = self.app.is_parallel_allowed('write')
        else:
            self.parallel_ok = False

        #  create a task executor to use for misc. "finish-up" tasks
        # if self.parallel_ok:
        #     self.finish_tasks = ParallelTasks(self.app.parallel)
        # else:
        # for now, just execute them serially
        self.finish_tasks = SerialTasks()

        # write all "normal" documents (or everything for some builders)
        self.write(docnames, list(updated_docnames), method)

        # finish (write static files etc.)
        self.finish()

        # wait for all tasks
        self.finish_tasks.join()
Example No. 24
    def __init__(self, srcdir, confdir, outdir, doctreedir, buildername,
                 confoverrides=None, status=sys.stdout, warning=sys.stderr,
                 freshenv=False, warningiserror=False, tags=None, verbosity=0,
                 parallel=0, keep_going=False):
        # type: (str, str, str, str, str, Dict, IO, IO, bool, bool, List[str], int, int, bool) -> None  # NOQA
        self.phase = BuildPhase.INITIALIZATION
        self.verbosity = verbosity
        self.extensions = {}                    # type: Dict[str, Extension]
        self.builder = None                     # type: Builder
        self.env = None                         # type: BuildEnvironment
        self.project = None                     # type: Project
        self.registry = SphinxComponentRegistry()
        self.html_themes = {}                   # type: Dict[str, str]

        # validate provided directories
        self.srcdir = abspath(srcdir)
        self.outdir = abspath(outdir)
        self.doctreedir = abspath(doctreedir)
        self.confdir = confdir
        if self.confdir:  # confdir is optional
            self.confdir = abspath(self.confdir)
            if not path.isfile(path.join(self.confdir, 'conf.py')):
                raise ApplicationError(__("config directory doesn't contain a "
                                          "conf.py file (%s)") % confdir)

        if not path.isdir(self.srcdir):
            raise ApplicationError(__('Cannot find source directory (%s)') %
                                   self.srcdir)

        if self.srcdir == self.outdir:
            raise ApplicationError(__('Source directory and destination '
                                      'directory cannot be identical'))

        self.parallel = parallel

        if status is None:
            self._status = StringIO()      # type: IO
            self.quiet = True
        else:
            self._status = status
            self.quiet = False

        if warning is None:
            self._warning = StringIO()     # type: IO
        else:
            self._warning = warning
        self._warncount = 0
        self.keep_going = warningiserror and keep_going
        if self.keep_going:
            self.warningiserror = False
        else:
            self.warningiserror = warningiserror
        logging.setup(self, self._status, self._warning)

        self.events = EventManager()

        # keep last few messages for traceback
        # This will be filled by sphinx.util.logging.LastMessagesWriter
        self.messagelog = deque(maxlen=10)  # type: deque

        # say hello to the world
        logger.info(bold(__('Running Sphinx v%s') % sphinx.__display_version__))

        # status code for command-line application
        self.statuscode = 0

        # read config
        self.tags = Tags(tags)
        if self.confdir is None:
            self.config = Config({}, confoverrides or {})
        else:
            self.config = Config.read(self.confdir, confoverrides or {}, self.tags)

        # initialize some limited config variables before initialize i18n and loading
        # extensions
        self.config.pre_init_values()

        # set up translation infrastructure
        self._init_i18n()

        # check the Sphinx version if requested
        if self.config.needs_sphinx and self.config.needs_sphinx > sphinx.__display_version__:
            raise VersionRequirementError(
                __('This project needs at least Sphinx v%s and therefore cannot '
                   'be built with this version.') % self.config.needs_sphinx)

        # set confdir to srcdir if -C given (!= no confdir); a few pieces
        # of code expect a confdir to be set
        if self.confdir is None:
            self.confdir = self.srcdir

        # load all built-in extension modules
        for extension in builtin_extensions:
            self.setup_extension(extension)

        # load all user-given extension modules
        for extension in self.config.extensions:
            self.setup_extension(extension)

        # preload builder module (before init config values)
        self.preload_builder(buildername)

        if not path.isdir(outdir):
            with progress_message(__('making output directory')):
                ensuredir(outdir)

        # the config file itself can be an extension
        if self.config.setup:
            prefix = __('while setting up extension %s:') % "conf.py"
            with prefixed_warnings(prefix):
                if callable(self.config.setup):
                    self.config.setup(self)
                else:
                    raise ConfigError(
                        __("'setup' as currently defined in conf.py isn't a Python callable. "
                           "Please modify its definition to make it a callable function. "
                           "This is needed for conf.py to behave as a Sphinx extension.")
                    )

        # now that we know all config values, collect them from conf.py
        self.config.init_values()
        self.emit('config-inited', self.config)

        # create the project
        self.project = Project(self.srcdir, self.config.source_suffix)
        # create the builder
        self.builder = self.create_builder(buildername)
        # set up the build environment
        self._init_env(freshenv)
        # set up the builder
        self._init_builder()