Example 1
def _write_serial(self, docnames):
    # type: (Sequence[unicode]) -> None
    with logging.pending_warnings():
        for docname in status_iterator(docnames, 'writing output... ', "darkgreen",
                                       len(docnames), self.app.verbosity):
            doctree = self.env.get_and_resolve_doctree(docname, self)
            self.write_doc_serialized(docname, doctree)
            self.write_doc(docname, doctree)
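Examples 1 and 2 are two revisions of Builder._write_serial from Sphinx. Here logging is sphinx.util.logging, and pending_warnings() is a context manager that withholds warnings logged inside the block and emits them together, in order, when the block exits, which keeps warnings from breaking up the progress line printed by status_iterator.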
Example 2
def _write_serial(self, docnames):
    # type: (Sequence[str]) -> None
    with logging.pending_warnings():
        for docname in status_iterator(docnames, __('writing output... '), "darkgreen",
                                       len(docnames), self.app.verbosity):
            self.app.phase = BuildPhase.RESOLVING
            doctree = self.env.get_and_resolve_doctree(docname, self)
            self.app.phase = BuildPhase.WRITING
            self.write_doc_serialized(docname, doctree)
            self.write_doc(docname, doctree)
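Example 2 is the later revision of the same method: the type comment uses str instead of unicode (Python 3), the user-facing string is wrapped in __() for translation, and app.phase is set to BuildPhase.RESOLVING and BuildPhase.WRITING around the resolve and write steps, presumably so other components can tell which build phase is active.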
Example 3
def test_skip_warningiserror(app, status, warning):
    logging.setup(app, status, warning)
    logger = logging.getLogger(__name__)

    app.warningiserror = True
    with logging.skip_warningiserror():
        logger.warning('message')

    # with skip=False, the warning raises a SphinxWarning exception
    with pytest.raises(SphinxWarning):
        with logging.skip_warningiserror(False):
            logger.warning('message')

    # It also works during pending_warnings.
    with logging.pending_warnings():
        with logging.skip_warningiserror():
            logger.warning('message')

    with pytest.raises(SphinxWarning):
        with logging.pending_warnings():
            with logging.skip_warningiserror(False):
                logger.warning('message')
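The test above pins down the contract of skip_warningiserror(): even with app.warningiserror = True (warnings-as-errors), a warning logged inside skip_warningiserror() does not raise SphinxWarning, while skip_warningiserror(False) restores the raising behaviour; both variants also compose with pending_warnings(). As a usage illustration, something like the following could shield an expected warning in an extension; the function and its surrounding module are hypothetical, only the sphinx.util.logging API is taken from the examples:

from sphinx.util import logging

logger = logging.getLogger(__name__)

def run_noisy_step():
    # a warning we expect and do not want to fail the build,
    # even when Sphinx runs with warnings treated as errors
    with logging.skip_warningiserror():
        logger.warning('expected, non-fatal warning')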
Example 4
def test_pending_warnings(app, status, warning):
    logging.setup(app, status, warning)
    logger = logging.getLogger(__name__)

    logger.warning('message1')
    with logging.pending_warnings():
        # not logged yet (buffered) inside the block
        logger.warning('message2')
        logger.warning('message3')
        assert 'WARNING: message1' in warning.getvalue()
        assert 'WARNING: message2' not in warning.getvalue()
        assert 'WARNING: message3' not in warning.getvalue()

    # now actually logged, in order
    assert 'WARNING: message2\nWARNING: message3' in strip_escseq(warning.getvalue())
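Example 4 demonstrates the buffering behaviour directly: warnings logged inside pending_warnings() are withheld until the context exits and are then flushed in their original order. Below is a minimal sketch of that idea using only the standard logging module; it illustrates the mechanism and is not Sphinx's actual implementation (sphinx.util.logging builds the real thing out of logging handlers in a similar spirit):

import contextlib
import logging

class BufferingHandler(logging.Handler):
    """Collect log records instead of emitting them immediately."""
    def __init__(self):
        super().__init__()
        self.buffer = []

    def emit(self, record):
        self.buffer.append(record)

@contextlib.contextmanager
def pending_warnings(logger, real_handler):
    # swap the real handler for a buffering one for the duration of the block
    buffering = BufferingHandler()
    logger.removeHandler(real_handler)
    logger.addHandler(buffering)
    try:
        yield
    finally:
        # restore the real handler, then flush the buffered records in order
        logger.removeHandler(buffering)
        logger.addHandler(real_handler)
        for record in buffering.buffer:
            real_handler.handle(record)

Note that Sphinx defers only warning-level records (the status_iterator progress output in Example 1 still streams live), whereas this sketch buffers everything that reaches the swapped handler.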
Example 5
    def build(self, docnames, summary=None, method='update'):
        # type: (Iterable[unicode], unicode, unicode) -> None
        """Main build method.

        First updates the environment, and then calls :meth:`write`.
        """
        if summary:
            logger.info(bold(__('building [%s]') % self.name) + ': ' + summary)

        # while reading, collect all warnings from docutils
        with logging.pending_warnings():
            updated_docnames = set(self.read())

        doccount = len(updated_docnames)
        logger.info(bold(__('looking for now-outdated files... ')), nonl=True)
        for docname in self.env.check_dependents(self.app, updated_docnames):
            updated_docnames.add(docname)
        outdated = len(updated_docnames) - doccount
        if outdated:
            logger.info(__('%d found'), outdated)
        else:
            logger.info(__('none found'))

        if updated_docnames:
            # save the environment
            from sphinx.application import ENV_PICKLE_FILENAME
            logger.info(bold(__('pickling environment... ')), nonl=True)
            self.env.topickle(path.join(self.doctreedir, ENV_PICKLE_FILENAME))
            logger.info(__('done'))

            # global actions
            self.app.phase = BuildPhase.CONSISTENCY_CHECK
            logger.info(bold(__('checking consistency... ')), nonl=True)
            self.env.check_consistency()
            logger.info(__('done'))
        else:
            if method == 'update' and not docnames:
                logger.info(bold(__('no targets are out of date.')))
                return

        self.app.phase = BuildPhase.RESOLVING

        # filter "docnames" (list of outdated files) by the updated
        # found_docs of the environment; this will remove docs that
        # have since been removed
        if docnames and docnames != ['__all__']:
            docnames = set(docnames) & self.env.found_docs

        # determine if we can write in parallel
        if parallel_available and self.app.parallel > 1 and self.allow_parallel:
            self.parallel_ok = self.app.is_parallel_allowed('write')
        else:
            self.parallel_ok = False

        #  create a task executor to use for misc. "finish-up" tasks
        # if self.parallel_ok:
        #     self.finish_tasks = ParallelTasks(self.app.parallel)
        # else:
        # for now, just execute them serially
        self.finish_tasks = SerialTasks()

        # write all "normal" documents (or everything for some builders)
        self.write(docnames, list(updated_docnames), method)

        # finish (write static files etc.)
        self.finish()

        # wait for all tasks
        self.finish_tasks.join()
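Examples 5, 6 and 7 are three revisions of Builder.build, the top-level build driver. In each, the read phase is wrapped in pending_warnings() so that the docutils warnings collected while reading are flushed together afterwards; the method then pulls in dependent documents via check_dependents, pickles the environment, runs the consistency check, decides whether a parallel write is possible, and finally hands off to write() and finish().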
Example 6
    def build(self, docnames, summary=None, method='update'):
        # type: (Iterable[unicode], unicode, unicode) -> None
        """Main build method.

        First updates the environment, and then calls :meth:`write`.
        """
        if summary:
            logger.info(bold(__('building [%s]') % self.name) + ': ' + summary)

        # while reading, collect all warnings from docutils
        with logging.pending_warnings():
            updated_docnames = set(self.read())

        doccount = len(updated_docnames)
        logger.info(bold(__('looking for now-outdated files... ')), nonl=True)
        for docname in self.env.check_dependents(self.app, updated_docnames):
            updated_docnames.add(docname)
        outdated = len(updated_docnames) - doccount
        if outdated:
            logger.info(__('%d found'), outdated)
        else:
            logger.info(__('none found'))

        # save the environment
        from sphinx.application import ENV_PICKLE_FILENAME
        logger.info(bold(__('pickling environment... ')), nonl=True)
        with open(path.join(self.doctreedir, ENV_PICKLE_FILENAME), 'wb') as f:
            pickle.dump(self.env, f, pickle.HIGHEST_PROTOCOL)
        logger.info(__('done'))

        if updated_docnames:
            # global actions
            self.app.phase = BuildPhase.CONSISTENCY_CHECK
            logger.info(bold(__('checking consistency... ')), nonl=True)
            self.env.check_consistency()
            logger.info(__('done'))
        else:
            if method == 'update' and not docnames:
                logger.info(bold(__('no targets are out of date.')))
                return

        self.app.phase = BuildPhase.RESOLVING

        # filter "docnames" (list of outdated files) by the updated
        # found_docs of the environment; this will remove docs that
        # have since been removed
        if docnames and docnames != ['__all__']:
            docnames = set(docnames) & self.env.found_docs

        # determine if we can write in parallel
        if parallel_available and self.app.parallel > 1 and self.allow_parallel:
            self.parallel_ok = self.app.is_parallel_allowed('write')
        else:
            self.parallel_ok = False

        #  create a task executor to use for misc. "finish-up" tasks
        # if self.parallel_ok:
        #     self.finish_tasks = ParallelTasks(self.app.parallel)
        # else:
        # for now, just execute them serially
        self.finish_tasks = SerialTasks()

        # write all "normal" documents (or everything for some builders)
        self.write(docnames, list(updated_docnames), method)

        # finish (write static files etc.)
        self.finish()

        # wait for all tasks
        self.finish_tasks.join()
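The difference from the previous revision: the environment is now pickled unconditionally, directly with pickle.dump at the highest protocol, rather than only when documents were updated and via env.topickle.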
Example 7
    def build(self, docnames, summary=None, method='update'):
        # type: (Iterable[unicode], unicode, unicode) -> None
        """Main build method.

        First updates the environment, and then calls :meth:`write`.
        """
        if summary:
            logger.info(bold('building [%s]' % self.name) + ': ' + summary)

        # while reading, collect all warnings from docutils
        with logging.pending_warnings():
            updated_docnames = set(self.env.update(self.config, self.srcdir, self.doctreedir))

        doccount = len(updated_docnames)
        logger.info(bold('looking for now-outdated files... '), nonl=True)
        for docname in self.env.check_dependents(self.app, updated_docnames):
            updated_docnames.add(docname)
        outdated = len(updated_docnames) - doccount
        if outdated:
            logger.info('%d found', outdated)
        else:
            logger.info('none found')

        if updated_docnames:
            # save the environment
            from sphinx.application import ENV_PICKLE_FILENAME
            logger.info(bold('pickling environment... '), nonl=True)
            self.env.topickle(path.join(self.doctreedir, ENV_PICKLE_FILENAME))
            logger.info('done')

            # global actions
            logger.info(bold('checking consistency... '), nonl=True)
            self.env.check_consistency()
            logger.info('done')
        else:
            if method == 'update' and not docnames:
                logger.info(bold('no targets are out of date.'))
                return

        # filter "docnames" (list of outdated files) by the updated
        # found_docs of the environment; this will remove docs that
        # have since been removed
        if docnames and docnames != ['__all__']:
            docnames = set(docnames) & self.env.found_docs

        # determine if we can write in parallel
        self.parallel_ok = False
        if parallel_available and self.app.parallel > 1 and self.allow_parallel:
            self.parallel_ok = True
            for extension in itervalues(self.app.extensions):
                if not extension.parallel_write_safe:
                    logger.warning('the %s extension is not safe for parallel '
                                   'writing, doing serial write', extension.name)
                    self.parallel_ok = False
                    break

        #  create a task executor to use for misc. "finish-up" tasks
        # if self.parallel_ok:
        #     self.finish_tasks = ParallelTasks(self.app.parallel)
        # else:
        # for now, just execute them serially
        self.finish_tasks = SerialTasks()

        # write all "normal" documents (or everything for some builders)
        self.write(docnames, list(updated_docnames), method)

        # finish (write static files etc.)
        self.finish()

        # wait for all tasks
        self.finish_tasks.join()
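This is the oldest of the three revisions: it reads via env.update(...) instead of self.read(), has no BuildPhase tracking, leaves user-facing strings untranslated, iterates extensions with the six-style itervalues (Python 2 era), and decides whether a parallel write is safe by checking each extension's parallel_write_safe flag itself, logic that the newer revisions delegate to app.is_parallel_allowed('write').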