Example #1
 def build_container(self, outdir, outname):
     # type: (unicode, unicode) -> None
     """Write the metainfo file META-INF/container.xml."""
     logger.info('writing %s file...', outname)
     filename = path.join(outdir, outname)
     ensuredir(path.dirname(filename))
     copy_asset_file(path.join(self.template_dir, 'container.xml'), filename)
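Each example on this page calls the same helper before writing into a directory. A minimal stand-in, assuming ensuredir behaves like sphinx.util.osutil.ensuredir (create the directory and any missing parents, and treat an already existing directory as success), could be:

import os

def ensuredir(path):
    # Create *path* and any missing parents; an existing directory is not an error.
    os.makedirs(path, exist_ok=True)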
Example #2
    def convert_to(self, path, builder):
        try:
            tmpdir = mkdtemp()

            astah_command = find_astah_command(builder)
            if astah_command is None:
                return False

            astah_args = [astah_command, '-image', 'all', '-f', self['filename'], '-o', tmpdir]

            ensuredir(os.path.dirname(path))
            retcode = subprocess.call(astah_args)
            if retcode != 0:
                builder.warn('Fail to convert astah image (exitcode: %s)' % retcode)
                return False

            dirname = os.path.splitext(os.path.basename(self['filename']))[0]
            imagedir = os.path.join(tmpdir, dirname)
            if self['sheet']:
                image_path = os.path.join(imagedir, self['sheet'] + '.png')
            else:
                image_path = os.path.join(imagedir, os.listdir(imagedir)[0])  # first item in dir

            if os.path.exists(image_path):
                copyfile(image_path, path)
                return True
            else:
                builder.warn('Fail to convert astah image: unknown sheet [%s]' % self['sheet'])
                return False
        except Exception as exc:
            builder.warn('Fail to convert astah image: %s' % exc)
            return False
        finally:
            rmtree(tmpdir, ignore_errors=True)
Example #3
    def write(self, *ignored):
        # build_all
        docnames = set([doc for doc in self.env.found_docs if doc.startswith("stdlib")])

        self.info(bold('preparing documents... '), nonl=True)
        self.prepare_writing(docnames)
        self.info('done')

        # write target files
        warnings = []
        self.env.set_warnfunc(lambda *args: warnings.append(args))

        outfilename = path.join(self.outdir, self.name + self.out_suffix)
        ensuredir(path.dirname(outfilename))
        try:
            f = codecs.open(outfilename, 'w', 'utf-8')
            try:
                f.write('# automatically generated from files in doc/stdlib/ -- do not edit here\n\n' +
                        '{\n\n')

                for docname in self.status_iterator(
                    sorted(docnames), 'processing... ', darkgreen, len(docnames)):
                    doctree = self.env.get_and_resolve_doctree(docname, self)
                    self.writer.write(doctree, f)
                    f.write("\n")

                f.write('\n}\n')
            finally:
                f.close()
        except (IOError, OSError) as err:
            self.warn("error writing file %s: %s" % (outfilename, err))

        for warning in warnings:
            self.warn(*warning)
        self.env.set_warnfunc(self.warn)
Example #4
    def __init__(self, app):
        # type: (Sphinx) -> None
        self.srcdir = app.srcdir
        self.confdir = app.confdir
        self.outdir = app.outdir
        self.doctreedir = app.doctreedir
        ensuredir(self.doctreedir)

        self.app = app              # type: Sphinx
        self.env = None             # type: BuildEnvironment
        self.config = app.config    # type: Config
        self.tags = app.tags        # type: Tags
        self.tags.add(self.format)
        self.tags.add(self.name)
        self.tags.add("format_%s" % self.format)
        self.tags.add("builder_%s" % self.name)

        # images that need to be copied over (source -> dest)
        self.images = {}  # type: Dict[unicode, unicode]
        # basename of images directory
        self.imagedir = ""
        # relative path to image directory from current docname (used at writing docs)
        self.imgpath = ""  # type: unicode

        # these get set later
        self.parallel_ok = False
        self.finish_tasks = None  # type: Any
Example #5
def get_image_filename(self, code, format, options, prefix='rackdiag'):
    """
    Get path of output file.
    """
    if format not in ('PNG', 'PDF'):
        raise RackdiagError('rackdiag error:\nunknown format: %s\n' % format)

    if format == 'PDF':
        try:
            import reportlab
        except ImportError:
            msg = 'rackdiag error:\n' + \
                  'could not output PDF format; Install reportlab\n'
            raise RackdiagError(msg)

    hashkey = code.encode('utf-8') + str(options)
    fname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), format.lower())
    if hasattr(self.builder, 'imgpath'):
        # HTML
        relfn = posixpath.join(self.builder.imgpath, fname)
        outfn = os.path.join(self.builder.outdir, '_images', fname)
    else:
        # LaTeX
        relfn = fname
        outfn = os.path.join(self.builder.outdir, fname)

    if os.path.isfile(outfn):
        return relfn, outfn

    ensuredir(os.path.dirname(outfn))

    return relfn, outfn
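A hypothetical caller would render into the absolute path and embed the relative one in the generated markup; a rough sketch (the rendering helper and the node are illustrative, not part of the extension):

relfn, outfn = get_image_filename(self, code, 'PNG', options)
if not os.path.isfile(outfn):
    # the hash-based file name makes this a simple cache check
    render_rackdiag(code, options, outfn)  # hypothetical rendering step
node['uri'] = relfn  # relative path referenced from the HTML/LaTeX output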
Example #6
def copy_static_entry(source, targetdir, builder, context={},
                      exclude_matchers=(), level=0):
    # type: (str, str, Any, Dict, Tuple[Callable, ...], int) -> None
    """[DEPRECATED] Copy a HTML builder static_path entry from source to targetdir.

    Handles all possible cases of files, directories and subdirectories.
    """
    warnings.warn('sphinx.util.copy_static_entry is deprecated for removal',
                  RemovedInSphinx30Warning, stacklevel=2)

    if exclude_matchers:
        relpath = relative_path(path.join(builder.srcdir, 'dummy'), source)
        for matcher in exclude_matchers:
            if matcher(relpath):
                return
    if path.isfile(source):
        copy_asset_file(source, targetdir, context, builder.templates)
    elif path.isdir(source):
        ensuredir(targetdir)
        for entry in os.listdir(source):
            if entry.startswith('.'):
                continue
            newtarget = targetdir
            if path.isdir(path.join(source, entry)):
                newtarget = path.join(targetdir, entry)
            copy_static_entry(path.join(source, entry), newtarget,
                              builder, context, level=level + 1,
                              exclude_matchers=exclude_matchers)
Example #7
    def copy_static_files(self):
        # copy static files
        self.info(bold('copying static files... '), nonl=True)
        ensuredir(path.join(self.outdir, '_static'))
        # first, create pygments style file
        f = open(path.join(self.outdir, '_static', 'pygments.css'), 'w')
        f.write(self.highlighter.get_stylesheet())
        f.close()
        # then, copy translations JavaScript file
        if self.config.language is not None:
            jsfile = self._get_translations_js()
            if jsfile:
                copyfile(jsfile, path.join(self.outdir, '_static',
                                           'translations.js'))

        # copy non-minified stemmer JavaScript file
        if self.indexer is not None:
            jsfile = self.indexer.get_js_stemmer_rawcode()
            if jsfile:
                copyfile(jsfile, path.join(self.outdir, '_static', '_stemmer.js'))

        ctx = self.globalcontext.copy()

        # add context items for search function used in searchtools.js_t
        if self.indexer is not None:
            ctx.update(self.indexer.context_for_searchtool())

        # then, copy over theme-supplied static files
        if self.theme:
            for theme_path in self.theme.get_dirchain()[::-1]:
                entry = path.join(theme_path, 'static')
                copy_asset(entry, path.join(self.outdir, '_static'), excluded=DOTFILES,
                           context=ctx, renderer=self.templates)
        # then, copy over all user-supplied static files
        excluded = Matcher(self.config.exclude_patterns + ["**/.*"])
        for static_path in self.config.html_static_path:
            entry = path.join(self.confdir, static_path)
            if not path.exists(entry):
                self.warn('html_static_path entry %r does not exist' % entry)
                continue
            copy_asset(entry, path.join(self.outdir, '_static'), excluded,
                       context=ctx, renderer=self.templates)
        # copy logo and favicon files if not already in static path
        if self.config.html_logo:
            logobase = path.basename(self.config.html_logo)
            logotarget = path.join(self.outdir, '_static', logobase)
            if not path.isfile(path.join(self.confdir, self.config.html_logo)):
                self.warn('logo file %r does not exist' % self.config.html_logo)
            elif not path.isfile(logotarget):
                copyfile(path.join(self.confdir, self.config.html_logo),
                         logotarget)
        if self.config.html_favicon:
            iconbase = path.basename(self.config.html_favicon)
            icontarget = path.join(self.outdir, '_static', iconbase)
            if not path.isfile(path.join(self.confdir, self.config.html_favicon)):
                self.warn('favicon file %r does not exist' % self.config.html_favicon)
            elif not path.isfile(icontarget):
                copyfile(path.join(self.confdir, self.config.html_favicon),
                         icontarget)
        self.info('done')
Example #8
    def to_image(self, builder):
        if builder.format == 'html':
            reldir = "_images"
            outdir = os.path.join(builder.outdir, '_images')
        else:
            reldir = ""
            outdir = builder.outdir

        try:
            cacoo = Cacoo(builder.config.cacoo_apikey)
            last_modified = cacoo.get_last_modified(self['diagramid'])

            filename = "cacoo-%s.png" % self['diagramid'].replace('#', '-')
            path = os.path.join(outdir, filename)
            if not os.path.exists(path) or os.stat(path).st_mtime < last_modified:
                ensuredir(outdir)
                with open(path, 'wb') as fd:
                    fd.write(cacoo.get_image(self['diagramid']).read())
                os.utime(path, (last_modified, last_modified))
        except Exception as exc:
            builder.warn('Fail to download cacoo image: %s (check your cacoo_apikey or diagramid)' % exc)
            return nodes.Text('')

        relfn = os.path.join(reldir, filename)
        image_node = nodes.image(candidates={'*': relfn}, **self.attributes)
        image_node['uri'] = relfn

        return image_node
Example #9
def render_scruffy(self, code, options, format, prefix='scruffy'):
    """Render scruffy code into a PNG output file.

    :param self: Sphinx main class object
    :param code: `string` of the code to be rendered
    :param options: `list` of rendering options
    :param format: `string` image format: png, jpg, etc
    :param prefix: `string` image file name prefix

    :return: `tuple` in form (<source image file name>, <output image file name>)
    """
    code = code.replace('\n', ',')
    hashkey = code.encode('utf-8') + str(options)
    image_filename = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), format)
    assert hasattr(self.builder, 'imgpath'), "Only HTML output is supported!"
    source_image_file_name = posixpath.join(self.builder.imgpath, image_filename)
    output_image_file_name = path.join(self.builder.outdir, '_images', image_filename)
    if not path.isfile(output_image_file_name):
        ensuredir(path.dirname(output_image_file_name))
        with open(output_image_file_name, 'wb') as stream:
            scruffy_options = ScruffyOptions(dict((k, True) for k in options))
            if scruffy_options.sequence:
                suml.suml2pic.transform(code, stream, scruffy_options)
            else:
                suml.yuml2dot.transform(code, stream, scruffy_options)
    return source_image_file_name, output_image_file_name
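Inside an HTML visitor the returned pair would typically be used roughly like this (a sketch following the usual docutils/Sphinx visitor conventions; none of it is taken from the original extension):

refname, outname = render_scruffy(self, node['code'], node['options'], 'png')
# refname is relative to the HTML output, outname is the freshly written file
self.body.append('<img src="%s" alt="scruffy diagram" />' % refname)
raise nodes.SkipNode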
Example #10
def _run_hg_command(command, override_dir=''):
    """
    Runs the given command, as if it were typed at the command line, in the
    appropriate directory.
    """
    verb = command[0]
    actions = hg_verbs[verb][1]
    try:
        command[0] = hg_verbs[verb][0]
        command.insert(0, executable)
        ensuredir(override_dir or local_repo_physical_dir)
        out = subprocess.Popen(command, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               cwd=override_dir or local_repo_physical_dir)
        if testing:
            # For some strange reason, unit tests fail if we don't have
            # a small pause here.
            import time
            time.sleep(0.1)

        stderr = out.stderr.readlines()
        if stderr:
            return stderr

        if out.returncode == 0 or out.returncode is None:
            if actions.get(0, '') == '<string>':
                return out.communicate()[0]
            else:
                return out.returncode
        else:
            return out.returncode

    except OSError as err:
        if err.strerror == 'No such file or directory':
            raise DVCSError('The ``hg`` executable file was not found.')
Example #11
    def handle_page(self, pagename, ctx, templatename='page.html',
                    outfilename=None, event_arg=None):
        # type: (str, Dict, str, str, Any) -> None
        ctx['current_page_name'] = pagename
        self.add_sidebars(pagename, ctx)

        if not outfilename:
            outfilename = path.join(self.outdir,
                                    os_path(pagename) + self.out_suffix)

        # we're not taking the return value here, since no template is
        # actually rendered
        self.app.emit('html-page-context', pagename, templatename, ctx, event_arg)

        # make context object serializable
        for key in list(ctx):
            if isinstance(ctx[key], types.FunctionType):
                del ctx[key]

        ensuredir(path.dirname(outfilename))
        self.dump_context(ctx, outfilename)

        # if there is a source file, copy the source file for the
        # "show source" link
        if ctx.get('sourcename'):
            source_name = path.join(self.outdir, '_sources',
                                    os_path(ctx['sourcename']))
            ensuredir(path.dirname(source_name))
            copyfile(self.env.doc2path(pagename), source_name)
Example #12
 def init_indexing(self, changed=[]):
     ensuredir(self.db_path)
     self.database = xapian.WritableDatabase(self.db_path,
                                             xapian.DB_CREATE_OR_OPEN)
     self.indexer = xapian.TermGenerator()
     stemmer = xapian.Stem("english")
     self.indexer.set_stemmer(stemmer)
Example #13
    def output_rst(self, root, source_suffix):
        for id, obj in self.objects.items():

            if not obj or not obj.top_level_object:
                continue

            rst = obj.render()
            if not rst:
                continue

            try:
                filename = id.split('(')[0]
            except IndexError:
                filename = id
            filename = filename.replace('#', '-')
            detail_dir = os.path.join(root, *filename.split('.'))
            ensuredir(detail_dir)
            path = os.path.join(detail_dir, '%s%s' % ('index', source_suffix))
            with open(path, 'wb+') as detail_file:
                detail_file.write(rst.encode('utf-8'))

        # Render Top Index
        top_level_index = os.path.join(root, 'index.rst')
        pages = self.objects.values()
        with open(top_level_index, 'w+') as top_level_file:
            content = self.jinja_env.get_template('index.rst')
            top_level_file.write(content.render(pages=pages))
Example #14
 def __init__(self, db_path):
     ensuredir(db_path)
     if index.exists_in(db_path):
         self.index = index.open_dir(db_path)
     else:
         self.index = index.create_in(db_path, schema=self.schema)
     self.qparser = QueryParser('text', self.schema)
Example #15
def build_latex_doc(app, status, warning, engine, docclass):
    app.config.latex_engine = engine
    app.config.latex_documents[0] = app.config.latex_documents[0][:4] + (docclass,)

    LaTeXTranslator.ignore_missing_images = True
    app.builder.build_all()

    # file from latex_additional_files
    assert (app.outdir / 'svgimg.svg').isfile()

    # now, try to run latex over it
    with cd(app.outdir):
        try:
            ensuredir(engine)
            p = Popen([engine, '--interaction=nonstopmode',
                       '-output-directory=%s' % engine, 'SphinxTests.tex'],
                      stdout=PIPE, stderr=PIPE)
        except OSError:  # most likely the latex executable was not found
            raise SkipTest
        else:
            stdout, stderr = p.communicate()
            if p.returncode != 0:
                print(stdout)
                print(stderr)
                assert False, '%s exited with return code %s' % (
                    engine, p.returncode)
Example #16
    def handle_page(self, pagename, ctx, templatename='page.html',
                    outfilename=None, event_arg=None):
        ctx['current_page_name'] = pagename
        self.add_sidebars(pagename, ctx)

        if not outfilename:
            outfilename = path.join(self.outdir,
                                    os_path(pagename) + self.out_suffix)

        self.app.emit('html-page-context', pagename, templatename,
                      ctx, event_arg)

        ensuredir(path.dirname(outfilename))
        f = open(outfilename, 'wb')
        try:
            self.implementation.dump(ctx, f, 2)
        finally:
            f.close()

        # if there is a source file, copy the source file for the
        # "show source" link
        if ctx.get('sourcename'):
            source_name = path.join(self.outdir, '_sources',
                                    os_path(ctx['sourcename']))
            ensuredir(path.dirname(source_name))
            copyfile(self.env.doc2path(pagename), source_name)
Example #17
def get_image_filename(self, uri):
    """
    Get paths of output file.

    Args:
        uri: The URI of the source Inkscape file

    Returns:
        A 2-tuple containing two paths.  The first is a relative URI which can
        be used in the output HTML to refer to the produced image file. The
        second is an absolute path to which the generated image should be
        rendered.
    """
    uri_dirname, uri_filename = os.path.split(uri)
    uri_basename, uri_ext = os.path.splitext(uri_filename)
    fname = "{0}.svg".format(uri_basename)

    log.info("fname = {0}".format(fname))

    if hasattr(self.builder, "imgpath"):
        # HTML
        refer_path = posixpath.join(self.builder.imgpath, fname)
        render_path = os.path.join(self.builder.outdir, "_images", fname)
    else:
        # LaTeX
        refer_path = fname
        render_path = os.path.join(self.builder.outdir, fname)

    if os.path.isfile(render_path):
        return refer_path, render_path

    ensuredir(os.path.dirname(render_path))

    return refer_path, render_path
Example #18
    def render_to_file(self, file_name, template_name, context):
        """Render a template file to a file

        Ensures that target directories exist and only writes
        the file if the content has changed.

        Args:
          file_name: Target file name
          template_name: Name of template file
          context: dictionary to pass to jinja

        Returns:
          True if a file was written
        """
        content = self.render(template_name, {**self.extra_context, **context})

        # skip if exists and unchanged:
        if os.path.exists(file_name):
            with open(file_name, encoding="utf-8") as filedes:
                if filedes.read() == content:
                    return False  # unchanged

        ensuredir(op.dirname(file_name))
        with open(file_name, "w", encoding="utf-8") as filedes:
            filedes.write(content)
        return True
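A hedged usage sketch (the generator object, paths, and template name are made up for illustration): because the method creates missing parent directories and skips identical content, it is safe to call on every build:

changed = generator.render_to_file(
    os.path.join(outdir, 'api', 'mypackage.core.rst'),  # target file
    'module.rst_t',                                      # Jinja template name
    {'module': 'mypackage.core'},                        # merged with extra_context
)
if changed:
    print('regenerated mypackage.core.rst')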
Example #19
def copy_asset(source, destination, excluded=lambda path: False, context=None, renderer=None):
    """Copy asset files to destination recursively.

    On copying, it expands the template variables if context argument is given and
    the asset is a template file.

    :param source: The path to source file or directory
    :param destination: The path to destination directory
    :param excluded: The matcher to determine the given path should be copied or not
    :param context: The template variables.  If not given, template files are simply copied
    :param renderer: The template engine.  If not given, SphinxRenderer is used by default
    """
    if not os.path.exists(source):
        return

    ensuredir(destination)
    if os.path.isfile(source):
        copy_asset_file(source, destination, context, renderer)
        return

    for root, dirs, files in walk(source):
        reldir = relative_path(source, root)
        for dir in dirs[:]:
            if excluded(posixpath.join(reldir, dir)):
                dirs.remove(dir)
            else:
                ensuredir(posixpath.join(destination, reldir, dir))

        for filename in files:
            if not excluded(posixpath.join(reldir, filename)):
                copy_asset_file(posixpath.join(root, filename),
                                posixpath.join(destination, reldir),
                                context, renderer)
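Judging only from the signature above, a typical call might copy a theme's static directory into the build output, skipping dotfiles and expanding template files with a context (paths are illustrative):

copy_asset(
    os.path.join(theme_dir, 'static'),                 # source file or directory
    os.path.join(outdir, '_static'),                   # destination directory
    excluded=lambda relpath: relpath.startswith('.'),  # matcher: True means skip
    context={'project': 'demo'},                       # enables template expansion
)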
Example #20
    def convert(self, filename, to, sheetname=None):
        try:
            tmpdir = mkdtemp()
            self.extract(filename, tmpdir)

            subdirname = os.path.splitext(os.path.basename(filename))[0]
            imagedir = os.path.join(tmpdir, subdirname)
            if sheetname:
                target = os.path.join(imagedir, sheetname + '.png')
            else:
                target = os.path.join(imagedir, os.listdir(imagedir)[0])  # first item in archive

            if os.path.exists(target):
                ensuredir(os.path.dirname(to))
                copyfile(target, to)
                return True
            else:
                self.warn('Fail to convert astah image: unknown sheet [%s]' % sheetname)
                return False
        except AstahException:
            return False
        except Exception as exc:
            self.warn('Fail to convert astah image: %s' % exc)
            return False
        finally:
            rmtree(tmpdir, ignore_errors=True)
Example #21
    def write(self, build_docnames, update_docnames, method='update'):
        # TODO: only rebuild updated?
        self.prepare_writing(build_docnames)

        def build_doc(docname):
            doctree = self.env.get_doctree(docname)
            doctree.settings = self.docsettings
            destination = io.StringOutput(encoding='utf-8')
            self.docwriter.write(doctree, destination)
            self.docwriter.assemble_parts()
            return self.docwriter.parts['fragment']

        def build_context(docname):
            body = build_doc(docname)
            filename = docname + self.app.config.source_suffix
            commit = get_git_commit(filename) 
            return os.path.basename(docname), dict(
                    body=body,
                    author=commit.author,
                    time=commit.author_time,
                    sha=commit.id,
                    filename=filename,
                    docname=docname)

        self.info('building comments...')
        context = dict(build_context(name) for name in build_docnames)
        self.info('doing stuff... ')

        # TODO: get docname from config
        outfilename = os.path.join(self.outdir, 'comments.json')
        osutil.ensuredir(os.path.dirname(outfilename))
        self.dump_context(context, outfilename)
Example #22
 def install_lightbox_static_files(app):
     source_static_path = os.path.join(app.builder.srcdir, '_static')
     target_static_path = os.path.join(app.builder.outdir, '_static')
     source_lightbox_path = os.path.join(source_static_path, 'lightbox2')
     target_lightbox_path = os.path.join(target_static_path, 'lightbox2')
     relative_file_paths = []
     for root, _, file_names in os.walk(source_lightbox_path):
         for file_name in file_names:
             absolute_file_path = os.path.join(root, file_name)
             relative_file_path = os.path.relpath(
                 absolute_file_path,
                 source_static_path,
                 )
             relative_file_paths.append(relative_file_path)
     if os.path.exists(target_lightbox_path):
         shutil.rmtree(target_lightbox_path)
     for relative_file_path in app.builder.status_iterator(
         relative_file_paths,
         'installing lightbox files... ',
         brown,
         len(relative_file_paths),
         ):
         source_path = os.path.join(source_static_path, relative_file_path)
         target_path = os.path.join(target_static_path, relative_file_path)
         target_directory = os.path.dirname(target_path)
         if not os.path.exists(target_directory):
             ensuredir(target_directory)
         copyfile(source_path, target_path)
         if relative_file_path.endswith('.js'):
             app.add_javascript(relative_file_path)
         elif relative_file_path.endswith('.css'):
             app.add_stylesheet(relative_file_path)
Example #23
    def output_rst(self, root, source_suffix):
        if not self.objects:
            raise ExtensionError("No API objects exist. Can't continue")
        for id, obj in self.objects.items():

            if not obj or not obj.top_level_object:
                continue

            rst = obj.render()
            if not rst:
                continue

            detail_dir = os.path.join(root, obj.pathname)
            ensuredir(detail_dir)
            path = os.path.join(detail_dir, "%s%s" % ("index", source_suffix))
            with open(path, "wb") as detail_file:
                detail_file.write(rst.encode("utf-8"))

        # Render Top Index
        top_level_index = os.path.join(root, "index.rst")
        with open(top_level_index, "wb") as top_level_file:
            content = self.jinja_env.get_template("index.rst")
            top_level_file.write(
                content.render(pages=self.namespaces.values()).encode("utf-8")
            )
Example #24
    def visit(self, docname, image_node):
        rel_imagedir, abs_imagedir = get_imagedir(self.app, docname)
        basename = self.get_filename_for(image_node)
        if URI_PATTERN.match(image_node['uri']):
            srcpath = image_node['uri']
        else:
            srcpath = os.path.join(self.app.srcdir, image_node['uri'])
        abs_imgpath = os.path.join(abs_imagedir, basename)

        last_modified = self.get_last_modified_for(image_node)
        if last_modified is None:
            ret = False
        elif not os.path.exists(abs_imgpath) or os.stat(abs_imgpath).st_mtime < last_modified:
            ensuredir(os.path.dirname(abs_imgpath))
            ret = self.convert(image_node,
                               os.path.normpath(srcpath),
                               os.path.normpath(abs_imgpath))
        else:
            ret = True

        if ret:
            if last_modified is not None and os.path.exists(abs_imgpath):
                os.utime(abs_imgpath, (last_modified, last_modified))

            rel_imgpath = posixpath.join(rel_imagedir, basename)
            newnode = nodes.image(**image_node.attributes)
            newnode['candidates'] = {'*': rel_imgpath}
            newnode['uri'] = rel_imgpath
            image_node.replace_self(newnode)
        else:
            image_node.parent.remove(image_node)
Example #25
 def copy_image_files_pil(self):
     """Copy images using the PIL.
     The method tries to read and write the files with the PIL,
     converting the format and resizing the image if necessary/possible.
     """
     ensuredir(path.join(self.outdir, self.imagedir))
     for src in self.app.status_iterator(self.images, "copying images... ", brown, len(self.images)):
         dest = self.images[src]
         try:
             img = Image.open(path.join(self.srcdir, src))
         except IOError:
             if not self.is_vector_graphics(src):
                 self.warn("cannot read image file %r: copying it instead" % (path.join(self.srcdir, src),))
             try:
                 copyfile(path.join(self.srcdir, src), path.join(self.outdir, self.imagedir, dest))
             except (IOError, OSError) as err:
                 self.warn("cannot copy image file %r: %s" % (path.join(self.srcdir, src), err))
             continue
         if self.config.epub_fix_images:
             if img.mode in ("P",):
                 # See PIL documentation for Image.convert()
                 img = img.convert()
         if self.config.epub_max_image_width > 0:
             (width, height) = img.size
             nw = self.config.epub_max_image_width
             if width > nw:
                 nh = (height * nw) // width
                 img = img.resize((nw, nh), Image.BICUBIC)
         try:
             img.save(path.join(self.outdir, self.imagedir, dest))
         except (IOError, OSError) as err:
             self.warn("cannot write image file %r: %s" % (path.join(self.srcdir, src), err))
Example #26
def install_backend_static_files(app, env):
    STATICS_DIR_PATH = os.path.join(app.builder.outdir, STATICS_DIR_NAME)
    dest_path = os.path.join(STATICS_DIR_PATH, 'sphinxcontrib-images',
                             app.sphinxcontrib_images_backend.__class__.__name__)
    files_to_copy = app.sphinxcontrib_images_backend.STATIC_FILES

    for source_file_path in app.builder.status_iterator(
        files_to_copy,
        'Copying static files for sphinxcontrib-images...',
        brown, len(files_to_copy)):

        dest_file_path = os.path.join(dest_path, source_file_path)

        if not os.path.exists(os.path.dirname(dest_file_path)):
            ensuredir(os.path.dirname(dest_file_path))

        source_file_path = os.path.join(os.path.dirname(
            sys.modules[app.sphinxcontrib_images_backend.__class__.__module__].__file__),
            source_file_path)

        copyfile(source_file_path, dest_file_path)

        if dest_file_path.endswith('.js'):
            app.add_javascript(os.path.relpath(dest_file_path, STATICS_DIR_PATH))
        elif dest_file_path.endswith('.css'):
            app.add_stylesheet(os.path.relpath(dest_file_path, STATICS_DIR_PATH))
Example #27
    def write_doc(self, docname, doctree):
        self.current_docname = docname
        destination = StringOutput(encoding='utf-8')
        self.writer.write(doctree, destination)
        outfilename = path.join(self.outdir, os_path(docname) + self.out_suffix)
        ensuredir(path.dirname(outfilename))
        try:
            f = codecs.open(outfilename, 'w', 'utf-8')
            try:
		f.write("#rst2hooktail_source" + (linesep*2))
		for link in self.writer.links:
		  f.write(".. _%s: %s%s" % (link.children[0].astext(), link['refuri'], linesep))
                f.write(linesep)

                f.write(self.writer.output)
		f.write("@@author:%s@@%s" % (self.config.copyright[6:], linesep))
		f.write("@@accept:%s@@%s" % (ustrftime("%Y-%m-%d"), linesep))
		relations = self.env.collect_relations().get(docname)
		if relations and relations[0] and relations[0] != "index":
		  f.write("@@category:%s@@%s" % (self.categories[relations[0]], linesep))
		f.write("@@id:%s@@%s" % (docname.split('/')[-1], linesep))
            finally:
                f.close()
        except (IOError, OSError) as err:
            self.warn("error writing file %s: %s" % (outfilename, err))
Example #28
 def write_doc(self, docname, doctree):
     # work around multiple string % tuple issues in docutils;
     # replace tuples in attribute values with lists
     doctree = doctree.deepcopy()
     for node in doctree.traverse(nodes.Element):
         for att, value in list(node.attributes.items()):
             if isinstance(value, tuple):
                 node.attributes[att] = list(value)
             value = node.attributes[att]
             if isinstance(value, list):
                 for i, val in enumerate(value):
                     if isinstance(val, tuple):
                         value[i] = list(val)
     destination = StringOutput(encoding='utf-8')
     self.writer.write(doctree, destination)
     outfilename = path.join(self.outdir, os_path(docname) + self.out_suffix)
     ensuredir(path.dirname(outfilename))
     try:
         f = codecs.open(outfilename, 'w', 'utf-8')
         try:
             f.write(self.writer.output)
         finally:
             f.close()
     except (IOError, OSError) as err:
         self.warn("error writing file %s: %s" % (outfilename, err))
Example #29
    def handle_page(self, pagename, addctx, templatename='page.html',
                    outfilename=None, event_arg=None):
        ctx = self.globalcontext.copy()
        # current_page_name is backwards compatibility
        ctx['pagename'] = ctx['current_page_name'] = pagename
        default_baseuri = self.get_target_uri(pagename)
        # in the singlehtml builder, default_baseuri still contains an #anchor
        # part, which relative_uri doesn't really like...
        default_baseuri = default_baseuri.rsplit('#', 1)[0]

        def pathto(otheruri, resource=False, baseuri=default_baseuri):
            if resource and '://' in otheruri:
                # allow non-local resources given by scheme
                return otheruri
            elif not resource:
                otheruri = self.get_target_uri(otheruri)
            uri = relative_uri(baseuri, otheruri) or '#'
            return uri
        ctx['pathto'] = pathto
        ctx['hasdoc'] = lambda name: name in self.env.all_docs
        if self.name != 'htmlhelp':
            ctx['encoding'] = encoding = self.config.html_output_encoding
        else:
            ctx['encoding'] = encoding = self.encoding
        ctx['toctree'] = lambda **kw: self._get_local_toctree(pagename, **kw)
        self.add_sidebars(pagename, ctx)
        ctx.update(addctx)

        newtmpl = self.app.emit_firstresult('html-page-context', pagename,
                                            templatename, ctx, event_arg)
        if newtmpl:
            templatename = newtmpl

        try:
            output = self.templates.render(templatename, ctx)
        except UnicodeError:
            self.warn("a Unicode error occurred when rendering the page %s. "
                      "Please make sure all config values that contain "
                      "non-ASCII content are Unicode strings." % pagename)
            return

        if not outfilename:
            outfilename = self.get_outfilename(pagename)
        # outfilename's path is in general different from self.outdir
        ensuredir(path.dirname(outfilename))
        try:
            f = codecs.open(outfilename, 'w', encoding, 'xmlcharrefreplace')
            try:
                f.write(output)
            finally:
                f.close()
        except (IOError, OSError) as err:
            self.warn("error writing file %s: %s" % (outfilename, err))
        if self.copysource and ctx.get('sourcename'):
            # copy the source file for the "show source" link
            source_name = path.join(self.outdir, '_sources',
                                    os_path(ctx['sourcename']))
            ensuredir(path.dirname(source_name))
            copyfile(self.env.doc2path(pagename), source_name)
Example #30
def render_dot(self, code, options, format, prefix='graphviz'):
    # type: (nodes.NodeVisitor, unicode, Dict, unicode, unicode) -> Tuple[unicode, unicode]
    """Render graphviz code into a PNG or PDF output file."""
    graphviz_dot = options.get('graphviz_dot', self.builder.config.graphviz_dot)
    hashkey = (code + str(options) + str(graphviz_dot) +
               str(self.builder.config.graphviz_dot_args)).encode('utf-8')

    fname = '%s-%s.%s' % (prefix, sha1(hashkey).hexdigest(), format)
    relfn = posixpath.join(self.builder.imgpath, fname)
    outfn = path.join(self.builder.outdir, self.builder.imagedir, fname)

    if path.isfile(outfn):
        return relfn, outfn

    if (hasattr(self.builder, '_graphviz_warned_dot') and
       self.builder._graphviz_warned_dot.get(graphviz_dot)):
        return None, None

    ensuredir(path.dirname(outfn))

    # graphviz expects UTF-8 by default
    if isinstance(code, text_type):
        code = code.encode('utf-8')

    dot_args = [graphviz_dot]
    dot_args.extend(self.builder.config.graphviz_dot_args)
    dot_args.extend(['-T' + format, '-o' + outfn])
    if format == 'png':
        dot_args.extend(['-Tcmapx', '-o%s.map' % outfn])
    try:
        p = Popen(dot_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    except OSError as err:
        if err.errno != ENOENT:   # No such file or directory
            raise
        logger.warning(__('dot command %r cannot be run (needed for graphviz '
                          'output), check the graphviz_dot setting'), graphviz_dot)
        if not hasattr(self.builder, '_graphviz_warned_dot'):
            self.builder._graphviz_warned_dot = {}
        self.builder._graphviz_warned_dot[graphviz_dot] = True
        return None, None
    try:
        # Graphviz may close standard input when an error occurs,
        # resulting in a broken pipe on communicate()
        stdout, stderr = p.communicate(code)
    except (OSError, IOError) as err:
        if err.errno not in (EPIPE, EINVAL):
            raise
        # in this case, read the standard output and standard error streams
        # directly, to get the error message(s)
        stdout, stderr = p.stdout.read(), p.stderr.read()
        p.wait()
    if p.returncode != 0:
        raise GraphvizError(__('dot exited with error:\n[stderr]\n%s\n'
                               '[stdout]\n%s') % (stderr, stdout))
    if not path.isfile(outfn):
        raise GraphvizError(__('dot did not produce an output file:\n[stderr]\n%s\n'
                               '[stdout]\n%s') % (stderr, stdout))
    return relfn, outfn
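A caller in an HTML visitor would look roughly like this (a sketch of the usual pattern; the surrounding visitor class and imports are assumed, not shown above):

fname, outfn = render_dot(self, code, options, 'png')
if fname is None:
    # dot was missing; a warning has already been emitted
    raise nodes.SkipNode
self.body.append('<img src="%s" alt="graphviz diagram" />\n' % fname)
raise nodes.SkipNode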
Example #31
def configure_backend(app):
    global DEFAULT_CONFIG

    config = copy.deepcopy(DEFAULT_CONFIG)
    config.update(app.config.images_config)
    app.config.images_config = config

    ensuredir(os.path.join(app.env.srcdir, config['cache_path']))

    # html builder
    # self.relfn2path(imguri, docname)

    backend_name_or_callable = config['backend']
    if isinstance(backend_name_or_callable, str):
        try:
            backend = list(
                pkg_resources.iter_entry_points(
                    group='sphinxcontrib.images.backend',
                    name=backend_name_or_callable))[0]
            backend = backend.load()
        except IndexError:
            raise IndexError(
                "Cannot find sphinxcontrib-images backend "
                "with name `{}`.".format(backend_name_or_callable))
    elif callable(backend_name_or_callable):
        pass
    else:
        raise TypeError(
            "sphinxcontrib-images backend is configured "
            "improperly. It has to be a string (name of "
            "installed backend) or callable which returns "
            "backend instance but is `{}` (type:`{}`). Please read "
            "sphinxcontrib-images documentation for "
            "more informations.".format(backend_name_or_callable,
                                        type(backend_name_or_callable)))

    try:
        backend = backend(app)
    except TypeError as error:
        app.info(
            'Cannot instantiate sphinxcontrib-images backend `{}`. '
            'Please, select correct backend. Available backends: {}.'.format(
                config['backend'],
                ', '.join(ep.name for ep in pkg_resources.iter_entry_points(
                    group='sphinxcontrib.images.backend'))))
        raise SystemExit(1)

    # remember the chosen backend for processing; env and config cannot be used
    # because Sphinx tries to pickle them.
    app.sphinxcontrib_images_backend = backend

    app.info('Initiated sphinxcontrib-images backend: ', nonl=True)
    app.info('`{}`'.format(
        str(backend.__class__.__module__ + ':' + backend.__class__.__name__)))

    def backend_methods(node, output_type):
        def backend_method(f):
            @functools.wraps(f)
            def inner_wrapper(writer, node):
                return f(writer, node)

            return inner_wrapper

        signature = '_{}_{}'.format(node.__name__, output_type)
        return (backend_method(
            getattr(backend, 'visit' + signature,
                    getattr(backend, 'visit_' + node.__name__ + '_fallback'))),
                backend_method(
                    getattr(
                        backend, 'depart' + signature,
                        getattr(backend,
                                'depart_' + node.__name__ + '_fallback'))))

    # add new node to the stack
    # connect backend processing methods to this node
    app.add_node(
        image_node, **{
            output_type: backend_methods(image_node, output_type)
            for output_type in ('html', 'latex', 'man', 'texinfo', 'text',
                                'epub')
        })

    app.add_directive('thumbnail', ImageDirective)
    if config['override_image_directive']:
        app.add_directive('image', ImageDirective)
    app.env.remote_images = {}
Example #32
def render_ditaa(self, code, options, prefix='ditaa'):
    """Render ditaa code into a PNG output file."""
    hashkey = code.encode('utf-8') + str(options).encode('utf-8') + \
              str(self.builder.config.ditaa).encode('utf-8') + \
              str(self.builder.config.ditaa_args).encode('utf-8')
    infname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), "ditaa")
    outfname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), "png")

    imgpath = self.builder.imgpath if hasattr(self.builder, 'imgpath') else ''
    inrelfn = posixpath.join(imgpath, infname)
    infullfn = path.join(self.builder.outdir, '_images', infname)
    outrelfn = posixpath.join(imgpath, outfname)
    outfullfn = path.join(self.builder.outdir, '_images', outfname)

    if path.isfile(outfullfn):
        return outrelfn, outfullfn

    ensuredir(path.dirname(outfullfn))

    # ditaa expects UTF-8 by default
    # In Python 3, all strings are sequences of Unicode characters.
    if sys.version_info < (3, 0):
        if isinstance(code, unicode):
            code = code.encode('utf-8')
    else:
        if isinstance(code, str):
            code = code.encode('utf-8')

    ditaa_args = [self.builder.config.ditaa]
    ditaa_args.extend(self.builder.config.ditaa_args)
    ditaa_args.extend(options)
    ditaa_args.extend([infullfn])
    ditaa_args.extend([outfullfn])

    f = open(infullfn, 'wb')
    f.write(code)
    f.close()

    try:
        p = Popen(ditaa_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    except OSError as err:
        if err.errno != ENOENT:  # No such file or directory
            raise
        self.builder.warn('ditaa command %r cannot be run (needed for ditaa '
                          'output), check the ditaa setting' %
                          self.builder.config.ditaa)
        self.builder._ditaa_warned_dot = True
        return None, None
    wentWrong = False
    try:
        # Ditaa may close standard input when an error occurs,
        # resulting in a broken pipe on communicate()
        stdout, stderr = p.communicate(code)
    except OSError as err:
        if err.errno != EPIPE:
            raise
        wentWrong = True
    except IOError as err:
        if err.errno != EINVAL:
            raise
        wentWrong = True
    if wentWrong:
        # in this case, read the standard output and standard error streams
        # directly, to get the error message(s)
        stdout, stderr = p.stdout.read(), p.stderr.read()
        p.wait()
    if p.returncode != 0:
        raise DitaaError('ditaa exited with error:\n[stderr]\n%s\n'
                         '[stdout]\n%s' % (stderr, stdout))
    return outrelfn, outfullfn
Example #33
def add_notebook_outputs(env, ntbk, file_path=None):
    """
    Add outputs to a NotebookNode by pulling from cache.

    Gets the cache database instance, fetches the cached output of the notebook,
    and merges it with the original notebook. If there is no cached output,
    checks whether there was an error during execution and, if so, saves the
    traceback to a log file.
    """
    # If we have a jupyter_cache, see if there's a cache for this notebook
    file_path = file_path or env.doc2path(env.docname)
    dest_path = Path(env.app.outdir)
    reports_dir = str(dest_path) + "/reports"
    path_cache = False

    if not is_valid_exec_file(env, env.docname):
        return ntbk

    if "cache" in env.config["jupyter_execute_notebooks"]:
        path_cache = env.path_cache

    if not path_cache:
        if "off" not in env.config["jupyter_execute_notebooks"]:
            has_outputs = _read_nb_output_cells(
                file_path, env.config["jupyter_execute_notebooks"]
            )
            if not has_outputs:
                LOGGER.info("Executing: {}".format(env.docname))
                ntbk = execute(ntbk)
            else:
                LOGGER.info(
                    "Did not execute {}. "
                    "Set jupyter_execute_notebooks to `force` to execute".format(
                        env.docname
                    )
                )
        return ntbk

    cache_base = get_cache(path_cache)
    # Use relpath here in case Sphinx is building from a non-parent folder
    r_file_path = Path(os.path.relpath(file_path, Path().resolve()))

    try:
        _, ntbk = cache_base.merge_match_into_notebook(ntbk)
    except KeyError:
        message = (
            f"Couldn't find cache key for notebook file {str(r_file_path)}. "
            "Outputs will not be inserted."
        )
        try:
            stage_record = cache_base.get_staged_record(file_path)
        except KeyError:
            stage_record = None
        if stage_record and stage_record.traceback:
            # save the traceback to a log file
            ensuredir(reports_dir)
            file_name = os.path.splitext(r_file_path.name)[0]
            full_path = reports_dir + "/{}.log".format(file_name)
            with open(full_path, "w") as log_file:
                log_file.write(stage_record.traceback)
            message += "\n  Last execution failed with traceback saved in {}".format(
                full_path
            )

        LOGGER.error(message)

        # This is a 'fix' for jupyter_sphinx, which requires this value for dumping the
        # script file, to stop it from raising an exception if not found:
        # Normally it would be added from the executed notebook but,
        # since we are already logging an error, we don't want to block the whole build.
        # So here we just add a dummy .txt extension
        if "language_info" not in ntbk.metadata:
            ntbk.metadata["language_info"] = nbf.from_dict({"file_extension": ".txt"})
    else:
        LOGGER.verbose("Merged cached outputs into %s", str(r_file_path))

    return ntbk
Example #34
def generate_autosummary_docs(sources: List[str],
                              output_dir: str = None,
                              suffix: str = '.rst',
                              warn: Callable = None,
                              info: Callable = None,
                              base_path: str = None,
                              builder: Builder = None,
                              template_dir: str = None,
                              imported_members: bool = False,
                              app: Any = None,
                              overwrite: bool = True,
                              encoding: str = 'utf-8') -> None:
    if info:
        warnings.warn(
            'info argument for generate_autosummary_docs() is deprecated.',
            RemovedInSphinx40Warning,
            stacklevel=2)
        _info = info
    else:
        _info = logger.info

    if warn:
        warnings.warn(
            'warn argument for generate_autosummary_docs() is deprecated.',
            RemovedInSphinx40Warning,
            stacklevel=2)
        _warn = warn
    else:
        _warn = logger.warning

    if builder:
        warnings.warn(
            'builder argument for generate_autosummary_docs() is deprecated.',
            RemovedInSphinx50Warning,
            stacklevel=2)

    if template_dir:
        warnings.warn(
            'template_dir argument for generate_autosummary_docs() is deprecated.',
            RemovedInSphinx50Warning,
            stacklevel=2)

    showed_sources = list(sorted(sources))
    if len(showed_sources) > 20:
        showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
    _info(
        __('[autosummary] generating autosummary for: %s') %
        ', '.join(showed_sources))

    if output_dir:
        _info(__('[autosummary] writing to %s') % output_dir)

    if base_path is not None:
        sources = [os.path.join(base_path, filename) for filename in sources]

    template = AutosummaryRenderer(app)

    # read
    items = find_autosummary_in_files(sources)

    # keep track of new files
    new_files = []

    if app:
        filename_map = app.config.autosummary_filename_map
    else:
        filename_map = {}

    # write
    for entry in sorted(set(items), key=str):
        if entry.path is None:
            # The corresponding autosummary:: directive did not have
            # a :toctree: option
            continue

        path = output_dir or os.path.abspath(entry.path)
        ensuredir(path)

        try:
            name, obj, parent, modname = import_by_name(entry.name)
            qualname = name.replace(modname + ".", "")
        except ImportError as e:
            try:
                # try to import as an instance attribute instead
                name, obj, parent, modname = import_ivar_by_name(entry.name)
                qualname = name.replace(modname + ".", "")
            except ImportError:
                _warn(
                    __('[autosummary] failed to import %r: %s') %
                    (entry.name, e))
                continue

        context = {}
        if app:
            context.update(app.config.autosummary_context)

        content = generate_autosummary_content(name, obj, parent, template,
                                               entry.template,
                                               imported_members, app,
                                               entry.recursive, context,
                                               modname, qualname)

        filename = os.path.join(path, filename_map.get(name, name) + suffix)
        if os.path.isfile(filename):
            with open(filename, encoding=encoding) as f:
                old_content = f.read()

            if content == old_content:
                continue
            elif overwrite:  # content has changed
                with open(filename, 'w', encoding=encoding) as f:
                    f.write(content)
                new_files.append(filename)
        else:
            with open(filename, 'w', encoding=encoding) as f:
                f.write(content)
            new_files.append(filename)

    # descend recursively to new files
    if new_files:
        generate_autosummary_docs(new_files,
                                  output_dir=output_dir,
                                  suffix=suffix,
                                  warn=warn,
                                  info=info,
                                  base_path=base_path,
                                  imported_members=imported_members,
                                  app=app,
                                  overwrite=overwrite)
Example #35
            outfilename = self.get_outfilename(pagename)
        # outfilename's path is in general different from self.outdir
        ensuredir(path.dirname(outfilename))
        try:
            f = codecs.open(outfilename, 'w', encoding, 'xmlcharrefreplace')
            try:
                f.write(output)
            finally:
                f.close()
        except (IOError, OSError) as err:
            self.warn("error writing file %s: %s" % (outfilename, err))
        if self.copysource and ctx.get('sourcename'):
            # copy the source file for the "show source" link
            source_name = path.join(self.outdir, '_sources',
                                    os_path(ctx['sourcename']))
            ensuredir(path.dirname(source_name))
            copyfile(self.env.doc2path(pagename), source_name)

    def handle_finish(self):
        self.dump_search_index()
        self.dump_inventory()

    def dump_inventory(self):
        self.info(bold('dumping object inventory... '), nonl=True)
        f = open(path.join(self.outdir, INVENTORY_FILENAME), 'wb')
        try:
            f.write((u'# Sphinx inventory version 2\n'
                     u'# Project: %s\n'
                     u'# Version: %s\n'
                     u'# The remainder of this file is compressed using zlib.\n'
                     % (self.config.project, self.config.version)
Example #36
def main(argv=sys.argv[1:]):
    # type: (List[str]) -> int
    """Parse and check the command line arguments."""
    locale.setlocale(locale.LC_ALL, '')
    sphinx.locale.init_console(os.path.join(package_dir, 'locale'), 'sphinx')

    parser = get_parser()
    args = parser.parse_args(argv)

    rootpath = path.abspath(args.module_path)

    # normalize opts

    if args.header is None:
        args.header = rootpath.split(path.sep)[-1]
    if args.suffix.startswith('.'):
        args.suffix = args.suffix[1:]
    if not path.isdir(rootpath):
        print(__('%s is not a directory.') % rootpath, file=sys.stderr)
        sys.exit(1)
    if not args.dryrun:
        ensuredir(args.destdir)
    excludes = [path.abspath(exclude) for exclude in args.exclude_pattern]
    modules = recurse_tree(rootpath, excludes, args)

    if args.full:
        from sphinx.cmd import quickstart as qs
        modules.sort()
        prev_module = ''  # type: unicode
        text = ''
        for module in modules:
            if module.startswith(prev_module + '.'):
                continue
            prev_module = module
            text += '   %s\n' % module
        d = {
            'path': args.destdir,
            'sep': False,
            'dot': '_',
            'project': args.header,
            'author': args.author or 'Author',
            'version': args.version or '',
            'release': args.release or args.version or '',
            'suffix': '.' + args.suffix,
            'master': 'index',
            'epub': True,
            'extensions': ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.todo'],
            'makefile': True,
            'batchfile': True,
            'make_mode': True,
            'mastertocmaxdepth': args.maxdepth,
            'mastertoctree': text,
            'language': 'en',
            'module_path': rootpath,
            'append_syspath': args.append_syspath,
        }
        if args.extensions:
            d['extensions'].extend(args.extensions)

        if not args.dryrun:
            qs.generate(d, silent=True, overwrite=args.force)
    elif args.tocfile:
        create_modules_toc_file(modules, args, args.tocfile)

    return 0
Example #37
    def read_doc(self, docname, app=None):
        """Parse a file and add/update inventory entries for the doctree."""

        self.temp_data['docname'] = docname
        # defaults to the global default, but can be re-set in a document
        self.temp_data['default_domain'] = \
            self.domains.get(self.config.primary_domain)

        self.settings['input_encoding'] = self.config.source_encoding
        self.settings['trim_footnote_reference_space'] = \
            self.config.trim_footnote_reference_space
        self.settings['gettext_compact'] = self.config.gettext_compact

        docutilsconf = path.join(self.srcdir, 'docutils.conf')
        # read docutils.conf from source dir, not from current dir
        OptionParser.standard_config_files[1] = docutilsconf
        if path.isfile(docutilsconf):
            self.note_dependency(docutilsconf)

        with sphinx_domains(self):
            if self.config.default_role:
                role_fn, messages = roles.role(self.config.default_role, english,
                                               0, dummy_reporter)
                if role_fn:
                    roles._roles[''] = role_fn
                else:
                    self.warn(docname, 'default role %s not found' %
                              self.config.default_role)

            codecs.register_error('sphinx', self.warn_and_replace)

            # publish manually
            reader = SphinxStandaloneReader(self.app, parsers=self.config.source_parsers)
            pub = Publisher(reader=reader,
                            writer=SphinxDummyWriter(),
                            destination_class=NullOutput)
            pub.set_components(None, 'restructuredtext', None)
            pub.process_programmatic_settings(None, self.settings, None)
            src_path = self.doc2path(docname)
            source = SphinxFileInput(app, self, source=None, source_path=src_path,
                                     encoding=self.config.source_encoding)
            pub.source = source
            pub.settings._source = src_path
            pub.set_destination(None, None)
            pub.publish()
            doctree = pub.document

        # post-processing
        self.process_dependencies(docname, doctree)
        self.process_images(docname, doctree)
        self.process_downloads(docname, doctree)
        self.process_metadata(docname, doctree)
        self.create_title_from(docname, doctree)
        for manager in itervalues(self.managers):
            manager.process_doc(docname, doctree)
        for domain in itervalues(self.domains):
            domain.process_doc(self, docname, doctree)

        # allow extension-specific post-processing
        if app:
            app.emit('doctree-read', doctree)

        # store time of reading, for outdated files detection
        # (Some filesystems have coarse timestamp resolution;
        # therefore time.time() can be older than filesystem's timestamp.
        # For example, FAT32 has 2sec timestamp resolution.)
        self.all_docs[docname] = max(
            time.time(), path.getmtime(self.doc2path(docname)))

        if self.versioning_condition:
            old_doctree = None
            if self.versioning_compare:
                # get old doctree
                try:
                    with open(self.doc2path(docname,
                                            self.doctreedir, '.doctree'), 'rb') as f:
                        old_doctree = pickle.load(f)
                except EnvironmentError:
                    pass

            # add uids for versioning
            if not self.versioning_compare or old_doctree is None:
                list(add_uids(doctree, self.versioning_condition))
            else:
                list(merge_doctrees(
                    old_doctree, doctree, self.versioning_condition))

        # make it picklable
        doctree.reporter = None
        doctree.transformer = None
        doctree.settings.warning_stream = None
        doctree.settings.env = None
        doctree.settings.record_dependencies = None

        # cleanup
        self.temp_data.clear()
        self.ref_context.clear()
        roles._roles.pop('', None)  # if a document has set a local default role

        # save the parsed doctree
        doctree_filename = self.doc2path(docname, self.doctreedir,
                                         '.doctree')
        ensuredir(path.dirname(doctree_filename))
        with open(doctree_filename, 'wb') as f:
            pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
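
The last few lines of this example show the idiom that recurs throughout the page: build the target filename, call `ensuredir()` on its parent directory, then open and write. A self-contained sketch of the same idiom (the `save_pickled` helper is hypothetical; plain `os.makedirs` stands in for `ensuredir`):

import os
import pickle

def save_pickled(obj, filename):
    # Hypothetical helper mirroring the tail of the example above: make sure
    # the parent directory exists, then open the target file and dump.
    os.makedirs(os.path.dirname(filename) or '.', exist_ok=True)
    with open(filename, 'wb') as f:
        pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)
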
Example #38
0
                raise
            self.builder.warn('LaTeX command %r cannot be run (needed for rail '
                              'display), check the rail_latex setting' %
                              self.builder.config.rail_latex)
            self.builder._railpng_warned_latex = True
            return None, None
    finally:
        chdir(curdir)
    print 'second run of latex', ltx_args

    stdout, stderr = p.communicate()
    if p.returncode != 0:
        raise RailExtError('latex exited with error:\n[stderr]\n%s\n'
                           '[stdout]\n%s' % (stderr, stdout))

    ensuredir(path.dirname(outfn))

    # Now, convert the image to a PNG file
    # use some standard dvipng arguments
    dvipng_args = [self.builder.config.rail_dvipng]
    dvipng_args += ['-o', outfn, '-T', 'tight', '-z9']
    # add custom ones from config value
    dvipng_args.extend(self.builder.config.rail_dvipng_args)
    # last, the input file name
    dvipng_args.append(path.join(tempdir, 'rail.dvi'))
    try:
        p = Popen(dvipng_args, stdout=PIPE, stderr=PIPE)
        print 'dvipng run', dvipng_args
    except OSError, err:
        if err.errno != ENOENT:   # No such file or directory
            raise
Example #39
0
    def read_doc(self, docname, app=None):
        # type: (unicode, Sphinx) -> None
        """Parse a file and add/update inventory entries for the doctree."""

        self.temp_data['docname'] = docname
        # defaults to the global default, but can be re-set in a document
        self.temp_data['default_role'] = self.config.default_role
        self.temp_data['default_domain'] = \
            self.domains.get(self.config.primary_domain)

        self.settings['input_encoding'] = self.config.source_encoding
        self.settings['trim_footnote_reference_space'] = \
            self.config.trim_footnote_reference_space
        self.settings['gettext_compact'] = self.config.gettext_compact

        language = self.config.language or 'en'
        self.settings['language_code'] = language
        if 'smart_quotes' not in self.settings:
            self.settings['smart_quotes'] = self.config.smartquotes
            if self.config.html_use_smartypants is not None:
                warnings.warn(
                    "html_use_smartypants option is deprecated. Smart "
                    "quotes are on by default; if you want to disable "
                    "them, use the smartquotes option.",
                    RemovedInSphinx17Warning)
                self.settings['smart_quotes'] = self.config.html_use_smartypants

            # some conditions exclude smart quotes, overriding smart_quotes
            for valname, vallist in iteritems(
                    self.config.smartquotes_excludes):
                if valname == 'builders':
                    # this will work only for checking first build target
                    if self.app.builder.name in vallist:
                        self.settings['smart_quotes'] = False
                        break
                elif valname == 'languages':
                    if self.config.language in vallist:
                        self.settings['smart_quotes'] = False
                        break

        # confirm selected language supports smart_quotes or not
        for tag in normalize_language_tag(language):
            if tag in smartchars.quotes:
                break
        else:
            self.settings['smart_quotes'] = False

        docutilsconf = path.join(self.srcdir, 'docutils.conf')
        # read docutils.conf from source dir, not from current dir
        OptionParser.standard_config_files[1] = docutilsconf
        if path.isfile(docutilsconf):
            self.note_dependency(docutilsconf)

        with sphinx_domains(self):
            if self.config.default_role:
                role_fn, messages = roles.role(self.config.default_role,
                                               english, 0, dummy_reporter)
                if role_fn:
                    roles._roles[''] = role_fn
                else:
                    logger.warning('default role %s not found',
                                   self.config.default_role,
                                   location=docname)

            codecs.register_error('sphinx',
                                  self.warn_and_replace)  # type: ignore

            # publish manually
            reader = SphinxStandaloneReader(
                self.app, parsers=self.app.registry.get_source_parsers())
            pub = Publisher(reader=reader,
                            writer=SphinxDummyWriter(),
                            destination_class=NullOutput)
            pub.set_components(None, 'restructuredtext', None)
            pub.process_programmatic_settings(None, self.settings, None)
            src_path = self.doc2path(docname)
            source = SphinxFileInput(app,
                                     self,
                                     source=None,
                                     source_path=src_path,
                                     encoding=self.config.source_encoding)
            pub.source = source
            pub.settings._source = src_path
            pub.set_destination(None, None)
            pub.publish()
            doctree = pub.document

        # post-processing
        for domain in itervalues(self.domains):
            domain.process_doc(self, docname, doctree)

        # allow extension-specific post-processing
        if app:
            app.emit('doctree-read', doctree)

        # store time of reading, for outdated files detection
        # (Some filesystems have coarse timestamp resolution;
        # therefore time.time() can be older than filesystem's timestamp.
        # For example, FAT32 has 2sec timestamp resolution.)
        self.all_docs[docname] = max(time.time(),
                                     path.getmtime(self.doc2path(docname)))

        if self.versioning_condition:
            old_doctree = None
            if self.versioning_compare:
                # get old doctree
                try:
                    with open(
                            self.doc2path(docname, self.doctreedir,
                                          '.doctree'), 'rb') as f:
                        old_doctree = pickle.load(f)
                except EnvironmentError:
                    pass

            # add uids for versioning
            if not self.versioning_compare or old_doctree is None:
                list(add_uids(doctree, self.versioning_condition))
            else:
                list(
                    merge_doctrees(old_doctree, doctree,
                                   self.versioning_condition))

        # make it picklable
        doctree.reporter = None
        doctree.transformer = None
        doctree.settings.warning_stream = None
        doctree.settings.env = None
        doctree.settings.record_dependencies = None

        # cleanup
        self.temp_data.clear()
        self.ref_context.clear()
        roles._roles.pop('', None)  # if a document has set a local default role

        # save the parsed doctree
        doctree_filename = self.doc2path(docname, self.doctreedir, '.doctree')
        ensuredir(path.dirname(doctree_filename))
        with open(doctree_filename, 'wb') as f:
            pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
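
One detail in this example that is easy to misread is the `for ... else` block deciding whether smart quotes stay enabled: the `else` branch runs only when the loop finishes without hitting `break`. A tiny sketch of the idiom with purely illustrative values:

supported = {'en', 'fr', 'de'}      # illustrative set of supported tags
candidates = ['en-us', 'en']        # illustrative normalized language tags

for tag in candidates:
    if tag in supported:
        print('keeping smart quotes for', tag)
        break
else:
    # runs only if the loop finished without break, i.e. no tag matched
    print('no supported tag found; disabling smart quotes')
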
Example #40
0
    def build_helpbook(self):
        # type: () -> None
        contents_dir = path.join(self.bundle_path, 'Contents')
        resources_dir = path.join(contents_dir, 'Resources')
        language_dir = path.join(resources_dir,
                                 self.config.applehelp_locale + '.lproj')

        for d in [contents_dir, resources_dir, language_dir]:
            ensuredir(d)

        # Construct the Info.plist file
        toc = self.config.master_doc + self.out_suffix

        info_plist = {
            'CFBundleDevelopmentRegion': self.config.applehelp_dev_region,
            'CFBundleIdentifier': self.config.applehelp_bundle_id,
            'CFBundleInfoDictionaryVersion': '6.0',
            'CFBundlePackageType': 'BNDL',
            'CFBundleShortVersionString': self.config.release,
            'CFBundleSignature': 'hbwr',
            'CFBundleVersion': self.config.applehelp_bundle_version,
            'HPDBookAccessPath': '_access.html',
            'HPDBookIndexPath': 'search.helpindex',
            'HPDBookTitle': self.config.applehelp_title,
            'HPDBookType': '3',
            'HPDBookUsesExternalViewer': False,
        }

        if self.config.applehelp_icon is not None:
            info_plist['HPDBookIconPath'] \
                = path.basename(self.config.applehelp_icon)

        if self.config.applehelp_kb_url is not None:
            info_plist['HPDBookKBProduct'] = self.config.applehelp_kb_product
            info_plist['HPDBookKBURL'] = self.config.applehelp_kb_url

        if self.config.applehelp_remote_url is not None:
            info_plist['HPDBookRemoteURL'] = self.config.applehelp_remote_url

        logger.info(bold('writing Info.plist... '), nonl=True)
        with open(path.join(contents_dir, 'Info.plist'), 'wb') as f:
            write_plist(info_plist, f)
        logger.info('done')

        # Copy the icon, if one is supplied
        if self.config.applehelp_icon:
            logger.info(bold('copying icon... '), nonl=True)

            try:
                copyfile(path.join(self.srcdir, self.config.applehelp_icon),
                         path.join(resources_dir, info_plist['HPDBookIconPath']))

                logger.info('done')
            except Exception as err:
                logger.warning('cannot copy icon file %r: %s',
                               path.join(self.srcdir, self.config.applehelp_icon), err)
                del info_plist['HPDBookIconPath']

        # Build the access page
        logger.info(bold('building access page...'), nonl=True)
        with codecs.open(path.join(language_dir, '_access.html'), 'w') as f:  # type: ignore
            f.write(access_page_template % {
                'toc': htmlescape(toc, quote=True),
                'title': htmlescape(self.config.applehelp_title)
            })
        logger.info('done')

        # Generate the help index
        logger.info(bold('generating help index... '), nonl=True)

        args = [
            self.config.applehelp_indexer_path,
            '-Cf',
            path.join(language_dir, 'search.helpindex'),
            language_dir
        ]

        if self.config.applehelp_index_anchors is not None:
            args.append('-a')

        if self.config.applehelp_min_term_length is not None:
            args += ['-m', '%s' % self.config.applehelp_min_term_length]

        if self.config.applehelp_stopwords is not None:
            args += ['-s', self.config.applehelp_stopwords]

        if self.config.applehelp_locale is not None:
            args += ['-l', self.config.applehelp_locale]

        if self.config.applehelp_disable_external_tools:
            logger.info('skipping')

            logger.warning('you will need to index this help book with:\n  %s',
                           ' '.join([pipes.quote(arg) for arg in args]))
        else:
            try:
                p = subprocess.Popen(args,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.STDOUT)

                output = p.communicate()[0]

                if p.returncode != 0:
                    raise AppleHelpIndexerFailed(output)
                else:
                    logger.info('done')
            except OSError:
                raise AppleHelpIndexerFailed('Command not found: %s' % args[0])

        # If we've been asked to, sign the bundle
        if self.config.applehelp_codesign_identity:
            logger.info(bold('signing help book... '), nonl=True)

            args = [
                self.config.applehelp_codesign_path,
                '-s', self.config.applehelp_codesign_identity,
                '-f'
            ]

            args += self.config.applehelp_codesign_flags

            args.append(self.bundle_path)

            if self.config.applehelp_disable_external_tools:
                logger.info('skipping')
                logger.warning('you will need to sign this help book with:\n  %s',
                               ' '.join([pipes.quote(arg) for arg in args]))
            else:
                try:
                    p = subprocess.Popen(args,
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.STDOUT)

                    output = p.communicate()[0]

                    if p.returncode != 0:
                        raise AppleHelpCodeSigningFailed(output)
                    else:
                        logger.info('done')
                except OSError:
                    raise AppleHelpCodeSigningFailed('Command not found: %s' % args[0])
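
Both the help indexer and the code-signing step above follow the same subprocess pattern: run the external tool with stderr folded into stdout, and raise with the captured output if it is missing or exits non-zero. A generic sketch of that pattern (the `run_tool` helper is hypothetical):

import subprocess

def run_tool(args):
    # Run an external command with stderr folded into stdout; raise with the
    # captured output if the tool is missing or exits non-zero.
    try:
        proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        output = proc.communicate()[0]
    except OSError:
        raise RuntimeError('Command not found: %s' % args[0])
    if proc.returncode != 0:
        raise RuntimeError(output)
    return output
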
Example #41
0
def generate_autosummary_docs(sources,
                              output_dir=None,
                              suffix='.rst',
                              warn=_simple_warn,
                              info=_simple_info,
                              base_path=None,
                              builder=None,
                              template_dir=None,
                              imported_members=False,
                              app=None):
    # type: (List[unicode], unicode, unicode, Callable, Callable, unicode, Builder, unicode, bool, Any) -> None  # NOQA

    showed_sources = list(sorted(sources))
    if len(showed_sources) > 20:
        showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
    info('[autosummary] generating autosummary for: %s' %
         ', '.join(showed_sources))

    if output_dir:
        info('[autosummary] writing to %s' % output_dir)

    if base_path is not None:
        sources = [os.path.join(base_path, filename) for filename in sources]

    # create our own templating environment
    template_dirs = [
        os.path.join(package_dir, 'ext', 'autosummary', 'templates')
    ]  # type: List[unicode]

    template_loader = None  # type: BaseLoader
    if builder is not None:
        # allow the user to override the templates
        template_loader = BuiltinTemplateLoader()
        template_loader.init(builder, dirs=template_dirs)
    else:
        if template_dir:
            template_dirs.insert(0, template_dir)
        template_loader = FileSystemLoader(template_dirs)  # type: ignore
    template_env = SandboxedEnvironment(loader=template_loader)
    template_env.filters['underline'] = _underline

    # replace the builtin html filters
    template_env.filters['escape'] = rst_escape
    template_env.filters['e'] = rst_escape

    # read
    items = find_autosummary_in_files(sources)

    # keep track of new files
    new_files = []

    # write
    for name, path, template_name in sorted(set(items), key=str):
        if path is None:
            # The corresponding autosummary:: directive did not have
            # a :toctree: option
            continue

        path = output_dir or os.path.abspath(path)
        ensuredir(path)

        try:
            name, obj, parent, mod_name = import_by_name(name)
        except ImportError as e:
            warn('[autosummary] failed to import %r: %s' % (name, e))
            continue

        fn = os.path.join(path, name + suffix)

        # skip it if it exists
        if os.path.isfile(fn):
            continue

        new_files.append(fn)

        with open(fn, 'w') as f:
            doc = get_documenter(app, obj, parent)

            if template_name is not None:
                template = template_env.get_template(template_name)
            else:
                try:
                    template = template_env.get_template('autosummary/%s.rst' %
                                                         doc.objtype)
                except TemplateNotFound:
                    template = template_env.get_template(
                        'autosummary/base.rst')

            def get_members(obj, typ, include_public=[], imported=False):
                # type: (Any, unicode, List[unicode], bool) -> Tuple[List[unicode], List[unicode]]  # NOQA
                items = []  # type: List[unicode]
                for name in dir(obj):
                    try:
                        value = safe_getattr(obj, name)
                    except AttributeError:
                        continue
                    documenter = get_documenter(app, value, obj)
                    if documenter.objtype == typ:
                        if typ == 'method':
                            items.append(name)
                        elif imported or getattr(value, '__module__',
                                                 None) == obj.__name__:
                            # skip imported members if expected
                            items.append(name)
                public = [
                    x for x in items
                    if x in include_public or not x.startswith('_')
                ]
                return public, items

            ns = {}  # type: Dict[unicode, Any]

            if doc.objtype == 'module':
                ns['members'] = dir(obj)
                ns['functions'], ns['all_functions'] = \
                    get_members(obj, 'function', imported=imported_members)
                ns['classes'], ns['all_classes'] = \
                    get_members(obj, 'class', imported=imported_members)
                ns['exceptions'], ns['all_exceptions'] = \
                    get_members(obj, 'exception', imported=imported_members)
            elif doc.objtype == 'class':
                ns['members'] = dir(obj)
                ns['methods'], ns['all_methods'] = \
                    get_members(obj, 'method', ['__init__'], imported=imported_members)
                ns['attributes'], ns['all_attributes'] = \
                    get_members(obj, 'attribute', imported=imported_members)

            parts = name.split('.')
            if doc.objtype in ('method', 'attribute'):
                mod_name = '.'.join(parts[:-2])
                cls_name = parts[-2]
                obj_name = '.'.join(parts[-2:])
                ns['class'] = cls_name
            else:
                mod_name, obj_name = '.'.join(parts[:-1]), parts[-1]

            ns['fullname'] = name
            ns['module'] = mod_name
            ns['objname'] = obj_name
            ns['name'] = parts[-1]

            ns['objtype'] = doc.objtype
            ns['underline'] = len(name) * '='

            rendered = template.render(**ns)
            f.write(rendered)  # type: ignore

    # descend recursively to new files
    if new_files:
        generate_autosummary_docs(new_files,
                                  output_dir=output_dir,
                                  suffix=suffix,
                                  warn=warn,
                                  info=info,
                                  base_path=base_path,
                                  builder=builder,
                                  template_dir=template_dir)
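
The templating setup at the top of this example can be exercised on its own: a sandboxed Jinja environment with a filesystem loader and a custom `underline` filter. A minimal sketch, assuming Jinja2 is installed (the filter body and the `templates` directory are illustrative, not the actual `_underline` helper):

from jinja2 import FileSystemLoader
from jinja2.sandbox import SandboxedEnvironment

# Sandboxed environment looking templates up in ./templates; the underline
# filter here is only a stand-in for the _underline helper used above.
env = SandboxedEnvironment(loader=FileSystemLoader(['templates']))
env.filters['underline'] = lambda s, char='=': s + '\n' + char * len(s)
print(env.from_string('{{ "module.name" | underline }}').render())
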
Example #42
0
    def __init__(self,
                 srcdir,
                 confdir,
                 outdir,
                 doctreedir,
                 buildername,
                 confoverrides=None,
                 status=sys.stdout,
                 warning=sys.stderr,
                 freshenv=False,
                 warningiserror=False,
                 tags=None,
                 verbosity=0,
                 parallel=0,
                 keep_going=False):
        # type: (str, str, str, str, str, Dict, IO, IO, bool, bool, List[str], int, int, bool) -> None  # NOQA
        self.phase = BuildPhase.INITIALIZATION
        self.verbosity = verbosity
        self.extensions = {}  # type: Dict[str, Extension]
        self.builder = None  # type: Builder
        self.env = None  # type: BuildEnvironment
        self.project = None  # type: Project
        self.registry = SphinxComponentRegistry()
        self.html_themes = {}  # type: Dict[str, str]

        # validate provided directories
        self.srcdir = abspath(srcdir)
        self.outdir = abspath(outdir)
        self.doctreedir = abspath(doctreedir)
        self.confdir = confdir
        if self.confdir:  # confdir is optional
            self.confdir = abspath(self.confdir)
            if not path.isfile(path.join(self.confdir, 'conf.py')):
                raise ApplicationError(
                    __("config directory doesn't contain a "
                       "conf.py file (%s)") % confdir)

        if not path.isdir(self.srcdir):
            raise ApplicationError(
                __('Cannot find source directory (%s)') % self.srcdir)

        if self.srcdir == self.outdir:
            raise ApplicationError(
                __('Source directory and destination '
                   'directory cannot be identical'))

        self.parallel = parallel

        if status is None:
            self._status = StringIO()  # type: IO
            self.quiet = True
        else:
            self._status = status
            self.quiet = False

        if warning is None:
            self._warning = StringIO()  # type: IO
        else:
            self._warning = warning
        self._warncount = 0
        self.keep_going = warningiserror and keep_going
        if self.keep_going:
            self.warningiserror = False
        else:
            self.warningiserror = warningiserror
        logging.setup(self, self._status, self._warning)

        self.events = EventManager(self)

        # keep last few messages for traceback
        # This will be filled by sphinx.util.logging.LastMessagesWriter
        self.messagelog = deque(maxlen=10)  # type: deque

        # say hello to the world
        logger.info(bold(
            __('Running Sphinx v%s') % sphinx.__display_version__))

        # status code for command-line application
        self.statuscode = 0

        # read config
        self.tags = Tags(tags)
        if self.confdir is None:
            self.config = Config({}, confoverrides or {})
        else:
            self.config = Config.read(self.confdir, confoverrides or {},
                                      self.tags)

        # initialize some limited config variables before initializing i18n
        # and loading extensions
        self.config.pre_init_values()

        # set up translation infrastructure
        self._init_i18n()

        # check the Sphinx version if requested
        if self.config.needs_sphinx and self.config.needs_sphinx > sphinx.__display_version__:
            raise VersionRequirementError(
                __('This project needs at least Sphinx v%s and therefore cannot '
                   'be built with this version.') % self.config.needs_sphinx)

        # set confdir to srcdir if -C given (!= no confdir); a few pieces
        # of code expect a confdir to be set
        if self.confdir is None:
            self.confdir = self.srcdir

        # load all built-in extension modules
        for extension in builtin_extensions:
            self.setup_extension(extension)

        # load all user-given extension modules
        for extension in self.config.extensions:
            self.setup_extension(extension)

        # preload builder module (before init config values)
        self.preload_builder(buildername)

        if not path.isdir(outdir):
            with progress_message(__('making output directory')):
                ensuredir(outdir)

        # the config file itself can be an extension
        if self.config.setup:
            prefix = __('while setting up extension %s:') % "conf.py"
            with prefixed_warnings(prefix):
                if callable(self.config.setup):
                    self.config.setup(self)
                else:
                    raise ConfigError(
                        __("'setup' as currently defined in conf.py isn't a Python callable. "
                           "Please modify its definition to make it a callable function. "
                           "This is needed for conf.py to behave as a Sphinx extension."
                           ))

        # now that we know all config values, collect them from conf.py
        self.config.init_values()
        self.events.emit('config-inited', self.config)

        # create the project
        self.project = Project(self.srcdir, self.config.source_suffix)
        # create the builder
        self.builder = self.create_builder(buildername)
        # set up the build environment
        self._init_env(freshenv)
        # set up the builder
        self._init_builder()
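
Example #52 at the bottom of this page shows the command-line wrapper calling this constructor; driving it directly from Python only requires the source, config, output and doctree directories plus a builder name. A minimal sketch with illustrative paths:

from sphinx.application import Sphinx

# Illustrative paths for a project whose conf.py lives in ./docs.
app = Sphinx(srcdir='docs', confdir='docs',
             outdir='docs/_build/html',
             doctreedir='docs/_build/doctrees',
             buildername='html')
app.build()
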
Example #43
0
def generate_autosummary_docs(sources,
                              output_dir=None,
                              suffix='.rst',
                              warn=None,
                              info=None,
                              base_path=None,
                              builder=None,
                              template_dir=None,
                              imported_members=False,
                              app=None):
    # type: (List[str], str, str, Callable, Callable, str, Builder, str, bool, Any) -> None
    if info:
        warnings.warn(
            'info argument for generate_autosummary_docs() is deprecated.',
            RemovedInSphinx40Warning)
        _info = info
    else:
        _info = logger.info

    if warn:
        warnings.warn(
            'warn argument for generate_autosummary_docs() is deprecated.',
            RemovedInSphinx40Warning)
        _warn = warn
    else:
        _warn = logger.warning

    showed_sources = list(sorted(sources))
    if len(showed_sources) > 20:
        showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
    _info(
        __('[autosummary] generating autosummary for: %s') %
        ', '.join(showed_sources))

    if output_dir:
        _info(__('[autosummary] writing to %s') % output_dir)

    if base_path is not None:
        sources = [os.path.join(base_path, filename) for filename in sources]

    template = AutosummaryRenderer(builder, template_dir)

    # read
    items = find_autosummary_in_files(sources)

    # keep track of new files
    new_files = []

    # write
    for name, path, template_name in sorted(set(items), key=str):
        if path is None:
            # The corresponding autosummary:: directive did not have
            # a :toctree: option
            continue

        path = output_dir or os.path.abspath(path)
        ensuredir(path)

        try:
            name, obj, parent, mod_name = import_by_name(name)
        except ImportError as e:
            _warn('[autosummary] failed to import %r: %s' % (name, e))
            continue

        fn = os.path.join(path, name + suffix)

        # skip it if it exists
        if os.path.isfile(fn):
            continue

        new_files.append(fn)

        with open(fn, 'w') as f:
            rendered = generate_autosummary_content(name, obj, parent,
                                                    template, template_name,
                                                    imported_members, app)
            f.write(rendered)

    # descend recursively to new files
    if new_files:
        generate_autosummary_docs(new_files,
                                  output_dir=output_dir,
                                  suffix=suffix,
                                  warn=warn,
                                  info=info,
                                  base_path=base_path,
                                  builder=builder,
                                  template_dir=template_dir,
                                  app=app)
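
The first two blocks of this example are a small migration shim: keep accepting the deprecated `warn`/`info` callables, emit a deprecation warning, and otherwise route output through the module logger. The same shape in isolation (function and argument names are illustrative):

import logging
import warnings

logger = logging.getLogger(__name__)

def process(items, info=None):
    # Accept the deprecated 'info' callback for now, warn about it, and fall
    # back to the module logger when the caller did not supply one.
    if info is not None:
        warnings.warn("the 'info' argument is deprecated", DeprecationWarning,
                      stacklevel=2)
        _info = info
    else:
        _info = logger.info
    _info('processing %d item(s)' % len(items))
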
Example #44
0
def builder_inited(app):
    """
    autoapi Sphinx extension hook for the ``builder-inited`` event.

    This hook will read the configuration value ``autoapi_modules`` and render
    the modules described in it.

    See http://sphinx-doc.org/extdev/appapi.html#event-builder-inited
    """
    # Get modules to build documentation for
    modules = app.config.autoapi_modules
    if not modules:
        return

    # Get template environment
    template_env = get_template_env(app)

    for module, overrides in modules.items():

        # Get options
        options = {
            'prune': False,
            'override': True,
            'template': 'module',
            'output': module
        }
        if overrides:
            options.update(overrides)

        # Get template
        template = template_env.get_template('autoapi/{}.rst'.format(
            options['template']))

        # Build API tree
        tree = APINode(module)

        # Gather nodes to document
        if options['prune']:
            nodes = [
                node for node in tree.directory.values() if node.is_relevant()
            ]
        else:
            nodes = tree.directory.values()

        if not nodes:
            continue

        # Define output directory
        out_dir = join(app.env.srcdir, options['output'])
        ensuredir(out_dir)

        # Iterate nodes and render them
        for node in nodes:
            source_suffix = next(iter(app.config.source_suffix))
            out_file = join(out_dir, node.name + source_suffix)

            # Skip file if override is off and the file already exists
            if not options['override'] and exists(out_file):
                continue

            # Consider only subnodes that are relevant if prune is enabled
            subnodes = node.subnodes
            if options['prune']:
                subnodes = [
                    subnode for subnode in node.subnodes
                    if subnode.is_relevant()
                ]

            # Write file
            with open(out_file, 'w') as fd:
                fd.write(template.render(node=node, subnodes=subnodes))
Example #45
0
def generate(d: Dict,
             overwrite: bool = True,
             silent: bool = False,
             templatedir: str = None) -> None:
    """Generate project based on values in *d*."""
    template = QuickstartRenderer(templatedir=templatedir)

    if 'mastertoctree' not in d:
        d['mastertoctree'] = ''
    if 'mastertocmaxdepth' not in d:
        d['mastertocmaxdepth'] = 2

    d['now'] = time.asctime()
    d['project_underline'] = column_width(d['project']) * '='
    d.setdefault('extensions', [])
    d['copyright'] = time.strftime('%Y') + ', ' + d['author']

    d["path"] = os.path.abspath(d['path'])
    ensuredir(d['path'])

    srcdir = path.join(d['path'], 'source') if d['sep'] else d['path']

    ensuredir(srcdir)
    if d['sep']:
        builddir = path.join(d['path'], 'build')
        d['exclude_patterns'] = ''
    else:
        builddir = path.join(srcdir, d['dot'] + 'build')
        exclude_patterns = map(repr, [
            d['dot'] + 'build',
            'Thumbs.db',
            '.DS_Store',
        ])
        d['exclude_patterns'] = ', '.join(exclude_patterns)
    ensuredir(builddir)
    ensuredir(path.join(srcdir, d['dot'] + 'templates'))
    ensuredir(path.join(srcdir, d['dot'] + 'static'))

    def write_file(fpath: str, content: str, newline: str = None) -> None:
        if overwrite or not path.isfile(fpath):
            if 'quiet' not in d:
                print(__('Creating file %s.') % fpath)
            with open(fpath, 'wt', encoding='utf-8', newline=newline) as f:
                f.write(content)
        else:
            if 'quiet' not in d:
                print(__('File %s already exists, skipping.') % fpath)

    conf_path = os.path.join(templatedir, 'conf.py_t') if templatedir else None
    if not conf_path or not path.isfile(conf_path):
        conf_path = os.path.join(package_dir, 'templates', 'quickstart',
                                 'conf.py_t')
    with open(conf_path) as f:
        conf_text = f.read()

    write_file(path.join(srcdir, 'conf.py'),
               template.render_string(conf_text, d))

    masterfile = path.join(srcdir, d['master'] + d['suffix'])
    write_file(masterfile, template.render('quickstart/master_doc.rst_t', d))

    if d.get('make_mode') is True:
        makefile_template = 'quickstart/Makefile.new_t'
        batchfile_template = 'quickstart/make.bat.new_t'
    else:
        makefile_template = 'quickstart/Makefile_t'
        batchfile_template = 'quickstart/make.bat_t'

    if d['makefile'] is True:
        d['rsrcdir'] = 'source' if d['sep'] else '.'
        d['rbuilddir'] = 'build' if d['sep'] else d['dot'] + 'build'
        # use binary mode, to avoid writing \r\n on Windows
        write_file(path.join(d['path'], 'Makefile'),
                   template.render(makefile_template, d), '\n')

    if d['batchfile'] is True:
        d['rsrcdir'] = 'source' if d['sep'] else '.'
        d['rbuilddir'] = 'build' if d['sep'] else d['dot'] + 'build'
        write_file(path.join(d['path'], 'make.bat'),
                   template.render(batchfile_template, d), '\r\n')

    if silent:
        return
    print()
    print(
        bold(__('Finished: An initial directory structure has been created.')))
    print()
    print(__(
        'You should now populate your master file %s and create other documentation\n'
        'source files. ') % masterfile,
          end='')
    if d['makefile'] or d['batchfile']:
        print(
            __('Use the Makefile to build the docs, like so:\n'
               '   make builder'))
    else:
        print(
            __('Use the sphinx-build command to build the docs, like so:\n'
               '   sphinx-build -b builder %s %s') % (srcdir, builddir))
    print(
        __('where "builder" is one of the supported builders, '
           'e.g. html, latex or linkcheck.'))
    print()
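
The `sep`/`dot` handling above determines the whole project layout: with a separated layout you get sibling source/ and build/ trees, otherwise the build tree hides in a dot-prefixed directory next to the sources. A sketch of just that decision (the `make_layout` helper is hypothetical; `os.makedirs` stands in for `ensuredir`):

import os

def make_layout(root, sep=False, dot='_'):
    # Mirrors the layout decision above: separate source/ and build/ trees
    # when sep is True, otherwise a dot-prefixed build dir inside the sources.
    srcdir = os.path.join(root, 'source') if sep else root
    builddir = os.path.join(root, 'build') if sep else os.path.join(srcdir, dot + 'build')
    for d in (srcdir, builddir,
              os.path.join(srcdir, dot + 'templates'),
              os.path.join(srcdir, dot + 'static')):
        os.makedirs(d, exist_ok=True)
    return srcdir, builddir
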
Example #46
0
    def write(self, *ignored):
        # type: (Any) -> None
        version = self.config.version
        domain = cast(ChangeSetDomain, self.env.get_domain('changeset'))
        libchanges = {}  # type: Dict[str, List[Tuple[str, str, int]]]
        apichanges = []  # type: List[Tuple[str, str, int]]
        otherchanges = {}  # type: Dict[Tuple[str, str], List[Tuple[str, str, int]]]

        changesets = domain.get_changesets_for(version)
        if not changesets:
            logger.info(bold(__('no changes in version %s.') % version))
            return
        logger.info(bold(__('writing summary file...')))
        for changeset in changesets:
            if isinstance(changeset.descname, tuple):
                descname = changeset.descname[0]
            else:
                descname = changeset.descname
            ttext = self.typemap[changeset.type]
            context = changeset.content.replace('\n', ' ')
            if descname and changeset.docname.startswith('c-api'):
                if context:
                    entry = '<b>%s</b>: <i>%s:</i> %s' % (descname, ttext,
                                                          context)
                else:
                    entry = '<b>%s</b>: <i>%s</i>.' % (descname, ttext)
                apichanges.append((entry, changeset.docname, changeset.lineno))
            elif descname or changeset.module:
                module = changeset.module
                if not module:
                    module = _('Builtins')
                if not descname:
                    descname = _('Module level')
                if context:
                    entry = '<b>%s</b>: <i>%s:</i> %s' % (descname, ttext,
                                                          context)
                else:
                    entry = '<b>%s</b>: <i>%s</i>.' % (descname, ttext)
                libchanges.setdefault(module, []).append(
                    (entry, changeset.docname, changeset.lineno))
            else:
                if not context:
                    continue
                entry = '<i>%s:</i> %s' % (ttext.capitalize(), context)
                title = self.env.titles[changeset.docname].astext()
                otherchanges.setdefault((changeset.docname, title), []).append(
                    (entry, changeset.docname, changeset.lineno))

        ctx = {
            'project': self.config.project,
            'version': version,
            'docstitle': self.config.html_title,
            'shorttitle': self.config.html_short_title,
            'libchanges': sorted(libchanges.items()),
            'apichanges': sorted(apichanges),
            'otherchanges': sorted(otherchanges.items()),
            'show_copyright': self.config.html_show_copyright,
            'show_sphinx': self.config.html_show_sphinx,
        }
        with open(path.join(self.outdir, 'index.html'), 'w',
                  encoding='utf8') as f:
            f.write(self.templates.render('changes/frameset.html', ctx))
        with open(path.join(self.outdir, 'changes.html'), 'w',
                  encoding='utf8') as f:
            f.write(self.templates.render('changes/versionchanges.html', ctx))

        hltext = [
            '.. versionadded:: %s' % version,
            '.. versionchanged:: %s' % version,
            '.. deprecated:: %s' % version
        ]

        def hl(no, line):
            # type: (int, str) -> str
            line = '<a name="L%s"> </a>' % no + html.escape(line)
            for x in hltext:
                if x in line:
                    line = '<span class="hl">%s</span>' % line
                    break
            return line

        logger.info(bold(__('copying source files...')))
        for docname in self.env.all_docs:
            with open(self.env.doc2path(docname),
                      encoding=self.env.config.source_encoding) as f:
                try:
                    lines = f.readlines()
                except UnicodeDecodeError:
                    logger.warning(
                        __('could not read %r for changelog creation'),
                        docname)
                    continue
            targetfn = path.join(self.outdir, 'rst',
                                 os_path(docname)) + '.html'
            ensuredir(path.dirname(targetfn))
            with open(targetfn, 'w', encoding='utf-8') as f:
                text = ''.join(
                    hl(i + 1, line) for (i, line) in enumerate(lines))
                ctx = {
                    'filename': self.env.doc2path(docname, None),
                    'text': text
                }
                f.write(self.templates.render('changes/rstsource.html', ctx))
        themectx = dict(('theme_' + key, val)
                        for (key, val) in self.theme.get_options({}).items())
        copy_asset_file(path.join(package_dir, 'themes', 'default', 'static',
                                  'default.css_t'),
                        self.outdir,
                        context=themectx,
                        renderer=self.templates)
        copy_asset_file(
            path.join(package_dir, 'themes', 'basic', 'static', 'basic.css'),
            self.outdir)
Example #47
0
def render_dot(self, code, options, format, prefix='graphviz'):
    # type: (nodes.NodeVisitor, unicode, Dict, unicode, unicode) -> Tuple[unicode, unicode]
    """Render graphviz code into a PNG or PDF output file."""
    graphviz_dot = options.get('graphviz_dot',
                               self.builder.config.graphviz_dot)
    hashkey = (code + str(options) + str(graphviz_dot) +
               str(self.builder.config.graphviz_dot_args)).encode('utf-8')

    fname = '%s-%s.%s' % (prefix, sha1(hashkey).hexdigest(), format)
    relfn = posixpath.join(self.builder.imgpath, fname)
    outfn = path.join(self.builder.outdir, self.builder.imagedir, fname)

    if path.isfile(outfn):
        return relfn, outfn

    if (hasattr(self.builder, '_graphviz_warned_dot')
            and self.builder._graphviz_warned_dot.get(graphviz_dot)):
        return None, None

    ensuredir(path.dirname(outfn))

    # graphviz expects UTF-8 by default
    if isinstance(code, text_type):
        code = code.encode('utf-8')

    dot_args = [graphviz_dot]
    dot_args.extend(self.builder.config.graphviz_dot_args)
    dot_args.extend(['-T' + format, '-o' + outfn])

    docname = options.get('docname', 'index')
    cwd = path.dirname(path.join(self.builder.srcdir, docname))

    if format == 'png':
        dot_args.extend(['-Tcmapx', '-o%s.map' % outfn])
    try:
        p = Popen(dot_args, stdout=PIPE, stdin=PIPE, stderr=PIPE, cwd=cwd)
    except OSError as err:
        if err.errno != ENOENT:  # No such file or directory
            raise
        logger.warning(
            __('dot command %r cannot be run (needed for graphviz '
               'output), check the graphviz_dot setting'), graphviz_dot)
        if not hasattr(self.builder, '_graphviz_warned_dot'):
            self.builder._graphviz_warned_dot = {}
        self.builder._graphviz_warned_dot[graphviz_dot] = True
        return None, None
    try:
        # Graphviz may close standard input when an error occurs,
        # resulting in a broken pipe on communicate()
        stdout, stderr = p.communicate(code)
    except (OSError, IOError) as err:
        if err.errno not in (EPIPE, EINVAL):
            raise
        # in this case, read the standard output and standard error streams
        # directly, to get the error message(s)
        stdout, stderr = p.stdout.read(), p.stderr.read()
        p.wait()
    if p.returncode != 0:
        raise GraphvizError(
            __('dot exited with error:\n[stderr]\n%s\n'
               '[stdout]\n%s') % (stderr, stdout))
    if not path.isfile(outfn):
        raise GraphvizError(
            __('dot did not produce an output file:\n[stderr]\n%s\n'
               '[stdout]\n%s') % (stderr, stdout))
    return relfn, outfn
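
The naming scheme in this example doubles as a cache: the output filename is a SHA-1 of the DOT source plus the rendering options, so an unchanged graph resolves to a file that already exists and the subprocess call is skipped. A minimal sketch of that keying scheme (helper name and arguments are illustrative):

from hashlib import sha1
import os.path

def cached_image_path(outdir, code, options, prefix='graphviz', fmt='png'):
    # Content-addressed output name: identical source and options always map
    # to the same file, so os.path.isfile() doubles as the cache check.
    hashkey = (code + str(options)).encode('utf-8')
    return os.path.join(outdir, '%s-%s.%s' % (prefix, sha1(hashkey).hexdigest(), fmt))
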
Example #48
0
def render_tikz(self, node, libs='', stringsubst=False):
    tikz = cleanup_tikzcode(self, node)
    hashkey = tikz.encode('utf-8')
    fname = 'tikz-%s.%s' % (sha(hashkey).hexdigest(),
                            OUT_EXTENSION[self.builder.config.tikz_proc_suite])
    relfn = posixpath.join(self.builder.imgpath, fname)
    outfn = path.join(self.builder.outdir, '_images', fname)

    if path.isfile(outfn):
        return relfn

    if hasattr(self.builder, '_tikz_warned'):
        return None

    ensuredir(path.dirname(outfn))

    latex = DOC_HEAD % libs
    latex += self.builder.config.tikz_latex_preamble
    latex += DOC_BODY % tikz
    latex = latex.encode('utf-8')

    with changedir(self.builder._tikz_tempdir):

        tf = open('tikz.tex', 'wb')
        tf.write(latex)
        tf.close()

        system([
            self.builder.config.latex_engine, '--interaction=nonstopmode',
            'tikz.tex'
        ], self.builder)

        if self.builder.config.tikz_proc_suite in ['ImageMagick', 'Netpbm']:

            system(['pdftoppm', '-r', '400', 'tikz.pdf', 'tikz'], self.builder)
            # on Windows 'pdftoppm -singlefile' does not work,
            # therefore do without it and glob instead
            ppmfilename = glob('tikz*.ppm')[0]

            if self.builder.config.tikz_proc_suite == "ImageMagick":
                if self.builder.config.tikz_transparent:
                    convert_args = ['-fuzz', '2%', '-transparent', 'white']
                else:
                    convert_args = []

                system([which('convert'), '-trim'] + convert_args +
                       [ppmfilename, outfn], self.builder)

            elif self.builder.config.tikz_proc_suite == "Netpbm":
                if self.builder.config.tikz_transparent:
                    pnm_args = ['-transparent', 'rgb:ff/ff/ff']
                else:
                    pnm_args = []
                system(['pnmtopng'] + pnm_args + [ppmfilename],
                       self.builder,
                       outfile=outfn)

        elif self.builder.config.tikz_proc_suite == "GhostScript":
            ghostscript = which('ghostscript') or which('gs') or which(
                'gswin64')
            if self.builder.config.tikz_transparent:
                device = "pngalpha"
            else:
                device = "png256"
            system([
                ghostscript, '-dBATCH', '-dNOPAUSE',
                '-sDEVICE=%s' % device,
                '-sOutputFile=%s' % outfn, '-r120x120', '-f', 'tikz.pdf'
            ], self.builder)
        elif self.builder.config.tikz_proc_suite == "pdf2svg":
            system(['pdf2svg', 'tikz.pdf', outfn], self.builder)
        else:
            self.builder._tikz_warned = True
            raise TikzExtError('Error (tikz extension): Invalid configuration '
                               'value for tikz_proc_suite')

        return relfn
Example #49
0
    def write_doc(self, docname, doctree):
        # work around multiple string % tuple issues in docutils;
        # replace tuples in attribute values with lists
        doctree = doctree.deepcopy()
        destination = docutils.io.StringOutput(encoding="utf-8")
        ### also write a downloadable copy of the notebook, with proper links, if the option is set
        if "jupyter_download_nb" in self.config and self.config[
                "jupyter_download_nb"]:

            outfilename = os.path.join(self.downloadsdir,
                                       os_path(docname) + self.out_suffix)
            ensuredir(os.path.dirname(outfilename))
            self.writer._set_ref_urlpath(
                self.config["jupyter_download_nb_urlpath"])
            self.writer._set_jupyter_download_nb_image_urlpath(
                (self.config["jupyter_download_nb_image_urlpath"]))
            self.writer.write(doctree, destination)

            # get a NotebookNode object from a string
            nb = nbformat.reads(self.writer.output, as_version=4)
            nb = self.update_Metadata(docname, nb)
            try:
                with codecs.open(outfilename, "w", "utf-8") as f:
                    self.writer.output = nbformat.writes(nb, version=4)
                    f.write(self.writer.output)
            except (IOError, OSError) as err:
                self.warn("error writing file %s: %s" % (outfilename, err))

            ### executing downloaded notebooks
            if (self.config['jupyter_download_nb_execute']):
                strDocname = str(docname)
                if strDocname in self.download_execution_vars[
                        'dependency_lists'].keys():
                    self.download_execution_vars['delayed_notebooks'].update(
                        {strDocname: nb})
                else:
                    self._execute_notebook_class.execute_notebook(
                        self, nb, docname, self.download_execution_vars,
                        self.download_execution_vars['futures'])

        ### output notebooks for executing
        self.writer._set_ref_urlpath(None)
        self.writer._set_jupyter_download_nb_image_urlpath(None)
        self.writer.write(doctree, destination)

        # get a NotebookNode object from a string
        nb = nbformat.reads(self.writer.output, as_version=4)
        nb = self.update_Metadata(docname, nb)

        ### execute the notebook
        if (self.config["jupyter_execute_notebooks"]):
            strDocname = str(docname)
            if strDocname in self.execution_vars['dependency_lists'].keys():
                self.execution_vars['delayed_notebooks'].update(
                    {strDocname: nb})
            else:
                self._execute_notebook_class.execute_notebook(
                    self, nb, docname, self.execution_vars,
                    self.execution_vars['futures'])
        else:
            #do not execute
            if (self.config['jupyter_generate_html']):
                language_info = nb.metadata.kernelspec.language
                self._convert_class = convertToHtmlWriter(self)
                self._convert_class.convert(nb, docname, language_info,
                                            self.outdir)

        ### mkdir if the directory does not exist
        outfilename = os.path.join(self.outdir,
                                   os_path(docname) + self.out_suffix)
        ensuredir(os.path.dirname(outfilename))

        try:
            with codecs.open(outfilename, "w", "utf-8") as f:
                self.writer.output = nbformat.writes(nb, version=4)
                f.write(self.writer.output)
        except (IOError, OSError) as err:
            self.logger.warning("error writing file %s: %s" %
                                (outfilename, err))
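
The notebook handling in this example is a straight nbformat round trip: the writer's output string is parsed into a notebook object and serialized back out in the version-4 schema. A small sketch, assuming the `nbformat` package is available:

import nbformat

# Build a minimal notebook, serialize it, and parse it back -- the same
# writes()/reads() pair the builder above applies to the writer output.
nb = nbformat.v4.new_notebook()
nb.cells.append(nbformat.v4.new_markdown_cell('# Hello'))
text = nbformat.writes(nb, version=4)
nb_again = nbformat.reads(text, as_version=4)
assert nb_again.cells[0].source == '# Hello'
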
Example #50
0
    def write_doc(self, docname, doctree):
        outfilename = os.path.join(self.outdir, docname)
        ensuredir(os.path.dirname(outfilename))
        destination = BinaryFileOutput(destination_path=outfilename)
        self.writer.write(doctree, destination)
Example #51
0
def generate_autosummary_docs(sources, output_dir=None, suffix='.rst',
                              warn=_simple_warn, info=_simple_info,
                              base_path=None, builder=None, template_dir=None):

    showed_sources = list(sorted(sources))
    if len(showed_sources) > 20:
        showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
    info('[autosummary] generating autosummary for: %s' %
         ', '.join(showed_sources))

    if output_dir:
        info('[autosummary] writing to %s' % output_dir)

    if base_path is not None:
        sources = [os.path.join(base_path, filename) for filename in sources]

    # create our own templating environment
    template_dirs = [os.path.join(package_dir, 'ext',
                                  'autosummary', 'templates')]
    if builder is not None:
        # allow the user to override the templates
        template_loader = BuiltinTemplateLoader()
        template_loader.init(builder, dirs=template_dirs)
    else:
        if template_dir:
            template_dirs.insert(0, template_dir)
        template_loader = FileSystemLoader(template_dirs)
    template_env = SandboxedEnvironment(loader=template_loader)

    # read
    items = find_autosummary_in_files(sources)

    # remove possible duplicates
    items = dict([(item, True) for item in items]).keys()

    # keep track of new files
    new_files = []

    # write
    for name, path, template_name in sorted(items):
        if path is None:
            # The corresponding autosummary:: directive did not have
            # a :toctree: option
            continue

        path = output_dir or os.path.abspath(path)
        ensuredir(path)

        try:
            name, obj, parent = import_by_name(name)
        except ImportError as e:
            warn('[autosummary] failed to import %r: %s' % (name, e))
            continue

        fn = os.path.join(path, name + suffix)

        # skip it if it exists
        if os.path.isfile(fn):
            continue

        new_files.append(fn)

        f = open(fn, 'w')

        try:
            doc = get_documenter(obj, parent)

            if template_name is not None:
                template = template_env.get_template(template_name)
            else:
                try:
                    template = template_env.get_template('autosummary/%s.rst'
                                                         % doc.objtype)
                except TemplateNotFound:
                    template = template_env.get_template('autosummary/base.rst')

            def get_members(obj, typ, include_public=[]):
                items = []
                for name in dir(obj):
                    try:
                        documenter = get_documenter(safe_getattr(obj, name),
                                                    obj)
                    except AttributeError:
                        continue
                    if documenter.objtype == typ:
                        items.append(name)
                public = [x for x in items
                          if x in include_public or not x.startswith('_')]
                return public, items

            ns = {}

            if doc.objtype == 'module':
                ns['members'] = dir(obj)
                ns['functions'], ns['all_functions'] = \
                                   get_members(obj, 'function')
                ns['classes'], ns['all_classes'] = \
                                 get_members(obj, 'class')
                ns['exceptions'], ns['all_exceptions'] = \
                                   get_members(obj, 'exception')
            elif doc.objtype == 'class':
                ns['members'] = dir(obj)
                ns['methods'], ns['all_methods'] = \
                                 get_members(obj, 'method', ['__init__'])
                ns['methods'] = \
                    ([item for item in ns['methods'] if item in obj.__dict__])
                ns['attributes'], ns['all_attributes'] = \
                                 get_members(obj, 'attribute')
                ns['attributes'] = \
                    ([item for item in ns['attributes'] if item in obj.__dict__])

            parts = name.split('.')
            if doc.objtype in ('method', 'attribute'):
                mod_name = '.'.join(parts[:-2])
                cls_name = parts[-2]
                obj_name = '.'.join(parts[-2:])
                ns['class'] = cls_name
            else:
                mod_name, obj_name = '.'.join(parts[:-1]), parts[-1]

            ns['fullname'] = name
            ns['module'] = mod_name
            ns['objname'] = obj_name
            ns['name'] = parts[-1]

            ns['objtype'] = doc.objtype
            ns['underline'] = len(name) * '='

            rendered = template.render(**ns)
            f.write(rendered)
        finally:
            f.close()

    # descend recursively to new files
    if new_files:
        generate_autosummary_docs(new_files, output_dir=output_dir,
                                  suffix=suffix, warn=warn, info=info,
                                  base_path=base_path, builder=builder,
                                  template_dir=template_dir)
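
A minimal sketch of how this generator could be invoked on its own; the file names, output directory and the warn/info callbacks below are placeholders, and the accepted keyword arguments differ between Sphinx versions.

import os

def _warn(msg):
    # placeholder warning callback
    print('WARNING: ' + msg)

def _info(msg):
    # placeholder status callback
    print(msg)

# scan the listed documents for autosummary:: entries and write stub pages
generate_autosummary_docs(['api/index.rst', 'api/modules.rst'],
                          output_dir='api/generated',
                          suffix='.rst',
                          warn=_warn, info=_info,
                          base_path=os.getcwd())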
Example #52
def build_main(argv: List[str] = sys.argv[1:]) -> int:
    """Sphinx build "main" command-line entry."""

    parser = get_parser()
    args = parser.parse_args(argv)

    if args.noconfig:
        args.confdir = None
    elif not args.confdir:
        args.confdir = args.sourcedir

    if not args.doctreedir:
        args.doctreedir = os.path.join(args.outputdir, '.doctrees')

    # handle remaining filename arguments
    filenames = args.filenames
    missing_files = []
    for filename in filenames:
        if not os.path.isfile(filename):
            missing_files.append(filename)
    if missing_files:
        parser.error(__('cannot find files %r') % missing_files)

    if args.force_all and filenames:
        parser.error(__('cannot combine -a option and filenames'))

    if args.color == 'no' or (args.color == 'auto' and not color_terminal()):
        nocolor()

    status = sys.stdout
    warning = sys.stderr
    error = sys.stderr

    if args.quiet:
        status = None

    if args.really_quiet:
        status = warning = None

    if warning and args.warnfile:
        try:
            warnfile = abspath(args.warnfile)
            ensuredir(path.dirname(warnfile))
            warnfp = open(args.warnfile, 'w')
        except Exception as exc:
            parser.error(
                __('cannot open warning file %r: %s') % (args.warnfile, exc))
        warning = Tee(warning, warnfp)  # type: ignore
        error = warning

    confoverrides = {}
    for val in args.define:
        try:
            key, val = val.split('=', 1)
        except ValueError:
            parser.error(
                __('-D option argument must be in the form name=value'))
        confoverrides[key] = val

    for val in args.htmldefine:
        try:
            key, val = val.split('=')
        except ValueError:
            parser.error(
                __('-A option argument must be in the form name=value'))
        try:
            val = int(val)
        except ValueError:
            pass
        confoverrides['html_context.%s' % key] = val

    if args.nitpicky:
        confoverrides['nitpicky'] = True

    app = None
    try:
        confdir = args.confdir or args.sourcedir
        with patch_docutils(confdir), docutils_namespace():
            app = Sphinx(args.sourcedir, args.confdir, args.outputdir,
                         args.doctreedir, args.builder, confoverrides, status,
                         warning, args.freshenv, args.warningiserror,
                         args.tags, args.verbosity, args.jobs, args.keep_going)
            app.build(args.force_all, filenames)
            return app.statuscode
    except (Exception, KeyboardInterrupt) as exc:
        handle_exception(app, args, exc, error)
        return 2
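
The same entry point can be driven programmatically; the builder name and directories below are placeholders. This is roughly equivalent to running `sphinx-build -b html docs/source docs/_build/html`.

import sys

if __name__ == '__main__':
    sys.exit(build_main(['-b', 'html', 'docs/source', 'docs/_build/html']))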
Example #53
def generate_automodsumm_docs(lines,
                              srcfn,
                              app=None,
                              suffix='.rst',
                              warn=None,
                              info=None,
                              base_path=None,
                              builder=None,
                              template_dir=None,
                              inherited_members=False):
    """
    This function is adapted from
    `sphinx.ext.autosummary.generate.generate_autosummary_docs` to
    generate source for the automodsumm directives that should be
    autosummarized. Unlike generate_autosummary_docs, this function is
    called one file at a time.
    """

    from sphinx.jinja2glue import BuiltinTemplateLoader
    from sphinx.ext.autosummary import import_by_name, get_documenter
    from sphinx.ext.autosummary.generate import (_simple_info, _simple_warn)
    from sphinx.util.osutil import ensuredir
    from sphinx.util.inspect import safe_getattr
    from jinja2 import FileSystemLoader, TemplateNotFound
    from jinja2.sandbox import SandboxedEnvironment

    from .utils import find_autosummary_in_lines_for_automodsumm as find_autosummary_in_lines

    if info is None:
        info = _simple_info
    if warn is None:
        warn = _simple_warn

    # info('[automodsumm] generating automodsumm for: ' + srcfn)

    # Create our own templating environment - here we use Astropy's
    # templates rather than the default autosummary templates, in order to
    # allow docstrings to be shown for methods.
    template_dirs = [
        os.path.join(os.path.dirname(__file__), 'templates'),
        os.path.join(base_path, '_templates')
    ]
    if builder is not None:
        # allow the user to override the templates
        template_loader = BuiltinTemplateLoader()
        template_loader.init(builder, dirs=template_dirs)
    else:
        if template_dir:
            template_dirs.insert(0, template_dir)
        template_loader = FileSystemLoader(template_dirs)
    template_env = SandboxedEnvironment(loader=template_loader)

    # read
    # items = find_autosummary_in_files(sources)
    items = find_autosummary_in_lines(lines, filename=srcfn)
    if len(items) > 0:
        msg = '[automodsumm] {1}: found {0} automodsumm entries to generate'
        info(msg.format(len(items), srcfn))


#    gennms = [item[0] for item in items]
#    if len(gennms) > 20:
#        gennms = gennms[:10] + ['...'] + gennms[-10:]
#    info('[automodsumm] generating autosummary for: ' + ', '.join(gennms))

    # remove possible duplicates
    items = list(set(items))

    # keep track of new files
    new_files = []

    # write
    for name, path, template_name, inherited_mem in sorted(items):

        if path is None:
            # The corresponding autosummary:: directive did not have
            # a :toctree: option
            continue

        path = os.path.abspath(os.path.join(base_path, path))
        ensuredir(path)

        try:
            import_by_name_values = import_by_name(name)
        except ImportError as e:
            warn('[automodsumm] failed to import %r: %s' % (name, e))
            continue

        # accommodate the differing return values of import_by_name in
        # Sphinx 1.2.2 (3 items) and 1.2.3 (4 items)
        if len(import_by_name_values) == 3:
            name, obj, parent = import_by_name_values
        elif len(import_by_name_values) == 4:
            name, obj, parent, module_name = import_by_name_values

        fn = os.path.join(path, name + suffix)

        # skip it if it exists
        if os.path.isfile(fn):
            continue

        new_files.append(fn)

        f = open(fn, 'w')

        try:

            if SPHINX_LT_17:
                doc = get_documenter(obj, parent)
            else:
                doc = get_documenter(app, obj, parent)

            if template_name is not None:
                template = template_env.get_template(template_name)
            else:
                tmplstr = 'autosummary_core/%s.rst'
                try:
                    template = template_env.get_template(tmplstr % doc.objtype)
                except TemplateNotFound:
                    template = template_env.get_template(tmplstr % 'base')

            def get_members_mod(obj, typ, include_public=[]):
                """
                typ = None -> all
                """
                items = []
                for name in dir(obj):
                    try:
                        if SPHINX_LT_17:
                            documenter = get_documenter(
                                safe_getattr(obj, name), obj)
                        else:
                            documenter = get_documenter(
                                app, safe_getattr(obj, name), obj)
                    except AttributeError:
                        continue
                    if typ is None or documenter.objtype == typ:
                        items.append(name)
                public = [
                    x for x in items
                    if x in include_public or not x.startswith('_')
                ]
                return public, items

            def get_members_class(obj,
                                  typ,
                                  include_public=[],
                                  include_base=False):
                """
                typ = None -> all
                include_base -> include attrs that are from a base class
                """
                items = []

                # using dir gets all of the attributes, including the elements
                # from the base class, otherwise use __slots__ or __dict__
                if include_base:
                    names = dir(obj)
                else:
                    if hasattr(obj, '__slots__'):
                        names = tuple(getattr(obj, '__slots__'))
                    else:
                        names = getattr(obj, '__dict__').keys()

                for name in names:
                    try:
                        if SPHINX_LT_17:
                            documenter = get_documenter(
                                safe_getattr(obj, name), obj)
                        else:
                            documenter = get_documenter(
                                app, safe_getattr(obj, name), obj)
                    except AttributeError:
                        continue
                    if typ is None or documenter.objtype == typ:
                        items.append(name)
                public = [
                    x for x in items
                    if x in include_public or not x.startswith('_')
                ]
                return public, items

            ns = {}

            if doc.objtype == 'module':
                ns['members'] = get_members_mod(obj, None)
                ns['functions'], ns['all_functions'] = \
                    get_members_mod(obj, 'function')
                ns['classes'], ns['all_classes'] = \
                    get_members_mod(obj, 'class')
                ns['exceptions'], ns['all_exceptions'] = \
                    get_members_mod(obj, 'exception')
            elif doc.objtype == 'class':
                if inherited_mem is not None:
                    # option set in this specific directive
                    include_base = inherited_mem
                else:
                    # use default value
                    include_base = inherited_members

                api_class_methods = ['__init__', '__call__']
                ns['members'] = get_members_class(obj,
                                                  None,
                                                  include_base=include_base)
                ns['methods'], ns['all_methods'] = \
                    get_members_class(obj, 'method', api_class_methods,
                                      include_base=include_base)
                ns['attributes'], ns['all_attributes'] = \
                    get_members_class(obj, 'attribute',
                                      include_base=include_base)
                ns['methods'].sort()
                ns['attributes'].sort()

            parts = name.split('.')
            if doc.objtype in ('method', 'attribute'):
                mod_name = '.'.join(parts[:-2])
                cls_name = parts[-2]
                obj_name = '.'.join(parts[-2:])
                ns['class'] = cls_name
            else:
                mod_name, obj_name = '.'.join(parts[:-1]), parts[-1]

            ns['fullname'] = name
            ns['module'] = mod_name
            ns['objname'] = obj_name
            ns['name'] = parts[-1]

            ns['objtype'] = doc.objtype
            ns['underline'] = len(obj_name) * '='

            # We now check whether a file for reference footnotes exists for
            # the module being documented. We first check if the
            # current module is a file or a directory, as this will give a
            # different path for the reference file. For example, if
            # documenting astropy.wcs then the reference file is at
            # ../wcs/references.txt, while if we are documenting
            # astropy.config.logging_helper (which is at
            # astropy/config/logging_helper.py) then the reference file is set
            # to ../config/references.txt
            if '.' in mod_name:
                mod_name_dir = mod_name.replace('.', '/').split('/', 1)[1]
            else:
                mod_name_dir = mod_name
            if not os.path.isdir(os.path.join(base_path, mod_name_dir)) \
               and os.path.isdir(os.path.join(base_path, mod_name_dir.rsplit('/', 1)[0])):
                mod_name_dir = mod_name_dir.rsplit('/', 1)[0]

            # We then have to check whether it exists, and if so, we pass it
            # to the template.
            if os.path.exists(
                    os.path.join(base_path, mod_name_dir, 'references.txt')):
                # An important subtlety here is that the path we pass in has
                # to be relative to the file being generated, so we have to
                # figure out the right number of '..'s
                ndirsback = path.replace(base_path, '').count('/')
                ref_file_rel_segments = ['..'] * ndirsback
                ref_file_rel_segments.append(mod_name_dir)
                ref_file_rel_segments.append('references.txt')
                ns['referencefile'] = os.path.join(*ref_file_rel_segments)

            rendered = template.render(**ns)
            f.write(cleanup_whitespace(rendered))
        finally:
            f.close()
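
A hedged sketch of calling this per-file generator directly; the document path, base_path and line list are placeholders -- in a real build the automodsumm extension supplies them from its Sphinx event handlers.

# hypothetical stand-alone call; normally driven by the extension itself
with open('docs/api.rst') as f:
    lines = f.read().splitlines()

generate_automodsumm_docs(lines, 'docs/api.rst',
                          suffix='.rst',
                          base_path='docs',
                          template_dir=None)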
Example #54
def render_math(self, math):
    """Render the LaTeX math expression *math* using latex and dvipng or
    dvisvgm.

    Return the filename relative to the built document and the "depth",
    that is, the distance of image bottom and baseline in pixels, if the
    option to use preview_latex is switched on.

    Error handling follows a pattern: if LaTeX or dvipng (dvisvgm) is not
    available, only a warning is emitted, so that people on machines without
    these programs can still build the rest of the docs.  If the programs are
    available but fail, an error is raised, since that indicates a problem in
    the math source itself.
    """
    image_format = self.builder.config.imgmath_image_format
    if image_format not in ('png', 'svg'):
        raise MathExtError(
            'imgmath_image_format must be either "png" or "svg"')

    font_size = self.builder.config.imgmath_font_size
    use_preview = self.builder.config.imgmath_use_preview
    latex = DOC_HEAD + self.builder.config.imgmath_latex_preamble
    latex += (use_preview and DOC_BODY_PREVIEW
              or DOC_BODY) % (font_size, int(round(font_size * 1.2)), math)

    shasum = "%s.%s" % (sha1(latex.encode('utf-8')).hexdigest(), image_format)
    relfn = posixpath.join(self.builder.imgpath, 'math', shasum)
    outfn = path.join(self.builder.outdir, self.builder.imagedir, 'math',
                      shasum)
    if path.isfile(outfn):
        depth = read_png_depth(outfn)
        return relfn, depth

    # if latex or dvipng (dvisvgm) has failed once, don't bother to try again
    if hasattr(self.builder, '_imgmath_warned_latex') or \
       hasattr(self.builder, '_imgmath_warned_image_translator'):
        return None, None

    # use only one tempdir per build -- the use of a directory is cleaner
    # than using temporary files, since we can clean up everything at once
    # just removing the whole directory (see cleanup_tempdir)
    if not hasattr(self.builder, '_imgmath_tempdir'):
        tempdir = self.builder._imgmath_tempdir = tempfile.mkdtemp()
    else:
        tempdir = self.builder._imgmath_tempdir

    tf = codecs.open(path.join(tempdir, 'math.tex'), 'w', 'utf-8')
    tf.write(latex)
    tf.close()

    # build latex command; old versions of latex don't have the
    # --output-directory option, so we have to manually chdir to the
    # temp dir to run it.
    ltx_args = [self.builder.config.imgmath_latex, '--interaction=nonstopmode']
    # add custom args from the config file
    ltx_args.extend(self.builder.config.imgmath_latex_args)
    ltx_args.append('math.tex')

    with cd(tempdir):
        try:
            p = Popen(ltx_args, stdout=PIPE, stderr=PIPE)
        except OSError as err:
            if err.errno != ENOENT:  # No such file or directory
                raise
            self.builder.warn(
                'LaTeX command %r cannot be run (needed for math '
                'display), check the imgmath_latex setting' %
                self.builder.config.imgmath_latex)
            self.builder._imgmath_warned_latex = True
            return None, None

    stdout, stderr = p.communicate()
    if p.returncode != 0:
        raise MathExtError('latex exited with error', stderr, stdout)

    ensuredir(path.dirname(outfn))
    if image_format == 'png':
        image_translator = 'dvipng'
        image_translator_executable = self.builder.config.imgmath_dvipng
        # use some standard dvipng arguments
        image_translator_args = [self.builder.config.imgmath_dvipng]
        image_translator_args += ['-o', outfn, '-T', 'tight', '-z9']
        # add custom ones from config value
        image_translator_args.extend(self.builder.config.imgmath_dvipng_args)
        if use_preview:
            image_translator_args.append('--depth')
    elif image_format == 'svg':
        image_translator = 'dvisvgm'
        image_translator_executable = self.builder.config.imgmath_dvisvgm
        # use some standard dvisvgm arguments
        image_translator_args = [self.builder.config.imgmath_dvisvgm]
        image_translator_args += ['-o', outfn]
        # add custom ones from config value
        image_translator_args.extend(self.builder.config.imgmath_dvisvgm_args)
    else:
        raise MathExtError(
            'imgmath_image_format must be either "png" or "svg"')

    # last, the input file name
    image_translator_args.append(path.join(tempdir, 'math.dvi'))

    try:
        p = Popen(image_translator_args, stdout=PIPE, stderr=PIPE)
    except OSError as err:
        if err.errno != ENOENT:  # No such file or directory
            raise
        self.builder.warn(
            '%s command %r cannot be run (needed for math '
            'display), check the imgmath_%s setting' %
            (image_translator, image_translator_executable, image_translator))
        self.builder._imgmath_warned_image_translator = True
        return None, None

    stdout, stderr = p.communicate()
    if p.returncode != 0:
        raise MathExtError('%s exited with error' % image_translator, stderr,
                           stdout)
    depth = None
    if use_preview and image_format == 'png':  # depth is only useful for png
        for line in stdout.splitlines():
            m = depth_re.match(line)
            if m:
                depth = int(m.group(1))
                write_png_depth(outfn, depth)
                break

    return relfn, depth
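
The hash-then-cache pattern used above (derive a stable file name from the input, return early if the file already exists, otherwise create the output directory before rendering) recurs in the renderers that follow. A stand-alone sketch with illustrative names:

from hashlib import sha1
from os import path

from sphinx.util.osutil import ensuredir

def cached_output_path(outdir, source_text, ext='png'):
    # identical input always maps to the same file name
    digest = sha1(source_text.encode('utf-8')).hexdigest()
    outfn = path.join(outdir, 'math', '%s.%s' % (digest, ext))
    # create intermediate directories on demand; a no-op if they already exist
    ensuredir(path.dirname(outfn))
    return outfn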
Example #55
def render_mm(self, code, options, format, prefix='mermaid'):
    """Render mermaid code into a PNG or PDF output file."""

    if format == 'raw':
        format = 'png'

    mermaid_cmd = self.builder.config.mermaid_cmd
    verbose = self.builder.config.mermaid_verbose
    hashkey = (
        code + str(options) +
        str(self.builder.config.mermaid_sequence_config)).encode('utf-8')

    basename = '%s-%s' % (prefix, sha1(hashkey).hexdigest())
    fname = '%s.%s' % (basename, format)
    relfn = posixpath.join(self.builder.imgpath, fname)
    outdir = path.join(self.builder.outdir, self.builder.imagedir)
    outfn = path.join(outdir, fname)
    tmpfn = path.join(_get_default_tempdir(), basename)

    if path.isfile(outfn):
        return relfn, outfn

    ensuredir(path.dirname(outfn))

    # mermaid expects UTF-8 by default
    if isinstance(code, text_type):
        code = code.encode('utf-8')

    with open(tmpfn, 'wb') as t:
        t.write(code)

    mm_args = [mermaid_cmd, tmpfn, '-o', outdir]
    if verbose:
        mm_args.extend(['-v'])
    if self.builder.config.mermaid_phantom_path:
        mm_args.extend(
            ['--phantomPath', self.builder.config.mermaid_phantom_path])
    if self.builder.config.mermaid_sequence_config:
        with NamedTemporaryFile(delete=False) as seq:
            json.dump(self.builder.config.mermaid_sequence_config, seq)
        mm_args.extend(['--sequenceConfig', seq.name])
    if format == 'png':
        mm_args.extend(['-p'])
    else:
        mm_args.extend(['-s'])
        self.builder.warn('Mermaid SVG support is experimental')
    try:
        p = Popen(mm_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    except OSError as err:
        if err.errno != ENOENT:  # No such file or directory
            raise
        self.builder.warn('command %r cannot be run (needed for mermaid '
                          'output), check the mermaid_cmd setting' %
                          mermaid_cmd)
        return None, None

    stdout, stderr = p.communicate(code)
    if verbose:
        self.builder.info(stdout)

    if p.returncode != 0:
        raise MermaidError('Mermaid exited with error:\n[stderr]\n%s\n'
                           '[stdout]\n%s' % (stderr, stdout))
    if not path.isfile(outfn):
        raise MermaidError(
            'Mermaid did not produce an output file:\n[stderr]\n%s\n'
            '[stdout]\n%s' % (stderr, stdout))
    return relfn, outfn
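
The configuration values read by render_mm() map onto conf.py options of the sphinxcontrib-mermaid extension (older, CLI-based versions); a hedged conf.py sketch, with the command path left as a placeholder:

# conf.py
extensions = ['sphinxcontrib.mermaid']

mermaid_cmd = 'mermaid'          # CLI executable used by render_mm()
mermaid_verbose = False
mermaid_phantom_path = None      # path to phantomjs, if required
mermaid_sequence_config = None   # dict written to a JSON file when set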
Example #56
def render_sdaps(self, node):
    code = node['sdaps']
    hashkey = code.encode('utf-8')
    hashkey += node['preamble'].encode('utf-8')
    if node['sdapsclassic']:
        hashkey += b'sdapsclassic'
    if node['metadata']:
        hashkey += b'metadata'

    fname = 'sdaps-%s.svg' % (sha(hashkey).hexdigest())
    relfn = posixpath.join(self.builder.imgpath, fname)
    outfn = os.path.join(self.builder.outdir, '_images', fname)

    if os.path.isfile(outfn):
        return relfn, 'Compile was not started as output exists'

    if hasattr(self.builder, '_sdaps_warned'):
        return None, 'Compile was not started due to prior warnings'

    ensuredir(os.path.dirname(outfn))
    curdir = os.getcwd()

    replacements = {
        'target': code,
        'preamble': node['preamble'] if 'preamble' in node else ''
    }

    if not node['sdapsclassic']:
        latex = TEMPLATE % replacements
    else:
        latex = TEMPLATE_SDAPSCLASSIC % replacements
    latex = latex.encode('utf-8')

    tempdir = self.builder._sdaps_tempdir = tempfile.mkdtemp(
        prefix="sphinx-sdapstex-")

    try:
        fd = open(os.path.join(tempdir, 'tmp.tex'), 'wb')
        fd.write(latex)
        fd.close()

        res = compile_target('tmp.tex',
                             cwd=tempdir,
                             inputs=[self.builder.config.sdaps_latex_dir])
        if res != 0:
            self.builder.warn(
                'An error occurred while compiling the LaTeX document')
            self.builder._sdaps_warned = True
            relfn = None
        else:
            if node['metadata']:
                shutil.copy(os.path.join(tempdir, 'tmp.sdaps'),
                            outfn + '.meta')

            topinfo, bottominfo, paper = open(
                os.path.join(tempdir,
                             'tmp.targetinfo')).read().split('\n')[0:3]
            x1, y1 = [float(i[:-2]) * 72.27 / 72 for i in topinfo.split(',')]
            x2, y2 = [
                float(i[:-2]) * 72.27 / 72 for i in bottominfo.split(',')
            ]
            pw, ph = [float(i[:-2]) * 72.27 / 72 for i in paper.split(',')]
            x = int(x1)
            y = int(ph - y1)
            w = int(x2 - x1 + 1)
            h = int(y1 - y2 + 1)

            res = subprocess.call([
                'pdftocairo', '-x',
                str(x), '-y',
                str(y), '-W',
                str(w), '-H',
                str(h), '-paperw',
                str(w), '-paperh',
                str(h), '-svg',
                os.path.join(tempdir, 'tmp.pdf'), outfn
            ])
            if res:
                self.builder.warn('SVG conversion failed')
                self.builder._sdaps_warned = True
                return None, 'SVG conversion failed'
    finally:
        errlog = open(os.path.join(tempdir, 'tmp.log')).read()
        shutil.rmtree(tempdir)

    return relfn, errlog
Example #57
    def __init__(self,
                 srcdir,
                 confdir,
                 outdir,
                 doctreedir,
                 buildername,
                 confoverrides=None,
                 status=sys.stdout,
                 warning=sys.stderr,
                 freshenv=False,
                 warningiserror=False,
                 tags=None,
                 verbosity=0,
                 parallel=0):
        # type: (unicode, unicode, unicode, unicode, unicode, Dict, IO, IO, bool, bool, List[unicode], int, int) -> None  # NOQA
        self.verbosity = verbosity
        self.extensions = {}  # type: Dict[unicode, Extension]
        self._setting_up_extension = ['?']  # type: List[unicode]
        self.builder = None  # type: Builder
        self.env = None  # type: BuildEnvironment
        self.registry = SphinxComponentRegistry()
        self.enumerable_nodes = {}  # type: Dict[nodes.Node, Tuple[unicode, Callable]]  # NOQA
        self.html_themes = {}  # type: Dict[unicode, unicode]

        self.srcdir = srcdir
        self.confdir = confdir
        self.outdir = outdir
        self.doctreedir = doctreedir

        self.parallel = parallel

        if status is None:
            self._status = cStringIO()  # type: IO
            self.quiet = True
        else:
            self._status = status
            self.quiet = False

        if warning is None:
            self._warning = cStringIO()  # type: IO
        else:
            self._warning = warning
        self._warncount = 0
        self.warningiserror = warningiserror
        logging.setup(self, self._status, self._warning)

        self.events = EventManager()

        # keep last few messages for traceback
        # This will be filled by sphinx.util.logging.LastMessagesWriter
        self.messagelog = deque(maxlen=10)  # type: deque

        # say hello to the world
        logger.info(bold('Running Sphinx v%s' % sphinx.__display_version__))

        # status code for command-line application
        self.statuscode = 0

        # read config
        self.tags = Tags(tags)
        self.config = Config(confdir, CONFIG_FILENAME, confoverrides or {},
                             self.tags)
        self.config.check_unicode()
        # defer checking types until i18n has been initialized

        # initialize some limited config variables before initialize i18n and loading
        # extensions
        self.config.pre_init_values()

        # set up translation infrastructure
        self._init_i18n()

        # check the Sphinx version if requested
        if self.config.needs_sphinx and self.config.needs_sphinx > sphinx.__display_version__:
            raise VersionRequirementError(
                __('This project needs at least Sphinx v%s and therefore cannot '
                   'be built with this version.') % self.config.needs_sphinx)

        # set confdir to srcdir if -C given (!= no confdir); a few pieces
        # of code expect a confdir to be set
        if self.confdir is None:
            self.confdir = self.srcdir

        # load all built-in extension modules
        for extension in builtin_extensions:
            self.setup_extension(extension)

        # load all user-given extension modules
        for extension in self.config.extensions:
            self.setup_extension(extension)

        # preload builder module (before init config values)
        self.preload_builder(buildername)

        if not path.isdir(outdir):
            logger.info('making output directory...')
            ensuredir(outdir)

        # the config file itself can be an extension
        if self.config.setup:
            self._setting_up_extension = ['conf.py']
            # Python 3.1 lacks the callable() builtin, hence the __call__ check below
            if hasattr(self.config.setup, '__call__'):
                self.config.setup(self)
            else:
                raise ConfigError(
                    __("'setup' as currently defined in conf.py isn't a Python callable. "
                       "Please modify its definition to make it a callable function. This is "
                       "needed for conf.py to behave as a Sphinx extension."))

        # now that we know all config values, collect them from conf.py
        self.config.init_values()

        # check extension versions if requested
        verify_required_extensions(self, self.config.needs_extensions)

        # check primary_domain if requested
        primary_domain = self.config.primary_domain
        if primary_domain and not self.registry.has_domain(primary_domain):
            logger.warning(__('primary_domain %r not found, ignored.'),
                           primary_domain)

        # create the builder
        self.builder = self.create_builder(buildername)
        # check all configuration values for permissible types
        self.config.check_types()
        # set up source_parsers
        self._init_source_parsers()
        # set up the build environment
        self._init_env(freshenv)
        # set up the builder
        self._init_builder()
        # set up the enumerable nodes
        self._init_enumerable_nodes()
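
A minimal programmatic construction of the application, mirroring what build_main() in Example #52 does; the paths are placeholders and the exact import of docutils_namespace may vary with the Sphinx version:

from sphinx.util.docutils import docutils_namespace

with docutils_namespace():
    app = Sphinx(srcdir='docs/source',
                 confdir='docs/source',
                 outdir='docs/_build/html',
                 doctreedir='docs/_build/doctrees',
                 buildername='html')
    app.build()
    print('build finished with status %d' % app.statuscode)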
Example #58
def generate(d, overwrite=True, silent=False, templatedir=None):
    # type: (Dict, bool, bool, unicode) -> None
    """Generate project based on values in *d*."""
    template = QuickstartRenderer(templatedir=templatedir)

    texescape.init()

    if 'mastertoctree' not in d:
        d['mastertoctree'] = ''
    if 'mastertocmaxdepth' not in d:
        d['mastertocmaxdepth'] = 2

    d['PY3'] = PY3
    d['project_fn'] = make_filename(d['project'])
    d['project_url'] = urlquote(d['project'].encode('idna'))
    d['project_manpage'] = d['project_fn'].lower()
    d['now'] = time.asctime()
    d['project_underline'] = column_width(d['project']) * '='
    d.setdefault('extensions', [])
    d['copyright'] = time.strftime('%Y') + ', ' + d['author']
    d['author_texescaped'] = text_type(d['author']).\
        translate(texescape.tex_escape_map)
    d['project_doc'] = d['project'] + ' Documentation'
    d['project_doc_texescaped'] = text_type(d['project'] + ' Documentation').\
        translate(texescape.tex_escape_map)

    # escape backslashes and single quotes in strings that are put into
    # a Python string literal
    for key in ('project', 'project_doc', 'project_doc_texescaped', 'author',
                'author_texescaped', 'copyright', 'version', 'release',
                'master'):
        d[key + '_str'] = d[key].replace('\\', '\\\\').replace("'", "\\'")

    if not path.isdir(d['path']):
        ensuredir(d['path'])

    srcdir = d['sep'] and path.join(d['path'], 'source') or d['path']

    ensuredir(srcdir)
    if d['sep']:
        builddir = path.join(d['path'], 'build')
        d['exclude_patterns'] = ''
    else:
        builddir = path.join(srcdir, d['dot'] + 'build')
        exclude_patterns = map(repr, [
            d['dot'] + 'build',
            'Thumbs.db',
            '.DS_Store',
        ])
        d['exclude_patterns'] = ', '.join(exclude_patterns)
    ensuredir(builddir)
    ensuredir(path.join(srcdir, d['dot'] + 'templates'))
    ensuredir(path.join(srcdir, d['dot'] + 'static'))

    def write_file(fpath, content, newline=None):
        # type: (unicode, unicode, unicode) -> None
        if overwrite or not path.isfile(fpath):
            if 'quiet' not in d:
                print(__('Creating file %s.') % fpath)
            with open(fpath, 'wt', encoding='utf-8', newline=newline) as f:
                f.write(content)
        else:
            if 'quiet' not in d:
                print(__('File %s already exists, skipping.') % fpath)

    conf_path = os.path.join(templatedir, 'conf.py_t') if templatedir else None
    if not conf_path or not path.isfile(conf_path):
        conf_path = os.path.join(package_dir, 'templates', 'quickstart',
                                 'conf.py_t')
    with open(conf_path) as f:
        conf_text = convert_python_source(f.read())

    write_file(path.join(srcdir, 'conf.py'),
               template.render_string(conf_text, d))

    masterfile = path.join(srcdir, d['master'] + d['suffix'])
    write_file(masterfile, template.render('quickstart/master_doc.rst_t', d))

    if d.get('make_mode') is True:
        makefile_template = 'quickstart/Makefile.new_t'
        batchfile_template = 'quickstart/make.bat.new_t'
    else:
        makefile_template = 'quickstart/Makefile_t'
        batchfile_template = 'quickstart/make.bat_t'

    if d['makefile'] is True:
        d['rsrcdir'] = d['sep'] and 'source' or '.'
        d['rbuilddir'] = d['sep'] and 'build' or d['dot'] + 'build'
        # write with an explicit '\n' newline to avoid '\r\n' line endings on Windows
        write_file(path.join(d['path'], 'Makefile'),
                   template.render(makefile_template, d), u'\n')

    if d['batchfile'] is True:
        d['rsrcdir'] = d['sep'] and 'source' or '.'
        d['rbuilddir'] = d['sep'] and 'build' or d['dot'] + 'build'
        write_file(path.join(d['path'], 'make.bat'),
                   template.render(batchfile_template, d), u'\r\n')

    if silent:
        return
    print()
    print(
        bold(__('Finished: An initial directory structure has been created.')))
    print(
        __('''
You should now populate your master file %s and create other documentation
source files. ''') % masterfile +
        ((d['makefile'] or d['batchfile']) and __('''\
Use the Makefile to build the docs, like so:
   make builder
''') or __('''\
Use the sphinx-build command to build the docs, like so:
   sphinx-build -b builder %s %s
''') % (srcdir, builddir)) + __('''\
where "builder" is one of the supported builders, e.g. html, latex or linkcheck.
'''))
Example #59
    def write(self, *ignored):
        version = self.config.version
        libchanges = {}
        apichanges = []
        otherchanges = {}
        if version not in self.env.versionchanges:
            self.info(bold('no changes in version %s.' % version))
            return
        self.info(bold('writing summary file...'))
        for type, docname, lineno, module, descname, content in \
                self.env.versionchanges[version]:
            if isinstance(descname, tuple):
                descname = descname[0]
            ttext = self.typemap[type]
            context = content.replace('\n', ' ')
            if descname and docname.startswith('c-api'):
                if not descname:
                    continue
                if context:
                    entry = '<b>%s</b>: <i>%s:</i> %s' % (descname, ttext,
                                                          context)
                else:
                    entry = '<b>%s</b>: <i>%s</i>.' % (descname, ttext)
                apichanges.append((entry, docname, lineno))
            elif descname or module:
                if not module:
                    module = _('Builtins')
                if not descname:
                    descname = _('Module level')
                if context:
                    entry = '<b>%s</b>: <i>%s:</i> %s' % (descname, ttext,
                                                          context)
                else:
                    entry = '<b>%s</b>: <i>%s</i>.' % (descname, ttext)
                libchanges.setdefault(module, []).append(
                    (entry, docname, lineno))
            else:
                if not context:
                    continue
                entry = '<i>%s:</i> %s' % (ttext.capitalize(), context)
                title = self.env.titles[docname].astext()
                otherchanges.setdefault((docname, title), []).append(
                    (entry, docname, lineno))

        ctx = {
            'project': self.config.project,
            'version': version,
            'docstitle': self.config.html_title,
            'shorttitle': self.config.html_short_title,
            'libchanges': sorted(libchanges.items()),
            'apichanges': sorted(apichanges),
            'otherchanges': sorted(otherchanges.items()),
            'show_copyright': self.config.html_show_copyright,
            'show_sphinx': self.config.html_show_sphinx,
        }
        f = codecs.open(path.join(self.outdir, 'index.html'), 'w', 'utf8')
        try:
            f.write(self.templates.render('changes/frameset.html', ctx))
        finally:
            f.close()
        f = codecs.open(path.join(self.outdir, 'changes.html'), 'w', 'utf8')
        try:
            f.write(self.templates.render('changes/versionchanges.html', ctx))
        finally:
            f.close()

        hltext = [
            '.. versionadded:: %s' % version,
            '.. versionchanged:: %s' % version,
            '.. deprecated:: %s' % version
        ]

        def hl(no, line):
            line = '<a name="L%s"> </a>' % no + htmlescape(line)
            for x in hltext:
                if x in line:
                    line = '<span class="hl">%s</span>' % line
                    break
            return line

        self.info(bold('copying source files...'))
        for docname in self.env.all_docs:
            f = codecs.open(self.env.doc2path(docname), 'r', 'latin1')
            try:
                lines = f.readlines()
            finally:
                f.close()
            targetfn = path.join(self.outdir, 'rst',
                                 os_path(docname)) + '.html'
            ensuredir(path.dirname(targetfn))
            f = codecs.open(targetfn, 'w', 'latin1')
            try:
                text = ''.join(
                    hl(i + 1, line) for (i, line) in enumerate(lines))
                ctx = {
                    'filename': self.env.doc2path(docname, None),
                    'text': text
                }
                f.write(self.templates.render('changes/rstsource.html', ctx))
            finally:
                f.close()
        themectx = dict(('theme_' + key, val)
                        for (key, val) in self.theme.get_options({}).items())
        copy_static_entry(
            path.join(package_dir, 'themes', 'default', 'static',
                      'default.css_t'), self.outdir, self, themectx)
        copy_static_entry(
            path.join(package_dir, 'themes', 'basic', 'static', 'basic.css'),
            self.outdir, self)
Example #60
def render_symbol(self, code, options, format, prefix='symbol'):
    # type: (nodes.NodeVisitor, unicode, Dict, unicode, unicode) -> Tuple[unicode, unicode]
    """Render symbolator code into a PNG or SVG output file."""

    symbolator_cmd = options.get('symbolator_cmd',
                                 self.builder.config.symbolator_cmd)
    hashkey = (code + str(options) + str(symbolator_cmd) +
               str(self.builder.config.symbolator_cmd_args)).encode('utf-8')

    # Use name option if present otherwise fallback onto SHA-1 hash
    name = options.get('name', sha1(hashkey).hexdigest())
    fname = '%s-%s.%s' % (prefix, name, format)
    relfn = posixpath.join(self.builder.imgpath, fname)
    outfn = path.join(self.builder.outdir, self.builder.imagedir, fname)

    if path.isfile(outfn):
        return relfn, outfn

    if (hasattr(self.builder, '_symbolator_warned_cmd')
            and self.builder._symbolator_warned_cmd.get(symbolator_cmd)):
        return None, None

    ensuredir(path.dirname(outfn))

    # Symbolator expects UTF-8 by default
    if isinstance(code, text_type):
        code = code.encode('utf-8')

    cmd_args = [symbolator_cmd]
    cmd_args.extend(self.builder.config.symbolator_cmd_args)
    cmd_args.extend(['-i', '-', '-f', format, '-o', outfn])

    try:
        p = Popen(cmd_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    except OSError as err:
        if err.errno != ENOENT:  # No such file or directory
            raise
        logger.warning(
            'symbolator command %r cannot be run (needed for symbolator '
            'output), check the symbolator_cmd setting', symbolator_cmd)
        if not hasattr(self.builder, '_symbolator_warned_cmd'):
            self.builder._symbolator_warned_cmd = {}
        self.builder._symbolator_warned_cmd[symbolator_cmd] = True
        return None, None
    try:
        # Symbolator may close standard input when an error occurs,
        # resulting in a broken pipe on communicate()
        stdout, stderr = p.communicate(code)
    except (OSError, IOError) as err:
        if err.errno not in (EPIPE, EINVAL):
            raise
        # in this case, read the standard output and standard error streams
        # directly, to get the error message(s)
        stdout, stderr = p.stdout.read(), p.stderr.read()
        p.wait()
    if p.returncode != 0:
        raise SymbolatorError('symbolator exited with error:\n[stderr]\n%s\n'
                              '[stdout]\n%s' % (stderr, stdout))
    if not path.isfile(outfn):
        raise SymbolatorError(
            'symbolator did not produce an output file:\n[stderr]\n%s\n'
            '[stdout]\n%s' % (stderr, stdout))
    return relfn, outfn
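
The two configuration values consumed above would be set in conf.py along these lines; the extension module name and the empty argument list are assumptions:

# conf.py
extensions = ['symbolator_sphinx']   # assumed module name of the extension

symbolator_cmd = 'symbolator'        # executable invoked by render_symbol()
symbolator_cmd_args = []             # extra command-line arguments, if any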