Code Example #1
def pre_build(local_root, versions):
    """Build docs for all versions to determine root directory and master_doc names.

    Need to build docs to (a) avoid filename collision with files from root_ref and branch/tag names and (b) determine
    master_doc config values for all versions (in case master_doc changes from e.g. contents.rst to index.rst between
    versions).

    Exports all commits into a temporary directory and returns the path to avoid re-exporting during the final build.

    :param str local_root: Local path to git root directory.
    :param sphinxcontrib.versioning.versions.Versions versions: Versions class instance.

    :return: Tempdir path with exported commits as subdirectories.
    :rtype: str
    """
    log = logging.getLogger(__name__)
    exported_root = TempDir(True).name

    # Extract all.
    for sha in {r['sha'] for r in versions.remotes}:
        target = os.path.join(exported_root, sha)
        log.debug('Exporting %s to temporary directory.', sha)
        export(local_root, sha, target)

    # Build root.
    remote = versions[Config.from_context().root_ref]
    with TempDir() as temp_dir:
        log.debug('Building root (before setting root_dirs) in temporary directory: %s', temp_dir)
        source = os.path.dirname(os.path.join(exported_root, remote['sha'], remote['conf_rel_path']))
        build(source, temp_dir, versions, remote['name'], True)
        existing = os.listdir(temp_dir)

    # Define root_dir for all versions to avoid file name collisions.
    for remote in versions.remotes:
        root_dir = RE_INVALID_FILENAME.sub('_', remote['name'])
        while root_dir in existing:
            root_dir += '_'
        remote['root_dir'] = root_dir
        log.debug('%s root directory is %s', remote['name'], root_dir)
        existing.append(root_dir)

    # Get found_docs and master_doc values for all versions.
    for remote in list(versions.remotes):
        log.debug('Partially running sphinx-build to read configuration for: %s', remote['name'])
        source = os.path.dirname(os.path.join(exported_root, remote['sha'], remote['conf_rel_path']))
        try:
            config = read_config(source, remote['name'])
        except HandledError:
            log.warning('Skipping. Will not be building: %s', remote['name'])
            versions.remotes.pop(versions.remotes.index(remote))
            continue
        remote['found_docs'] = config['found_docs']
        remote['master_doc'] = config['master_doc']

    return exported_root
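A minimal usage sketch (not part of this excerpt) of how pre_build's output can be consumed: after it returns, every remote carries 'root_dir' and 'master_doc', and its exported checkout lives under exported_root/<sha>. Here `destination` is a hypothetical output directory, and build() is assumed to keep the signature used above.

exported_root = pre_build(local_root, versions)
root_ref = Config.from_context().root_ref
for remote in versions.remotes:
    source = os.path.dirname(os.path.join(exported_root, remote['sha'], remote['conf_rel_path']))
    target = os.path.join(destination, remote['root_dir'])  # `destination` is hypothetical
    build(source, target, versions, remote['name'], remote['name'] == root_ref)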
Code Example #2
def read_config(source, current_name):
    """Read the Sphinx config for one version.

    :raise HandledError: If sphinx-build fails. Will be logged before raising.

    :param str source: Source directory to pass to sphinx-build.
    :param str current_name: The ref name of the current version being built.

    :return: Specific Sphinx config values.
    :rtype: dict
    """
    log = logging.getLogger(__name__)
    queue = multiprocessing.Queue()
    config = Config.from_context()

    with TempDir() as temp_dir:
        argv = ('sphinx-build', source, temp_dir)
        log.debug('Running sphinx-build for config values with args: %s',
                  str(argv))
        child = multiprocessing.Process(target=_read_config,
                                        args=(argv, config, current_name,
                                              queue))
        child.start()
        child.join()  # Block.
        if child.exitcode != 0:
            log.error(
                'sphinx-build failed for branch/tag while reading config: %s',
                current_name)
            raise HandledError

    config = queue.get()
    return config
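The _read_config target handed to multiprocessing.Process is not shown in this excerpt. Below is a self-contained sketch of the isolation pattern read_config relies on, with a hypothetical stand-in worker in place of the real helper.

import multiprocessing

def _stub_read_config(source, queue):
    # Stand-in for the real _read_config: that helper would run sphinx-build
    # against `source` and put the values pre_build needs onto the queue.
    queue.put({'master_doc': 'index', 'found_docs': ('index',)})

def read_values(source):
    queue = multiprocessing.Queue()
    child = multiprocessing.Process(target=_stub_read_config, args=(source, queue))
    child.start()
    child.join()  # Block until the isolated process exits.
    if child.exitcode != 0:
        raise RuntimeError('worker failed for %s' % source)
    return queue.get()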
Code Example #3
def push(ctx, config, rel_source, dest_branch, rel_dest, **options):
    """Build locally and then push to remote branch.

    First the build sub command is invoked which takes care of building all versions of your documentation in a
    temporary directory. If that succeeds then all built documents will be pushed to a remote branch.

    REL_SOURCE is the path to the docs directory relative to the git root. If the source directory has moved around
    between git tags you can specify additional directories.

    DEST_BRANCH is the branch name where generated docs will be committed to. The branch will then be pushed to remote.
    If there is a race condition with another job pushing to remote the docs will be re-generated and pushed again.

    REL_DEST is the path to the directory that will hold all generated docs for all versions relative to the git root of
    DEST_BRANCH.

    To pass options to sphinx-build (run for every branch/tag) use a double hyphen
    (e.g. push docs gh-pages . -- -D setting=value).
    \f

    :param click.core.Context ctx: Click context.
    :param sphinxcontrib.versioning.lib.Config config: Runtime configuration.
    :param tuple rel_source: Possible relative paths (to git root) of Sphinx directory containing conf.py (e.g. docs).
    :param str dest_branch: Branch to clone and push to.
    :param str rel_dest: Relative path (to git root) to write generated docs to.
    :param dict options: Additional Click options.
    """
    if 'pre' in config:
        config.pop('pre')(rel_source)
        config.update({k: v for k, v in options.items() if v})
        if config.local_conf:
            config.update(read_local_conf(config.local_conf), ignore_set=True)
    if NO_EXECUTE:
        raise RuntimeError(config, rel_source, dest_branch, rel_dest)
    log = logging.getLogger(__name__)

    # Clone, build, push.
    for _ in range(PUSH_RETRIES):
        with TempDir() as temp_dir:
            log.info('Cloning %s into temporary directory...', dest_branch)
            try:
                clone(config.git_root, temp_dir, config.push_remote,
                      dest_branch, rel_dest, config.grm_exclude)
            except GitError as exc:
                log.error(exc.message)
                log.error(exc.output)
                raise HandledError

            log.info('Building docs...')
            ctx.invoke(build,
                       rel_source=rel_source,
                       destination=os.path.join(temp_dir, rel_dest))
            versions = config.pop('versions')

            log.info('Attempting to push to branch %s on remote repository.',
                     dest_branch)
            try:
                if commit_and_push(temp_dir, config.push_remote, versions):
                    return
            except GitError as exc:
                log.error(exc.message)
                log.error(exc.output)
                raise HandledError
        log.warning(
            'Failed to push to remote repository. Retrying in %d seconds...',
            PUSH_SLEEP)
        time.sleep(PUSH_SLEEP)

    # Failed if this is reached.
    log.error('Ran out of retries, giving up.')
    raise HandledError
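A condensed sketch of the retry strategy above; rebuild_and_push() is a hypothetical stand-in for the clone/build/commit_and_push body, returning False when the remote rejects the push (another job won the race).

for _ in range(PUSH_RETRIES):
    if rebuild_and_push():  # hypothetical helper
        break
    time.sleep(PUSH_SLEEP)
else:
    raise HandledError  # ran out of retries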
Code Example #4
def _build(argv, config, versions, remote, is_root):
    """Build Sphinx docs via multiprocessing for isolation.

    :param tuple argv: Arguments to pass to Sphinx.
    :param sphinxcontrib.versioning.lib.Config config: Runtime configuration.
    :param sphinxcontrib.versioning.versions.Versions versions: Versions class instance.
    :param dict remote: The remote of the current version being built.
    :param bool is_root: Is this build in the web root?
    """
    # Patch.
    application.Config = ConfigInject
    if config.show_banner:
        EventHandlers.BANNER_GREATEST_TAG = config.banner_greatest_tag
        EventHandlers.BANNER_MAIN_VERSION = config.banner_main_ref
        EventHandlers.BANNER_RECENT_TAG = config.banner_recent_tag
        EventHandlers.SHOW_BANNER = True
    EventHandlers.CURRENT_VERSION = remote['name']
    EventHandlers.IS_ROOT = is_root
    EventHandlers.VERSIONS = versions
    SC_VERSIONING_VERSIONS[:] = [
        p for r in versions.remotes for p in sorted(r.items())
        if p[0] not in ('sha', 'date')
    ]

    # Update argv.
    if config.verbose > 1:
        argv += ('-v', ) * (config.verbose - 1)
    if config.no_colors:
        argv += ('-N', )
    if config.overflow:
        argv += config.overflow

    log = logging.getLogger(__name__)
    # delete=False: the frozen requirements file must outlive this block so the
    # venv's pip can install from it below.
    with tempfile.NamedTemporaryFile(delete=False) as requirements:
        log.info('Freezing requirements to %s', requirements.name)
        requirements.write(
            subprocess.check_output([sys.executable, '-m', 'pip', 'freeze']), )

    with TempDir() as temp_dir:
        log.info('Creating a venv at %s...', temp_dir)
        try:
            # with_pip=True bootstraps pip via ensurepip in a subprocess, which
            # is why CalledProcessError is handled here.
            venv.EnvBuilder(with_pip=True).create(temp_dir)
        except subprocess.CalledProcessError as exc:
            raise Exception(exc.stdout)

        venv_python = os.path.join(temp_dir, 'bin', 'python')

        log.info('Installing the frozen requirements...')
        subprocess.run(
            [venv_python, '-m', 'pip', 'install', '-r', requirements.name],
            check=True,
        )

        # Install the project before building.
        # This works because _build runs in a subprocess
        # (which is not affected by other subprocesses or their imports);
        # however, the package at pkg_path MUST NOT be imported yet
        # (because the already-loaded module will be used instead).
        # Walk conf_rel_path up (e.g. docs/conf.py -> docs) until the source
        # directory in argv[0] ends with it; stripping that suffix yields the
        # package root to install.
        rel_path = remote['conf_rel_path']
        while not argv[0].endswith(rel_path):
            assert rel_path != '/'
            rel_path = os.path.dirname(rel_path)
        assert argv[0].endswith(rel_path), (argv[0], rel_path)
        pkg_path = argv[0][:-len(rel_path)]
        log.info('Installing the package from %s...', pkg_path)
        subprocess.run(
            [venv_python, '-m', 'pip', 'install', pkg_path],
            check=True,
        )

        # Build.
        try:
            subprocess.run([venv_python, '-m', 'sphinx', *argv], check=True)
        except subprocess.CalledProcessError:
            raise SphinxError
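The docstring says _build runs via multiprocessing for isolation, but the parent that spawns it is not part of this excerpt. A hypothetical caller, following the same Process/join/exitcode pattern as read_config in Code Example #2 (the project's real build() may differ):

def build(source, target, versions, current_name, is_root):
    log = logging.getLogger(__name__)
    config = Config.from_context()
    argv = (source, target)  # _build later runs `python -m sphinx *argv` in the venv
    log.debug('Running sphinx-build for %s with args: %s', current_name, str(argv))
    child = multiprocessing.Process(target=_build, args=(argv, config, versions, versions[current_name], is_root))
    child.start()
    child.join()  # Block.
    if child.exitcode != 0:
        log.error('sphinx-build failed for branch/tag: %s', current_name)
        raise HandledError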