Example #1
def api(args):
    c = fetch_config(args)

    with BuildApp.new(pool_type=c.runstate.runner,
                      pool_size=c.runstate.pool_size,
                      force=c.runstate.force).context() as app:
        app.extend_queue(apiarg_tasks(c))
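
Nearly every example below funnels work through ``BuildApp.new(...).context()``. A minimal sketch of the pattern, assuming only that the context manager yields the app and drains its queue on exit (``SketchApp`` and its lambda tasks are hypothetical stand-ins, not giza's ``BuildApp``):

import contextlib

class SketchApp(object):
    def __init__(self):
        self.queue = []

    def extend_queue(self, tasks):
        # queue tasks now; they run when the context exits
        self.queue.extend(tasks)

    def run(self):
        return [task() for task in self.queue]

    @contextlib.contextmanager
    def context(self):
        yield self
        self.run()

with SketchApp().context() as app:
    app.extend_queue([lambda: 'task-one', lambda: 'task-two'])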
Example #2
def source(args):
    args.builder = 'html'
    conf = fetch_config(args)

    with BuildApp.new(pool_type=conf.runstate.runner,
                      pool_size=conf.runstate.pool_size,
                      force=conf.runstate.force).context() as app:
        sphinx_content_preperation(app, conf)
Example #3
def robots(args):
    c = fetch_config(args)

    with BuildApp.new(pool_type=c.runstate.runner,
                      pool_size=c.runstate.pool_size,
                      force=c.runstate.force).context() as app:
        app.pool = 'serial'
        app.extend_queue(robots_txt_tasks(c))
Example #4
def main(args):
    """
    Removes build artifacts from ``build/`` directory.
    """

    c = fetch_config(args)
    app = BuildApp.new(pool_type=c.runstate.runner,
                       pool_size=c.runstate.pool_size,
                       force=c.runstate.force)

    to_remove = set()

    if c.runstate.git_branch is not None:
        to_remove.add(os.path.join(c.paths.projectroot, c.paths.branch_output))

    if c.runstate.builder != []:
        for edition, language, builder in get_builder_jobs(c):
            builder_path = resolve_builder_path(builder, edition, language, c)
            builder_path = os.path.join(c.paths.projectroot, c.paths.branch_output, builder_path)

            to_remove.add(builder_path)
            dirpath, base = os.path.split(builder_path)
            to_remove.add(os.path.join(dirpath, 'doctrees-' + base))

            m = 'remove artifacts associated with the {0} builder in {1} ({2}, {3})'
            logger.debug(m.format(builder, c.git.branches.current, edition, language))

    if c.runstate.days_to_save is not None:
        # interpret days_to_save as an age threshold in days; builds whose
        # mtime falls before the cutoff are considered stale
        cutoff = time.time() - c.runstate.days_to_save * 60 * 60 * 24
        published_branches = ['docs-tools', 'archive', 'public', 'primer', c.git.branches.current]
        published_branches.extend(c.git.branches.published)

        for build in os.listdir(os.path.join(c.paths.projectroot, c.paths.output)):
            build = os.path.join(c.paths.projectroot, c.paths.output, build)
            branch = os.path.split(build)[1]

            if branch in published_branches:
                continue
            elif not os.path.isdir(build):
                continue
            elif os.stat(build).st_mtime < cutoff:
                to_remove.add(build)
                to_remove.add(os.path.join(c.paths.projectroot, c.paths.output, 'public', branch))
                logger.debug('removing stale artifacts: "{0}" and "build/public/{0}"'.format(branch))

    for fn in to_remove:
        if os.path.isdir(fn):
            job = shutil.rmtree
        else:
            job = os.remove

        t = app.add('task')
        t.job = job
        t.args = fn
        m = 'removing artifact: {0}'.format(fn)
        t.description = m
        logger.critical(m)

    app.run()
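
The staleness check above treats ``days_to_save`` as a count of days. A short hedged illustration of the cutoff arithmetic (the values are hypothetical):

import time

days_to_save = 30  # hypothetical threshold
cutoff = time.time() - days_to_save * 60 * 60 * 24

def is_stale(mtime):
    # a build is stale when it was last modified before the cutoff
    return mtime < cutoff

print(is_stale(time.time()))               # False: modified just now
print(is_stale(time.time() - 45 * 86400))  # True: 45 days old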
Example #5
def create_branch(args):
    """
    Takes a single branch name and (if necessary) creates a new branch. Then,
    populates the ``build/<branch>`` directory for the new branch using either
    the parent branch or ``master``. Safe to run multiple times (after a rebase)
    to update the build cache from master.

    Also calls :func:`~giza.operations.build_env.fix_build_environment` to
    tweak the new build output to update hashes and on-disk copies of the
    environment to prevent unnecessary full-rebuilds from sphinx.
    """

    conf = fetch_config(args)

    g = GitRepo(conf.paths.projectroot)

    branch = conf.runstate.git_branch
    base_branch = g.current_branch()

    if base_branch == branch:
        base_branch = 'master'
        logger.warning(
            'seeding build data for branch "{0}" from "master"'.format(branch))

    branch_builddir = os.path.join(conf.paths.projectroot, conf.paths.output,
                                   branch)

    base_builddir = os.path.join(conf.paths.projectroot, conf.paths.output,
                                 base_branch)

    if g.branch_exists(branch):
        logger.info('checking out branch "{0}"'.format(branch))
    else:
        logger.info(
            'creating and checking out a branch named "{0}"'.format(branch))

    g.checkout_branch(branch)

    cmd = "rsync -r --times --checksum {0}/ {1}".format(
        base_builddir, branch_builddir)
    logger.info('seeding build directory for "{0}" from "{1}"'.format(
        branch, base_branch))

    try:
        subprocess.check_call(args=cmd.split())
        logger.info('branch creation complete.')
    except subprocess.CalledProcessError:
        logger.error(cmd)

    # get a new config here for the new branch
    conf = fetch_config(args)
    builders = get_existing_builders(conf)

    with BuildApp.new(pool_type='process',
                      pool_size=conf.runstate.pool_size,
                      force=conf.runstate.force).context() as app:
        app.extend_queue(fix_build_env_tasks(builders, conf))
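
The seeding step shells out to rsync; the trailing slash on the source path copies that directory's contents into the target rather than nesting the directory itself. With hypothetical branch names:

base_builddir = 'build/master'  # hypothetical source
branch_builddir = 'build/v1.0'  # hypothetical target

cmd = "rsync -r --times --checksum {0}/ {1}".format(base_builddir, branch_builddir)
print(cmd)          # rsync -r --times --checksum build/master/ build/v1.0
print(cmd.split())  # the argument list handed to subprocess.check_call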
Example #6
def steps(args):
    c = fetch_config(args)

    with BuildApp.new(pool_type=c.runstate.runner,
                      pool_size=c.runstate.pool_size,
                      force=c.runstate.force).context() as app:
        if c.runstate.clean_generated is True:
            app.extend_queue(step_clean(c))
        else:
            app.extend_queue(step_tasks(c))
Example #7
def redirects(args):
    c = fetch_config(args)

    if args.dry_run is True:
        print(''.join(make_redirect(c)))
    else:
        with BuildApp.new(pool_type=c.runstate.runner,
                          pool_size=c.runstate.pool_size,
                          force=c.runstate.force).context() as app:
            app.extend_queue(redirect_tasks(c))
Example #8
def extract(args):
    conf = fetch_config(args)

    with BuildApp.new(pool_type=conf.runstate.runner,
                      pool_size=conf.runstate.pool_size,
                      force=conf.runstate.force).context() as app:
        path = fetch_package(conf.runstate._path, conf)
        extract_package_at_root(path, conf)

        builders = get_existing_builders(conf)
        app.extend_queue(fix_build_env_tasks(builders, conf))
Example #9
def images(args):
    c = fetch_config(args)

    with BuildApp.new(pool_type=c.runstate.runner,
                      pool_size=c.runstate.pool_size,
                      force=c.runstate.force).context() as app:

        if c.runstate.clean_generated is True:
            app.extend_queue(image_clean(c))
        else:
            for (_, (bconf, sconf)) in get_restricted_builder_jobs(c):
                app.extend_queue(image_tasks(bconf, sconf))
Example #10
def sphinx(args):
    if args.runner == 'serial':
        args.serial_sphinx = True

    conf = fetch_config(args)
    logger.warning('not for production use: this expects that content generation is complete.')

    app = BuildApp.new(pool_type=conf.runstate.runner,
                       pool_size=conf.runstate.pool_size,
                       force=conf.runstate.force)

    r = sphinx_builder_tasks(app, conf)

    raise SystemExit(r)
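
Raising ``SystemExit(r)`` makes the return value of ``sphinx_builder_tasks()`` the process exit status, so a nonzero result fails the command. A self-contained demonstration:

import subprocess
import sys

# a child process that raises SystemExit(3) exits with status 3
code = subprocess.call([sys.executable, '-c', 'raise SystemExit(3)'])
print(code)  # 3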
Example #11
def main(args):
    """
    Uploads all build artifacts to the production environment. Does not build or
    render artifacts.
    """

    c = fetch_config(args)
    app = BuildApp.new(pool_type=c.runstate.runner,
                       pool_size=c.runstate.pool_size,
                       force=c.runstate.force)

    deploy_tasks(c, app)

    if c.runstate.dry_run is False:
        app.run()
Example #12
def main(args):
    """
    Use Sphinx to generate build artifacts. Can generate artifacts for multiple
    output types, content editions and translations.
    """
    conf = fetch_config(args)

    app = BuildApp.new(pool_type=conf.runstate.runner,
                       pool_size=conf.runstate.pool_size,
                       force=conf.runstate.force)

    with Timer("full sphinx build process"):
        # In general we try to avoid passing the "app" object between functions
        # and mutating it at too many places in the stack (although in earlier
        # versions this was the primary idiom). This call is a noted exception,
        # and makes it possible to run portions of this process in separate
        # targets.

        sphinx_publication(conf, app)
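
A minimal sketch of a ``Timer`` context manager like the one used above; this is an illustrative stand-in, not giza's implementation:

import contextlib
import time

@contextlib.contextmanager
def timer(name):
    # report how long the managed block took
    start = time.time()
    yield
    print('{0}: {1:.2f} seconds'.format(name, time.time() - start))

with timer('full sphinx build process'):
    time.sleep(0.1)  # stands in for the real build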
Example #13
def publish_and_deploy(args):
    """
    Combines the work of ``giza sphinx`` and ``giza deploy``, to produce build
    artifacts and then upload those artifacts to the servers.
    """

    c = fetch_config(args)
    app = BuildApp.new(pool_type=c.runstate.runner,
                       pool_size=c.runstate.pool_size,
                       force=c.runstate.force)

    sphinx_ret = sphinx_publication(c, app)
    if sphinx_ret == 0 or c.runstate.force is True:
        deploy_tasks(c, app)

        if c.runstate.dry_run is False:
            app.run()
    else:
        logger.warning('{0} sphinx build(s) failed, and build not forced. not deploying.'.format(sphinx_ret))
Example #14
def test_build_site(args):
    args.languages_to_build = args.editions_to_build = []
    args.builder = 'html'

    conf = fetch_config(args)

    safe_create_directory('build')
    with BuildApp.new(pool_type=conf.runstate.runner,
                      pool_size=conf.runstate.pool_size,
                      force=conf.runstate.force).context() as app:
        try:
            sphinx_publication(conf, args, app)
        except Exception:
            # on failure, remove a stale docs-tools checkout and retry once
            if os.path.exists('docs-tools'):
                shutil.rmtree('docs-tools')
            sphinx_publication(conf, args, app)

    logger.info('bootstrapped makefile system')

    logger.info('updated project skeleton in current directory.')
Example #15
def run_make_operations(targets, conf):
    """
    :param list targets: A list of tuples in the form of ``(<action>, [option,
         option])`` that define build targets.

    :param Configuration conf: The top level configuration object.

    Parses the ``targets`` list and runs the tasks it defines, including all
    specified Sphinx targets and ``push`` deployment targets, and creates the
    ``env`` packages. Noteworthy behavior:

    - The order of options is not important, *except* that the action must
      appear first.

    - If you run a ``push`` target with the ``deploy`` option
      (i.e. ``push-deploy`` or ``push-<edition>-deploy``), ``giza`` will *not*
      run the ``publish`` Sphinx build.

    - This interface assumes that all deployment targets (defined in each
      project) begin with ``push-`` or ``stage-``. If you have a project with
      different deployment targets, you will need to call ``giza deploy``
      directly.

    - The ``env`` cache targets take the same options as the Sphinx builders and
      package the environment for only those builders. If you specify ``env``
      after a Sphinx target, ``giza`` will build the cache for only that
      package.
    """

    sphinx_opts = {"languages": set(),
                   "editions": set(),
                   "builders": set()}
    push_opts = {"targets": set(),
                 "type": None}
    packaging_opts = {}

    sphinx_builders = available_sphinx_builders()

    if 'push' in conf.system.files.data:
        deploy_configs = dict((item['target'], item) for item in conf.system.files.data.push)
    else:
        deploy_configs = {}

    tasks = []
    for action, options in targets:
        if action in sphinx_builders:
            tasks.append(sphinx_opts)

            add_sphinx_build_options(sphinx_opts, action, options, conf)
        elif action in ('stage', 'push'):
            tasks.append(push_opts)
            push_opts['type'] = action

            if 'deploy' not in options:
                sphinx_opts['builders'].add('publish')
                tasks.append(sphinx_opts)
                add_sphinx_build_options(sphinx_opts, action, options, conf)
                conf.runstate.fast = False

            if action in deploy_configs:
                push_opts['targets'].add(action)

            for build_option in options:
                deploy_target_name = '-'.join((action, build_option))

                if build_option in deploy_configs:
                    push_opts['targets'].add(build_option)
                elif deploy_target_name in deploy_configs:
                    push_opts['targets'].add(deploy_target_name)
        elif action.startswith('env'):
            if len(packaging_opts) > 0:
                packaging_opts = copy.copy(sphinx_opts)

            tasks.append(packaging_opts)
            add_sphinx_build_options(packaging_opts, False, options, conf)
        else:
            logger.error('target: {0} not defined in the make interface'.format(action))

    app = BuildApp.new(pool_type=conf.runstate.runner,
                       force=conf.runstate.force,
                       pool_size=conf.runstate.pool_size)

    if sphinx_opts in tasks:
        conf.runstate.languages_to_build = list(sphinx_opts['languages'])
        conf.runstate.editions_to_build = list(sphinx_opts['editions'])
        conf.runstate.builder = list(sphinx_opts['builders'])

        if 'publish' in conf.runstate.builder:
            conf.runstate.fast = False

        derive_command('sphinx', conf)

        sphinx_publication(conf, app)

    if push_opts in tasks:
        if len(push_opts['targets']) == 0:
            for lang, edition in itertools.product(conf.runstate.languages_to_build,
                                                   conf.runstate.editions_to_build):
                push_target_name = [push_opts['type']]
                for opt in (edition, lang):
                    if opt is not None:
                        push_target_name.append(opt)
                push_target_name = '-'.join(push_target_name)
                push_opts['targets'].add(push_target_name)

        conf.runstate.push_targets = list(push_opts['targets'])
        deploy_tasks(conf, app)
        derive_command('deploy', conf)

    if packaging_opts in tasks:
        derive_command('env', conf)

        app.add(Task(job=env_package_worker,
                     args=(conf.runstate, conf),
                     target=True,
                     dependency=None))

    if len(app.queue) >= 1:
        app.run()
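
The docstring describes ``targets`` as ``(action, [options])`` tuples. A hedged illustration of the shape ``run_make_operations()`` consumes (the target names are hypothetical):

targets = [
    ('html', []),                        # a bare Sphinx builder target
    ('push', ['production', 'deploy']),  # deploy without the publish build
]

for action, options in targets:
    print(action, options)

# run_make_operations(targets, conf) consumes this same structure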
Example #16
def setUp(self):
    self.app = BuildApp.new(pool_type=random.choice(['serial', 'thread']),
                            pool_size=None, force=None)
    self.c = None
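
The fragment above is a ``setUp`` from a test suite. A minimal, hypothetical ``unittest.TestCase`` around it; the class name and assertion are illustrative, and it assumes ``BuildApp`` is importable from giza (the ``queue`` attribute appears in the examples above):

import random
import unittest

class BuildAppTests(unittest.TestCase):  # hypothetical suite
    def setUp(self):
        self.app = BuildApp.new(pool_type=random.choice(['serial', 'thread']),
                                pool_size=None, force=None)
        self.c = None

    def test_queue_starts_empty(self):
        # BuildApp exposes a queue of pending tasks (see the examples above)
        self.assertEqual(len(self.app.queue), 0)

if __name__ == '__main__':
    unittest.main()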