Example #1
    def test_finalizers_nested(self):
        t = self.Task(job=sum, args=((1, 2, 3), 0))

        self.assertEqual(t.finalizers, [])

        t.finalizers = [
            Task(job=sum, args=((4, 5, 6, i), 0)) for i in range(10)
        ]
        self.assertEqual(len(t.finalizers), 10)

        for task in t.finalizers:
            task.finalizers.extend(
                [Task(job=sum, args=((4, 5, 6, i), 0)) for i in range(10)])
            self.assertEqual(len(task.finalizers), 10)

        task_result = t.run()
        self.assertEqual(task_result, 6)
        results = [task_result]
        results.extend(t.finalize())

        self.assertEqual(len(results), 111)
        self.assertEqual(results[0], 6)
        self.assertEqual(results[1], 15)

        for result in results:
            self.assertIsInstance(result, numbers.Number)
            self.assertTrue(result >= 6)
            self.assertTrue(result <= 24)
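The expected counts follow from the nesting: the root task contributes one result, its 10 finalizers contribute 10 more, and each of those carries 10 nested finalizers, for 1 + 10 + 10 × 10 = 111 results. The bounds in the final loop also follow from the arguments, as a quick check confirms:

# Arithmetic behind the assertions above.
assert 1 + 10 + 10 * 10 == 111   # root + finalizers + nested finalizers
assert sum((1, 2, 3)) == 6       # smallest result (the root task)
assert sum((4, 5, 6, 9)) == 24   # largest result (finalizer with i == 9)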
Example #2
def extract_tasks(conf):
    extracts = ExtractDataCache(conf.system.content.extracts.sources, conf)
    extracts.create_output_dir()

    tasks = []
    for dep_fn, extract in extracts.content_iter():
        t = Task(job=write_extract_file,
                 args=(extract, extract.target),
                 description="generating extract file: " + extract.target,
                 target=extract.target,
                 dependency=dep_fn)
        tasks.append(t)

        include_statement = get_include_statement(extract.target_project_path)

        for verb, adjc, files in [(prepend_to_file, 'prepend',
                                   extract.prepend),
                                  (append_to_file, 'append', extract.append)]:
            # Appends and prepends must always run, because the rsync that
            # populates build/<branch>/source overwrites these files on every
            # source generation step. A None entry in the dependency list
            # forces the task to run every time.
            for fn in files:
                msg = "{} extract include for '{}' to '{}'".format(
                    adjc, extract.target, fn)
                t = Task(job=verb,
                         args=(fn, include_statement),
                         target=fn,
                         dependency=[None, dep_fn],
                         description=msg)
                tasks.append(t)

    logger.debug('added tasks for {0} extract generation tasks'.format(
        len(tasks)))

    return tasks
Example #3
    def test_finalizers_nested(self):
        t = Task(job=sum,
                 args=((1, 2, 3), 0))

        self.assertEqual(t.finalizers, [])

        t.finalizers = [
            Task(job=sum,
                 args=((4, 5, 6, i), 0))
            for i in range(10)
        ]
        self.assertEqual(len(t.finalizers), 10)

        for task in t.finalizers:
            task.finalizers.extend([
                Task(job=sum,
                     args=((4, 5, 6, i), 0))
                for i in range(10)
            ])
            self.assertEqual(len(task.finalizers), 10)

        self.app.add(t)
        results = self.app.run()
        self.assertEqual(len(results), 111)
        self.assertEqual(results[0], 6)
        self.assertEqual(results[1], 15)

        for result in results:
            self.assertIsInstance(result, numbers.Number)
            self.assertTrue(result >= 6)
            self.assertTrue(result <= 24)
Example #4
    def test_add_existing_task_object(self):
        self.assertEqual(self.app.queue, [])
        t = Task()
        self.app.add(t)
        self.assertIs(t, self.app.queue[0])
        self.assertIsNot(t, Task())
        self.assertIsNot(Task(), self.app.queue[0])
Example #5
    def test_single_runner_task(self):
        self.assertEqual(self.app.queue, [])
        self.assertEqual(self.app.results, [])

        t = Task()
        t.job = sum
        t.description = 'test task'
        t.args = [[1, 2], 0]

        self.app.add(t)
        self.app.run(t)
        self.assertEqual(self.app.results[0], 3)
Example #6
def option_tasks(conf):
    o = OptionDataCache(conf.system.content.options.sources, conf)
    o.create_output_dir()

    tasks = []
    for dep_fn, option in o.content_iter():
        program = option.program.replace(' ', '-')

        if option.has_field('filename'):
            option_name = option.filename
        else:
            option_name = option.name

        output_fn = os.path.join(
            conf.system.content.options.fn_prefix, ''.join(
                (option.directive, '-', program, '-', option_name + '.rst')))

        t = Task(job=write_options,
                 args=(option, output_fn, conf),
                 description='generating option file "{0}" from "{1}"'.format(
                     output_fn, dep_fn),
                 target=output_fn,
                 dependency=[dep_fn])
        tasks.append(t)

    logger.debug('added tasks for {0} option generation tasks'.format(
        len(tasks)))
    return tasks
Example #7
    def setUp(self):
        self.c = Configuration()
        self.c.runstate = RuntimeStateConfig()
        self.task = Task()
        self.task.job = sum
        self.Task = Task
        self.task.conf = self.c
Example #8
    def test_conf_object_consistent_in_new_task(self):
        self.assertEqual(self.app.queue, [])
        t = Task()
        self.assertIsNone(t.conf)
        self.app.add(t)
        self.assertIsNotNone(t.conf)
        self.assertIs(self.c, self.app.queue[0].conf)
        self.assertIs(self.c, t.conf)
Example #9
    def test_finalizers_simple(self):
        t = Task(job=sum,
                 args=((1, 2, 3), 0))

        self.assertEqual(t.finalizers, [])

        t.finalizers = [
            Task(job=sum,
                 args=((4, 5, 6, i), 0))
            for i in range(10)
        ]

        self.app.add(t)
        results = self.app.run()
        self.assertEqual(len(results), 11)
        self.assertEqual(results,
                         [6, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24])
Example #10
def step_clean(conf):
    return [
        Task(job=shutil.rmtree,
             args=[conf.system.content.steps.output_dir],
             target=True,
             dependency=[conf.system.content.steps.output_dir],
             description='removing {0}'.format(
                 conf.system.content.steps.output_dir))
    ]
Example #11
    def add(self, task=None, conf=None):
        """
        Adds a new :class:`~giza.app.BuildApp()` or :class:`~giza.task.Task()`
        to the :class:`~giza.app.BuildApp()` object.

        :param string,Task,BuildApp task: Optional. If not specified,
           :meth:`~giza.app.BuildApp.add()` creates and returns a new
           :class:`~giza.task.Task()` object. You can pass the string ``task``
           or the class :class:`~giza.task.Task` to explicitly create a new
           Task, or pass an existing :class:`~giza.task.Task()` instance to add
           that task to the :class:`~giza.app.BuildApp()` instance. You can
           also pass the string ``app`` or the :class:`~giza.app.BuildApp`
           class to create and add a new :class:`~giza.app.BuildApp()`, or pass
           an existing :class:`~giza.app.BuildApp()` instance to add that
           operation grouping to the queue.

        :returns: A reference to the :class:`~giza.app.BuildApp()` or
           :class:`~giza.task.Task()` object added to the
           :class:`~giza.app.BuildApp()` queue.

        :raises: :exc:`TypeError` if the ``task`` argument is invalid.

        """
        if conf is not None:
            self.conf = conf

        if task is None or task in (Task, 'task'):
            t = Task()
            t.conf = self.conf
            t.force = self.force
            self.queue.append(t)
            return t
        elif task in (MapTask, 'map'):
            t = MapTask()
            t.conf = self.conf
            t.force = self.force
            self.queue.append(t)
            return t
        elif task in (BuildApp, 'app'):
            t = self.sub_app()
            self.queue.append(t)
            return t
        else:
            if isinstance(task, Task):
                task.force = self.force
                if task.conf is None:
                    task.conf = self.conf

                self.queue.append(task)
                return task
            elif isinstance(task, BuildApp):
                task.root_app = False
                task.default_pool = self.default_pool
                task.force = self.force
                task.pool = self.pool
                self.queue.append(task)
                return task
            else:
                raise TypeError('invalid task type')
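A brief usage sketch of the calling conventions the docstring describes; the configured ``app`` instance is an assumption here, with ``Task`` and ``BuildApp`` imported as in the surrounding examples:

# Sketch only: assumes a configured BuildApp instance named `app`.
t1 = app.add()            # create, queue, and return a fresh Task
t2 = app.add('task')      # the same, via the string form
t3 = app.add(Task)        # the same, via the class form

existing = Task(job=sum, args=([1, 2], 0))
app.add(existing)         # queue a pre-built Task instance

sub_app = app.add('app')  # create and queue a nested BuildApp
# app.add(42)             # would raise TypeError('invalid task type')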
Example #12
def sphinx_content_preperation(app, conf):
    # Download embedded git repositories and then run migrations before doing
    # anything else.
    with app.context() as asset_app:
        asset_app.extend_queue(assets_tasks(conf))

    with app.context() as migration_app:
        migration_app.extend_queue(migration_tasks(conf))

    # Copy all source to the ``build/<branch>/source`` directory.
    with Timer('migrating source to build'):
        with app.context(randomize=True) as source_app:
            for (_, (build_config,
                     sconf)) in get_restricted_builder_jobs(conf):
                source_app.extend_queue(source_tasks(build_config, sconf))

    # load all generated content and create tasks.
    with Timer('loading generated content'):
        for (_, (build_config, sconf)) in get_restricted_builder_jobs(conf):
            for content, func in build_config.system.content.task_generators:
                app.add(Task(job=func, args=[build_config], target=True))

        app.randomize = True
        results = app.run()
        app.reset()

        for task_group in results:
            app.extend_queue(task_group)

    for ((edition, language, builder),
         (build_config, sconf)) in get_restricted_builder_jobs(conf):
        # these functions all return tasks
        app.extend_queue(image_tasks(build_config, sconf))
        for content_generator in (robots_txt_tasks, intersphinx_tasks,
                                  table_tasks, hash_tasks):
            app.extend_queue(content_generator(build_config))

        dependency_refresh_app = app.add('app')
        dependency_refresh_app.extend_queue(
            refresh_dependency_tasks(build_config))

        # once the source is prepared, we dump a dict with md5 hashes of all
        # files, so we can do better dependency resolution the next time.
        app.extend_queue(dump_file_hash_tasks(build_config))

        # we transfer images to the latex directory directly because offset
        # images are included using raw latex, and Sphinx doesn't know how
        # to copy images in this case.
        app.extend_queue(latex_image_transfer_tasks(build_config, sconf))

        msg = 'added source tasks for ({0}, {1}, {2}) in {3}'
        logger.debug(
            msg.format(builder, language, edition,
                       build_config.paths.branch_source))
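Note the ``with app.context()`` blocks: judging from the comments and the ordering they enforce, each context appears to run its sub-app's queue when the block exits, so the embedded repositories are downloaded and the migrations complete before the source is copied into ``build/<branch>/source``.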
Example #13
def option_clean(conf):
    register_options(conf)

    tasks = []
    for fn in conf.system.options.sources:
        t = Task(job=verbose_remove,
                 args=[fn],
                 target=fn,
                 dependency=None,
                 description='removing {0}'.format(fn))

        tasks.append(t)

    return tasks
Example #14
def glossary_tasks(conf):
    terms = GlossaryDataCache(conf.system.content.glossary.sources, conf)
    terms.create_output_dir()

    tasks = []
    for fn, glossary_file in terms.file_iter():
        tasks.append(
            Task(job=write_glossary,
                 args=(glossary_file, glossary_file.target(fn)),
                 description='generate glossary for: ' + fn,
                 target=glossary_file.target(fn),
                 dependency=fn))

    logger.debug('add {0} glossary tasks'.format(len(tasks)))
    return tasks
Example #15
    def test_finalizers_simple(self):
        t = self.Task(job=sum, args=((1, 2, 3), 0))

        self.assertEqual(t.finalizers, [])

        t.finalizers = [
            Task(job=sum, args=((4, 5, 6, i), 0)) for i in range(10)
        ]

        self.assertEqual(len(t.finalizers), 10)
        self.assertEqual(t.run(), 6)
        self.assertEqual(len(t.finalizers), 10)
        finals = t.finalize()
        self.assertEqual(len(finals), 10)
        self.assertEqual(len(t.finalizers), 10)
        self.assertEqual(finals, [15, 16, 17, 18, 19, 20, 21, 22, 23, 24])
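As the assertions show, ``run()`` executes only the task's own job, while ``finalize()`` runs the finalizers and returns their results without consuming them: the ``finalizers`` list still holds 10 entries afterward. The results are ordered by construction, since ``sum((4, 5, 6, i), 0)`` for ``i`` from 0 through 9 yields 15 through 24.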
Example #16
def release_tasks(conf):
    rel = ReleaseDataCache(conf.system.content.releases.sources, conf)
    rel.create_output_dir()

    tasks = []
    for dep_fn, release in rel.content_iter():
        t = Task(job=write_release_file,
                 args=(release, release.target, conf),
                 description='generating release spec file: ' + release.target,
                 target=release.target,
                 dependency=dep_fn)
        tasks.append(t)

    logger.debug('added tasks for {0} release generation tasks'.format(
        len(tasks)))
    return tasks
Example #17
def step_tasks(conf):
    s = StepDataCache(conf.system.content.steps.sources, conf)
    s.create_output_dir()

    tasks = []
    for fn, stepf in s.file_iter():
        t = Task(job=write_steps,
                 args=(stepf, stepf.target(fn), conf),
                 description='generate a stepfile for ' + fn,
                 target=stepf.target(fn),
                 dependency=fn)
        tasks.append(t)

    logger.debug('added tasks for {0} step generation tasks'.format(
        len(tasks)))
    return tasks
Example #18
def sphinx_tasks(sconf, conf):
    # Projects that use the append functionality in extracts or similar content
    # generators will rebuild this task every time.

    deps = [os.path.join(conf.paths.projectroot, 'conf.py')]
    deps.extend(conf.system.files.get_configs('sphinx_local'))
    deps.extend(
        expand_tree(
            os.path.join(conf.paths.projectroot, conf.paths.branch_source),
            'txt'))

    return Task(job=run_sphinx,
                args=(sconf.builder, sconf, conf),
                target=os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_output, sconf.builder),
                dependency=deps,
                description='building {0} with sphinx'.format(sconf.builder))
Example #19
def example_tasks(conf):
    d = ExampleDataCache(conf.system.content.examples.sources, conf)
    d.create_output_dir()

    tasks = []
    for fn, exmpf in d.file_iter():
        out_fn = os.path.join(
            conf.system.content.examples.output_dir,
            conf.system.content.examples.get_basename(fn)) + '.rst'

        t = Task(job=write_full_example,
                 args=(exmpf.collection, exmpf.examples, out_fn),
                 description='generate an example for ' + fn,
                 target=out_fn,
                 dependency=fn)
        tasks.append(t)

    logger.debug('added tasks for {0} example generation tasks'.format(
        len(tasks)))
    return tasks
Example #20
def apiarg_tasks(conf):
    a = ApiArgDataCache(conf.system.content.apiargs.sources, conf)
    a.create_output_dir()

    tasks = []
    for dep_fn, apiargs in a.file_iter():
        basename = conf.system.content.steps.get_basename(dep_fn)[2:]
        out_fn = os.path.join(conf.system.content.apiargs.output_dir,
                              basename) + '.rst'

        t = Task(job=write_apiargs,
                 args=(apiargs, out_fn),
                 target=out_fn,
                 dependency=dep_fn,
                 description="write apiarg table for: " + dep_fn)
        tasks.append(t)

    logger.debug('added tasks for {0} apiarg table generation tasks'.format(
        len(tasks)))

    return tasks
Example #21
def run_make_operations(targets, conf):
    """
    :param list targets: A list of tuples in the form of ``(<action>, [option,
         option])`` that define build targets.

    :param Configuration conf: The top level configuration object.

    Parses the ``targets`` list and runs the tasks it defines, including all
    specified Sphinx targets and ``push`` deployment targets, and creates the
    ``env`` packages. Noteworthy behavior:

    - The order of options is *not* important, except that the action must
      come first.

    - If you run ``push`` target with the ``deploy`` option
      (i.e. ``push-deploy`` or ``push-<edition>-deploy``), ``giza`` will *not*
      run the ``publish`` Sphinx build.

    - This interface assumes that all deployment targets (defined in each
      project) begin with ``push-`` or ``stage-``. If you have a project with
      different deployment targets, you will need to call ``giza deploy``
      directly.

    - The ``env`` cache targets take the same options as the Sphinx builders and
      package the environment for only those builders. If you specify ``env``
      after a Sphinx target, ``giza`` will build the cache for only that
      package.
    """

    sphinx_opts = {"languages": set(), "editions": set(), "builders": set()}
    push_opts = {"targets": set(), "type": None}
    packaging_opts = {}

    sphinx_builders = available_sphinx_builders()

    if 'push' in conf.system.files.data:
        deploy_configs = dict(
            (item['target'], item) for item in conf.system.files.data.push)
    else:
        deploy_configs = {}

    tasks = []
    for action, options in targets:
        if action in sphinx_builders:
            tasks.append(sphinx_opts)

            add_sphinx_build_options(sphinx_opts, action, options, conf)
        elif action in ('stage', 'push'):
            tasks.append(push_opts)
            push_opts['type'] = action

            if 'deploy' not in options:
                sphinx_opts['builders'].add('publish')
                tasks.append(sphinx_opts)
                add_sphinx_build_options(sphinx_opts, action, options, conf)
                conf.runstate.fast = False

            if action in deploy_configs:
                push_opts['targets'].add(action)

            for build_option in options:
                deploy_target_name = '-'.join((action, build_option))

                if build_option in deploy_configs:
                    push_opts['targets'].add(build_option)
                elif deploy_target_name in deploy_configs:
                    push_opts['targets'].add(deploy_target_name)
        elif action.startswith('env'):
            if len(packaging_opts) > 0:
                packaging_opts = copy.copy(sphinx_opts)

            tasks.append(packaging_opts)
            add_sphinx_build_options(packaging_opts, False, options, conf)
        else:
            logger.error(
                'target: {0} not defined in the make interface'.format(action))

    app = BuildApp.new(pool_type=conf.runstate.runner,
                       force=conf.runstate.force,
                       pool_size=conf.runstate.pool_size)

    if sphinx_opts in tasks:
        conf.runstate.languages_to_build = list(sphinx_opts['languages'])
        conf.runstate.editions_to_build = list(sphinx_opts['editions'])
        conf.runstate.builder = list(sphinx_opts['builders'])

        if 'publish' in conf.runstate.builder:
            conf.runstate.fast = False

        derive_command('sphinx', conf)

        sphinx_publication(conf, app)

    if push_opts in tasks:
        if len(push_opts['targets']) == 0:
            for lang, edition in itertools.product(
                    conf.runstate.languages_to_build,
                    conf.runstate.editions_to_build):
                push_target_name = [push_opts['type']]
                for opt in (edition, lang):
                    if opt is not None:
                        push_target_name.append(opt)
                push_target_name = '-'.join(push_target_name)
                push_opts['targets'].add(push_target_name)

        conf.runstate.push_targets = list(push_opts['targets'])
        deploy_tasks(conf, app)
        derive_command('deploy', conf)

    if packaging_opts in tasks:
        derive_command('env', conf)

        app.add(
            Task(job=env_package_worker,
                 args=(conf.runstate, conf),
                 target=True,
                 dependency=None))

    if len(app.queue) >= 1:
        app.run()
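For illustration, a hedged sketch of the kind of ``targets`` value this function parses; the concrete action names depend on the Sphinx builders and push targets a given project defines:

# Each tuple is (<action>, [option, ...]); the values are illustrative.
targets = [
    ('html', []),          # a Sphinx builder target
    ('push', ['deploy']),  # push-deploy: deploy without the publish build
    ('env', ['html']),     # package the environment for the html builder
]
run_make_operations(targets, conf)  # assumes a populated Configuration object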
Example #22
def toc_tasks(conf):
    tocs = TocDataCache(conf.system.content.toc.sources, conf)
    tocs.create_output_dir()

    tasks = []
    for dep_fn, toc_data in tocs.file_iter():
        deps = [dep_fn]
        if 'ref-toc-' in dep_fn:
            base_offset = 8
            is_ref = True
        elif 'ref-spec-' in dep_fn:
            base_offset = 9
            is_ref = True
        else:
            base_offset = 4
            is_ref = False

        fn_basename = os.path.basename(dep_fn)[base_offset:].replace(
            'yaml', 'rst')

        toc_items = toc_data.ordered_items()

        if toc_data.is_spec() is False:
            out_fn = os.path.join(conf.system.content.toc.output_dir,
                                  fn_basename)

            t = Task(job=write_toc_tree_output,
                     args=(out_fn, toc_items, is_ref),
                     target=out_fn,
                     dependency=dep_fn,
                     description='writing toctree to "{0}"'.format(out_fn))
            tasks.append(t)
        else:
            deps.extend(toc_data.spec_deps())

        if 'ref-toc' in dep_fn:
            out_fn = os.path.join(conf.system.content.toc.output_dir,
                                  'table-' + fn_basename)

            reft = Task(
                job=write_toc_table,
                args=(out_fn, toc_items),
                target=out_fn,
                dependency=deps,
                description='write ref toc table to "{0}"'.format(out_fn))
            tasks.append(reft)
        elif 'ref-spec' in dep_fn:
            out_fn = os.path.join(conf.system.content.toc.output_dir,
                                  'table-spec-' + fn_basename)

            refspec = Task(
                job=write_toc_table,
                args=(out_fn, toc_items),
                target=out_fn,
                dependency=deps,
                description='write ref spec table to "{0}"'.format(out_fn))
            tasks.append(refspec)
        else:
            out_fn = os.path.join(conf.system.content.toc.output_dir,
                                  'dfn-list-' + fn_basename)
            dt = Task(job=write_dfn_list_output,
                      args=(out_fn, toc_items),
                      target=out_fn,
                      dependency=deps,
                      description='write definition list toc to "{0}"'.format(
                          out_fn))
            tasks.append(dt)

    logger.debug('added tasks for {0} toc generation tasks'.format(len(tasks)))

    return tasks
Example #23
def changelog_tasks(conf):
    tasks = []

    if 'jira' not in conf.system.files.data:
        logger.debug('changelog generation is not configured.')
        return []

    dirname = os.path.join(conf.paths.projectroot, conf.paths.includes, 'changelogs')

    giza.tools.files.safe_create_directory(os.path.join(dirname, 'releases'))
    jira_config = os.path.join(conf.paths.projectroot, conf.paths.builddata, 'jira.yaml')
    major_versions = get_major_version_groupings(conf.system.files.data.jira.site.versions)

    # If no versions are listed in jira.yaml, just return, the same as if
    # jira were not configured in conf.system.files.data, and log an explicit
    # message stating that 0 changelog tasks were added.
    if not major_versions:
        logger.warning('changelog version is not configured in jira.yaml.')
        logger.info('added {0} changelog tasks.'.format(len(tasks)))
        return []

    # Don't generate changelog content except on the most recent published
    # branch (typically master).
    if conf.git.branches.current != conf.git.branches.published[0]:
        logger.error('you must generate changelogs on the master branch and then backport them to another branch.')
        logger.info('added {0} changelog tasks'.format(len(tasks)))
        return tasks

    # Only generate changelogs if there are credentials; even though we don't
    # really need an authenticated connection, we want to avoid making
    # unauthenticated builds take too long.
    if not os.path.exists(os.path.expanduser(conf.system.files.data.jira.site.credentials)):
        logger.warning('jira credentials are not configured for your user. not generating changelog tasks')
        logger.info('added {0} changelog tasks'.format(len(tasks)))
        return tasks

    # Bump the mtime of all existing files to avoid regenerating files that
    # are already committed.
    changelog_releases_dir = os.path.join(conf.paths.projectroot, conf.paths.includes, 'changelogs', 'releases')

    for fn in os.listdir(changelog_releases_dir):
        os.utime(os.path.join(changelog_releases_dir, fn), None)

    # add tasks for generating intermediate files for each major version. we do
    # this on all branches, and publishers need to backport the config changes.
    for version, releases in major_versions.items():
        fn = os.path.join(dirname, version + '.rst')
        t = Task(job=giza.content.changelog.views.render_intermediate_files,
                 args=(fn, version, releases, conf),
                 target=fn,
                 dependency=[jira_config])
        tasks.append(t)

    # create a task for each version defined. should never regenerate existing files.
    for version in conf.system.files.data.jira.site.versions:
        fn = os.path.join(conf.paths.projectroot, conf.paths.includes, 'changelogs', 'releases', version + '.rst')
        t = Task(job=giza.content.changelog.views.get_changelog_content,
                 args=(fn, version, conf),
                 dependency=[jira_config],
                 target=fn)
        tasks.append(t)

    logger.info('added {0} changelog tasks.'.format(len(tasks)))
    return tasks
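``os.utime(path, None)`` sets a file's access and modification times to the current time, which is what makes the mtime bump above sufficient to keep already-committed changelog files from being regenerated.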
Example #24
def finalize_sphinx_build(sconf, conf):
    target = sconf.builder

    tasks = []
    if target == 'html' and not conf.runstate.fast:
        t = Task(job=html_tarball,
                 args=(sconf.name, sconf.build_output, conf),
                 target=[get_tarball_name('html', conf),
                         get_tarball_name('link-html', conf)],
                 dependency=None,
                 description="creating tarball for html archive")
        tasks.append(t)
    elif target == 'dirhtml' and not conf.runstate.fast:
        # We're experiencing some cases where giza seemingly randomly doesn't
        # migrate. Log this to help us figure out what's going on.
        logger.info('Going to migrate {} to {}'.format(
            sconf.fq_build_output,
            os.path.join(conf.paths.projectroot, conf.paths.public_site_output)))

        for job in (finalize_dirhtml_build, error_pages):
            t = Task(job=job,
                     args=(sconf, conf),
                     target=os.path.join(conf.paths.projectroot, conf.paths.public_site_output),
                     dependency=None)
            tasks.append(t)
    elif target == 'epub':
        t = Task(job=finalize_epub_build,
                 args=(target, conf),
                 description='finalizing epub build',
                 dependency=None,
                 target=True)
        tasks.append(t)
    elif target == 'man':
        t = Task(job=man_tarball,
                 args=(sconf.name, sconf.build_output, conf),
                 target=[get_tarball_name('man', conf),
                         get_tarball_name('link-man', conf)],
                 dependency=None,
                 description="creating tarball for manpages")

        tasks.extend(manpage_url_tasks(target, conf))
        tasks.append(('final', t))
    elif target == 'slides' and not conf.runstate.fast:
        tasks.extend(slide_tasks(sconf, conf))
    elif target == 'json':
        json_tasks, transfer_op = json_output_tasks(conf)
        tasks.extend(json_tasks)
        tasks.append(('final', transfer_op))  # this is less than ideal
    elif target == 'singlehtml':
        tasks.extend(finalize_single_html_tasks(target, conf))
    elif target == 'latex':
        tasks.extend(pdf_tasks(sconf, conf))
    elif target == 'gettext':
        tasks.extend(gettext_tasks(conf))
    elif target == 'linkcheck':
        msg_str = '{0}: See {1}/{0}/output.txt for output.'
        t = Task(job=printer,
                 args=[msg_str.format(target, conf.paths.branch_output)],
                 target=os.path.join(conf.paths.projectroot,
                                     conf.paths.branch_output, target, 'output.txt'),
                 dependency=None)
        tasks.append(t)

    logger.debug('adding {0} finalizing tasks for {1} build'.format(len(tasks), target))
    return tasks