Example #1
def sphinx_build(targets, conf, sconf, finalize_fun):
    if len(targets) == 0:
        targets.append('html')

    target_jobs = []

    sync = StateAttributeDict()
    for target in targets:
        if target in sconf:
            lsconf = compute_sphinx_config(target, sconf, conf)
            lconf = edition_setup(lsconf.edition, conf)

            target_jobs.append({
                'job': build_worker,
                'args': [ target, lsconf, lconf, sync, finalize_fun]
            })
        else:
            print('[sphinx] [warning]: not building {0} without configuration.'.format(target))

    # a batch of prereq jobs goes here.
    primer_migrate_pages(conf)
    build_process_prerequsites(conf)

    if len(target_jobs) <= 1:
        res = runner(target_jobs, pool=1)
    else:
        res = runner(target_jobs, parallel='threads')

    output_sphinx_stream('\n'.join([r for r in res if r is not None]), conf)

    print('[sphinx]: built {0} sphinx targets'.format(len(res)))
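
Every example on this page hands runner() an iterable of job dicts, but runner() itself is never shown. The following is a minimal stand-in sketch, assuming only the {'job': callable, 'args': list} contract visible in the examples; the 'description', 'target', 'dependency', 'parallel', and retval details that some examples pass are accepted but not interpreted here.

from multiprocessing.pool import ThreadPool

def runner(jobs, pool=None, parallel='threads', retval=None):
    # Sketch only: dispatch each {'job': fn, 'args': [...]} dict and
    # collect the return values.
    jobs = list(jobs)
    if not jobs:
        return []

    def run_one(job):
        return job['job'](*job.get('args', []))

    if pool == 1:
        # serial fallback that several examples request for small batches
        return [run_one(j) for j in jobs]

    with ThreadPool(pool or len(jobs)) as p:
        return p.map(run_one, jobs)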
Example #2
def sphinx_build(targets, conf, sconf, finalize_fun):
    build_prerequisites(conf)

    if len(targets) == 0:
        targets.append('html')

    target_jobs = []

    for target in targets:
        if target in sconf:
            target_jobs.append({
                'job': build_worker_wrapper,
                'args': [ target, sconf, conf, finalize_fun]
            })
        else:
            print('[sphinx] [warning]: not building {0} without configuration.'.format(target))

    if len(target_jobs) <= 1:
        res = runner(target_jobs, pool=1)
    else:
        res = runner(target_jobs, pool=len(target_jobs), parallel='threads')

    output_sphinx_stream('\n'.join([r for r in res if r is not None]), conf)

    print('[sphinx]: built {0} sphinx targets'.format(len(res)))
Example #3
def push():
    "Runs 'tx push' command."
    resources = tx_resources()

    jobs = [ { 'job': logged_command,
               'args': ['push', ' '.join([ 'tx', 'push', '-s', '-r', page]) ] }
             for page in resources ]

    runner(jobs, parallel='thread', pool=12)
Example #4
def cleaner(paths):
    if len(paths) <= cpu_count() + 1:
        workers = len(paths)
    else:
        workers = cpu_count()

    jobs = ( dict(target=path, dependency=None, job=_rm_rf, args=[path]) for path in paths )

    runner(jobs, pool=workers)
Example #5
def cleaner(paths):
    if len(paths) <= cpu_count() + 1:
        workers = len(paths)
    else:
        workers = cpu_count()

    jobs = (dict(target=path, dependency=None, job=_rm_rf, args=[path])
            for path in paths)

    runner(jobs, pool=workers)
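
The _rm_rf worker that both cleaner() variants dispatch is not defined on this page. A plausible minimal implementation, assuming from the name that it mirrors 'rm -rf' semantics:

import os
import shutil

def _rm_rf(path):
    # Hypothetical sketch: remove a file or a directory tree, silently
    # ignoring paths that no longer exist.
    if os.path.isdir(path) and not os.path.islink(path):
        shutil.rmtree(path, ignore_errors=True)
    elif os.path.lexists(path):
        os.remove(path)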
Example #6
def push():
    "Runs 'tx push' command."
    resources = tx_resources()

    jobs = [{
        'job': logged_command,
        'args': ['push', ' '.join(['tx', 'push', '-s', '-r', page])]
    } for page in resources]

    runner(jobs, parallel='thread', pool=12)
Example #7
def _generate_report(mask, output_file=None, conf=None, data=None):
    if conf is None:
        conf = get_conf()

    base_path = os.path.join(conf.paths.output, conf.git.branches.current, 'json')
    docs = expand_tree(base_path, '.json')

    if mask is not None:
        if mask.startswith('/'):
            mask = mask[1:]

        mask = os.path.join(base_path, mask)

    if data is None:
        output = runner( jobs=report_jobs(docs, mask),
                         retval='results')
    else:
        output = data

    if output_file is None:
        return output
    else:
        if output_file == 'print':
            print(json.dumps(output, indent=2))
        else:
            with open(output_file, 'w') as f:
                json.dump(output, f)
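
expand_tree() is another helper this example assumes. Given the call expand_tree(base_path, '.json'), it presumably walks a directory tree and returns the files carrying a given extension; a minimal sketch under that assumption:

import os

def expand_tree(path, extension):
    # Hypothetical sketch: recursively collect files under 'path' whose
    # names end with the given extension.
    if not extension.startswith('.'):
        extension = '.' + extension
    return [os.path.join(root, name)
            for root, _, files in os.walk(path)
            for name in files
            if name.endswith(extension)]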
Example #8
def sphinx_build(targets, conf, sconf, finalize_fun):
    if len(targets) == 0:
        targets.append('html')

    target_jobs = []

    sync = StateAttributeDict()
    for target in targets:
        if target in sconf:
            lsconf = compute_sphinx_config(target, sconf, conf)
            lconf = edition_setup(lsconf.edition, conf)

            target_jobs.append({
                'job': build_worker,
                'args': [ target, lsconf, lconf, sync, finalize_fun],
                'description': "sphinx build worker for {0}".format(target)
            })
        else:
            logger.warning('not building sphinx target {0} without configuration.'.format(target))

    # a batch of prereq jobs goes here.
    primer_migrate_pages(conf)
    build_process_prerequsites(sync, conf)

    res = runner(target_jobs, parallel='threads')

    output_sphinx_stream('\n'.join([r[1] if isinstance(r, tuple) else r
                                    for r in res
                                    if r is not None]), conf)

    logger.info('built {0} sphinx targets'.format(len(res)))
Example #9
def finalize_build(builder, sconf, conf):
    if 'language' in sconf:
        # reinitialize conf and builders for internationalization
        conf.paths = render_paths(conf, sconf.language)
        builder = sconf.builder
        target = builder
    else:
        # mms compatibility
        target = builder
        builder = builder.split('-', 1)[0]

    jobs = {
        'linkcheck': [
            { 'job': printer,
              'args': ['{0}: See {1}/{0}/output.txt for output.'.format(builder, conf.paths.branch_output)]
            }
        ],
        'dirhtml': [
            { 'job': finalize_dirhtml_build,
              'args': [target, conf]
            }
        ],
        'epub': [ { 'job': finalize_epub_build,
                    'args': [conf] }
        ],
        'json': json_output_jobs(conf),
        'singlehtml': finalize_single_html_jobs(target, conf),
        'latex': [
            { 'job': pdf_worker,
              'args': [target, conf]
            }
        ],
        'man': itertools.chain(manpage_url_jobs(conf), [
            { 'job': man_tarball,
              'args': [conf]
            }
        ]),
        'html': [
            { 'job': html_tarball,
              'args': [target, conf]
            }
        ],
        'gettext': gettext_jobs(conf),
        'all': [ ]
    }

    if builder not in jobs:
        jobs[builder] = []

    if conf.system.branched is True and conf.git.branches.current == 'master':
        jobs['dirhtml'].append(
            { 'job': create_manual_symlink,
              'args': [conf]
            }
        )

    logger.info('{0}: running sphinx post-processing steps.'.format(builder))
    res = runner(itertools.chain(jobs[builder], jobs['all']), pool=1)
    logger.info('{0}: completed {1} sphinx post-processing steps'.format(builder, len(res)))
Example #10
def intersphinx():
    "Downloads all intersphinx files if out of date."

    conf = lazy_conf(None)

    res = runner( intersphinx_jobs(conf) )

    puts('[intersphinx]: processed {0} intersphinx inventories'.format(len(res)))
Example #11
def finalize_build(builder, sconf, conf):
    if 'language' in sconf:
        builder = sconf.builder
        target = builder
    else:
        # mms compatibility
        target = builder
        builder = builder.split('-', 1)[0]

    jobs = {
        'linkcheck': [
            { 'job': printer,
              'args': ['{0}: See {1}/{0}/output.txt for output.'.format(builder, conf.paths.branch_output)]
            }
        ],
        'dirhtml': [
            { 'job': finalize_dirhtml_build,
              'args': [target, conf] },
            { 'job': error_pages,
              'args': [target, conf] }
        ],
        'epub': [ { 'job': finalize_epub_build,
                    'args': [target, conf] }
        ],
        'json': json_output_jobs(conf),
        'singlehtml': finalize_single_html_jobs(target, conf),
        'latex': [
            { 'job': pdf_worker,
              'args': [target, conf]
            }
        ],
        'man': itertools.chain(manpage_url_jobs(target, conf), [
            { 'job': man_tarball,
              'args': [target, conf]
            }
        ]),
        'html': [
            { 'job': html_tarball,
              'args': [target, conf]
            }
        ],
        'gettext': gettext_jobs(conf),
        'all': [ ]
    }

    if builder not in jobs:
        jobs[builder] = []

    if conf.system.branched is True and conf.git.branches.current == 'master':
        jobs['dirhtml'].append(
            { 'job': create_manual_symlink,
              'args': [conf]
            }
        )

    logger.info('{0}: running sphinx post-processing steps.'.format(builder))
    res = runner(itertools.chain(jobs[builder], jobs['all']), pool=1)
    logger.info('{0}: completed {1} sphinx post-processing steps'.format(builder, len(res)))
Example #12
def build_prerequisites(conf):
    jobs = itertools.chain(
        build_prereq_jobs(conf),
        manpage_jobs(conf),
        table_jobs(conf),
        api_jobs(conf),
        toc_jobs(conf),
        option_jobs(conf),
        steps_jobs(conf),
        release_jobs(conf),
        intersphinx_jobs(conf),
        image_jobs(conf),
    )

    try:
        res = runner(jobs, parallel="process")
        print("[sphinx-prep]: built {0} pieces of content".format(len(res)))
    except PoolResultsError:
        print(
            "[WARNING]: sphinx prerequisites encountered errors. "
            "See output above. Continuing as a temporary measure."
        )

    runner(external_jobs(conf), parallel="thread")

    buildinfo_hash(conf)

    if conf.project.name != "mms":
        # we copy source manually for mms in makefile.mms, avoiding this
        # operation to clarify the artifacts directory
        transfer_source(conf)

    print("[sphinx-prep]: resolving all intra-source dependencies now. (takes several seconds)")
    dep_count = refresh_dependencies(conf)
    print("[sphinx-prep]: bumped timestamps of {0} files".format(dep_count))

    command(build_platform_notification("Sphinx", "Build in progress past critical phase."), ignore=True)

    print("[sphinx-prep]: INFO - Build in progress past critical phase.")

    dump_file_hashes(conf.system.dependency_cache, conf)
    print("[sphinx-prep]: build environment prepared for sphinx.")
Example #13
def build_process_prerequsites(sync, conf):
    pjobs = itertools.chain(table_jobs(conf), api_jobs(conf),
                            option_jobs(conf), release_jobs(conf),
                            intersphinx_jobs(conf))

    tjobs = itertools.chain(build_prereq_jobs(sync, conf), image_jobs(conf))

    tres = runner(tjobs, parallel='threads')

    try:
        pres = runner(pjobs, parallel='process')
        logger.info(
            'built {0} pieces of content to prep for sphinx build'.format(
                len(pres) + len(tres)))
    except PoolResultsError:
        logger.error('sphinx prerequisites encountered errors. '
                     'See output. Continuing as a temporary measure.')

    if not sync.satisfied('buildhash'):
        buildinfo_hash(conf)
        sync.buildhash = True
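
StateAttributeDict, which the sphinx_build examples share between workers, is also not defined here. Judging only from the sync.satisfied('buildhash') probe followed by sync.buildhash = True, it behaves like a dict with attribute access plus a completion check; a minimal sketch under that assumption (the real class presumably also guards against concurrent mutation, which this sketch omits):

class StateAttributeDict(dict):
    # Sketch of the shared-state helper assumed by these examples:
    # workers probe satisfied(key) and then record one-time setup steps
    # by assigning True, as either an attribute or an item.
    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError:
            raise AttributeError(key)

    def __setattr__(self, key, value):
        self[key] = value

    def satisfied(self, key):
        # True once some worker has marked the keyed step complete.
        return self.get(key, False) is True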
Example #14
def build_job_prerequsites(sync, sconf, conf):
    runner(external_jobs(conf), parallel='thread')

    with update_source_lock:
        if conf.project.name != 'mms':
            if sync.satisfied('transfered_source') is False:
                transfer_source(sconf, conf)
                sync.transfered_source = True
            cond_toc = "build_toc"
        else:
            cond_name = 'transfered_' + sconf.edition
            cond_toc = 'build_toc_' + sconf.edition
            if sync.satisfied(cond_name) is False:
                cmd = 'make -C {0} {1}-source-dir={0}{2}{3} EDITION={1} generate-source-{1}'
                cmd = cmd.format(conf.paths.projectroot, sconf.edition, os.path.sep,
                                 conf.paths.branch_source)
                o = command(cmd, capture=True)
                if len(o.out.strip()) > 0:
                    print(o.out)

                sync[cond_name] = True

        if sync.satisfied(cond_toc) is False:
            # this has to go here so that MMS can generate different toc trees for
            # each edition.
            runner(toc_jobs(conf), parallel='process')
            sync[cond_toc] = True

    with update_deps_lock:
        if sync.satisfied('updated_deps') is False:
            print('[sphinx-prep]: resolving all intra-source dependencies now. (takes several seconds)')
            dep_count = refresh_dependencies(conf)
            print('[sphinx-prep]: bumped timestamps of {0} files'.format(dep_count))
            sync.updated_deps = True

            command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'), ignore=True)
            print('[sphinx-prep]: INFO - Build in progress past critical phase ({0})'.format(conf.paths.branch_source))
            dump_file_hashes(conf)

    print('[sphinx-prep]: build environment prepared for sphinx.')
Example #15
def build_process_prerequsites(sync, conf):
    pjobs = itertools.chain(table_jobs(conf),
                            api_jobs(conf),
                            option_jobs(conf),
                            release_jobs(conf),
                            intersphinx_jobs(conf))

    tjobs = itertools.chain(build_prereq_jobs(sync, conf),
                            image_jobs(conf))

    tres = runner(tjobs, parallel='threads')

    try:
        pres = runner(pjobs, parallel='process')
        logger.info('built {0} pieces of content to prep for sphinx build'.format(len(pres) + len(tres)))
    except PoolResultsError:
        logger.error('sphinx prerequisites encountered errors. '
                     'See output. Continuing as a temporary measure.')

    if not sync.satisfied('buildhash'):
        buildinfo_hash(conf)
        sync.buildhash = True
Example #16
def build_prerequisites(conf):
    jobs = itertools.chain(build_prereq_jobs(conf),
                           manpage_jobs(conf),
                           table_jobs(conf),
                           api_jobs(conf),
                           toc_jobs(conf),
                           option_jobs(conf),
                           steps_jobs(conf),
                           release_jobs(conf),
                           intersphinx_jobs(conf),
                           image_jobs(conf))

    try:
        res = runner(jobs, parallel='process')
        print('[sphinx-prep]: built {0} pieces of content'.format(len(res)))
    except PoolResultsError:
        print('[WARNING]: sphinx prerequisites encountered errors. '
              'See output above. Continuing as a temporary measure.')

    runner(external_jobs(conf), parallel='thread')

    buildinfo_hash(conf)

    if conf.project.name != 'mms':
        # we copy source manually for mms in makefile.mms, avoiding this
        # operation to clarify the artifacts directory
        transfer_source(conf)

    print('[sphinx-prep]: resolving all intra-source dependencies now. (takes several seconds)')
    dep_count = refresh_dependencies(conf)
    print('[sphinx-prep]: bumped timestamps of {0} files'.format(dep_count))

    command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'), ignore=True)

    print('[sphinx-prep]: INFO - Build in progress past critical phase.')

    dump_file_hashes(conf.system.dependency_cache, conf)
    print('[sphinx-prep]: build environment prepared for sphinx.')
Example #17
def deploy(target, conf=None, pconf=None):
    """Deploys a site. Specifies the deployment target defined in 'push.yaml'"""

    conf = lazy_conf(conf)

    push_conf = ingest_yaml_list(os.path.join(conf.paths.projectroot,
                                              conf.paths.builddata,
                                              'push.yaml'))

    pconf = conf_from_list('target', push_conf)[target]

    if pconf['target'] != target:
        abort('[deploy] [ERROR]: this build environment does not support the {0} target'.format(target))

    res = runner(deploy_jobs(target, conf, pconf), pool=2)
    puts('[deploy]: pushed {0} targets'.format(len(res)))
Example #18
def upload(path, conf=None):
    "Downloads, unwinds and deploys the docs build in a package."

    if conf is None:
        conf = get_conf()

    if env.deploy_target is None:
        abort("[deploy] [tarball] [ERROR]: cannot deploy without a deploy target.")

    tar_path, meta_conf = unwind(path, conf)

    pconf = meta_conf.push[env.deploy_target]
    conf = meta_conf.conf

    puts("[deploy] [tarball]: deploying from archive now.")
    count = runner(deploy_jobs(env.deploy_target, conf, pconf), pool=2)
    puts("[deploy]: pushed {0} targets".format(count))
    puts("[deploy] [tarball]: Deployed {0} from archive.".format(env.deploy_target))
Example #19
def upload(path, conf=None):
    "Downloads, unwinds and deploys the docs build in a package."

    if conf is None:
        conf = get_conf()

    if env.deploy_target is None:
        abort('[deploy] [tarball] [ERROR]: cannot deploy without a deploy target.')

    tar_path, meta_conf = unwind(path, conf)

    pconf = meta_conf.push[env.deploy_target]
    conf = meta_conf.conf

    puts("[deploy] [tarball]: deploying from archive now.")
    count = runner(deploy_jobs(env.deploy_target, conf, pconf), pool=2)
    puts('[deploy]: pushed {0} targets'.format(count))
    puts("[deploy] [tarball]: Deployed {0} from archive.".format(env.deploy_target))
Example #20
def build_process_prerequsites(conf):
    jobs = itertools.chain(build_prereq_jobs(conf),
                           manpage_jobs(conf),
                           table_jobs(conf),
                           api_jobs(conf),
                           option_jobs(conf),
                           steps_jobs(conf),
                           release_jobs(conf),
                           intersphinx_jobs(conf),
                           image_jobs(conf))

    try:
        res = runner(jobs, parallel='process')
        print('[sphinx-prep]: built {0} pieces of content'.format(len(res)))
    except PoolResultsError:
        print('[WARNING]: sphinx prerequisites encountered errors. '
              'See output above. Continuing as a temporary measure.')

    buildinfo_hash(conf)
Example #21
def sphinx_build(targets, conf, sconf, finalize_fun):
    if len(targets) == 0:
        targets.append('html')

    sconf = render_sphinx_config(sconf)

    target_jobs = []

    sync = StateAttributeDict()
    for target in targets:
        if target in sconf:
            lsconf = compute_sphinx_config(target, sconf, conf)
            lconf = edition_setup(lsconf, conf)
            lconf = language_setup(lsconf, lconf)

            target_jobs.append({
                'job': build_worker,
                'args': [target, lsconf, lconf, sync, finalize_fun],
                'description': "sphinx build worker for {0}".format(target)
            })
        else:
            logger.warning(
                'not building sphinx target {0} without configuration.'.format(
                    target))

    # a batch of prereq jobs goes here.
    primer_migrate_pages(conf)
    build_process_prerequsites(sync, conf)

    res = runner(target_jobs, parallel='threads')

    output_sphinx_stream(
        '\n'.join([
            r[1] if isinstance(r, tuple) else r for r in res if r is not None
        ]), conf)

    logger.info('built {0} sphinx targets'.format(len(res)))
Example #22
def finalize_build(builder, sconf, conf):
    if "language" in sconf:
        # reinitialize conf and builders for internationalization
        conf.paths = render_paths(conf, sconf.language)
        builder = sconf.builder
        target = builder
    else:
        # mms compatibility
        target = builder
        builder = builder.split("-", 1)[0]

    jobs = {
        "linkcheck": [
            {
                "job": printer,
                "args": ["[{0}]: See {1}/{0}/output.txt for output.".format(builder, conf.paths.branch_output)],
            }
        ],
        "dirhtml": [{"job": finalize_dirhtml_build, "args": [target, conf]}],
        "json": json_output_jobs(conf),
        "singlehtml": finalize_single_html_jobs(target, conf),
        "latex": [{"job": pdf_worker, "args": [target, conf]}],
        "man": itertools.chain(manpage_url_jobs(conf), [{"job": man_tarball, "args": [conf]}]),
        "html": [{"job": html_tarball, "args": [target, conf]}],
        "gettext": gettext_jobs(conf),
        "all": [],
    }

    if builder not in jobs:
        jobs[builder] = []

    if conf.system.branched is True and conf.git.branches.current == "master":
        jobs["dirhtml"].append({"job": create_manual_symlink, "args": [conf]})

    print("[sphinx] [post] [{0}]: running post-processing steps.".format(builder))
    res = runner(itertools.chain(jobs[builder], jobs["all"]), pool=1)
    print("[sphinx] [post] [{0}]: completed {1} post-processing steps".format(builder, len(res)))
Example #23
def build_job_prerequsites(sync, sconf, conf):
    runner(external_jobs(conf), parallel='thread')

    with update_source_lock:
        cond_toc = "build_toc"
        cond_name = 'transfered_source'
        cond_dep = 'updated_deps'
        cond_steps = 'build_step_files'

        if conf.project.name in ['mms', 'training']:
            cond_name += '_' + sconf.edition
            cond_toc += '_' + sconf.edition
            cond_dep += '_' + sconf.edition
            cond_steps += '_' + sconf.edition

        if sync.satisfied(cond_name) is False:
            transfer_source(sconf, conf)
            sync[cond_name] = True

        if 'excluded' in sconf:
            logger.info('removing excluded files')
            for fn in sconf.excluded:
                fqfn = os.path.join(conf.paths.projectroot, conf.paths.branch_source, fn[1:])
                if os.path.exists(fqfn):
                    if os.path.isdir(fqfn):
                        rmtree(fqfn)
                    else:
                        os.remove(fqfn)
                    logger.debug('removed {0}'.format(fqfn))

            logger.info('removed {0} files'.format(len(sconf.excluded)))

        with ProcessPool() as p:
            # these must run here so that MMS can generate different toc/steps/etc for
            # each edition.

            if sync.satisfied(cond_toc) is False:
                # even if this fails we don't want it to run more than once
                sync[cond_toc] = True
                tr = p.runner(toc_jobs(conf))
                logger.info('generated {0} toc files'.format(len(tr)))

            if sync.satisfied(cond_steps) is False:
                sync[cond_steps] = True
                sr = p.runner(steps_jobs(conf))
                logger.info('generated {0} step files'.format(len(sr)))

        if sync.satisfied(cond_dep) is False:
            logger.debug('using update deps lock.')

            logger.info('resolving all intra-source dependencies for the sphinx build now (takes several seconds).')
            dep_count = refresh_dependencies(conf)
            logger.info('bumped {0} dependencies'.format(dep_count))
            sync[cond_dep] = True

            command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'), ignore=True)
            logger.info('sphinx build in progress past critical phase ({0})'.format(conf.paths.branch_source))
            dump_file_hashes(conf)
        else:
            logger.debug('dependencies already updated, lock unneeded.')

        logger.debug('releasing dependency update lock.')

    logger.info('build environment prepared for sphinx build {0}.'.format(sconf.builder))
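
This example (and its near-duplicate, Example #50) also leans on a ProcessPool context manager that is not included on this page. Assuming only the surface the examples use, a context manager exposing runner(jobs) with the same job-dict contract, a minimal stand-in might look like:

import multiprocessing

class ProcessPool(object):
    # Sketch of the pool wrapper used above, built on multiprocessing.Pool.
    def __enter__(self):
        self.pool = multiprocessing.Pool()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.pool.close()
        self.pool.join()
        return False

    def runner(self, jobs):
        # Dispatch each {'job': fn, 'args': [...]} dict to a worker
        # process and block until every result is in.
        results = [self.pool.apply_async(job['job'], job.get('args', []))
                   for job in jobs]
        return [r.get() for r in results]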
Example #24
def releases():
    res = runner(release_jobs(), retval=True)
    print('[releases]: completed regenerating {0} release files.'.format(
        len(res)))
Example #25
def images():
    res = runner(image_jobs(), retval=True)

    print('[image]: rebuilt {0} rst and image files'.format(len(res)))
Example #26
def tables():
    res = runner(table_jobs(), retval=True)

    print('[table]: built {0} tables'.format(len(res)))
Example #27
def toc():
    conf = lazy_conf()
    res = runner(toc_jobs(conf), retval=True)

    print('[toc]: built {0} tables of contents'.format(len(res)))
Example #28
def options():
    conf = lazy_conf()

    res = runner(option_jobs(conf), retval=True)

    logging.info('options: rendered {0} options'.format(len(res)))
Example #29
def images():
    res = runner( image_jobs(), retval=True)

    print('[image]: rebuilt {0} rst and image files'.format(len(res)))
Example #30
def api():
    res = runner( api_jobs(), retval=True )

    print('[api]: generated {0} tables for api items'.format(len(res)))
Example #31
def releases():
    res = runner( release_jobs(), retval=True )
    logging.info('completed regenerating {0} release files.'.format(len(res)))
Example #32
def images():
    res = runner( image_jobs(), retval=True)
    logging.info('rebuilt {0} rst and image files'.format(len(res)))
Example #33
def options():
    conf = lazy_conf()

    res = runner( option_jobs(conf), retval=True )

    print('[options]: rendered {0} options'.format(len(res)))
Example #34
def images():
    res = runner(image_jobs(), retval=True)
    logging.info('rebuilt {0} rst and image files'.format(len(res)))
Example #35
def api():
    res = runner(api_jobs(), retval=True)

    logging.info('generated {0} tables for api items'.format(len(res)))
Example #36
def releases():
    res = runner( release_jobs(), retval=True )
    print('[releases]: completed regenerating {0} release files.'.format(len(res)))
Example #37
def tables():
    res = runner( table_jobs(), retval=True )

    print('[table]: built {0} tables'.format(len(res)))
Example #38
def api():
    res = runner(api_jobs(), retval=True)

    print('[api]: generated {0} tables for api items'.format(len(res)))
Example #39
def options():
    conf = lazy_conf()

    res = runner(option_jobs(conf), retval=True)

    print('[options]: rendered {0} options'.format(len(res)))
Example #40
def steps():
    conf = lazy_conf()

    res = runner(steps_jobs(conf))

    print('[steps]: rendered {0} step files'.format(len(res)))
Example #41
def steps():
    conf = lazy_conf()

    res = runner(steps_jobs(conf))

    print('[steps]: rendered {0} step files'.format(len(res)))
Example #42
def toc():
    conf = lazy_conf()
    res = runner( toc_jobs(conf), retval=True )

    print('[toc]: built {0} tables of contents'.format(len(res)))
Example #43
def options():
    conf = lazy_conf()

    res = runner( option_jobs(conf), retval=True )

    logging.info('options: rendered {0} options'.format(len(res)))
Example #44
def steps():
    conf = lazy_conf()

    res = runner(steps_jobs(conf))

    logging.info('rendered {0} step files'.format(len(res)))
Example #45
def steps():
    conf = lazy_conf()

    res = runner(steps_jobs(conf))

    logging.info('rendered {0} step files'.format(len(res)))
Example #46
def api():
    res = runner( api_jobs(), retval=True )

    logging.info('generated {0} tables for api items'.format(len(res)))
Example #47
def tables():
    res = runner(table_jobs(), retval=True)

    logging.info('built {0} tables'.format(len(res)))
Example #48
def tables():
    res = runner( table_jobs(), retval=True )

    logging.info('built {0} tables'.format(len(res)))
Example #49
def releases():
    res = runner(release_jobs(), retval=True)
    logging.info('completed regenerating {0} release files.'.format(len(res)))
Example #50
def build_job_prerequsites(sync, sconf, conf):
    runner(external_jobs(conf), parallel='thread')

    with update_source_lock:
        cond_toc = "build_toc"
        cond_name = 'transfered_source'
        cond_dep = 'updated_deps'
        cond_steps = 'build_step_files'

        if conf.project.name in ['mms', 'training']:
            cond_name += '_' + sconf.edition
            cond_toc += '_' + sconf.edition
            cond_dep += '_' + sconf.edition
            cond_steps += '_' + sconf.edition

        if sync.satisfied(cond_name) is False:
            transfer_source(sconf, conf)
            sync[cond_name] = True

        if 'excluded' in sconf:
            logger.info('removing excluded files')
            for fn in sconf.excluded:
                fqfn = os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_source, fn[1:])
                if os.path.exists(fqfn):
                    if os.path.isdir(fqfn):
                        rmtree(fqfn)
                    else:
                        os.remove(fqfn)
                    logger.debug('removed {0}'.format(fqfn))

            logger.info('removed {0} files'.format(len(sconf.excluded)))

        with ProcessPool() as p:
            # these must run here so that MMS can generate different toc/steps/etc for
            # each edition.

            if sync.satisfied(cond_toc) is False:
                # even if this fails we don't want it to run more than once
                sync[cond_toc] = True
                tr = p.runner(toc_jobs(conf))
                logger.info('generated {0} toc files'.format(len(tr)))

            if sync.satisfied(cond_steps) is False:
                sync[cond_steps] = True
                sr = p.runner(steps_jobs(conf))
                logger.info('generated {0} step files'.format(len(sr)))

        if sync.satisfied(cond_dep) is False:
            logger.debug('using update deps lock.')

            logger.info(
                'resolving all intra-source dependencies for the sphinx build now (takes several seconds).'
            )
            dep_count = refresh_dependencies(conf)
            logger.info('bumped {0} dependencies'.format(dep_count))
            sync[cond_dep] = True

            command(build_platform_notification(
                'Sphinx', 'Build in progress past critical phase.'),
                    ignore=True)
            logger.info(
                'sphinx build in progress past critical phase ({0})'.format(
                    conf.paths.branch_source))
            dump_file_hashes(conf)
        else:
            logger.debug('dependencies already updated, lock unneeded.')

        logger.debug('releasing dependency update lock.')

    logger.info('build environment prepared for sphinx build {0}.'.format(
        sconf.builder))