Example #1
def build_prerequisites(conf):
    jobs = itertools.chain(
        build_prereq_jobs(conf),
        manpage_jobs(conf),
        table_jobs(conf),
        api_jobs(conf),
        toc_jobs(conf),
        option_jobs(conf),
        steps_jobs(conf),
        release_jobs(conf),
        intersphinx_jobs(conf),
        image_jobs(conf),
    )

    try:
        res = runner(jobs, parallel="process")
        print("[sphinx-prep]: built {0} pieces of content".format(len(res)))
    except PoolResultsError:
        print(
            "[WARNING]: sphinx prerequisites encountered errors. "
            "See output above. Continuing as a temporary measure."
        )

    runner(external_jobs(conf), parallel="thread")

    buildinfo_hash(conf)

    if conf.project.name != "mms":
        # we copy source manually for mms in makefile.mms, avoiding this
        # operation to clarify the artifacts directory
        transfer_source(conf)

    print("[sphinx-prep]: resolving all intra-source dependencies now. (takes several seconds)")
    dep_count = refresh_dependencies(conf)
    print("[sphinx-prep]: bumped timestamps of {0} files".format(dep_count))

    command(build_platform_notification("Sphinx", "Build in progress past critical phase."), ignore=True)

    print("[sphinx-prep]: INFO - Build in progress past critical phase.")

    dump_file_hashes(conf.system.dependency_cache, conf)
    print("[sphinx-prep]: build environment prepared for sphinx.")
Example #2
def build_job_prerequsites(sync, sconf, conf):
    runner(external_jobs(conf), parallel='thread')

    with update_source_lock:
        if conf.project.name != 'mms':
            if sync.satisfied('transfered_source') is False:
                transfer_source(sconf, conf)
                sync.transfered_source = True
            cond_toc = "build_toc"
        else:
            cond_name = 'transfered_' + sconf.edition
            cond_toc = 'build_toc_' + sconf.edition
            if sync.satisfied(cond_name) is False:
                cmd = 'make -C {0} {1}-source-dir={0}{2}{3} EDITION={1} generate-source-{1}'
                cmd = cmd.format(conf.paths.projectroot, sconf.edition, os.path.sep,
                                 conf.paths.branch_source)
                o = command(cmd, capture=True)
                if len(o.out.strip()) > 0:
                    print(o.out)

                sync[cond_name] = True

        if sync.satisfied(cond_toc) is False:
            # this has to go here so that MMS can generate different toc trees for
            # each edition.
            runner(toc_jobs(conf), parallel='process')
            sync[cond_toc] = True

    with update_deps_lock:
        if sync.satisfied('updated_deps') is False:
            print('[sphinx-prep]: resolving all intra-source dependencies now. (takes several seconds)')
            dep_count = refresh_dependencies(conf)
            print('[sphinx-prep]: bumped timestamps of {0} files'.format(dep_count))
            sync.updated_deps = True

            command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'), ignore=True)
            print('[sphinx-prep]: INFO - Build in progress past critical phase ({0})'.format(conf.paths.branch_source))
            dump_file_hashes(conf)

    print('[sphinx-prep]: build environment prepared for sphinx.')
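
The sync object in this example is written both as an attribute (sync.transfered_source = True) and as an item (sync[cond_name] = True), and sync.satisfied(name) checks whether a flag was already set. A minimal thread-safe container with that interface might look like the sketch below; the class name BuildSync and its internals are assumptions inferred from these calls, not the project's real class.

import threading


class BuildSync(object):
    """Thread-safe flag store supporting sync.name, sync['name'], and sync.satisfied()."""

    def __init__(self):
        object.__setattr__(self, "_lock", threading.Lock())
        object.__setattr__(self, "_flags", {})

    def satisfied(self, name):
        # True only once a flag has been explicitly set.
        with self._lock:
            return self._flags.get(name, False)

    def __setitem__(self, name, value):
        with self._lock:
            self._flags[name] = value

    def __getitem__(self, name):
        with self._lock:
            return self._flags[name]

    def __setattr__(self, name, value):
        # Route attribute assignment (sync.updated_deps = True) to the flag dict.
        self[name] = value

With this shape, sync.transfered_source = True and sync[cond_name] = True both land in the same dictionary, and satisfied() returns False for flags that were never set.
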
Example #3
def build_prerequisites(conf):
    jobs = itertools.chain(build_prereq_jobs(conf),
                           manpage_jobs(conf),
                           table_jobs(conf),
                           api_jobs(conf),
                           toc_jobs(conf),
                           option_jobs(conf),
                           steps_jobs(conf),
                           release_jobs(conf),
                           intersphinx_jobs(conf),
                           image_jobs(conf))

    try:
        res = runner(jobs, parallel='process')
        print('[sphinx-prep]: built {0} pieces of content'.format(len(res)))
    except PoolResultsError:
        print('[WARNING]: sphinx prerequisites encountered errors. '
              'See output above. Continuing as a temporary measure.')

    runner(external_jobs(conf), parallel='thread')

    buildinfo_hash(conf)

    if conf.project.name != 'mms':
        # we copy source manually for mms in makefile.mms, avoiding this
        # operation to clarify the artifacts directory
        transfer_source(conf)

    print('[sphinx-prep]: resolving all intra-source dependencies now. (takes several seconds)')
    dep_count = refresh_dependencies(conf)
    print('[sphinx-prep]: bumped timestamps of {0} files'.format(dep_count))

    command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'), ignore=True)

    print('[sphinx-prep]: INFO - Build in progress past critical phase.')

    dump_file_hashes(conf.system.dependency_cache, conf)
    print('[sphinx-prep]: build environment prepared for sphinx.')
Example #4
def build_job_prerequsites(sync, sconf, conf):
    runner(external_jobs(conf), parallel='thread')

    with update_source_lock:
        cond_toc = "build_toc"
        cond_name = 'transfered_source'
        cond_dep = 'updated_deps'
        cond_steps = 'build_step_files'

        if conf.project.name in ['mms', 'training']:
            cond_name += '_' + sconf.edition
            cond_toc += '_' + sconf.edition
            cond_dep += '_' + sconf.edition
            cond_steps += '_' + sconf.edition

        if sync.satisfied(cond_name) is False:
            transfer_source(sconf, conf)
            sync[cond_name] = True

        if 'excluded' in sconf:
            logger.info('removing excluded files')
            for fn in sconf.excluded:
                fqfn = os.path.join(conf.paths.projectroot, conf.paths.branch_source, fn[1:])
                if os.path.exists(fqfn):
                    if os.path.isdir(fqfn):
                        rmtree(fqfn)
                    else:
                        os.remove(fqfn)
                    logger.debug('removed {0}'.format(fqfn))

            logger.info('removed {0} files'.format(len(sconf.excluded)))

        with ProcessPool() as p:
            # these must run here so that MMS can generate different toc/steps/etc for
            # each edition.

            if sync.satisfied(cond_toc) is False:
                # even if this fails we don't want it to run more than once
                sync[cond_toc] = True
                tr = p.runner(toc_jobs(conf))
                logger.info('generated {0} toc files'.format(len(tr)))

            if sync.satisfied(cond_steps) is False:
                sync[cond_steps] = True
                sr = p.runner(steps_jobs(conf))
                logger.info('generated {0} step files'.format(len(sr)))

        if sync.satisfied(cond_dep) is False:
            logger.debug('using update deps lock.')

            logger.info('resolving all intra-source dependencies now. for sphinx build. (takes several seconds)')
            dep_count = refresh_dependencies(conf)
            logger.info('bumped {0} dependencies'.format(dep_count))
            sync[cond_dep] = True

            command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'), ignore=True)
            logger.info('sphinx build in progress past critical phase ({0})'.format(conf.paths.branch_source))
            dump_file_hashes(conf)
        else:
            logger.debug('dependencies already updated, lock unneeded.')

        logger.debug('releasing dependency update lock.')

    logging.info('build environment prepared for sphinx build {0}.'.format(sconf.builder))
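
Examples 4 and 5 switch from the bare runner() call to a ProcessPool used as a context manager, with p.runner(jobs) returning one result per job. A compatible wrapper over concurrent.futures could look roughly like this; the class body, the _call_job helper, and the job shape are assumptions based only on how the pool is used above, not the project's real pool implementation.

import concurrent.futures


def _call_job(job):
    # Same assumed (callable, args) job shape as in the runner() sketch above.
    func, args = job
    return func(*args)


class ProcessPool(object):
    """Context manager whose runner() returns one result per job."""

    def __enter__(self):
        self._executor = concurrent.futures.ProcessPoolExecutor()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._executor.shutdown(wait=True)
        return False

    def runner(self, jobs):
        # map() keeps job order and re-raises the first worker exception here.
        return list(self._executor.map(_call_job, list(jobs)))
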
Example #5
def build_job_prerequsites(sync, sconf, conf):
    runner(external_jobs(conf), parallel='thread')

    with update_source_lock:
        cond_toc = "build_toc"
        cond_name = 'transfered_source'
        cond_dep = 'updated_deps'
        cond_steps = 'build_step_files'

        if conf.project.name in ['mms', 'training']:
            cond_name += '_' + sconf.edition
            cond_toc += '_' + sconf.edition
            cond_dep += '_' + sconf.edition
            cond_steps += '_' + sconf.edition

        if sync.satisfied(cond_name) is False:
            transfer_source(sconf, conf)
            sync[cond_name] = True

        if 'excluded' in sconf:
            logger.info('removing excluded files')
            for fn in sconf.excluded:
                fqfn = os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_source, fn[1:])
                if os.path.exists(fqfn):
                    if os.path.isdir(fqfn):
                        rmtree(fqfn)
                    else:
                        os.remove(fqfn)
                    logger.debug('removed {0}'.format(fqfn))

            logger.info('removed {0} files'.format(len(sconf.excluded)))

        with ProcessPool() as p:
            # these must run here so that MMS can generate different toc/steps/etc for
            # each edition.

            if sync.satisfied(cond_toc) is False:
                # even if this fails we don't want it to run more than once
                sync[cond_toc] = True
                tr = p.runner(toc_jobs(conf))
                logger.info('generated {0} toc files'.format(len(tr)))

            if sync.satisfied(cond_steps) is False:
                sync[cond_steps] = True
                sr = p.runner(steps_jobs(conf))
                logger.info('generated {0} step files'.format(len(sr)))

        if sync.satisfied(cond_dep) is False:
            logger.debug('using update deps lock.')

            logger.info(
                'resolving all intra-source dependencies now. for sphinx build. (takes several seconds)'
            )
            dep_count = refresh_dependencies(conf)
            logger.info('bumped {0} dependencies'.format(dep_count))
            sync[cond_dep] = True

            command(build_platform_notification(
                'Sphinx', 'Build in progress past critical phase.'),
                    ignore=True)
            logger.info(
                'sphinx build in progress past critical phase ({0})'.format(
                    conf.paths.branch_source))
            dump_file_hashes(conf)
        else:
            logger.debug('dependencies already updated, lock unneeded.')

        logger.debug('releasing dependency update lock.')

    logging.info('build environment prepared for sphinx build {0}.'.format(
        sconf.builder))
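
The per-edition flag names and the module-level update_source_lock only matter when several builder threads call build_job_prerequsites at once, one per edition. A hedged usage sketch of that pattern follows; prepare_all_editions and sphinx_configs are illustrative names, and the function and locks defined in the examples above are assumed to be in scope.

import threading


def prepare_all_editions(sync, sphinx_configs, conf):
    # One builder thread per sphinx configuration/edition; they share the same
    # sync object, so the per-edition flag names keep work from running twice.
    threads = [
        threading.Thread(target=build_job_prerequsites, args=(sync, sconf, conf))
        for sconf in sphinx_configs
    ]
    for t in threads:
        t.start()
    for t in threads:
        t.join()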