def sphinx_builder(target):
    """Emit makefile targets for the sphinx builder *target* into the
    'production' block and return the list of target names generated.

    ``target`` is a hyphen-separated builder spec (e.g. ``html`` or
    ``html-hosted``); specs with more than three components are rejected.
    """
    b = 'production'
    m.comment(target, block=b)

    ret_value = [target]
    target_parts = target.split('-')

    if len(target_parts) > 3:
        print('[meta-build]: Invalid sphinx builder: ' + target)
    elif len(target_parts) == 1:
        # Bare builder name: also emit a matching "clean-<builder>" target.
        builder = target
        clean_target = '-'.join(['clean', builder])
        ret_value.append(clean_target)
        m.target(clean_target, block=b)
        m.job('fab clean.sphinx:{0}'.format(builder), block=b)
        m.newline(block=b)
    else:
        # Two or three components: emit the target plus a "-debug" variant.
        # BUG FIX: was ``target[0]`` (the first *character* of the string);
        # the builder is the first hyphen-separated component.
        builder = target_parts[0]
        m.target(target, block=b)
        # ``target == 'gettext'`` was redundant with the membership test.
        if 'gettext' in target:
            m.job('fab tx.update', block=b)
        else:
            base = 'fab'
            if site_conf.project.name == 'ecosystem':
                # ecosystem builds run through the serial entry point.
                base += ' serial'
            m.job('{0} sphinx.target:{1}'.format(base, target), block=b)
            m.job(build_platform_notification('Sphinx', 'completed {0} build.'.format(target)),
                  ignore=True, block=b)

        m.target(target + '-debug', block=b)
        ret_value.append(target + '-debug')
        if 'gettext' in target:
            m.job('fab log.set:debug tx.update', block=b)
        else:
            m.job('fab log.set:debug sphinx.target:{0}'.format(target), block=b)
            m.job(build_platform_notification('Sphinx', 'completed {0} build.'.format(target)),
                  ignore=True, block=b)

    return ret_value
def sphinx_builder(target):
    """Emit makefile targets for the sphinx builder *target* into the
    'production' block and return the list of target names generated.

    ``target`` is a hyphen-separated builder spec (e.g. ``html`` or
    ``html-hosted``); specs with more than three components are rejected.
    """
    b = 'production'
    m.comment(target, block=b)

    ret_value = [target]
    target_parts = target.split('-')

    if len(target_parts) > 3:
        print('[meta-build]: Invalid sphinx builder: ' + target)
    elif len(target_parts) == 1:
        # Bare builder name: also emit a matching "clean-<builder>" target.
        builder = target
        clean_target = '-'.join(['clean', builder])
        ret_value.append(clean_target)
        m.target(clean_target, block=b)
        m.job('fab clean.sphinx:{0}'.format(builder), block=b)
        m.newline(block=b)
    else:
        # Two or three components: emit the target plus a "-debug" variant.
        # BUG FIX: was ``target[0]`` (the first *character* of the string);
        # the builder is the first hyphen-separated component.
        builder = target_parts[0]
        m.target(target, block=b)
        # ``target == 'gettext'`` was redundant with the membership test.
        if 'gettext' in target:
            m.job('fab tx.update', block=b)
        else:
            base = 'fab'
            if site_conf.project.name == 'ecosystem':
                # ecosystem builds run through the serial entry point.
                base += ' serial'
            m.job('{0} sphinx.target:{1}'.format(base, target), block=b)
            m.job(build_platform_notification('Sphinx', 'completed {0} build.'.format(target)),
                  ignore=True, block=b)

        m.target(target + '-debug', block=b)
        ret_value.append(target + '-debug')
        if 'gettext' in target:
            m.job('fab log.set:debug tx.update', block=b)
        else:
            m.job('fab log.set:debug sphinx.target:{0}'.format(target), block=b)
            m.job(build_platform_notification('Sphinx', 'completed {0} build.'.format(target)),
                  ignore=True, block=b)

    return ret_value
def sphinx_builder(target):
    """Emit makefile targets for the sphinx builder *target* into the
    'production' block and return the list of target names generated.

    ``target`` is a hyphen-separated builder spec (e.g. ``html`` or
    ``html-hosted``).  Specs with more than three components raise.
    """
    b = 'production'
    m.comment(target, block=b)

    fab_prefix = 'fab'
    ret_value = [target]
    fab_arg = [target]
    target_parts = target.split('-')

    if len(target_parts) > 3:
        raise Exception('[meta-build]: Invalid sphinx builder: ' + target)
    elif len(target_parts) == 1:
        # Bare builder name: also emit a matching "clean-<builder>" target.
        builder = target
        clean_target = '-'.join(['clean', builder])
        ret_value.append(clean_target)
        m.target(clean_target, block=b)
        m.job('fab clean.sphinx:{0}'.format(builder), block=b)
        m.newline(block=b)
    else:
        # BUG FIX: edition/builder are hyphen-separated *components* of the
        # target; the original indexed characters of the string
        # (``target[1]``), so the 'hosted'/'saas' branch could never fire.
        edition = target_parts[1]
        if edition == 'hosted' or edition == 'saas':
            fab_prefix += " sphinx.edition:" + edition
            builder = target_parts[0]
            fab_arg.append('tag=' + edition)
            if edition == 'hosted':
                # hosted output is additionally namespaced by branch.
                fab_arg.append('root=' + os.path.join(paths['output'], edition,
                                                      utils.get_branch()))
            elif edition == 'saas':
                fab_arg.append('root=' + os.path.join(paths['output'], edition))

        # m.target(target, 'sphinx-prerequisites', block=b)
        # m.job(fab_prefix + ' sphinx.build:' + ','.join(fab_arg), block=b)

        # edition builds depend on generated source.
        if target.endswith('saas') or target.endswith('hosted'):
            m.target(target, 'generate-source', block=b)
        else:
            m.target(target, block=b)

        # ``target == 'gettext'`` was redundant with the membership test.
        if 'gettext' in target:
            m.job('{0} tx.update'.format(fab_prefix), block=b)
        else:
            m.job('{0} sphinx.target:{1}'.format(fab_prefix, target), block=b)
            m.job(build_platform_notification('Sphinx', 'completed {0} build.'.format(target)),
                  ignore=True, block=b)

    return ret_value
def sphinx_builder(target):
    """Emit makefile targets for the sphinx builder *target* into the
    'production' block and return the list of target names generated.

    ``target`` is a hyphen-separated builder spec (e.g. ``html`` or
    ``html-hosted``).  Specs with more than three components raise.
    """
    b = 'production'
    m.comment(target, block=b)

    fab_prefix = 'fab'
    ret_value = [target]
    fab_arg = [target]
    target_parts = target.split('-')

    if len(target_parts) > 3:
        raise Exception('[meta-build]: Invalid sphinx builder: ' + target)
    elif len(target_parts) == 1:
        # Bare builder name: also emit a matching "clean-<builder>" target.
        builder = target
        clean_target = '-'.join(['clean', builder])
        ret_value.append(clean_target)
        m.target(clean_target, block=b)
        m.job('fab clean.sphinx:{0}'.format(builder), block=b)
        m.newline(block=b)
    else:
        # BUG FIX: edition/builder are hyphen-separated *components* of the
        # target; the original indexed characters of the string
        # (``target[1]``), so the 'hosted'/'saas' branch could never fire.
        edition = target_parts[1]
        if edition == 'hosted' or edition == 'saas':
            fab_prefix += " sphinx.edition:" + edition
            builder = target_parts[0]
            fab_arg.append('tag=' + edition)
            if edition == 'hosted':
                # hosted output is additionally namespaced by branch.
                fab_arg.append('root=' + os.path.join(paths['output'], edition,
                                                      utils.get_branch()))
            elif edition == 'saas':
                fab_arg.append('root=' + os.path.join(paths['output'], edition))

        # m.target(target, 'sphinx-prerequisites', block=b)
        # m.job(fab_prefix + ' sphinx.build:' + ','.join(fab_arg), block=b)

        # edition builds depend on generated source.
        if target.endswith('saas') or target.endswith('hosted'):
            m.target(target, 'generate-source', block=b)
        else:
            m.target(target, block=b)

        # ``target == 'gettext'`` was redundant with the membership test.
        if 'gettext' in target:
            m.job('{0} tx.update'.format(fab_prefix), block=b)
        else:
            m.job('{0} sphinx.target:{1}'.format(fab_prefix, target), block=b)
            m.job(build_platform_notification('Sphinx', 'completed {0} build.'.format(target)),
                  ignore=True, block=b)

    return ret_value
def build_prerequisites(conf):
    """Run every content-generation job that must complete before sphinx
    can build, then prime the build environment (source transfer,
    dependency timestamp refresh, file-hash cache dump)."""
    jobs = itertools.chain(
        build_prereq_jobs(conf),
        manpage_jobs(conf),
        table_jobs(conf),
        api_jobs(conf),
        toc_jobs(conf),
        option_jobs(conf),
        steps_jobs(conf),
        release_jobs(conf),
        intersphinx_jobs(conf),
        image_jobs(conf),
    )

    try:
        res = runner(jobs, parallel="process")
        print("[sphinx-prep]: built {0} pieces of content".format(len(res)))
    except PoolResultsError:
        # Deliberately best-effort: report prerequisite failures but keep going.
        print(
            "[WARNING]: sphinx prerequisites encountered errors. "
            "See output above. Continuing as a temporary measure."
        )

    runner(external_jobs(conf), parallel="thread")

    buildinfo_hash(conf)
    if conf.project.name != "mms":
        # we copy source manually for mms in makefile.mms, avoiding this
        # operation to clarify the artifacts directory
        transfer_source(conf)

    print("[sphinx-prep]: resolving all intra-source dependencies now. (takes several seconds)")
    dep_count = refresh_dependencies(conf)
    print("[sphinx-prep]: bumped timestamps of {0} files".format(dep_count))

    # BUG FIX: notification text read "pastb critical phase" -- the adjacent
    # print statement confirms the intended wording is "past critical phase".
    command(build_platform_notification("Sphinx", "Build in progress past critical phase."),
            ignore=True)
    print("[sphinx-prep]: INFO - Build in progress past critical phase.")

    dump_file_hashes(conf.system.dependency_cache, conf)
    print("[sphinx-prep]: build environment prepared for sphinx.")
def build_job_prerequsites(sync, sconf, conf):
    """Prepare the build environment for one sphinx build job.

    Uses *sync* (a shared condition tracker -- ``satisfied``/attribute/item
    access) so that source transfer, toc generation, and dependency
    refreshing each run at most once across concurrent jobs.
    """
    runner(external_jobs(conf), parallel='thread')

    with update_source_lock:
        if conf.project.name != 'mms':
            if sync.satisfied('transfered_source') is False:
                transfer_source(sconf, conf)
                sync.transfered_source = True
            cond_toc = "build_toc"
        else:
            # mms builds per-edition source via make; track conditions
            # separately for each edition.
            cond_name = 'transfered_' + sconf.edition
            cond_toc = 'build_toc_' + sconf.edition
            if sync.satisfied(cond_name) is False:
                cmd = 'make -C {0} {1}-source-dir={0}{2}{3} EDITION={1} generate-source-{1}'
                cmd = cmd.format(conf.paths.projectroot, sconf.edition,
                                 os.path.sep, conf.paths.branch_source)
                o = command(cmd, capture=True)
                if len(o.out.strip()) > 0:
                    print(o.out)
                sync[cond_name] = True

        if sync.satisfied(cond_toc) is False:
            # this has to go here so that MMS can generate different toc trees for
            # each edition.
            runner(toc_jobs(conf), parallel='process')
            sync[cond_toc] = True

    with update_deps_lock:
        if sync.satisfied('updated_deps') is False:
            print('[sphinx-prep]: resolving all intra-source dependencies now. (takes several seconds)')
            dep_count = refresh_dependencies(conf)
            print('[sphinx-prep]: bumped timestamps of {0} files'.format(dep_count))
            sync.updated_deps = True
            # BUG FIX: notification text read "pastb critical phase"; the
            # print below confirms the intended wording.
            command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'),
                    ignore=True)
            print('[sphinx-prep]: INFO - Build in progress past critical phase ({0})'.format(conf.paths.branch_source))
            dump_file_hashes(conf)

    print('[sphinx-prep]: build environment prepared for sphinx.')
def build_prerequisites(conf):
    """Run every content-generation job that must complete before sphinx
    can build, then prime the build environment (source transfer,
    dependency timestamp refresh, file-hash cache dump)."""
    jobs = itertools.chain(build_prereq_jobs(conf),
                           manpage_jobs(conf),
                           table_jobs(conf),
                           api_jobs(conf),
                           toc_jobs(conf),
                           option_jobs(conf),
                           steps_jobs(conf),
                           release_jobs(conf),
                           intersphinx_jobs(conf),
                           image_jobs(conf))

    try:
        res = runner(jobs, parallel='process')
        print('[sphinx-prep]: built {0} pieces of content'.format(len(res)))
    except PoolResultsError:
        # Deliberately best-effort: report prerequisite failures but keep going.
        print('[WARNING]: sphinx prerequisites encountered errors. '
              'See output above. Continuing as a temporary measure.')

    runner(external_jobs(conf), parallel='thread')

    buildinfo_hash(conf)
    if conf.project.name != 'mms':
        # we copy source manually for mms in makefile.mms, avoiding this
        # operation to clarify the artifacts directory
        transfer_source(conf)

    print('[sphinx-prep]: resolving all intra-source dependencies now. (takes several seconds)')
    dep_count = refresh_dependencies(conf)
    print('[sphinx-prep]: bumped timestamps of {0} files'.format(dep_count))

    # BUG FIX: notification text read "pastb critical phase" -- the adjacent
    # print statement confirms the intended wording is "past critical phase".
    command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'),
            ignore=True)
    print('[sphinx-prep]: INFO - Build in progress past critical phase.')

    dump_file_hashes(conf.system.dependency_cache, conf)
    print('[sphinx-prep]: build environment prepared for sphinx.')
def build_job_prerequsites(sync, sconf, conf):
    """Prepare the build environment for one sphinx build job.

    Uses *sync* (a shared condition tracker -- ``satisfied``/item access) so
    that source transfer, toc/steps generation, and dependency refreshing
    each run at most once across concurrent jobs; mms/training editions get
    their own condition names.
    """
    runner(external_jobs(conf), parallel='thread')

    with update_source_lock:
        cond_toc = "build_toc"
        cond_name = 'transfered_source'
        cond_dep = 'updated_deps'
        cond_steps = 'build_step_files'

        if conf.project.name in ['mms', 'training']:
            # per-edition tracking so each edition gets its own source/toc/steps.
            cond_name += '_' + sconf.edition
            cond_toc += '_' + sconf.edition
            cond_dep += '_' + sconf.edition
            cond_steps += '_' + sconf.edition

        if sync.satisfied(cond_name) is False:
            transfer_source(sconf, conf)
            sync[cond_name] = True

        if 'excluded' in sconf:
            logger.info('removing excluded files')
            for fn in sconf.excluded:
                # fn[1:] strips the leading path separator so join works.
                fqfn = os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_source, fn[1:])
                if os.path.exists(fqfn):
                    if os.path.isdir(fqfn):
                        rmtree(fqfn)
                    else:
                        os.remove(fqfn)
                    logger.debug('removed {0}'.format(fqfn))
            logger.info('removed {0} files'.format(len(sconf.excluded)))

        with ProcessPool() as p:
            # these must run here so that MMS can generate different toc/steps/etc for
            # each edition.
            if sync.satisfied(cond_toc) is False:
                # even if this fails we don't want it to run more than once
                sync[cond_toc] = True
                tr = p.runner(toc_jobs(conf))
                logger.info('generated {0} toc files'.format(len(tr)))

            if sync.satisfied(cond_steps) is False:
                sync[cond_steps] = True
                sr = p.runner(steps_jobs(conf))
                logger.info('generated {0} step files'.format(len(sr)))

        if sync.satisfied(cond_dep) is False:
            logger.debug('using update deps lock.')
            # BUG FIX: this message was split by a raw newline inside the
            # string literal, which is a syntax error; rejoined on one line.
            logger.info('resolving all intra-source dependencies now. for sphinx build. (takes several seconds)')
            dep_count = refresh_dependencies(conf)
            logger.info('bumped {0} dependencies'.format(dep_count))
            sync[cond_dep] = True
            command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'),
                    ignore=True)
            logger.info('sphinx build in progress past critical phase ({0})'.format(conf.paths.branch_source))
            dump_file_hashes(conf)
        else:
            logger.debug('dependencies already updated, lock unneeded.')

        logger.debug('releasing dependency update lock.')

    # CONSISTENCY FIX: was ``logging.info`` -- every other call in this
    # function uses the module-level ``logger``.
    logger.info('build environment prepared for sphinx build {0}.'.format(sconf.builder))
def build_job_prerequsites(sync, sconf, conf):
    """Prepare the build environment for one sphinx build job.

    Uses *sync* (a shared condition tracker -- ``satisfied``/item access) so
    that source transfer, toc/steps generation, and dependency refreshing
    each run at most once across concurrent jobs; mms/training editions get
    their own condition names.
    """
    runner(external_jobs(conf), parallel='thread')

    with update_source_lock:
        cond_toc = "build_toc"
        cond_name = 'transfered_source'
        cond_dep = 'updated_deps'
        cond_steps = 'build_step_files'

        if conf.project.name in ['mms', 'training']:
            # per-edition tracking so each edition gets its own source/toc/steps.
            cond_name += '_' + sconf.edition
            cond_toc += '_' + sconf.edition
            cond_dep += '_' + sconf.edition
            cond_steps += '_' + sconf.edition

        if sync.satisfied(cond_name) is False:
            transfer_source(sconf, conf)
            sync[cond_name] = True

        if 'excluded' in sconf:
            logger.info('removing excluded files')
            for fn in sconf.excluded:
                # fn[1:] strips the leading path separator so join works.
                fqfn = os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_source, fn[1:])
                if os.path.exists(fqfn):
                    if os.path.isdir(fqfn):
                        rmtree(fqfn)
                    else:
                        os.remove(fqfn)
                    logger.debug('removed {0}'.format(fqfn))
            logger.info('removed {0} files'.format(len(sconf.excluded)))

        with ProcessPool() as p:
            # these must run here so that MMS can generate different toc/steps/etc for
            # each edition.
            if sync.satisfied(cond_toc) is False:
                # even if this fails we don't want it to run more than once
                sync[cond_toc] = True
                tr = p.runner(toc_jobs(conf))
                logger.info('generated {0} toc files'.format(len(tr)))

            if sync.satisfied(cond_steps) is False:
                sync[cond_steps] = True
                sr = p.runner(steps_jobs(conf))
                logger.info('generated {0} step files'.format(len(sr)))

        if sync.satisfied(cond_dep) is False:
            logger.debug('using update deps lock.')
            # BUG FIX: this message was split by a raw newline inside the
            # string literal, which is a syntax error; rejoined on one line.
            logger.info('resolving all intra-source dependencies now. for sphinx build. (takes several seconds)')
            dep_count = refresh_dependencies(conf)
            logger.info('bumped {0} dependencies'.format(dep_count))
            sync[cond_dep] = True
            command(build_platform_notification('Sphinx', 'Build in progress past critical phase.'),
                    ignore=True)
            logger.info('sphinx build in progress past critical phase ({0})'.format(conf.paths.branch_source))
            dump_file_hashes(conf)
        else:
            logger.debug('dependencies already updated, lock unneeded.')

        logger.debug('releasing dependency update lock.')

    # CONSISTENCY FIX: was ``logging.info`` -- every other call in this
    # function uses the module-level ``logger``.
    logger.info('build environment prepared for sphinx build {0}.'.format(sconf.builder))