def htaccess(fn='.htaccess'):
    conf = lazy_conf()

    in_files = (i for i in expand_tree(conf.paths.builddata, 'yaml')
                if os.path.basename(i).startswith('htaccess'))

    sources = []
    for i in in_files:
        sources.extend(ingest_yaml_list(i))

    dirname = os.path.dirname(fn)
    if dirname != '' and not os.path.exists(dirname):
        os.makedirs(dirname)

    lines = set()
    for redir in sources:
        lines.add(generate_redirects(process_redirect(redir, conf), conf=conf, match=False))

    with open(fn, 'w') as f:
        f.writelines(lines)
        f.write('\n')
        f.writelines([r'<FilesMatch "\.(ttf|otf|eot|woff)$">', '\n',
                      ' Header set Access-Control-Allow-Origin "*"', '\n',
                      '</FilesMatch>', '\n'])

    logging.info('redirect: regenerated {0} with {1} redirects ({2} lines)'.format(
        fn, len(sources), len(lines)))
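# Every task in this section resolves its configuration through lazy_conf().
# The helper itself is not part of this section; judging from the call sites
# (lazy_conf(), lazy_conf(None), and lazy_conf(conf) are used interchangeably)
# it behaves like a memoizing pass-through. A minimal self-contained sketch of
# that contract -- the loader below is a stand-in, not the real one:
_cached_conf = None

def _load_conf_stub():
    # placeholder for the real configuration loader
    return {'project': {'name': 'example'}}

def lazy_conf(conf=None):
    global _cached_conf
    if conf is not None:         # caller already resolved a configuration
        return conf
    if _cached_conf is None:     # first call: load once and cache
        _cached_conf = _load_conf_stub()
    return _cached_conf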
def am(obj, repo=None):
    "Runs 'git am' on a github object."
    if repo is None:
        repo = lazy_conf().git.remote.upstream

    cmd = ['curl',
           'https://github.com/{0}/'.format(repo),
           '|', 'git', 'am',
           '--signoff --3way' if env.sign else '--3way']

    if env.branch is not None:
        local('git checkout {0}'.format(env.branch))

    for obj in obj.split(','):
        if obj.startswith('http'):
            cmd[1] = obj
            if not obj.endswith('.patch'):
                cmd[1] += '.patch'
            local(' '.join(cmd))
        elif re.search('[a-zA-Z]+', obj):
            cmd[1] = cmd[1] + 'commit/' + obj + '.patch'
            local(' '.join(cmd))
            puts('[git]: merged commit {0} for {1} into {2}'.format(obj, repo, get_branch()))
        else:
            cmd[1] = cmd[1] + 'pull/' + obj + '.patch'
            local(' '.join(cmd))
            puts('[git]: merged pull request #{0} for {1} into {2}'.format(obj, repo, get_branch()))

    if env.branch is not None:
        local('git checkout -')
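# For orientation, the pipeline assembled above is a single shell string. For a
# pull-request number (the final branch of the loop), the pieces compose like
# this -- the repository name and PR number are placeholders:
repo = 'exampleorg/docs'
cmd = ['curl', 'https://github.com/{0}/'.format(repo), '|', 'git', 'am', '--3way']
cmd[1] = cmd[1] + 'pull/' + '1234' + '.patch'
print(' '.join(cmd))
# -> curl https://github.com/exampleorg/docs/pull/1234.patch | git am --3way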
def check():
    conf = lazy_conf(None)

    tx_conf = os.path.join(conf.paths.projectroot, ".tx", 'config')

    with open(tx_conf, 'r') as f:
        files = [l.rsplit(' ', 1)[1].strip()
                 for l in f.readlines()
                 if l.startswith('source_file')]

    errs = 0
    for fn in files:
        fqfn = os.path.join(conf.paths.projectroot, fn)

        if not os.path.exists(fqfn):
            errs += 1
            logger.error(fqfn + " does not exist.")

    if errs != 0:
        logger.warning("{0} files configured that don't exist.".format(errs))
    else:
        logger.info('all configured translation source files exist')

    return errs
def get_branched_path(options, conf=None, *args):
    conf = lazy_conf(conf)

    if 'branched' in options:
        return os.path.join(os.path.sep.join(args), conf.git.branches.current)
    else:
        return os.path.sep.join(args)
def filter(mask):
    "Returns a subset of the dependency graph based on a required 'mask' argument."
    conf = lazy_conf()

    mask = resolve_mask(mask)

    render_for_console(includes_masked(mask=mask, conf=conf))
def update():
    "Builds gettext and updates 'locale/' directory with new files."
    sphinx_builder = 'gettext'

    conf = lazy_conf(None)
    sconf = compute_sphinx_config(sphinx_builder, get_sconf(conf), conf)
    conf = edition_setup(sconf.edition, conf)

    sphinx_prereq(conf)

    try:
        rm_path = os.path.join(conf.paths.projectroot,
                               conf.paths.branch_source,
                               'meta', 'includes.txt')
        os.remove(rm_path)
        print('[tx]: removed {0}'.format(rm_path))
    except OSError:
        pass

    sphinx_build(builder=sphinx_builder, conf=conf, sconf=sconf, finalize_fun=None)

    print('[tx] [sphinx]: rebuild gettext targets')

    tx_cmd = "sphinx-intl update-txconfig-resources --pot-dir {path} --transifex-project-name={name}"

    command(tx_cmd.format(path=os.path.join(conf.paths.branch_output, sphinx_builder),
                          name='-'.join(conf.project.title.lower().split())))

    print('[tx] [sphinx-intl]: updated pot directory')
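# The Transifex project name passed to sphinx-intl above is derived
# mechanically from the project title: lower-case it and join the words with
# hyphens. For a hypothetical title:
title = 'Example Manual'
print('-'.join(title.lower().split()))   # -> example-manual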
def update():
    "Builds gettext and updates 'locale/' directory with new files."
    sphinx_builder = 'gettext'

    conf = lazy_conf(None)
    sconf = get_sconf(conf)
    sconf.builder = sphinx_builder
    sync = StateAttributeDict()

    if 'edition' in sconf:
        conf = edition_setup(sconf.edition, conf)

    # includes_file = os.path.join(conf.paths.branch_source, 'meta', 'includes.txt')
    # if os.path.exists(includes_file):
    #     os.remove(includes_file)

    sphinx_build(builder=sphinx_builder, conf=conf, sconf=sconf, sync=sync, finalize_fun=None)

    print('[tx] [sphinx]: rebuild gettext targets')

    tx_cmd = "sphinx-intl update-txconfig-resources --pot-dir {path} --transifex-project-name={name}"

    command(tx_cmd.format(path=os.path.join(conf.paths.branch_output, sphinx_builder),
                          name='-'.join(conf.project.title.lower().split())))

    print('[tx] [sphinx-intl]: updated pot directory')
def target(*targets):
    "Builds one or more sphinx targets with prerequisites and post-processing."
    conf = lazy_conf()
    sconf = get_sconf(conf)

    sphinx_build(targets, conf, sconf, finalize_build)
def builds(days=14):
    "Cleans all builds older than 'n' number of days. Defaults to 14."
    cutoff = time.time() - 60 * 60 * 24 * int(days)

    path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'build'))
    builds = [os.path.join(path, o) for o in os.listdir(path)
              if os.path.isdir(os.path.join(path, o))]

    conf = lazy_conf()

    for build in builds:
        branch = build.rsplit('/', 1)[1]

        if branch in conf.git.branches.published:
            continue
        elif branch == get_branch():
            continue
        elif branch == 'public':
            continue
        elif os.stat(build).st_mtime < cutoff:
            _rm_rf(build)
            _rm_rf(os.path.join(path, 'public', branch))
            logger.warning('removed stale build artifact: ' + build)
def intersphinx():
    "Downloads all intersphinx files if out of date."
    conf = lazy_conf(None)

    res = runner(intersphinx_jobs(conf))

    puts('[intersphinx]: processed {0} intersphinx inventories'.format(len(res)))
def migrate(conf=None):
    "Migrates all manual files to primer according to the spec. As needed."
    conf = lazy_conf(conf)

    migration_paths = get_migration_specifications(conf)

    if conf.project.name != 'primer':
        return False
    elif len(migration_paths) == 0:
        return False
    else:
        migrations = ingest_yaml_list(*migration_paths)

        truncate_jobs = []
        munge_jobs = []
        migration_jobs = []
        append_jobs = []

        for page in migrations:
            if 'sources' in page:
                migrations.extend(convert_multi_source(page))
                continue

            page = fix_migration_paths(page)

            fq_target = os.path.join(conf.paths.projectroot, conf.paths.source, page['target'])
            fq_source = os.path.join(conf.paths.manual_source, page['source'])

            migration_jobs.append(build_migration_job(fq_target, fq_source))

            if 'truncate' in page:
                truncate_jobs.append(build_truncate_job(page['truncate'], fq_target))

            if 'transform' in page:
                migration_jobs[-1]['job'] = copy_always
                munge_jobs.append(build_transform_job(page['transform'], fq_target))

            if 'append' in page:
                migration_jobs[-1]['job'] = copy_always
                append_jobs.append(build_append_job(page, fq_target, migration_paths))

        with ProcessPool() as p:
            migration_res = p.runner(migration_jobs)
            munge_res = p.runner(post_process_jobs(tasks=munge_jobs))
            truncate_res = p.runner(truncate_jobs)
            append_res = p.runner(append_jobs)

        msg = '[primer]: migrated {0}, munged {1}, truncated {2}, and appended to {3} pages.'
        print(msg.format(len(migration_res), len(munge_res), len(truncate_res), len(append_res)))

        return True
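# The migration specification consumed above is a YAML list; each entry maps a
# 'source' page in the manual onto a 'target' page in primer, with optional
# 'truncate', 'transform', and 'append' directives (and a 'sources' key that
# convert_multi_source() expands into several entries). A hypothetical entry,
# shown as the dict ingest_yaml_list() would yield -- every value here is
# illustrative, not taken from a real spec:
page = {
    'source': 'core/read-operations.txt',   # path within the manual's source tree
    'target': 'core/read-operations.txt',   # path within primer's source tree
    'truncate': {},     # optional: handled by build_truncate_job()
    'transform': [],    # optional: forces copy_always plus a munge job
    'append': {},       # optional: forces copy_always plus an append job
}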
def target(*targets):
    "Builds a sphinx target with prerequisites and post-processing."
    conf = lazy_conf()
    sconf = get_sconf(conf)

    primer_migrate_pages(conf)

    sphinx_build(targets, conf, sconf, finalize_build)
def cleanup():
    conf = lazy_conf(None)

    for fn in include_files_unused(conf):
        fn = os.path.join(conf.paths.source, fn[1:])
        if os.path.exists(fn):
            os.remove(fn)
            logger.info("removed {0}, which was an unused include file.".format(fn))
        else:
            logger.error('{0} does not exist'.format(fn))
def tx_resources():
    conf = lazy_conf(None)

    tx_conf = os.path.join(conf.paths.projectroot, ".tx", 'config')

    with open(tx_conf, 'r') as f:
        resources = [l.strip()[1:-1]
                     for l in f.readlines()
                     if l.startswith('[')][1:]

    return resources
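# A standard Transifex .tx/config starts with a [main] section followed by one
# section per resource; the comprehension above strips the surrounding brackets
# and drops [main] via the trailing [1:]. A self-contained demonstration with a
# made-up resource name:
sample = """[main]
host = https://www.transifex.com

[example-manual.about]
source_file = locale/pot/about.pot
"""
print([l.strip()[1:-1] for l in sample.splitlines() if l.startswith('[')][1:])
# -> ['example-manual.about']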
def cleanup():
    conf = lazy_conf()

    for fn in include_files_unused(conf=conf):
        fn = os.path.join(conf.paths.source, fn[1:])
        if os.path.exists(fn):
            os.remove(fn)
            logger.info("removed {0}, which was an unused include file.".format(fn))
        else:
            logger.error('{0} does not exist'.format(fn))
def sphinx(builder='html', conf=None):
    "Removes a specific sphinx build and associated artifacts. Defaults to 'html'."
    conf = lazy_conf(conf)

    root = conf.paths.branch_output

    cleaner([os.path.join(root, 'doctrees' + '-' + builder),
             os.path.join(root, builder)])

    logger.warning('removed all files supporting the {0} build'.format(builder))
def build(builder='html'):
    "Build a single sphinx target. Does not build prerequisites."
    conf = lazy_conf()
    sconf = BuildConfiguration(filename='sphinx.yaml',
                               directory=os.path.join(conf.paths.projectroot,
                                                      conf.paths.builddata))

    primer_migrate_pages(conf)

    build_worker_wrapper(builder, sconf, conf, finalize_build)
def primer_migrate_pages(conf=None):
    "Migrates all manual files to primer according to the spec. As needed."
    conf = lazy_conf(conf)

    migration_paths = get_migration_specifications(conf)

    if conf.project.name != "primer":
        return False
    elif len(migration_paths) == 0:
        return False
    else:
        migrations = ingest_yaml_list(*migration_paths)

        truncate_jobs = []
        munge_jobs = []
        migration_jobs = []
        append_jobs = []

        for page in migrations:
            if "sources" in page:
                migrations.extend(convert_multi_source(page))
                continue

            page = fix_migration_paths(page)

            fq_target = os.path.join(conf.paths.projectroot, conf.paths.source, page["target"])
            fq_source = os.path.abspath(os.path.join(conf.paths.projectroot, "..", "source", page["source"]))

            migration_jobs.append(build_migration_job(fq_target, fq_source))

            if "truncate" in page:
                truncate_jobs.append(build_truncate_job(page["truncate"], fq_target))

            if "transform" in page:
                migration_jobs[-1]["job"] = copy_always
                munge_jobs.append(build_transform_job(page["transform"], fq_target))

            if "append" in page:
                migration_jobs[-1]["job"] = copy_always
                append_jobs.append(build_append_job(page, fq_target, migration_paths))

        with ProcessPool() as p:
            migration_res = p.runner(migration_jobs)
            munge_res = p.runner(post_process_jobs(tasks=munge_jobs))
            truncate_res = p.runner(truncate_jobs)
            append_res = p.runner(append_jobs)

        msg = "migrated {0}, munged {1}, truncated {2}, and appended to {3} pages."
        logger.info(msg.format(len(migration_res), len(munge_res), len(truncate_res), len(append_res)))

        return True
def pdf_worker(target=None, conf=None):
    conf = lazy_conf(conf)

    if target is None:
        target = 'latex'

    force = False

    with ThreadPool() as p:
        res = []
        for it, queue in enumerate(pdf_jobs(target, conf)):
            res.extend(p.runner(queue))
            logger.info("pdf: completed {0} pdf jobs, in stage {1}".format(len(queue), it))
def pdf_worker(target=None, conf=None):
    conf = lazy_conf(conf)

    if target is None:
        target = 'latex'

    force = False

    with ProcessPool() as p:
        res = []
        for it, queue in enumerate(pdf_jobs(target, conf)):
            res.extend(p.runner(queue))
            print("[pdf]: completed {0} pdf jobs, in stage {1}".format(len(queue), it))
def check(site, conf=None):
    conf = lazy_conf(conf)

    if site.startswith('stag'):
        env.release_info_url = 'http://test.docs.10gen.cc/{0}/release.txt'.format(str(get_branch()))
    elif site == 'ecosystem':
        env.release_info_url = 'http://docs.mongodb.org/ecosystem/release.txt'
    elif site.startswith('prod') or site.startswith('pub'):
        env.release_info_url = 'http://docs.mongodb.org/{0}/release.txt'.format(conf.git.branches.current)

    r = urlopen(env.release_info_url).readlines()[0].split('\n')[0]

    if get_commit() == r:
        raise PublicationError('ERROR: the current published version is the same as the current commit. Make a new commit before publishing.')
    else:
        print('[build]: the current commit is different from the published version.')
def deploy(target, conf=None, pconf=None):
    """Deploys a site. Specifies the deployment target defined in 'push.yaml'"""
    conf = lazy_conf(conf)

    push_conf = ingest_yaml_list(os.path.join(conf.paths.projectroot,
                                              conf.paths.builddata,
                                              'push.yaml'))

    pconf = conf_from_list('target', push_conf)[target]

    if pconf['target'] != target:
        abort('[deploy] [ERROR]: this build environment does not support the {0} target'.format(target))

    res = runner(deploy_jobs(target, conf, pconf), pool=2)

    puts('[deploy]: pushed {0} targets'.format(len(res)))
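# conf_from_list() is not defined in this section; the call site above implies
# that it re-keys a list of mappings by one of their fields so a push.yaml
# entry can be looked up by its 'target' value. A minimal sketch of that
# contract (hypothetical implementation, not the real helper):
def conf_from_list(key, conf_list):
    return dict((item[key], item) for item in conf_list)

# e.g.:
push_conf = [{'target': 'production'}, {'target': 'staging'}]
print(conf_from_list('target', push_conf)['staging'])   # -> {'target': 'staging'}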
def check(site, conf=None):
    conf = lazy_conf(conf)

    if site.startswith('stag'):
        env.release_info_url = 'http://test.docs.10gen.cc/{0}/release.txt'.format(str(get_branch()))
    elif site == 'ecosystem':
        env.release_info_url = 'http://docs.mongodb.org/ecosystem/release.txt'
    elif site.startswith('prod') or site.startswith('pub'):
        env.release_info_url = 'http://docs.mongodb.org/{0}/release.txt'.format(conf.git.branches.current)

    r = urlopen(env.release_info_url).readlines()[0].split('\n')[0]

    if get_commit() == r:
        msg = 'ERROR: the current published version is the same as the current commit. Make a new commit before publishing.'
        logger.critical(msg)
        raise PublicationError(msg)
    else:
        logger.info('the current commit is different from the published version.')
def run():
    if env.regex is None:
        abort('must specify a regex')
    if env.replacement is None:
        abort('must specify a replacement')

    conf = lazy_conf()

    source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
    files = expand_tree(path=source_dir, input_extension=None)

    results = []
    with ProcessPool() as p:
        for fn in files:
            r = p.apply_async(munge_page, args=[fn, (env.regex, env.replacement), fn, 'editing'])
            results.append(r)
def update():
    "Builds gettext and updates 'locale/' directory with new files."
    sphinx_builder = 'gettext'

    conf = lazy_conf(None)
    sconf = get_sconf(conf)
    sconf.builder = sphinx_builder
    sync = StateAttributeDict()

    if 'edition' in sconf:
        conf = edition_setup(sconf.edition, conf)

    # includes_file = os.path.join(conf.paths.branch_source, 'meta', 'includes.txt')
    # if os.path.exists(includes_file):
    #     os.remove(includes_file)

    sphinx_build(builder=sphinx_builder, conf=conf, sconf=sconf, sync=sync, finalize_fun=None)

    logger.info('rebuilt gettext targets')

    tx_cmd = "sphinx-intl update-txconfig-resources --pot-dir {path} --transifex-project-name={name}"

    logger.info('updating translation artifacts. Long running.')

    r = command(tx_cmd.format(path=os.path.join(conf.paths.branch_output, sphinx_builder),
                              name='-'.join(conf.project.title.lower().split())),
                capture=True, ignore=True)

    if r.return_code != 0:
        logger.critical('uploading translations failed.')
        logger.warning(r.err)
        raise SystemExit
    else:
        logger.info(r.out)
        logger.info('sphinx_intl completed successfully: translation uploaded.')

    logger.info('sphinx-intl: updated pot directory')

    check()
    logger.info('completed translation file check.')
def conf(*modifications):
    "Returns the build configuration object for visual introspection. Optionally specify 'edition' argument."
    conf = lazy_conf(None)

    if modifications:
        if isinstance(modifications, tuple) and len(modifications) <= 2:
            pass
        else:
            modifications = [modifications]

        for mod in modifications:
            if len(mod) == 2:
                # two-character arguments select a language; anything else an edition
                sconf = AttributeDict({'language': mod})
                conf = language_setup(sconf, conf)
            else:
                conf = edition_setup(mod, conf)

    puts(json.dumps(conf, indent=3))
def clean(conf=None):
    "Removes all migrated primer files according to the current spec."
    conf = lazy_conf(conf)

    migration_paths = get_migration_specifications(conf)
    migrations = ingest_yaml_list(*migration_paths)

    targets = []
    for page in migrations:
        if 'sources' in page:
            migrations.extend(convert_multi_source(page))
            continue

        page = fix_migration_paths(page)

        targets.append(os.path.join(conf.paths.projectroot, conf.paths.source, page['target']))

    # delete with an explicit loop: map() is lazy under Python 3 and would
    # never call verbose_remove
    for target in targets:
        verbose_remove(target)

    print('[clean] [primer]: removed {0} files'.format(len(targets)))
def clean(conf=None):
    "Removes all migrated primer files according to the current spec."
    conf = lazy_conf(conf)

    migration_paths = get_migration_specifications(conf)
    migrations = ingest_yaml_list(*migration_paths)

    targets = []
    for page in migrations:
        if 'sources' in page:
            migrations.extend(convert_multi_source(page))
            continue

        page = fix_migration_paths(page)

        targets.append(os.path.join(conf.paths.projectroot, conf.paths.source, page['target']))

    # delete with an explicit loop: map() is lazy under Python 3 and would
    # never call verbose_remove
    for target in targets:
        verbose_remove(target)

    logger.info('clean: removed {0} files'.format(len(targets)))
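# Both clean() variants perform the removal pass with an explicit loop because
# map() is lazy under Python 3: it returns an iterator and runs nothing until
# consumed, so a side-effecting pass needs a loop (or an explicit list()
# around the map). A minimal demonstration:
removed = []
lazy = map(removed.append, ['a.txt', 'b.txt'])
print(removed)    # -> [] : nothing has run yet
list(lazy)        # consuming the iterator triggers the calls
print(removed)    # -> ['a.txt', 'b.txt']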
def intersphinx_jobs(conf=None):
    conf = lazy_conf(conf)

    data_file = os.path.join(conf.paths.projectroot,
                             conf.paths.builddata,
                             'intersphinx.yaml')

    if not os.path.exists(data_file):
        return

    intersphinx_mapping = ingest_yaml_list(data_file)

    for i in intersphinx_mapping:
        f = os.path.join(conf.paths.projectroot, conf.paths.output, i['path'])
        s = i['url'] + 'objects.inv'

        yield {
            'target': f,
            'dependency': None,
            'job': download,
            'args': {'f': f, 's': s}
        }
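# intersphinx_jobs() yields plain job descriptions: a callable under 'job' and
# its keyword arguments under 'args'. The runner() that consumes them is not
# shown in this section; a minimal sequential sketch of the consumption
# pattern (a hypothetical simplification of the real pool-backed runner):
def run_jobs(jobs):
    results = []
    for job in jobs:
        results.append(job['job'](**job['args']))   # e.g. download(f=..., s=...)
    return results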
def builds(days=14):
    "Cleans all builds older than 'n' number of days. Defaults to 14."
    days = time.time() - 60 * 60 * 24 * int(days)

    path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../build/')) + '/'
    builds = [path + o for o in os.listdir(path) if os.path.isdir(path + o)]

    conf = lazy_conf()

    for build in builds:
        branch = build.rsplit('/', 1)[1]

        if branch in conf.git.branches.published:
            continue
        elif branch == get_branch():
            continue
        elif branch == 'public':
            continue
        elif os.stat(build).st_mtime < days:
            _rm_rf(build)
            _rm_rf(path + "public/" + branch)
            logger.warning('removed stale build artifact: ' + build)
def include_index():
    conf = lazy_conf()

    write_include_index(conf)
def steps():
    conf = lazy_conf()

    res = runner(steps_jobs(conf))

    logging.info('rendered {0} step files'.format(len(res)))
def options():
    conf = lazy_conf()

    res = runner(option_jobs(conf), retval=True)

    logging.info('options: rendered {0} options'.format(len(res)))
def robots(fn):
    conf = lazy_conf()

    robots_txt_builder(fn, conf, override=True)
def changed():
    "Returns a list of all files that include a file that has changed since the last commit."
    conf = lazy_conf()

    render_for_console(changed_includes(conf))
def prereq():
    "Omnibus operation that builds all prerequisites for a Sphinx build."
    conf = lazy_conf()

    build_prerequisites(conf)
def source(conf=None):
    conf = lazy_conf(conf)

    transfer_source(conf)
def options():
    conf = lazy_conf()

    res = runner(option_jobs(conf), retval=True)

    print('[options]: rendered {0} options'.format(len(res)))
def steps():
    conf = lazy_conf()

    res = runner(steps_jobs(conf))

    print('[steps]: rendered {0} step files'.format(len(res)))
def toc():
    conf = lazy_conf()

    res = runner(toc_jobs(conf), retval=True)

    print('[toc]: built {0} tables of contents'.format(len(res)))