def main():
    """Regenerate the redirect file from the user-supplied YAML data.

    Documents of type 'redirect' are accumulated and written to
    ``ui.filename``; documents of type 'draft' are rendered to stdout only.
    """
    ui = user_input()
    conf = lazy_conf()

    collected = []
    for document in ingest_yaml(ui.data):
        doc_type = document['type']
        if doc_type == 'redirect':
            collected.append(generate_redirects(process_redirect(document, conf=conf),
                                                match=ui.match, conf=conf))
        elif doc_type == 'draft':
            print(generate_redirects(process_redirect(document, conf=conf),
                                     match=ui.match, conf=conf))

    if collected:
        target_dir = os.path.dirname(ui.filename)
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)

        with open(ui.filename, 'w') as fobj:
            fobj.writelines(collected)

        print('[redirect]: regenerated ' + ui.filename + ' file.')
def main():
    """Build the meta-build file (sys.argv[1]) that specifies sphinx builders."""
    builder_config = ingest_yaml(
        get_conf_file(file=__file__, directory=conf.paths.builddata))

    make_all_sphinx(builder_config)

    m.write(sys.argv[1])
    print('[meta-build]: built "' + sys.argv[1] + '" to specify sphinx builders.')
def main():
    """Build the meta-build file (sys.argv[1]) that specifies deploy dependencies."""
    deploy_config = ingest_yaml(
        get_conf_file(file=__file__, directory=conf.paths.builddata))

    generate_new_deploy_system(deploy_config)

    m.write(sys.argv[1])
    print('[meta-build]: built "' + sys.argv[1] + '" to specify dependencies files.')
def main():
    """Build the meta-build file (sys.argv[1]) that specifies sphinx migrations."""
    migration_conf_fn = get_conf_file(file=__file__,
                                      directory=conf.paths.builddata)
    build_all_sphinx_migrations(ingest_yaml(migration_conf_fn))

    m.write(sys.argv[1])
    print('[meta-build]: built "' + sys.argv[1] + '" to specify sphinx migrations.')
def release_jobs(conf=None):
    """Yield build-job descriptions for release install-instruction includes.

    Reads ``builddata/releases.yaml`` and, for every release in
    ``source-files``, yields one job per install style (curl, untar, copy);
    then yields one enterprise job per ``subscription-build`` entry. Yields
    nothing when the data file does not exist.

    :param conf: optional configuration object; resolved via ``lazy_conf``.
    """
    conf = lazy_conf(conf)

    data_file = os.path.join(conf.paths.builddata, 'releases') + '.yaml'

    # Prefer the explicit release version; fall back to the first published one.
    if 'release' in conf.version:
        release_version = conf.version.release
    else:
        release_version = conf.version.published[0]

    if not os.path.exists(data_file):
        return

    # Fix: reuse the path computed above instead of rebuilding it inline.
    rel_data = ingest_yaml(data_file)

    deps = [os.path.join(conf.paths.projectroot, conf.system.conf_file)]

    # (install style, generator job) pairs; order matters — it preserves the
    # original curl/untar/copy yield sequence for each release.
    core_generators = [
        ('curl', _generate_release_core),
        ('untar', _generate_untar_core),
        ('copy', _generate_copy_core),
    ]

    for rel in rel_data['source-files']:
        for style, job in core_generators:
            target = os.path.join(conf.paths.projectroot,
                                  conf.paths.includes,
                                  'install-{0}-release-{1}.rst'.format(style, rel))
            yield {
                'target': target,
                'dependency': deps,
                'job': job,
                'args': [rel, target, release_version]
            }

    for rel in rel_data['subscription-build']:
        target = 'source/includes/install-curl-release-ent-{0}.rst'.format(rel['system'])

        yield {
            'target': target,
            'dependency': deps,
            'job': _generate_release_ent,
            'args': [rel, target, release_version]
        }
def post_process_jobs(source_fn=None, tasks=None, conf=None):
    """Yield page post-processing job descriptions.

    Input documents should be:

    {
      'transform': {
                     'regex': str,
                     'replace': str
                   }
      'type': <str>
      'file': <str|list>
    }

    ``transform`` can be either a document or a list of documents.

    :param source_fn: optional path to a YAML task specification; defaults to
        ``<project>/<builddata>/processing.yaml`` when ``tasks`` is None.
    :param tasks: optional iterable of task documents; loaded from
        ``source_fn`` when None.
    :param conf: optional configuration object; resolved via ``lazy_conf``.
    :raises ProcessingError: when the specification cannot be parsed or a
        task document is malformed.
    """
    if tasks is None:
        conf = lazy_conf(conf)

        if source_fn is None:
            source_fn = os.path.join(conf.paths.project.root,
                                     conf.paths.builddata,
                                     'processing.yaml')
        tasks = ingest_yaml(source_fn)
    # NOTE: collections.Iterable is a Python 2 / <3.10 alias; on Python 3.3+
    # the canonical name is collections.abc.Iterable.
    elif not isinstance(tasks, collections.Iterable):
        raise ProcessingError(
            '[ERROR]: cannot parse post processing specification.')

    def rjob(fn, regex, builder):
        # One replacement job for a single file. Parameter renamed from
        # ``type`` to ``builder`` to avoid shadowing the builtin.
        return {
            'target': fn,
            'dependency': None,
            'job': process_page,
            'args': dict(fn=fn, output_fn=fn, regex=regex, builder=builder)
        }

    for job in tasks:
        if not isinstance(job, dict):
            raise ProcessingError(
                '[ERROR]: invalid replacement specification.')
        elif 'file' not in job and 'transform' not in job:
            raise ProcessingError(
                '[ERROR]: replacement specification incomplete.')

        if 'type' not in job:
            job['type'] = 'processor'

        # Pre-compile the transform(s) once per job.
        if isinstance(job['transform'], list):
            regex = [(re.compile(rs['regex']), rs['replace'])
                     for rs in job['transform']]
        else:
            regex = (re.compile(job['transform']['regex']),
                     job['transform']['replace'])

        if isinstance(job['file'], list):
            for fn in job['file']:
                yield rjob(fn, regex, job['type'])
        else:
            yield rjob(job['file'], regex, job['type'])
def main():
    """Generate the meta-build output (sys.argv[1]) naming sphinx builders."""
    sphinx_config = ingest_yaml(get_conf_file(file=__file__,
                                              directory=conf.paths.builddata))
    make_all_sphinx(sphinx_config)

    m.write(sys.argv[1])
    print('[meta-build]: built "' + sys.argv[1] + '" to specify sphinx builders.')
def main():
    """Generate the meta-build output (sys.argv[1]) for the deploy system."""
    deploy_conf = ingest_yaml(get_conf_file(file=__file__,
                                            directory=conf.paths.builddata))
    generate_new_deploy_system(deploy_conf)

    m.write(sys.argv[1])
    print('[meta-build]: built "' + sys.argv[1] + '" to specify dependencies files.')
def post_process_jobs(source_fn=None, tasks=None, conf=None):
    """Yield page post-processing job descriptions.

    Input documents should be:

    {
      'transform': {
                     'regex': str,
                     'replace': str
                   }
      'type': <str>
      'file': <str|list>
    }

    ``transform`` can be either a document or a list of documents.

    :param source_fn: optional path to a YAML task specification; defaults to
        ``<project>/<builddata>/processing.yaml`` when ``tasks`` is None.
    :param tasks: optional iterable of task documents; loaded from
        ``source_fn`` when None.
    :param conf: optional configuration object; resolved via ``lazy_conf``.
    :raises ProcessingError: when the specification cannot be parsed or a
        task document is malformed.
    """
    if tasks is None:
        conf = lazy_conf(conf)

        if source_fn is None:
            source_fn = os.path.join(conf.paths.project.root,
                                     conf.paths.builddata,
                                     'processing.yaml')
        tasks = ingest_yaml(source_fn)
    # NOTE: collections.Iterable is a Python 2 / <3.10 alias; on Python 3.3+
    # the canonical name is collections.abc.Iterable.
    elif not isinstance(tasks, collections.Iterable):
        raise ProcessingError('[ERROR]: cannot parse post processing specification.')

    def rjob(fn, regex, builder):
        # One replacement job for a single file. Parameter renamed from
        # ``type`` to ``builder`` to avoid shadowing the builtin.
        return {
            'target': fn,
            'dependency': None,
            'job': process_page,
            'args': dict(fn=fn, output_fn=fn, regex=regex, builder=builder)
        }

    for job in tasks:
        if not isinstance(job, dict):
            raise ProcessingError('[ERROR]: invalid replacement specification.')
        elif 'file' not in job and 'transform' not in job:
            raise ProcessingError('[ERROR]: replacement specification incomplete.')

        if 'type' not in job:
            job['type'] = 'processor'

        # Pre-compile the transform(s) once per job.
        if isinstance(job['transform'], list):
            regex = [(re.compile(rs['regex']), rs['replace'])
                     for rs in job['transform']]
        else:
            regex = (re.compile(job['transform']['regex']),
                     job['transform']['replace'])

        if isinstance(job['file'], list):
            for fn in job['file']:
                yield rjob(fn, regex, job['type'])
        else:
            yield rjob(job['file'], regex, job['type'])
def main():
    """Render the sphinx config and build the meta-build file (sys.argv[1])."""
    raw_config = ingest_yaml(os.path.join(site_conf.paths.builddata,
                                          'sphinx.yaml'))
    make_all_sphinx(render_sphinx_config(raw_config))

    m.write(sys.argv[1])
    print('[meta-build]: built "' + sys.argv[1] + '" to specify sphinx builders.')
def main():
    """Rebuild the sphinx meta-build output, removing any stale file first."""
    output_fn = sys.argv[1]
    if os.path.isfile(output_fn):
        os.remove(output_fn)

    raw_config = ingest_yaml(os.path.join(site_conf.paths.builddata,
                                          'sphinx.yaml'))
    make_all_sphinx(render_sphinx_config(raw_config))
def post_process_jobs(source_fn=None, tasks=None, conf=None):
    """Yield page post-processing job descriptions.

    Input documents should be:

    {
      'transform': {
                     'regex': str,
                     'replace': str
                   }
      'type': <str>
      'file': <str|list>
    }

    ``transform`` can be either a document or a list of documents.

    :param source_fn: optional path to a YAML task specification; defaults to
        ``<project>/<builddata>/processing.yaml`` when ``tasks`` is None.
    :param tasks: optional iterable of task documents; loaded from
        ``source_fn`` when None.
    :param conf: optional configuration object; resolved via ``lazy_conf``.
    :raises ProcessingError: when the specification cannot be parsed or a
        task document is malformed.
    """
    if tasks is None:
        conf = lazy_conf(conf)

        if source_fn is None:
            source_fn = os.path.join(conf.paths.project.root,
                                     conf.paths.builddata,
                                     "processing.yaml")
        tasks = ingest_yaml(source_fn)
    # NOTE: collections.Iterable is a Python 2 / <3.10 alias; on Python 3.3+
    # the canonical name is collections.abc.Iterable.
    elif not isinstance(tasks, collections.Iterable):
        raise ProcessingError("[ERROR]: cannot parse post processing specification.")

    def rjob(fn, regex, builder):
        # One replacement job for a single file. Parameter renamed from
        # ``type`` to ``builder`` to avoid shadowing the builtin.
        return {
            "target": fn,
            "dependency": None,
            "job": process_page,
            "args": dict(fn=fn, output_fn=fn, regex=regex, builder=builder),
        }

    for job in tasks:
        if not isinstance(job, dict):
            raise ProcessingError("[ERROR]: invalid replacement specification.")
        elif "file" not in job and "transform" not in job:
            raise ProcessingError("[ERROR]: replacement specification incomplete.")

        if "type" not in job:
            job["type"] = "processor"

        # Pre-compile the transform(s) once per job.
        if isinstance(job["transform"], list):
            regex = [(re.compile(rs["regex"]), rs["replace"])
                     for rs in job["transform"]]
        else:
            regex = (re.compile(job["transform"]["regex"]),
                     job["transform"]["replace"])

        if isinstance(job["file"], list):
            for fn in job["file"]:
                yield rjob(fn, regex, job["type"])
        else:
            yield rjob(job["file"], regex, job["type"])
def get_sphinx_builders(conf=None):
    """Return the sphinx builder names declared in builddata/sphinx.yaml.

    If the config has an explicit ``builders`` key, return it; otherwise
    return every top-level key except the meta keys.

    :param conf: optional configuration object; resolved via ``lazy_conf``.
    """
    conf = lazy_conf(conf)

    path = os.path.join(conf.paths.builddata, 'sphinx.yaml')
    sconf = ingest_yaml(path)

    if 'builders' in sconf:
        return sconf['builders']
    else:
        # Filter rather than ``del``-ing the meta keys: same result, but the
        # parsed config is not mutated (matches the newer variants of this
        # function elsewhere in the file).
        return [key for key in sconf.keys()
                if key not in ('prerequisites', 'generated-source')]
def get_sphinx_builders(conf=None):
    """Return the sphinx builder names from the rendered builddata/sphinx.yaml.

    An explicit ``builders`` key wins; otherwise every top-level key except
    the meta keys is treated as a builder name.
    """
    conf = lazy_conf(conf)

    config_path = os.path.join(conf.paths.builddata, 'sphinx.yaml')
    sconf = render_sphinx_config(ingest_yaml(config_path))

    if 'builders' in sconf:
        return sconf['builders']

    meta_keys = ['prerequisites', 'generated-source',
                 'root-base', 'web-base', 'print-base']
    return [name for name in sconf.keys() if name not in meta_keys]
def get_sphinx_builders(conf=None):
    """List the sphinx builders configured in builddata/sphinx.yaml.

    Returns the ``builders`` entry when present; otherwise all top-level
    keys of the rendered config that are not meta keys.
    """
    conf = lazy_conf(conf)

    sconf = ingest_yaml(os.path.join(conf.paths.builddata, 'sphinx.yaml'))
    sconf = render_sphinx_config(sconf)

    if 'builders' in sconf:
        return sconf['builders']
    else:
        excluded = ['prerequisites', 'generated-source',
                    'root-base', 'web-base', 'print-base']

        builders = []
        for key in sconf.keys():
            if key not in excluded:
                builders.append(key)

        return builders
def main():
    """Regenerate the redirect file from the YAML redirect specification.

    Documents of type 'redirect' are written to ``ui.filename``; documents
    of type 'redirect-draft' are only echoed to stdout.
    """
    ui = user_input()
    conf = lazy_conf()

    output = []
    for document in ingest_yaml(ui.data):
        kind = document['type']
        if kind == 'redirect':
            output.append(generate_redirects(process_redirect(document, conf=conf),
                                             match=ui.match, conf=conf))
        elif kind == 'redirect-draft':
            print(generate_redirects(process_redirect(document, conf=conf),
                                     match=ui.match, conf=conf))

    if output:
        out_dir = os.path.dirname(ui.filename)
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)

        with open(ui.filename, 'w') as fobj:
            fobj.writelines(output)

        print('[redirect]: regenerated ' + ui.filename + ' file.')
def main():
    """Generate integration build targets from the config file.

    When the config has a ``base`` entry, generate base targets plus one set
    of translation targets per language; a language with ``inherit`` merges
    its settings over the inherited language's config. Otherwise the whole
    config describes a single set of integration targets. Writes the
    meta-build output to ``sys.argv[1]``.
    """
    conf_file = get_conf_file(__file__)
    config = ingest_yaml(conf_file)

    if 'base' in config:
        generate_integration_targets(config['base'])

        # Python 2 dict iteration (``iteritems``), matching the rest of the file.
        for lang, lang_config in config.iteritems():
            if lang == 'base':
                continue

            if 'inherit' in lang_config:
                # Fix: copy before update() — previously this mutated the
                # shared inherited entry in ``config`` in place, corrupting
                # it for later iterations.
                new_config = config[lang_config['inherit']].copy()
                new_config.update(lang_config)

                # NOTE(review): 'gennerate' spelling kept — presumably the
                # actual helper name in this project; confirm before renaming.
                gennerate_translation_integration_targets(lang, new_config)
            else:
                gennerate_translation_integration_targets(lang, lang_config)
    else:
        generate_integration_targets(config)

    m.write(sys.argv[1])
    # Fix: message said 'build "...' — siblings consistently say 'built "...'.
    print('[meta-build]: built "' + sys.argv[1] + '" to specify integration targets.')