def finalize_single_html_jobs(builder, conf):
    pjoin = os.path.join

    single_html_dir = get_single_html_dir(conf)
    if not os.path.exists(single_html_dir):
        os.makedirs(single_html_dir)

    try:
        manual_single_html(input_file=pjoin(conf.paths.branch_output, builder, 'contents.html'),
                           output_file=pjoin(single_html_dir, 'index.html'))
    except (IOError, OSError):
        manual_single_html(input_file=pjoin(conf.paths.branch_output, builder, 'index.html'),
                           output_file=pjoin(single_html_dir, 'index.html'))

    copy_if_needed(source_file=pjoin(conf.paths.branch_output, builder, 'objects.inv'),
                   target_file=pjoin(single_html_dir, 'objects.inv'))

    single_path = pjoin(single_html_dir, '_static')

    for fn in expand_tree(pjoin(conf.paths.branch_output, builder, '_static'), None):
        yield {
            'job': copy_if_needed,
            'args': [fn, pjoin(single_path, os.path.basename(fn))],
            'target': None,
            'dependency': None
        }

def toc_jobs(conf):
    paths = copy(conf.paths)

    for fn in expand_tree(paths.includes, 'yaml'):
        if fn.startswith(os.path.join(paths.includes, 'table')):
            continue
        elif fn.startswith(os.path.join(paths.includes, 'step')):
            continue
        elif len(fn) >= 24:
            base_name = _get_toc_base_name(fn)

            fmt = fn[20:24]
            if fmt != 'spec':
                fmt = fn[16:19]

            o = {
                'dependency': os.path.join(paths.projectroot, fn),
                'job': _generate_toc_tree,
                'target': [],
                'args': [fn, fmt, base_name, paths, conf]
            }

            if fmt != 'spec':
                o['target'].append(_get_toc_output_name(base_name, 'toc', paths))

            is_ref_spec = fn.startswith(os.path.join(os.path.dirname(fn), 'ref-spec'))

            if not is_ref_spec and (fmt == 'toc' or fmt == 'spec'):
                o['target'].append(_get_toc_output_name(base_name, 'dfn-list', paths))
            elif fmt == 'ref' or is_ref_spec:
                o['target'].append(_get_toc_output_name(base_name, 'table', paths))

            yield o

def dump_file_hashes(conf=None):
    conf = lazy_conf(conf)

    output = conf.system.dependency_cache

    o = {
        'conf': conf,
        'time': datetime.datetime.utcnow().strftime("%s"),
        'files': {}
    }

    files = expand_tree(os.path.join(conf.paths.projectroot, conf.paths.source), None)

    fmap = o['files']

    for fn in files:
        if os.path.exists(fn):
            fmap[fn] = md5_file(fn)

    output_dir = os.path.dirname(output)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with open(output, 'w') as f:
        json.dump(o, f)

    logger.info('wrote dependency cache to: {0}'.format(output))

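# A minimal sketch (an assumption, not part of the project) of how the cache
# written by dump_file_hashes() could be read back to detect files that changed
# since the last build. The name check_hashes_against_cache is hypothetical;
# md5_file() and the cache layout come from the function above.
def check_hashes_against_cache(cache_path):
    with open(cache_path, 'r') as f:
        cache = json.load(f)

    changed = []
    for fn, old_hash in cache['files'].items():
        if not os.path.exists(fn) or md5_file(fn) != old_hash:
            changed.append(fn)

    return changed
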
def manpage_url_jobs(conf):
    project_source = os.path.join(conf.paths.projectroot, conf.paths.source)

    top_level_items = set()

    for fs_obj in os.listdir(project_source):
        if fs_obj.startswith('.static') or fs_obj == 'index.txt':
            continue
        if os.path.isdir(os.path.join(project_source, fs_obj)):
            top_level_items.add(fs_obj)
        if fs_obj.endswith('.txt'):
            top_level_items.add(fs_obj[:-4])

    top_level_items = '/' + r'[^\s]*|/'.join(top_level_items) + r'[^\s]*'

    re_string = r'(\\fB({0})\\fP)'.format(top_level_items).replace(r'-', r'\-')
    subst = conf.project.url + '/' + conf.project.tag + r'\2'

    regex_obj = (re.compile(re_string), subst)

    for manpage in expand_tree(os.path.join(conf.paths.projectroot,
                                            conf.paths.output,
                                            conf.git.branches.current,
                                            'man'), ['1', '5']):
        yield dict(target=manpage,
                   dependency=None,
                   job=manpage_url,
                   args=[regex_obj, manpage])

def include_files(conf=None):
    conf = lazy_conf(conf)

    source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
    inc_re = re.compile(r'\.\. include:: /(.*)')

    matches = grep(inc_re, expand_tree(source_dir, None))

    def tuple_sort(k):
        return k[1]
    matches.sort(key=tuple_sort)

    files = dict()

    for i in groupby(matches, operator.itemgetter(1)):
        files[i[0]] = set()
        for src in i[1]:
            if not src[0].endswith('~'):
                files[i[0]].add(src[0])
        files[i[0]] = list(files[i[0]])

    files.update(generated_includes(conf))

    return files

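# A small illustrative helper (hypothetical, not in the project) showing how
# the mapping returned by include_files() -- include target -> list of files
# that reference it -- can be inverted to ask which includes a given source
# file depends on.
def invert_include_graph(include_graph):
    dependencies = {}
    for include, clients in include_graph.items():
        for client in clients:
            dependencies.setdefault(client, []).append(include)
    return dependencies
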
def manpage_url_jobs(builder, conf):
    project_source = os.path.join(conf.paths.projectroot, conf.paths.source)

    top_level_items = set()

    for fs_obj in os.listdir(project_source):
        if fs_obj.startswith('.static') or fs_obj == 'index.txt':
            continue
        if os.path.isdir(os.path.join(project_source, fs_obj)):
            top_level_items.add(fs_obj)
        if fs_obj.endswith('.txt'):
            top_level_items.add(fs_obj[:-4])

    top_level_items = '/' + r'[^\s]*|/'.join(top_level_items) + r'[^\s]*'

    re_string = r'(\\fB({0})\\fP)'.format(top_level_items).replace(r'-', r'\-')
    subst = conf.project.url + '/' + conf.project.tag + r'\2'

    regex_obj = (re.compile(re_string), subst)

    for manpage in expand_tree(os.path.join(conf.paths.projectroot,
                                            conf.paths.output,
                                            conf.git.branches.current,
                                            builder), ['1', '5']):
        yield dict(target=manpage,
                   dependency=None,
                   job=manpage_url,
                   args=[regex_obj, manpage])

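# A hedged illustration of the (compiled_regex, substitution) pair built by
# manpage_url_jobs(). manpage_url() itself is not shown in this file, so the
# plain re.sub usage below is an assumption, and the URL, tag, and sample text
# are invented. Assuming 'reference' is one of the top-level source items:
#
#     pattern, subst = regex_obj
#     pattern.sub(subst, r'see \fB/reference/mongod\fP for details')
#
# With conf.project.url = 'http://docs.example.org' and conf.project.tag =
# 'manual', the whole bold cross reference is replaced, yielding
# 'see http://docs.example.org/manual/reference/mongod for details'.
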
def main(files):
    options = Options()

    for fn in expand_tree('./', 'yaml'):
        options.ingest(fn)

    for opt in options.iterator():
        renderer = OptionRendered(opt)
        # note: 'fn' here is whatever file the ingest loop above saw last
        renderer.render(os.path.dirname(fn))

def steps_jobs(conf):
    paths = conf.paths

    for fn in expand_tree(os.path.join(paths.projectroot, paths.includes), 'yaml'):
        if fn.startswith(os.path.join(paths.projectroot, paths.includes, 'step')):
            out_fn = _get_steps_output_fn(fn, paths)

            yield {
                'dependency': fn,
                'target': out_fn,
                'job': render_step_file,
                'args': [fn, out_fn]
            }

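# A minimal sketch, under the assumption that the job dicts yielded by the
# *_jobs() generators in this module are executed by calling 'job' with 'args'.
# The project's actual runner(), used elsewhere in this file, may additionally
# consult 'target' and 'dependency' and run jobs in a worker pool; the name
# run_jobs_serially below is hypothetical.
def run_jobs_serially(jobs):
    results = []
    for job in jobs:
        results.append(job['job'](*job['args']))
    return results
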
def api_jobs(conf=None):
    conf = lazy_conf(conf)

    for source in expand_tree(os.path.join(conf.paths.projectroot,
                                           conf.paths.source, 'reference'), 'yaml'):
        target = dot_concat(os.path.splitext(source)[0], 'rst')

        yield {
            'target': target,
            'dependency': source,
            'job': _generate_api_param,
            'args': [source, target, conf]
        }

def include_files_unused(conf, inc_files=None):
    if inc_files is None:
        # fn[6:] drops the leading 'source' segment so entries match the keys
        # returned by include_files()
        inc_files = [fn[6:] for fn in expand_tree(os.path.join(conf.paths.includes), None)]

    mapping = include_files(conf=conf)

    results = []
    for fn in inc_files:
        if fn.endswith('yaml') or fn.endswith('~'):
            continue
        if fn not in mapping.keys():
            results.append(fn)

    return results

def steps_jobs(conf):
    paths = conf.paths

    for fn in expand_tree(os.path.join(paths.projectroot, paths.includes), 'yaml'):
        if fn.startswith(os.path.join(paths.projectroot, paths.includes, 'step')):
            out_fn = _get_steps_output_fn(fn, paths)

            yield {
                'dependency': fn,
                'target': out_fn,
                'job': render_step_file,
                'args': [fn, out_fn, conf],
                'description': 'generating step file {0} from {1}'.format(out_fn, fn)
            }

def jobs(conf):
    conf.km = AttributeDict()
    conf.km.spec = os.path.abspath(os.path.join(conf.paths.projectroot, '..',
                                                'mongo-' + conf.version.release,
                                                'modules'))

    data = runner([{'job': BuildConfiguration, 'args': [fn]}
                   for fn in expand_tree(conf.km.spec)],
                  force=False, pool=8)

    for item in data:
        yield {
            'job': render_page,
            'args': [item, conf]
        }

def table_jobs(conf=None):
    conf = lazy_conf(conf)

    for source in expand_tree(os.path.join(conf.paths.projectroot, conf.paths.includes), 'yaml'):
        if os.path.basename(source).startswith('table'):
            target = _get_table_output_name(source)
            list_target = _get_list_table_output_name(source)

            yield {
                'target': [target, list_target],
                'dependency': source,
                'job': _generate_tables,
                'args': [source, target, list_target]
            }

def gettext_jobs(conf=None):
    conf = lazy_conf(conf)

    locale_dirs = os.path.join(conf.paths.projectroot, conf.paths.locale, 'pot')

    branch_output = os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output, 'gettext')

    path_offset = len(branch_output) + 1

    for fn in expand_tree(branch_output, None):
        yield {
            'job': copy_if_needed,
            'args': [fn, os.path.join(locale_dirs, fn[path_offset:]), None]
        }

def option_jobs(conf):
    paths = conf.paths

    options = Options()
    options.source_dirname = os.path.join(paths.projectroot, paths.includes)

    base_path = os.path.join(paths.projectroot, paths.includes)
    output_path = os.path.join(base_path, 'option')

    for fn in expand_tree(base_path, 'yaml'):
        if fn.startswith(output_path):
            options.ingest(fn)

    if not os.path.exists(output_path):
        os.makedirs(output_path)

    for opt in options.iterator():
        yield {'job': render_option_page, 'args': [opt, output_path]}

def gettext_jobs(conf=None):
    conf = lazy_conf(conf)

    locale_dirs = os.path.join(conf.paths.projectroot, conf.paths.locale, 'pot')

    branch_output = os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output, 'gettext')

    path_offset = len(branch_output) + 1

    for fn in expand_tree(branch_output, None):
        yield {
            'job': copy_if_needed,
            'args': [fn, os.path.join(locale_dirs, fn[path_offset:]), None],
            'description': "migrating po file {0} if needed".format(fn)
        }

def json_output_jobs(conf):
    # strip Sphinx HTML leftovers and translate common HTML entities
    regexes = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>
        path = os.path.join(conf.paths.branch_output, 'json',
                            os.path.splitext(fn.split(os.path.sep, 1)[1])[0])
        fjson = dot_concat(path, 'fjson')
        json = dot_concat(path, 'json')

        if conf.project.name == 'mms':
            if not os.path.exists(fjson):
                continue

        yield {
            'target': json,
            'dependency': fjson,
            'job': process_json_file,
            'description': "processing json file {0}".format(json),
            'args': (fjson, json, regexes, conf)
        }

        outputs.append(json)

    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')

    yield {
        'target': list_file,
        'dependency': None,
        'description': 'generating json index list {0}'.format(list_file),
        'job': generate_list_file,
        'args': (outputs, list_file, conf)
    }

    json_output(conf)

def build_sffms(conf):
    munge_script = os.path.join(conf.paths.buildsystem, 'bin', 'sffms-cleanup')
    base_dir = os.path.join(conf.paths.projectroot, conf.paths.output, 'sffms')

    preprocess = [{'job': command, 'args': [' '.join([munge_script, fn])]}
                  for fn in expand_tree(base_dir, 'tex')]

    pdfs = ingest_yaml_list(os.path.join(conf.paths.builddata, 'pdfs.yaml'))

    count = runner(preprocess)
    print("[pdf] [sffms]: preprocessed {0} sffms files".format(count))

    for pdf in pdfs:
        copy_if_needed(source_file=os.path.join(base_dir, pdf['input']),
                       target_file=os.path.join(base_dir, pdf['output']),
                       name='sffms')

    pdf_processor(conf, pdfs, None)

def json_output_jobs(conf=None):
    conf = lazy_conf(conf)

    # strip Sphinx HTML leftovers and translate common HTML entities
    regexes = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>
        path = os.path.join(conf.paths.branch_output, 'json',
                            os.path.splitext(fn.split(os.path.sep, 1)[1])[0])
        fjson = dot_concat(path, 'fjson')
        json = dot_concat(path, 'json')

        if conf.project.name == 'mms':
            if not os.path.exists(fjson):
                continue

        yield dict(target=json,
                   dependency=fjson,
                   job=process_json_file,
                   args=(fjson, json, regexes, conf))

        outputs.append(json)

    list_file = os.path.join(conf.paths.branch_staging, 'json-file-list')

    yield dict(target=list_file,
               dependency=None,
               job=generate_list_file,
               args=(outputs, list_file, conf))

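# An illustrative sketch (process_json_file itself is not shown in this file)
# of how a list of (compiled_pattern, replacement) pairs like 'regexes' above
# is typically applied to the text pulled from a .fjson document before it is
# written back out as .json. The function name below is hypothetical.
def apply_transformations(regexes, text):
    for pattern, replacement in regexes:
        text = pattern.sub(replacement, text)
    return text
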
def generated_includes(conf):
    toc_spec_files = []
    step_files = []

    for fn in expand_tree(os.path.join(conf.paths.includes), input_extension='yaml'):
        base = os.path.basename(fn)

        if base.startswith('toc-spec'):
            toc_spec_files.append(fn)
        elif base.startswith('ref-spec'):
            toc_spec_files.append(fn)
        elif base.startswith('steps'):
            step_files.append(fn)

    maskl = len(conf.paths.source)
    path_prefix = conf.paths.includes[len(conf.paths.source):]

    mapping = {}
    for spec_file in toc_spec_files:
        if os.path.exists(spec_file):
            data = ingest_yaml_doc(spec_file)
        else:
            continue

        deps = [os.path.join(path_prefix, i) for i in data['sources']]

        mapping[spec_file[maskl:]] = deps

    for step_def in step_files:
        data = ingest_yaml_list(step_def)

        deps = []
        for step in data:
            if 'source' in step:
                deps.append(step['source']['file'])

        if len(deps) != 0:
            deps = [os.path.join(path_prefix, i) for i in deps]

            mapping[step_def[maskl:]] = deps

    return mapping

def option_jobs(conf):
    paths = conf.paths

    options = Options()
    options.source_dirname = os.path.join(paths.projectroot, paths.includes)

    base_path = os.path.join(paths.projectroot, paths.includes)
    output_path = os.path.join(base_path, "option")

    for fn in expand_tree(base_path, "yaml"):
        if fn.startswith(output_path):
            options.ingest(fn)

    if not os.path.exists(output_path):
        os.makedirs(output_path)

    for opt in options.iterator():
        yield {
            "job": render_option_page,
            "args": [opt, output_path],
            "description": 'generating option "{0}" for "{1}"'.format(opt.name, opt.program),
        }

def generate_report(mask, output_file=None, conf=None, data=None):
    conf = lazy_config(conf)

    base_path = os.path.join(conf.build.paths.output, conf.git.branches.current, 'json')

    if mask is not None:
        if mask.startswith('/'):
            mask = mask[1:]

        mask = os.path.join(base_path, mask)

    if data is None:
        docs = expand_tree(base_path, '.json')

        output = runner(jobs=report_jobs(docs, mask), retval='results')
    else:
        output = data

    return output

def toc_jobs(conf):
    paths = copy(conf.paths)

    for fn in expand_tree(paths.includes, 'yaml'):
        if fn.startswith(os.path.join(paths.includes, 'table')):
            continue
        elif fn.startswith(os.path.join(paths.includes, 'step')):
            continue
        elif len(fn) >= 24:
            base_name = _get_toc_base_name(fn)

            # the slices read the generator format ('toc', 'ref', or 'spec')
            # out of the filename's fixed position under the includes directory
            fmt = fn[20:24]
            if fmt != 'spec':
                fmt = fn[16:19]

            o = {
                'dependency': os.path.join(paths.projectroot, fn),
                'job': _generate_toc_tree,
                'target': [],
                'args': [fn, fmt, base_name, paths, conf],
                'description': 'generating {0} toc from {1}'.format(fmt, fn)
            }

            if fmt != 'spec':
                o['target'].append(_get_toc_output_name(base_name, 'toc', paths))

            is_ref_spec = fn.startswith(os.path.join(os.path.dirname(fn), 'ref-spec'))

            if not is_ref_spec and (fmt == 'toc' or fmt == 'spec'):
                o['target'].append(_get_toc_output_name(base_name, 'dfn-list', paths))
            elif fmt == 'ref' or is_ref_spec:
                o['target'].append(_get_toc_output_name(base_name, 'table', paths))

            yield o

def external_jobs(conf=None):
    conf = lazy_conf(conf)

    ext_mod_path = os.path.join(conf.paths.projectroot, 'local')
    if not os.path.exists(ext_mod_path):
        # a bare 'raise StopIteration' inside a generator is an error under
        # PEP 479 (Python 3.7+); returning ends the generator cleanly
        return

    external_mods = []

    for mod in expand_tree(ext_mod_path, 'py'):
        path, name = os.path.split(mod)
        name, _ = os.path.splitext(name)

        file, filename, data = imp.find_module(name, [path])
        imp.load_module(name, file, mod, data)
        external_mods.append(name)

    for name in external_mods:
        mod = sys.modules[name]

        if 'jobs' in dir(mod) and 'stage' in dir(mod) and mod.stage.startswith('pre'):
            for task in mod.jobs(conf):
                yield task

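# A hedged sketch of what a drop-in extension module under <projectroot>/local/
# could look like, inferred from the checks in external_jobs() above: it must
# expose a 'stage' string beginning with 'pre' and a 'jobs(conf)' generator.
# The module name and the job it yields are invented for this example.
#
#     # local/example_tasks.py
#     stage = 'pre-build'
#
#     def touch_marker(path):
#         with open(path, 'a'):
#             pass
#
#     def jobs(conf):
#         yield {
#             'job': touch_marker,
#             'args': [os.path.join(conf.paths.branch_output, '.example-marker')],
#             'target': None,
#             'dependency': None
#         }
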
def include_file_data(conf):
    inc_path = os.path.join(conf.paths.includes)
    include_file_list = expand_tree(path=inc_path, input_extension=None)
    include_graph = include_files(conf=conf)
    recursive_use = included_recusively(include_graph)
    generated = generated_includes(conf)

    omni = {}
    for idx, fn in enumerate(include_file_list):
        incf = fn[len(conf.paths.source):]

        if fn.endswith('~'):
            continue

        for prefix in suppressed_page_prefixes:
            if incf.startswith(prefix):
                break
        else:
            omni[incf] = {
                'id': idx,
                'name': os.path.splitext(incf)[0],
                'path': incf,
            }

            if incf in generated:
                omni[incf]['generated'] = True
            else:
                omni[incf]['generated'] = False

            if incf in recursive_use:
                omni[incf]['recursive'] = True
            else:
                omni[incf]['recursive'] = False

            if incf in include_graph:
                omni[incf]['num_clients'] = len(include_graph[incf])

                omni[incf]['clients'] = []
                for cl in include_graph[incf]:
                    cl, ext = os.path.splitext(cl)
                    # os.path.splitext() keeps the leading dot on the extension
                    if ext == '.yaml':
                        continue
                    if (cl.startswith('/includes/generated/overview') or
                            cl.startswith('/includes/manpage-')):
                        continue
                    omni[incf]['clients'].append(cl)

                if len(omni[incf]['clients']) == 0:
                    omni[incf]['yaml_only'] = True
                else:
                    omni[incf]['yaml_only'] = False
            else:
                omni[incf]['clients'] = dict()
                omni[incf]['num_clients'] = 0

            with open(fn, 'r') as f:
                omni[incf]['content'] = [ln.rstrip() for ln in f.readlines()]

    return omni
