def manpage_url_jobs():
    conf = get_conf()

    project_source = os.path.join(conf.build.paths.projectroot,
                                  conf.build.paths.source)

    top_level_items = set()
    for fs_obj in os.listdir(project_source):
        if fs_obj.startswith('.static') or fs_obj == 'index.txt':
            continue
        if os.path.isdir(os.path.join(project_source, fs_obj)):
            top_level_items.add(fs_obj)
        if fs_obj.endswith('.txt'):
            top_level_items.add(fs_obj[:-4])

    top_level_items = '/' + r'[^\s]*|/'.join(top_level_items) + r'[^\s]*'

    re_string = r'(\\fB({0})\\fP)'.format(top_level_items).replace(r'-', r'\-')
    subst = conf.project.url + '/' + conf.project.tag + r'\2'

    regex_obj = (re.compile(re_string), subst)

    for manpage in expand_tree(os.path.join(conf.build.paths.projectroot,
                                            conf.build.paths.output,
                                            conf.git.branches.current,
                                            'man'), ['1', '5']):
        yield dict(target=manpage,
                   dependency=None,
                   job=manpage_url,
                   args=[regex_obj, manpage])
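# A minimal sketch (not part of the original tooling) of how a job generator
# such as manpage_url_jobs() might be consumed. The real build system
# presumably dispatches these dicts through its own runner; run_jobs() here is
# a hypothetical helper for illustration only.
def run_jobs(job_iterator):
    for task in job_iterator:
        # each yielded dict carries a callable under 'job' and its positional
        # arguments under 'args'; 'target' and 'dependency' describe the
        # relationship the task satisfies in the build graph.
        task['job'](*task['args'])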
def generate_table_build_rules():
    m.section_break('generated table build rules')

    list_tables = []
    rst_tables = []

    for source in expand_tree(paths['includes'], 'yaml'):
        if os.path.basename(source).startswith('table'):
            m.comment('standard rst table')
            target = '.'.join([os.path.splitext(source)[0], 'rst'])
            m.target(target, source)
            m.job('$(PYTHONBIN) {0}/rstcloth/table.py {1} {2}'.format(paths['buildsystem'], source, target))
            m.msg('[tables]: regenerated {0}'.format(target))
            m.newline()
            rst_tables.append(target)

            m.comment('list table')
            target = '.'.join([os.path.splitext(source)[0] + '-list', 'rst'])
            m.target(target, source)
            m.job('$(PYTHONBIN) {0}/rstcloth/table.py {1} {2} --type list'.format(paths['buildsystem'], source, target))
            m.msg('[tables]: regenerated {0}'.format(target))
            m.newline()
            list_tables.append(target)

    targets = rst_tables + list_tables

    m.section_break('meta targets for generated tables')

    m.target('rst-tables', rst_tables)
    m.target('list-tables', list_tables)

    m.target('clean-tables')
    m.job('rm -f ' + ' '.join(targets), ignore=True)
    m.msg('[tables-clean]: removed all generated tables.')

    m.target('.PHONY', 'rst-tables list-tables tables clean-tables')
def list_tasks(query, dir, ext):
    notes = expand_tree(dir, ext)

    tasks = []
    worker_pool(notes, render_list_item, query, tasks)

    return tasks
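# Hypothetical call to list_tasks(); the query string, directory, and file
# extension below are illustrative values, not paths from the original project.
open_items = list_tasks(query='TODO', dir='notes', ext='txt')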
def tags():
    conf = get_conf()

    regexp_fn = os.path.join(conf.build.paths.projectroot,
                             conf.build.paths.tools, 'etags.regexp')

    if not os.path.exists(regexp_fn):
        abort('[dev]: cannot regenerate TAGS: no {0} file'.format(regexp_fn))

    source = expand_tree(os.path.join(conf.build.paths.projectroot,
                                      conf.build.paths.source), 'txt')

    if len(source) == 0:
        abort('[dev]: no source files in {0}'.format(source))

    source = ' '.join(source)

    local('etags -I --language=none --regex=@{0} {1}'.format(regexp_fn, source))

    regexps = [
        (re.compile(r'\.\. (.*):: \$*(.*)'), r'\1.\2'),
        (re.compile(r'\.\. _(.*)'), r'ref.\1')
    ]

    munge_page(fn=os.path.join(conf.build.paths.projectroot, 'TAGS'),
               regex=regexps,
               tag='dev')
def toc_jobs():
    paths = render_paths('obj')

    for fn in expand_tree(paths.includes, 'yaml'):
        if fn.startswith(os.path.join(paths.includes, 'table')):
            pass
        elif len(fn) >= 24:
            base_name = _get_toc_base_name(fn)

            fmt = fn[20:24]
            if fmt != 'spec':
                fmt = fn[16:19]

            o = {
                'dependency': fn,
                'job': _generate_toc_tree,
                'target': [],
                'args': [fn, fmt, base_name, paths]
            }

            if fmt != 'spec':
                o['target'].append(_get_toc_output_name(base_name, 'toc', paths))

            is_ref_spec = fn.startswith(os.path.join(os.path.dirname(fn), 'ref-spec'))

            if not is_ref_spec and (fmt == 'toc' or fmt == 'spec'):
                o['target'].append(_get_toc_output_name(base_name, 'dfn-list', paths))
            elif fmt == 'ref' or is_ref_spec:
                o['target'].append(_get_toc_output_name(base_name, 'table', paths))

            yield o
def stat(locale_dir, language=(), out=sys.stdout):
    """
    Print statistics for all po files.

    :param locale_dir: a locale directory. required.
    :param language: tuple of languages. if empty, all languages found under
                     `locale_dir` are reported.
    :param out: file like object for displaying information.
    :return: None
    """
    if not language:
        language = get_lang_dirs(locale_dir)
    for lang in language:
        lang_dir = os.path.join(locale_dir, lang)
        for po_file in expand_tree(path=lang_dir, extension='po'):
            po = polib.pofile(po_file)
            print_(po_file, ':',
                   ("%d translated, "
                    "%d fuzzy, "
                    "%d untranslated." % (
                        len(po.translated_entries()),
                        len(po.fuzzy_entries()),
                        len(po.untranslated_entries()),
                    )),
                   file=out)
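# Hypothetical invocation of stat(); the 'locale' directory and the language
# tuple are example values only.
stat('locale', language=('ja', 'de'))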
def main():
    source_files = utils.expand_tree(paths['branch-output'] + '/json/', 'fjson')
    outputs = []

    paths['branch-json-list-file'] = '/'.join([paths['branch-output'], 'json-file-list'])
    paths['public-json-list-file'] = paths['branch-staging'] + '/json/.file_list'

    if len(source_files) > 0:
        msg = '[json]: processed all json files.'

        for source in source_files:
            base_fn = source.split('/', 2)[2].rsplit('.', 1)[0]
            output_file = '/'.join([paths['branch-output'], base_fn]) + '.json'

            outputs.append(output_file)

            generate_json_target(source, output_file)

        generate_list_file(outputs, paths['branch-json-list-file'])
    else:
        msg = '[json]: please build json output from sphinx using "make json" before processing the output.'

    generate_meta(outputs, msg)
def update(locale_dir, pot_dir=None, language=(), out=sys.stdout):
    """
    Update the specified languages' po files from pot files.

    :param locale_dir: a locale directory. required.
    :param pot_dir: a pot directory. if omitted, use the `pot` directory under
                    `locale_dir`.
    :param language: tuple of languages. if empty, all languages found under
                     `locale_dir` are updated.
    :param out: file like object for displaying information.
    :return: None
    """
    locale_dir = locale_dir.rstrip()
    if not pot_dir:
        pot_dir = os.path.join(locale_dir, 'pot')
    if not os.path.exists(pot_dir):
        msg = ("%(pot_dir)r does not exist. Please specify a pot directory with "
               "the -p option, or prepare your pot files in %(pot_dir)r." % locals())
        raise RuntimeError(msg)
    if not language:
        language = get_lang_dirs(locale_dir)
    if not language:
        msg = ("No languages were found. Please specify a language with the -l "
               "option, or prepare language directories in %(locale_dir)r." % locals())
        raise RuntimeError(msg)

    for pot_file in expand_tree(path=pot_dir, extension='pot'):
        base, ext = os.path.splitext(pot_file)
        basename = relpath(base, pot_dir)

        for lang in language:
            po_dir = os.path.join(locale_dir, lang, 'LC_MESSAGES')
            po_file = os.path.join(po_dir, basename + ".po")
            outdir = os.path.dirname(po_file)
            if not os.path.exists(outdir):
                os.makedirs(outdir)

            pot = polib.pofile(pot_file)
            if os.path.exists(po_file):
                po = polib.pofile(po_file)
                msgids = set([str(m) for m in po])
                po.merge(pot)
                new_msgids = set([str(m) for m in po])
                if msgids != new_msgids:
                    added = new_msgids - msgids
                    deleted = msgids - new_msgids
                    print_('Update:', po_file, "+%d, -%d" % (
                        len(added), len(deleted)), file=out)
                    po.save(po_file)
                else:
                    print_('Not Changed:', po_file, file=out)
            else:
                po = polib.POFile()
                po.metadata = pot.metadata
                print_('Create:', po_file, file=out)
                po.merge(pot)
                po.save(po_file)
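# Hypothetical invocation of update(); the directory names and language tuple
# are example values only.
update('locale', pot_dir='locale/pot', language=('ja',))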
def main():
    conf_file = utils.get_conf_file(__file__)
    build_all_pdfs(utils.ingest_yaml_list(conf_file))

    m.target("pdfs", utils.expand_tree(os.path.join(paths["branch-output"], "latex"), "tex"))

    m.write(sys.argv[1])
    print('[meta-build]: built "' + sys.argv[1] + '" to specify pdf builders.')
def collect_source_files():
    output = []

    for i in utils.expand_tree('./source/includes', 'yaml'):
        if i.startswith('./source/includes/ref-toc-'):
            output.append((i, 'table'))
        if i.startswith('./source/includes/toc-'):
            output.append((i, 'dfn'))

    return output
def composite_jobs():
    files = expand_tree('source', 'txt')
    inc_pattern = re.compile(r'\.\. include:: (.*\.(?:txt|rst))')

    for fn in files:
        yield {
            'target': fn,
            'dependency': None,
            'job': check_deps,
            'args': [fn, inc_pattern]
        }
def main():
    conf_file = utils.get_conf_file(__file__)
    build_all_pdfs(utils.ingest_yaml_list(conf_file))

    m.target('pdfs', utils.expand_tree(os.path.join(paths['branch-output'], 'latex'), 'tex'))

    m.write(sys.argv[1])
    print('[meta-build]: built "' + sys.argv[1] + '" to specify pdf builders.')
def steps_jobs():
    paths = render_paths('obj')

    for fn in expand_tree(os.path.join(paths.projectroot, paths.includes), 'yaml'):
        if fn.startswith(os.path.join(paths.projectroot, paths.includes, 'step')):
            out_fn = _get_steps_output_fn(fn, paths)

            yield {
                'dependency': fn,
                'target': out_fn,
                'job': render_step_file,
                'args': [fn, out_fn]
            }
def api_jobs():
    paths = render_paths('obj')

    for source in expand_tree(os.path.join(paths.source, 'reference'), 'yaml'):
        target = dot_concat(os.path.splitext(source)[0], 'rst')

        yield {
            'target': target,
            'dependency': source,
            'job': _generate_api_param,
            'args': [source, target]
        }
def api_jobs(conf=None):
    if conf is None:
        conf = get_conf()

    for source in expand_tree(os.path.join(conf.build.paths.projectroot,
                                           conf.build.paths.source,
                                           'reference'), 'yaml'):
        target = dot_concat(os.path.splitext(source)[0], 'rst')

        yield {
            'target': target,
            'dependency': source,
            'job': _generate_api_param,
            'args': [source, target, conf]
        }
def table_jobs():
    paths = render_paths('obj')

    for source in expand_tree(paths.includes, 'yaml'):
        if os.path.basename(source).startswith('table'):
            target = _get_table_output_name(source)
            list_target = _get_list_table_output_name(source)

            yield {
                'target': [target, list_target],
                'dependency': source,
                'job': _generate_tables,
                'args': [source, target, list_target]
            }
def table_jobs():
    paths = get_conf().build.paths

    for source in expand_tree(os.path.join(paths.projectroot, paths.includes), 'yaml'):
        if os.path.basename(source).startswith('table'):
            target = _get_table_output_name(source)
            list_target = _get_list_table_output_name(source)

            yield {
                'target': [target, list_target],
                'dependency': source,
                'job': _generate_tables,
                'args': [source, target, list_target]
            }
def graph_search(problem, heuristics):
    node = nodeTree(problem.agent, None, None, heuristics[problem.agent])
    frontier = PriorityQueue()
    frontier.insert(node)
    explored = []

    while frontier:
        node = frontier.pop()
        if problem.goal_test(node):
            return soloution(node)

        explored.append(node)
        children = expand_tree(problem, node, heuristics)
        for child in children:
            if not frontier.has_item(child) and child not in explored:
                frontier.insert(child)

    return
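# graph_search() assumes a frontier object that exposes insert(), pop(), and
# has_item(), and that is falsy when empty. The class below is a rough sketch
# of that assumed interface, not the project's actual PriorityQueue; ordering
# nodes by a 'cost' attribute is likewise an assumption made for illustration.
import heapq

class SimplePriorityQueue(object):
    def __init__(self):
        self._heap = []
        self._counter = 0  # tie-breaker so heapq never has to compare nodes

    def insert(self, node):
        heapq.heappush(self._heap, (node.cost, self._counter, node))
        self._counter += 1

    def pop(self):
        # return the node with the lowest cost
        return heapq.heappop(self._heap)[-1]

    def has_item(self, node):
        return any(entry[-1] == node for entry in self._heap)

    def __len__(self):
        # makes `while frontier:` terminate once the queue is empty
        return len(self._heap)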
def makefile_footer():
    b = 'meta'

    m.target('pdfs', utils.expand_tree(os.path.join(paths['branch-output'], 'latex'), 'tex'), block=b)
    m.newline()

    m.target('%.pdf', '%.tex', block=b)
    m.job("{0} $(LATEXOPTS) '$<' >|$@.log".format(pdf_latex_command), block=b)
    m.msg("[pdf]: \(1/4\) pdflatex $<", block=b)
    m.job("makeindex -s $(output)/latex/python.ist '$(basename $<).idx' >>$@.log 2>&1", ignore=True, block=b)
    m.msg("[pdf]: \(2/4\) Indexing: $(basename $<).idx", block=b)
    m.job("{0} $(LATEXOPTS) '$<' >|$@.log".format(pdf_latex_command), block=b)
    m.msg("[pdf]: \(3/4\) pdflatex $<", block=b)
    m.job("{0} $(LATEXOPTS) '$<' >|$@.log".format(pdf_latex_command), block=b)
    m.msg("[pdf]: \(4/4\) pdflatex $<", block=b)
    m.msg("[pdf]: see '$@.log' for a full report of the pdf build process.", block=b)
def generate_api_build_rules():
    m.section_break('reference yaml file conversion')

    targets = []
    for source in expand_tree('source/reference', 'yaml'):
        target = '.'.join([os.path.splitext(source)[0], 'rst'])
        m.target(target, source)
        m.job('$(PYTHONBIN) {0}/rstcloth/param.py {1} {2}'.format(paths['buildsystem'], source, target))
        m.newline()
        targets.append(target)

    m.section_break('api reference generation')

    m.target('api', targets)

    m.target('clean-api')
    m.job('rm -f ' + ' '.join(targets), ignore=True)
    m.msg('[api-clean]: removed generated api reference')

    m.target('.PHONY', 'api clean-api')
def generate_api_build_rules():
    m.section_break('reference yaml file conversion')

    targets = []
    for source in expand_tree('source/reference', 'yaml'):
        target = '.'.join([os.path.splitext(source)[0], 'rst'])
        m.target(target, source)
        m.job('$(PYTHONBIN) {0}/rstcloth/param.py {1} {2}'.format(paths['buildsystem'], source, target))
        m.newline()
        targets.append(target)

    m.section_break('api reference generation')

    m.target('api', targets)

    m.target('clean-api')
    m.job('rm -f ' + ' '.join(targets), ignore=True)
    m.msg('[api-clean]: removed generated api reference')

    m.target('.PHONY', 'api clean-api')
def _generate_report(mask, output_file=None, conf=None):
    if conf is None:
        conf = get_conf()

    base_path = os.path.join(conf.build.paths.output, conf.git.branches.current, 'json')
    docs = expand_tree(base_path, '.json')

    if mask is not None and mask.startswith('/'):
        mask = mask[1:]

    output = []

    p = Pool()

    for doc in docs:
        if doc.endswith('searchindex.json') or doc.endswith('globalcontext.json'):
            continue
        elif mask is None:
            output.append(p.apply_async(_render_report, kwds=dict(fn=doc)))
        else:
            if doc.startswith(os.path.join(base_path, mask)):
                output.append(p.apply_async(_render_report, args=(doc,)))

    p.close()
    p.join()

    stats = [_output_report_yaml(o.get()) for o in output]

    if len(stats) > 0:
        stats[0] = stats[0][4:]
        stats.append('...\n')

    if output_file is None:
        return (o.get() for o in output)
    elif output_file == 'print':
        for ln in stats:
            print(ln[:-1])
    else:
        with open(output_file, 'w') as f:
            for ln in stats:
                f.write(ln)
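# Hypothetical calls illustrating the three output modes of _generate_report();
# the mask and file name below are example values only.
_generate_report(mask=None, output_file='print')                # print YAML to stdout
_generate_report(mask='reference', output_file='report.yaml')   # write YAML to a file
results = _generate_report(mask=None)                           # generator of raw results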
def json_output_jobs(conf=None):
    if conf is None:
        conf = get_conf()

    regexes = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>

        if conf.project.name == 'mms':
            path = os.path.join(conf.build.paths.branch_staging, 'json',
                                os.path.splitext(fn.split(os.path.sep, 1)[1])[0])
        else:
            path = os.path.join(conf.build.paths.branch_output, 'json',
                                os.path.splitext(fn.split(os.path.sep, 1)[1])[0])

        fjson = dot_concat(path, 'fjson')
        json = dot_concat(path, 'json')

        yield dict(target=json,
                   dependency=fjson,
                   job=process_json_file,
                   args=(fjson, json, regexes, conf))

        outputs.append(json)

    list_file = os.path.join(conf.build.paths.branch_staging, 'json-file-list')

    yield dict(target=list_file,
               dependency=None,
               job=generate_list_file,
               args=(outputs, list_file, conf))
def build(locale_dir, language=(), out=sys.stdout):
    """
    Build all po files into mo files.

    :param locale_dir: a locale directory. required.
    :param language: tuple of languages. if empty, all languages found under
                     `locale_dir` are built.
    :param out: file like object for displaying information.
    :return: None
    """
    if not language:
        language = get_lang_dirs(locale_dir)
    for lang in language:
        lang_dir = os.path.join(locale_dir, lang)
        for po_file in expand_tree(path=lang_dir, extension='po'):
            mo_file = po_file[:-2] + 'mo'
            print_('Build:', mo_file, file=out)
            po = polib.pofile(po_file)
            po.save_as_mofile(fpath=mo_file)
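# Hypothetical invocation of build(); the locale directory and language tuple
# are example values only.
build('locale', language=('ja',))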
def update_txconfig_resources(transifex_project_name, locale_dir,
                              pot_dir=None, out=sys.stdout):
    """
    Update resource sections of `./.tx/config`.

    :param transifex_project_name: transifex project name.
    :param locale_dir: a locale directory. required.
    :param pot_dir: a pot directory. if omitted, use the `pot` directory under
                    `locale_dir`.
    :param out: file like object for displaying information.
    :return: None
    """
    if pot_dir is None:
        pot_dir = os.path.join(locale_dir, 'pot')

    tx_root = get_tx_root()

    args = {
        'tmpl': (
            '--auto-local -r %(transifex_project_name)s.%(resource_name)s '
            '%(locale_dir)s/<lang>/LC_MESSAGES/%(resource_path)s.po '
            '--source-lang en '
            '--source-file %(locale_dir)s/pot/%(resource_path)s.pot '
            '--execute'
        ),
        'transifex_project_name': transifex_project_name,
        'locale_dir': locale_dir,
        'pot_dir': pot_dir,
        'tx_root': tx_root
    }

    with ProcessPool() as p:
        result = p.map_async(set_pot_for_tx,
                             zip(expand_tree(path=pot_dir, extension='pot'),
                                 itertools.cycle([args])))

        for r in result:
            r.get()

        p.apply_async(txclib.utils.exec_command,
                      args=('set', ['-t', 'PO'], tx_root))

    print_('uploaded %d .pot files to transifex.' % len(result))
def main():
    source_files = utils.expand_tree(paths['branch-output'] + '/json/', 'fjson')
    outputs = []

    paths['branch-json-list-file'] = '/'.join([paths['branch-output'], 'json-file-list'])
    paths['public-json-list-file'] = paths['branch-staging'] + '/json/.file_list'

    if len(source_files) > 0:
        msg = '[json]: processed all json files.'

        for source in source_files:
            base_fn = source.split('/', 2)[2].rsplit('.', 1)[0]
            output_file = '/'.join([paths['branch-output'], base_fn]) + '.json'

            outputs.append(output_file)

            generate_json_target(source, output_file)

        generate_list_file(outputs, paths['branch-json-list-file'])
    else:
        msg = '[json]: please build json output from sphinx using "make json" before processing the output.'

    generate_meta(outputs, msg)
def makefile_footer():
    b = 'meta'

    m.target('pdfs', utils.expand_tree(os.path.join(paths['branch-output'], 'latex'), 'tex'), block=b)
    m.newline()

    m.target('%.pdf', '%.tex', block=b)
    m.job("{0} $(LATEXOPTS) '$<' >|$@.log".format(pdf_latex_command), block=b)
    m.msg("[pdf]: \(1/4\) pdflatex $<", block=b)
    m.job("makeindex -s $(output)/latex/python.ist '$(basename $<).idx' >>$@.log 2>&1", ignore=True, block=b)
    m.msg("[pdf]: \(2/4\) Indexing: $(basename $<).idx", block=b)
    m.job("{0} $(LATEXOPTS) '$<' >|$@.log".format(pdf_latex_command), block=b)
    m.msg("[pdf]: \(3/4\) pdflatex $<", block=b)
    m.job("{0} $(LATEXOPTS) '$<' >|$@.log".format(pdf_latex_command), block=b)
    m.msg("[pdf]: \(4/4\) pdflatex $<", block=b)
    m.msg("[pdf]: see '$@.log' for a full report of the pdf build process.", block=b)
def htaccess(fn='.htaccess'):
    conf = load_conf()

    if env.input_file is None:
        in_files = [i for i in expand_tree(conf.build.paths.builddata, 'yaml')
                    if os.path.basename(i).startswith('htaccess')]
    else:
        in_files = list(env.input_file)

    sources = []
    for i in in_files:
        sources.extend(ingest_yaml_list(i))

    dirname = os.path.dirname(fn)
    if not dirname == '' and not os.path.exists(dirname):
        os.makedirs(dirname)

    lines = set()
    for redir in sources:
        lines.add(generate_redirects(process_redirect(redir, conf), conf=conf, match=False))

    with open(fn, 'w') as f:
        f.writelines(lines)

    puts('[redirect]: regenerated {0} with {1} redirects ({2} lines)'.format(fn, len(sources), len(lines)))