def build_tex_trans(lang):
    """Collect the TeX translations for one language.

    Walks every ``*.peuf`` file found under ``LANG_PEUF_DIR / lang``,
    normalizes each block of translations, turns it into TeX lines via
    ``texify``, and also records the normalized translations in the
    global ``ALL_TRANS`` registry for *lang*.

    Returns the accumulated list of TeX lines.
    """
    global ALL_TRANS

    collected = []

    for peuf_file in (LANG_PEUF_DIR / lang).walk("file::*.peuf"):
        with ReadBlock(content=peuf_file, mode='keyval:: =') as block:
            for kind, raw_trans in block.mydict("std mini").items():
                cleaned = normalize(raw_trans)

                collected.extend(texify(kind, cleaned))

                # Side effect: feed the global per-language registry too.
                ALL_TRANS[lang].update(cleaned)

    return collected
\\IDarg{{2}} borne supérieure $b$ de l'intervalle $\\{macroname}{{a}}{{b}}$.
""".strip() + "\n"
# NOTE(review): the line above is the tail of a triple-quoted template whose
# start lies before this chunk — left byte-identical.


# ------------ #
# -- CONFIG -- #
# ------------ #

# Raw LaTeX documentation template.
with open(file=TEX_FILE, mode='r', encoding='utf-8') as docfile:
    template_tex = docfile.read()

# Raw LaTeX style-file template.
with open(file=STY_FILE, mode='r', encoding='utf-8') as styfile:
    template_sty = styfile.read()

# Key/value config — ``parse`` (defined elsewhere in this file, presumably)
# reshapes the raw dict into ``{interkind: [(prefix, suffix, delimstart,
# delimend), ...]}`` — TODO confirm against the ``parse`` definition.
with ReadBlock(content=PEUF_FILE, mode="k::=") as data:
    config = parse(data.mydict("std mini"))

for interkind, defs in config.items():
    fortexdoc = []
    macrosdefs = []

    for prefix, suffix, delimstart, delimend in defs:
        macrosdefs += ["\n", texmacro(prefix, suffix, delimstart, delimend)]

        fortexdoc += ["\n\n\\separation\n\n", "\n", texdoc(prefix, suffix)]

    # Drop the leading "\separation" so separators only appear *between*
    # documented macros.
    fortexdoc = "".join(fortexdoc[1:])
    macrosdefs = "".join(macrosdefs)

    # Human-readable section title ("foo-bar" -> "foo bar").
    interkind = interkind.replace('-', ' ')
    # NOTE(review): this loop is the tail of a helper whose ``def`` line lies
    # before this chunk; ``latexsuffixes``, ``latexname`` and ``latexdef``
    # are bound there.
    for onesuf in latexsuffixes:
        # One \newcommand per suffix, e.g. \RRp -> \setspecial{\RR}{p}.
        latexdef.append(
            f"\\newcommand\\{latexname}{onesuf}{{\\ensuremath{{\\setspecial{{\\{latexname}}}{{{onesuf}}}}}}}"
        )

    latexdef = "\n".join(latexdef)

    return latexdef


# -------------------------- #
# -- THE CONSTANTS TO ADD -- #
# -------------------------- #

# Each verbatim block becomes a flat list of whitespace-separated tokens.
with ReadBlock(content=PEUF_FILE, mode="verbatim") as data:
    config = {
        k: " ".join(v).split()
        for k, v in data.mydict("std mini").items()
    }

classicalsets = config["classical-sets"]

# ------------------------- #
# -- TEMPLATES TO UPDATE -- #
# ------------------------- #

with open(file=STY_FILE, mode='r', encoding='utf-8') as styfile:
    template_sty = styfile.read()

# NOTE(review): the body of this ``with`` lies past the end of this chunk.
with open(file=TEX_FILE, mode='r', encoding='utf-8') as docfile:
def projects_to_do(cookiecutter_temp, peuf_dir, all_peuf_choosen):
    """Build the list of cookiecutter projects that still have to be built.

    For every chosen ``.peuf`` project file, read its ``project`` block,
    massage the flat key/value dict into the JSON-like structure expected
    downstream, then either queue the project or record it as already
    built (when its destination directory exists).

    Exits the whole script with status 1 if at least one project was
    already built — nothing is returned in that case.

    NOTE(review): ``path - path`` below looks like a project-specific
    "relative to" operator on the path class used here — TODO confirm.
    """
    projects_to_do = []
    already_build = []

    for onepeuf in all_peuf_choosen:
        # Read the single ``project`` block of the peuf file as a flat dict.
        with ReadBlock(content=onepeuf, mode={"keyval:: =": "project"}) as datas:
            flatdict = datas.mydict("std nosep nonb")['project']

        # The key 'name' becomes 'project_name'.
        flatdict['project_name'] = flatdict['name']
        del flatdict['name']

        # We have to listify the values of the keys starting with "_".
        # (Only values are rebound here, so mutating during iteration is safe.)
        for k, v in flatdict.items():
            # "_launched_by_factory" uses a very specific format:
            # ``command :launch: pattern_1 | pattern_2 | ...``.
            if k == "_launched_by_factory":
                command_n_patterns = [
                    p.strip()
                    for p in v.split(":launch:")
                ]

                command_n_patterns[1] = [
                    p.strip()
                    for p in command_n_patterns[1].split("|")
                ]

                flatdict[k] = command_n_patterns

            # Keys using a comma separated syntax in the peuf file:
            # ";" separates items, "," separates sub-pieces of one item.
            elif k in ["_authors", "_for_test", "_for_factory"]:
                v = [
                    [p.strip() for p in x.split(",")]
                    for x in v.split(";")
                ]

                # For an author, all pieces but the last are re-joined as
                # the name; the last piece is kept apart (presumably the
                # e-mail — TODO confirm against the peuf files).
                if k == "_authors":
                    v = [
                        [', '.join(x[:-1]), x[-1]]
                        for x in v
                    ]

                flatdict[k] = v

            # Ready-to-use list value.
            elif k[0] == "_":
                flatdict[k] = [x.strip() for x in v.split(";")]

        # Has this project already been built?
        newprojectpath = cookiecutter_temp \
            / (
                (onepeuf.parent / onepeuf.stem) - peuf_dir
            )

        if newprojectpath.is_dir():
            already_build.append(newprojectpath)

        # We have something to do.
        else:
            projects_to_do.append({
                'lang': onepeuf.parent.parent.name,
                'kind': onepeuf.parent.name,
                'json': flatdict,
                'relpath': onepeuf.parent - peuf_dir,
            })

    # Some errors have been found.
    if already_build:
        error([
            "Local project already build (erase it if you want to rebuild it): "
        ] + [
            f" + {p}" for p in already_build
        ] + [
            '',
            "Nothing has been done !"
        ])

        exit(1)

    # Everything is ok.
    return projects_to_do
# Make sure the destination folder exists before copying anything into it.
KEYWORDS_FINAL_DIR.create("dir")

# Ship every keyword ``.sty`` file to its final folder.
for peufpath in KEYWORDS_DIR.walk("file::*.sty"):
    peufpath.copy_to(
        dest=KEYWORDS_FINAL_DIR / peufpath.name,
        safemode=False,
    )

# ------------------------- #
# -- TEMPLATES TO UPDATE -- #
# ------------------------- #

# The LaTeX documentation template.
with open(file=TEX_FILE, mode='r', encoding='utf-8') as docfile:
    template_tex = docfile.read()

# French doc infos: a default verbatim part plus a ``titles`` key/value block.
with ReadBlock(
    content=KEYWORDS_DIR / "config" / "for-doc[fr].peuf",
    mode={
        'verbatim': ":default:",
        'keyval:: =': "titles",
    },
) as data:
    docinfos = data.mydict("std mini")

# Pull the titles out so that ``docinfos`` only keeps the remaining blocks.
peuftitles = docinfos.pop("titles")

# The English style file used as the translation reference.
with open(
    file=KEYWORDS_DIR / "english.sty",
    mode='r',
    encoding='utf-8',
) as docfile:
    lang_sty = docfile.read()

# --------------------------------------- #
# -- PREPARING THE UPDATING OF THE DOC -- #
# --------------------------------------- #
# ------------- # # -- PARSING -- # # ------------- # print(' * Looking for parsing infos...') EXTRAS_POINTERS = [] HMS = {} JUMP = {} PARSE_EXTRAS_DIR = THIS_DIR / "config" / "parse_extras" for langfile in PARSE_EXTRAS_DIR.glob("*.peuf"): lang = langfile.stem with ReadBlock(content=langfile, mode="verbatim") as datas: datas = datas.mydict("std nosep nonb") hms = [] for kind in ["hour", "minute", "second"]: pieces = [] for onepiece in " ".join(datas[kind]).split(" "): onepiece = onepiece.strip() if onepiece: pieces.append(onepiece) if len(pieces) != 3: raise ValueError(
# Group every ``.peuf`` settings file by the name of its parent folder.
hardspecs = defaultdict(list)

for peuffile in SETTINGS_DIR.walk("file::**.peuf"):
    hardspecs[peuffile.parent.name].append(peuffile)

# Freeze a deterministic processing order inside each folder.
hardspecs = {
    folder: sorted(files)
    for folder, files in hardspecs.items()
}

# Read each file once, keying the raw specs by (folder, file stem).
specs = {}

for dirname, peuffiles in hardspecs.items():
    for peuffile in peuffiles:
        with ReadBlock(content=peuffile, mode=MODE) as datas:
            specs[(dirname, peuffile.stem)] = datas.mydict(
                "tree std nosep nonb"
            )

specs = parsing.normalize_specs(specs)

# ------------------------ #
# -- UPDATING TEMPLATES -- #
# ------------------------ #

SUBSTEPS("Building templates.")
# ----------- # # -- TOOLS -- # # ----------- # def doubleslash(text): return text.replace("\\", "\\\\") # ---------------- # # -- EXTENSIONS -- # # ---------------- # data = ReadBlock( content=CONFIG_DIR / "extension.peuf", mode={"container": ":default:", "verbatim": ["extension", "comment"]} ) data.build() dicoview = data.recudict(nosep=True) data.remove() onetab = " " * 4 twotab = onetab * 2 TEXT_CLASS_EXT = [ """ #!/usr/bin/env python3 from collections import OrderedDict
# ----------- # # -- TOOLS -- # # ----------- # def doubleslash(text): return text.replace("\\", "\\\\") # ---------------- # # -- EXTENSIONS -- # # ---------------- # with ReadBlock(content=CONFIG_DIR / "extension.peuf", mode={ 'container': ":default:", 'verbatim': ["extension", "comment"] }) as data: dicoview = data.mydict("tree mini") onetab = ' ' * 4 twotab = onetab * 2 TEXT_CLASS_EXT = [ """ #!/usr/bin/env python3 from collections import OrderedDict # Note: the following variables were automatically built.
# The doc file lives next to the style file: "name.sty" -> "name[fr].tex".
TEX_FILE = STY_FILE.parent / (STY_FILE.stem + "[fr].tex")

STY_CONTENT = []
TEX_CONTENT = []

# Strip the numeric prefix "NN-" from the sty stem to find the peuf file.
# NOTE(review): should be a raw string, r"\d+-(.*)" — "\d" only works here
# because it is not a recognized string escape; left unchanged in this
# documentation-only pass.
PATTERN_FOR_PEUF = re.compile("\d+-(.*)")

match = re.search(PATTERN_FOR_PEUF, STY_FILE.stem)
PEUF_FILE = STY_FILE.parent / (match.group(1).strip() + ".peuf")

DECO = " " * 4

# -------------------------- #
# -- THE OPERATORS TO ADD -- #
# -------------------------- #

with ReadBlock(content=PEUF_FILE, mode='keyval:: =') as data:
    INFOS = data.mydict("std mini")

# ------------------- #
# -- SHORT SYMBOLS -- #
# ------------------- #

if 'short' in INFOS:
    STY_CONTENT.append("""
% Source for the short sysmbols.
% * https://tex.stackexchange.com/a/585267/6880
""".strip())

    # NOTE(review): the triple-quoted template below is left unclosed because
    # it continues past the end of this chunk.
    template_sty = """
\\newcommand\short{macroname}{{\mathrel{{\mathpalette\short{macroname}@{{.55}}}}}}
\\newcommand{{\short{macroname}@}}[2]{{%