def proc_inst(self, multi_inst, inst_dic):
    """to process a particular inst"""
    flow_root_dir = inst_dic["stage_dic"]["flow_root_dir"]
    stage_name = inst_dic["stage_dic"]["stage"]
    sub_stage_name = inst_dic["stage_dic"]["sub_stage"]
    if self.restore_dic:
        sub_stage_name = f".restore_{sub_stage_name}"
    dst_file = os.path.join(
        flow_root_dir, "scripts", stage_name, multi_inst,
        sub_stage_name) if multi_inst else os.path.join(
            flow_root_dir, "scripts", stage_name, sub_stage_name)
    inst_dic["tmp_dic"]["local"]["_multi_inst"] = multi_inst
    pcom.mkdir(LOG, os.path.dirname(dst_file))
    LOG.info(":: generating file %s ...", dst_file)
    if os.path.isfile(dst_file):
        # snapshot the previous script so the regenerated one can be compared
        shutil.copyfile(dst_file, f"{dst_file}.pre")
    pcom.ren_tempfile(LOG, inst_dic["tmp_file"], dst_file, inst_dic["tmp_dic"])
    if os.path.isfile(f"{dst_file}.pre"):
        pre_cur_same_flg = cmp(f"{dst_file}.pre", dst_file)
        if not pre_cur_same_flg:
            if self.cfm_yes or self.cfm_flg:
                LOG.warning(
                    "%s is modified, confirmed to re-run the stage",
                    os.path.join(multi_inst, sub_stage_name))
            else:
                LOG.info(
                    "%s is modified, "
                    "please confirm to re-run the stage and the following stages",
                    os.path.join(multi_inst, sub_stage_name))
                try:
                    pcom.cfm()
                    self.cfm_flg = True
                except BaseException:
                    # roll the script back if the user rejects or interrupts
                    shutil.copyfile(f"{dst_file}.pre", dst_file)
                    raise
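# A minimal sketch of the snapshot/compare pattern used in proc_inst, assuming
# "cmp" is filecmp.cmp imported at module level; the file names below are
# hypothetical:
#
#   import filecmp, shutil
#   shutil.copyfile("run.tcl", "run.tcl.pre")      # snapshot before re-render
#   # ... the template is re-rendered into run.tcl ...
#   if not filecmp.cmp("run.tcl.pre", "run.tcl"):
#       pass  # script changed: the user must confirm the re-run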
def update_blocks(self, blk_lst):
    """to obtain blocks input data from release directory"""
    env_boot.EnvBoot.__init__(self)
    self.boot_env()
    LOG.info(":: updating blocks ...")
    for data_src in list(
            pcom.find_iter(self.ced["PROJ_RELEASE_TO_BLK"], "*", True)) + list(
                pcom.find_iter(self.ced["PROJ_RELEASE_TO_BLK"], "*")):
        blk_name = os.path.basename(data_src).split(os.extsep)[0]
        if not blk_name:
            continue
        if blk_lst and blk_name not in blk_lst:
            continue
        blk_tar = data_src.replace(
            self.ced["PROJ_RELEASE_TO_BLK"],
            f"{self.ced['PROJ_ROOT']}{os.sep}{blk_name}")
        LOG.info("linking block files %s from %s", blk_tar, data_src)
        pcom.mkdir(LOG, os.path.dirname(blk_tar))
        if not os.path.exists(blk_tar):
            os.symlink(data_src, blk_tar)
        elif os.path.islink(blk_tar):
            # refresh an existing link so it points at the latest release data
            os.remove(blk_tar)
            os.symlink(data_src, blk_tar)
def fill_lib(self):
    """a function wrapper for inherited LibProc function"""
    env_boot.EnvBoot.__init__(self)
    self.boot_env()
    LOG.info(":: library mapping ...")
    pcom.mkdir(LOG, self.ced["PROJ_LIB"])
    self.gen_link_liblist(
        self.ced["PROJ_LIB"], self.dir_cfg_dic["lib"]["DEFAULT"])
def fill_blocks(self, blk_lst):
    """to fill blocks config dir after initialization"""
    env_boot.EnvBoot.__init__(self)
    self.boot_env()
    for blk_name in blk_lst:
        LOG.info(":: filling block %s ...", blk_name)
        os.environ["BLK_NAME"] = blk_name
        os.environ["BLK_ROOT"] = blk_root_dir = (
            f"{self.ced['PROJ_ROOT']}{os.sep}{blk_name}")
        pcom.mkdir(LOG, blk_root_dir)
        proj_cfg_dir = os.path.expandvars(
            settings.PROJ_CFG_DIR).rstrip(os.sep)
        blk_cfg_dir = os.path.expandvars(settings.BLK_CFG_DIR).rstrip(os.sep)
        for cfg_kw in self.cfg_dic:
            if cfg_kw in settings.BLK_CFG_UNFILL_LST:
                continue
            proj_cfg = f"{proj_cfg_dir}{os.sep}{cfg_kw}.cfg"
            blk_cfg = f"{blk_cfg_dir}{os.sep}{cfg_kw}.cfg"
            LOG.info("generating block config %s", blk_cfg)
            pcom.mkdir(LOG, os.path.dirname(blk_cfg))
            with open(proj_cfg) as pcf, open(blk_cfg, "w") as bcf:
                for line in pcom.gen_pcf_lst(pcf):
                    bcf.write(line)
        for dir_cfg_kw in self.dir_cfg_dic:
            if dir_cfg_kw == "lib":
                continue
            proj_dir_cfg = f"{proj_cfg_dir}{os.sep}{dir_cfg_kw}"
            blk_dir_cfg = f"{blk_cfg_dir}{os.sep}{dir_cfg_kw}{os.sep}DEFAULT"
            LOG.info("generating block config directory %s", blk_dir_cfg)
            if os.path.isdir(blk_dir_cfg):
                LOG.info(
                    "block level config directory %s already exists, "
                    "please confirm to overwrite it", blk_dir_cfg)
                pcom.cfm()
                shutil.rmtree(blk_dir_cfg, True)
            shutil.copytree(proj_dir_cfg, blk_dir_cfg)
            for blk_cfg in pcom.find_iter(
                    blk_dir_cfg, "*.cfg", cur_flg=True):
                # filter the copied project configs into block form in place
                with open(blk_cfg) as ocf:
                    blk_lines = pcom.gen_pcf_lst(ocf)
                with open(blk_cfg, "w") as ncf:
                    for line in blk_lines:
                        ncf.write(line)
        proj_share_dir = os.path.expandvars(
            settings.PROJ_SHARE).rstrip(os.sep)
        proj_blk_cmn_dir = f"{proj_share_dir}{os.sep}block_common"
        blk_cmn_dir = f"{blk_root_dir}{os.sep}block_common"
        if not os.path.isdir(proj_blk_cmn_dir):
            continue
        if os.path.isdir(blk_cmn_dir):
            LOG.info(
                "block level common directory %s already exists, "
                "please confirm to overwrite it", blk_cmn_dir)
            pcom.cfm()
            shutil.rmtree(blk_cmn_dir, True)
        shutil.copytree(proj_blk_cmn_dir, blk_cmn_dir)
def fill_proj(self):
    """to fill project config and template dir after initialization"""
    LOG.info(
        ":: filling project %s repo ...", self.repo_dic["init_proj_name"])
    proj_gi_file = f"{self.repo_dic['repo_dir']}{os.sep}.gitignore"
    with open(proj_gi_file, "w") as g_f:
        g_f.write(settings.GITIGNORE)
    proj_flg_file = f"{self.repo_dic['repo_dir']}{os.sep}{settings.FLG_FILE}"
    LOG.info("generating op project flag file %s", settings.FLG_FILE)
    with open(proj_flg_file, "w") as f_f:
        f_f.write(self.repo_dic["init_proj_name"])
    LOG.info("generating op project level configs and templates")
    env_boot.EnvBoot.__init__(self)
    suite_dict = dict(
        enumerate(os.listdir(os.path.expandvars(settings.OP_PROJ))))
    pcom.pp_list(suite_dict)
    index_str = (
        f"please input from {list(suite_dict)} "
        f"to specify the suite for project")
    while True:
        index_rsp = input(f"{index_str}{os.linesep}--> ")
        if index_rsp.isdigit() and int(index_rsp) in suite_dict:
            break
        LOG.warning("please input a correct index")
    suite_name = suite_dict[int(index_rsp)]
    suite_dst_dir = os.path.expandvars(settings.PROJ_SHARE)
    if os.path.isdir(suite_dst_dir):
        LOG.info(
            "project share dir %s already exists, continuing to initialize "
            "the project will overwrite the current project configs, "
            "templates and plugins", suite_dst_dir)
        pcom.cfm()
        shutil.rmtree(suite_dst_dir, True)
    shutil.copytree(f"{settings.OP_PROJ}{os.sep}{suite_name}", suite_dst_dir)
    self.boot_env()
    utils_dst_dir = self.ced.get("PROJ_UTILS", "")
    if not utils_dst_dir:
        LOG.error("project level proj.cfg env PROJ_UTILS is not defined")
        raise SystemExit()
    if os.path.isdir(utils_dst_dir):
        LOG.info(
            "project utils dir %s already exists, continuing to initialize "
            "the project will overwrite the current project utils",
            utils_dst_dir)
        pcom.cfm()
        shutil.rmtree(utils_dst_dir, True)
    shutil.copytree(settings.OP_UTILS, utils_dst_dir)
    for prex_dir_k in (
            self.cfg_dic["proj"]["prex_admin_dir"]
            if "prex_admin_dir" in self.cfg_dic["proj"] else {}):
        prex_dir = pcom.rd_cfg(
            self.cfg_dic["proj"], "prex_admin_dir", prex_dir_k, True)
        LOG.info("generating pre-set admin directory %s", prex_dir)
        pcom.mkdir(LOG, prex_dir)
    LOG.info(
        "please perform the git commit and git push actions "
        "after project and block items are settled down")
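# Example of the interactive suite selection in fill_proj (the listed indices
# are hypothetical):
#
#   please input from [0, 1, 2] to specify the suite for project
#   --> 1
#
# Any non-digit or out-of-range reply triggers the warning and re-prompts.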
def gen_json_file(self):
    """convert information into json file"""
    infor_dic = {"user": []}
    for mail_domain in settings.MAIL_DOMAIN_LST:
        for user_name in self.user_lst:
            infor_dic["user"].append(f"{user_name}{mail_domain}")
    pcom.mkdir(LOG, settings.AUTO_MAIL_DIR)
    file_name = os.path.join(settings.AUTO_MAIL_DIR, f"{self.fn_str}.json")
    infor_dic["title"] = self.title
    infor_dic["content"] = self.content
    with open(file_name, "w") as write_json:
        json.dump(infor_dic, write_json)
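# Shape of the generated mail json (values hypothetical); one address is
# emitted per user per domain in settings.MAIL_DOMAIN_LST:
#
#   {"user": ["joe@example.com"], "title": "...", "content": "..."}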
def check_env(self):
    """to check backup env"""
    user = os.environ.get("USER")
    host = platform.node()
    if user != backup_cfg.BACKUP_USER:
        LOG.critical(
            "sorry, you are not the user allowed to run project backup")
        raise SystemExit()
    if host != backup_cfg.BACKUP_HOST:
        LOG.critical(
            "sorry, you are not on the host allowed to run project backup")
        raise SystemExit()
    if not os.path.isdir(self.src_root):
        LOG.error("backup source directory %s is NA", self.src_root)
        raise SystemExit()
    pcom.mkdir(LOG, self.dst_root)
def proc_prex(self, stage_dic):
    """to process prex defined directory in proj.cfg"""
    prex_dir_sec = (
        self.cfg_dic["proj"]["prex_dir"]
        if "prex_dir" in self.cfg_dic["proj"] else {})
    prex_dir_dic = {}
    for prex_dir_k in prex_dir_sec:
        prex_dir = pcom.ren_tempstr(
            LOG, pcom.rd_sec(prex_dir_sec, prex_dir_k, True), stage_dic)
        pcom.mkdir(LOG, prex_dir)
        prex_dir_dic[prex_dir_k] = prex_dir
    for prex_dir_k, prex_dir_v in prex_dir_dic.items():
        if prex_dir_k in stage_dic:
            continue
        stage_dic[prex_dir_k] = prex_dir_v
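# A hypothetical proj.cfg snippet consumed by proc_prex; the "prex_dir"
# section name is real, but the key names and paths are illustrative, and
# "{{...}}" placeholders are rendered by pcom.ren_tempstr against stage_dic:
#
#   [prex_dir]
#   rpt_dir = {{flow_root_dir}}/rpt
#   log_dir = {{flow_root_dir}}/log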
def link_src_dst(self, src_file, dst_dir, src_base):
    """to perform source to destination link action"""
    dst_file = src_file.replace(
        src_base.rstrip(os.sep), dst_dir.rstrip(os.sep))
    if os.path.islink(dst_file):
        os.unlink(dst_file)
    elif os.path.isfile(dst_file):
        # a real file shadows the link target; keep it and only record it
        LOG.warning("dst file %s is not a link", dst_file)
        self.match_lst.append(dst_file)
        return
    else:
        pcom.mkdir(LOG, os.path.dirname(dst_file))
    os.symlink(src_file, dst_file)
    LOG.info("linking src file %s as dst file %s", src_file, dst_file)
    self.match_lst.append(dst_file)
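# Path mapping sketch for link_src_dst (paths are hypothetical): with
# src_base "/release/libs" and dst_dir "/proj/lib", the source file
# "/release/libs/std/a.lib" is linked as "/proj/lib/std/a.lib", since the
# destination is derived by a simple prefix replacement.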
def merge_files(self, dcmp):
    """launch diff for different files"""
    proj_cfg_str = f"{os.sep}share{os.sep}"
    for right_only in dcmp.right_only:
        if right_only.startswith("."):
            continue
        LOG.info("only in destination dir (%s) : %s", dcmp.right, right_only)
    for left_only in dcmp.left_only:
        if left_only.startswith("."):
            continue
        LOG.info("only in source dir (%s) : %s", dcmp.left, left_only)
        choice = input("Sync to destination dir? [y/N]: ").lower()
        src = f"{dcmp.left}{os.sep}{left_only}"
        dst = f"{dcmp.right}{os.sep}{left_only}"
        if choice and choice[0] == "y":
            if os.path.isdir(src):
                shutil.copytree(src, dst)
            elif os.path.isfile(src):
                pcom.mkdir(LOG, os.path.dirname(dst))
                if proj_cfg_str in dcmp.left and proj_cfg_str not in dcmp.right:
                    # project share configs are filtered while being copied
                    with open(src) as src_f, open(dst, "w") as dst_f:
                        for line in pcom.gen_pcf_lst(src_f):
                            dst_f.write(line)
                else:
                    shutil.copyfile(src, dst)
    for name in dcmp.diff_files:
        if name.startswith("."):
            continue
        LOG.info("different file %s found in %s", name, dcmp.left)
        src = f"{dcmp.left}{os.sep}{name}"
        dst = f"{dcmp.right}{os.sep}{name}"
        if proj_cfg_str in dcmp.left and proj_cfg_str not in dcmp.right:
            ntf = tempfile.NamedTemporaryFile("w")
            with open(src) as src_f:
                for line in pcom.gen_pcf_lst(src_f):
                    ntf.write(line)
            # seek flushes the buffer so cmp sees the full rendered content
            ntf.seek(0)
            if not cmp(ntf.name, dst):
                diff_str = f"{self.tool} {ntf.name} {dst}"
                subprocess.run(diff_str, shell=True)
            ntf.close()
        else:
            diff_str = f"{self.tool} {src} {dst}"
            subprocess.run(diff_str, shell=True)
    for sub_dcmp in dcmp.subdirs.values():
        self.merge_files(sub_dcmp)
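# merge_files expects a dircmp-style object; judging by the attributes used
# (left/right, left_only/right_only, diff_files, subdirs), filecmp.dircmp
# fits. A hypothetical invocation:
#
#   from filecmp import dircmp
#   self.merge_files(dircmp("/path/to/src/share", "/path/to/dst/share"))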
def proc_release(self, flow_release_lst):
    """to process release info"""
    relz_blk_json_dir = f"{self.ced['PROJ_RELEASE_TO_TOP']}{os.sep}.json"
    pcom.mkdir(LOG, relz_blk_json_dir)
    for flow_release in flow_release_lst:
        flow_release = flow_release.strip("""'":""")
        err_log_str = (
            "begin sub-stage format is incorrect, "
            "it should be <flow>::<stage>:<sub_stage>")
        flow_relz_dic = {}
        try:
            flow_relz_dic["flow"], stage_str = flow_release.split("::")
            flow_relz_dic["stage"], flow_relz_dic["sub_stage"] = (
                stage_str.split(":"))
        except ValueError:
            LOG.error(err_log_str)
            raise SystemExit()
        if not all(bool(c_c) for c_c in flow_relz_dic.values()):
            LOG.error(err_log_str)
            raise SystemExit()
        _, ver_dic, err_lst = self.exp_stages_misc(
            [], {}, [], self.cfg_dic.get("flow", {}), flow_relz_dic["flow"])
        if err_lst:
            LOG.error(
                "flow %s has the following errors in flow.cfg",
                flow_relz_dic["flow"])
            pcom.pp_list(err_lst)
            raise SystemExit()
        flow_relz_dic["user"] = self.ced["USER"]
        flow_relz_dic["block"] = self.ced["BLK_NAME"]
        flow_relz_dic["time"] = self.ced["DATETIME"].isoformat()
        flow_relz_dic["rtl_netlist"] = ver_dic.get("rtl_netlist", "")
        flow_relz_dic["proj_root"] = self.ced["PROJ_ROOT"]
        file_name = pcom.re_str("_".join(
            [flow_relz_dic["user"], flow_relz_dic["block"], flow_release]))
        relz_json_file = f"{relz_blk_json_dir}{os.sep}{file_name}.json"
        if os.path.isfile(relz_json_file):
            if self.cfm_yes:
                LOG.info(
                    "flow %s already released, confirmed to overwrite",
                    flow_release)
            else:
                LOG.info(
                    "flow %s already released, please confirm to overwrite",
                    flow_release)
                pcom.cfm()
        with open(relz_json_file, "w") as rjf:
            json.dump(flow_relz_dic, rjf)
        LOG.info("flow %s released", flow_release)
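# The release string format enforced above is <flow>::<stage>:<sub_stage>;
# a quick parsing sketch (the example value is hypothetical):
#
#   flow_release = "rtl::syn:init_syn"
#   flow, stage_str = flow_release.split("::")   # "rtl", "syn:init_syn"
#   stage, sub_stage = stage_str.split(":")      # "syn", "init_syn"
#
# An empty field (e.g. "rtl::syn:") fails the all(...) check and aborts.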
def release(self):
    """to generate release directory"""
    env_boot.EnvBoot.__init__(self)
    self.boot_env()
    LOG.info(":: release content generating ...")
    for relz_json_file in pcom.find_iter(
            f"{self.ced['PROJ_RELEASE_TO_TOP']}{os.sep}.json", "*.json"):
        LOG.info("generating release of %s", relz_json_file)
        with open(relz_json_file) as rjf:
            relz_dic = json.load(rjf)
        relz_path = os.sep.join([
            relz_dic["proj_root"], relz_dic["block"], "run",
            relz_dic["rtl_netlist"], relz_dic["flow"]])
        relz_file_lst = list(pcom.find_iter(relz_path, "*"))
        for relz_k in self.cfg_dic["proj"]["release"]:
            for relz_v in pcom.rd_cfg(
                    self.cfg_dic["proj"], "release", relz_k):
                relz_pattern = (
                    f"{relz_path}{os.sep}*{relz_dic['stage']}*"
                    f"{os.path.splitext(relz_dic['sub_stage'])[0]}*{relz_v}")
                match_relz_lst = fnmatch.filter(relz_file_lst, relz_pattern)
                if not match_relz_lst:
                    LOG.warning("no %s %s files found", relz_k, relz_v)
                else:
                    LOG.info("copying %s %s files", relz_k, relz_v)
                for relz_file in match_relz_lst:
                    dst_dir = os.sep.join([
                        self.ced["PROJ_RELEASE_TO_TOP"], relz_dic["block"],
                        self.ced["DATE"], relz_k])
                    pcom.mkdir(LOG, dst_dir)
                    if relz_v.endswith("/*"):
                        # keep the sub-directory structure for dir patterns
                        dst_file = re.sub(
                            rf".*?(?={relz_v[:-2]})",
                            f"{dst_dir}{os.sep}{relz_dic['block']}",
                            relz_file)
                    else:
                        dst_file = (
                            f"{dst_dir}{os.sep}{relz_dic['block']}{relz_v}")
                    pcom.mkdir(LOG, os.path.dirname(dst_file))
                    shutil.copyfile(relz_file, dst_file)
        os.remove(relz_json_file)
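# Sketch of the pattern matching used in release (values hypothetical): with
# stage "syn", sub_stage "init_syn.tcl" and a release value ".v", the pattern
# becomes "<relz_path>/*syn*init_syn*.v", and fnmatch.filter() applies it to
# every file found under the released flow directory.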
def backup_date(self):
    """to backup date auto generation part of project"""
    LOG.info(":: backup date part")
    date_root = os.path.join(
        self.dst_root, dt.datetime.now().strftime("%Y_%m_%d"))
    pcom.mkdir(LOG, date_root)
    src_work_dir = f"{self.src_root}{os.sep}WORK"
    for user_path in pcom.find_iter(src_work_dir, "*", True, True):
        for block_path in pcom.find_iter(
                user_path, "*", True, True, ["share"]):
            can_lst = list(pcom.find_iter(block_path, "*"))
            for date_pat in backup_cfg.BACKUP_CFG_DATE_LST:
                glob_lst = fnmatch.filter(
                    can_lst, os.path.join(block_path, date_pat))
                glob_tup_lst = [
                    (os.path.getmtime(c_c), c_c) for c_c in glob_lst]
                if glob_tup_lst:
                    # only the most recently modified match is backed up
                    glob_max_tup = max(glob_tup_lst)
                    src_file = glob_max_tup[-1]
                    dst_file = src_file.replace(self.src_root, date_root)
                    LOG.info("backup from %s to %s", src_file, dst_file)
                    pcom.mkdir(LOG, os.path.dirname(dst_file))
                    shutil.copyfile(src_file, dst_file)
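# backup_date keeps only the newest match per date pattern by taking the max
# of (mtime, path) tuples, which compare on mtime first; a standalone sketch
# with hypothetical paths:
#
#   import os
#   glob_lst = ["run/2024_01_01.log", "run/2024_02_01.log"]
#   latest = max((os.path.getmtime(p), p) for p in glob_lst)[-1]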
def proc_inst_ex(self, multi_inst, inst_dic):
    """to execute a particular inst"""
    flow_root_dir = inst_dic["stage_dic"]["flow_root_dir"]
    flow_name = inst_dic["stage_dic"]["flow"]
    l_flow = inst_dic["stage_dic"]["l_flow"]
    stage_name = inst_dic["stage_dic"]["stage"]
    sub_stage_name = inst_dic["stage_dic"]["sub_stage"]
    local_dic = inst_dic["tmp_dic"]["local"]
    ver_dic = inst_dic["tmp_dic"]["ver"]
    pre_file_mt = inst_dic["pre_file_mt"]
    force_flg = inst_dic["force_flg"]
    file_mt = 0.0
    if self.restore_dic:
        sub_stage_name = f".restore_{sub_stage_name}"
    dst_file = os.path.join(
        flow_root_dir, "scripts", stage_name, multi_inst,
        sub_stage_name) if multi_inst else os.path.join(
            flow_root_dir, "scripts", stage_name, sub_stage_name)
    dst_op_file = os.path.join(
        flow_root_dir, "sum", stage_name, multi_inst,
        f"{sub_stage_name}.op") if multi_inst else os.path.join(
            flow_root_dir, "sum", stage_name, f"{sub_stage_name}.op")
    pcom.mkdir(LOG, os.path.dirname(dst_op_file))
    dst_run_file = f"{dst_op_file}.run"
    if "_exec_cmd" in local_dic:
        self._gen_job_tcl(dst_op_file, local_dic)
    tool_str = local_dic.get("_exec_tool", "")
    jcn_str = (
        local_dic.get("_job_restore_cpu_number", "")
        if self.restore_dic else local_dic.get("_job_cpu_number", ""))
    jr_str = (
        local_dic.get("_job_restore_resource", "")
        if self.restore_dic else local_dic.get("_job_resource", ""))
    jc_str = (
        f"{local_dic.get('_job_cmd', '')} {local_dic.get('_job_queue', '')} "
        f"{jcn_str} {jr_str}" if "_job_cmd" in local_dic else "")
    jn_str = (
        f"{self.ced['USER']}:::{self.ced['PROJ_NAME']}:::{self.ced['BLK_NAME']}:::"
        f"{flow_name}::{stage_name}:{sub_stage_name}:{multi_inst}")
    job_str = f"{jc_str} -J '{jn_str}'" if jc_str else ""
    cmd_str = local_dic.get("_exec_cmd", "")
    with open(dst_op_file, "w") as drf:
        drf.write(
            f"{tool_str}{os.linesep}{cmd_str} {dst_file}{os.linesep}")
    trash_dir = f"{os.path.dirname(dst_op_file)}{os.sep}.trash"
    pcom.mkdir(LOG, trash_dir)
    with open(dst_run_file, "w") as dbf:
        dbf.write(
            f"{job_str} xterm -title '{dst_file}' -e 'cd {trash_dir}; "
            f"source {dst_op_file} | tee {dst_run_file}.log; "
            f"touch {dst_run_file}.stat'{os.linesep}")
    err_kw_lst = pcom.rd_cfg(
        self.cfg_dic.get("filter", {}), stage_name, "error_keywords_exp")
    wav_kw_lst = pcom.rd_cfg(
        self.cfg_dic.get("filter", {}), stage_name, "waiver_keywords_exp")
    fin_str = self.ced.get("FIN_STR", "")
    filter_dic = {
        "err_kw_lst": err_kw_lst, "wav_kw_lst": wav_kw_lst,
        "fin_str": fin_str}
    if self.run_flg:
        file_mt = os.path.getmtime(dst_file)
        f_flg = file_mt <= pre_file_mt
        if force_flg:
            f_flg = True
        if f_flg:
            # update the timestamp to fit the auto-skip feature;
            # following stages have to be forced to run
            os.utime(dst_file)
            file_mt = os.path.getmtime(dst_file)
        file_p = file_proc.FileProc(
            {"src": dst_file, "file": dst_run_file, "l_flow": l_flow,
             "flow": flow_name, "stage": stage_name,
             "sub_stage": sub_stage_name, "multi_inst": multi_inst,
             "filter_dic": filter_dic, "flow_root_dir": flow_root_dir,
             "ced": self.ced, "ver_dic": ver_dic, "jn_str": jn_str}, f_flg)
        file_p.proc_set_log_par_env(inst_dic["tmp_dic"])
        p_run = file_p.proc_run_file()
        if p_run is True:
            return p_run
    return file_mt
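# Job names submitted by proc_inst_ex follow a fixed layout; a hypothetical
# instance of jn_str:
#
#   joe:::proj_a:::blk_top:::rtl::syn:init_syn:inst0
#   (<user>:::<proj>:::<block>:::<flow>::<stage>:<sub_stage>:<multi_inst>)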
def gen_link_liblist(self, dst_root, lib_dir_cfg_dic):
    """to process project or block lib mapping links"""
    LOG.info(":: library mapping of files linking ...")
    liblist_var_dic = collections.defaultdict(dict)
    for process, process_sec in lib_dir_cfg_dic.get("process", {}).items():
        if process == "DEFAULT":
            continue
        for lib_type, lib_type_sec in lib_dir_cfg_dic.get(
                "search", {}).items():
            if lib_type == "DEFAULT":
                continue
            can_root = pcom.ren_tempstr(
                LOG, pcom.rd_sec(lib_type_sec, "src", True), process_sec)
            if not can_root:
                LOG.warning(
                    "library mapping search root path option 'src' of %s "
                    "is not defined in lib/search.cfg, skipped", lib_type)
                continue
            if not os.path.isdir(can_root):
                LOG.warning(
                    "library mapping search root path %s is NA", can_root)
                continue
            can_tar = pcom.rd_sec(lib_type_sec, "dst", True)
            if not can_tar:
                LOG.warning(
                    "library mapping destination directory option 'dst' of %s "
                    "is not defined in lib/search.cfg, skipped", lib_type)
                continue
            LOG.info("library mapping for part %s", lib_type)
            pcom.mkdir(LOG, can_tar)
            can_ignore_lst = pcom.rd_sec(lib_type_sec, "ignore")
            can_lst = list(
                pcom.find_iter(can_root, "*", i_lst=can_ignore_lst))
            for lib_type_k in lib_type_sec:
                if not lib_type_k.startswith("pattern"):
                    continue
                pattern_search = pcom.rd_sec(lib_type_sec, lib_type_k, True)
                var_set = set(re.findall(r"{{(.*?)}}", pattern_search))
                for var_dic in pcom.prod_vs_iter(var_set, process_sec):
                    pattern_search_str = pcom.ren_tempstr(
                        LOG, pattern_search, var_dic)
                    for match_file in fnmatch.filter(
                            can_lst,
                            f"{can_root}{os.sep}{pattern_search_str}"):
                        self.link_src_dst(match_file, can_tar, can_root)
        match_lst_file = f"{dst_root}{os.sep}{process}.match_lst"
        LOG.info("generating library map list file %s", match_lst_file)
        with open(match_lst_file, "w") as mlf:
            json.dump(self.match_lst, mlf, indent=4)
        liblist_dir = f"{dst_root}{os.sep}liblist"
        pcom.mkdir(LOG, liblist_dir)
        var_name_line_dic = {}
        liblist_cfg = lib_dir_cfg_dic.get("liblist", {})
        try:
            custom_dic = {
                c_k: pcom.rd_cfg(liblist_cfg, f"custom:{process}", c_k)
                for c_k in liblist_cfg[f"custom:{process}"]}
        except KeyError:
            custom_dic = {}
        if "var" not in liblist_cfg:
            LOG.error("var section is NA in lib/liblist.cfg")
            raise SystemExit()
        for var_name in liblist_cfg["var"]:
            match_file_lst = []
            match_pattern_lst = (
                custom_dic[var_name] if var_name in custom_dic
                else pcom.rd_cfg(liblist_cfg, "var", var_name))
            for match_pattern in match_pattern_lst:
                var_set = set(re.findall(r"{{(.*?)}}", match_pattern))
                for var_dic in pcom.prod_vs_iter(var_set, process_sec):
                    # custom patterns are taken literally, while var patterns
                    # are matched against the linked file list
                    fnmatch_lst = (
                        [pcom.ren_tempstr(LOG, match_pattern, var_dic)]
                        if var_name in custom_dic else fnmatch.filter(
                            self.match_lst,
                            pcom.ren_tempstr(LOG, match_pattern, var_dic)))
                    match_file_lst.extend(fnmatch_lst)
            var_name_line_dic[var_name] = match_file_lst
        LOG.info("generating library liblist files in %s", liblist_dir)
        # file generation and liblist dic generation for templates
        tcl_line_lst = []
        for var_name, match_file_lst in var_name_line_dic.items():
            liblist_var_dic[process][var_name] = f" \\{os.linesep} ".join(
                match_file_lst)
            tcl_value_str = f" \\{os.linesep}{' '*(6+len(var_name))}".join(
                match_file_lst)
            tcl_line_lst.append(f'set {var_name} "{tcl_value_str}"')
        with open(f"{liblist_dir}{os.sep}{process}.tcl", "w") as lltf:
            lltf.write(os.linesep.join(tcl_line_lst))
        self.match_lst = []
    with open(f"{liblist_dir}{os.sep}liblist.json", "w") as lljf:
        json.dump(liblist_var_dic, lljf, indent=4)
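# Shape of a generated liblist tcl file (values hypothetical); each var from
# lib/liblist.cfg becomes one "set" line, with the matched files joined by
# backslash continuations aligned under the opening quote:
#
#   set STD_LEF "/proj/lib/std_a.lef \
#                /proj/lib/std_b.lef"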