Example #1
 def get_multi_inst_lst(self, flow_dic):
     """to get multi_inst_lst by reading the flow config file"""
     flow_name = flow_dic.get('flow', '*')
     stage_name = flow_dic.get('stage', '*')
     sub_stage_name = flow_dic.get('sub_stage', '*')
     # read config for multi-instance names and return them
     return (pcom.rd_cfg(
         self.dir_cfg_dic.get("flow", {}).get(flow_name,
                                              {}).get(stage_name, {}),
         sub_stage_name, "_multi_inst"))
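
Below is a minimal standalone sketch (not part of the tool) of the lookup pattern used above: chained dict.get calls with {} fallbacks walk the flow/stage tree without raising KeyError. The nested dict and the trailing .get calls are invented stand-ins for the parsed config and for pcom.rd_cfg.

 dir_cfg_dic = {
     "flow": {"synth": {"init": {"build": {"_multi_inst": ["corner0", "corner1"]}}}}}
 flow_dic = {"flow": "synth", "stage": "init", "sub_stage": "build"}
 flow_name = flow_dic.get("flow", "*")
 stage_name = flow_dic.get("stage", "*")
 sub_stage_name = flow_dic.get("sub_stage", "*")
 # each .get falls back to {} so a missing flow or stage yields an empty result
 stage_cfg = dir_cfg_dic.get("flow", {}).get(flow_name, {}).get(stage_name, {})
 print(stage_cfg.get(sub_stage_name, {}).get("_multi_inst", []))  # ['corner0', 'corner1']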
Example #2
 def fill_proj(self):
     """to fill project config and template dir after initialization"""
     LOG.info(":: filling project %s repo ...",
              self.repo_dic["init_proj_name"])
     proj_gi_file = f"{self.repo_dic['repo_dir']}{os.sep}.gitignore"
     with open(proj_gi_file, "w") as g_f:
         g_f.write(settings.GITIGNORE)
     proj_flg_file = f"{self.repo_dic['repo_dir']}{os.sep}{settings.FLG_FILE}"
     LOG.info("generating op project flag file %s", settings.FLG_FILE)
     with open(proj_flg_file, "w") as f_f:
         f_f.write(self.repo_dic["init_proj_name"])
     LOG.info("generating op project level configs and templates")
     env_boot.EnvBoot.__init__(self)
     suite_dict = dict(
         enumerate(os.listdir(os.path.expandvars(settings.OP_PROJ))))
     pcom.pp_list(suite_dict)
     index_str = f"please input from {list(suite_dict)} to specify the suite for project"
     while True:
         index_rsp = input(f"{index_str}{os.linesep}--> ")
         if index_rsp.isdigit() and int(index_rsp) in suite_dict:
             break
         else:
             LOG.warning("please input a correct index")
     suite_name = suite_dict[int(index_rsp)]
     suite_dst_dir = os.path.expandvars(settings.PROJ_SHARE)
     if os.path.isdir(suite_dst_dir):
         LOG.info(
             "project share dir %s already exists, continuing to initialize the project will "
             "overwrite the current project configs, templates and plugins",
             suite_dst_dir)
         pcom.cfm()
         shutil.rmtree(suite_dst_dir, True)
     shutil.copytree(f"{settings.OP_PROJ}{os.sep}{suite_name}",
                     suite_dst_dir)
     self.boot_env()
     utils_dst_dir = self.ced.get("PROJ_UTILS", "")
     if not utils_dst_dir:
         LOG.error("project level proj.cfg env PROJ_UTILS is not defined")
         raise SystemExit()
     if os.path.isdir(utils_dst_dir):
         LOG.info(
             "project utils dir %s already exists, continuing to initialize the project will "
             "overwrite the current project utils", utils_dst_dir)
         pcom.cfm()
         shutil.rmtree(utils_dst_dir, True)
     shutil.copytree(settings.OP_UTILS, utils_dst_dir)
     for prex_dir_k in (self.cfg_dic["proj"]["prex_admin_dir"] if
                        "prex_admin_dir" in self.cfg_dic["proj"] else {}):
         prex_dir = pcom.rd_cfg(self.cfg_dic["proj"], "prex_admin_dir",
                                prex_dir_k, True)
         LOG.info("generating pre-set admin directory %s", prex_dir)
         pcom.mkdir(LOG, prex_dir)
     LOG.info("please perform the git commit and git push actions "
              "after project and block items are settled down")
Example #3
 def exp_stages_misc(self, s_lst, s_ver_dic, s_err_lst, cfg, sec, p_s=None):
     """to expand stages and misc details of flow config"""
     if sec == "" or p_s == "":
         v_rtl = s_ver_dic.get("rtl", "")
         v_netlist = s_ver_dic.get("netlist", "")
         if not v_rtl and not v_netlist:
             s_err_lst.append("!!!! rtl and netlist version are both NA")
         v_rn = f"r{v_rtl}_n{v_netlist}"
         s_ver_dic['rtl_netlist'] = v_rn
         return s_lst, s_ver_dic, s_err_lst
     if not os.path.isdir(f"{self.ced['BLK_CFG_FLOW']}{os.sep}{sec}"):
         s_err_lst.insert(0, f"!!!! flow {sec} not initialized")
     pre_flow = pcom.rd_cfg(cfg, sec, "pre_flow", True)
     pre_stage = pcom.rd_cfg(cfg, sec, "pre_stage", True)
     stage_lst = pcom.rd_cfg(cfg, sec, "stages")
     for opt_k, opt_v in cfg[sec].items():
         if opt_k.startswith("VERSION_"):
             ver_key = opt_k.replace("VERSION_", "").lower()
             if not opt_v:
                 s_ver_dic[ver_key] = ""
             else:
                 key_dir = f"{self.ced['BLK_ROOT']}{os.sep}{ver_key}{os.sep}{opt_v}"
                 if not os.path.isdir(key_dir):
                     s_err_lst.append(f"!!!! {ver_key} version dir {key_dir} is NA")
                 elif not os.listdir(key_dir):
                     s_err_lst.append(f"!!!! {ver_key} version dir {key_dir} is empty")
                 if not s_ver_dic.get(ver_key):
                     s_ver_dic[ver_key] = opt_v
         if opt_k == "LIB" and not s_ver_dic.get("LIB"):
             s_ver_dic["LIB"] = opt_v
     if p_s:
         if p_s not in stage_lst:
             s_err_lst.append(f"!!!! stage {p_s} not in flow {sec}")
         else:
             stage_lst = stage_lst[:stage_lst.index(p_s)+1]
     stage_lst = [
         {"flow": sec, "stage": c_c.split(":")[0].strip(),
          "sub_stage": c_c.split(":")[-1].strip()}
         for c_c in stage_lst if c_c]
     stage_lst.extend(s_lst)
     return self.exp_stages_misc(stage_lst, s_ver_dic, s_err_lst, cfg, pre_flow, pre_stage)
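
A minimal sketch of the recursion above, with a made-up plain-dict config standing in for flow.cfg: each flow contributes its stages (optionally cut at pre_stage), the parent flow's stages are prepended, and a flow with no pre_flow terminates the recursion.

 def expand(stages, cfg, flow, stop_stage=None):
     """Prepend parent flow stages until a flow has no parent."""
     if not flow:
         return stages
     stage_lst = cfg[flow]["stages"]
     if stop_stage in stage_lst:
         stage_lst = stage_lst[:stage_lst.index(stop_stage) + 1]
     stage_lst = [{"flow": flow, "stage": c_c} for c_c in stage_lst]
     stage_lst.extend(stages)
     return expand(stage_lst, cfg, cfg[flow].get("pre_flow", ""),
                   cfg[flow].get("pre_stage"))

 cfg = {
     "synth": {"stages": ["init", "compile"], "pre_flow": ""},
     "pnr": {"stages": ["floorplan", "route"], "pre_flow": "synth",
             "pre_stage": "compile"}}
 print(expand([], cfg, "pnr"))
 # synth stages come first, then pnr stages, each tagged with its flow name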
Example #4
 def list_flow(self):
     """to list all current block available flows"""
     if not self.blk_flg:
         LOG.error("it's not in a block directory, please cd into one")
         raise SystemExit()
     LOG.info(":: all current available flows of block")
     lf_dic = {}
     for sec_k in self.cfg_dic.get("flow", {}):
         stage_dic_lst, ver_dic, err_lst = self.exp_stages_misc(
             [], {}, [], self.cfg_dic.get("flow", {}), sec_k)
         v_rn = ver_dic.get('rtl_netlist')
         lf_lst = ["---------- flow stages ----------"]
         lf_time_lst = [0]
         for flow_dic in stage_dic_lst:
             flow_name = flow_dic.get("flow", "")
             stage_name = flow_dic.get("stage", "")
             sub_stage_name = flow_dic.get("sub_stage", "")
             lf_lst.append(f"     {flow_name}::{stage_name}:{sub_stage_name}")
             run_dir = self.ced['BLK_RUN']
             multi_inst_lst = [c_c.strip() for c_c in pcom.rd_cfg(
                 self.dir_cfg_dic.get("flow", {}).get(flow_name, {}).get(stage_name, {}),
                 sub_stage_name, '_multi_inst') if c_c.strip()]
             if multi_inst_lst:
                 tmp_list = []
                 for multi_inst in multi_inst_lst:
                     dst_file = os.path.join(
                         run_dir, v_rn, flow_name, "scripts",
                         stage_name, multi_inst, sub_stage_name)
                     tmp_list.append(
                         os.path.getmtime(dst_file) if os.path.exists(dst_file) else 0)
                 lf_time_lst.append(max(tmp_list))
             else:
                 dst_file = os.path.join(
                     run_dir, v_rn, flow_name, "scripts", stage_name, sub_stage_name)
                 lf_time_lst.append(
                     os.path.getmtime(dst_file) if os.path.exists(dst_file) else 0)
         max_time_index = lf_time_lst.index(max(lf_time_lst))
         if max_time_index:
             lf_lst[max_time_index] = '(->) ' + lf_lst[max_time_index].strip()
         lf_lst.append("---------- flow misc ----------")
         lf_ver_dic = {
             k_k: v_v for k_k, v_v in ver_dic.items()
             if k_k != "LIB" and k_k != "rtl_netlist" and v_v}
         lf_lst.append({"LIB": ver_dic.get("LIB", ""), "VERSIONS": lf_ver_dic})
         if err_lst:
             err_lst.insert(0, "---------- DEFECT ----------")
             err_lst.extend(lf_lst)
             lf_dic[f"{sec_k} (X)"] = err_lst
         else:
             lf_dic[sec_k] = lf_lst
     pcom.pp_list(lf_dic)
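
A minimal sketch (invented paths) of the "(->)" marker logic above: one modification time is collected per stage script, 0 when the file does not exist, and the entry with the largest timestamp is flagged as the most recently generated stage.

 import os

 entries = ["---------- flow stages ----------", "     synth::init:build", "     pnr::route:opt"]
 script_files = ["/tmp/build.tcl", "/tmp/opt.tcl"]  # hypothetical stage scripts
 times = [0]  # slot 0 lines up with the banner so list indexes stay aligned
 for dst_file in script_files:
     times.append(os.path.getmtime(dst_file) if os.path.exists(dst_file) else 0)
 latest = times.index(max(times))
 if latest:  # index 0 means no stage script exists yet
     entries[latest] = "(->) " + entries[latest].strip()
 print(entries)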
Example #5
 def release(self):
     """to generate release directory"""
     env_boot.EnvBoot.__init__(self)
     self.boot_env()
     LOG.info(":: release content generating ...")
     for relz_json_file in pcom.find_iter(
             f"{self.ced['PROJ_RELEASE_TO_TOP']}{os.sep}.json", "*.json"):
         LOG.info("generating release of %s", relz_json_file)
         with open(relz_json_file) as rjf:
             relz_dic = json.load(rjf)
         relz_path = os.sep.join([
             relz_dic["proj_root"], relz_dic["block"], "run",
             relz_dic["rtl_netlist"], relz_dic["flow"]
         ])
         relz_file_lst = list(pcom.find_iter(relz_path, "*"))
         for relz_k in self.cfg_dic["proj"]["release"]:
             for relz_v in pcom.rd_cfg(self.cfg_dic["proj"], "release",
                                       relz_k):
                 relz_pattern = (
                     f"{relz_path}{os.sep}*{relz_dic['stage']}*"
                     f"{os.path.splitext(relz_dic['sub_stage'])[0]}*{relz_v}"
                 )
                 match_relz_lst = fnmatch.filter(relz_file_lst,
                                                 relz_pattern)
                 if not match_relz_lst:
                     LOG.warning("no %s %s files found", relz_k, relz_v)
                 else:
                     LOG.info("copying %s %s files", relz_k, relz_v)
                 for relz_file in match_relz_lst:
                     dst_dir = os.sep.join([
                         self.ced["PROJ_RELEASE_TO_TOP"], relz_dic["block"],
                         self.ced["DATE"], relz_k
                     ])
                     pcom.mkdir(LOG, dst_dir)
                     if relz_v.endswith("/*"):
                         # count=1 avoids a second, empty-match substitution on Python 3.7+
                         dst_file = re.sub(
                             rf".*?(?={relz_v[:-2]})",
                             f"{dst_dir}{os.sep}{relz_dic['block']}",
                             relz_file, count=1)
                     else:
                         dst_file = f"{dst_dir}{os.sep}{relz_dic['block']}{relz_v}"
                     pcom.mkdir(LOG, os.path.dirname(dst_file))
                     shutil.copyfile(relz_file, dst_file)
         os.remove(relz_json_file)
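
A minimal sketch (invented pattern and paths) of the path rewrite above: when a release value ends with "/*", the lookahead keeps the tail named by the pattern and re.sub swaps everything before it for the destination prefix; count=1 mirrors the guard added above.

 import re

 relz_v = "reports/*"                        # hypothetical value from the release section
 relz_file = "/proj/blk/run/r1_n2/pnr/reports/timing.rpt"
 dst_prefix = "/release/blk/20240101/rpt/"   # stands in for the dst_dir/block prefix
 dst_file = re.sub(rf".*?(?={relz_v[:-2]})", dst_prefix, relz_file, count=1)
 print(dst_file)  # /release/blk/20240101/rpt/reports/timing.rpt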
Example #6
 def init(self, init_lst):
     """to perform flow initialization"""
     if not self.blk_flg:
         LOG.error("it's not in a block directory, please cd into one")
         raise SystemExit()
     for init_name in init_lst:
         LOG.info(":: initializing flow %s directories ...", init_name)
         parent_flow = pcom.rd_cfg(self.cfg_dic.get("flow", {}), init_name, "pre_flow", True)
         if parent_flow:
             src_dir = f"{self.ced['BLK_CFG_FLOW']}{os.sep}{parent_flow}"
             LOG.info("inheriting from %s", parent_flow)
         else:
             src_dir = f"{self.ced['PROJ_SHARE_CFG']}{os.sep}flow"
             LOG.info("inheriting from project share")
         dst_dir = f"{self.ced['BLK_CFG_FLOW']}{os.sep}{init_name}"
         if not os.path.isdir(src_dir):
             LOG.error("parent flow directory %s is NA", src_dir)
             raise SystemExit()
         if os.path.isdir(dst_dir):
             if self.cfm_yes:
                 LOG.warning(
                     "flow directory %s to initialize already exists, "
                     "confirmed to overwrite the previous flow config and plugins", dst_dir)
             else:
                 LOG.info(
                     "flow directory %s to initialize already exists, "
                     "please confirm to overwrite the previous flow config and plugins", dst_dir)
                 pcom.cfm()
             shutil.rmtree(dst_dir, True)
         shutil.copytree(src_dir, dst_dir)
         if not parent_flow:
             for blk_cfg in pcom.find_iter(dst_dir, "*.cfg", cur_flg=True):
                 with open(blk_cfg) as ocf:
                     blk_lines = pcom.gen_pcf_lst(ocf)
                 with open(blk_cfg, "w") as ncf:
                     for line in blk_lines:
                         ncf.write(line)
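
A minimal sketch (hypothetical paths, plain input() instead of pcom.cfm) of the inherit-and-copy step above: validate the source tree, confirm and remove a stale destination, then copy the whole tree.

 import os
 import shutil

 def init_dir(src_dir, dst_dir):
     """Copy src_dir to dst_dir, replacing dst_dir if it already exists."""
     if not os.path.isdir(src_dir):
         raise SystemExit(f"parent flow directory {src_dir} is NA")
     if os.path.isdir(dst_dir):
         input(f"{dst_dir} already exists, press Enter to overwrite ")
         shutil.rmtree(dst_dir, ignore_errors=True)
     shutil.copytree(src_dir, dst_dir)

 if __name__ == "__main__":
     import tempfile
     root = tempfile.mkdtemp()
     os.makedirs(os.path.join(root, "src", "plugins"))
     init_dir(os.path.join(root, "src"), os.path.join(root, "dst"))
     print(os.listdir(os.path.join(root, "dst")))  # ['plugins']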
Example #7
 def proc_inst_ex(self, multi_inst, inst_dic):
     """to execute a particular instance"""
     flow_root_dir = inst_dic["stage_dic"]["flow_root_dir"]
     flow_name = inst_dic["stage_dic"]["flow"]
     l_flow = inst_dic["stage_dic"]["l_flow"]
     stage_name = inst_dic["stage_dic"]["stage"]
     sub_stage_name = inst_dic["stage_dic"]["sub_stage"]
     local_dic = inst_dic["tmp_dic"]["local"]
     ver_dic = inst_dic["tmp_dic"]["ver"]
     pre_file_mt = inst_dic["pre_file_mt"]
     force_flg = inst_dic["force_flg"]
     file_mt = 0.0
     if self.restore_dic:
         sub_stage_name = f".restore_{sub_stage_name}"
     dst_file = os.path.join(
         flow_root_dir, "scripts", stage_name, multi_inst,
         sub_stage_name) if multi_inst else os.path.join(
             flow_root_dir, "scripts", stage_name, sub_stage_name)
     dst_op_file = os.path.join(
         flow_root_dir, "sum", stage_name, multi_inst,
         f"{sub_stage_name}.op") if multi_inst else os.path.join(
             flow_root_dir, "sum", stage_name, f"{sub_stage_name}.op")
     pcom.mkdir(LOG, os.path.dirname(dst_op_file))
     dst_run_file = f"{dst_op_file}.run"
     if "_exec_cmd" in local_dic:
         self._gen_job_tcl(dst_op_file, local_dic)
         tool_str = local_dic.get("_exec_tool", "")
         jcn_str = (
             local_dic.get("_job_restore_cpu_number", "") if self.restore_dic
             else local_dic.get("_job_cpu_number", ""))
         jr_str = (
             local_dic.get("_job_restore_resource", "") if self.restore_dic
             else local_dic.get("_job_resource", ""))
         jc_str = (
             f"{local_dic.get('_job_cmd', '')} {local_dic.get('_job_queue', '')} "
             f"{jcn_str} {jr_str}" if "_job_cmd" in local_dic else "")
         jn_str = (
             f"{self.ced['USER']}:::{self.ced['PROJ_NAME']}:::{self.ced['BLK_NAME']}:::"
             f"{flow_name}::{stage_name}:{sub_stage_name}:{multi_inst}")
         job_str = f"{jc_str} -J '{jn_str}'" if jc_str else ""
         cmd_str = local_dic.get("_exec_cmd", "")
         with open(dst_op_file, "w") as drf:
             drf.write(
                 f"{tool_str}{os.linesep}{cmd_str} {dst_file}{os.linesep}")
         trash_dir = f"{os.path.dirname(dst_op_file)}{os.sep}.trash"
         pcom.mkdir(LOG, trash_dir)
         with open(dst_run_file, "w") as dbf:
             dbf.write(
                 f"{job_str} xterm -title '{dst_file}' -e 'cd {trash_dir}; "
                 f"source {dst_op_file} | tee {dst_run_file}.log; "
                 f"touch {dst_run_file}.stat'{os.linesep}")
         err_kw_lst = pcom.rd_cfg(
             self.cfg_dic.get("filter", {}), stage_name, "error_keywords_exp")
         wav_kw_lst = pcom.rd_cfg(
             self.cfg_dic.get("filter", {}), stage_name, "waiver_keywords_exp")
         fin_str = self.ced.get("FIN_STR", "")
         filter_dic = {
             "err_kw_lst": err_kw_lst, "wav_kw_lst": wav_kw_lst, "fin_str": fin_str}
         if self.run_flg:
             file_mt = os.path.getmtime(dst_file)
             f_flg = force_flg or file_mt <= pre_file_mt
             if f_flg:
                 # refresh the timestamp to fit the auto-skip feature
                 os.utime(dst_file)
                 # following stages then have to be force-run as well
                 file_mt = os.path.getmtime(dst_file)
             file_p = file_proc.FileProc(
                 {"src": dst_file, "file": dst_run_file, "l_flow": l_flow, "flow": flow_name,
                  "stage": stage_name, "sub_stage": sub_stage_name, "multi_inst": multi_inst,
                  "filter_dic": filter_dic, "flow_root_dir": flow_root_dir, "ced": self.ced,
                  "ver_dic": ver_dic, "jn_str": jn_str}, f_flg)
             file_p.proc_set_log_par_env(inst_dic["tmp_dic"])
             p_run = file_p.proc_run_file()
             if p_run is True:
                 return p_run
     return file_mt
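
A minimal sketch (standalone function, hypothetical file) of the timestamp gate above: a stage is forced when requested or when its script is not newer than the previous stage's timestamp, and forcing refreshes the mtime so every later stage is forced as well.

 import os
 import tempfile

 def need_run(dst_file, pre_file_mt, force_flg):
     """Return (force_flag, script_mtime) following the auto-skip rule above."""
     file_mt = os.path.getmtime(dst_file)
     f_flg = force_flg or file_mt <= pre_file_mt
     if f_flg:
         os.utime(dst_file)  # refresh mtime so downstream stages are forced too
         file_mt = os.path.getmtime(dst_file)
     return f_flg, file_mt

 if __name__ == "__main__":
     script = tempfile.NamedTemporaryFile(delete=False).name
     print(need_run(script, 0.0, False))                       # newer than pre stage: not forced
     print(need_run(script, os.path.getmtime(script), False))  # not newer: forced, mtime refreshed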
Example #8
 def proc_flow(self, l_flow):
     """to process particular flow"""
     proj_tmp_dir = self.ced["PROJ_SHARE_TMP"].rstrip(os.sep)
     liblist_var_dic = (
         self.load_liblist(self.ced["PROJ_LIB"], self.dir_cfg_dic["lib"]["DEFAULT"])
         if self.ced["PROJ_LIB"] else {})
     pre_stage_dic = {}
     pre_file_mt = 0.0
     force_flg = False
     flow_if_dic = {
         "flow": l_flow, "block": self.ced["BLK_NAME"], "proj": self.ced["PROJ_NAME"],
         "owner": self.ced["USER"], "created_time": self.ced["DATETIME"].isoformat(),
         "status": "running", "comment": self.comment}
     if self.run_flg:
         db_if.w_flow(flow_if_dic)
     flow_dic_lst, flow_ver_dic, flow_err_lst = self.exp_stages_misc(
         [], {}, [], self.cfg_dic.get("flow", {}), l_flow)
     if flow_err_lst:
         LOG.error("flow %s has the following errors in flow.cfg", l_flow)
         pcom.pp_list(flow_err_lst)
         raise SystemExit()
     signoff_dic = pcom.ch_cfg(self.cfg_dic.get("signoff", {})).get("DEFAULT", {})
     if self.force_dic and self.force_dic not in flow_dic_lst:
         LOG.error("force stage %s not in flow %s", self.force_dic, l_flow)
         raise SystemExit()
     if self.begin_dic and self.begin_dic not in flow_dic_lst:
         LOG.error("begin stage %s not in flow %s", self.begin_dic, l_flow)
         raise SystemExit()
     if self.end_dic and self.end_dic not in flow_dic_lst:
         LOG.error("end stage %s not in flow %s", self.end_dic, l_flow)
         raise SystemExit()
     if self.restore_dic and self.restore_dic not in flow_dic_lst:
         LOG.error("restore stage %s not in flow %s", self.restore_dic, l_flow)
         raise SystemExit()
     for flow_dic in flow_dic_lst:
         flow_name = flow_dic.get("flow", "")
         stage_name = flow_dic.get("stage", "")
         sub_stage_name = flow_dic.get("sub_stage", "")
         flow_ver = flow_ver_dic.get("rtl_netlist", "")
         flow_lib = flow_ver_dic.get("LIB", "")
         if not flow_lib:
             LOG.warning("option 'LIB' is NA in section %s of flow.cfg", l_flow)
         elif flow_lib not in liblist_var_dic:
             LOG.warning(
                 "flow LIB %s is not matched with any process of lib/process.cfg", flow_lib)
         # force_flg FSM: forcing turns on at the matching force stage (or
         # immediately when force_dic is empty) and stays on afterwards
         if not force_flg:
             if self.force_dic == {} or self.force_dic == {
                     "flow": flow_name, "stage": stage_name,
                     "sub_stage": sub_stage_name}:
                 force_flg = True
         tmp_file = os.path.join(proj_tmp_dir, "flow", stage_name, sub_stage_name)
         if not os.path.isfile(tmp_file):
             LOG.warning(
                 "template file %s is NA, "
                 "used by flow %s stage %s", tmp_file, flow_name, stage_name)
             continue
         flow_root_dir = f"{self.ced['BLK_RUN']}{os.sep}{flow_ver}{os.sep}{flow_name}"
         local_dic = pcom.ch_cfg(
             self.dir_cfg_dic.get("flow", {}).get(flow_name, {}).get(stage_name, {})).get(
                 sub_stage_name, {})
         stage_dic = {
             "l_flow": l_flow, "flow": flow_name, "stage": stage_name,
             "sub_stage": sub_stage_name, "flow_root_dir": flow_root_dir,
             "flow_liblist_dir": self.ced["BLK_RUN"],
             "flow_scripts_dir": f"{flow_root_dir}{os.sep}scripts",
             "config_plugins_dir":
             f"{self.ced['BLK_CFG_FLOW']}{os.sep}{flow_name}{os.sep}plugins"}
         self.proc_prex(stage_dic)
         self.opvar_lst.append({
             "local": local_dic, "cur": stage_dic, "pre": pre_stage_dic, "ver": flow_ver_dic})
         tmp_dic = {
             "env": self.ced, "local": local_dic,
             "liblist": liblist_var_dic.get(flow_lib, {}),
             "cur": stage_dic, "pre": pre_stage_dic, "ver": flow_ver_dic,
             "signoff": signoff_dic}
         pre_stage_dic = stage_dic
         if self.restore_dic and self.restore_dic != {
                 "flow": flow_name, "stage": stage_name, "sub_stage": sub_stage_name}:
             continue
         if self.restore_dic:
             tmp_dic["cur"]["op_restore"] = "true"
         if self.begin_dic and self.begin_dic != {
                 "flow": flow_name, "stage": stage_name, "sub_stage": sub_stage_name}:
             self.opvar_lst.pop()
             continue
         self.begin_dic = {}
         inst_str = "_restore_inst" if self.restore_dic else "_multi_inst"
         multi_inst_lst = [c_c.strip() for c_c in pcom.rd_cfg(
             self.dir_cfg_dic.get("flow", {}).get(flow_name, {}).get(stage_name, {}),
             sub_stage_name, inst_str) if c_c.strip()]
         if not multi_inst_lst:
             multi_inst_lst = [""]
         inst_dic = {
             "stage_dic": stage_dic, "tmp_dic": tmp_dic,
             "tmp_file": tmp_file, "pre_file_mt": pre_file_mt,
             "force_flg": force_flg}
         if self.run_flg:
             try:
                 if settings.DEBUG:
                     file_mt_lst = []
                     for multi_inst in multi_inst_lst:
                         self.proc_inst(multi_inst, inst_dic)
                         file_mt_lst.append(self.proc_inst_ex(multi_inst, inst_dic))
                 else:
                     for multi_inst in multi_inst_lst:
                         self.proc_inst(multi_inst, inst_dic)
                     with Pool(settings.MP_POOL, initializer=pcom.sig_init) as mip:
                         file_mt_lst = mip.starmap(
                             self.proc_inst_ex,
                             zip(multi_inst_lst, [inst_dic]*len(multi_inst_lst)))
             except KeyboardInterrupt:
                 flow_if_dic["status"] = "failed"
                 db_if.w_flow(flow_if_dic)
                 db_if.d_flow(flow_if_dic)
                 raise KeyboardInterrupt
             if any(c_c is True for c_c in file_mt_lst):
                 flow_if_dic["status"] = "failed"
                 db_if.w_flow(flow_if_dic)
                 db_if.d_flow(flow_if_dic)
                 raise SystemExit()
             pre_file_mt = max(file_mt_lst)
         else:
             for multi_inst in multi_inst_lst:
                 self.proc_inst(multi_inst, inst_dic)
                 self.proc_inst_ex(multi_inst, inst_dic)
         if self.end_dic and self.end_dic == {
                 "flow": flow_name, "stage": stage_name, "sub_stage": sub_stage_name}:
             break
     if self.run_flg:
         flow_if_dic["status"] = "passed"
         db_if.w_flow(flow_if_dic)
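
A minimal sketch (stand-in worker, no pcom.sig_init initializer) of the multiprocessing fan-out above: Pool.starmap pairs every instance name with the same shared dict, exactly what zip(multi_inst_lst, [inst_dic] * len(multi_inst_lst)) produces, and collects one result per instance.

 from multiprocessing import Pool

 def proc_inst_ex(multi_inst, inst_dic):
     """Stand-in worker: pretend to run one instance and return a fake mtime."""
     return inst_dic["pre_file_mt"] + len(multi_inst)

 if __name__ == "__main__":
     multi_inst_lst = ["corner0", "corner1", "corner2"]
     inst_dic = {"pre_file_mt": 100.0}
     with Pool(2) as mip:
         file_mt_lst = mip.starmap(
             proc_inst_ex, zip(multi_inst_lst, [inst_dic] * len(multi_inst_lst)))
     print(file_mt_lst, max(file_mt_lst))  # [107.0, 107.0, 107.0] 107.0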
Example #9
 def gen_link_liblist(self, dst_root, lib_dir_cfg_dic):
     """to process project or block lib mapping links"""
     LOG.info(":: library mapping of files linking ...")
     liblist_var_dic = collections.defaultdict(dict)
     for process, process_sec in lib_dir_cfg_dic.get("process", {}).items():
         if process == "DEFAULT":
             continue
         for lib_type, lib_type_sec in lib_dir_cfg_dic.get("search",
                                                           {}).items():
             if lib_type == "DEFAULT":
                 continue
             can_root = pcom.ren_tempstr(
                 LOG, pcom.rd_sec(lib_type_sec, "src", True), process_sec)
             if not can_root:
                 LOG.warning(
                     "library mapping search root path option 'src' of %s "
                     "is not defined in lib/search.cfg, skipped", lib_type)
                 continue
             elif not os.path.isdir(can_root):
                 LOG.warning("library mapping search root path %s is NA",
                             can_root)
                 continue
             can_tar = pcom.rd_sec(lib_type_sec, "dst", True)
             if not can_tar:
                 LOG.warning(
                     "library mapping destination directory option 'dst' of %s "
                     "is not defined in lib/search.cfg, skipped", lib_type)
                 continue
             LOG.info("library mapping for part %s", lib_type)
             pcom.mkdir(LOG, can_tar)
             can_ignore_lst = pcom.rd_sec(lib_type_sec, "ignore")
             can_lst = list(
                 pcom.find_iter(can_root, "*", i_lst=can_ignore_lst))
             for lib_type_k in lib_type_sec:
                 if not lib_type_k.startswith("pattern"):
                     continue
                 pattern_search = pcom.rd_sec(lib_type_sec, lib_type_k,
                                              True)
                 var_set = set(re.findall(r"{{(.*?)}}", pattern_search))
                 for var_dic in pcom.prod_vs_iter(var_set, process_sec):
                     pattern_search_str = pcom.ren_tempstr(
                         LOG, pattern_search, var_dic)
                     for match_file in fnmatch.filter(
                             can_lst,
                             f"{can_root}{os.sep}{pattern_search_str}"):
                         self.link_src_dst(match_file, can_tar, can_root)
         match_lst_file = f"{dst_root}{os.sep}{process}.match_lst"
         LOG.info("generating library map list file %s", match_lst_file)
         with open(match_lst_file, "w") as mlf:
             json.dump(self.match_lst, mlf, indent=4)
         liblist_dir = f"{dst_root}{os.sep}liblist"
         pcom.mkdir(LOG, liblist_dir)
         var_name_line_dic = {}
         liblist_cfg = lib_dir_cfg_dic.get("liblist", {})
         try:
             custom_dic = {
                 c_k: pcom.rd_cfg(liblist_cfg, f"custom:{process}", c_k)
                 for c_k in liblist_cfg[f"custom:{process}"]
             }
         except KeyError:
             custom_dic = {}
         if "var" not in liblist_cfg:
             LOG.error("var section is NA in lib/liblist.cfg")
             raise SystemExit()
         for var_name in liblist_cfg["var"]:
             match_file_lst = []
             match_pattern_lst = (custom_dic[var_name] if var_name
                                  in custom_dic else pcom.rd_cfg(
                                      liblist_cfg, "var", var_name))
             for match_pattern in match_pattern_lst:
                 var_set = set(re.findall(r"{{(.*?)}}", match_pattern))
                 for var_dic in pcom.prod_vs_iter(var_set, process_sec):
                     fnmatch_lst = ([
                         pcom.ren_tempstr(LOG, match_pattern, var_dic)
                     ] if var_name in custom_dic else fnmatch.filter(
                         self.match_lst,
                         pcom.ren_tempstr(LOG, match_pattern, var_dic)))
                     match_file_lst.extend(fnmatch_lst)
             var_name_line_dic[var_name] = match_file_lst
         LOG.info("generating library liblist files in %s", liblist_dir)
         # file generation and liblist dic generation for templates
         tcl_line_lst = []
         for var_name, match_file_lst in var_name_line_dic.items():
             liblist_var_dic[process][var_name] = f" \\{os.linesep} ".join(
                 match_file_lst)
             tcl_value_str = f" \\{os.linesep}{' '*(6+len(var_name))}".join(
                 match_file_lst)
             tcl_line_lst.append(f'set {var_name} "{tcl_value_str}"')
         with open(f"{liblist_dir}{os.sep}{process}.tcl", "w") as lltf:
             lltf.write(os.linesep.join(tcl_line_lst))
         self.match_lst = []
     with open(f"{liblist_dir}{os.sep}liblist.json", "w") as lljf:
         json.dump(liblist_var_dic, lljf, indent=4)
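
A minimal sketch (invented pattern, values and candidate paths) of the placeholder handling above: re.findall collects the {{var}} names, a value dict renders a concrete string, and fnmatch.filter picks the matching candidates, roughly what prod_vs_iter and ren_tempstr do in the method.

 import fnmatch
 import re

 pattern = "lib/{{process}}/*{{corner}}*.lib"
 values = {"process": "n7", "corner": "ss"}
 var_set = set(re.findall(r"{{(.*?)}}", pattern))  # {'process', 'corner'}
 rendered = pattern
 for var in var_set:
     rendered = rendered.replace("{{" + var + "}}", values[var])
 candidates = ["lib/n7/stdcell_ss_0p675v.lib", "lib/n7/stdcell_ff_0p825v.lib"]
 print(fnmatch.filter(candidates, rendered))  # ['lib/n7/stdcell_ss_0p675v.lib']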