def __init__(self, leda_dic):
    """Boot the project env, locate the leda src dir, and build all leda paths.

    :param leda_dic: dict of leda command-line options ("leda_src", "leda_top")
    :raises Exception: if the leda tool or the leda.cfg file is unavailable
    """
    if not shutil.which("leda"):
        raise Exception("leda is not loaded")
    self.leda_dic = leda_dic
    ced, cfg_dic = env_booter.EnvBooter().boot_env()
    self.ced, self.cfg_dic = ced, cfg_dic
    # user-supplied src dir wins; otherwise fall back to the flow default
    if leda_dic["leda_src"]:
        src_dir = os.path.abspath(os.path.expandvars(leda_dic["leda_src"]))
    else:
        src_dir = f"{ced['FLOW_LEDA']}{os.sep}src"
    leda_cfg_file = f"{src_dir}{os.sep}leda.cfg"
    if not os.path.isfile(leda_cfg_file):
        raise Exception(f"the leda cfg file {leda_cfg_file} is NA.")
    self.leda_cfg = pcom.gen_cfg([leda_cfg_file])
    # timestamped working dir keyed by src dir and top design name
    time_stamp = ced['TIME'].strftime('%Y_%m_%d_%H_%M_%S')
    leda_time_dir = (
        f"{ced['FLOW_LEDA']}{os.sep}{os.path.basename(src_dir)}__"
        f"{leda_dic['leda_top']}__{time_stamp}")
    log_dir = f"{leda_time_dir}{os.sep}leda_logs"
    self.df_dic = {
        "src_dir": src_dir,
        "time_dir": leda_time_dir,
        "log_dir": log_dir,
        "log": f"{log_dir}{os.sep}leda.log",
        "flist_file": f"{leda_time_dir}{os.sep}leda.flist",
        "tcl": f"{leda_time_dir}{os.sep}leda.tcl",
        "bbox_file": f"{src_dir}{os.sep}bbox_file",
        "rule_dir": f"{src_dir}{os.sep}leda_rule",
        "waiver_dir": f"{src_dir}{os.sep}leda_waiver"}
def __init__(self, icc_dic):
    """Check icc_shell availability, resolve src/work dirs, and load icc cfg.

    Bug fix: the original expanded ``icc_dic["dir"]`` unconditionally, so a
    missing --dir value (``None``) crashed ``os.path.expandvars`` with a
    TypeError before the truthiness check could skip it; the expansion now
    happens only when the value is actually given.

    :param icc_dic: dict of icc options ("src", "dir")
    :raises Exception: if icc_shell is not loaded or the given dir is NA
    """
    if not shutil.which("icc_shell"):
        raise Exception("icc is not loaded")
    self.icc_dic = icc_dic
    ced, _ = env_booter.EnvBooter().boot_env()
    # user-supplied src dir wins; otherwise fall back to the flow default
    src_dir = (
        os.path.abspath(os.path.expandvars(self.icc_dic["src"]))
        if self.icc_dic["src"] else f"{ced['FLOW_ICC']}{os.sep}src")
    time_str = ced['TIME'].strftime('%Y_%m_%d_%H_%M_%S')
    icct_dir = f"{ced['FLOW_ICC']}{os.sep}icc_{os.path.basename(src_dir)}_{time_str}"
    if self.icc_dic["dir"]:
        # reuse an existing work dir instead of cloning a fresh one
        icc_dic_dir = os.path.abspath(os.path.expandvars(self.icc_dic["dir"]))
        if not os.path.isdir(icc_dic_dir):
            raise Exception(f"the input dir {icc_dic_dir} is NA.")
        icct_dir = icc_dic_dir
    else:
        # fresh run: clone the src dir into the timestamped work dir
        shutil.copytree(src_dir, icct_dir)
    self.icc_cfg_dic = pcom.gen_cfg([f"{icct_dir}{os.sep}icc.cfg"])
    self.stage_lst = self.icc_cfg_dic.options("icc_stage")
    logs_dir = f"{icct_dir}{os.sep}logs"
    self.dir_f_dic = {
        "icct_dir": icct_dir,
        "src_dir": src_dir,
        "logs_dir": logs_dir,
        "err_rpt": f"{logs_dir}{os.sep}err.rpt",
        "warn_rpt": f"{logs_dir}{os.sep}warning_rpt",
        "time": time_str}
def __init__(self, dc_dic):
    """Check dc_shell availability, boot env, export base_args, build dc dirs.

    Bug fix: the original expanded ``dc_dic["dir"]`` unconditionally, so a
    missing --dir value (``None``) crashed ``os.path.expandvars`` with a
    TypeError; the expansion is now guarded by the truthiness check. The
    redundant chained assignment ``self.ced, _ = ced, _ = ...`` is also
    simplified.

    :param dc_dic: dict of dc options ("topo", "src", "dir")
    :raises Exception: if dc_shell is not loaded or the given dir is NA
    """
    if not shutil.which("dc_shell"):
        raise Exception("dc is not loaded")
    ced, _ = env_booter.EnvBooter().boot_env()
    self.ced = ced
    self.dc_dic = dc_dic
    # topographical mode gets its own dir prefix
    dc_pre_str = "dct" if dc_dic["topo"] else "dc"
    self.src_dir = src_dir = (
        os.path.abspath(os.path.expandvars(self.dc_dic["src"]))
        if self.dc_dic["src"] else f"{ced['FLOW_DC']}{os.sep}src")
    self.dc_cfg_dic = dc_cfg_dic = pcom.gen_cfg([f"{src_dir}{os.sep}dc.cfg"])
    # export base_args both to the OS environment (for tool scripts) and to ced
    for base_key, base_value in self.dc_cfg_dic["base_args"].items():
        os.environ[base_key] = base_value
        self.ced[base_key] = base_value
    dc_time_dir = (
        f"{ced['FLOW_DC']}{os.sep}{dc_pre_str}_{os.path.basename(src_dir)}_"
        f"{dc_cfg_dic['base_args']['DESIGN_NAME']}_"
        f"{ced['TIME'].strftime('%Y_%m_%d_%H_%M_%S')}")
    if self.dc_dic["dir"]:
        # reuse an existing work dir instead of the timestamped one
        dc_dic_dir = os.path.abspath(os.path.expandvars(self.dc_dic["dir"]))
        if not os.path.isdir(dc_dic_dir):
            raise Exception(f"the input dir {dc_dic_dir} is NA.")
        dc_time_dir = dc_dic_dir
    os.makedirs(dc_time_dir, exist_ok=True)
    self.dir_dic = {
        "dc_time_dir": dc_time_dir,
        "reports_dir": f"{dc_time_dir}{os.sep}reports",
        "results_dir": f"{dc_time_dir}{os.sep}results",
        "tcl_dir": f"{dc_time_dir}{os.sep}tcl"}
def run_vplan(args):
    """to run vplan sub cmd"""
    # guard clause: both the module and the proc switch are mandatory
    if not (args.vplan_module and args.vplan_proc):
        raise Exception("missing main arguments")
    ced, cfg_dic = env_booter.EnvBooter().module_env(args.vplan_module)
    VPlanProc(ced, cfg_dic, args.vplan_days).proc_vplan()
    LOG.info("processing vplan of %s module done", args.vplan_module)
def __init__(self, run_dic):
    """Resolve the module name, normalize the case list, and boot module env.

    :param run_dic: dict of run options ("module", "case_lst", "regr_type_lst")
    :raises Exception: if no module name can be derived from the first case
    """
    # derive module from "<module>__<case>" when not given explicitly
    if not run_dic["module"]:
        run_dic["module"] = run_dic["case_lst"][0].split("__")[0]
    if not run_dic["module"]:
        raise Exception(
            f"case {run_dic['case_lst'][0]} is not in standard format <module__case>, "
            f"so module name must be specified")
    # strip any file extension from the case names and drop empty entries
    run_dic["case_lst"] = [cc.split(".")[0] for cc in run_dic["case_lst"] if cc]
    self.ced, self.cfg_dic = env_booter.EnvBooter().module_env(run_dic["module"])
    regr_flg = bool(run_dic["regr_type_lst"])
    self.regr_flg = regr_flg
    # capture subprocess output only in regression mode
    self.std = subprocess.PIPE if regr_flg else None
    self.ow_dic = {"pre": {}, "ae": {}, "su": {}}
    self.run_dic = run_dic
def clean_module(args, module_name):
    """to clean verification module level dirs"""
    ced, _ = env_booter.EnvBooter().module_env(module_name)
    # dirs collected here are removed in one shot by rmtree() below
    clean_lst = []
    if args.clean_case_lst:
        # remove only the explicitly named case output dirs
        clean_lst.extend([
            f"{ced['MODULE_OUTPUT']}{os.sep}{cc}"
            for cc in args.clean_case_lst if cc
        ])
    if args.clean_case:
        # remove every case dir except the reserved "__xxx__" ones
        for case_dir in pcom.find_iter(ced["MODULE_OUTPUT"], "*", True, True):
            base_case_dir = os.path.basename(case_dir)
            if not (base_case_dir.startswith("__") and base_case_dir.endswith("__")):
                clean_lst.append(case_dir)
    if args.clean_cov:
        clean_lst.append(ced["OUTPUT_COV"])
    if args.clean_output:
        clean_lst.append(ced["MODULE_OUTPUT"])
    if args.clean_tb:
        LOG.info(
            "CAUTION!!! you are reverting %s directory, in which all modified/private "
            "files/directories will be removed!!!", ced["MODULE_TB"])
        # record the svn revision BEFORE removal so the dir can be checked
        # out again at the same revision afterwards
        svn_tb_ver = pcom.gen_svn_ver(ced["MODULE_TB"])
        clean_lst.append(ced["MODULE_TB"])
    if args.clean_config:
        LOG.info(
            "CAUTION!!! you are reverting %s directory, in which all modified/private "
            "files/directories will be removed!!!", ced["MODULE_CONFIG"])
        svn_cfg_ver = pcom.gen_svn_ver(ced["MODULE_CONFIG"])
        clean_lst.append(ced["MODULE_CONFIG"])
    rmtree(clean_lst)
    if args.clean_tb:
        # restore the tb dir at the recorded revision
        # NOTE(review): assumes gen_svn_ver returns a string like "r<rev>"
        # so that "-{ver}" forms a valid svn revision switch — confirm
        subprocess.run(f"svn up {ced['MODULE_TB']} -{svn_tb_ver}",
                       shell=True, check=True,
                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if args.clean_config:
        subprocess.run(f"svn up {ced['MODULE_CONFIG']} -{svn_cfg_ver}",
                       shell=True, check=True,
                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def run_doc(args):
    """to run doc sub cmd"""
    if not shutil.which("NaturalDocs"):
        raise Exception("NaturalDocs is not loaded")
    # -g is mandatory, together with either a module or a path target;
    # when both targets are given, the module one takes precedence
    if not args.doc_gen or not (args.doc_module or args.doc_path):
        raise Exception("missing main arguments")
    if args.doc_module:
        ced, _ = env_booter.EnvBooter().module_env(args.doc_module)
        os.makedirs(ced["MODULE_DOC"], exist_ok=True)
        # env vars are expanded by the shell at run time
        doc_str = "NaturalDocs -i ${PROJ_MODULE} -o HTML ${MODULE_DOC} -p ${MODULE_DOC}"
        subprocess.run(doc_str, shell=True)
        LOG.info("generating docs of module %s done", args.doc_module)
    else:
        if not os.path.isdir(args.doc_path):
            raise Exception(f"doc path {args.doc_path} is NA")
        doc_dir = f"{args.doc_path}{os.sep}doc"
        os.makedirs(doc_dir, exist_ok=True)
        doc_str = f"NaturalDocs -i {args.doc_path} -o HTML {doc_dir} -p {doc_dir}"
        subprocess.run(doc_str, shell=True)
        LOG.info("generating docs of path %s done", args.doc_path)
def run_cov(args):
    """to run cov sub cmd"""
    if args.cov_module and (args.cov_merge or args.cov_rpt or args.cov_verdi):
        ced, _ = env_booter.EnvBooter().module_env(args.cov_module)
        # collect all exclusion (*.el) files as repeated -elfile options
        ef_str = ""
        for elf in pcom.find_iter(ced["MODULE_CONFIG"], "*.el"):
            ef_str += f"-elfile {elf} "
        cov_merge_vdb = f"{ced['COV_MERGE']}{os.sep}{ced['MODULE']}_merge.vdb"
        # every tool command runs from the merge dir
        cd_str = f"cd {os.path.dirname(cov_merge_vdb)} && "
        cov_str = ""
        if args.cov_merge:
            vdb_lst = list(pcom.find_iter(ced["COV_CM"], "*.vdb", True))
            if not vdb_lst:
                raise Exception("no coverage data found")
            merge_lst_file = f"{ced['COV_MERGE']}{os.sep}merge_lst"
            os.makedirs(os.path.dirname(merge_lst_file), exist_ok=True)
            with open(merge_lst_file, "w") as mlf:
                for vdb_dir in vdb_lst:
                    mlf.write(f"{vdb_dir}{os.linesep}")
                # include the previous merge result so coverage accumulates
                # across successive merge runs
                if os.path.isdir(cov_merge_vdb):
                    mlf.write(cov_merge_vdb)
            cov_str = (f"{cd_str} urg -full64 -f {merge_lst_file} -dbname "
                       f"{os.path.basename(cov_merge_vdb)} -noreport {ef_str}")
            subprocess.run(cov_str, shell=True, check=True)
            # drop per-test raw data only after a successful (checked) merge
            for cc_dir in pcom.find_iter(ced["COV_CM"], "testdata", True):
                shutil.rmtree(cc_dir, ignore_errors=True)
        if args.cov_rpt:
            if not os.path.isdir(cov_merge_vdb):
                raise Exception(f"coverage merge dir {cov_merge_vdb} is NA")
            cov_str = f"{cd_str} urg -full64 -dir {cov_merge_vdb} {ef_str} -format both"
            subprocess.run(cov_str, shell=True)
        if args.cov_verdi:
            if not os.path.isdir(cov_merge_vdb):
                raise Exception(f"coverage merge dir {cov_merge_vdb} is NA")
            # trailing "&" launches the verdi GUI in the background
            cov_str = f"{cd_str} verdi -cov -covdir {cov_merge_vdb} {ef_str} &"
            subprocess.run(cov_str, shell=True)
        LOG.info("running coverage of %s module done", args.cov_module)
    else:
        raise Exception("missing main arguments")
def run_fm(args):
    """to run fm sub cmd

    Fix: corrected the typo "gegerating" -> "generating" in both log
    messages; no behavioral change otherwise.
    """
    if args.fm_ref_file and args.fm_imp_file:
        if not shutil.which("fm_shell"):
            raise Exception("fm is not loaded")
        ced, _ = env_booter.EnvBooter().boot_env()
        # user-supplied src dir wins; otherwise fall back to the flow default
        fm_src_dir = (os.path.abspath(os.path.expandvars(args.fm_src))
                      if args.fm_src else f"{ced['FLOW_FM']}{os.sep}src")
        fm_cfg_dic = pcom.gen_cfg([f"{fm_src_dir}{os.sep}fm.cfg"])
        LOG.info("generating ref related file and flag")
        ref_netlist_flag, ref_files = gen_fmfile(args.fm_ref_file)
        fm_cfg_dic["fm"]["ref_netlist_flag"] = str(ref_netlist_flag)
        fm_cfg_dic["fm"]["ref_filelist"] = ref_files
        LOG.info("generating imp related file and flag")
        imp_netlist_flag, imp_files = gen_fmfile(args.fm_imp_file)
        fm_cfg_dic["fm"]["imp_netlist_flag"] = str(imp_netlist_flag)
        fm_cfg_dic["fm"]["imp_filelist"] = imp_files
        fm_cfg_dic["fm"]["fm_top_dir"] = ced["FLOW_FM"]
        fm_cfg_dic["fm"]["fm_src_dir"] = fm_src_dir
        proc_fm(ced, fm_cfg_dic)
    else:
        raise Exception("missing main arguments")
def proc_cdc(cdc_src):
    """to process cdc flow"""
    # both the simulator and the formal tool must be on PATH
    if not shutil.which("vlog"):
        raise Exception("qsim is not loaded")
    if not shutil.which("qverify"):
        raise Exception("formal is not loaded")
    ced, _ = env_booter.EnvBooter().boot_env()
    if cdc_src:
        src_dir = os.path.abspath(os.path.expandvars(cdc_src))
    else:
        src_dir = f"{ced['FLOW_CDC']}{os.sep}src"
    cdc_cfg_dic = pcom.gen_cfg([f"{src_dir}{os.sep}cdc.cfg"])
    wv_cfg_dic = pcom.gen_cfg([f"{src_dir}{os.sep}waiver.cfg"])
    cdc_flist = pcom.rd_cfg(cdc_cfg_dic, "cdc", "design_flist")
    top = pcom.rd_cfg(cdc_cfg_dic, "cdc", "design_top")
    if not cdc_flist or not top:
        raise Exception(f"no cdc filelist or top name found in cfg")
    time_stamp = ced['TIME'].strftime('%Y_%m_%d_%H_%M_%S')
    cdc_time_dir = (
        f"{ced['FLOW_CDC']}{os.sep}cdc_{os.path.basename(src_dir)}_"
        f"{top[0]}_{time_stamp}")
    os.makedirs(cdc_time_dir, exist_ok=True)
    # instantiate the cdc.tcl template with the [cdc] cfg section
    pcom.ren_tempfile(
        f"{src_dir}{os.sep}template{os.sep}cdc.tcl",
        f"{cdc_time_dir}{os.sep}cdc.tcl", cdc_cfg_dic["cdc"])
    err_str_lst = [
        re.escape(cc) for cc in pcom.rd_cfg(cdc_cfg_dic, "cdc", "err_str")]
    compiler_err_pat = re.compile("|".join(err_str_lst))
    run_compiler(cdc_time_dir, cdc_flist, compiler_err_pat)
    # run the formal tool only when compilation left no "failed" marker
    if not os.path.isfile(f"{cdc_time_dir}{os.sep}failed"):
        qverify_cmd = (
            f"cd {cdc_time_dir} && "
            f"qverify -c -do {cdc_time_dir}{os.sep}cdc.tcl "
            f"-od {cdc_time_dir}{os.sep}Output_Results")
        subprocess.run(qverify_cmd, shell=True)
    dtl_rpt = f"{cdc_time_dir}{os.sep}Output_Results{os.sep}cdc_detail.rpt"
    if not os.path.isfile(dtl_rpt):
        raise Exception(f"report file {dtl_rpt} is not existed")
    # violations surviving the waiver config are dumped for manual review
    result_lst = gen_post_wv(dtl_rpt, wv_cfg_dic)
    if result_lst:
        with open(f"{cdc_time_dir}{os.sep}post_waiver.rpt", "w") as pwr:
            for wv_rule in result_lst:
                pwr.write(wv_rule + os.linesep)
        LOG.warning(
            "post waived violations exists, please check post_waiver.rpt file")
def __init__(self, module, module_dir):
    """Store the module identity and boot the project environment.

    :param module: module name string
    :param module_dir: module directory path
    """
    self.module = module
    self.module_dir = module_dir
    self.ced, self.cfg_dic = env_booter.EnvBooter().boot_env()
    # no filters active until callers add some
    self.filter_lst = []
    self.base_dic = {
        "module_name": self.module,
        "agt_name_lst": [],
    }
def __init__(self):
    """Boot the project environment and prepare address/rtl bookkeeping."""
    self.ced, self.cfg_dic = env_booter.EnvBooter().boot_env()
    # per-scope vs cross-scope address bookkeeping
    self.local_address_set = set()
    self.global_address_dic = {}
    self.sw_rtl_dic = nested_dict()