def gen_makefile(self):
    """Generate the top-level Makefile under MODULE_OUTPUT from the project template.

    Returns:
        tuple: (output directory, makefile name) of the rendered Makefile.
    """
    mk_dic = {
        "CED": self.ced,
        "clib_flg": self.mkg_dic["clib_flg"],
        "csrc_flg": self.mkg_dic["csrc_flg"],
    }
    # apply "pre" overrides onto the default c config before validating it
    self.cfg_dic["c"]["DEFAULT"].update(self.mkg_dic["ow_dic"]["pre"])
    self.chk_c_cfg()
    c_cfg = self.cfg_dic["c"]
    # compile/run option lists are flattened to space-separated strings
    for opt_name in ("base_comp_opts", "lib_comp_opts",
                     "src_comp_opts", "src_run_opts"):
        mk_dic[opt_name] = " ".join(pcom.rd_cfg(c_cfg, "DEFAULT", opt_name))
    # pre/post command hooks are passed through to the template as lists
    for cmd_name in ("lib_pre_cmd", "lib_post_cmd",
                     "src_pre_cmd", "src_post_cmd"):
        mk_dic[f"{cmd_name}_lst"] = pcom.rd_cfg(c_cfg, "DEFAULT", cmd_name)
    mk_dir = self.ced["MODULE_OUTPUT"]
    mk_file = "Makefile"
    os.makedirs(mk_dir, exist_ok=True)
    LOG.info("output dir %s is generated", mk_dir)
    pcom.ren_tempfile(
        f"{self.ced['PJ_TEMPLATES']}{os.sep}{mk_file}",
        f"{mk_dir}{os.sep}{mk_file}", mk_dic)
    return mk_dir, mk_file
def proc_fm(ced, fm_cfg_dic):
    """Render fm.tcl into a timestamped run dir, run fm_shell, and report status.

    Args:
        ced: common environment dict (TIME, PROJ_NAME, USER_NAME, ...).
        fm_cfg_dic: parsed fm config; its "fm" section drives the run and is
            updated in place with the generated run directory.
    """
    fm_sec = fm_cfg_dic["fm"]
    # fall back to the design name when ref/imp names are not configured
    ref_name = fm_sec["REF_NAME"] or fm_sec["DESIGN_NAME"]
    imp_name = fm_sec["IMP_NAME"] or fm_sec["DESIGN_NAME"]
    src_base = os.path.basename(fm_sec["fm_src_dir"])
    time_str = ced["TIME"].strftime("%Y_%m_%d_%H_%M_%S")
    fm_time_dir = (
        f"{fm_sec['fm_top_dir']}{os.sep}fm_"
        f"{src_base}_{imp_name}_{ref_name}_{time_str}")
    # record the run dir in the config so the template can reference it
    fm_sec["fm_time_dir"] = fm_time_dir
    os.makedirs(fm_time_dir, exist_ok=True)
    LOG.info("generate fm.tcl file")
    pcom.ren_tempfile(
        f"{fm_sec['fm_src_dir']}{os.sep}fm_template{os.sep}fm.tcl",
        f"{fm_time_dir}{os.sep}fm.tcl", fm_sec)
    fm_str = (f"cd {fm_time_dir} && fm_shell -f {fm_time_dir}{os.sep}fm.tcl "
              f"| tee -i {fm_time_dir}{os.sep}fm.log")
    subprocess.run(fm_str, shell=True)
    # fm run drops a "passed" marker file on success
    run_status = ("passed"
                  if os.path.isfile(f"{fm_time_dir}{os.sep}passed")
                  else "failed")
    parse_rlt_dic = {
        "design_name": pcom.rd_cfg(fm_cfg_dic, "fm", "DESIGN_NAME", True),
        "proj": ced["PROJ_NAME"],
        "user": ced["USER_NAME"],
        "run_time": dt.datetime.timestamp(ced["TIME"]),
        "status": run_status}
    query_url = f"{pcom.BE_URL}/pj_app/fm/db_query/query_insert_case/"
    # only push the result to the backend when one is configured
    if pcom.BACKEND:
        requests.post(query_url, json=parse_rlt_dic)
def proc_dc(self):
    """Process and kick off the dc flow.

    Depending on self.dc_dic flags: generate tcl/sdc files from templates,
    run dc_shell (parsing its log periodically), parse the final timing
    report, and hand off to formality.

    Raises:
        Exception: if formality is requested but fm_shell is not on PATH.
    """
    if self.dc_dic["gen_tcl"]:
        self.gen_dir_dic()
        LOG.info("generating the tcl&sdc files from templates")
        dc_temp_dir = f"{self.src_dir}{os.sep}dc_template"
        for tcl_name in self.dc_cfg_dic.sections():
            if tcl_name in ("base_args", "set_args"):
                # both argument sections render into the one set_args.tcl
                pcom.ren_tempfile(
                    f"{dc_temp_dir}{os.sep}set_args.tcl",
                    f"{self.dir_dic['tcl_dir']}{os.sep}set_args.tcl",
                    {"base_arg_dic": self.dc_cfg_dic["base_args"],
                     "set_arg_dic": self.dc_cfg_dic["set_args"]})
            else:
                # section names encode "." as "__" (cfg syntax limitation)
                tn_str = tcl_name.replace('__', '.')
                pcom.ren_tempfile(
                    f"{dc_temp_dir}{os.sep}{tn_str}",
                    f"{self.dir_dic['tcl_dir']}{os.sep}{tn_str}",
                    {"dc_dic": self.dc_cfg_dic})
    if self.dc_dic["run"]:
        dc_topo_str = (
            f"cd {self.dir_dic['dc_time_dir']} && "
            f"dc_shell {'-topo' if self.dc_dic['topo'] else ''} "
            f"-f {self.dir_dic['tcl_dir']}{os.sep}dc.tcl "
            f"-output_log_file {self.dir_dic['dc_time_dir']}{os.sep}dc.log ")
        # bug fix: proc was only bound inside the try, so a KeyboardInterrupt
        # arriving before Popen returned raised NameError on proc.kill()
        proc = None
        try:
            proc = subprocess.Popen(dc_topo_str, shell=True)
            # parse the log every 3 minutes while dc_shell is running ...
            while proc.poll() is None:
                time.sleep(180)
                dc_log_parser.DcLogParser(
                    self.ced, self.dc_cfg_dic).parse_log()
            # ... and once more after exit to pick up the final results
            dc_log_parser.DcLogParser(self.ced, self.dc_cfg_dic).parse_log()
        except KeyboardInterrupt:
            dc_log_parser.DcLogParser(self.ced, self.dc_cfg_dic).parse_log()
            if proc is not None:
                proc.kill()
    if self.dc_dic["tm_flg"]:
        tm_rpt = os.path.join(
            self.dir_dic["reports_dir"],
            pcom.rd_cfg(
                self.dc_cfg_dic, "set_args",
                "DCRM_FINAL_TIMING_REPORT", True))
        tm_log_parser.TmParser(
            {"dt_file": tm_rpt,
             "level": self.dc_dic["tm_level"]}).parse_tm_log()
    if self.dc_dic["formality"]:
        if not shutil.which("fm_shell"):
            raise Exception("fm is not loaded")
        LOG.info("updating the fm_dic and running formality")
        fm_cfg_dic = pcom.gen_cfg([f"{self.src_dir}{os.sep}fm.cfg"])
        # point formality at the synthesized netlist and svf produced by dc
        fm_cfg_dic["fm"]["imp_filelist"] = os.linesep.join(
            pcom.find_iter(self.dir_dic["results_dir"], "*.syn.v"))
        fm_cfg_dic["fm"]["set_svf"] = os.linesep.join(
            pcom.find_iter(self.dir_dic["results_dir"], "*.syn.svf"))
        fm_cfg_dic["fm"]["DESIGN_NAME"] = pcom.rd_cfg(
            self.dc_cfg_dic, "base_args", "DESIGN_NAME", True)
        fm_cfg_dic["fm"]["ref_filelist"] = pcom.rd_cfg(
            self.dc_cfg_dic, "set_args", "rtl_files", True)
        fm_cfg_dic["fm"]["fm_top_dir"] = self.dir_dic["dc_time_dir"]
        fm_cfg_dic["fm"]["fm_src_dir"] = self.src_dir
        fm_runner.proc_fm(self.ced, fm_cfg_dic)
def gen_simv_makefile(self, simv_dic):
    """Generate the per-simv makefile under OUTPUT_SIMV from the project template.

    Args:
        simv_dic: simv description dict; its "name" keys the output dir.

    Returns:
        tuple: (simv output directory, makefile name).
    """
    ms_dic = {
        "CED": self.ced,
        "simv_dic": simv_dic,
        "ed": {"simv": simv_dic["name"]},
        "clib_flg": self.mkg_dic["clib_flg"],
    }
    ms_dir = f"{self.ced['OUTPUT_SIMV']}{os.sep}{simv_dic['name']}"
    ms_file = "simv_makefile"
    os.makedirs(ms_dir, exist_ok=True)
    LOG.info("simv dir %s is generated", ms_dir)
    pcom.ren_tempfile(
        f"{self.ced['PJ_TEMPLATES']}{os.sep}{ms_file}",
        f"{ms_dir}{os.sep}{ms_file}", ms_dic)
    return ms_dir, ms_file
def gen_case_makefile(self, case_dic):
    """Generate the per-case makefile under MODULE_OUTPUT from the project template.

    Args:
        case_dic: case description dict with "name", "seed", "sem_key",
            and "simv" keys that shape the output path and template data.

    Returns:
        tuple: (case output directory, makefile name).
    """
    mc_dic = {
        "CED": self.ced,
        "case_dic": case_dic,
        "ed": {
            "case": case_dic["name"],
            "seed": case_dic["seed"],
            "sem_key": case_dic["sem_key"],
        },
        "clib_flg": self.mkg_dic["clib_flg"],
    }
    # one directory per (case, simv, seed) combination
    mc_dir = (
        f"{self.ced['MODULE_OUTPUT']}{os.sep}{case_dic['name']}{os.sep}"
        f"{case_dic['simv']}__{case_dic['seed']}")
    mc_file = "case_makefile"
    os.makedirs(mc_dir, exist_ok=True)
    LOG.info("case dir %s is generated", mc_dir)
    pcom.ren_tempfile(
        f"{self.ced['PJ_TEMPLATES']}{os.sep}{mc_file}",
        f"{mc_dir}{os.sep}{mc_file}", mc_dic)
    return mc_dir, mc_file
def proc_cdc(cdc_src):
    """Process the cdc flow: compile the design, run qverify, apply waivers.

    Args:
        cdc_src: optional cdc source dir; defaults to FLOW_CDC/src.

    Raises:
        Exception: if required tools are not on PATH, config entries are
            missing, or the qverify detail report is not produced.
    """
    if not shutil.which("vlog"):
        raise Exception("qsim is not loaded")
    if not shutil.which("qverify"):
        raise Exception("formal is not loaded")
    ced, _ = env_booter.EnvBooter().boot_env()
    cdc_src_dir = (os.path.abspath(os.path.expandvars(cdc_src)) if cdc_src
                   else f"{ced['FLOW_CDC']}{os.sep}src")
    cdc_cfg_dic = pcom.gen_cfg([f"{cdc_src_dir}{os.sep}cdc.cfg"])
    wv_cfg_dic = pcom.gen_cfg([f"{cdc_src_dir}{os.sep}waiver.cfg"])
    cdc_flist = pcom.rd_cfg(cdc_cfg_dic, "cdc", "design_flist")
    top = pcom.rd_cfg(cdc_cfg_dic, "cdc", "design_top")
    if not cdc_flist or not top:
        # fixed: message has no placeholders, so no f-string (F541)
        raise Exception("no cdc filelist or top name found in cfg")
    cdc_time_dir = (
        f"{ced['FLOW_CDC']}{os.sep}cdc_{os.path.basename(cdc_src_dir)}_"
        f"{top[0]}_{ced['TIME'].strftime('%Y_%m_%d_%H_%M_%S')}")
    os.makedirs(cdc_time_dir, exist_ok=True)
    pcom.ren_tempfile(
        f"{cdc_src_dir}{os.sep}template{os.sep}cdc.tcl",
        f"{cdc_time_dir}{os.sep}cdc.tcl", cdc_cfg_dic["cdc"])
    # configured error strings are literal text, so escape them for regex
    compiler_err_pat = re.compile("|".join(
        [re.escape(cc) for cc in pcom.rd_cfg(cdc_cfg_dic, "cdc", "err_str")]))
    run_compiler(cdc_time_dir, cdc_flist, compiler_err_pat)
    # a "failed" marker file from compilation skips the qverify run
    if not os.path.isfile(f"{cdc_time_dir}{os.sep}failed"):
        cdc_str = (f"cd {cdc_time_dir} && "
                   f"qverify -c -do {cdc_time_dir}{os.sep}cdc.tcl "
                   f"-od {cdc_time_dir}{os.sep}Output_Results")
        subprocess.run(cdc_str, shell=True)
        dtl_rpt = (f"{cdc_time_dir}{os.sep}Output_Results"
                   f"{os.sep}cdc_detail.rpt")
        if not os.path.isfile(dtl_rpt):
            # fixed: grammar of the error message
            raise Exception(f"report file {dtl_rpt} does not exist")
        # violations left over after applying the waiver config
        result_lst = gen_post_wv(dtl_rpt, wv_cfg_dic)
        if result_lst:
            with open(f"{cdc_time_dir}{os.sep}post_waiver.rpt", "w") as pwr:
                for wv_rule in result_lst:
                    pwr.write(wv_rule + os.linesep)
            LOG.warning(
                "post waived violations exists, "
                "please check post_waiver.rpt file")
def proc_uvm_gen(self):
    """Generate a module uvm env from the pj_gen template tree.

    Mirrors the pj_gen directory structure under the new module path and
    renders every template file, expanding per-agent copies for files
    listed in the "multiple" config.

    Raises:
        Exception: if the target module path already exists or the
            pj_gen template dir is missing.
    """
    m_dir = self.module_dir if self.module_dir else self.ced["PROJ_VERIF"]
    module_path = os.path.expandvars(f"{m_dir}{os.sep}{self.module}")
    if os.path.isdir(module_path):
        raise Exception(
            f"module path you typed {module_path} has already existed")
    self.gen_data_struc()
    pj_gen_dir = f"{self.ced['SHARE_TEMPLATES']}{os.sep}pj_gen"
    if not os.path.isdir(pj_gen_dir):
        raise Exception(f"pj_gen dir {pj_gen_dir} is NA")
    # mirror the template directory tree under the new module path
    for sub_dir in pcom.find_iter(pj_gen_dir, "*", dir_flg=True):
        dest_dir = sub_dir.replace(pj_gen_dir, module_path)
        os.makedirs(dest_dir, exist_ok=True)
        # consistency fix: lazy %-style logging args, as used elsewhere
        # in this file, instead of eagerly-formatted f-strings
        LOG.info("create a new %s directory.", dest_dir)
    for temp_file in pcom.find_iter(pj_gen_dir, "*"):
        t_fn = os.path.basename(temp_file)
        if t_fn in self.filter_lst:
            continue
        LOG.info("template file is %s", t_fn)
        tf_str = temp_file.replace(pj_gen_dir, module_path)
        # files whose names start with "_" get the module name prefixed
        blk_n = self.module if t_fn.startswith("_") else ""
        if t_fn in pcom.rd_cfg(self.cfg_dic["proj"], "gen_agt", "multiple"):
            # render one copy per agent, each with its own agt_name
            mul_dic = copy.deepcopy(self.base_dic)
            del mul_dic["agt_name_lst"]
            for agt_name in self.base_dic["agt_name_lst"]:
                mul_dic["agt_name"] = agt_name
                pcom.ren_tempfile(
                    temp_file,
                    tf_str.replace(t_fn, f"{blk_n}_{agt_name}{t_fn}"),
                    mul_dic)
        else:
            pcom.ren_tempfile(
                temp_file, tf_str.replace(t_fn, f"{blk_n}{t_fn}"),
                self.base_dic)
    LOG.info("module %s uvm env generated", self.module)
def proc_reg(self, reg_module_lst):
    """reg process main

    Drives the whole register flow: for each module json config it
    generates the register RTL, a per-module HW worksheet, and ralf
    data, then emits the combined reg.ralf file and the SW/HW xlsx
    register documents.

    Args:
        reg_module_lst: list of module config names to process; when
            empty, every *.json under the reg cfg dir is processed.

    Raises:
        Exception: if a requested reg cfg json file does not exist.
    """
    proc_dic = {}
    proc_dic["reg_doc_dir"] = os.path.abspath(
        os.path.expandvars(
            pcom.rd_cfg(self.cfg_dic["proj"], "reg_dir", "doc", True)))
    # two workbooks: hardware view (one sheet per module) and a single
    # software (CPU) view sheet
    proc_dic["workbook_hw"] = xlsxwriter.Workbook(
        f"{proc_dic['reg_doc_dir']}{os.sep}YJD_register.xlsx")
    proc_dic["workbook_sw"] = xlsxwriter.Workbook(
        f"{proc_dic['reg_doc_dir']}{os.sep}CPU_register.xlsx")
    # NOTE(review): "regsiter" looks like a typo for "register", but the
    # worksheet name is externally visible — confirm before renaming
    proc_dic["sheet_sw"] = proc_dic["workbook_sw"].add_worksheet(
        "CPU1_regsiter")
    proc_dic["format_sw"] = proc_dic["workbook_sw"].add_format(
        {"font_size": "15"})
    proc_dic["reg_rtl_dir"] = os.path.abspath(
        os.path.expandvars(
            pcom.rd_cfg(self.cfg_dic["proj"], "reg_dir", "rtl", True)))
    proc_dic["reg_cfg_dir"] = f"{self.ced['SHARE_CONFIG']}{os.sep}pj_reg"
    proc_dic[
        "reg_temp_dir"] = f"{self.ced['SHARE_TEMPLATES']}{os.sep}pj_reg"
    # explicit module list wins; otherwise scan the cfg dir for json files
    proc_dic["reg_cfg_iter"] = [
        f"{proc_dic['reg_cfg_dir']}{os.sep}{cc}.json"
        for cc in reg_module_lst
    ] if reg_module_lst else pcom.find_iter(proc_dic["reg_cfg_dir"], "*.json")
    proc_dic["reg_ralf_dir"] = os.path.abspath(
        os.path.expandvars(
            pcom.rd_cfg(self.cfg_dic["proj"], "reg_dir", "ralf", True)))
    # ralf_dic accumulates per-module data plus a running byte total
    proc_dic["ralf_dic"] = {"blk_bytes": 0}
    proc_dic["public_reg_dic"] = {}
    for reg_cfg_json in proc_dic["reg_cfg_iter"]:
        if not os.path.isfile(reg_cfg_json):
            raise Exception(f"reg cfg file {reg_cfg_json} is NA")
        LOG.info("processing reg config file %s", reg_cfg_json)
        # sin_dic holds the per-module (single) derived names and handles
        sin_dic = {}
        sin_dic["json_dir"], sin_dic["json_name"] = os.path.split(
            reg_cfg_json)
        sin_dic["module_cname"], _ = os.path.splitext(sin_dic["json_name"])
        sin_dic["module_name"] = sin_dic["module_cname"].lower()
        # rtl output mirrors the cfg dir layout under the rtl dir
        sin_dic["rtl_dir"] = sin_dic["json_dir"].replace(
            proc_dic["reg_cfg_dir"], proc_dic["reg_rtl_dir"])
        os.makedirs(sin_dic["rtl_dir"], exist_ok=True)
        sin_dic["sheet_hw"] = proc_dic["workbook_hw"].add_worksheet(
            sin_dic["module_cname"])
        sin_dic["format_hw"] = proc_dic["workbook_hw"].add_format(
            {"font_size": "15"})
        # configs may contain Chinese text, hence the gb18030 encoding
        with open(reg_cfg_json, encoding="gb18030") as file:
            data = json.load(file)
        data_dic = self.expand_data(data)
        rtl_file = f"{sin_dic['rtl_dir']}{os.sep}{sin_dic['module_name']}.v"
        LOG.info(f"generating reg rtl file {rtl_file}")
        pcom.ren_tempfile(
            f"{proc_dic['reg_temp_dir']}{os.sep}reg_base.v", rtl_file, {
                "module_name": sin_dic["module_name"],
                "data": self.fmt_v_data(data_dic)
            })
        self.gen_xls(data_dic, sin_dic["sheet_hw"], sin_dic["format_hw"])
        self.fmt_sw_data(data_dic)
        ralf_data_dic = self.fmt_ralf_data(data_dic)
        proc_dic["ralf_dic"][sin_dic["module_name"]] = ralf_data_dic
        proc_dic["ralf_dic"]["blk_bytes"] += ralf_data_dic["blk_bytes"]
        self.check_public_register(proc_dic["public_reg_dic"],
                                   proc_dic["ralf_dic"], ralf_data_dic)
    os.makedirs(proc_dic["reg_ralf_dir"], exist_ok=True)
    ralf_file = f"{proc_dic['reg_ralf_dir']}{os.sep}reg.ralf"
    LOG.info(f"generating reg ralf file {ralf_file}")
    pcom.ren_tempfile(
        f"{proc_dic['reg_temp_dir']}{os.sep}reg_base.ralf", ralf_file, {
            "public_data": proc_dic["public_reg_dic"],
            "data": proc_dic["ralf_dic"]
        })
    # SW sheet: non-MSR ("NA") registers first, then MSR registers
    # sorted by their numeric MSR address (parsed from the "...h<hex>"
    # form — assumed format, TODO confirm against fmt_sw_data output)
    sw_data_dic = OrderedDict(self.sw_rtl_dic["NA"])
    self.sw_rtl_dic["MSR"] = OrderedDict(
        sorted(self.sw_rtl_dic["MSR"].items(), key=lambda reg: int(
            f'0x{reg[1]["MSR_address"].split("h")[1]}', 16)))
    sw_data_dic.update(self.sw_rtl_dic["MSR"])
    self.gen_xls(sw_data_dic, proc_dic["sheet_sw"], proc_dic["format_sw"])
    LOG.info(f"generating reg doc file in {proc_dic['reg_doc_dir']}")
    os.makedirs(proc_dic["reg_doc_dir"], exist_ok=True)
    # closing the workbooks flushes the xlsx files to disk
    proc_dic["workbook_sw"].close()
    proc_dic["workbook_hw"].close()