def proc_dc(self):
    """Process and kick off the Design Compiler (DC) synthesis flow.

    Driven by boolean flags in self.dc_dic:
      gen_tcl   -- render tcl/sdc scripts from templates into tcl_dir
      run       -- launch dc_shell and parse its log periodically
      tm_flg    -- parse the final timing report
      formality -- build an fm config and run formality equivalence check
    """
    if self.dc_dic["gen_tcl"]:
        self.gen_dir_dic()
        LOG.info("generating the tcl&sdc files from templates")
        dc_temp_dir = f"{self.src_dir}{os.sep}dc_template"
        for tcl_name in self.dc_cfg_dic.sections():
            if tcl_name == "base_args" or tcl_name == "set_args":
                # both config sections feed a single rendered set_args.tcl
                pcom.ren_tempfile(
                    f"{dc_temp_dir}{os.sep}set_args.tcl",
                    f"{self.dir_dic['tcl_dir']}{os.sep}set_args.tcl",
                    {"base_arg_dic": self.dc_cfg_dic["base_args"],
                     "set_arg_dic": self.dc_cfg_dic["set_args"]})
            else:
                # section name encodes the template file name ('__' -> '.')
                tn_str = tcl_name.replace('__', '.')
                pcom.ren_tempfile(
                    f"{dc_temp_dir}{os.sep}{tn_str}",
                    f"{self.dir_dic['tcl_dir']}{os.sep}{tn_str}",
                    {"dc_dic": self.dc_cfg_dic})
    if self.dc_dic["run"]:
        dc_topo_str = (
            f"cd {self.dir_dic['dc_time_dir']} && "
            f"dc_shell {'-topo' if self.dc_dic['topo'] else ''} "
            f"-f {self.dir_dic['tcl_dir']}{os.sep}dc.tcl "
            f"-output_log_file {self.dir_dic['dc_time_dir']}{os.sep}dc.log ")
        try:
            proc = subprocess.Popen(dc_topo_str, shell=True)
            # poll every 3 minutes, re-parsing the growing log each time;
            # one final parse after dc_shell exits picks up the tail
            while proc.poll() is None:
                time.sleep(180)
                dc_log_parser.DcLogParser(self.ced, self.dc_cfg_dic).parse_log()
            dc_log_parser.DcLogParser(self.ced, self.dc_cfg_dic).parse_log()
        except KeyboardInterrupt:
            # Ctrl-C: salvage whatever log exists, then kill dc_shell
            dc_log_parser.DcLogParser(self.ced, self.dc_cfg_dic).parse_log()
            proc.kill()
    if self.dc_dic["tm_flg"]:
        # report file name comes from the DC config itself
        tm_rpt = os.path.join(self.dir_dic["reports_dir"], pcom.rd_cfg(
            self.dc_cfg_dic, "set_args", "DCRM_FINAL_TIMING_REPORT", True))
        tm_log_parser.TmParser(
            {"dt_file": tm_rpt,
             "level": self.dc_dic["tm_level"]}).parse_tm_log()
    if self.dc_dic["formality"]:
        if not shutil.which("fm_shell"):
            raise Exception("fm is not loaded")
        LOG.info("updating the fm_dic and running formality")
        # seed fm config from fm.cfg, then point it at the synthesis outputs
        fm_cfg_dic = pcom.gen_cfg([f"{self.src_dir}{os.sep}fm.cfg"])
        fm_cfg_dic["fm"]["imp_filelist"] = os.linesep.join(
            pcom.find_iter(self.dir_dic["results_dir"], "*.syn.v"))
        fm_cfg_dic["fm"]["set_svf"] = os.linesep.join(
            pcom.find_iter(self.dir_dic["results_dir"], "*.syn.svf"))
        fm_cfg_dic["fm"]["DESIGN_NAME"] = pcom.rd_cfg(
            self.dc_cfg_dic, "base_args", "DESIGN_NAME", True)
        fm_cfg_dic["fm"]["ref_filelist"] = pcom.rd_cfg(
            self.dc_cfg_dic, "set_args", "rtl_files", True)
        fm_cfg_dic["fm"]["fm_top_dir"] = self.dir_dic["dc_time_dir"]
        fm_cfg_dic["fm"]["fm_src_dir"] = self.src_dir
        fm_runner.proc_fm(self.ced, fm_cfg_dic)
def gen_h_lst(self):
    """Generate the handle signal lists from testbench $fwrite statements.

    Scans every *.sv file under MODULE_TB for lines like
    ``$fwrite(handle0, "%b", sig);`` and appends the signal name to
    self.h_dic["h0"] / self.h_dic["h1"] depending on the handle used.

    Fixes vs. original: the regex is compiled once instead of per line,
    and a non-matching $fwrite line is skipped instead of crashing with
    AttributeError on ``re.search(...).group(1)``.
    """
    # captures the last ",name)" argument before the closing ");"
    sig_pat = re.compile(r",(\w+)\s*\)\s*;")
    for sv_file in pcom.find_iter(self.ced["MODULE_TB"], "*.sv"):
        with open(sv_file) as svf:
            for line in svf:
                line = line.strip()
                # only binary-format $fwrite dump statements are of interest
                if not (line.startswith("$fwrite") and "%b" in line):
                    continue
                mat = sig_pat.search(line)
                if not mat:
                    # malformed/unexpected $fwrite line: skip, don't crash
                    continue
                if "handle0" in line:
                    self.h_dic["h0"].append(mat.group(1))
                elif "handle1" in line:
                    self.h_dic["h1"].append(mat.group(1))
def proc_leda(self):
    """Process and kick off the Leda lint flow.

    Two mutually exclusive modes from self.leda_dic:
      leda_gen_log -- generate rule tcl, run leda, and grep its log for
                      configured error strings into leda_error.log
      leda_gui     -- re-launch leda in GUI mode on a previous run dir
    """
    if self.leda_dic["leda_gen_log"]:
        os.makedirs(self.df_dic["time_dir"], exist_ok=True)
        self.gen_flist_file()
        # rule/waiver tcl files are flattened into one rule list
        nwd_lst = []
        rule_lst = []
        for rule_file in pcom.find_iter(self.df_dic["rule_dir"], "*.tcl"):
            self.cov_file2rulelst(rule_file, nwd_lst, rule_lst)
        for waiver_file in pcom.find_iter(self.df_dic["waiver_dir"], "*.tcl"):
            self.cov_file2rulelst(waiver_file, nwd_lst, rule_lst)
        with open(self.df_dic["tcl"], "w") as ltf:
            ltf.write(os.linesep.join(rule_lst))
        self.kick_off_leda()
        # keep a stable "latest" copy of the timestamped log
        shutil.copyfile(
            self.df_dic["log"],
            f"{self.ced['FLOW_LEDA']}{os.sep}leda_latest.log")
        leda_error_flg = False
        # literal error strings from config, OR-ed into one regex
        str_pat = re.compile("|".join([
            re.escape(cc)
            for cc in pcom.rd_cfg(self.leda_cfg, "leda", "error_string")
        ]))
        # errors="replace": leda logs may contain non-UTF-8 bytes
        with open(self.df_dic["log"], errors="replace") as llf:
            with open(f"{self.df_dic['log_dir']}{os.sep}leda_error.log",
                      "w") as elf:
                for line in llf:
                    line = line.strip()
                    if str_pat.search(line):
                        elf.write(f"{line}{os.linesep}")
                        leda_error_flg = True
        if leda_error_flg:
            LOG.warning("ERRORs raised in the process of leda")
    elif self.leda_dic["leda_gui"]:
        leda_dir = os.path.abspath(
            os.path.expandvars(self.leda_dic["leda_dir"]))
        if not os.path.isdir(leda_dir):
            raise Exception(f"leda GUI loaded dir {leda_dir} is NA")
        leda_str = (f"cd {leda_dir} && "
                    f"leda +gui -config {leda_dir}{os.sep}leda.tcl "
                    f"-l {leda_dir}{os.sep}leda_logs{os.sep}leda.log")
        subprocess.run(leda_str, shell=True)
def run_cov(args):
    """Run the coverage (cov) sub command.

    Requires args.cov_module plus at least one action flag:
      cov_merge -- urg-merge all *.vdb dirs under COV_CM into one vdb
      cov_rpt   -- urg report generation from the merged vdb
      cov_verdi -- open the merged vdb in verdi (backgrounded with '&')
    Raises Exception when the required argument combination is missing
    or when the expected coverage data/merge dir does not exist.
    """
    if args.cov_module and (args.cov_merge or args.cov_rpt or args.cov_verdi):
        ced, _ = env_booter.EnvBooter().module_env(args.cov_module)
        # collect every exclusion (*.el) file as repeated -elfile options
        ef_str = ""
        for elf in pcom.find_iter(ced["MODULE_CONFIG"], "*.el"):
            ef_str += f"-elfile {elf} "
        cov_merge_vdb = f"{ced['COV_MERGE']}{os.sep}{ced['MODULE']}_merge.vdb"
        cd_str = f"cd {os.path.dirname(cov_merge_vdb)} && "
        cov_str = ""
        if args.cov_merge:
            vdb_lst = list(pcom.find_iter(ced["COV_CM"], "*.vdb", True))
            if not vdb_lst:
                raise Exception("no coverage data found")
            merge_lst_file = f"{ced['COV_MERGE']}{os.sep}merge_lst"
            os.makedirs(os.path.dirname(merge_lst_file), exist_ok=True)
            with open(merge_lst_file, "w") as mlf:
                for vdb_dir in vdb_lst:
                    mlf.write(f"{vdb_dir}{os.linesep}")
                # fold the previous merge result into this merge, if any
                if os.path.isdir(cov_merge_vdb):
                    mlf.write(cov_merge_vdb)
            cov_str = (f"{cd_str} urg -full64 -f {merge_lst_file} -dbname "
                       f"{os.path.basename(cov_merge_vdb)} -noreport {ef_str}")
            subprocess.run(cov_str, shell=True, check=True)
            # raw per-test data is disposable once merged
            for cc_dir in pcom.find_iter(ced["COV_CM"], "testdata", True):
                shutil.rmtree(cc_dir, ignore_errors=True)
        if args.cov_rpt:
            if not os.path.isdir(cov_merge_vdb):
                raise Exception(f"coverage merge dir {cov_merge_vdb} is NA")
            cov_str = (f"{cd_str} urg -full64 -dir {cov_merge_vdb} "
                       f"{ef_str} -format both")
            subprocess.run(cov_str, shell=True)
        if args.cov_verdi:
            if not os.path.isdir(cov_merge_vdb):
                raise Exception(f"coverage merge dir {cov_merge_vdb} is NA")
            # trailing '&' keeps the GUI alive after this command returns
            cov_str = f"{cd_str} verdi -cov -covdir {cov_merge_vdb} {ef_str} &"
            subprocess.run(cov_str, shell=True)
        LOG.info("running coverage of %s module done", args.cov_module)
    else:
        raise Exception("missing main arguments")
def __init__(self, ced, cfg_dic, mkg_dic):
    """Store env/config dicts and detect whether C/C++ sources exist.

    ced: environment dict (reads ced['MODULE_C']).
    cfg_dic: config dict, stored unchanged.
    mkg_dic: makefile-generation dict; this constructor sets its
        'clib_flg' (any C/C++ file under MODULE_C) and 'csrc_flg'
        (any C/C++ file under MODULE_C/src) boolean entries.
    """
    self.ced = ced
    self.cfg_dic = cfg_dic
    self.mkg_dic = mkg_dic
    c_src = f"{ced['MODULE_C']}{os.sep}src"

    def _has_c_file(src_dir):
        # True when src_dir exists and holds at least one C/C++ source.
        # any() short-circuits on the first file found instead of
        # materializing four full file lists as the old expression did.
        if not os.path.isdir(src_dir):
            return False
        return any(True for pat in ("*.c", "*.cpp", "*.cc", "*.cxx")
                   for _ in pcom.find_iter(src_dir, pat))

    # replaces the duplicated "True if ... else False" anti-pattern
    self.mkg_dic["clib_flg"] = _has_c_file(ced["MODULE_C"])
    self.mkg_dic["csrc_flg"] = _has_c_file(c_src)
    self.simv_lst = ["DEFAULT"]
def proc_uvm_gen(self):
    """Generate a new module UVM environment from the pj_gen templates.

    Mirrors the SHARE_TEMPLATES/pj_gen tree into the new module path,
    rendering each template file; files listed in the config section
    [gen_agt] multiple are rendered once per agent name.
    Raises Exception if the target module path already exists or the
    template dir is missing.
    """
    # destination root: explicit --dir wins over the project default
    m_dir = self.module_dir if self.module_dir else self.ced["PROJ_VERIF"]
    module_path = os.path.expandvars(f"{m_dir}{os.sep}{self.module}")
    if os.path.isdir(module_path):
        raise Exception(
            f"module path you typed {module_path} has already existed")
    self.gen_data_struc()
    pj_gen_dir = f"{self.ced['SHARE_TEMPLATES']}{os.sep}pj_gen"
    if not os.path.isdir(pj_gen_dir):
        raise Exception(f"pj_gen dir {pj_gen_dir} is NA")
    # first replicate the directory skeleton ...
    for sub_dir in pcom.find_iter(pj_gen_dir, "*", dir_flg=True):
        dest_dir = sub_dir.replace(pj_gen_dir, module_path)
        os.makedirs(dest_dir, exist_ok=True)
        LOG.info(f"create a new {dest_dir} directory.")
    # ... then render every template file into it
    for temp_file in pcom.find_iter(pj_gen_dir, "*"):
        t_fn = os.path.basename(temp_file)
        if t_fn in self.filter_lst:
            continue
        LOG.info(f"template file is {t_fn}")
        tf_str = temp_file.replace(pj_gen_dir, module_path)
        # leading underscore templates get the module name prefixed
        blk_n = self.module if t_fn.startswith("_") else ""
        if t_fn in pcom.rd_cfg(self.cfg_dic["proj"], "gen_agt", "multiple"):
            # per-agent rendering: deep copy so each render only sees
            # a single agt_name, not the whole agt_name_lst
            mul_dic = copy.deepcopy(self.base_dic)
            del mul_dic["agt_name_lst"]
            for agt_name in self.base_dic["agt_name_lst"]:
                mul_dic["agt_name"] = agt_name
                pcom.ren_tempfile(
                    temp_file,
                    tf_str.replace(t_fn, f"{blk_n}_{agt_name}{t_fn}"),
                    mul_dic)
        else:
            pcom.ren_tempfile(
                temp_file, tf_str.replace(t_fn, f"{blk_n}{t_fn}"),
                self.base_dic)
    LOG.info(f"module {self.module} uvm env generated")
def find_module_dir(ced, cfg_dic, module):
    """Locate a verification module directory under PROJ_VERIF.

    A valid module dir is one matching *module* that contains a
    ``config`` subdirectory. When none is found, run ``tree`` on
    PROJ_VERIF (minus configured ignores) and raise with its output so
    the user can see the available modules.
    """
    candidate_iter = (
        cand_dir
        for cand_dir in pcom.find_iter(ced["PROJ_VERIF"], module, True)
        if os.path.isdir(f"{cand_dir}{os.sep}config"))
    found_dir = next(candidate_iter, None)
    if found_dir is not None:
        return found_dir
    ignore_pat = "|".join(
        pcom.rd_cfg(cfg_dic["proj"], "proj", "tree_ignore"))
    tree_cmd = f"tree -d -I '(|{ignore_pat}|)' {ced['PROJ_VERIF']}"
    tree_out = subprocess.run(
        tree_cmd, shell=True, check=True,
        stdout=subprocess.PIPE).stdout.decode()
    raise Exception(
        f"module {module} is NA; the possible module is {os.linesep}{tree_out}"
    )
def run_ac(args):
    """Run the emacs verilog-mode auto-connection sub command.

    For every ``*.ac.v`` file under args.ac_dir, copy it to the same
    name without the ``.ac`` part and let emacs batch-mode expand the
    verilog AUTO templates in place.
    """
    if not shutil.which("emacs"):
        raise Exception("emacs is not loaded")
    if not args.ac_dir:
        raise Exception("missing main arguments")
    ac_dir = os.path.abspath(os.path.expandvars(args.ac_dir))
    if not os.path.isdir(ac_dir):
        raise Exception(f"auto connection directory {ac_dir} is NA")
    for src_file in pcom.find_iter(ac_dir, "*.ac.v"):
        # "name.ac.v" -> strip twice -> "name" + ".v"
        stem, v_ext = os.path.splitext(src_file)
        tar_file = f"{os.path.splitext(stem)[0]}{v_ext}"
        shutil.copyfile(src_file, tar_file)
        subprocess.run(
            f"emacs --batch {tar_file} -f verilog-auto -f save-buffer",
            shell=True)
        LOG.info("emacs auto connection generated file %s done", tar_file)
def clean_module(args, module_name):
    """Clean verification module level directories per the args flags.

    Collects target dirs into clean_lst, removes them all via rmtree,
    then restores MODULE_TB / MODULE_CONFIG from svn at the revision
    captured *before* deletion (clean_tb / clean_config are destructive
    reverts, hence the CAUTION logs).
    """
    ced, _ = env_booter.EnvBooter().module_env(module_name)
    clean_lst = []
    if args.clean_case_lst:
        # explicit case names; the "if cc" drops empty strings
        clean_lst.extend([
            f"{ced['MODULE_OUTPUT']}{os.sep}{cc}"
            for cc in args.clean_case_lst if cc
        ])
    if args.clean_case:
        # all case output dirs except dunder-style internal ones
        for case_dir in pcom.find_iter(ced["MODULE_OUTPUT"], "*", True, True):
            base_case_dir = os.path.basename(case_dir)
            if not (base_case_dir.startswith("__")
                    and base_case_dir.endswith("__")):
                clean_lst.append(case_dir)
    if args.clean_cov:
        clean_lst.append(ced["OUTPUT_COV"])
    if args.clean_output:
        clean_lst.append(ced["MODULE_OUTPUT"])
    if args.clean_tb:
        LOG.info(
            "CAUTION!!! you are reverting %s directory, in which all "
            "modified/private "
            "files/directories will be removed!!!", ced["MODULE_TB"])
        # capture the current svn revision before the dir is removed
        svn_tb_ver = pcom.gen_svn_ver(ced["MODULE_TB"])
        clean_lst.append(ced["MODULE_TB"])
    if args.clean_config:
        LOG.info(
            "CAUTION!!! you are reverting %s directory, in which all "
            "modified/private "
            "files/directories will be removed!!!", ced["MODULE_CONFIG"])
        svn_cfg_ver = pcom.gen_svn_ver(ced["MODULE_CONFIG"])
        clean_lst.append(ced["MODULE_CONFIG"])
    rmtree(clean_lst)
    # NOTE(review): "-{ver}" suggests gen_svn_ver returns something like
    # "r1234" so this forms "svn up ... -r1234" — confirm against pcom
    if args.clean_tb:
        subprocess.run(f"svn up {ced['MODULE_TB']} -{svn_tb_ver}",
                       shell=True, check=True, stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE)
    if args.clean_config:
        subprocess.run(f"svn up {ced['MODULE_CONFIG']} -{svn_cfg_ver}",
                       shell=True, check=True, stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE)
def module_env(self, sim_module):
    """Boot verification module level environments (used only by pj).

    Builds on boot_env(): resolves the module dir, exports module env
    vars, loads c/simv/case config files, and layers project-level
    env_c/env_simv/env_case defaults under each config's DEFAULT
    section (config values win over project defaults).

    :param sim_module: module name to boot
    :returns: (self.ced, self.cfg_dic) tuple of env and config dicts
    """
    self.boot_env()
    self.ced["MODULE"] = os.environ["MODULE"] = sim_module
    self.ced["PROJ_MODULE"] = os.environ["PROJ_MODULE"] = find_module_dir(
        self.ced, self.cfg_dic, sim_module)
    # optional extra per-module env vars from the project config
    for env_key, env_value in (self.cfg_dic["proj"]["module_env"]
                               if "module_env" in self.cfg_dic["proj"]
                               else {}).items():
        os.environ[env_key] = os.path.expandvars(env_value)
        self.ced[env_key] = os.path.expandvars(env_value)
    # c.cfg is optional: fall back to an empty path for gen_cfg
    c_cfg = f"{self.ced['MODULE_CONFIG']}{os.sep}c.cfg"
    if not os.path.isfile(c_cfg):
        c_cfg = ""
    self.cfg_dic["c"] = pcom.gen_cfg([c_cfg])
    # simv.cfg and case.cfg are mandatory
    simv_cfg = f"{self.ced['MODULE_CONFIG']}{os.sep}simv.cfg"
    if not os.path.isfile(simv_cfg):
        raise Exception(f"simv config file {simv_cfg} is NA")
    self.cfg_dic["simv"] = pcom.gen_cfg([simv_cfg])
    case_cfg = f"{self.ced['MODULE_CONFIG']}{os.sep}case.cfg"
    if not os.path.isfile(case_cfg):
        raise Exception(f"case config file {case_cfg} is NA")
    case_cfg_lst = [case_cfg]
    for cfg_file in pcom.find_iter(self.ced["MODULE_CONFIG"], "case_*.cfg"):
        LOG.info("more case config file %s", cfg_file)
        case_cfg_lst.append(cfg_file)
    # reversed so case.cfg ends up last — presumably so it takes
    # precedence in gen_cfg; confirm against pcom.gen_cfg semantics
    case_cfg_lst.reverse()
    self.cfg_dic["case"] = pcom.gen_cfg(case_cfg_lst)
    # copy project-level defaults, then overlay each config's DEFAULT
    # section on top, and write the merged result back
    c_module_env = copy.copy(self.cfg_dic["proj"]["env_c"]
                             if "env_c" in self.cfg_dic["proj"] else {})
    simv_module_env = copy.copy(self.cfg_dic["proj"]["env_simv"]
                                if "env_simv" in self.cfg_dic["proj"] else {})
    case_module_env = copy.copy(self.cfg_dic["proj"]["env_case"]
                                if "env_case" in self.cfg_dic["proj"] else {})
    c_module_env.update(self.cfg_dic["c"]["DEFAULT"])
    simv_module_env.update(self.cfg_dic["simv"]["DEFAULT"])
    case_module_env.update(self.cfg_dic["case"]["DEFAULT"])
    self.cfg_dic["c"]["DEFAULT"] = c_module_env
    self.cfg_dic["simv"]["DEFAULT"] = simv_module_env
    self.cfg_dic["case"]["DEFAULT"] = case_module_env
    return self.ced, self.cfg_dic
def test_find_iter(self):
    """Test pcom.find_iter file/dir matching in a temporary tree.

    Bug fix: test_test_dir_tup was a generator expression, so
    ``test_dir_tup + test_test_dir_tup`` raised TypeError (tuple +
    generator) — and even with a valid concatenation the generator
    would have been exhausted on first use while it is consumed twice
    below. It is now materialized as a real tuple.
    """
    test_dir_tup = (f"{self.base_dir}{os.sep}test1",
                    f"{self.base_dir}{os.sep}test2",
                    f"{self.base_dir}{os.sep}test3")
    test_tup = (f"{self.base_dir}{os.sep}test.log",
                f"{self.base_dir}{os.sep}test.txt",
                f"{self.base_dir}{os.sep}test.cfg")
    test1_tup = (f"{test_dir_tup[0]}{os.sep}test1.log",
                 f"{test_dir_tup[0]}{os.sep}test1.txt",
                 f"{test_dir_tup[0]}{os.sep}test1.cfg")
    test2_tup = (f"{test_dir_tup[1]}{os.sep}test2.log",
                 f"{test_dir_tup[1]}{os.sep}test2.txt",
                 f"{test_dir_tup[1]}{os.sep}test2.cfg")
    test3_tup = (f"{test_dir_tup[2]}{os.sep}test3.log",
                 f"{test_dir_tup[2]}{os.sep}test3.txt",
                 f"{test_dir_tup[2]}{os.sep}test3.cfg")
    # was a generator expression; must be a tuple (used twice below)
    test_test_dir_tup = tuple(f"{cc}{os.sep}test" for cc in test_dir_tup)
    for test_dir in test_dir_tup + test_test_dir_tup:
        os.makedirs(test_dir)
    for test_file in test_tup + test1_tup + test2_tup + test3_tup:
        open(test_file, "w").close()
    # recursive file match
    self.assertEqual(
        set(pcom.find_iter(self.base_dir, "*.log")),
        {test_tup[0], test1_tup[0], test2_tup[0], test3_tup[0]})
    self.assertEqual(set(pcom.find_iter(self.base_dir, "*test1*")),
                     set(test1_tup))
    # dir_flg=True matches directories only
    self.assertEqual(set(pcom.find_iter(self.base_dir, "*.log", True)),
                     set())
    self.assertEqual(set(pcom.find_iter(self.base_dir, "*test*", True)),
                     set(test_dir_tup + test_test_dir_tup))
    # cur_flg=True restricts matching to the top directory
    self.assertEqual(
        set(pcom.find_iter(self.base_dir, "*.log", cur_flg=True)),
        {test_tup[0]})
    self.assertEqual(
        set(pcom.find_iter(test_dir_tup[0], "*.log", cur_flg=True)),
        {test1_tup[0]})
    # dirs only, current level only
    self.assertEqual(
        set(pcom.find_iter(self.base_dir, "*test*", True, True)),
        set(test_dir_tup))
def gen_simv_dic(self, v_n):
    """Generate the simv related dict used to render the jinja2 makefile.

    :param v_n: simv config section name
    :returns: dict of tool names, option strings, file lists and the
              per-simv ``file__*`` extra file contents
    """
    self.chk_simv_cfg(v_n)
    simv_dic = {"name": v_n}
    simv_dic["wave_format"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                          "wave_format", True, "fsdb")
    simv_dic["cda_opts"] = self.gen_cda_opts(v_n)
    simv_dic["cta_opts"] = self.gen_cta_opts(v_n)
    # ce_opts generation reads back what is already in simv_dic
    simv_dic["ce_opts"] = self.gen_ce_opts(v_n, simv_dic)
    simv_dic["upf_flg"] = True if pcom.rd_cfg(
        self.cfg_dic["simv"], v_n, "upf") == ["on"] else False
    smf_lst = self.gen_smf_lst(v_n)
    # DUT file list: configured flist plus supplementary smf entries
    dut_flist_name = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                 "dut_flist", True, "rtl.flist")
    dut_flist_lst = [
        f"{self.ced['MODULE_FLIST']}{os.sep}{dut_flist_name}"
    ] + smf_lst
    df_tup = filelst_gen.FilelstGen().gen_file_lst(dut_flist_lst)
    simv_dic["dut_dir_lst"], simv_dic["dut_file_lst"], simv_dic[
        "vhdl_file_lst"] = df_tup
    self.chk_simv_flist(v_n, df_tup, False)
    # TB file list: same mechanism, tb_flg=True for the check
    tb_flist_name = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                "tb_flist", True, "tb.flist")
    tb_flist_lst = [f"{self.ced['MODULE_FLIST']}{os.sep}{tb_flist_name}"]
    tf_tup = filelst_gen.FilelstGen().gen_file_lst(tb_flist_lst)
    simv_dic["tb_dir_lst"], simv_dic["tb_file_lst"], _ = tf_tup
    self.chk_simv_flist(v_n, tf_tup, True)
    # dependency files: every file inside each +incdir+ directory
    simv_dic["tb_dep_file_lst"] = [
        ddf for did in simv_dic["dut_dir_lst"] for ddf in pcom.find_iter(
            did.replace("+incdir+", ""), "*", cur_flg=True, i_str="\\")
    ] + [
        tdf for tid in simv_dic["tb_dir_lst"] for tdf in pcom.find_iter(
            tid.replace("+incdir+", ""), "*", cur_flg=True, i_str="\\")
    ]
    # per-tool analysis/elab option strings; option key names embed the
    # selected tool name, e.g. "at_vlogan_dut_ana_opts"
    simv_dic["vhdl_tool"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                        "vhdl_tool", True, "vhdlan")
    simv_dic["vhdl_da_opts"] = " ".join(
        pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                    f"vt_{simv_dic['vhdl_tool']}_dut_ana_opts"))
    simv_dic["vhdl_ta_opts"] = " ".join(
        pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                    f"vt_{simv_dic['vhdl_tool']}_tb_ana_opts"))
    simv_dic["ana_tool"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                       "ana_tool", True, "vlogan")
    simv_dic["da_opts"] = " ".join(
        pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                    f"at_{simv_dic['ana_tool']}_dut_ana_opts"))
    simv_dic["ta_opts"] = " ".join(
        pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                    f"at_{simv_dic['ana_tool']}_tb_ana_opts"))
    simv_dic["elab_tool"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                        "elab_tool", True, "vcs")
    simv_dic["e_opts"] = " ".join(
        pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                    f"et_{simv_dic['elab_tool']}_elab_opts"))
    simv_dic["w_opts"] = " ".join(
        pcom.rd_cfg(self.cfg_dic["simv"], v_n, "verdi_opts"))
    simv_dic["tb_top"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                     "tb_top", True, "test_top")
    simv_dic["power_top"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                        "power_top", True, "chip_top")
    simv_dic["pre_cmd_lst"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                          "pre_cmd")
    simv_dic["post_cmd_lst"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                           "post_cmd")
    # "file__<name>" options become extra generated files; "$" is
    # doubled — presumably to survive makefile expansion; confirm
    simv_dic["file_dic"] = {}
    for opt_name, opt_cont in self.cfg_dic["simv"][v_n].items():
        if not opt_name.startswith("file__"):
            continue
        simv_dic["file_dic"][opt_name[6:]] = opt_cont.replace(
            "$", "$$").replace("\\", "").split(os.linesep)
    return simv_dic
def proc_reg(self, reg_module_lst):
    """Register-generation main: render RTL, xlsx docs, and a ralf file.

    For each register config json (either the modules listed in
    reg_module_lst or every json under the shared pj_reg config dir),
    generates a per-module .v file, a hardware xlsx sheet, and folds
    the data into a combined ralf description plus a software xlsx.

    :param reg_module_lst: list of module config names, or falsy for all
    """
    proc_dic = {}
    proc_dic["reg_doc_dir"] = os.path.abspath(
        os.path.expandvars(
            pcom.rd_cfg(self.cfg_dic["proj"], "reg_dir", "doc", True)))
    # NOTE(review): the workbooks target reg_doc_dir but the
    # makedirs(reg_doc_dir) only happens near the end — works because
    # xlsxwriter writes the file on close(), not on construction
    proc_dic["workbook_hw"] = xlsxwriter.Workbook(
        f"{proc_dic['reg_doc_dir']}{os.sep}YJD_register.xlsx")
    proc_dic["workbook_sw"] = xlsxwriter.Workbook(
        f"{proc_dic['reg_doc_dir']}{os.sep}CPU_register.xlsx")
    # NOTE(review): "CPU1_regsiter" typo is in the shipped sheet name;
    # kept byte-identical since consumers may reference it
    proc_dic["sheet_sw"] = proc_dic["workbook_sw"].add_worksheet(
        "CPU1_regsiter")
    proc_dic["format_sw"] = proc_dic["workbook_sw"].add_format(
        {"font_size": "15"})
    proc_dic["reg_rtl_dir"] = os.path.abspath(
        os.path.expandvars(
            pcom.rd_cfg(self.cfg_dic["proj"], "reg_dir", "rtl", True)))
    proc_dic["reg_cfg_dir"] = f"{self.ced['SHARE_CONFIG']}{os.sep}pj_reg"
    proc_dic[
        "reg_temp_dir"] = f"{self.ced['SHARE_TEMPLATES']}{os.sep}pj_reg"
    # explicit module list wins; otherwise scan for every *.json config
    proc_dic["reg_cfg_iter"] = [
        f"{proc_dic['reg_cfg_dir']}{os.sep}{cc}.json"
        for cc in reg_module_lst
    ] if reg_module_lst else pcom.find_iter(proc_dic["reg_cfg_dir"],
                                            "*.json")
    proc_dic["reg_ralf_dir"] = os.path.abspath(
        os.path.expandvars(
            pcom.rd_cfg(self.cfg_dic["proj"], "reg_dir", "ralf", True)))
    proc_dic["ralf_dic"] = {"blk_bytes": 0}
    proc_dic["public_reg_dic"] = {}
    for reg_cfg_json in proc_dic["reg_cfg_iter"]:
        if not os.path.isfile(reg_cfg_json):
            raise Exception(f"reg cfg file {reg_cfg_json} is NA")
        LOG.info("processing reg config file %s", reg_cfg_json)
        sin_dic = {}
        sin_dic["json_dir"], sin_dic["json_name"] = os.path.split(
            reg_cfg_json)
        sin_dic["module_cname"], _ = os.path.splitext(sin_dic["json_name"])
        sin_dic["module_name"] = sin_dic["module_cname"].lower()
        # mirror the config subtree layout into the rtl output tree
        sin_dic["rtl_dir"] = sin_dic["json_dir"].replace(
            proc_dic["reg_cfg_dir"], proc_dic["reg_rtl_dir"])
        os.makedirs(sin_dic["rtl_dir"], exist_ok=True)
        sin_dic["sheet_hw"] = proc_dic["workbook_hw"].add_worksheet(
            sin_dic["module_cname"])
        sin_dic["format_hw"] = proc_dic["workbook_hw"].add_format(
            {"font_size": "15"})
        # gb18030: register config jsons may contain Chinese text
        with open(reg_cfg_json, encoding="gb18030") as file:
            data = json.load(file)
        data_dic = self.expand_data(data)
        rtl_file = (
            f"{sin_dic['rtl_dir']}{os.sep}{sin_dic['module_name']}.v")
        LOG.info(f"generating reg rtl file {rtl_file}")
        pcom.ren_tempfile(
            f"{proc_dic['reg_temp_dir']}{os.sep}reg_base.v", rtl_file, {
                "module_name": sin_dic["module_name"],
                "data": self.fmt_v_data(data_dic)
            })
        self.gen_xls(data_dic, sin_dic["sheet_hw"], sin_dic["format_hw"])
        self.fmt_sw_data(data_dic)
        ralf_data_dic = self.fmt_ralf_data(data_dic)
        proc_dic["ralf_dic"][sin_dic["module_name"]] = ralf_data_dic
        # accumulate total block size across all modules
        proc_dic["ralf_dic"]["blk_bytes"] += ralf_data_dic["blk_bytes"]
        self.check_public_register(proc_dic["public_reg_dic"],
                                   proc_dic["ralf_dic"], ralf_data_dic)
    os.makedirs(proc_dic["reg_ralf_dir"], exist_ok=True)
    ralf_file = f"{proc_dic['reg_ralf_dir']}{os.sep}reg.ralf"
    LOG.info(f"generating reg ralf file {ralf_file}")
    pcom.ren_tempfile(
        f"{proc_dic['reg_temp_dir']}{os.sep}reg_base.ralf", ralf_file, {
            "public_data": proc_dic["public_reg_dic"],
            "data": proc_dic["ralf_dic"]
        })
    # software sheet: NA registers first, then MSR registers sorted by
    # their MSR address (parsed from a verilog-style "...h<hex>" string)
    sw_data_dic = OrderedDict(self.sw_rtl_dic["NA"])
    self.sw_rtl_dic["MSR"] = OrderedDict(
        sorted(self.sw_rtl_dic["MSR"].items(),
               key=lambda reg: int(
                   f'0x{reg[1]["MSR_address"].split("h")[1]}', 16)))
    sw_data_dic.update(self.sw_rtl_dic["MSR"])
    self.gen_xls(sw_data_dic, proc_dic["sheet_sw"], proc_dic["format_sw"])
    LOG.info(f"generating reg doc file in {proc_dic['reg_doc_dir']}")
    os.makedirs(proc_dic["reg_doc_dir"], exist_ok=True)
    proc_dic["workbook_sw"].close()
    proc_dic["workbook_hw"].close()