Example #1
 def proc_home_sheet(self, w_s):
     """to process generated vplan home sheet"""
     w_s.title = "home"
     home_row_lst = pcom.rd_cfg(self.cfg_dic["proj"], "vplan_sheets", "home")
     home_row_lst.insert(5, "")
     for index, row in enumerate(home_row_lst):
         if index == 5:
             continue
         cell = w_s[f"a{index+1}"]
         cell.value = row
         if self.gen_flg:
             cell.style = "Accent1"
         cell.alignment = Alignment(wrap_text=True)
         next_cell = w_s[f"b{index+1}"]
         if row == "Project":
             next_cell.value = self.ced["PROJ_NAME"]
         elif row == "Module Name":
             next_cell.value = self.ced["MODULE"]
         elif row == "Case Passing Rate":
             d_v = self.v_dic["tpn"]/self.v_dic["ttn"] if self.v_dic["ttn"] else 0
             cpr = str(round(100*d_v, 2))
             next_cell.value = f"{cpr} % ({self.v_dic['tpn']}/{self.v_dic['ttn']})"
             next_cell.fill = PatternFill(
                 fill_type="gray125", end_color=self.gen_per_color(cpr))
         elif row == "Code Coverage Score":
             next_cell.value = f"{self.v_dic['ccs']} %"
             next_cell.fill = PatternFill(
                 fill_type="gray125", end_color=self.gen_per_color(self.v_dic["ccs"]))
         elif row == "Function Coverage Per":
             next_cell.value = f"{self.v_dic['fcp']} %"
             next_cell.fill = PatternFill(
                 fill_type="gray125", end_color=self.gen_per_color(self.v_dic["fcp"]))
     w_s.column_dimensions["a"].width = pcom.rd_cfg(
         self.cfg_dic["proj"], "vplan_column_width", w_s.title, True)
     w_s.column_dimensions["b"].width = w_s.column_dimensions["a"].width
Example #2
 def parse_icc_log(self, log_file):
     """to parse icc log file"""
     if not os.path.isfile(log_file):
         raise Exception(f"{log_file} is not existed")
     err_lst = []
     warn_lst = []
     f_pat = re.compile("|".join(
         [re.escape(cc) for cc in pcom.rd_cfg(self.icc_cfg_dic, "icc_string", "fail_string")]))
     w_pat = re.compile("|".join(
         [re.escape(cc) for cc in pcom.rd_cfg(self.icc_cfg_dic, "icc_string", "warn_string")]))
     i_pat = re.compile(
         (r"^$|"+"|".join([re.escape(cc) for cc in pcom.rd_cfg(
             self.icc_cfg_dic, "icc_string", "ignore_string")])).strip("|"))
     with open(log_file) as lgf:
         for line in lgf:
             line = line.strip()
             if i_pat.search(line):
                 continue
             if f_pat.search(line):
                 err_lst.append(line)
             elif w_pat.search(line):
                 warn_lst.append(line)
     if warn_lst:
         with open(self.dir_f_dic["warn_rpt"], "a") as warnf:
             warnf.write(f"{os.linesep*3}{log_file}{os.linesep}")
             for warn in warn_lst:
                 warnf.write(f"{warn}{os.linesep}")
     with open(self.dir_f_dic["err_rpt"], "w") as errf:
         if err_lst:
             errf.write(f"{log_file}{os.linesep}")
             for err in err_lst:
                 errf.write(f"{err}{os.linesep}")
             raise Exception(f"Errors generated")
         else:
             errf.write(f"Errors not found")
Example #3
 def proc_dc(self):
     """to process and kick off dc flow"""
     if self.dc_dic["gen_tcl"]:
         self.gen_dir_dic()
         LOG.info("generating the tcl&sdc files from templates")
         dc_temp_dir = f"{self.src_dir}{os.sep}dc_template"
         for tcl_name in self.dc_cfg_dic.sections():
             if tcl_name == "base_args" or tcl_name == "set_args":
                 pcom.ren_tempfile(
                     f"{dc_temp_dir}{os.sep}set_args.tcl",
                     f"{self.dir_dic['tcl_dir']}{os.sep}set_args.tcl",
                     {"base_arg_dic": self.dc_cfg_dic["base_args"],
                      "set_arg_dic": self.dc_cfg_dic["set_args"]})
             else:
                 tn_str = tcl_name.replace('__', '.')
                 pcom.ren_tempfile(
                     f"{dc_temp_dir}{os.sep}{tn_str}",
                     f"{self.dir_dic['tcl_dir']}{os.sep}{tn_str}",
                     {"dc_dic":self.dc_cfg_dic})
     if self.dc_dic["run"]:
         dc_topo_str = (
             f"cd {self.dir_dic['dc_time_dir']} && "
             f"dc_shell {'-topo' if self.dc_dic['topo'] else ''} "
             f"-f {self.dir_dic['tcl_dir']}{os.sep}dc.tcl "
             f"-output_log_file {self.dir_dic['dc_time_dir']}{os.sep}dc.log ")
         try:
             proc = subprocess.Popen(dc_topo_str, shell=True)
             while proc.poll() is None:
                 time.sleep(180)
                 dc_log_parser.DcLogParser(self.ced, self.dc_cfg_dic).parse_log()
             dc_log_parser.DcLogParser(self.ced, self.dc_cfg_dic).parse_log()
         except KeyboardInterrupt:
             dc_log_parser.DcLogParser(self.ced, self.dc_cfg_dic).parse_log()
             proc.kill()
         if self.dc_dic["tm_flg"]:
             tm_rpt = os.path.join(self.dir_dic["reports_dir"], pcom.rd_cfg(
                 self.dc_cfg_dic, "set_args", "DCRM_FINAL_TIMING_REPORT", True))
             tm_log_parser.TmParser(
                 {"dt_file": tm_rpt, "level": self.dc_dic["tm_level"]}).parse_tm_log()
         if self.dc_dic["formality"]:
             if not shutil.which("fm_shell"):
                 raise Exception("fm is not loaded")
             LOG.info("updating the fm_dic and running formality")
             fm_cfg_dic = pcom.gen_cfg([f"{self.src_dir}{os.sep}fm.cfg"])
             fm_cfg_dic["fm"]["imp_filelist"] = os.linesep.join(
                 pcom.find_iter(self.dir_dic["results_dir"], "*.syn.v"))
             fm_cfg_dic["fm"]["set_svf"] = os.linesep.join(
                 pcom.find_iter(self.dir_dic["results_dir"], "*.syn.svf"))
             fm_cfg_dic["fm"]["DESIGN_NAME"] = pcom.rd_cfg(
                 self.dc_cfg_dic, "base_args", "DESIGN_NAME", True)
             fm_cfg_dic["fm"]["ref_filelist"] = pcom.rd_cfg(
                 self.dc_cfg_dic, "set_args", "rtl_files", True)
             fm_cfg_dic["fm"]["fm_top_dir"] = self.dir_dic["dc_time_dir"]
             fm_cfg_dic["fm"]["fm_src_dir"] = self.src_dir
             fm_runner.proc_fm(self.ced, fm_cfg_dic)
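The run-and-poll pattern in proc_dc, reduced to a standalone sketch: start a shell command with Popen, re-parse the log on a fixed interval while it runs, once more after it exits, and again on Ctrl-C before killing the process. The command and the parse step are placeholders, not the real dc_shell flow:
    import subprocess
    import time

    def parse_log():
        # placeholder for dc_log_parser.DcLogParser(...).parse_log()
        print("parsing log ...")

    proc = subprocess.Popen("sleep 3", shell=True)   # placeholder command
    try:
        while proc.poll() is None:     # None means the process is still running
            time.sleep(1)
            parse_log()
        parse_log()                    # final parse after normal completion
    except KeyboardInterrupt:
        parse_log()                    # parse whatever was produced so far
        proc.kill()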
Example #4
 def gen_makefile(self):
     """to generate top makefile"""
     mk_dic = {"CED": self.ced}
     mk_dic["clib_flg"] = self.mkg_dic["clib_flg"]
     mk_dic["csrc_flg"] = self.mkg_dic["csrc_flg"]
     self.cfg_dic["c"]["DEFAULT"].update(self.mkg_dic["ow_dic"]["pre"])
     self.chk_c_cfg()
     mk_dic["base_comp_opts"] = " ".join(
         pcom.rd_cfg(self.cfg_dic["c"], "DEFAULT", "base_comp_opts"))
     mk_dic["lib_comp_opts"] = " ".join(
         pcom.rd_cfg(self.cfg_dic["c"], "DEFAULT", "lib_comp_opts"))
     mk_dic["src_comp_opts"] = " ".join(
         pcom.rd_cfg(self.cfg_dic["c"], "DEFAULT", "src_comp_opts"))
     mk_dic["src_run_opts"] = " ".join(
         pcom.rd_cfg(self.cfg_dic["c"], "DEFAULT", "src_run_opts"))
     mk_dic["lib_pre_cmd_lst"] = pcom.rd_cfg(self.cfg_dic["c"], "DEFAULT",
                                             "lib_pre_cmd")
     mk_dic["lib_post_cmd_lst"] = pcom.rd_cfg(self.cfg_dic["c"], "DEFAULT",
                                              "lib_post_cmd")
     mk_dic["src_pre_cmd_lst"] = pcom.rd_cfg(self.cfg_dic["c"], "DEFAULT",
                                             "src_pre_cmd")
     mk_dic["src_post_cmd_lst"] = pcom.rd_cfg(self.cfg_dic["c"], "DEFAULT",
                                              "src_post_cmd")
     mk_dir = self.ced["MODULE_OUTPUT"]
     mk_file = "Makefile"
     os.makedirs(mk_dir, exist_ok=True)
     LOG.info("output dir %s is generated", mk_dir)
     pcom.ren_tempfile(f"{self.ced['PJ_TEMPLATES']}{os.sep}{mk_file}",
                       f"{mk_dir}{os.sep}{mk_file}", mk_dic)
     return mk_dir, mk_file
Example #5
def proc_fm(ced, fm_cfg_dic):
    """to process and kick off fm flow"""
    ref_name = (fm_cfg_dic["fm"]["REF_NAME"] if fm_cfg_dic["fm"]["REF_NAME"]
                else fm_cfg_dic["fm"]["DESIGN_NAME"])
    imp_name = (fm_cfg_dic["fm"]["IMP_NAME"] if fm_cfg_dic["fm"]["IMP_NAME"]
                else fm_cfg_dic["fm"]["DESIGN_NAME"])
    fm_cfg_dic["fm"]["fm_time_dir"] = fm_time_dir = (
        f"{fm_cfg_dic['fm']['fm_top_dir']}{os.sep}fm_"
        f"{os.path.basename(fm_cfg_dic['fm']['fm_src_dir'])}_{imp_name}_{ref_name}_"
        f"{ced['TIME'].strftime('%Y_%m_%d_%H_%M_%S')}")
    os.makedirs(fm_time_dir, exist_ok=True)
    LOG.info("generate fm.tcl file")
    pcom.ren_tempfile(
        f"{fm_cfg_dic['fm']['fm_src_dir']}{os.sep}fm_template{os.sep}fm.tcl",
        f"{fm_time_dir}{os.sep}fm.tcl", fm_cfg_dic["fm"])
    fm_str = (f"cd {fm_time_dir} && fm_shell -f {fm_time_dir}{os.sep}fm.tcl "
              f"| tee -i {fm_time_dir}{os.sep}fm.log")
    subprocess.run(fm_str, shell=True)
    parse_rlt_dic = {
        "design_name": pcom.rd_cfg(fm_cfg_dic, "fm", "DESIGN_NAME", True),
        "proj": ced["PROJ_NAME"],
        "user": ced["USER_NAME"],
        "run_time": dt.datetime.timestamp(ced["TIME"]),
        "status": ("passed" if os.path.isfile(f"{fm_time_dir}{os.sep}passed")
                   else "failed"),
    }
    query_url = f"{pcom.BE_URL}/pj_app/fm/db_query/query_insert_case/"
    if pcom.BACKEND:
        requests.post(query_url, json=parse_rlt_dic)
Example #6
 def parse_dc_pw_rpt(self):
     """to parse dc power log"""
     dc_pw_file = os.path.join(
         self.dc_dic["rpt_dir"],
         pcom.rd_cfg(self.dc_cfg_dic, "set_args", POWER, True))
     if not os.path.isfile(dc_pw_file):
         return
     with open(dc_pw_file) as dpf:
         for line in dpf:
             line = line.strip()
             if line.startswith("Operating Conditions:"):
                 self.dc_dic["pw_rpt"]["lib"] = line.split(
                     "Library:")[1].strip()
             if line.startswith("Total  "):
                 line_lst = re.split(r"\s+(?=\d|NA)", line)
                 if len(line_lst) == 5:
                     self.dc_dic["pw_rpt"]["internal_pw"] = line_lst[1]
                     self.dc_dic["pw_rpt"]["swithing_pw"] = line_lst[2]
                     self.dc_dic["pw_rpt"]["leakage_pw"] = line_lst[3]
                     self.dc_dic["pw_rpt"]["total_pw"] = line_lst[4]
                 else:
                     self.dc_dic["pw_rpt"]["internal_pw"] = "NA"
                     self.dc_dic["pw_rpt"]["swithing_pw"] = "NA"
                     self.dc_dic["pw_rpt"]["leakage_pw"] = "NA"
                     self.dc_dic["pw_rpt"]["total_pw"] = "NA"
     self.dc_dic["pw_rpt"]["log_path"] = dc_pw_file
Example #7
 def gen_sem_key(self):
     """to generate sem_key"""
     sk_dic = {
         "range_str": "",
         "m_cont": "",
         "s_cont": "",
         "line_lst": [],
         "key_lst": []
     }
     sk_dic["range_str"] = pcom.rd_cfg(self.cfg_dic["proj"], "proj",
                                       "sem_key_range", True)
     sk_dic["m_cont"] = subprocess.run(
         "ipcs -m", shell=True, check=True,
         stdout=subprocess.PIPE).stdout.decode()
     sk_dic["s_cont"] = subprocess.run(
         "ipcs -s", shell=True, check=True,
         stdout=subprocess.PIPE).stdout.decode()
     sk_dic["line_lst"].extend(re.split(r"\n", sk_dic["m_cont"]))
     sk_dic["line_lst"].extend(re.split(r"\s+", sk_dic["s_cont"]))
     for line in sk_dic["line_lst"]:
         line = line.strip()
         if line.startswith("0x"):
             sk_dic["key_lst"].append(
                 int(re.split(r"\s+", line)[0].strip(), 16))
     while True:
         rand_v = random.randrange(1, 2147483648 - int(sk_dic["range_str"]))
         for key in range(rand_v, rand_v + int(sk_dic["range_str"])):
             if key in sk_dic["key_lst"]:
                 break
         else:
             return rand_v
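The while/for/else structure above is the essential trick: the inner for scans a candidate key range and breaks on a collision, and the else branch, which only runs when the loop finishes without a break, accepts the candidate. A tiny standalone version with an invented set of used keys:
    import random

    used_keys = {10, 11, 12, 500}      # invented stand-in for sk_dic["key_lst"]
    range_len = 4

    while True:
        rand_v = random.randrange(1, 1000)
        for key in range(rand_v, rand_v + range_len):
            if key in used_keys:
                break                  # collision: try another starting point
        else:
            print("free range starts at", rand_v)
            break                      # whole range is free, stop searching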
Example #8
 def parse_dc_qor_rpt(self):
     """to parse dc qor log"""
     dc_qor_file = os.path.join(
         self.dc_dic["rpt_dir"],
         pcom.rd_cfg(self.dc_cfg_dic, "set_args", QOR, True))
     if not os.path.isfile(dc_qor_file):
         return
     with open(dc_qor_file) as dqf:
         lines_str = dqf.read()
         for blk_str in lines_str.split("\n\n"):
             if "-----" not in blk_str or "in2" in blk_str or "2out" in blk_str:
                 continue
             items_lst = []
             for l_str in blk_str.split("\n"):
                 if "-----" in l_str or not l_str:
                     continue
                 if ":" in l_str:
                     str_lst = l_str.split(":")
                     items_lst.append(
                         {str_lst[0].strip(): float(str_lst[1])})
                 else:
                     mop = pcom.REOpter(l_str.strip())
                     items_lst.append(
                         mop.group(2) if mop.match(self.pat_dic["tpg_pat"]
                                                   ) else l_str.strip())
             if items_lst:
                 for item_dic in items_lst[1:]:
                     for key, value in item_dic.items():
                         self.dc_dic["qor_rpt"][items_lst[0]][key] = value
     self.dc_dic["qor_rpt"]["log_path"] = dc_qor_file
Example #9
 def parse_dc_tm_rpt(self):
     """to parse dc timing log"""
     dc_tm_file = os.path.join(
         self.dc_dic["rpt_dir"],
         pcom.rd_cfg(self.dc_cfg_dic, "set_args", TIMING, True))
     if not os.path.isfile(dc_tm_file):
         return
     with open(dc_tm_file) as dtf:
         targ_flag = False
         for line in dtf:
             line = line.strip()
             mop = pcom.REOpter(line)
             if line.startswith("Startpoint:"):
                 targ_flag = True
                 targ_dic = {}
             if targ_flag:
                 line_lst = line.split(":")
                 if line_lst[0] in ("Startpoint", "Endpoint", "Path Group"):
                     targ_dic[line_lst[0]] = line_lst[1].strip()
             if line.startswith("slack "):
                 targ_flag = False
                 if mop.match(self.pat_dic["slk_pat"]):
                     if float(mop.group(2)) < 0:
                         targ_dic[mop.group(1)] = float(mop.group(2))
                         self.dc_dic["tm_rpt"]["timing"].append(targ_dic)
                 else:
                     LOG.info("%s condition not matched in line %s",
                              mop.group(1), line)
     self.dc_dic["tm_rpt"]["log_path"] = dc_tm_file
Example #10
 def proc_vplan_row1(self, w_s):
     """to process vplan row1 style per sheet except home"""
     for cell, width in zip(w_s[1], pcom.rd_cfg(
             self.cfg_dic["proj"], "vplan_column_width", w_s.title)):
         if self.gen_flg:
             cell.style = "Accent1"
         cell.alignment = Alignment(wrap_text=True)
         w_s.column_dimensions[cell.column].width = width
Example #11
def proc_cdc(cdc_src):
    """to process cdc flow"""
    if not shutil.which("vlog"):
        raise Exception("qsim is not loaded")
    if not shutil.which("qverify"):
        raise Exception("formal is not loaded")
    ced, _ = env_booter.EnvBooter().boot_env()
    cdc_src_dir = (os.path.abspath(os.path.expandvars(cdc_src))
                   if cdc_src else f"{ced['FLOW_CDC']}{os.sep}src")
    cdc_cfg_dic = pcom.gen_cfg([f"{cdc_src_dir}{os.sep}cdc.cfg"])
    wv_cfg_dic = pcom.gen_cfg([f"{cdc_src_dir}{os.sep}waiver.cfg"])
    cdc_flist = pcom.rd_cfg(cdc_cfg_dic, "cdc", "design_flist")
    top = pcom.rd_cfg(cdc_cfg_dic, "cdc", "design_top")
    if not cdc_flist or not top:
        raise Exception(f"no cdc filelist or top name found in cfg")
    cdc_time_dir = (
        f"{ced['FLOW_CDC']}{os.sep}cdc_{os.path.basename(cdc_src_dir)}_"
        f"{top[0]}_{ced['TIME'].strftime('%Y_%m_%d_%H_%M_%S')}")
    os.makedirs(cdc_time_dir, exist_ok=True)
    pcom.ren_tempfile(f"{cdc_src_dir}{os.sep}template{os.sep}cdc.tcl",
                      f"{cdc_time_dir}{os.sep}cdc.tcl", cdc_cfg_dic["cdc"])
    compiler_err_pat = re.compile("|".join(
        [re.escape(cc) for cc in pcom.rd_cfg(cdc_cfg_dic, "cdc", "err_str")]))
    run_compiler(cdc_time_dir, cdc_flist, compiler_err_pat)
    if not os.path.isfile(f"{cdc_time_dir}{os.sep}failed"):
        cdc_str = (f"cd {cdc_time_dir} && "
                   f"qverify -c -do {cdc_time_dir}{os.sep}cdc.tcl "
                   f"-od {cdc_time_dir}{os.sep}Output_Results")
        subprocess.run(cdc_str, shell=True)
    dtl_rpt = f"{cdc_time_dir}{os.sep}Output_Results{os.sep}cdc_detail.rpt"
    if not os.path.isfile(dtl_rpt):
        raise Exception(f"report file {dtl_rpt} is not existed")
    result_lst = gen_post_wv(dtl_rpt, wv_cfg_dic)
    if result_lst:
        with open(f"{cdc_time_dir}{os.sep}post_waiver.rpt", "w") as pwr:
            for wv_rule in result_lst:
                pwr.write(wv_rule + os.linesep)
        LOG.warning(
            "post-waived violations exist, please check the post_waiver.rpt file")
Example #12
def gen_post_wv(wv_dtl_rpt, cfg_dic):
    """to generate the post_waiver_rpt"""
    type_lst = []
    sp_ep_lst = []
    for sect in cfg_dic.sections():
        type_lst.extend(pcom.rd_cfg(cfg_dic, sect, "type"))
        sp_ep_lst.extend(pcom.rd_cfg(cfg_dic, sect, "sp|ep"))
    with open(wv_dtl_rpt) as drf:
        mop = pcom.REOpter(drf.read())
    str_pat = re.compile(
        rf"Violations{os.linesep}=+{os.linesep}(.*?){os.linesep}{{3,4}}",
        re.DOTALL)
    wv_lst = mop.group(1).split(f"{os.linesep*2}") if mop.search(
        str_pat) else []
    wv_pat = re.compile(rf"{os.linesep}-+{os.linesep}(.*)", re.DOTALL)
    result_lst = []
    for waiver in wv_lst:
        sop = pcom.REOpter(waiver)
        if sop.search(wv_pat):
            waiver = sop.group(1)
        if type_lst and re.search(
                "|".join(
                    [re.escape(cc).replace(r"\*", ".*")
                     for cc in type_lst]), waiver):
            continue
        for sp_ep in sp_ep_lst:
            if "|" not in sp_ep:
                LOG.warning(f"the format of {sp_ep} is wrong")
                continue
            if re.search(
                    re.escape(sp_ep.split("|")[0]).replace(r"\*", ".*"),
                    waiver.split(os.linesep)[0]) and re.search(
                        re.escape(sp_ep.split("|")[1]).replace(r"\*", ".*"),
                        waiver.split(os.linesep)[1]):
                break
        else:
            result_lst.append(waiver)
    return result_lst
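The waiver matching above converts shell-style wildcards from the config into regexes with re.escape(x).replace(r"\*", ".*"). A minimal sketch of that conversion, with invented waiver strings:
    import re

    def glob_to_regex(pat):
        # escape everything, then turn the escaped "*" back into ".*"
        return re.escape(pat).replace(r"\*", ".*")

    type_lst = ["RECON.*", "CLOCK_*_SYNC"]            # invented waiver types
    waiver = "CLOCK_ahb_SYNC violation between clk_a and clk_b"
    type_pat = "|".join(glob_to_regex(cc) for cc in type_lst)
    print(bool(re.search(type_pat, waiver)))          # True: matched by CLOCK_*_SYNC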
Example #13
 def proc_fc_sheet(self, w_s):
     """to process generated vplan function coverage sheet"""
     cg_score_dic = self.parse_cg_report()
     fc_col_lst = pcom.rd_cfg(self.cfg_dic["proj"], "vplan_sheets", "function_coverage")
     index_dic = {
         "c": fc_col_lst.index("Coverage Group"),
         "p": fc_col_lst.index("Priority"),
         "s": fc_col_lst.index("SNPS Cov Per")}
     if self.gen_flg:
         w_s.append(fc_col_lst)
     self.proc_vplan_row1(w_s)
     for index, cg_row in enumerate(w_s.rows):
         if index == 0:
             continue
         cg_name = cg_row[index_dic["c"]].value
         if cg_name in cg_score_dic:
             per = cg_score_dic[cg_name]["per"]
             cg_row[index_dic["s"]].value = f"{per} %"
             cg_row[index_dic["s"]].fill = PatternFill(
                 fill_type="gray125", end_color=self.gen_per_color(per))
         elif "::" in cg_name:
             base_cg_name = cg_name.split("::")[0]
             per = cg_score_dic[base_cg_name]["cp_dic"][cg_name]
             if cg_name in cg_score_dic[base_cg_name]["cp_dic"]:
                 cg_row[index_dic["s"]].value = f"{per} %"
                 cg_row[index_dic["s"]].fill = PatternFill(
                     fill_type="gray125", end_color=self.gen_per_color(per))
                 del cg_score_dic[base_cg_name]["cp_dic"][cg_name]
         else:
             cg_row[index_dic["p"]].value = "Out of Date"
             cg_row[index_dic["p"]].fill = PatternFill(
                 fill_type="gray125", end_color="FFFF0000")
         self.clean_cg_score_dic(cg_score_dic)
     for cg_name, cg_dic in cg_score_dic.items():
         new_line = [""]*len(fc_col_lst)
         new_line[index_dic["c"]] = cg_name
         new_line[index_dic["s"]] = f"{cg_dic['per']} %"
         w_s.append(new_line)
         w_s[w_s.max_row][index_dic["c"]].fill = PatternFill(
             fill_type="gray125", end_color="FFFFFF00")
         w_s[w_s.max_row][index_dic["s"]].fill = PatternFill(
             fill_type="gray125", end_color=self.gen_per_color(cg_dic["per"]))
         for cp_name, cp_per in cg_dic["cp_dic"].items():
             new_line = [""]*len(fc_col_lst)
             new_line[index_dic["c"]] = cp_name
             new_line[index_dic["s"]] = f"{cp_per} %"
             w_s.append(new_line)
             w_s[w_s.max_row][index_dic["s"]].fill = PatternFill(
                 fill_type="gray125", end_color=self.gen_per_color(cp_per))
Example #14
def find_module_dir(ced, cfg_dic, module):
    """to find verification module dir according to their subdir config"""
    for module_dir in pcom.find_iter(ced["PROJ_VERIF"], module, True):
        if os.path.isdir(f"{module_dir}{os.sep}config"):
            return module_dir
    tree_ignore_str = "|".join(
        pcom.rd_cfg(cfg_dic["proj"], "proj", "tree_ignore"))
    run_str = f"tree -d -I '(|{tree_ignore_str}|)' {ced['PROJ_VERIF']}"
    tree_str = subprocess.run(run_str,
                              shell=True,
                              check=True,
                              stdout=subprocess.PIPE).stdout.decode()
    raise Exception(
        f"module {module} is NA; the possible module is {os.linesep}{tree_str}"
    )
Example #15
 def __init__(self, ced, dc_cfg_dic):
     self.ced = ced
     self.dc_cfg_dic = dc_cfg_dic
     rpt_dir = pcom.rd_cfg(dc_cfg_dic, "set_args", "REPORTS_DIR", True)
     self.pat_dic = {
         "warn_pat1": re.compile(r"(.*?):\s*(.*?):(\d+?):(.*)\s*\((.*)\)"),
         "warn_pat2": re.compile(r"(.*?):\s*(.*)\.(.*?):"),
         "warn_pat3": re.compile(r"(.*?):\s*(.*?)\/(.*)."),
         "we_pat": re.compile(r"(.*?):\s*(.*)\s*\((.*)\)"),
         "line_pat": re.compile(r"\((.*)\)"),
         "ct_pat": re.compile(r".*\(\s*([\.\d]*).*\)"),
         "slk_pat": re.compile(r"(.*?)\s*\(.*\)\s*(.*)"),
         "tpg_pat": re.compile(r"(.*?)\s*\'(.*)\'")
     }
     self.dc_dic = {
         "user": self.ced["USER_NAME"],
         "proj": self.ced["PROJ_NAME"],
         "design_name": pcom.rd_cfg(
             dc_cfg_dic, "base_args", "DESIGN_NAME", True),
         "run_time": dt.datetime.timestamp(self.ced["TIME"]),
         "clk_freq": pcom.rd_cfg(dc_cfg_dic, "set_args", "clk_freq", True),
         "dc_log": {},
         "tm_rpt": collections.defaultdict(list),
         "qor_rpt": collections.defaultdict(dict),
         "pw_rpt": {},
         "rpt_dir": rpt_dir,
         "log_file": os.path.join(os.path.dirname(rpt_dir), "dc.log")
     }
Example #16
 def gen_data_struc(self):
     """to generate base_dic and filter_lst"""
     while True:
         agt_name = input(f"input agt name: ")
         LOG.info(f"agt name is {agt_name}")
         self.base_dic["agt_name_lst"].append(agt_name)
         if input("generate another agt?(yes/y or no/n): ") in ("no", "n"):
             break
     for opt in self.cfg_dic["proj"].options("gen_with"):
         self.base_dic[opt] = False
         if input(f"create {opt}?(yes/y or no/n): ") in ("yes", "y"):
             self.base_dic[opt] = True
             continue
         self.filter_lst.extend(
             pcom.rd_cfg(self.cfg_dic["proj"], "gen_with", opt))
Example #17
 def gen_cta_opts(self, v_n):
     """to generate customized tb analysis opts"""
     cta_opts_lst = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                "custom_tb_ana_opts")
     if self.mkg_dic["regr_flg"]:
         cta_opts_lst = pcom.rd_cfg(self.cfg_dic["proj"], "regression_opts",
                                    "custom_tb_ana_opts") + cta_opts_lst
     fpga_cta_lst = pcom.rd_cfg(
         self.cfg_dic["simv"], v_n, "fpga_tb_ana_opts") if pcom.rd_cfg(
             self.cfg_dic["simv"], v_n, "fpga") == ["on"] else []
     uvm_cta_lst = pcom.rd_cfg(
         self.cfg_dic["simv"], v_n, "uvm_tb_ana_opts") if pcom.rd_cfg(
             self.cfg_dic["simv"], v_n, "uvm") == ["on"] else []
     upf_cta_lst = pcom.rd_cfg(
         self.cfg_dic["simv"], v_n, "upf_tb_ana_opts") if pcom.rd_cfg(
             self.cfg_dic["simv"], v_n, "upf") == ["on"] else []
     return " ".join(cta_opts_lst + fpga_cta_lst + uvm_cta_lst +
                     upf_cta_lst)
Example #18
 def gen_smf_lst(self, v_n):
     """to generate sub modules filelist list"""
     smf_lst = []
     for s_m in pcom.rd_cfg(self.cfg_dic["simv"], v_n, "sub_modules"):
         if not ":" in s_m:
             raise Exception(
                 f"sub_modules {s_m} in module {self.ced['MODULE']} simv cfg "
                 f"has incorrect name:type format")
         m_name, m_type, *_ = s_m.split(":")
         module_dir = env_booter.find_module_dir(self.ced, self.cfg_dic,
                                                 m_name)
         sm_flist = os.sep.join([module_dir, "flist", f"{m_type}.flist"])
         if not os.path.isfile(sm_flist):
             raise Exception(
                 f"sub_modules {s_m} has no such file {sm_flist}")
         smf_lst.append(sm_flist)
     return smf_lst
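Each sub_modules entry is expected in name:type form; the star unpacking in the split tolerates extra colons by discarding the tail. A quick illustration with invented entries:
    sub_modules = ["uart:rtl", "spi:tb:extra_field", "bad_entry"]   # invented entries

    for s_m in sub_modules:
        if ":" not in s_m:
            print(f"skip {s_m}: missing name:type format")
            continue
        m_name, m_type, *_ = s_m.split(":")   # anything after the 2nd field is dropped
        print(m_name, "->", f"{m_type}.flist")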
Example #19
 def gen_dir_dic(self):
     """to updated dc cfg and generate dc info dic"""
     dc_flist = pcom.rd_cfg(self.dc_cfg_dic, "base_args", "DESIGN_FLIST")
     for flist in dc_flist:
         if not os.path.isfile(flist):
             raise Exception(f"base flist file {flist} is NA")
     _, verilog_lst, vhdl_lst = filelst_gen.FilelstGen().gen_file_lst(dc_flist)
     self.dc_cfg_dic["set_args"]["rtl_files"] = os.linesep.join(verilog_lst+vhdl_lst)
     os.makedirs(self.dir_dic["reports_dir"], exist_ok=True)
     os.makedirs(self.dir_dic["results_dir"], exist_ok=True)
     os.makedirs(self.dir_dic["tcl_dir"], exist_ok=True)
     if self.dc_dic["cp_rtl"]:
         filelst_gen.cp_flist(
             verilog_lst, self.ced["PROJ_RTL"], f"{self.dir_dic['dc_time_dir']}{os.sep}cp_rtl")
     self.dc_cfg_dic["set_args"]["REPORTS_DIR"] = self.dir_dic["reports_dir"]
     self.dc_cfg_dic["set_args"]["RESULTS_DIR"] = self.dir_dic["results_dir"]
     self.dc_cfg_dic["dc__tcl"]["DcTcl_DIR"] = self.dir_dic["tcl_dir"]
     self.dc_cfg_dic["dc__tcl"]["DT_DIR"] = self.dir_dic["dc_time_dir"]
Example #20
 def proc_leda(self):
     """to process and kick off leda flow"""
     if self.leda_dic["leda_gen_log"]:
         os.makedirs(self.df_dic["time_dir"], exist_ok=True)
         self.gen_flist_file()
         nwd_lst = []
         rule_lst = []
         for rule_file in pcom.find_iter(self.df_dic["rule_dir"], "*.tcl"):
             self.cov_file2rulelst(rule_file, nwd_lst, rule_lst)
         for waiver_file in pcom.find_iter(self.df_dic["waiver_dir"],
                                           "*.tcl"):
             self.cov_file2rulelst(waiver_file, nwd_lst, rule_lst)
         with open(self.df_dic["tcl"], "w") as ltf:
             ltf.write(os.linesep.join(rule_lst))
         self.kick_off_leda()
         shutil.copyfile(self.df_dic["log"],
                         f"{self.ced['FLOW_LEDA']}{os.sep}leda_latest.log")
         leda_error_flg = False
         str_pat = re.compile("|".join([
             re.escape(cc)
             for cc in pcom.rd_cfg(self.leda_cfg, "leda", "error_string")
         ]))
         with open(self.df_dic["log"], errors="replace") as llf:
             with open(f"{self.df_dic['log_dir']}{os.sep}leda_error.log",
                       "w") as elf:
                 for line in llf:
                     line = line.strip()
                     if str_pat.search(line):
                         elf.write(f"{line}{os.linesep}")
                         leda_error_flg = True
         if leda_error_flg:
             LOG.warning("ERRORs raised in the process of leda")
     elif self.leda_dic["leda_gui"]:
         leda_dir = os.path.abspath(
             os.path.expandvars(self.leda_dic["leda_dir"]))
         if not os.path.isdir(leda_dir):
             raise Exception(f"leda GUI loaded dir {leda_dir} is NA")
         leda_str = (f"cd {leda_dir} && "
                     f"leda +gui -config {leda_dir}{os.sep}leda.tcl "
                     f"-l {leda_dir}{os.sep}leda_logs{os.sep}leda.log")
         subprocess.run(leda_str, shell=True)
Example #21
 def proc_uvm_gen(self):
     """to generate module uvm env"""
     m_dir = self.module_dir if self.module_dir else self.ced["PROJ_VERIF"]
     module_path = os.path.expandvars(f"{m_dir}{os.sep}{self.module}")
     if os.path.isdir(module_path):
         raise Exception(
             f"module path you typed {module_path} has already existed")
     self.gen_data_struc()
     pj_gen_dir = f"{self.ced['SHARE_TEMPLATES']}{os.sep}pj_gen"
     if not os.path.isdir(pj_gen_dir):
         raise Exception(f"pj_gen dir {pj_gen_dir} is NA")
     for sub_dir in pcom.find_iter(pj_gen_dir, "*", dir_flg=True):
         dest_dir = sub_dir.replace(pj_gen_dir, module_path)
         os.makedirs(dest_dir, exist_ok=True)
         LOG.info(f"create a new {dest_dir} directory.")
     for temp_file in pcom.find_iter(pj_gen_dir, "*"):
         t_fn = os.path.basename(temp_file)
         if t_fn in self.filter_lst:
             continue
         LOG.info(f"template file is {t_fn}")
         tf_str = temp_file.replace(pj_gen_dir, module_path)
         blk_n = self.module if t_fn.startswith("_") else ""
         if t_fn in pcom.rd_cfg(self.cfg_dic["proj"], "gen_agt",
                                "multiple"):
             mul_dic = copy.deepcopy(self.base_dic)
             del mul_dic["agt_name_lst"]
             for agt_name in self.base_dic["agt_name_lst"]:
                 mul_dic["agt_name"] = agt_name
                 pcom.ren_tempfile(
                     temp_file,
                     tf_str.replace(t_fn, f"{blk_n}_{agt_name}{t_fn}"),
                     mul_dic)
         else:
             pcom.ren_tempfile(temp_file,
                               tf_str.replace(t_fn, f"{blk_n}{t_fn}"),
                               self.base_dic)
     LOG.info(f"module {self.module} uvm env generated")
Example #22
 def kick_off_leda(self):
     """to kick off main leda flow without gui"""
     if not os.path.isfile(self.df_dic["flist_file"]):
         raise Exception(
             f"generated leda filelist {self.df_dic['flist_file']} is NA")
     with open(self.df_dic["flist_file"]) as flf:
         with open(
                 f"{self.df_dic['time_dir']}{os.sep}leda_on_off_warning.log",
                 "w") as lwf:
             for line in flf:
                 line = line.strip()
                 if line.startswith("+incdir"):
                     continue
                 if not os.path.isfile(line):
                     raise Exception(f"rtl file {line} is NA")
                 with open(line, errors="replace") as lvf:
                     vf_con = lvf.read()
                 if "leda off" in vf_con:
                     wline = f"leda off in file {line}"
                     LOG.warning(wline)
                     lwf.write(wline)
                 if "leda on" in vf_con:
                     wline = f"leda on in file {line}"
                     LOG.warning(wline)
                     lwf.write(wline)
     bb_str = f"-bbox_file {self.df_dic['bbox_file']}" if os.path.isfile(
         self.df_dic["bbox_file"]) else ""
     leda_opts = " ".join(pcom.rd_cfg(self.leda_cfg, "leda", "opts"))
     leda_str = (
         f"cd {self.df_dic['time_dir']} && "
         f"leda {leda_opts} -config {self.df_dic['tcl']} "
         f"-f {self.df_dic['flist_file']} {bb_str} -top {self.leda_dic['leda_top']} "
         f"-log_dir {self.df_dic['log_dir']} -l {self.df_dic['log']}")
     with open(f"{self.df_dic['time_dir']}{os.sep}leda_cmd", "w") as lcf:
         lcf.write(f"{leda_str}{os.linesep}")
     subprocess.run(leda_str, shell=True, stdout=subprocess.PIPE)
Example #23
 def gen_ce_opts(self, v_n, simv_dic):
     """to generate customized elaboration opts"""
     ce_opts_lst = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                               "custom_elab_opts")
     if self.mkg_dic["regr_flg"]:
         ce_opts_lst = pcom.rd_cfg(self.cfg_dic["proj"], "regression_opts",
                                   "custom_elab_opts") + ce_opts_lst
     cov_ce_lst = pcom.rd_cfg(
         self.cfg_dic["simv"], v_n, "cov_elab_opts") if pcom.rd_cfg(
             self.cfg_dic["simv"], v_n, "cov") == ["on"] else []
     wave_ce_lst = pcom.rd_cfg(
         self.cfg_dic["simv"], v_n, "wave_elab_opts") + pcom.rd_cfg(
             self.cfg_dic["simv"], v_n,
             f"wf_{simv_dic['wave_format']}_elab_opts") if pcom.rd_cfg(
                 self.cfg_dic["simv"], v_n, "wave") == ["on"] else []
     gui_ce_lst = pcom.rd_cfg(
         self.cfg_dic["simv"], v_n, "gui_elab_opts") if pcom.rd_cfg(
             self.cfg_dic["simv"], v_n, "gui") == ["on"] else []
     prof_ce_lst = pcom.rd_cfg(
         self.cfg_dic["simv"], v_n, "prof_elab_opts") if pcom.rd_cfg(
             self.cfg_dic["simv"], v_n, "prof") == ["on"] else []
     uvm_ce_lst = pcom.rd_cfg(
         self.cfg_dic["simv"], v_n, "uvm_elab_opts") if pcom.rd_cfg(
             self.cfg_dic["simv"], v_n, "uvm") == ["on"] else []
     upf_ce_lst = pcom.rd_cfg(
         self.cfg_dic["simv"], v_n, "upf_elab_opts") if pcom.rd_cfg(
             self.cfg_dic["simv"], v_n, "upf") == ["on"] else []
     return " ".join(ce_opts_lst + cov_ce_lst + wave_ce_lst + gui_ce_lst +
                     prof_ce_lst + uvm_ce_lst + upf_ce_lst)
Example #24
 def __init__(self, ced, cfg_dic, cvsr_tup):
     self.ced = ced
     self.cfg_dic = cfg_dic
     self.cvsr_tup = cvsr_tup
     self.pat_dic = {
         "f_pat": re.compile(
             (r"\b[Ee]rror\b|\bERROR\b|\*E\b|\bUVM_(ERROR|FATAL)\s*:\s*[1-9]\d*|"+"|".join(
                 [re.escape(cc) for cc in pcom.rd_cfg(
                     cfg_dic["case"], cvsr_tup[0], "fail_string")])).strip("|")),
         "i_pat": re.compile(
             (r"^$|"+"|".join([re.escape(cc) for cc in pcom.rd_cfg(
                 cfg_dic["case"], cvsr_tup[0], "ignore_string")])).strip("|")),
         "ct_pat": re.compile(r"CPU [Tt]ime:\s+(\d*\.\d+)\s+(\w+)"),
         "p_pat": re.compile(
             (r"|"+"|".join([re.escape(cc) for cc in pcom.rd_cfg(
                 cfg_dic["case"], cvsr_tup[0], "pass_string")])).strip("|")),
         "fin_pat": re.compile(r"\$finish at simulation time\s+(\d+)"),
         "uvm_pat": re.compile(r"\+UVM_TESTNAME=(\w+)\s")}
     self.log_dic = log_dic = {
         "da": os.path.join(ced["OUTPUT_SIMV"], cvsr_tup[1], "dut_ana.log"),
         "ta": os.path.join(ced["OUTPUT_SIMV"], cvsr_tup[1], "tb_ana.log"),
         "e": os.path.join(ced["OUTPUT_SIMV"], cvsr_tup[1], "elab.log"),
         "s": os.path.join(
             ced["MODULE_OUTPUT"], cvsr_tup[0], f"{cvsr_tup[1]}__{cvsr_tup[2]}",
             f"{cvsr_tup[2]}.log")}
     module_name = f"{ced['MODULE']}___{ced['PROJ_NAME']}"
     simv_name = f"{cvsr_tup[1]}___{module_name}"
     case_name = f"{cvsr_tup[0]}___{simv_name}"
     try:
         svn_ver = pcom.gen_svn_ver(ced["PROJ_ROOT"])
     except subprocess.CalledProcessError:
         svn_ver = pcom.gen_svn_ver(ced["PROJ_VERIF"])
     self.sc_dic = {
         "s": {
             "dut_ana_log": log_dic["da"],
             "dut_ana_status": "NA",
             "dut_ana_error": "NA",
             "tb_ana_log": log_dic["ta"],
             "tb_ana_status": "NA",
             "tb_ana_error": "NA",
             "elab_log": log_dic["e"],
             "elab_status": "NA",
             "elab_error": "NA",
             "comp_cpu_time": "NA"},
         "c": {
             "pub_date": dt.datetime.timestamp(ced["TIME"]),
             "end_date": dt.datetime.timestamp(dt.datetime.now()),
             "case_name": case_name,
             "c_name": cvsr_tup[0],
             "simv_name": simv_name,
             "v_name": cvsr_tup[1],
             "module_name": module_name,
             "m_name": ced["MODULE"],
             "proj_name": ced["PROJ_NAME"],
             "user_name": ced["USER_NAME"],
             "proj_cl": svn_ver,
             "seed": cvsr_tup[2],
             "simu_log": log_dic["s"],
             "simu_status": "NA",
             "simu_error": "NA",
             "simu_cpu_time": "NA",
             "simu_time": "NA",
             "regr_types": cvsr_tup[3]}}
Example #25
 def gen_su_opts(self, c_n, case_dic):
     """to generate customized simulation opts"""
     su_opts_lst = pcom.rd_cfg(self.cfg_dic["case"], c_n,
                               "custom_simu_opts")
     if self.mkg_dic["regr_flg"]:
         su_opts_lst = pcom.rd_cfg(self.cfg_dic["proj"], "regression_opts",
                                   "custom_simu_opts") + su_opts_lst
     cov_su_lst = pcom.rd_cfg(
         self.cfg_dic["case"], c_n, "cov_simu_opts") if pcom.rd_cfg(
             self.cfg_dic["case"], c_n, "cov") == ["on"] else []
     wave_su_lst = pcom.rd_cfg(
         self.cfg_dic["case"], c_n,
         f"wf_{case_dic['wave_format']}_simu_opts") if pcom.rd_cfg(
             self.cfg_dic["case"], c_n, "wave") == ["on"] else []
     wg_su_lst = pcom.rd_cfg(
         self.cfg_dic["case"], c_n,
         f"wf_{case_dic['wave_format']}_glitch_simu_opts") if pcom.rd_cfg(
             self.cfg_dic["case"], c_n, "wave") == ["on"] and pcom.rd_cfg(
                 self.cfg_dic["case"], c_n, "wave_glitch") == ["on"] else []
     seed_su_lst = pcom.rd_cfg(self.cfg_dic["case"], c_n, "seed_simu_opts")
     uvm_su_lst = pcom.rd_cfg(
         self.cfg_dic["case"], c_n, "uvm_simu_opts") if pcom.rd_cfg(
             self.cfg_dic["case"], c_n, "uvm") == ["on"] else []
     gui_su_lst = pcom.rd_cfg(
         self.cfg_dic["case"], c_n, "gui_simu_opts") if pcom.rd_cfg(
             self.cfg_dic["case"], c_n, "gui") == ["on"] else []
     prof_mem_su_lst = pcom.rd_cfg(
         self.cfg_dic["case"], c_n, "prof_mem_simu_opts") if pcom.rd_cfg(
             self.cfg_dic["case"], c_n, "prof_mem") == ["on"] else []
     prof_time_su_lst = pcom.rd_cfg(
         self.cfg_dic["case"], c_n, "prof_time_simu_opts") if pcom.rd_cfg(
             self.cfg_dic["case"], c_n, "prof_time") == ["on"] else []
     sem_key_su_lst = pcom.rd_cfg(self.cfg_dic["case"], c_n,
                                  "sem_key_simu_opts")
     return " ".join(su_opts_lst + cov_su_lst + wave_su_lst + wg_su_lst +
                     seed_su_lst + uvm_su_lst + gui_su_lst +
                     prof_mem_su_lst + prof_time_su_lst + sem_key_su_lst)
Example #26
 def gen_case_dic(self, c_n):
     """to generate case related dic to render jinja2"""
     case_dic = {"name": c_n}
     simv_str = pcom.rd_cfg(self.cfg_dic["case"], c_n, "simv", True)
     case_dic["simv"] = simv_str if simv_str and simv_str in self.cfg_dic[
         "simv"] else "DEFAULT"
     if case_dic["simv"] not in self.simv_lst:
         self.simv_lst.append(case_dic["simv"])
     case_dic["tb_top"] = pcom.rd_cfg(self.cfg_dic["simv"],
                                      case_dic["simv"], "tb_top", True,
                                      "test_top")
     case_dic["wave"] = True if pcom.rd_cfg(self.cfg_dic["case"], c_n,
                                            "wave") == ["on"] else False
     case_dic["wave_format"] = pcom.rd_cfg(self.cfg_dic["simv"],
                                           case_dic["simv"], "wave_format",
                                           True, "fsdb")
     case_dic["wave_mem"] = True if pcom.rd_cfg(
         self.cfg_dic["case"], c_n, "wave_mem") == ["on"] else False
     case_dic["wave_glitch"] = True if pcom.rd_cfg(
         self.cfg_dic["case"], c_n, "wave_glitch") == ["on"] else False
     case_dic["sem_key"] = True if pcom.rd_cfg(
         self.cfg_dic["case"], c_n, "sem_key", True) == "on" else False
     case_dic["su_opts"] = self.gen_su_opts(c_n, case_dic)
     case_dic["w_opts"] = " ".join(
         pcom.rd_cfg(self.cfg_dic["simv"], case_dic["simv"], "verdi_opts") +
         pcom.rd_cfg(self.cfg_dic["case"], c_n, "verdi_simu_opts"))
     case_dic["pre_cmd_lst"] = pcom.rd_cfg(self.cfg_dic["case"], c_n,
                                           "pre_cmd")
     case_dic["post_cmd_lst"] = pcom.rd_cfg(self.cfg_dic["case"], c_n,
                                            "post_cmd")
     case_dic["file_dic"] = {}
     for opt_name, opt_cont in self.cfg_dic["case"][c_n].items():
         if not opt_name.startswith("file__"):
             continue
         case_dic["file_dic"][opt_name[6:]] = opt_cont.replace(
             "$", "$$").replace("\\", "").split(os.linesep)
     case_dic["regr_type_lst"] = pcom.rd_cfg(self.cfg_dic["case"], c_n,
                                             "regression_type")
     case_dic["regr_type_lst"].append("all")
     seed_set = set()
     seed_str = pcom.rd_cfg(self.cfg_dic["case"], c_n, "seed", True)
     rt_str = pcom.rd_cfg(self.cfg_dic["case"], c_n, "random_times", True)
     loop_times = rt_str if rt_str and rt_str.isdigit() else "1"
     for _ in range(int(loop_times)):
         if seed_str:
             if seed_str.isdigit():
                 seed_set.add(int(seed_str))
             else:
                 seed = check_seed(random.randrange(1, 999999), seed_set)
                 seed_set.add(seed)
         elif rt_str:
             seed = check_seed(random.randrange(1, 999999), seed_set)
             seed_set.add(seed)
         else:
             seed_set.add(1)
     case_dic["seed_set"] = seed_set
     if self.mkg_dic["sem_flg"]:
         case_dic["sem_key"] = self.gen_sem_key()
     return case_dic
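check_seed is not shown in this example; a plausible reading, consistent with how it is called above, is that it re-rolls until the candidate seed is not already in the set. A hypothetical sketch under that assumption:
    import random

    def check_seed(seed, seed_set):
        """hypothetical helper: re-roll until the seed is not already used"""
        while seed in seed_set:
            seed = random.randrange(1, 999999)
        return seed

    seed_set = set()
    for _ in range(3):
        seed_set.add(check_seed(random.randrange(1, 999999), seed_set))
    print(seed_set)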
Example #27
 def proc_cc_sheet(self, w_s):
     """to process generated vplan code coverage sheet"""
     ch_score_dic = self.parse_ch_report()
     cc_col_lst = pcom.rd_cfg(self.cfg_dic["proj"], "vplan_sheets", "code_coverage")
     index_dic = {
         "h": cc_col_lst.index("Hierarchy"),
         "p": cc_col_lst.index("Priority"),
         "s": cc_col_lst.index("Score"),
         "l": cc_col_lst.index("Line"),
         "c": cc_col_lst.index("Cond"),
         "t": cc_col_lst.index("Toggle"),
         "f": cc_col_lst.index("FSM"),
         "b": cc_col_lst.index("Branch"),
         "a": cc_col_lst.index("Assert")}
     if self.gen_flg:
         w_s.append(cc_col_lst)
     self.proc_vplan_row1(w_s)
     for index, ch_row in enumerate(w_s.rows):
         if index == 0:
             continue
         ch_name = ch_row[index_dic["h"]].value
         if ch_name in ch_score_dic:
             self.fill_cc_scores(ch_row[index_dic["s"]], ch_score_dic[ch_name]["s"])
             self.fill_cc_scores(ch_row[index_dic["l"]], ch_score_dic[ch_name]["l"])
             self.fill_cc_scores(ch_row[index_dic["c"]], ch_score_dic[ch_name]["c"])
             self.fill_cc_scores(ch_row[index_dic["t"]], ch_score_dic[ch_name]["t"])
             self.fill_cc_scores(ch_row[index_dic["f"]], ch_score_dic[ch_name]["f"])
             self.fill_cc_scores(ch_row[index_dic["b"]], ch_score_dic[ch_name]["b"])
             self.fill_cc_scores(ch_row[index_dic["a"]], ch_score_dic[ch_name]["a"])
             del ch_score_dic[ch_name]
         else:
             ch_row[index_dic["p"]].value = "Out of Date"
             ch_row[index_dic["p"]].fill = PatternFill(
                 fill_type="gray125", end_color="FFFF0000")
     for ch_name, ch_dic in ch_score_dic.items():
         new_line = [""]*len(cc_col_lst)
         new_line[index_dic["h"]] = ch_name
         new_line[index_dic["s"]] = f"{ch_dic['s']} %"
         new_line[index_dic["l"]] = f"{ch_dic['l']} %"
         new_line[index_dic["c"]] = f"{ch_dic['c']} %"
         new_line[index_dic["t"]] = f"{ch_dic['t']} %"
         new_line[index_dic["f"]] = f"{ch_dic['f']} %"
         new_line[index_dic["b"]] = f"{ch_dic['b']} %"
         new_line[index_dic["a"]] = f"{ch_dic['a']} %"
         w_s.append(new_line)
         if "(top)" in ch_name:
             w_s[w_s.max_row][index_dic["h"]].fill = PatternFill(
                 fill_type="gray125", end_color="FFFFFF00")
         w_s[w_s.max_row][index_dic["s"]].fill = PatternFill(
             fill_type="gray125", end_color=self.gen_per_color(ch_dic["s"]))
         w_s[w_s.max_row][index_dic["l"]].fill = PatternFill(
             fill_type="gray125", end_color=self.gen_per_color(ch_dic["l"]))
         w_s[w_s.max_row][index_dic["c"]].fill = PatternFill(
             fill_type="gray125", end_color=self.gen_per_color(ch_dic["c"]))
         w_s[w_s.max_row][index_dic["t"]].fill = PatternFill(
             fill_type="gray125", end_color=self.gen_per_color(ch_dic["t"]))
         w_s[w_s.max_row][index_dic["f"]].fill = PatternFill(
             fill_type="gray125", end_color=self.gen_per_color(ch_dic["f"]))
         w_s[w_s.max_row][index_dic["b"]].fill = PatternFill(
             fill_type="gray125", end_color=self.gen_per_color(ch_dic["b"]))
         w_s[w_s.max_row][index_dic["a"]].fill = PatternFill(
             fill_type="gray125", end_color=self.gen_per_color(ch_dic["a"]))
Example #28
 def proc_tc_sheet(self, w_s):
     """to process generated vplan test case sheet"""
     query_url = f"{pcom.BE_URL}/pj_app/regr/db_query/query_case_dic/"
     query_param = {
         "date": dt.datetime.now().strftime("%Y_%m_%d"),
         "proj": self.ced["PROJ_NAME"],
         "module": self.ced["MODULE"],
         "days": self.days}
     case_pr_dic = requests.get(query_url, params=query_param).json() if pcom.BACKEND else {}
     tc_col_lst = pcom.rd_cfg(self.cfg_dic["proj"], "vplan_sheets", "test_case")
     index_dic = {
         "c": tc_col_lst.index("Case Name"),
         "p": tc_col_lst.index("Priority"),
         "o": tc_col_lst.index("Owner"),
         "s": tc_col_lst.index("Status"),
         "r": tc_col_lst.index("Days"),
         "v": tc_col_lst.index("CL Ver"),
         "d": tc_col_lst.index("Description")}
     if self.gen_flg:
         w_s.append(tc_col_lst)
     self.proc_vplan_row1(w_s)
     for index, case_row in enumerate(w_s.rows):
         if index == 0:
             continue
         case_name = case_row[index_dic["c"]].value
         case_dic = case_pr_dic.get(case_name, {})
         if case_name in self.cfg_dic["case"]:
             case_row[index_dic["d"]].value = self.cfg_dic["case"][case_name][
                 "vplan_desc"].replace(os.linesep, "; ")
             case_row[index_dic["p"]].value = pcom.rd_cfg(
                 self.cfg_dic["case"], case_name, "vplan_priority", True)
             case_row[index_dic["o"]].value = pcom.rd_cfg(
                 self.cfg_dic["case"], case_name, "vplan_owner", True)
             case_row[index_dic["s"]].value = (
                 f"{case_dic.get('pr', 0.0)} % "
                 f"({case_dic.get('pn', 0)}/{case_dic.get('tn', 0)})")
             self.v_dic["tpn"] += case_dic.get("pn", 0)
             self.v_dic["ttn"] += case_dic.get("tn", 0)
             case_row[index_dic["s"]].fill = PatternFill(
                 fill_type="gray125",
                 end_color=case_dic.get("bc", "#FF0000").replace("#", "FF"))
             case_row[index_dic["r"]].value = self.days
             case_row[index_dic["v"]].value = case_dic.get("cl_range", "NA")
             del self.cfg_dic["case"][case_name]
         else:
             case_row[index_dic["p"]].value = "Out of Date"
             case_row[index_dic["p"]].fill = PatternFill(
                 fill_type="gray125", end_color="FFFF0000")
     for case_name in self.cfg_dic["case"]:
         if case_name == "DEFAULT":
             continue
         case_dic = case_pr_dic.get(case_name, {})
         new_line = [""]*len(tc_col_lst)
         new_line[index_dic["c"]] = case_name
         new_line[index_dic["d"]] = self.cfg_dic["case"][case_name][
             "vplan_desc"].replace(os.linesep, "; ")
         new_line[index_dic["p"]] = pcom.rd_cfg(
             self.cfg_dic["case"], case_name, "vplan_priority", True)
         new_line[index_dic["o"]] = pcom.rd_cfg(
             self.cfg_dic["case"], case_name, "vplan_owner", True)
         new_line[index_dic["s"]] = (
             f"{case_dic.get('pr', 0.0)} % "
             f"({case_dic.get('pn', 0)}/{case_dic.get('tn', 0)})")
         self.v_dic["tpn"] += case_dic.get("pn", 0)
         self.v_dic["ttn"] += case_dic.get("tn", 0)
         new_line[index_dic["r"]] = self.days
         new_line[index_dic["v"]] = case_dic.get("cl_range", "NA")
         w_s.append(new_line)
         w_s[w_s.max_row][index_dic["s"]].fill = PatternFill(
             fill_type="gray125",
             end_color=case_dic.get("bc", "#FF0000").replace("#", "FF"))
Example #29
 def gen_simv_dic(self, v_n):
     """to generate simv related dic to render jinja2"""
     self.chk_simv_cfg(v_n)
     simv_dic = {"name": v_n}
     simv_dic["wave_format"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                           "wave_format", True, "fsdb")
     simv_dic["cda_opts"] = self.gen_cda_opts(v_n)
     simv_dic["cta_opts"] = self.gen_cta_opts(v_n)
     simv_dic["ce_opts"] = self.gen_ce_opts(v_n, simv_dic)
     simv_dic["upf_flg"] = True if pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                               "upf") == ["on"] else False
     smf_lst = self.gen_smf_lst(v_n)
     dut_flist_name = pcom.rd_cfg(self.cfg_dic["simv"], v_n, "dut_flist",
                                  True, "rtl.flist")
     dut_flist_lst = [
         f"{self.ced['MODULE_FLIST']}{os.sep}{dut_flist_name}"
     ] + smf_lst
     df_tup = filelst_gen.FilelstGen().gen_file_lst(dut_flist_lst)
     simv_dic["dut_dir_lst"], simv_dic["dut_file_lst"], simv_dic[
         "vhdl_file_lst"] = df_tup
     self.chk_simv_flist(v_n, df_tup, False)
     tb_flist_name = pcom.rd_cfg(self.cfg_dic["simv"], v_n, "tb_flist",
                                 True, "tb.flist")
     tb_flist_lst = [f"{self.ced['MODULE_FLIST']}{os.sep}{tb_flist_name}"]
     tf_tup = filelst_gen.FilelstGen().gen_file_lst(tb_flist_lst)
     simv_dic["tb_dir_lst"], simv_dic["tb_file_lst"], _ = tf_tup
     self.chk_simv_flist(v_n, tf_tup, True)
     simv_dic["tb_dep_file_lst"] = [
         ddf for did in simv_dic["dut_dir_lst"] for ddf in pcom.find_iter(
             did.replace("+incdir+", ""), "*", cur_flg=True, i_str="\\")
     ] + [
         tdf for tid in simv_dic["tb_dir_lst"] for tdf in pcom.find_iter(
             tid.replace("+incdir+", ""), "*", cur_flg=True, i_str="\\")
     ]
     simv_dic["vhdl_tool"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                         "vhdl_tool", True, "vhdlan")
     simv_dic["vhdl_da_opts"] = " ".join(
         pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                     f"vt_{simv_dic['vhdl_tool']}_dut_ana_opts"))
     simv_dic["vhdl_ta_opts"] = " ".join(
         pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                     f"vt_{simv_dic['vhdl_tool']}_tb_ana_opts"))
     simv_dic["ana_tool"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                        "ana_tool", True, "vlogan")
     simv_dic["da_opts"] = " ".join(
         pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                     f"at_{simv_dic['ana_tool']}_dut_ana_opts"))
     simv_dic["ta_opts"] = " ".join(
         pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                     f"at_{simv_dic['ana_tool']}_tb_ana_opts"))
     simv_dic["elab_tool"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                         "elab_tool", True, "vcs")
     simv_dic["e_opts"] = " ".join(
         pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                     f"et_{simv_dic['elab_tool']}_elab_opts"))
     simv_dic["w_opts"] = " ".join(
         pcom.rd_cfg(self.cfg_dic["simv"], v_n, "verdi_opts"))
     simv_dic["tb_top"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n, "tb_top",
                                      True, "test_top")
     simv_dic["power_top"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                         "power_top", True, "chip_top")
     simv_dic["pre_cmd_lst"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                           "pre_cmd")
     simv_dic["post_cmd_lst"] = pcom.rd_cfg(self.cfg_dic["simv"], v_n,
                                            "post_cmd")
     simv_dic["file_dic"] = {}
     for opt_name, opt_cont in self.cfg_dic["simv"][v_n].items():
         if not opt_name.startswith("file__"):
             continue
         simv_dic["file_dic"][opt_name[6:]] = opt_cont.replace(
             "$", "$$").replace("\\", "").split(os.linesep)
     return simv_dic
Example #30
 def proc_reg(self, reg_module_lst):
     """reg process main"""
     proc_dic = {}
     proc_dic["reg_doc_dir"] = os.path.abspath(
         os.path.expandvars(
             pcom.rd_cfg(self.cfg_dic["proj"], "reg_dir", "doc", True)))
     proc_dic["workbook_hw"] = xlsxwriter.Workbook(
         f"{proc_dic['reg_doc_dir']}{os.sep}YJD_register.xlsx")
     proc_dic["workbook_sw"] = xlsxwriter.Workbook(
         f"{proc_dic['reg_doc_dir']}{os.sep}CPU_register.xlsx")
     proc_dic["sheet_sw"] = proc_dic["workbook_sw"].add_worksheet(
         "CPU1_regsiter")
     proc_dic["format_sw"] = proc_dic["workbook_sw"].add_format(
         {"font_size": "15"})
     proc_dic["reg_rtl_dir"] = os.path.abspath(
         os.path.expandvars(
             pcom.rd_cfg(self.cfg_dic["proj"], "reg_dir", "rtl", True)))
     proc_dic["reg_cfg_dir"] = f"{self.ced['SHARE_CONFIG']}{os.sep}pj_reg"
     proc_dic["reg_temp_dir"] = f"{self.ced['SHARE_TEMPLATES']}{os.sep}pj_reg"
     proc_dic["reg_cfg_iter"] = [
         f"{proc_dic['reg_cfg_dir']}{os.sep}{cc}.json"
         for cc in reg_module_lst
     ] if reg_module_lst else pcom.find_iter(proc_dic["reg_cfg_dir"],
                                             "*.json")
     proc_dic["reg_ralf_dir"] = os.path.abspath(
         os.path.expandvars(
             pcom.rd_cfg(self.cfg_dic["proj"], "reg_dir", "ralf", True)))
     proc_dic["ralf_dic"] = {"blk_bytes": 0}
     proc_dic["public_reg_dic"] = {}
     for reg_cfg_json in proc_dic["reg_cfg_iter"]:
         if not os.path.isfile(reg_cfg_json):
             raise Exception(f"reg cfg file {reg_cfg_json} is NA")
         LOG.info("processing reg config file %s", reg_cfg_json)
         sin_dic = {}
         sin_dic["json_dir"], sin_dic["json_name"] = os.path.split(
             reg_cfg_json)
         sin_dic["module_cname"], _ = os.path.splitext(sin_dic["json_name"])
         sin_dic["module_name"] = sin_dic["module_cname"].lower()
         sin_dic["rtl_dir"] = sin_dic["json_dir"].replace(
             proc_dic["reg_cfg_dir"], proc_dic["reg_rtl_dir"])
         os.makedirs(sin_dic["rtl_dir"], exist_ok=True)
         sin_dic["sheet_hw"] = proc_dic["workbook_hw"].add_worksheet(
             sin_dic["module_cname"])
         sin_dic["format_hw"] = proc_dic["workbook_hw"].add_format(
             {"font_size": "15"})
         with open(reg_cfg_json, encoding="gb18030") as file:
             data = json.load(file)
         data_dic = self.expand_data(data)
         rtl_file = f"{sin_dic['rtl_dir']}{os.sep}{sin_dic['module_name']}.v"
         LOG.info(f"generating reg rtl file {rtl_file}")
         pcom.ren_tempfile(
             f"{proc_dic['reg_temp_dir']}{os.sep}reg_base.v", rtl_file, {
                 "module_name": sin_dic["module_name"],
                 "data": self.fmt_v_data(data_dic)
             })
         self.gen_xls(data_dic, sin_dic["sheet_hw"], sin_dic["format_hw"])
         self.fmt_sw_data(data_dic)
         ralf_data_dic = self.fmt_ralf_data(data_dic)
         proc_dic["ralf_dic"][sin_dic["module_name"]] = ralf_data_dic
         proc_dic["ralf_dic"]["blk_bytes"] += ralf_data_dic["blk_bytes"]
         self.check_public_register(proc_dic["public_reg_dic"],
                                    proc_dic["ralf_dic"], ralf_data_dic)
     os.makedirs(proc_dic["reg_ralf_dir"], exist_ok=True)
     ralf_file = f"{proc_dic['reg_ralf_dir']}{os.sep}reg.ralf"
     LOG.info(f"generating reg ralf file {ralf_file}")
     pcom.ren_tempfile(
         f"{proc_dic['reg_temp_dir']}{os.sep}reg_base.ralf", ralf_file, {
             "public_data": proc_dic["public_reg_dic"],
             "data": proc_dic["ralf_dic"]
         })
     sw_data_dic = OrderedDict(self.sw_rtl_dic["NA"])
     self.sw_rtl_dic["MSR"] = OrderedDict(
         sorted(self.sw_rtl_dic["MSR"].items(),
                key=lambda reg: int(
                    f'0x{reg[1]["MSR_address"].split("h")[1]}', 16)))
     sw_data_dic.update(self.sw_rtl_dic["MSR"])
     self.gen_xls(sw_data_dic, proc_dic["sheet_sw"], proc_dic["format_sw"])
     LOG.info(f"generating reg doc file in {proc_dic['reg_doc_dir']}")
     os.makedirs(proc_dic["reg_doc_dir"], exist_ok=True)
     proc_dic["workbook_sw"].close()
     proc_dic["workbook_hw"].close()