Example #1
 def proc_dc_warning(self, warn_dic, dlf, line):
     """to process dc warning type information"""
     mop = pcom.REOpter(line)
     if mop.match(self.pat_dic["warn_pat1"]):
         (ew_file, ew_line, ew_info, ew_type) = (mop.group(2), mop.group(3),
                                                 mop.group(4), mop.group(5))
         warn_dic[ew_type].append([ew_file, ew_line, ew_info])
     elif mop.match(self.pat_dic["warn_pat2"]):
         ew_info, ew_type = mop.group(2), mop.group(3)
         if ew_type.strip() in ("Renaming file", ):
             for _ in range(3):
                 ew_info += f",{dlf.readline().strip()}"
             warn_dic[ew_type.strip()].append([None, None, ew_info])
     elif mop.match(self.pat_dic["warn_pat3"]):
         ew_type, ew_info = mop.group(2), mop.group(3)
         if ew_type.strip() in ("Duplicate library", ):
             warn_dic[ew_type.strip()].append([None, None, ew_info])
     elif mop.match(self.pat_dic["we_pat"]):
         ew_info, ew_type = mop.group(2), mop.group(3)
         warn_dic[ew_type].append([None, None, ew_info])
     else:
         while not self.pat_dic["line_pat"].search(line):
             new_line = dlf.readline().strip()
             line += f" {new_line}"
         mop = pcom.REOpter(line)
         if mop.match(self.pat_dic["we_pat"]):
             ew_info, ew_type = mop.group(2), mop.group(3)
             warn_dic[ew_type].append([None, None, ew_info])
         else:
             LOG.warning("WARNING condition not matched in line %s", line)
     return warn_dic
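
All of the snippets on this page go through pcom.REOpter, a small helper that wraps re and keeps the last match object around so group() can be read after a boolean match()/search() check. The helper itself is not listed here; the sketch below only mirrors the behaviour these examples rely on and is an assumption, not the project's actual implementation.

import re

class REOpter:
    """Minimal sketch of the regex helper assumed by the examples on this page."""

    def __init__(self, text):
        self.text = text
        self.mob = None

    def match(self, pattern):
        # pattern is a pre-compiled re.Pattern, as stored in pat_dic
        self.mob = pattern.match(self.text)
        return bool(self.mob)

    def search(self, pattern):
        self.mob = pattern.search(self.text)
        return bool(self.mob)

    def group(self, index):
        return self.mob.group(index)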
Example #2
 def parse_elab_log(self):
     """to parse log in elaboration stage"""
     if not os.path.isfile(self.log_dic["e"]):
         return
     elab_error_lst = []
     with open(self.log_dic["e"], errors="replace") as elf:
         fin_flg = False
         for line in elf:
             line = line.strip()
             mop = pcom.REOpter(line)
             if mop.search(self.pat_dic["i_pat"]):
                 continue
             elif mop.search(self.pat_dic["f_pat"]):
                 elab_error_lst.append(line)
             elif mop.match(self.pat_dic["ct_pat"]):
                 fin_flg = True
                 self.sc_dic["s"]["comp_cpu_time"] = mop.group(1)
     if elab_error_lst:
         self.sc_dic["s"]["elab_status"] = "failed"
         self.sc_dic["s"]["elab_error"] = os.linesep.join(elab_error_lst)[-1000:]
     elif not fin_flg:
         self.sc_dic["s"]["elab_status"] = "pending"
     else:
         self.sc_dic["s"]["elab_status"] = "passed"
     LOG.debug(
         "parsing simv %s elaboration log file %s done", self.cvsr_tup[1], self.log_dic["e"])
Example #3
 def parse_ch_report(self):
     """to parse coverage hierarchy report"""
     cd_rpt_file = f"{self.ced['COV_MERGE']}{os.sep}urgReport{os.sep}dashboard.txt"
     ch_rpt_file = f"{self.ced['COV_MERGE']}{os.sep}urgReport{os.sep}hierarchy.txt"
     if not os.path.isfile(cd_rpt_file):
         raise Exception(f"merged code coverage dashboard file {cd_rpt_file} is NA")
     if not os.path.isfile(ch_rpt_file):
         raise Exception(f"merged code coverage report file {ch_rpt_file} is NA")
     with open(cd_rpt_file) as rptf:
         cd_rpt_con = rptf.read()
     self.v_dic["ccs"] = re.search(
         r"Total Coverage Summary.*?(\d+\.\d+)", cd_rpt_con, flags=re.DOTALL).group(1)
     ch_score_dic = collections.OrderedDict()
     with open(ch_rpt_file) as rptf:
         ch_rpt_con = rptf.read()
     con_lst = re.findall(
         rf"{os.linesep}\s{{0,2}}-{{10,}}{os.linesep}(.*?)(?={os.linesep}\s+-{{10}}|$)",
         ch_rpt_con, flags=re.DOTALL)
     for index, con in enumerate(con_lst):
         p_str = "(top)" if index == 0 else ""
         for line in con.split(os.linesep):
             line = line.strip()
             mop = pcom.REOpter(line)
             if mop.match(re.compile(
                     r"([\d\.\-]+)\s+([\d\.\-]+)\s+([\d\.\-]+)\s+([\d\.\-]+)\s+"
                     r"([\d\.\-]+)\s+([\d\.\-]+)\s+([\d\.\-]+)\s+(\w+)")):
                 ch_score_dic[f"{mop.group(8)}{p_str}"] = {
                     "s": mop.group(1), "l": mop.group(2), "c": mop.group(3), "t": mop.group(4),
                     "f": mop.group(5), "b": mop.group(6), "a": mop.group(7)}
     return ch_score_dic
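
The eight-group pattern above implies that each scored row of hierarchy.txt carries seven numeric (or dashed) columns followed by an instance name. A quick standalone check against a hypothetical row (the values below are made up for illustration):

import re

hier_pat = re.compile(
    r"([\d\.\-]+)\s+([\d\.\-]+)\s+([\d\.\-]+)\s+([\d\.\-]+)\s+"
    r"([\d\.\-]+)\s+([\d\.\-]+)\s+([\d\.\-]+)\s+(\w+)")
row = "85.71  90.00  --  80.00  66.67  50.00  72.48  u_core"  # hypothetical hierarchy.txt row
mop = hier_pat.match(row)
print(mop.group(8), mop.group(1))  # u_core 85.71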
Example #4
 def parse_dc_qor_rpt(self):
     """to parse dc qor log"""
     dc_qor_file = os.path.join(
         self.dc_dic["rpt_dir"],
         pcom.rd_cfg(self.dc_cfg_dic, "set_args", QOR, True))
     if not os.path.isfile(dc_qor_file):
         return
     with open(dc_qor_file) as dqf:
         lines_str = dqf.read()
         for blk_str in lines_str.split("\n\n"):
             if "-----" not in blk_str or "in2" in blk_str or "2out" in blk_str:
                 continue
             items_lst = []
             for l_str in blk_str.split("\n"):
                 if "-----" in l_str or not l_str:
                     continue
                 if ":" in l_str:
                     str_lst = l_str.split(":")
                     items_lst.append(
                         {str_lst[0].strip(): float(str_lst[1])})
                 else:
                     mop = pcom.REOpter(l_str.strip())
                     items_lst.append(
                         mop.group(2) if mop.match(self.pat_dic["tpg_pat"]
                                                   ) else l_str.strip())
             if items_lst:
                 for item_dic in items_lst[1:]:
                     for key, value in item_dic.items():
                         self.dc_dic["qor_rpt"][items_lst[0]][key] = value
     self.dc_dic["qor_rpt"]["log_path"] = dc_qor_file
Example #5
 def parse_dc_tm_rpt(self):
     """to parse dc timing log"""
     dc_tm_file = os.path.join(
         self.dc_dic["rpt_dir"],
         pcom.rd_cfg(self.dc_cfg_dic, "set_args", TIMING, True))
     if not os.path.isfile(dc_tm_file):
         return
     with open(dc_tm_file) as dtf:
         targ_flag = False
         for line in dtf:
             line = line.strip()
             mop = pcom.REOpter(line)
             if line.startswith("Startpoint:"):
                 targ_flag = True
                 targ_dic = {}
             if targ_flag:
                 line_lst = line.split(":")
                 if line_lst[0] in ("Startpoint", "Endpoint", "Path Group"):
                     targ_dic[line_lst[0]] = line_lst[1].strip()
             if line.startswith("slack "):
                 targ_flag = False
                 if mop.match(self.pat_dic["slk_pat"]):
                     if float(mop.group(2)) < 0:
                         targ_dic[mop.group(1)] = float(mop.group(2))
                         self.dc_dic["tm_rpt"]["timing"].append(targ_dic)
                 else:
                     LOG.info("%s condition not matched in line %s",
                              mop.group(1), line)
     self.dc_dic["tm_rpt"]["log_path"] = dc_tm_file
Example #6
 def expand_data(self, data):
     """to expand data in original json file"""
     data_dic = nested_dict()
     for reg, reg_dic in data.items():
         if "<id>" in reg:
             if not reg_dic.get("range", None):
                 raise Exception(
                     f"auto step register {reg} has no range field")
             mop = pcom.REOpter(reg_dic["range"])
             if not mop.match(re.compile(r"\s*(\d*)\s*\,\s*(\d*)\s*")):
                 raise Exception(f"{reg} register range foramt error!")
             for id_num in range(int(mop.group(1)), int(mop.group(2)) + 1):
                 reg_real = reg.replace("<id>", str(id_num))
                 self.expand_id_fmt(reg_dic, data_dic, reg_real, id_num)
         else:
             local_address = reg_dic.get("local_address")
             global_address = reg_dic.get("global_address")
             if reg_dic["32bit/64bit"] == "64bit":
                 local_addr_lst = self.hex_add_num(local_address)
                 global_addr_lst = self.hex_add_num(global_address)
             else:
                 local_addr_lst = [local_address]
                 global_addr_lst = [global_address]
             self.detect_address(reg, "local_address", local_addr_lst)
             self.detect_address(reg, "global_address", global_addr_lst)
             if reg_dic["32bit/64bit"] == "64bit":
                 reg_dic["hlocal_address"] = local_addr_lst[1]
                 reg_dic["hglobal_address"] = global_addr_lst[1]
             data_dic[reg] = reg_dic
     return data_dic
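
expand_data stores its result in a nested_dict(), which is not defined in this snippet. A name like that usually means the recursive defaultdict idiom; the helper below is an assumption that matches how the example uses it, not the project's own implementation.

import collections

def nested_dict():
    """Recursive defaultdict: missing keys create further nested dicts on demand."""
    return collections.defaultdict(nested_dict)

data_dic = nested_dict()
data_dic["blk"]["reg0"]["local_address"] = "0x0000"  # intermediate levels appear automatically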
Example #7
 def parse_cg_report(self):
     """to parse coverage group report"""
     cg_rpt_file = f"{self.ced['COV_MERGE']}{os.sep}urgReport{os.sep}groups.txt"
     cp_rpt_file = f"{self.ced['COV_MERGE']}{os.sep}urgReport{os.sep}grpinfo.txt"
     if not os.path.isfile(cg_rpt_file):
         raise Exception(f"merged coverage groups report file {cg_rpt_file} is NA")
     cg_score_dic = collections.OrderedDict()
     with open(cg_rpt_file) as rptf:
         for line in rptf:
             line = line.strip()
             mop = pcom.REOpter(line)
             if mop.match(re.compile(r"(\d+\.\d+)\s+\d+$")):
                 self.v_dic["fcp"] = mop.group(1)
             elif mop.match(re.compile(r"(\d+\.\d+)\s+.*\w+::\w+::(\w+)")):
                 cg_score_dic[mop.group(2)] = {
                     "per": mop.group(1), "cp_dic": collections.OrderedDict()}
     if not os.path.isfile(cp_rpt_file):
         LOG.warning("merged coverage points report file %s is NA", cp_rpt_file)
         cp_rpt_con = ""
     else:
         with open(cp_rpt_file) as rptf:
             cp_rpt_con = rptf.read()
     for cg_n, cg_dic in cg_score_dic.items():
         cg_sum_con = re.search(
             rf"Summary for Group\s+(?:\w+::)+{cg_n}(.*?{os.linesep}-{{60}})",
             cp_rpt_con, flags=re.DOTALL).group(1)
         var_con, cro_con = re.search(
             rf"Variables for Group\s+(?:\w+::)+{cg_n}(.*?){os.linesep}"
             rf"Crosses for Group\s+(?:\w+::)+{cg_n}(.*?){os.linesep}-{{60}}",
             cg_sum_con, flags=re.DOTALL).groups()
         for line in var_con.split(os.linesep):
             line = line.strip()
             mop = pcom.REOpter(line)
             if mop.match(re.compile(r"(\w+)\s+(?:\d+\s+)+(\d+\.\d+)\s+(?:\d+\s+)+")):
                 cg_dic["cp_dic"][f"{cg_n}::{mop.group(1)}"] = mop.group(2)
         for line in cro_con.split(os.linesep):
             line = line.strip()
             mop = pcom.REOpter(line)
             if mop.match(re.compile(r"(\w+)\s+(?:\d+\s+)+(\d+\.\d+)\s+(?:\d+\s+)+")):
                 cg_dic["cp_dic"][f"{cg_n}::{mop.group(1)}(cross)"] = mop.group(2)
     return cg_score_dic
Example #8
def gen_post_wv(wv_dtl_rpt, cfg_dic):
    """to generate the post_waiver_rpt"""
    type_lst = []
    sp_ep_lst = []
    for sect in cfg_dic.sections():
        type_lst.extend(pcom.rd_cfg(cfg_dic, sect, "type"))
        sp_ep_lst.extend(pcom.rd_cfg(cfg_dic, sect, "sp|ep"))
    with open(wv_dtl_rpt) as drf:
        mop = pcom.REOpter(drf.read())
    str_pat = re.compile(
        rf"Violations{os.linesep}=+{os.linesep}(.*?){os.linesep}{{3,4}}",
        re.DOTALL)
    wv_lst = mop.group(1).split(f"{os.linesep*2}") if mop.search(
        str_pat) else []
    wv_pat = re.compile(rf"{os.linesep}-+{os.linesep}(.*)", re.DOTALL)
    result_lst = []
    for waiver in wv_lst:
        sop = pcom.REOpter(waiver)
        if sop.search(wv_pat):
            waiver = sop.group(1)
        if type_lst and re.search(
                "|".join(
                    [re.escape(cc).replace(r"\*", ".*")
                     for cc in type_lst]), waiver):
            continue
        for sp_ep in sp_ep_lst:
            if "|" not in sp_ep:
                LOG.warning(f"the format of {sp_ep} is wrong")
                continue
            if re.search(
                    re.escape(sp_ep.split("|")[0]).replace(r"\*", ".*"),
                    waiver.split(os.linesep)[0]) and re.search(
                        re.escape(sp_ep.split("|")[1]).replace(r"\*", ".*"),
                        waiver.split(os.linesep)[1]):
                break
        else:
            result_lst.append(waiver)
    return result_lst
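
gen_post_wv turns the wildcard strings from the waiver config into regular expressions with re.escape(cc).replace(r"\*", ".*"): everything is taken literally except "*", which becomes match-anything. A short standalone check of that conversion (the sample strings are hypothetical):

import re

def wildcard_to_regex(expr):
    # escape every regex metacharacter, then turn the escaped '*' back into '.*'
    return re.escape(expr).replace(r"\*", ".*")

pat = wildcard_to_regex("u_top/*_reg")            # 'u_top/.*_reg' on Python 3.7+
print(bool(re.search(pat, "u_top/ctrl_reg")))     # True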
Example #9
 def parse_dc_log(self):
     """to parse main dc log"""
     warn_dic = collections.defaultdict(list)
     error_dic = collections.defaultdict(list)
     with open(self.dc_dic["log_file"]) as dlf:
         for line in dlf:
             line = line.strip()
             mop = pcom.REOpter(line)
             if line.startswith("Stack trace"):
                 error_dic["tcrash"].append("stack overflow in tool crash")
                 break
             elif line.startswith("CPU usage"):
                 if mop.match(self.pat_dic["ct_pat"]):
                     self.dc_dic["cpu_usage"] = mop.group(1)
             elif line.startswith("Warning:"):
                 warn_dic = self.proc_dc_warning(warn_dic, dlf, line)
             elif line.startswith("Error:"):
                 if mop.match(self.pat_dic["we_pat"]):
                     ew_info, ew_type = mop.group(2), mop.group(3)
                     error_dic[ew_type].append(ew_info)
                 else:
                     mop = pcom.REOpter(f"{line} {dlf.readline().strip()}")
                     if mop.match(self.pat_dic["we_pat"]):
                         ew_info, ew_type = mop.group(2), mop.group(3)
                         error_dic[ew_type].append(ew_info)
                     else:
                         LOG.info("ERROR: condition not matched in line %s",
                                  line)
     if "cpu_usage" not in self.dc_dic:
         self.dc_dic["cpu_usage"] = "NA"
         self.dc_dic[
             "status"] = "crash" if "tcrash" in error_dic else "running"
     else:
         self.dc_dic["status"] = "finished"
     self.dc_dic["dc_log"]["warning"] = warn_dic
     self.dc_dic["dc_log"]["error"] = error_dic
     self.dc_dic["dc_log"]["log_path"] = self.dc_dic["log_file"]
Example #10
 def parse_tm(self):
     """to parse dc timing log"""
     if not os.path.isfile(self.tm_dic["dt_file"]):
         return
     slk_pat = re.compile(r"(.*?)\s*\(.*\)\s*(.*)")
     with open(self.tm_dic["dt_file"]) as dtf:
         blk_flag = False
         for line in dtf:
             line = line.strip()
             if line.startswith("Startpoint:"):
                 blk_flag = True
                 blk_dic = {}
             if not blk_flag:
                 continue
             line_lst = line.split(":")
             if line_lst[0] in ("Startpoint", "Endpoint", "Path Group"):
                 blk_dic[line_lst[0]] = line_lst[1].strip()
             if line.startswith("slack "):
                 blk_flag = False
                 mop = pcom.REOpter(line)
                 if mop.match(slk_pat):
                     if float(mop.group(2)) >= 0:
                         continue
                     bpg = blk_dic["Path Group"]
                     if self.tm_dic["group"] and (
                             not any(fnmatch.fnmatch(bpg, cc) for cc in self.tm_dic["group"])):
                         continue
                     if "in2" in bpg or "2out" in bpg:
                         nl_dic = self.sum_dic["nl_dic"]
                         nl_dic[bpg] = nl_dic[bpg]+1 if bpg in nl_dic else 1
                         self.statistics_sum(
                             (blk_dic["Path Group"], blk_dic["Startpoint"],
                              blk_dic["Endpoint"]), self.sum_dic["sum_nl_dic"], mop.group(2))
                     else:
                         l_dic = self.sum_dic["l_dic"]
                         l_dic[bpg] = l_dic[bpg]+1 if bpg in l_dic else 1
                         spt_lst = blk_dic["Startpoint"].split("/")
                         ept_lst = blk_dic["Endpoint"].split("/")
                         spt = os.path.join(*spt_lst[0:self.tm_dic["level"]])
                         ept = os.path.join(*ept_lst[0:self.tm_dic["level"]])
                         self.statistics_sum(
                             (blk_dic["Path Group"], spt, ept),
                             self.sum_dic["sum_l_dic"], mop.group(2))
                 else:
                     LOG.info("line %s is not matched pattern %s", line, slk_pat)
Example #11
 def parse_dut_ana_log(self):
     """to parse dut log in analysis stage"""
     if not os.path.isfile(self.log_dic["da"]):
         return
     dut_ana_error_lst = []
     with open(self.log_dic["da"], errors="replace") as daf:
         for line in daf:
             line = line.strip()
             mop = pcom.REOpter(line)
             if mop.search(self.pat_dic["i_pat"]):
                 continue
             elif mop.search(self.pat_dic["f_pat"]):
                 dut_ana_error_lst.append(line)
     if dut_ana_error_lst:
         self.sc_dic["s"]["dut_ana_status"] = "failed"
         self.sc_dic["s"]["dut_ana_error"] = os.linesep.join(dut_ana_error_lst)[-1000:]
     else:
         self.sc_dic["s"]["dut_ana_status"] = "passed"
     LOG.debug(
         "parsing simv %s dut analysis log file %s done", self.cvsr_tup[1], self.log_dic["da"])
Example #12
 def parse_simu_log(self):
     """to parse log in simulation stage"""
     if not os.path.isfile(self.log_dic["s"]):
         return
     simu_error_lst = []
     with open(self.log_dic["s"], errors="replace") as slf:
         fin_flg = False
         uvm_flg = False
         pass_flg = False
         for line in slf:
             line = line.strip()
             mop = pcom.REOpter(line)
             if mop.search(self.pat_dic["i_pat"]):
                 continue
             elif mop.search(self.pat_dic["f_pat"]):
                 simu_error_lst.append(line)
             elif mop.match(self.pat_dic["fin_pat"]):
                 fin_flg = True
                 self.sc_dic["c"]["simu_time"] = mop.group(1)
             elif mop.match(self.pat_dic["ct_pat"]):
                 self.sc_dic["c"]["simu_cpu_time"] = mop.group(1)
             elif mop.search(self.pat_dic["uvm_pat"]):
                 uvm_flg = True
             elif self.pat_dic["p_pat"].pattern and mop.search(self.pat_dic["p_pat"]):
                 pass_flg = True
     if simu_error_lst:
         self.sc_dic["c"]["simu_status"] = "failed"
         self.sc_dic["c"]["simu_error"] = os.linesep.join(simu_error_lst)[-1000:]
     elif not fin_flg:
         self.sc_dic["c"]["simu_status"] = "pending"
     elif uvm_flg or pass_flg:
         self.sc_dic["c"]["simu_status"] = "passed"
     else:
         self.sc_dic["c"]["simu_status"] = "unknown"
     LOG.debug(
         "parsing case %s simulation log file %s done", self.cvsr_tup[0], self.log_dic["s"])