def scan_report(self, rpt_file):
    """Scan an Altera .sta.rpt for achieved fmax and pair it with the SDC target.

    Fills self.data with [target_fmax, fmax formatted "%.2f", clk_name].
    self.data is pre-filled with "NA" so a failed scan leaves placeholders.
    Returns 1 when no SDC file can be located, None otherwise.
    """
    self.data = ["NA"] * len(self.title)
    if not_exists(rpt_file, "report file for scanning Altera fmax"):
        return
    # the SDC constraints file sits next to the report (same basename)
    sdc_file = re.sub("\.sta\.rpt", ".sdc", rpt_file)
    if not_exists(sdc_file):
        # fall back to the parent directory's SDC
        sdc_file = os.path.join(os.path.dirname(sdc_file), "..", os.path.basename(sdc_file))
        if not_exists(sdc_file, "SDC file"):
            return 1
    target_fmax = self.get_target_fmax(sdc_file)
    fmax, clk_name = self.get_fmax(rpt_file)
    self.data = [target_fmax, "%.2f" % fmax, clk_name]
def create_conf_options(self):
    """Locate the configuration directory and load every *.conf file in it.

    Resolves the 'conf' entry of self.qa_options, falling back to a path
    relative to this module, stores the absolute path back into
    self.qa_options, then fills self.conf_options via get_conf_options().
    Returns 1 when no configuration path can be found.
    """
    _conf = self.qa_options.get("conf")
    if not_exists(_conf):
        # unix cannot find </trunk/bin/runLattice.py/../../conf> path
        #_scripts_path = os.path.abspath(sys.argv[0])
        #_conf = os.path.join(os.path.dirname(_scripts_path), "..", _conf)
        # resolve relative to this module instead of sys.argv[0]
        _conf = os.path.join(os.path.dirname(__file__), "..", '..', _conf)
        if not_exists(_conf, "Configuration path"):
            return 1
    _conf = os.path.abspath(_conf)
    self.qa_options["conf"] = _conf
    conf_files = glob.glob(os.path.join(_conf, "*.conf"))
    self.conf_options = get_conf_options(conf_files)
def get_real_top_dir(self): if self.top_dir: if not_exists(self.top_dir, "Base top source path"): return 1 self.real_top_dir = os.path.abspath(self.top_dir) else: self.real_top_dir = self.job_dir
def parse_file(a_file, pattern, at_once=False):
    """Parse a file line by line with user-supplied regular expression pattern(s).

    pattern may be:
      * a dict mapping key -> compiled regex (or key -> list of regexes):
        returns {key: first captured group} for every pattern that matched
        somewhere in the file; with at_once=True, returns as soon as the
        first match of any key is found.
      * a single compiled regex: returns group(1) of the first matching
        line, or None when nothing matches.

    Returns None when the source file does not exist.
    """
    if not_exists(a_file, "source file"):
        return
    # 'with' guarantees the handle is closed even on the early at_once
    # returns (the original leaked every handle it opened)
    if isinstance(pattern, dict):
        data = dict()
        with open(a_file) as file_ob:
            for line in file_ob:
                line = line.strip()
                for key, p in pattern.items():
                    if isinstance(p, list):
                        for sub_p in p:
                            sub_m = sub_p.search(line)
                            if sub_m:
                                data[key] = sub_m.group(1)
                                if at_once:
                                    return data
                                # first sub-pattern wins for this key/line
                                break
                    else:
                        m = p.search(line)
                        if m:
                            data[key] = m.group(1)
                            if at_once:
                                return data
                            # stop trying further keys on this line
                            break
        return data
    else:
        with open(a_file) as file_ob:
            for line in file_ob:
                m = pattern.search(line)
                if m:
                    return m.group(1)
def set_lattice_environment(diamond, rtf, os_name, env_sep, dry_run):
    """Export the Lattice Diamond/RTF tool paths into os.environ.

    Prefers `diamond` over `rtf`.  Returns 1 on error (neither tree
    given, or - unless dry_run - a derived path missing), else None.
    """
    if diamond:
        root = os.path.abspath(diamond)
        foundry = os.path.join(root, "ispfpga")
        cpld_bin = os.path.join(root, "bin", os_name)
        fpga_bin = os.path.join(foundry, "bin", os_name)
        tcl_lib = os.path.join(root, "tcltk", "lib", "tcl8.5")
    elif rtf:
        foundry = os.path.abspath(rtf)
        cpld_bin = os.path.join(foundry, "bin", os_name)
        fpga_bin = cpld_bin
        tcl_lib = ""
    else:
        print_error("Neither Diamond nor RTF specified !!")
        return 1
    both_bins = cpld_bin + env_sep + fpga_bin
    os.environ["FOUNDRY"] = foundry
    os.environ["PATH"] = both_bins + env_sep + os.getenv("PATH", "")
    os.environ["LD_LIBRARY_PATH"] = both_bins
    os.environ["QACPLDBIN"] = cpld_bin
    os.environ["QAFPGABIN"] = fpga_bin
    if tcl_lib:
        os.environ["TCL_LIBRARY"] = tcl_lib
    if dry_run:
        return
    # validate each derived path only for real (non-dry) runs
    for candidate in (foundry, cpld_bin, fpga_bin, tcl_lib):
        if candidate and not_exists(candidate, "Diamond Environment Path"):
            return 1
def set_xilinx_envs(self):
    """Point QAISEBIN at the Xilinx ISE binary dir (nt64/nt per self.x64).

    Verifies the ISE install path unless self.dry_run; returns 1 when
    the path is missing.
    """
    ise = self.get_real_setting("ise")
    if not self.dry_run and not_exists(ise, "Xilinx ISE path"):
        return 1
    bin_name = "nt64" if self.x64 else "nt"
    os.environ["QAISEBIN"] = os.path.join(ise, "bin", bin_name)
def scan_report_temp(self, rpt_file):
    """Scan an Altera .sta.rpt and report the worst PAP figure.

    PAP = achieved fmax as a percentage of the per-clock target fmax
    taken from the matching .sdc file; the clock with the LOWEST
    percentage wins.  Fills self.data with
    [seed, worst_pap, target_fmax, fmax, clk_name].
    Returns 1 when the SDC file cannot be found.
    Contains leftover debug prints - presumably a work-in-progress
    variant of scan_report().
    """
    self.data = ["NA"] * len(self.title)
    if not_exists(rpt_file, "report file for scanning Altera fmax"):
        return
    sdc_file = re.sub("\.sta\.rpt", ".sdc", rpt_file)
    # the seed number is encoded as the report's parent directory name
    seed = os.path.basename(os.path.dirname(sdc_file))
    if not_exists(sdc_file):
        # fall back to the parent directory's SDC
        sdc_file = os.path.join(os.path.dirname(sdc_file), "..", os.path.basename(sdc_file))
        if not_exists(sdc_file, "SDC file"):
            return 1
    target_fmax = self.get_target_clk_fmax(sdc_file)
    print target_fmax
    fmax_clk_name = self.get_fmax_temp(rpt_file)
    #print '-----------------------------'
    #print target_fmax
    #print fmax, clk_name
    #raw_input()
    # running minimum of the achieved/target percentage across clocks
    pap = 1000000000
    pap_clk = ''
    pap_target_fmax = ''
    pap_fmax = -1
    print fmax_clk_name
    #raw_input()
    for clk_name, fmax in fmax_clk_name.items():
        if clk_name in target_fmax.keys():
            pap1 = (fmax / target_fmax[clk_name]) * 100
            pap1 = "%.2f" % pap1
            print pap1
            print '================================='
            if float(pap1) < pap:
                print 'GGGGGGGGGGGGGGGGGGGGG', pap1, pap
                pap = float(pap1)
                pap_target_fmax = target_fmax[clk_name]
                pap_clk = clk_name
                pap_fmax = fmax
    pap = str(pap) + '%'
    self.data = [seed, pap, str(pap_target_fmax), str(pap_fmax), pap_clk]
    print self.data
    #raw_input()
    #self.data = [seed,pap,"%.2f"%target_fmax[clk_name], "%.2f" % fmax, clk_name]
    # NOTE(review): the original source has a stray unmatched ''' right
    # here (probably delimiting a commented-out region that was lost in
    # formatting); kept as a comment so the module stays parseable.
def set_altera_envs(self):
    """Export QUARTUS_ROOTDIR and QAQUARTUSBIN for the configured Quartus tree.

    Verifies the Quartus path unless self.dry_run; returns 1 when it is
    missing.  Picks bin64 vs bin from self.x64.
    """
    quartus = self.get_real_setting("quartus")
    if not self.dry_run and not_exists(quartus, "Quartus Path"):
        return 1
    os.environ["QUARTUS_ROOTDIR"] = quartus
    sub_bin = "bin64" if self.x64 else "bin"
    os.environ["QAQUARTUSBIN"] = os.path.join(quartus, sub_bin)
def get_target_fmax(self):
    """Read the target fmax (PERIOD-style constraint) from the design's UCF.

    Looks for <rpt_file base>.ucf next to the report file.  Sets
    self.target_fmax to the first self.p_tf match's group(1), "NA" when
    the file contains no match, or "-" when no UCF file exists.
    """
    ucf_file = get_fname_ext(self.rpt_file)[0] + ".ucf"
    if not_exists(ucf_file, "UCF file"):
        self.target_fmax = "-"
        return
    # 'with' guarantees the UCF handle is closed (the original leaked it)
    with open(ucf_file) as ucf_ob:
        for line in ucf_ob:
            m_tf = self.p_tf.search(line)
            if m_tf:
                self.target_fmax = m_tf.group(1)
                break
        else:
            # no constraint line found anywhere in the file
            self.target_fmax = "NA"
def get_clocks_from_sta_summary(sta_summary):
    """Collect unique clock names from an Altera STA report summary.

    Relies on the module-level p_clock pattern (group 1 = clock name).
    Returns a case-insensitively sorted list, or None when the summary
    file does not exist.
    """
    if not_exists(sta_summary, "sta report summary file"):
        return
    clocks = set()
    # close the handle deterministically (the original leaked it)
    with open(sta_summary) as summary_ob:
        for line in summary_ob:
            m_clock = p_clock.search(line.strip())
            if m_clock:
                clocks.add(m_clock.group(1))
    # sorted() replaces the list()/sort() pair in one step
    return sorted(clocks, key=str.lower)
def get_target_fmax_temp(self):
    """Read per-clock target fmax values from the design's UCF file.

    Fills self.target_fmax_temp with {clock_name: fmax_MHz_string} for
    every PERIOD constraint line.
    """
    ucf_file = get_fname_ext(self.rpt_file)[0] + ".ucf"
    self.target_fmax_temp = {}
    #PERIOD "alg_8clk_c" 136.00 MHz HIGH 50%;
    self.p_tf_temp = re.compile(r'PERIOD "(\S+)"\s+([\d\.]+)\s+MHz')
    if not_exists(ucf_file, "UCF file"):
        # NOTE(review): sets self.target_fmax (not target_fmax_temp) on
        # the missing-file path - looks like a leftover from
        # get_target_fmax; confirm the intended attribute.
        self.target_fmax = "-"
    else:
        for line in open(ucf_file):
            m_tf = self.p_tf_temp.search(line)
            if m_tf:
                # clock name -> target fmax (MHz) kept as a string
                self.target_fmax_temp[m_tf.group(1)] = m_tf.group(2)
def update_frequency(prf_file, fixed_number):
    """Rewrite every frequency line of a Lattice .prf file to <fixed_number> MHz.

    The original file is first backed up to <prf_file>.b, then rewritten
    in place from the backup with each p_frequency line's MHz value
    replaced via p_mhz.  Returns 1 when the file does not exist.
    """
    if not_exists(prf_file, "elder prf file"):
        return 1
    prf_file_bak = prf_file + ".b"
    wrap_copy_file(prf_file, prf_file_bak, force=True)
    # both handles close deterministically (the original never closed the
    # backup handle); write(line + "\n") emits exactly what the old
    # py2-only 'print >> prf_ob, line' statement did
    with open(prf_file_bak) as prf_lines, open(prf_file, "w") as prf_ob:
        for line in prf_lines:
            line = line.strip()
            if p_frequency.search(line):
                line = p_mhz.sub("%s MHz" % fixed_number, line)
            prf_ob.write(line + "\n")
def initialize(self):
    """Load the Diamond devkit XML (self.xml_file) into self.devkit_dict.

    Each <Family> element contributes one entry per part child; a part's
    dict carries the family name plus all of the part's XML attributes,
    with the 'opt' attribute translated through self.opt_dict.
    Returns 1 when the devfile is missing.
    """
    if not_exists(self.xml_file, "Diamond Devfile"):
        return 1
    xml_parser = ElementTree.parse(self.xml_file)
    family_table = xml_parser.findall("/Family")
    for item in family_table:
        family_name = item.get("name")
        t_family = dict(family=family_name)
        # iterate children directly: Element.getchildren() was removed in
        # Python 3.9; plain iteration is equivalent on py2 and py3
        for part_table in item:
            part_name = part_table.get("name")
            part_dict = dict()
            part_dict.update(t_family)
            part_dict.update(part_table.attrib)
            ori_opt = part_dict.get("opt")
            if ori_opt:  # must have an opt item before translating it
                part_dict["opt"] = self.opt_dict.get(ori_opt)
            self.devkit_dict[part_name] = part_dict
def set_iCEcube_environment(ice_cube, dry_run):
    """Export the iCEcube2 tool and library paths into os.environ.

    Unless dry_run, verifies QAICEBIN/QAICELIB/QAICESYN afterwards and
    returns 1 when one of them is missing.
    """
    backend = os.path.join(ice_cube, "sbt_backend")
    backend_bin = os.path.join(backend, "bin", "win32", "opt")
    os.environ["QAICEBIN"] = backend_bin
    os.environ["QAICELIB"] = backend
    os.environ["QAICESYN"] = backend_bin
    os.environ["SYNPLIFY_PATH"] = os.path.join(ice_cube, "synpbase")
    # set SYNPLIFY_PATH=D:\suzhilong\iCEcube2.2013.03\SBTools\synpbase
    # "D:\suzhilong\iCEcube2.2013.03\SBTools\sbt_backend\bin\win32\opt\synpwrap\synpwrap.exe"
    lse_root = os.path.join(ice_cube, "LSE")
    os.environ["QAICELSE"] = os.path.join(lse_root, "bin", "nt")
    os.environ["FOUNDRY"] = lse_root
    if dry_run:
        return
    for key in ("QAICEBIN", "QAICELIB", "QAICESYN"):
        if not_exists(os.getenv(key), "iCEcube Environment Path"):
            return 1
def scan_report(self, rpt_file, hot_clk=""):
    """Scan a timing report for per-clock fmax sections.

    Walks rpt_file with the precompiled self.p_* patterns and fills
    self.all_raw_data with one dict per clock section: clkName,
    targetFmax, fmax (MHz from the minimum period), logic/route shares
    and level count.  When hot_clk is given, only that clock's sections
    are kept.  Returns 1 when rpt_file is missing.
    """
    self.all_raw_data = list()
    if not_exists(rpt_file, "report file"):
        return 1
    self.rpt_file = rpt_file
    self.get_target_fmax()
    self.all_raw_data = list()
    start = 0
    t_dict = dict()
    # close the report handle deterministically (original leaked it)
    with open(rpt_file) as rpt_ob:
        for line in rpt_ob:
            line = line.strip()
            if not start:
                # wait for the section header before collecting fields
                if self.p_clock_start.search(line):
                    start = 1
                continue
            m_clock = self.p_clock.search(line)
            m_min_period = self.p_min_period.search(line)
            m_logic_route = self.p_logic_route.search(line)
            m_level = self.p_level.search(line)
            if m_clock:
                _clk_name = m_clock.group(1)
                if hot_clk:
                    if _clk_name != hot_clk:
                        # not the requested clock: drop the partial record
                        # and wait for the next section
                        t_dict = dict()
                        start = 0
                        continue
                t_dict["clkName"] = m_clock.group(1)
            elif m_min_period:
                _ns = m_min_period.group(1)
                # plain arithmetic instead of eval() on report text
                _mhz = 1000.0 / float(_ns)
                t_dict["fmax"] = "%.3f" % _mhz
                t_dict["targetFmax"] = self.target_fmax
            elif m_level:
                t_dict["level"] = m_level.group(1)
            elif m_logic_route:
                t_dict["logic"] = m_logic_route.group("logic")
                t_dict["route"] = m_logic_route.group("route")
                # logic/route is the last field of a section: flush it
                start = 0
                self.all_raw_data.append(t_dict)
                t_dict = dict()
def get_clocks_from_map_twr(map_twr):
    """Collect clock names from a _map.twr file.

    A clock entry may wrap over several physical lines: lines are
    accumulated into my_line until a blank line ends the entry, then the
    last whitespace-separated token is taken as the clock name.
    Returns a case-insensitively sorted list of unique names, or None
    when the file is missing.
    """
    if not_exists(map_twr, "_map.twr file"):
        return
    clocks = set()
    my_line = ""
    for line in open(map_twr):
        line = line.strip()
        if p_clock.search(line):
            # start of a (possibly wrapped) clock entry
            my_line = line
        if my_line:
            if not line:
                # blank line terminates the entry: can get clock name
                my_line = shlex.split(my_line)
                clocks.add(my_line[-1])
                my_line = ""
            else:
                # continuation line - keep accumulating
                # NOTE(review): on the very line that matched p_clock
                # above, this appends that same line a second time;
                # harmless for the trailing-token extraction but looks
                # unintended - confirm against a real _map.twr.
                line = " " + line
                my_line += line
    clock_list = list(clocks)
    clock_list.sort(key=str.lower)
    return clock_list
def get_clocks_from_mrp_file(mrp_file):
    """Extract clock net names from a Lattice .mrp file.

    Supports entries that wrap across physical lines, e.g.::

        Net aaa/.../ddr_mem_8/ins
        t/clk_op: 11 loads, ... (Driver: ... )

    The collected region (between the "Number of clocks:" line and the
    "Number of Clock Enables" line) is joined WITHOUT separators so that
    names broken mid-word are re-fused, then matched as " Net <name>:".
    Returns the list of names, or None when the file is missing or the
    parsed count disagrees with the file's own "Number of clocks" figure.
    """
    if not_exists(mrp_file, "Lattice Mrp file"):
        return
    # raw strings so the regex escapes are explicit
    p_clock_number = re.compile(r"Number\s+of\s+clocks:\s+(\d+)")
    p_clock = re.compile(r"\s+Net\s+([^:]+):")
    p_stop = re.compile("Number of Clock Enables")
    clock_number = 0
    clocks_lines = list()
    # close the handle deterministically (the original leaked it)
    with open(mrp_file) as mrp_ob:
        for line in mrp_ob:
            line = line.rstrip()
            if not clock_number:
                mcn = p_clock_number.search(line)
                if mcn:
                    clock_number = int(mcn.group(1))
                continue
            if p_stop.search(line):
                break
            clocks_lines.append(line)
    clocks_lines = "".join(clocks_lines)
    clocks = list()
    m_clock = p_clock.findall(clocks_lines)
    for item in m_clock:
        # strip any whitespace that survived the re-fusing
        clocks.append(re.sub(r"\s+", "", item))
    if len(clocks) != clock_number:
        print_error("Not found %d clocks in %s" % (clock_number, mrp_file))
        return
    return clocks
def get_clocks_from_mrp_file(mrp_file):
    """Extract clock net names from a Lattice .mrp file (single-line entries).

    Simpler variant matching only one-line "Net <name>:" entries between
    the "Number of clocks:" and "Number of Clock Enables" markers.
    Returns the list of names, or None when the file is missing or the
    count disagrees with the file's "Number of clocks" figure.
    NOTE(review): shares its name with the wrapped-line variant; if both
    live in the same module, whichever is defined later shadows the
    other - confirm which one callers expect.
    """
    if not_exists(mrp_file, "Lattice Mrp file"):
        return
    # raw strings so the regex escapes are explicit
    p_clock_number = re.compile(r"^Number\s+of\s+clocks:\s+(\d+)")
    p_clock = re.compile(r"^Net\s+([^:]+):")
    p_stop = re.compile("Number of Clock Enables")
    clock_number = 0
    clocks = list()
    # close the handle deterministically (the original leaked it)
    with open(mrp_file) as mrp_ob:
        for line in mrp_ob:
            line = line.strip()
            if not clock_number:
                mcn = p_clock_number.search(line)
                if mcn:
                    clock_number = int(mcn.group(1))
                continue
            if p_stop.search(line):
                break
            mc = p_clock.search(line)
            if mc:
                clocks.append(mc.group(1))
    if len(clocks) != clock_number:
        print_error("Not found %d clocks in %s" % (clock_number, mrp_file))
        return
    return clocks
def scan_report():
    """Scan all Altera designs under opt.job_dir and build CSV reports.

    For every <job_dir>/<design>/<target-dir> this collects fit
    (resources), sta (fmax/PAP) and time/memory figures via the
    tool_scan_altera_temp scanners, appending one row per design to a
    combined CSV plus per-category _fit/_sta/_time_mem CSVs (one set per
    target-dir name).  Afterwards every report is sorted on its PAP
    column via report_sort_temp.

    NOTE(review): the os.listdir(".") / relative-path accesses assume the
    working directory has been switched into each scan dir, presumably
    by RecoverPath - confirm against RecoverPath's implementation.
    """
    opt = option()
    scanner = (tool_scan_altera_temp.ScanAlteraFit(), tool_scan_altera_temp.ScanAlteraFmax())
    job_dir = opt.job_dir
    #design = opt.design
    #syn = opt.synthesis
    #family = opt.family
    report_name = opt.report_name  # this is useless now
    for_pattern = ''
    #pap = opt.pap
    scan_fit = tool_scan_altera_temp.ScanAlteraFit()
    scan_fmax = tool_scan_altera_temp.ScanAlteraFmax()
    scan_time_mem = tool_scan_altera_temp.ScanTimeMem()
    all_designs = []
    if os.path.isdir(job_dir):
        pass
    else:
        print 'The job_dir:%s is not a directory' % job_dir
        return
    for dir in os.listdir(job_dir):
        # get all the design paths (every sub-directory is a design)
        dir2 = os.path.join(job_dir, dir)
        if os.path.isdir(dir2):
            all_designs.append(dir)
        else:
            pass
    report_file_list = []
    if 0:
        pass
    else:
        root_dir = os.getcwd()
        for design in all_designs:
            print_always('scanning %s' % design)
            design_path = os.path.join(job_dir, design)
            for dir in os.listdir(design_path):
                # at here dir should be as "_cyclone4_syn"
                dir_scan = os.path.join(design_path, dir)
                if os.path.isdir(dir_scan):
                    pass
                else:
                    continue
                if 1:
                    #####################
                    # one CSV per target-dir name, shared across designs
                    scanner = (scan_fit, scan_fmax, scan_time_mem)
                    report_file = os.path.join(job_dir, dir + '.csv')
                    report_file_list.append(report_file)
                    if not_exists(report_file):
                        title = ["Design"]
                        for item in scanner:  # fit / sta / time_mem titles
                            title += item.get_title()
                        append_file(report_file, ",".join(title))
                    if 1:
                        report_name, subtex = os.path.splitext(report_file)
                        report_file_fit = report_name + '_fit' + subtex
                        if not_exists(report_file_fit):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[0]).get_title()
                            append_file(report_file_fit, ",".join(title))
                        report_file_sta = report_name + '_sta' + subtex
                        if not_exists(report_file_sta):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[1]).get_title()
                            append_file(report_file_sta, ",".join(title))
                        report_file_time_mem = report_name + '_time_mem' + subtex
                        if not_exists(report_file_time_mem):
                            title = ["Design"]
                            title += (scanner[2]).get_title()
                            append_file(report_file_time_mem, ",".join(title))
                    ######################
                    recover = RecoverPath(dir_scan)
                    fit_file = sta_file = map_file = srr_file = ""
                    if 'rev_1' in os.listdir('.'):
                        # synthesis revision dir present: scan its .srr log
                        scan_time_mem.reset()
                        foo = os.path.join(dir_scan, 'rev_1')
                        srr_file = get_unique_file(foo + '/' + '*.srr')
                        if srr_file:
                            scan_time_mem.scan_srr(srr_file)
                    for foo in os.listdir("."):
                        data = []
                        if os.path.isdir(foo):
                            if re.search("Target", foo):
                                # fresh scanners per target directory
                                scan_fit.reset()
                                scan_fmax.reset()
                                scan_time_mem.set_fit_data()
                                scan_time_mem.set_map_data()
                                print_always(" Scanning %s" % foo)
                                fit_file = get_unique_file(foo + '/' + "*.fit.rpt")
                                sta_file = get_unique_file(foo + '/' + "*.sta.rpt")
                                map_file = get_unique_file(foo + '/' + '*.map.rpt')
                                if fit_file:
                                    scan_fit.scan_report(fit_file)
                                    scan_time_mem.scan_fit(fit_file)
                                    data1 = [design] + scan_fit.get_data()
                                    append_file(report_file_fit, ",".join(data1))
                                    data = data + data1
                                if sta_file:
                                    scan_fmax.scan_report_temp(sta_file)
                                    data2 = [design] + scan_fmax.get_data()
                                    append_file(report_file_sta, ",".join(data2))
                                    # drop the duplicated leading design name
                                    data = data + data2[1:]
                                if map_file:
                                    scan_time_mem.scan_map(map_file)
                                    scan_t_m = scan_time_mem.get_data()
                                    append_file(report_file_time_mem, ",".join([design] + scan_t_m))
                                    data = data + scan_t_m
                                append_file(report_file, ",".join(data))
                    recover.run()
        os.chdir(root_dir)
    for f in report_file_list:
        if os.path.isfile(f):
            print '#######################################'
            # sort on the PAP column, append an average row and notes
            file_sorted, note, design_fmax = report_sort_temp.sort_csv(f, col_key='PAP', add_average=1)
            report_sort_temp.write_note(file_sorted, note)
def scan_report():
    """Scan Lattice design results under opt.job_dir into CSV reports.

    Two nearly identical branches: one for a single opt.design, one for
    every design in the job dir.  Each design/target dir contributes a
    row built from .mrp (resources), .twr (timing) and srr/par time data
    to a combined CSV plus _mrp/_twr/_time/_clock side reports.  After
    scanning, the reports are sorted via report_sort and the matching
    *_run_standard.bat is rewritten with per-case --fmax-sweep values
    derived from the sorted fmax results.

    NOTE(review): relative-path accesses (os.listdir("."), mrp_file)
    assume the working directory is switched into each scan dir,
    presumably by RecoverPath - confirm.  The single-design branch never
    calls recover.run() and never chdirs back (unlike the all-designs
    branch) - looks like an oversight, confirm before relying on CWD
    afterwards.
    """
    opt = option()
    job_dir = opt.job_dir
    design = opt.design
    syn = opt.synthesis
    #syn = ''
    family = opt.family
    report_name = opt.report_name  # this is useless now
    for_pattern = ''
    pap = opt.pap
    scan_mrp = tool_scan_lattice.ScanLatticeMrp()
    scan_twr = tool_scan_lattice.ScanLatticeTwr(pap)
    scan_time = tool_scan_lattice.ScanLatticeTime()
    # target dirs look like "_<family>_<syn>"
    if family:
        for_pattern = '_' + family
    if syn:
        for_pattern = for_pattern + '_' + syn
    dir_pattern = re.compile(for_pattern)
    all_designs = []
    if os.path.isdir(job_dir):
        pass
    else:
        print 'The job_dir:%s is not a directory' % job_dir
        return
    for dir in os.listdir(job_dir):
        # get all the design path
        dir2 = os.path.join(job_dir, dir)
        if os.path.isdir(dir2):
            all_designs.append(dir)
        else:
            pass
    report_file_list = []
    if design:
        # ---- single-design branch ----
        print_always('scanning %s' % design)
        if design in all_designs:
            design_path = os.path.join(job_dir, design)
            for dir in os.listdir(design_path):
                if dir_pattern.search(dir) and os.path.isdir(os.path.join(design_path, dir)) and dir.startswith('_'):
                    #####################
                    scanner = (scan_mrp, scan_twr, scan_time)
                    report_file = os.path.join(opt.job_dir, dir + '.csv')
                    report_file_list.append(report_file)
                    if not_exists(report_file):
                        title = ["Design"]
                        for item in scanner:  # mrp twr time
                            title += item.get_title()
                        append_file(report_file, ",".join(title))
                    if 1:
                        report_name, subtex = os.path.splitext(report_file)
                        report_file_mrp = report_name + '_mrp' + subtex
                        if not_exists(report_file_mrp):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[0]).get_title()
                            append_file(report_file_mrp, ",".join(title))
                        report_file_twr = report_name + '_twr' + subtex
                        if not_exists(report_file_twr):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[1]).get_title()
                            append_file(report_file_twr, ",".join(title))
                        report_file_time = report_name + '_time' + subtex
                        if not_exists(report_file_time):
                            title = ["Design"] + ['Target_fmax']
                            #for item in scanner: #mrp twr time
                            title += (scanner[2]).get_title()
                            append_file(report_file_time, ",".join(title))
                        report_file_clock = report_name + '_clock' + subtex
                        if not_exists(report_file_clock):
                            append_file(report_file_clock, ",".join(['Design', 'Colck', 'Loads']))
                    ######################
                    dir_scan = os.path.join(design_path, dir)
                    recover = RecoverPath(dir_scan)
                    if not_exists('rev_1', "srr file directory"):
                        continue
                    srr_file = get_unique_file([os.path.join(design_path, dir, 'rev_1'), ".srr"])
                    if srr_file:
                        srr_file = os.path.join(design_path, dir, 'rev_1', srr_file)
                        scan_time.scan_srr(srr_file)
                    # project: <dir>_<first 7 chars of design>, '_' stripped
                    _project_name = "%s_%s" % (dir, design[:7])
                    project_name = _project_name.strip("_")
                    mrp_file = project_name + ".mrp"
                    if not_exists(mrp_file, "map report file"):
                        continue
                    scan_mrp.scan_report(mrp_file)
                    scan_time.scan_mrp(mrp_file)
                    twr_file = time_file = par_file = ""
                    target_fmax_for_time = '_'
                    target_fmax_re = re.compile(r"Target_Fmax_is_(.+)MHz")
                    for foo in os.listdir("."):
                        if os.path.isdir(foo):
                            if re.search("Target", foo):
                                print_always(" Scanning %s" % foo)
                                # the target fmax is encoded in the dir name
                                target_fmax_for_match = target_fmax_re.search(foo)
                                if target_fmax_for_match:
                                    target_fmax_for_time = target_fmax_for_match.group(1)
                                twr_file = get_unique_file([foo, ".twr"])
                                time_file = os.path.join(foo, time_file)
                                par_file = get_unique_file([foo, ".par"])
                            elif re.search("\.dir$", foo):
                                twr_file = get_unique_file([foo, ".twr"])
                                par_file = get_unique_file([foo, ".par"])
                                time_file = time_file
                            else:
                                continue
                            if twr_file:
                                scan_twr.scan_report(twr_file)
                                scan_time.scan_report(time_file)
                                scan_time.scan_par(par_file)
                                #########################
                                #time_title = ['design']+scan_time.get_title2()
                                srr_data = scan_time.get_srr_time_data()
                                mrp_data = scan_time.get_mrp_time_data()
                                par_data = scan_time.get_par_time_data()
                                real_cpu_total = scan_time.get_total_time()
                                all_time_data = dict(srr_data.items() + mrp_data.items() + par_data.items() + real_cpu_total.items())
                                data_list = []
                                for key in scan_time.get_title():
                                    value = all_time_data.get(key, 'NA')
                                    data_list.append(value)
                                data_list2 = [design] + [target_fmax_for_time] + data_list
                                append_file(report_file_time, ",".join(data_list2))
                                #########################
                                # pad with '_' when the run never completed
                                if all_time_data['Complete'] == 'NA' or all_time_data['Par_Done'] == 'NA':
                                    data_list = ['_'] * len(scan_mrp.get_data()) + ['_'] * len(scan_twr.get_data()) + ['_'] * len(data_list)
                                else:
                                    data_list = scan_mrp.get_data() + \
                                                scan_twr.get_data() + data_list
                                data = [design] + data_list
                                append_file(report_file, ",".join(data))
                                data = [design]
                                append_file(report_file_mrp, ",".join([design] + scan_mrp.get_data()))
                                append_file(report_file_twr, ",".join([design] + scan_twr.get_data()))
                                #append_file(report_file_time,",".join([design]+ scan_time.get_data()))
                    if not twr_file:
                        # no timing run found: emit time + mrp rows only
                        srr_data = scan_time.get_srr_time_data()
                        mrp_data = scan_time.get_mrp_time_data()
                        par_data = scan_time.get_par_time_data()
                        real_cpu_total = scan_time.get_total_time()
                        all_time_data = dict(srr_data.items() + mrp_data.items() + par_data.items() + real_cpu_total.items())
                        data_list = []
                        for key in scan_time.get_title():
                            value = all_time_data.get(key, 'NA')
                            data_list.append(value)
                        data_list2 = [design] + [target_fmax_for_time] + data_list
                        append_file(report_file_time, ",".join(data_list2))
                        if all_time_data['Complete'] == 'NA' or all_time_data['Par_Done'] == 'NA':
                            data = [design] + ['_'] * len(scan_mrp.get_data()) + ['_'] * len(data_list)
                        else:
                            data = [design] + scan_mrp.get_data() + data_list
                        append_file(report_file, ",".join(data))
                        append_file(report_file_mrp, ",".join([design] + scan_mrp.get_data()))
                    scan_mrp.scan_clocks(mrp_file)
                    clock_dict = scan_mrp.get_parse_line_clocks()
                    for key in clock_dict.keys():
                        line = design + ',' + key + ',' + clock_dict[key]
                        append_file(report_file_clock, line)
        else:
            print 'The design is not exists in the job_dir'
            return
    else:
        # ---- all-designs branch ----
        root_dir = os.getcwd()
        for design in all_designs:
            print_always('scanning %s' % design)
            design_path = os.path.join(job_dir, design)
            for dir in os.listdir(design_path):
                if dir_pattern.search(dir) and os.path.isdir(os.path.join(design_path, dir)) and dir.startswith('_'):
                    #####################
                    scanner = (scan_mrp, scan_twr, scan_time)
                    report_file = os.path.join(opt.job_dir, dir + '.csv')
                    if report_file in report_file_list:
                        pass
                    else:
                        report_file_list.append(report_file)
                    if not_exists(report_file):
                        title = ["Design"]
                        for item in scanner:  # mrp twr time
                            title += item.get_title()
                        append_file(report_file, ",".join(title))
                    if 1:
                        report_name, subtex = os.path.splitext(report_file)
                        report_file_mrp = report_name + '_mrp' + subtex
                        if not_exists(report_file_mrp):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[0]).get_title()
                            append_file(report_file_mrp, ",".join(title))
                        report_file_twr = report_name + '_twr' + subtex
                        if not_exists(report_file_twr):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[1]).get_title()
                            append_file(report_file_twr, ",".join(title))
                        report_file_time = report_name + '_time' + subtex
                        if not_exists(report_file_time):
                            title = ["Design"] + ['Target_fmax']
                            #for item in scanner: #mrp twr time
                            title += (scanner[2]).get_title()
                            append_file(report_file_time, ",".join(title))
                        report_file_clock = report_name + '_clock' + subtex
                        # NOTE(review): unlike the single-design branch this
                        # appends the clock header unconditionally, so the
                        # header repeats on re-runs - confirm intent.
                        append_file(report_file_clock, ",".join(['Design', 'Colck', 'Loads']))
                    ######################
                    dir_scan = os.path.join(design_path, dir)
                    recover = RecoverPath(dir_scan)
                    if not_exists('rev_1', "srr file directory"):
                        continue
                    srr_file = get_unique_file([os.path.join(design_path, dir, 'rev_1'), ".srr"])
                    if not srr_file:
                        pass
                    else:
                        srr_file = os.path.join(design_path, dir, 'rev_1', srr_file)
                        scan_time.scan_srr(srr_file)
                    _project_name = "%s_%s" % (dir, design[:7])
                    project_name = _project_name.strip("_")
                    mrp_file = project_name + ".mrp"
                    if not_exists(mrp_file, "map report file"):
                        continue
                    scan_mrp.scan_report(mrp_file)
                    scan_time.scan_mrp(mrp_file)
                    twr_file = time_file = par_file = ""
                    target_fmax_for_time = '_'
                    target_fmax_re = re.compile(r"Target_Fmax_is_(.+)MHz")
                    for foo in os.listdir("."):
                        if os.path.isdir(foo):
                            if re.search("Target", foo):
                                scan_time.reset_par_time_data()
                                print_always(" Scanning %s" % foo)
                                target_fmax_for_match = target_fmax_re.search(foo)
                                if target_fmax_for_match:
                                    target_fmax_for_time = target_fmax_for_match.group(1)
                                twr_file = get_unique_file([foo, ".twr"])
                                time_file = os.path.join(foo, time_file)
                                par_file = get_unique_file([foo, ".par"])
                            elif re.search("\.dir$", foo):
                                twr_file = get_unique_file([foo, ".twr"])
                                par_file = get_unique_file([foo, ".par"])
                                time_file = time_file
                            else:
                                continue
                            if twr_file:
                                scan_twr.scan_report(twr_file)
                                scan_time.scan_report(time_file)
                                scan_time.scan_par(par_file)
                                #########################
                                #time_title = ['design']+scan_time.get_title2()
                                srr_data = scan_time.get_srr_time_data()
                                mrp_data = scan_time.get_mrp_time_data()
                                par_data = scan_time.get_par_time_data()
                                real_cpu_total = scan_time.get_total_time()
                                all_time_data = dict(srr_data.items() + mrp_data.items() + par_data.items() + real_cpu_total.items())
                                data_list = []
                                for key in scan_time.get_title():
                                    value = all_time_data.get(key, 'NA')
                                    data_list.append(value)
                                data_list2 = [design] + [target_fmax_for_time] + data_list
                                append_file(report_file_time, ",".join(data_list2))
                                #########################
                                #data = [design] + scan_mrp.get_data() + \
                                #       scan_twr.get_data() + data_list
                                if all_time_data['Complete'] == 'NA' or all_time_data['Par_Done'] == 'NA':
                                    data_list = ['_'] * len(scan_mrp.get_data()) + ['_'] * len(scan_twr.get_data()) + \
                                                ['_'] * len(data_list)
                                else:
                                    data_list = scan_mrp.get_data() + \
                                                scan_twr.get_data() + data_list
                                data = [design] + data_list
                                append_file(report_file, ",".join(data))
                                data = [design]
                                append_file(report_file_mrp, ",".join([design] + scan_mrp.get_data()))
                                append_file(report_file_twr, ",".join([design] + scan_twr.get_data()))
                                #append_file(report_file_time,",".join([design]+ scan_time.get_data()))
                                scan_time.reset_par_time_data()
                    if not twr_file:
                        srr_data = scan_time.get_srr_time_data()
                        mrp_data = scan_time.get_mrp_time_data()
                        par_data = scan_time.get_par_time_data()
                        real_cpu_total = scan_time.get_total_time()
                        all_time_data = dict(srr_data.items() + mrp_data.items() + par_data.items() + \
                                             real_cpu_total.items())
                        data_list = []
                        for key in scan_time.get_title():
                            value = all_time_data.get(key, 'NA')
                            data_list.append(value)
                        data_list2 = [design] + [target_fmax_for_time] + data_list
                        append_file(report_file_time, ",".join(data_list2))
                        #data = [design] + scan_mrp.get_data()+ data_list
                        if all_time_data['Complete'] == 'NA' or all_time_data['Par_Done'] == 'NA':
                            data = [design] + ['_'] * len(scan_mrp.get_data()) + ['_'] * len(data_list)
                        else:
                            data = [design] + scan_mrp.get_data() + data_list
                        append_file(report_file, ",".join(data))
                        append_file(report_file_mrp, ",".join([design] + scan_mrp.get_data()))
                        scan_time.reset_par_time_data()
                    scan_mrp.scan_clocks(mrp_file)
                    clock_dict = scan_mrp.get_parse_line_clocks()
                    for key in clock_dict.keys():
                        line = design + ',' + key + ',' + clock_dict[key]
                        append_file(report_file_clock, line)
                    recover.run()
        os.chdir(root_dir)
    for f in report_file_list:
        if os.path.isfile(f):
            file_sorted, note, design_fmax = report_sort.sort_csv(f)
            report_sort.write_note(file_sorted, note)
    #----------------update run_stand-------------------#
    # rewrite the run_standard batch file with per-case --fmax-sweep
    # values taken from design_fmax (the LAST sort_csv result above)
    pass_log = glob.glob('*' + syn + '_pass_case.log')
    if pass_log:
        pass_log = pass_log[0]
    else:
        pass_log = '__'
    if os.path.isfile(pass_log):
        file_hand = file(pass_log, 'r')
        lines = file_hand.readlines()
        file_hand.close()
        stand_name = glob.glob('*' + syn + '_run_standard.bat')
        if stand_name:
            stand_name = stand_name[0]
            #stand_name = os.path.join(top_base,top_base2+'_'+syn+'_run_standard.bat')
        else:
            stand_name = ''
        run_standard = file(stand_name, 'r')
        run_standard_lines = run_standard.readlines()
        run_standard.close()
        useful_lines = []
        for case in lines:
            case = case.strip()
            if not case:
                continue
            else:
                pass
            case_tab = '--design=' + case
            case_tab_re = re.compile(case_tab + r'(\s+|$)')
            for line in run_standard_lines:
                line = line.strip()
                if not line:
                    continue
                if case_tab_re.search(line):
                    try:
                        # replace any previous sweep with one centred on
                        # the measured fmax (span fmax..fmax step 10)
                        fmax = float(design_fmax[case.strip()])
                        fmax = str(int(fmax))
                        #line = case_tab_re.sub('',line)
                        line = re.sub(r'--fmax-sweep=[\s\d]+\d', '', line)
                        line2 = line + ' --fmax-sweep=' + fmax + ' ' + fmax + ' ' + '10 \n'
                    except:
                        # no fmax recorded for this case: keep line as-is
                        line2 = line
                    useful_lines.append(line2)
        run_standard = file(stand_name, 'w')
        run_standard.writelines(useful_lines)
        run_standard.close()
def scan_report(self):
    """Scan Lattice designs under self.job_dir into self.report_file CSVs.

    Method variant of the module-level Lattice scan_report(): collects
    .mrp (resources), .twr (timing) and srr/par time data for every
    design dir matching the family/synthesis pattern, appending rows to
    the main report plus _mrp/_twr/_time/_clock side reports.

    NOTE(review): several bare names look like copy-paste leftovers from
    the module-level function and would raise NameError at runtime:
    'family' / 'syn' (should be self.family / self.synthesis) and
    'job_dir' (self.job_dir); 'for_pattern' is also undefined when
    self.family is falsy.  The self.design branch appends a FULL path to
    all_designs while the loop below joins it to job_dir again - confirm
    and fix before relying on this method.
    """
    scan_mrp = tool_scan_lattice.ScanLatticeMrp()
    scan_twr = tool_scan_lattice.ScanLatticeTwr(self.pap)
    scan_time = tool_scan_lattice.ScanLatticeTime()
    if self.family:
        for_pattern = '_' + family
    if self.synthesis:
        for_pattern = for_pattern + '_' + syn
    dir_pattern = re.compile(for_pattern)
    if os.path.isdir(self.job_dir):
        pass
    else:
        print 'Error: The job_dir:%s is not a directory' % self.job_dir
        return
    report_file_list = []
    all_designs = []
    if not self.design:
        for dir in os.listdir(job_dir):
            # get all the design path
            dir2 = os.path.join(job_dir, dir)
            if os.path.isdir(dir2):
                all_designs.append(dir)
            else:
                pass
    else:
        dir = os.path.join(job_dir, self.design)
        if os.path.isdir(dir):
            all_designs.append(dir)
        else:
            print 'Error: Can not find case:%s' % self.design
    root_dir = os.getcwd()
    for design in all_designs:
        print_always('scanning %s' % design)
        design_path = os.path.join(job_dir, design)
        for dir in os.listdir(design_path):
            if dir_pattern.search(dir) and os.path.isdir(os.path.join(design_path, dir)) and dir.startswith('_'):
                #####################
                scanner = (scan_mrp, scan_twr, scan_time)
                report_file = self.report_file
                if report_file in report_file_list:
                    pass
                else:
                    report_file_list.append(report_file)
                if not_exists(report_file):
                    title = ["Design"]
                    for item in scanner:  # mrp twr time
                        title += item.get_title()
                    append_file(report_file, ",".join(title))
                if 1:
                    report_name, subtex = os.path.splitext(report_file)
                    report_file_mrp = report_name + '_mrp' + subtex
                    if not_exists(report_file_mrp):
                        title = ["Design"]
                        #for item in scanner: #mrp twr time
                        title += (scanner[0]).get_title()
                        append_file(report_file_mrp, ",".join(title))
                    report_file_twr = report_name + '_twr' + subtex
                    if not_exists(report_file_twr):
                        title = ["Design"]
                        #for item in scanner: #mrp twr time
                        title += (scanner[1]).get_title()
                        append_file(report_file_twr, ",".join(title))
                    report_file_time = report_name + '_time' + subtex
                    if not_exists(report_file_time):
                        title = ["Design"] + ['Target_fmax']
                        #for item in scanner: #mrp twr time
                        title += (scanner[2]).get_title()
                        append_file(report_file_time, ",".join(title))
                    report_file_clock = report_name + '_clock' + subtex
                    if not_exists(report_file_clock):
                        append_file(report_file_clock, ",".join(['Design', 'Colck', 'Loads']))
                dir_scan = os.path.join(design_path, dir)
                recover = RecoverPath(dir_scan)
                if not_exists('rev_1', "srr file directory"):
                    continue
                srr_file = get_unique_file([os.path.join(design_path, dir, 'rev_1'), ".srr"])
                if not srr_file:
                    pass
                else:
                    srr_file = os.path.join(design_path, dir, 'rev_1', srr_file)
                    scan_time.scan_srr(srr_file)
                # project: <dir>_<first 7 chars of design>, '_' stripped
                _project_name = "%s_%s" % (dir, design[:7])
                project_name = _project_name.strip("_")
                mrp_file = project_name + ".mrp"
                if not_exists(mrp_file, "map report file"):
                    continue
                scan_mrp.scan_report(mrp_file)
                scan_time.scan_mrp(mrp_file)
                twr_file = time_file = par_file = ""
                target_fmax_for_time = '_'
                target_fmax_re = re.compile(r"Target_Fmax_is_(.+)MHz")
                for foo in os.listdir("."):
                    if os.path.isdir(foo):
                        if re.search("Target", foo):
                            scan_time.reset_par_time_data()
                            print_always(" Scanning %s" % foo)
                            # the target fmax is encoded in the dir name
                            target_fmax_for_match = target_fmax_re.search(foo)
                            if target_fmax_for_match:
                                target_fmax_for_time = target_fmax_for_match.group(1)
                            twr_file = get_unique_file([foo, ".twr"])
                            time_file = os.path.join(foo, time_file)
                            par_file = get_unique_file([foo, ".par"])
                        elif re.search("\.dir$", foo):
                            twr_file = get_unique_file([foo, ".twr"])
                            par_file = get_unique_file([foo, ".par"])
                            time_file = time_file
                        else:
                            continue
                        if twr_file:
                            scan_twr.scan_report(twr_file)
                            scan_time.scan_report(time_file)
                            scan_time.scan_par(par_file)
                            #########################
                            #time_title = ['design']+scan_time.get_title2()
                            srr_data = scan_time.get_srr_time_data()
                            mrp_data = scan_time.get_mrp_time_data()
                            par_data = scan_time.get_par_time_data()
                            real_cpu_total = scan_time.get_total_time()
                            all_time_data = dict(srr_data.items() + mrp_data.items() + par_data.items() + real_cpu_total.items())
                            data_list = []
                            for key in scan_time.get_title():
                                value = all_time_data.get(key, 'NA')
                                data_list.append(value)
                            data_list2 = [design] + [target_fmax_for_time] + data_list
                            append_file(report_file_time, ",".join(data_list2))
                            #########################
                            #data = [design] + scan_mrp.get_data() + \
                            #       scan_twr.get_data() + data_list
                            # pad with '_' when the run never completed
                            if all_time_data['Complete'] == 'NA' or all_time_data['Par_Done'] == 'NA':
                                data_list = ['_'] * len(scan_mrp.get_data()) + ['_'] * len(scan_twr.get_data()) + \
                                            ['_'] * len(data_list)
                            else:
                                data_list = scan_mrp.get_data() + \
                                            scan_twr.get_data() + data_list
                            data = [design] + data_list
                            append_file(report_file, ",".join(data))
                            data = [design]
                            append_file(report_file_mrp, ",".join([design] + scan_mrp.get_data()))
                            append_file(report_file_twr, ",".join([design] + scan_twr.get_data()))
                            #append_file(report_file_time,",".join([design]+ scan_time.get_data()))
                            scan_time.reset_par_time_data()
                if not twr_file:
                    # no timing run found: emit time + mrp rows only
                    srr_data = scan_time.get_srr_time_data()
                    mrp_data = scan_time.get_mrp_time_data()
                    par_data = scan_time.get_par_time_data()
                    real_cpu_total = scan_time.get_total_time()
                    all_time_data = dict(srr_data.items() + mrp_data.items() + par_data.items() + \
                                         real_cpu_total.items())
                    data_list = []
                    for key in scan_time.get_title():
                        value = all_time_data.get(key, 'NA')
                        data_list.append(value)
                    data_list2 = [design] + [target_fmax_for_time] + data_list
                    append_file(report_file_time, ",".join(data_list2))
                    #data = [design] + scan_mrp.get_data()+ data_list
                    if all_time_data['Complete'] == 'NA' or all_time_data['Par_Done'] == 'NA':
                        data = [design] + ['_'] * len(scan_mrp.get_data()) + ['_'] * len(data_list)
                    else:
                        data = [design] + scan_mrp.get_data() + data_list
                    append_file(report_file, ",".join(data))
                    append_file(report_file_mrp, ",".join([design] + scan_mrp.get_data()))
                    scan_time.reset_par_time_data()
                scan_mrp.scan_clocks(mrp_file)
                clock_dict = scan_mrp.get_parse_line_clocks()
                for key in clock_dict.keys():
                    line = design + ',' + key + ',' + clock_dict[key]
                    append_file(report_file_clock, line)
                recover.run()
    os.chdir(root_dir)
def set_synplify_envs(self):
    """Export the SynplifyPro install location to the environment.

    Reads the resolved "synplify" setting, publishes it as
    QASYNPLIFYBIN, and (outside of dry runs) verifies the path exists.

    Returns the result of not_exists() when the path is checked,
    otherwise None (dry run).
    """
    install_path = self.get_real_setting("synplify")
    os.environ["QASYNPLIFYBIN"] = install_path
    if self.dry_run:
        # dry run: skip the existence check, as the other env setters do
        return None
    return not_exists(install_path, "SynplifyPro Install path")
def scan_report():
    """Scan a finished Lattice seed-sweep job directory into CSV reports.

    Walks every design under ``opt.job_dir``; for each ``Target_*`` run
    directory it parses the Synplify log (.srr), map report (.mrp),
    timing report (.twr) and PAR report, then appends one row per design
    to the combined CSV plus the ``*_mrp``/``*_twr``/``*_time``/``*_clock``
    side reports.  This variant keys the time report on the sweep seed
    (``Target_Seed_is_...``).

    NOTE(review): indentation reconstructed from a collapsed source dump;
    the nesting of the header-writing section should be confirmed against
    the original file.
    """
    opt = option()
    job_dir = opt.job_dir
    need_design = opt.design  # when set, only this design is scanned
    syn = opt.synthesis
    #syn = ''
    family = opt.family
    report_name = opt.report_name #this is useless now
    for_pattern=''
    pap = opt.pap
    # one scanner object per report type (map / timing / runtime)
    scan_mrp = tool_scan_lattice.ScanLatticeMrp()
    scan_twr = tool_scan_lattice.ScanLatticeTwr(pap)
    scan_time = tool_scan_lattice.ScanLatticeTime()
    # seed-sweep variant: run directories are named by seed, not fmax
    target_fmax_re = re.compile(r"Target_Seed_is_(.+)")
    if family:
        for_pattern = '_'+family
    if syn:
        for_pattern = for_pattern + '_'+syn
    dir_pattern = re.compile(for_pattern)
    all_designs = []
    if os.path.isdir(job_dir):
        pass
    else:
        print 'The job_dir:%s is not a directory'%job_dir
        return
    for dir in os.listdir(job_dir): # get all the design path
        dir2 = os.path.join(job_dir,dir)
        if os.path.isdir(dir2):
            all_designs.append(dir)
        else:
            pass
    report_file_list = []
    if 1:
        root_dir = os.getcwd()
        dir = os.path.basename(opt.job_dir)
        if 1:
            scanner = (scan_mrp, scan_twr, scan_time)
            report_file = os.path.join(opt.job_dir,dir+'.csv')
            if report_file in report_file_list:
                pass
            else:
                report_file_list.append(report_file)
            # write CSV header rows only when the files do not exist yet
            if not_exists(report_file):
                title = ["Design"]
                for item in scanner: #mrp twr time
                    title += item.get_title()
                append_file(report_file, ",".join(title))
            if 1:
                report_name,subtex = os.path.splitext(report_file)
                report_file_mrp = report_name+'_mrp'+subtex
                if not_exists(report_file_mrp):
                    title = ["Design"]
                    #for item in scanner: #mrp twr time
                    title += (scanner[0]).get_title()
                    append_file(report_file_mrp, ",".join(title))
                report_file_twr = report_name+'_twr'+subtex
                if not_exists(report_file_twr):
                    title = ["Design"]
                    #for item in scanner: #mrp twr time
                    title += (scanner[1]).get_title()
                    append_file(report_file_twr, ",".join(title))
                report_file_time = report_name+'_time'+subtex
                if not_exists(report_file_time):
                    title = ["Design"] +['Target_seed']
                    #for item in scanner: #mrp twr time
                    title += (scanner[2]).get_title()
                    append_file(report_file_time, ",".join(title))
                report_file_clock = report_name+'_clock'+subtex
                # clock report header is (re)written unconditionally here
                append_file(report_file_clock,
                            ",".join(['Design','Colck','Loads']))
        for design in all_designs:
            if need_design:
                if design == need_design:
                    pass
                else:
                    continue
            print_always( 'scanning %s'%design)
            design_path = os.path.join(job_dir,design)# e60_ecp3/g64
            srr_file = ''
            for dir in os.listdir(design_path): # dir:Target_Fmax_is_060MHz
                dir_scan = os.path.join(design_path,dir)
                # the synthesis log lives in the first non-Target directory
                if (not re.search("Target", dir)) and os.path.isdir(dir_scan) and (not srr_file):
                    srr_file = get_unique_file([dir_scan, ".srr"])
                if re.search("Target", dir) and os.path.isdir(dir_scan):
                    pass
                else:
                    continue
                recover = RecoverPath(dir_scan)
                used_dir = ''
                # pick the (expected single) implementation sub-directory
                for f_d in os.listdir(dir_scan):
                    f_d_full = os.path.join(dir_scan,f_d)
                    if os.path.isdir(f_d_full):
                        if used_dir:
                            print 'Worning: There are two implementation in the design'
                        used_dir = f_d_full
                if not srr_file:
                    pass
                else:
                    srr_file = os.path.join(design_path,dir,srr_file)
                    scan_time.scan_srr(srr_file)
                mrp_file = get_unique_file([used_dir, ".mrp"])
                if not_exists(mrp_file, "map report file"):
                    continue
                scan_mrp.scan_report(mrp_file)
                scan_time.scan_mrp(mrp_file)
                twr_file = time_file = par_file= ""
                target_fmax_for_time = '_'
                #------------------------------------------------#
                if 1:
                    useful_dir = used_dir
                    base_name = os.path.basename(useful_dir)
                    scan_time.reset_par_time_data()
                    target_fmax_for_match = target_fmax_re.search(dir)
                    if target_fmax_for_match:
                        target_fmax_for_time = target_fmax_for_match.group(1)
                    twr_p = os.path.join(useful_dir,'*'+base_name+".twr")
                    twr_file = get_unique_file(twr_p)
                    if not twr_file:
                        twr_file = get_unique_file([useful_dir, ".twr"])
                    time_file = os.path.join(useful_dir, time_file)
                    par_file = get_unique_file([useful_dir, ".par"])
                if twr_file:
                    # timing report present: collect twr + par + runtime data
                    scan_twr.scan_report(twr_file)
                    scan_time.scan_report(time_file)
                    scan_time.scan_par(par_file)
                    #########################
                    #time_title = ['design']+scan_time.get_title2()
                    srr_data = scan_time.get_srr_time_data()
                    mrp_data = scan_time.get_mrp_time_data()
                    par_data = scan_time.get_par_time_data()
                    real_cpu_total = scan_time.get_total_time()
                    all_time_data = dict(srr_data.items()+mrp_data.items()+par_data.items() + real_cpu_total.items() )
                    data_list = []
                    for key in scan_time.get_title():
                        value = all_time_data.get(key,'NA')
                        data_list.append(value)
                    data_list2 = [design]+[target_fmax_for_time] +data_list
                    append_file(report_file_time,",".join(data_list2))
                    #########################
                    #data = [design] + scan_mrp.get_data() + \
                    #       scan_twr.get_data() + data_list
                    # an unfinished run is reported as all-'_' placeholders
                    if all_time_data['Complete'] == 'NA' or all_time_data['Par_Done'] == 'NA':
                        data_list = ['_']*len(scan_mrp.get_data()) +['_']*len(scan_twr.get_data()) +\
                                    ['_']*len(data_list)
                    else:
                        data_list = scan_mrp.get_data() + \
                                    scan_twr.get_data() + data_list
                    data = [design] + data_list
                    append_file(report_file, ",".join(data))
                    data = [design]
                    append_file(report_file_mrp,",".join([design]+ scan_mrp.get_data() ))
                    append_file(report_file_twr,",".join([design]+ scan_twr.get_data() ))
                    #append_file(report_file_time,",".join([design]+ scan_time.get_data()))
                    scan_time.reset_par_time_data()
                if not twr_file:
                    # no timing report: still emit the runtime row; map data
                    # is used only when the run actually completed
                    srr_data = scan_time.get_srr_time_data()
                    mrp_data = scan_time.get_mrp_time_data()
                    par_data = scan_time.get_par_time_data()
                    real_cpu_total = scan_time.get_total_time()
                    all_time_data = dict(srr_data.items()+mrp_data.items()+par_data.items() + \
                                         real_cpu_total.items() )
                    data_list = []
                    for key in scan_time.get_title():
                        value = all_time_data.get(key,'NA')
                        data_list.append(value)
                    data_list2 = [design]+[target_fmax_for_time] +data_list
                    append_file(report_file_time,",".join(data_list2))
                    #data = [design] + scan_mrp.get_data()+ data_list
                    if all_time_data['Complete'] == 'NA' or all_time_data['Par_Done'] == 'NA':
                        data = [design] + ['_']*len(scan_mrp.get_data())+ ['_']*len(data_list)
                    else:
                        data = [design] + scan_mrp.get_data()+ data_list
                    append_file(report_file, ",".join(data))
                    append_file(report_file_mrp,",".join([design]+ scan_mrp.get_data()))
                    scan_time.reset_par_time_data()
                # clock loading rows from the map report
                # NOTE(review): placed at loop level (runs for both branches
                # above) -- confirm nesting against the original file
                scan_mrp.scan_clocks(mrp_file)
                clock_dict = scan_mrp.get_parse_line_clocks()
                for key in clock_dict.keys():
                    line = design+','+key+','+clock_dict[key]
                    append_file(report_file_clock,line)
                recover.run()
        os.chdir(root_dir)
def scan_report(self, rpt_file):
    """Parse a Vivado routed-timing report into ``self.data``.

    Fills (by index -- presumably matching this scanner's title order,
    TODO confirm against get_title()):
      - data[0]: total user IO count, from the sibling *io_placed.rpt
      - data[1]: worst achieved fmax in MHz, derived from the Intra
        Clock Table WNS column and the clock period
      - data[2]: name of the clock with that worst fmax
      - data[3]: target fmax parsed from the run directory name
        (``Target_Fmax_is_NNN...``)

    NOTE(review): indentation reconstructed from a collapsed source
    dump -- confirm the flag-counter nesting against the original.
    """
    # --- total user IO, from the placed-IO report next to rpt_file ---
    IoRpt = glob.glob(os.path.join(os.path.dirname(rpt_file),'*io_placed.rpt'))
    if IoRpt:
        IoRpt = IoRpt[0]
        target_fmax_dir = os.path.basename(os.path.dirname(IoRpt)) #Target_Fmax_is_060.00MHz
        target_pattern = re.compile('Target_Fmax_is_(\d+)')
        if target_pattern.search(target_fmax_dir):
            self.data[3] = str( int( (target_pattern.search(target_fmax_dir)).group(1) ) )
        file_hand = file(IoRpt)
        line = file_hand.readline()
        begin_flag = 0
        patten = re.compile('\|\s+(\d+)')
        while line:
            '''
            +---------------+
            | Total User IO |
            +---------------+
            |      163      |
            +---------------+
            '''
            if line.find('Total User IO')!= -1:
                begin_flag = 1
                line = file_hand.readline()
            elif begin_flag != 0:
                # count lines past the table header; the value row is the
                # 3rd line after the match (past the +---+ separator)
                begin_flag += 1
                if begin_flag == 3:
                    patten_search = patten.search(line)
                    if patten_search:
                        self.data[0] = patten_search.group(1)
                    break
                else:
                    line = file_hand.readline()
            else:
                line = file_hand.readline()
    '''
    Begin to get the fmax
    '''
    if not_exists(rpt_file, "report file"):
        return 1
    file_hand = file(rpt_file)
    line = file_hand.readline()
    begin_flag = 0
    search_character = re.compile('[a-z,A-Z]')
    temp_fmax = 10000  # sentinel: any real fmax will be lower
    temp_clock = ''
    temp_period = ''
    period_flag = 0
    index = 1
    while line:
        line = line.strip()
        ############################ At here, we need to find the period ##################
        #Clock  Waveform(ns)   Period(ns)   Frequency(MHz)
        #-----  ------------   ----------   --------------
        #mclkin_c  {0.000 1.724}  3.448     290.023
        #print line
        # collapse whitespace so the header row can be matched exactly
        line_temp = re.sub("\s","",line.strip())
        if line_temp == "ClockWaveform(ns)Period(ns)Frequency(MHz)" :
            period_flag = 1
            line = file_hand.readline()
            continue
        elif period_flag == 1:
            # skip the dashed separator row
            period_flag += 1
            line = file_hand.readline()
            continue
        elif period_flag == 2:
            # first data row: period is the token after the waveform '{...}'
            try:
                line_use = line.split("}")[1]
                line_use = line_use.strip()
                temp_period = float(line_use.split()[0])
            except:
                pass
            period_flag += 1
            line = file_hand.readline()
            continue
        else:
            pass
        #print period_flag
        #raw_input()
        # --- Intra Clock Table: worst WNS per clock -> fmax ---
        if line.find('| Intra Clock Table') != -1:
            begin_flag = 1
        elif line.startswith('Clock '):
            begin_flag += 1
            # slice boundary: end of the WNS(ns) column
            index = line.index("WNS(ns)")+7
        elif begin_flag >= 1:
            begin_flag += 1
        if begin_flag >5 and line:
            if search_character.search(line):
                line_list = (line[0:index]).strip().split()
                if len(line_list)>=2:
                    temp = line_list[1]
                    try:
                        # fmax(MHz) = 1000 / (period - WNS), rounded to 3 dp
                        temp = float(temp)
                        temp = temp_period - temp
                        temp = 1000/temp
                        temp = "%.3f"%temp
                        temp = float(temp)
                        if temp < temp_fmax:
                            temp_fmax = temp
                            temp_clock = line_list[0]
                    except:
                        pass
        if begin_flag >5 and (not line):
            break
        elif begin_flag and not line:
            break
        line = file_hand.readline()
    if temp_fmax:
        self.data[1] = str(temp_fmax)
        self.data[2] = temp_clock
def scan_report():
    """Scan a Xilinx ISE job directory into per-run CSV reports.

    Either scans a single requested design (opt.design) or every design
    found under opt.job_dir.  For each run directory matching the
    family/synthesis pattern it parses the map (.mrp), timing (.twr) and
    PAR reports of each ``Target_*`` sub-directory, appends CSV rows,
    sorts the resulting CSVs, and finally rewrites the *_run_standard.bat
    sweep commands using the fmax values of passing cases.

    NOTE(review): indentation reconstructed from a collapsed source
    dump -- nesting should be confirmed against the original file.
    """
    opt = option()
    job_dir = opt.job_dir
    design = opt.design
    syn = opt.synthesis
    family = opt.family
    report_name = opt.report_name #this is useless now
    for_pattern = ''
    pap = opt.pap
    scan_mrp = tool_scan_xilinx.ScanXilinxMrp()
    scan_twr = tool_scan_xilinx.ScanXilinxTwr()
    scan_time = tool_scan_xilinx.ScanXilinxTimeMem()
    if family:
        for_pattern = '_' + family
    if syn:
        for_pattern = for_pattern + '_' + syn
    dir_pattern = re.compile(for_pattern)
    print for_pattern
    all_designs = []
    if os.path.isdir(job_dir):
        pass
    else:
        print 'The job_dir:%s is not a directory' % job_dir
        return
    for dir in os.listdir(job_dir): # get all the design path
        dir2 = os.path.join(job_dir, dir)
        if os.path.isdir(dir2):
            all_designs.append(dir)
        else:
            pass
    report_file_list = []
    if design:
        # ---- single-design mode ----
        print_always('scanning %s' % design)
        if design in all_designs:
            design_path = os.path.join(job_dir, design)
            for dir in os.listdir(design_path):
                if dir_pattern.search(dir) and os.path.isdir(
                        os.path.join(design_path, dir)):
                    #####################
                    scanner = (scan_mrp, scan_twr, scan_time)
                    report_file = os.path.join(job_dir, dir + '.csv')
                    report_file_list.append(report_file)
                    if not_exists(report_file):
                        title = ["Design"]
                        for item in scanner: #mrp twr time
                            title += item.get_title()
                        append_file(report_file, ",".join(title))
                    if 1:
                        report_name, subtex = os.path.splitext(report_file)
                        report_file_mrp = report_name + '_mrp' + subtex
                        if not_exists(report_file_mrp):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[0]).get_title()
                            append_file(report_file_mrp, ",".join(title))
                        report_file_twr = report_name + '_twr' + subtex
                        if not_exists(report_file_twr):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[1]).get_title()
                            append_file(report_file_twr, ",".join(title))
                        report_file_time = report_name + '_time_mem' + subtex
                        if not_exists(report_file_time):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[2]).get_title()
                            append_file(report_file_time, ",".join(title))
                    ######################
                    dir_scan = os.path.join(design_path, dir)
                    twr_file = time_file = ""
                    if 'rev_1' in os.listdir(dir_scan):
                        scan_time.set_srr_data()
                        foo = os.path.join(dir_scan, 'rev_1', '*.srr')
                        srr_file = get_unique_file(foo)
                        if srr_file:
                            scan_time.scan_srr(srr_file)
                    for foo in os.listdir(dir_scan):
                        foo = os.path.join(dir_scan, foo)
                        if os.path.isdir(foo):
                            if re.search("Target", foo):
                                print foo
                                recover = RecoverPath(
                                    os.path.join(design_path, dir, foo))
                                scan_mrp.reset()
                                #scan_time.reset()
                                scan_time.set_map_data()
                                scan_time.set_par_data()
                                scan_twr.reset()
                                print_always(" Scanning %s" % foo)
                                # project name: <run dir>_<first 7 chars of design>
                                _project_name = "%s_%s" % (dir, design[:7])
                                project_name = _project_name.strip("_")
                                mrp_file = os.path.join(
                                    dir_scan, foo, project_name + "_map.mrp")
                                scan_mrp.scan_report(mrp_file)
                                scan_time.scan_map(mrp_file)
                                par_file = os.path.join(
                                    dir_scan, foo, project_name + ".par")
                                scan_time.scan_par(par_file)
                                twr_file = os.path.join(
                                    dir_scan, foo, project_name + ".twr")
                                scan_twr.scan_report(twr_file)
                                time_file = os.path.join(
                                    dir_scan, foo, "mpar_log.time")
                                #scan_time.scan_report(time_file)
                                data1 = scan_mrp.get_data()
                                append_file(report_file_mrp,
                                            ",".join([design] + data1))
                                data2 = scan_twr.get_data()
                                append_file(report_file_twr,
                                            ",".join([design] + data2))
                                data3 = scan_time.get_data()
                                append_file(report_file_time,
                                            ",".join([design] + data3))
                                data = data1 + data2 + data3
                                append_file(report_file, ",".join([design] + data))
                                recover.run()
        else:
            print 'The design is not exists in the job_dir'
            return
    else:
        # ---- all-designs mode (same per-run logic as above) ----
        root_dir = os.getcwd()
        for design in all_designs:
            print_always('scanning %s' % design)
            design_path = os.path.join(job_dir, design)
            for dir in os.listdir(design_path):
                if dir_pattern.search(dir) and os.path.isdir(
                        os.path.join(design_path, dir)):
                    #####################
                    scanner = (scan_mrp, scan_twr, scan_time)
                    report_file = os.path.join(job_dir, dir + '.csv')
                    report_file_list.append(report_file)
                    if not_exists(report_file):
                        title = ["Design"]
                        for item in scanner: #mrp twr time
                            title += item.get_title()
                        append_file(report_file, ",".join(title))
                    if 1:
                        report_name, subtex = os.path.splitext(report_file)
                        report_file_mrp = report_name + '_mrp' + subtex
                        if not_exists(report_file_mrp):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[0]).get_title()
                            append_file(report_file_mrp, ",".join(title))
                        report_file_twr = report_name + '_twr' + subtex
                        if not_exists(report_file_twr):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[1]).get_title()
                            append_file(report_file_twr, ",".join(title))
                        report_file_time = report_name + '_time_mem' + subtex
                        if not_exists(report_file_time):
                            title = ["Design"]
                            #for item in scanner: #mrp twr time
                            title += (scanner[2]).get_title()
                            append_file(report_file_time, ",".join(title))
                    ######################
                    dir_scan = os.path.join(design_path, dir)
                    twr_file = time_file = ""
                    if 'rev_1' in os.listdir(dir_scan):
                        scan_time.set_srr_data()
                        foo = os.path.join(dir_scan, 'rev_1', '*.srr')
                        srr_file = get_unique_file(foo)
                        if srr_file:
                            scan_time.scan_srr(srr_file)
                    for foo in os.listdir(dir_scan):
                        if os.path.isdir(os.path.join(design_path, dir, foo)):
                            if re.search("Target", foo):
                                recover = RecoverPath(
                                    os.path.join(design_path, dir, foo))
                                scan_mrp.reset()
                                #scan_time.reset()
                                scan_time.set_map_data()
                                scan_time.set_par_data()
                                scan_twr.reset()
                                print_always(" Scanning %s" % foo)
                                _project_name = "%s_%s" % (dir, design[:7])
                                project_name = _project_name.strip("_")
                                mrp_file = os.path.join(
                                    dir_scan, foo, project_name + "_map.mrp")
                                scan_mrp.scan_report(mrp_file)
                                scan_time.scan_map(mrp_file)
                                par_file = os.path.join(
                                    dir_scan, foo, project_name + ".par")
                                scan_time.scan_par(par_file)
                                twr_file = os.path.join(
                                    dir_scan, foo, project_name + ".twr")
                                scan_twr.scan_report(twr_file)
                                #time_file = os.path.join(dir_scan,foo,"mpar_log.time")
                                #scan_time.scan_report(time_file)
                                data1 = scan_mrp.get_data()
                                append_file(report_file_mrp,
                                            ",".join([design] + data1))
                                data2 = scan_twr.get_data()
                                append_file(report_file_twr,
                                            ",".join([design] + data2))
                                data3 = scan_time.get_data()
                                append_file(report_file_time,
                                            ",".join([design] + data3))
                                data = data1 + data2 + data3
                                append_file(report_file, ",".join([design] + data))
                                recover.run()
        os.chdir(root_dir)
    # sort every CSV produced above; design_fmax maps case -> achieved fmax
    for f in report_file_list:
        if os.path.isfile(f):
            file_sorted, note, design_fmax = report_sort.sort_csv(f)
            report_sort.write_note(file_sorted, note)
    #----------------update run_stand-------------------#
    pass_log = glob.glob('*' + syn + '_pass_case.log')
    if pass_log:
        pass_log = pass_log[0]
    else:
        pass_log = '__'
    if os.path.isfile(pass_log):
        file_hand = file(pass_log, 'r')
        lines = file_hand.readlines()
        file_hand.close()
        stand_name = glob.glob('*' + syn + '_run_standard.bat')
        if stand_name:
            stand_name = stand_name[0]
            #stand_name = os.path.join(top_base,top_base2+'_'+syn+'_run_standard.bat')
        else:
            stand_name = ''
        run_standard = file(stand_name, 'r')
        run_standard_lines = run_standard.readlines()
        run_standard.close()
        useful_lines = []
        for case in lines:
            case = case.strip()
            if not case:
                continue
            else:
                pass
            case_tab = '--design=' + case
            case_tab_re = re.compile(case_tab + r'(\s+|$)')
            for line in run_standard_lines:
                line = line.strip()
                if not line:
                    continue
                if case_tab_re.search(line):
                    try:
                        # replace any existing --fmax-sweep with the
                        # achieved fmax of this passing case
                        fmax = float(design_fmax[case.strip()])
                        fmax = str(int(fmax))
                        #line = case_tab_re.sub('',line)
                        line = re.sub(r'--fmax-sweep=[\s\d]+\d', '', line)
                        line2 = line + ' --fmax-sweep=' + fmax + ' ' + fmax + ' ' + '10 \n'
                    except:
                        line2 = line
                    # NOTE(review): append placed inside the match branch
                    # (collapsed source is ambiguous) -- confirm
                    useful_lines.append(line2)
        run_standard = file(stand_name, 'w')
        run_standard.writelines(useful_lines)
        run_standard.close()
    # NOTE(review): the collapsed source ends this function with a stray
    # unmatched ''' (apparently the opening of a truncated commented-out
    # block); it is omitted here to keep the file parseable -- confirm
    # against the original file.
def scan_report():
    """Scan one Lattice design (BQS mode) into CSV reports.

    BQS variant: a single design (opt.design, mandatory) is scanned
    under opt.job_dir.  Per ``Target_Fmax_is_*MHz`` run directory it
    parses the .srr/.mrp/.twr/.par reports, appends CSV rows named
    ``<job>_<design>.csv`` (+ _mrp/_twr/_time/_clock side files), and
    finally sorts the combined CSV into opt.report_dir.

    NOTE(review): indentation reconstructed from a collapsed source
    dump -- nesting should be confirmed against the original file.
    """
    opt = option()
    job_dir = opt.job_dir
    job_dir = os.path.abspath(job_dir)
    need_design = opt.design
    if not need_design:
        print 'In the bqs, you have to specify the design name'
        return
    pap = opt.pap
    report_dir = opt.report_dir
    print '--==--' * 20
    print report_dir
    print os.path.isdir(report_dir)
    print '--==--' * 20
    scan_mrp = tool_scan_lattice.ScanLatticeMrp()
    scan_twr = tool_scan_lattice.ScanLatticeTwr(pap)
    scan_time = tool_scan_lattice.ScanLatticeTime()
    target_fmax_re = re.compile(r"Target_Fmax_is_(.+)MHz")
    if os.path.isdir(job_dir):
        pass
    else:
        print 'The job_dir:%s is not a directory' % job_dir
        return
    design_path = os.path.join(job_dir, need_design)
    # normalise Windows separators
    design_path = re.sub(r'\\', '/', design_path)
    report_file_list = []
    if 1:
        root_dir = os.getcwd()
        dir = os.path.basename(opt.job_dir)
        design = os.path.basename(design_path)
        if 1:
            scanner = (scan_mrp, scan_twr, scan_time)
            report_file = os.path.join(opt.job_dir, dir + '_' + design + '.csv')
            if report_file in report_file_list:
                pass
            else:
                report_file_list.append(report_file)
            # write CSV header rows only when the files do not exist yet
            if not_exists(report_file):
                title = ["Design"]
                for item in scanner: #mrp twr time
                    title += item.get_title()
                append_file(report_file, ",".join(title))
            if 1:
                report_name, subtex = os.path.splitext(report_file)
                report_file_mrp = report_name + '_mrp' + subtex
                if not_exists(report_file_mrp):
                    title = ["Design"]
                    #for item in scanner: #mrp twr time
                    title += (scanner[0]).get_title()
                    append_file(report_file_mrp, ",".join(title))
                report_file_twr = report_name + '_twr' + subtex
                if not_exists(report_file_twr):
                    title = ["Design"]
                    #for item in scanner: #mrp twr time
                    title += (scanner[1]).get_title()
                    append_file(report_file_twr, ",".join(title))
                report_file_time = report_name + '_time' + subtex
                if not_exists(report_file_time):
                    title = ["Design"] + ['Target_fmax']
                    #for item in scanner: #mrp twr time
                    title += (scanner[2]).get_title()
                    append_file(report_file_time, ",".join(title))
                report_file_clock = report_name + '_clock' + subtex
                append_file(report_file_clock,
                            ",".join(['Design', 'Colck', 'Loads']))
        if 1: # begin scan case
            if need_design:
                if design == need_design:
                    pass
                else:
                    return
            print_always('scanning %s' % design)
            srr_file = ''
            # first pass: locate the synthesis log outside the Target dirs
            for dir in os.listdir(design_path): # dir:Target_Fmax_is_060MHz
                dir_scan = os.path.join(design_path, dir)
                if (not re.search("Target", dir)
                    ) and os.path.isdir(dir_scan) and not srr_file:
                    srr_file = get_unique_file([dir_scan, ".srr"])
            # second pass: process each Target run directory
            for dir in os.listdir(design_path): # dir:Target_Fmax_is_060MHz
                dir_scan = os.path.join(design_path, dir)
                #if (not re.search("Target", dir)) and os.path.isdir(dir_scan) and not srr_file:
                #    srr_file = get_unique_file([dir_scan, ".srr"])
                if re.search("Target", dir) and os.path.isdir(dir_scan):
                    pass
                else:
                    continue
                recover = RecoverPath(dir_scan)
                used_dir = ''
                # pick the (expected single) implementation sub-directory
                for f_d in os.listdir(dir_scan):
                    f_d_full = os.path.join(dir_scan, f_d)
                    if os.path.isdir(f_d_full):
                        if used_dir:
                            print 'Warning: There are two implementation in the design'
                        used_dir = f_d_full
                #srr_file = get_unique_file([used_dir, ".srr"])
                if not srr_file:
                    pass
                else:
                    srr_file = os.path.join(design_path, dir, srr_file)
                    scan_time.scan_srr(srr_file)
                mrp_file = get_unique_file([used_dir, ".mrp"])
                if not_exists(mrp_file, "map report file"):
                    continue
                scan_mrp.scan_report(mrp_file)
                scan_time.scan_mrp(mrp_file)
                twr_file = time_file = par_file = ""
                target_fmax_for_time = '_'
                #------------------------------------------------#
                if 1:
                    useful_dir = used_dir
                    base_name = os.path.basename(useful_dir)
                    scan_time.reset_par_time_data()
                    target_fmax_for_match = target_fmax_re.search(dir)
                    if target_fmax_for_match:
                        target_fmax_for_time = target_fmax_for_match.group(1)
                    twr_p = os.path.join(useful_dir, '*' + base_name + ".twr")
                    twr_file = get_unique_file(twr_p)
                    if not twr_file:
                        twr_file = get_unique_file([useful_dir, ".twr"])
                    time_file = os.path.join(useful_dir, time_file)
                    par_file = get_unique_file([useful_dir, ".par"])
                if twr_file:
                    # timing report present: collect twr + par + runtime data
                    scan_twr.scan_report(twr_file)
                    scan_time.scan_report(time_file)
                    scan_time.scan_par(par_file)
                    #########################
                    #time_title = ['design']+scan_time.get_title2()
                    srr_data = scan_time.get_srr_time_data()
                    mrp_data = scan_time.get_mrp_time_data()
                    par_data = scan_time.get_par_time_data()
                    real_cpu_total = scan_time.get_total_time()
                    all_time_data = dict(srr_data.items() + mrp_data.items() +
                                         par_data.items() + real_cpu_total.items())
                    data_list = []
                    for key in scan_time.get_title():
                        value = all_time_data.get(key, 'NA')
                        data_list.append(value)
                    data_list2 = [design] + [target_fmax_for_time
                                             ] + data_list
                    append_file(report_file_time, ",".join(data_list2))
                    #########################
                    #data = [design] + scan_mrp.get_data() + \
                    #       scan_twr.get_data() + data_list
                    # an unfinished run is reported as all-'_' placeholders
                    if all_time_data['Complete'] == 'NA' or all_time_data[
                            'Par_Done'] == 'NA':
                        data_list = ['_']*len(scan_mrp.get_data()) +['_']*len(scan_twr.get_data()) +\
                                    ['_']*len(data_list)
                    else:
                        data_list = scan_mrp.get_data() + \
                                    scan_twr.get_data() + data_list
                    data = [design] + data_list
                    append_file(report_file, ",".join(data))
                    #-------------------
                    #report_sort_bqs.sort_csv(report_file,job_dir)
                    #-------------------
                    data = [design]
                    append_file(report_file_mrp,
                                ",".join([design] + scan_mrp.get_data()))
                    append_file(report_file_twr,
                                ",".join([design] + scan_twr.get_data()))
                    #append_file(report_file_time,",".join([design]+ scan_time.get_data()))
                    scan_time.reset_par_time_data()
                if not twr_file:
                    # no timing report: still emit the runtime row
                    srr_data = scan_time.get_srr_time_data()
                    mrp_data = scan_time.get_mrp_time_data()
                    par_data = scan_time.get_par_time_data()
                    real_cpu_total = scan_time.get_total_time()
                    all_time_data = dict(srr_data.items()+mrp_data.items()+par_data.items() + \
                                         real_cpu_total.items() )
                    data_list = []
                    for key in scan_time.get_title():
                        value = all_time_data.get(key, 'NA')
                        data_list.append(value)
                    data_list2 = [design] + [target_fmax_for_time
                                             ] + data_list
                    append_file(report_file_time, ",".join(data_list2))
                    #data = [design] + scan_mrp.get_data()+ data_list
                    if all_time_data['Complete'] == 'NA' or all_time_data[
                            'Par_Done'] == 'NA':
                        data = [design] + ['_'] * len(
                            scan_mrp.get_data()) + ['_'] * len(data_list)
                    else:
                        data = [design] + scan_mrp.get_data() + data_list
                    append_file(report_file, ",".join(data))
                    #-------------------
                    #report_sort_bqs.sort_csv(report_file,job_dir)
                    #-------------------
                    append_file(report_file_mrp,
                                ",".join([design] + scan_mrp.get_data()))
                    scan_time.reset_par_time_data()
                # clock loading rows from the map report
                # NOTE(review): placed at loop level -- confirm nesting
                scan_mrp.scan_clocks(mrp_file)
                clock_dict = scan_mrp.get_parse_line_clocks()
                for key in clock_dict.keys():
                    line = design + ',' + key + ',' + clock_dict[key]
                    append_file(report_file_clock, line)
                recover.run()
            # sort the combined CSV into the BQS report directory
            report_sort_bqs.sort_csv(report_file, report_dir)
        os.chdir(root_dir)
def scan_report():
    """Scan a Xilinx job directory handling both ISE and Vivado outputs.

    For each design (or the single opt.design) it looks into ``*.runs``
    directories under ``<design>/<opt.special_structure>``.  ISE runs
    (detected by the presence of ``*_map.mrp``) are parsed with the
    mrp/twr/time scanners; otherwise the Vivado
    ``*timing_summary_routed.rpt`` / ``*utilization_placed.rpt`` pair is
    parsed and the row is also echoed between ``#BQS_RETRN_DATA_*``
    markers for the BQS harness to pick up.

    NOTE(review): indentation reconstructed from a collapsed source
    dump -- nesting should be confirmed against the original file.
    """
    opt = option()
    job_dir = opt.job_dir
    design = opt.design
    scan_mrp = tool_scan_xilinx.ScanXilinxMrp()
    scan_twr = tool_scan_xilinx.ScanXilinxTwr()
    scan_time = tool_scan_xilinx.ScanXilinxTimeMem()
    scan_timing_rpt = tool_scan_xilinx.ScanXilinxTimingRpt()
    scan_placed_rpt = tool_scan_xilinx.ScanXilinxPlacedRpt()
    tag = opt.special_structure
    report_name = opt.report_name #this is useless now
    if report_name == 'report.csv':
        report_file = os.path.basename(job_dir) + '.csv'
    # NOTE(review): when report_name != 'report.csv' report_file is never
    # assigned before the join below -- looks like a latent bug; confirm
    report_path = opt.report_path
    if not report_path:
        report_path = job_dir
    report_file = os.path.join(report_path, report_file)
    all_designs = []
    if os.path.isdir(job_dir):
        pass
    else:
        print 'The job_dir:%s is not a directory' % job_dir
        return
    if not design:
        for dir in os.listdir(job_dir): # get all the design path
            dir2 = os.path.join(job_dir, dir)
            if os.path.isdir(dir2):
                all_designs.append(dir)
            else:
                pass
    else:
        all_designs.append(design)
    report_file_list = []
    if 1:
        root_dir = os.getcwd()
        for design in all_designs:
            print_always('scanning %s' % design)
            design_path = os.path.join(job_dir, design, tag)
            for dir in os.listdir(design_path):
                if dir.endswith('.runs') and os.path.isdir(
                        os.path.join(design_path, dir)):
                    #####################
                    scanner = (scan_mrp, scan_twr, scan_time)
                    report_file_list.append(report_file)
                    ######################
                    dir_scan = os.path.join(design_path, dir)
                    twr_file = time_file = ""
                    if 'rev_1' in os.listdir(dir_scan):
                        scan_time.set_srr_data()
                        foo = os.path.join(dir_scan, 'rev_1', '*.srr')
                        srr_file = get_unique_file(foo)
                        if srr_file:
                            scan_time.scan_srr(srr_file)
                    for foo in os.listdir(dir_scan):
                        if os.path.isdir(os.path.join(design_path, dir, foo)):
                            if re.search("Target", foo): # this is ISE
                                recover = RecoverPath(
                                    os.path.join(design_path, dir, foo))
                                scan_mrp.reset()
                                #scan_time.reset()
                                scan_time.set_map_data()
                                scan_time.set_par_data()
                                scan_twr.reset()
                                print_always(" Scanning %s" % foo)
                                _project_name = "%s_%s" % (dir, design[:7])
                                project_name = _project_name.strip("_")
                                mrp_file = os.path.join(
                                    dir_scan, foo, project_name + "_map.mrp")
                                # ISE flow: a map report exists
                                if os.path.isfile(mrp_file):
                                    if not_exists(report_file):
                                        title = ["Design"]
                                        for item in scanner: #mrp twr time
                                            title += item.get_title()
                                        append_file(report_file, ",".join(title))
                                    if 1:
                                        report_name, subtex = os.path.splitext(
                                            report_file)
                                        report_file_mrp = report_name + '_mrp' + subtex
                                        if not_exists(report_file_mrp):
                                            title = ["Design"]
                                            #for item in scanner: #mrp twr time
                                            title += (scanner[0]).get_title()
                                            append_file(
                                                report_file_mrp, ",".join(title))
                                        report_file_twr = report_name + '_twr' + subtex
                                        if not_exists(report_file_twr):
                                            title = ["Design"]
                                            #for item in scanner: #mrp twr time
                                            title += (scanner[1]).get_title()
                                            append_file(
                                                report_file_twr, ",".join(title))
                                        report_file_time = report_name + '_time_mem' + subtex
                                        if not_exists(report_file_time):
                                            title = ["Design"]
                                            #for item in scanner: #mrp twr time
                                            title += (scanner[2]).get_title()
                                            append_file(
                                                report_file_time, ",".join(title))
                                    scan_mrp.scan_report(mrp_file)
                                    scan_time.scan_map(mrp_file)
                                    par_file = os.path.join(
                                        dir_scan, foo, project_name + ".par")
                                    scan_time.scan_par(par_file)
                                    twr_file = os.path.join(
                                        dir_scan, foo, project_name + ".twr")
                                    scan_twr.scan_report(twr_file)
                                    #time_file = os.path.join(dir_scan,foo,"mpar_log.time")
                                    #scan_time.scan_report(time_file)
                                    data1 = scan_mrp.get_data()
                                    append_file(report_file_mrp,
                                                ",".join([design] + data1))
                                    data2 = scan_twr.get_data()
                                    append_file(report_file_twr,
                                                ",".join([design] + data2))
                                    data3 = scan_time.get_data()
                                    append_file(report_file_time,
                                                ",".join([design] + data3))
                                    data = data1 + data2 + data3
                                    append_file(report_file,
                                                ",".join([design] + data))
                                #for vivado
                                if not os.path.isfile(twr_file):
                                    if 1:
                                        title = ["Design"]
                                        for item in [
                                                scan_timing_rpt, scan_placed_rpt
                                        ]: #mrp twr time
                                            title += item.get_title()
                                        if not_exists(report_file):
                                            append_file(
                                                report_file, ",".join(title))
                                        if 0:
                                            report_name, subtex = os.path.splitext(
                                                report_file)
                                            report_file_mrp = report_name + '_mrp' + subtex
                                            if not_exists(report_file_mrp):
                                                title = ["Design"]
                                                #for item in scanner: #mrp twr time
                                                title += (
                                                    scanner[0]).get_title()
                                                append_file(
                                                    report_file_mrp,
                                                    ",".join(title))
                                            report_file_twr = report_name + '_twr' + subtex
                                            if not_exists(report_file_twr):
                                                title = ["Design"]
                                                #for item in scanner: #mrp twr time
                                                title += (
                                                    scanner[1]).get_title()
                                                append_file(
                                                    report_file_twr,
                                                    ",".join(title))
                                            report_file_time = report_name + '_time_mem' + subtex
                                            if not_exists(report_file_time):
                                                title = ["Design"]
                                                #for item in scanner: #mrp twr time
                                                title += (
                                                    scanner[2]).get_title()
                                                append_file(
                                                    report_file_time,
                                                    ",".join(title))
                                        timing_rpt = os.path.join(
                                            dir_scan, foo,
                                            '*timing_summary_routed.rpt')
                                        timing_rpt = get_unique_file(timing_rpt)
                                        total_data = [design]
                                        if timing_rpt:
                                            scan_timing_rpt.scan_report(timing_rpt)
                                            total_data += scan_timing_rpt.get_data(
                                            )
                                        else:
                                            total_data += ['NA'] * len(
                                                scan_timing_rpt.get_title())
                                        placed_rpt = os.path.join(
                                            dir_scan, foo,
                                            '*utilization_placed.rpt')
                                        placed_rpt = glob.glob(placed_rpt)
                                        used_rpt = ''
                                        # skip the clock_utilization variant
                                        for rpt in placed_rpt:
                                            if rpt.find('clock_utilization') != -1:
                                                continue
                                            else:
                                                used_rpt = rpt
                                                break
                                        if used_rpt:
                                            scan_placed_rpt.scan_report(used_rpt)
                                            total_data += scan_placed_rpt.get_data(
                                            )
                                        else:
                                            total_data += ['NA'] * len(
                                                scan_placed_rpt.get_title())
                                        append_file(report_file,
                                                    ",".join(total_data))
                                        # echo the row as pseudo-XML for BQS
                                        for_bqs_data = '<scan_case>\n'
                                        for id1, t in enumerate(title):
                                            for_bqs_data = for_bqs_data + "\t<%s>" % t + total_data[
                                                id1] + "</%s>\n" % t
                                        for_bqs_data = for_bqs_data + "</scan_case>"
                                        print '#BQS_RETRN_DATA_BEGIN#'
                                        print for_bqs_data
                                        print '#BQS_RETRN_DATA_END#'
                                recover.run()
        os.chdir(root_dir)
    for f in report_file_list:
        if os.path.isfile(f):
            file_sorted, note, design_fmax = report_sort.sort_csv(f)
            report_sort.write_note(file_sorted, note)
    #----------------update run_stand-------------------#
    # NOTE(review): dead code (if 0); also references an undefined `syn`
    if 0:
        pass_log = glob.glob('*' + syn + '_pass_case.log')
        if pass_log:
            pass_log = pass_log[0]
        else:
            pass_log = '__'
        if os.path.isfile(pass_log):
            file_hand = file(pass_log, 'r')
            lines = file_hand.readlines()
            file_hand.close()
            stand_name = glob.glob('*' + syn + '_run_standard.bat')
            if stand_name:
                stand_name = stand_name[0]
                #stand_name = os.path.join(top_base,top_base2+'_'+syn+'_run_standard.bat')
            else:
                stand_name = ''
            run_standard = file(stand_name, 'r')
            run_standard_lines = run_standard.readlines()
            run_standard.close()
            useful_lines = []
            for case in lines:
                case = case.strip()
                if not case:
                    continue
                else:
                    pass
                case_tab = '--design=' + case
                case_tab_re = re.compile(case_tab + r'(\s+|$)')
                for line in run_standard_lines:
                    line = line.strip()
                    if not line:
                        continue
                    if case_tab_re.search(line):
                        try:
                            fmax = float(design_fmax[case.strip()])
                            fmax = str(int(fmax))
                            #line = case_tab_re.sub('',line)
                            line = re.sub(r'--fmax-sweep=[\s\d]+\d', '', line)
                            line2 = line + ' --fmax-sweep=' + fmax + ' ' + fmax + ' ' + '10 \n'
                        except:
                            line2 = line
                        useful_lines.append(line2)
            run_standard = file(stand_name, 'w')
            run_standard.writelines(useful_lines)
            run_standard.close()
def scan_report():
    """Re-scan a Lattice job tree (walk-based) and refresh pass/fail logs.

    Deletes stale CSVs in opt.job_dir, writes fresh report headers, then
    walks the whole tree looking for directories that contain a .mrp map
    report and appends their map data to the ``*_mrp`` CSV.  After
    sorting the combined CSV, designs with a numeric fmax are written to
    pass_case.log and the rest to fail_case.log (preserving any prior
    fail annotations after the ':').

    NOTE(review): indentation reconstructed from a collapsed source
    dump -- nesting should be confirmed against the original file.
    """
    opt = option()
    job_dir = opt.job_dir
    job_dir = os.path.abspath(job_dir)
    need_design = opt.design
    syn = opt.synthesis
    #syn = ''
    family = opt.family
    report_name = opt.report_name #this is useless now
    for_pattern = ''
    pap = opt.pap
    scan_mrp = tool_scan_lattice.ScanLatticeMrp()
    scan_twr = tool_scan_lattice.ScanLatticeTwr(pap)
    scan_time = tool_scan_lattice.ScanLatticeTime()
    target_fmax_re = re.compile(r"Target_Fmax_is_(.+)MHz")
    if family:
        for_pattern = '_' + family
    if syn:
        for_pattern = for_pattern + '_' + syn
    dir_pattern = re.compile(for_pattern)
    all_designs = []
    if os.path.isdir(job_dir):
        # start each scan from a clean slate: drop stale CSV reports
        for d in os.listdir(job_dir):
            if d.endswith('.csv'):
                d = os.path.join(job_dir, d)
                os.remove(d)
        pass
    else:
        print 'The job_dir:%s is not a directory' % job_dir
        return
    for dir in os.listdir(job_dir): # get all the design path
        dir2 = os.path.join(job_dir, dir)
        if os.path.isdir(dir2):
            all_designs.append(dir)
        else:
            pass
    report_file_list = []
    if 1:
        root_dir = os.getcwd()
        dir = os.path.basename(opt.job_dir)
        if 1:
            scanner = (scan_mrp, scan_twr, scan_time)
            report_file = os.path.join(opt.job_dir, dir + '.csv')
            if report_file in report_file_list:
                pass
            else:
                report_file_list.append(report_file)
            # write CSV header rows only when the files do not exist yet
            if not_exists(report_file):
                title = ["Design"]
                for item in scanner: #mrp twr time
                    title += item.get_title()
                append_file(report_file, ",".join(title))
            if 1:
                report_name, subtex = os.path.splitext(report_file)
                report_file_mrp = report_name + '_mrp' + subtex
                if not_exists(report_file_mrp):
                    title = ["Design"]
                    #for item in scanner: #mrp twr time
                    title += (scanner[0]).get_title()
                    append_file(report_file_mrp, ",".join(title))
                report_file_twr = report_name + '_twr' + subtex
                if not_exists(report_file_twr):
                    title = ["Design"]
                    #for item in scanner: #mrp twr time
                    title += (scanner[1]).get_title()
                    append_file(report_file_twr, ",".join(title))
                report_file_time = report_name + '_time' + subtex
                if not_exists(report_file_time):
                    title = ["Design"] + ['Target_fmax']
                    #for item in scanner: #mrp twr time
                    title += (scanner[2]).get_title()
                    append_file(report_file_time, ",".join(title))
                report_file_clock = report_name + '_clock' + subtex
                append_file(report_file_clock,
                            ",".join(['Design', 'Colck', 'Loads']))
        #for design in all_designs:
        # walk the whole tree; any directory containing a .mrp is a result
        for root1, dirs1, files1 in os.walk(job_dir):
            root1 = os.path.abspath(root1)
            find_mrp = 0
            for f in files1:
                if f.endswith('.mrp'):
                    find_mrp = 1
                    break
            if find_mrp == 1:
                pass
            else:
                continue
            design_path = os.path.join(root1) # e60_ecp3/g64
            srr_file = ''
            for dir in os.listdir(design_path): # dir:Target_Fmax_is_060MHz
                dir_scan = os.path.join(design_path, dir)
                if (not re.search("Target", dir)
                    ) and os.path.isdir(dir_scan) and not srr_file:
                    srr_file = get_unique_file([dir_scan, ".srr"])
            # NOTE(review): the body below sits inside this per-entry loop
            # in the collapsed source, so the same .mrp is re-scanned once
            # per directory entry -- confirm against the original file
            for dir in os.listdir(design_path): # dir:Target_Fmax_is_060MHz
                #if (not re.search("Target", dir)) and os.path.isdir(dir_scan) and not srr_file
                recover = RecoverPath(design_path)
                mrp_file = get_unique_file([design_path, ".mrp"])
                if not_exists(mrp_file, "map report file"):
                    continue
                scan_mrp.scan_report(mrp_file)
                scan_time.scan_mrp(mrp_file)
                append_file(report_file_mrp,
                            ",".join([design_path] + scan_mrp.get_data()))
                recover.run()
        os.chdir(root_dir)
    for f in report_file_list:
        if os.path.isfile(f):
            file_sorted, note, design_fmax = report_sort.sort_csv(f)
            report_sort.write_note(file_sorted, note)
            file_hand_pass = file('pass_case.log', 'w')
            #----------read fail case first:
            try:
                file_hand_fail = file('fail_case.log', 'r')
                fail_case_lines = file_hand_fail.readlines()
                file_hand_fail.close()
                # keep only the case name before any ':' annotation
                fail_case_lines2 = [
                    f_c.split(':')[0].strip() for f_c in fail_case_lines
                ]
            except:
                fail_case_lines2 = []
            file_hand_fail = file('fail_case.log', 'w')
            for key in design_fmax.keys():
                v = design_fmax[key]
                # a digit in the fmax value means the case passed
                if re.search(r'\d', v):
                    file_hand_pass.write(key + '\n')
                else:
                    if key in fail_case_lines2:
                        # preserve the previous annotated fail line
                        fail_k = fail_case_lines[fail_case_lines2.index(key)]
                        file_hand_fail.write(fail_k.strip() + '\n')
                    else:
                        file_hand_fail.write(key + '\n')
            file_hand_pass.close()
            file_hand_fail.close()