示例#1
0
 def check_release(self):
     """Check and report all released block information.

     Loads every release record (*.json) under the project release area,
     pretty-prints them, then logs a status table with one row per block
     directory found under PROJ_ROOT.
     """
     env_boot.EnvBoot.__init__(self)
     self.boot_env()
     LOG.info(":: release info checking ...")
     relz_lst = []
     for relz_file in pcom.find_iter(
             f"{self.ced['PROJ_RELEASE_TO_TOP']}{os.sep}.json", "*.json"):
         with open(relz_file) as r_f:
             relz_dic = json.load(r_f)
         relz_lst.append(relz_dic)
     LOG.info("all released block info")
     pcom.pp_list(relz_lst)
     relz_blk_lst = [c_c.get("block", "") for c_c in relz_lst]
     table_rows = [["Block", "Release Status", "Owner", "Time"]]
     for blk_dir in pcom.find_iter(self.ced["PROJ_ROOT"], "*", True, True,
                                   [".git", "share"]):
         blk_name = os.path.basename(blk_dir)
         owner_set = set()
         time_set = set()
         for relz_dic in relz_lst:
             if relz_dic.get("block", "") == blk_name:
                 owner_set.add(relz_dic.get("user", ""))
                 time_set.add(relz_dic.get("time", ""))
         if blk_name in relz_blk_lst:
             # join the sorted sets so the rendered cell is deterministic
             # and readable (a raw set would render as "{'user'}" in
             # arbitrary order)
             table_rows.append([blk_name, "Ready",
                                ", ".join(sorted(owner_set)),
                                ", ".join(sorted(time_set))])
         else:
             table_rows.append([blk_name, "N/A", "N/A", "N/A"])
     table = texttable.Texttable()
     table.set_cols_width([30, 15, 15, 30])
     table.add_rows(table_rows)
     relz_table = table.draw()
     LOG.info("release status table:%s%s", os.linesep, relz_table)
示例#2
0
 def update_blocks(self, blk_lst):
     """Obtain blocks input data from the release directory.

     Symlinks every entry under PROJ_RELEASE_TO_BLK into the matching
     block directory under PROJ_ROOT.  If blk_lst is non-empty, only the
     named blocks are updated.

     :param blk_lst: block names to update; falsy means all blocks
     """
     env_boot.EnvBoot.__init__(self)
     self.boot_env()
     LOG.info(":: updating blocks ...")
     # directories first (dir_flg=True pass), then files, so parent link
     # targets exist before the leaf links are created
     for data_src in list(
             pcom.find_iter(
                 self.ced["PROJ_RELEASE_TO_BLK"], "*", True)) + list(
                     pcom.find_iter(self.ced["PROJ_RELEASE_TO_BLK"], "*")):
         blk_name = os.path.basename(data_src).split(os.extsep)[0]
         if not blk_name:
             continue
         if blk_lst and blk_name not in blk_lst:
             continue
         blk_tar = data_src.replace(
             self.ced["PROJ_RELEASE_TO_BLK"],
             f"{self.ced['PROJ_ROOT']}{os.sep}{blk_name}")
         LOG.info("linking block files %s from %s", blk_tar, data_src)
         pcom.mkdir(LOG, os.path.dirname(blk_tar))
         # check islink first: for a dangling symlink os.path.exists
         # returns False, and a bare os.symlink would then raise
         # FileExistsError; islink catches both live and dangling links
         if os.path.islink(blk_tar):
             os.remove(blk_tar)
             os.symlink(data_src, blk_tar)
         elif not os.path.exists(blk_tar):
             os.symlink(data_src, blk_tar)
示例#3
0
 def fill_blocks(self, blk_lst):
     """to fill blocks config dir after initialization

     For each block: export block env vars, create the block root,
     render project-level plain configs and config directories into
     block-level copies, and copy the shared block_common directory.

     :param blk_lst: iterable of block names to fill under PROJ_ROOT
     """
     env_boot.EnvBoot.__init__(self)
     self.boot_env()
     for blk_name in blk_lst:
         LOG.info(":: filling block %s ...", blk_name)
         # export BLK_NAME/BLK_ROOT so settings path templates expand
         # per block via os.path.expandvars below
         os.environ["BLK_NAME"] = blk_name
         os.environ[
             "BLK_ROOT"] = blk_root_dir = f"{self.ced['PROJ_ROOT']}{os.sep}{blk_name}"
         pcom.mkdir(LOG, blk_root_dir)
         proj_cfg_dir = os.path.expandvars(settings.PROJ_CFG_DIR).rstrip(
             os.sep)
         blk_cfg_dir = os.path.expandvars(settings.BLK_CFG_DIR).rstrip(
             os.sep)
         # render each plain project config file into a block-level one,
         # except those listed as not-to-fill
         for cfg_kw in self.cfg_dic:
             if cfg_kw in settings.BLK_CFG_UNFILL_LST:
                 continue
             proj_cfg = f"{proj_cfg_dir}{os.sep}{cfg_kw}.cfg"
             blk_cfg = f"{blk_cfg_dir}{os.sep}{cfg_kw}.cfg"
             LOG.info("generating block config %s", blk_cfg)
             pcom.mkdir(LOG, os.path.dirname(blk_cfg))
             with open(proj_cfg) as pcf, open(blk_cfg, "w") as bcf:
                 for line in pcom.gen_pcf_lst(pcf):
                     bcf.write(line)
         # copy config directories into a block-level DEFAULT instance,
         # then re-render the contained *.cfg files in place
         for dir_cfg_kw in self.dir_cfg_dic:
             if dir_cfg_kw == "lib":
                 continue
             proj_dir_cfg = f"{proj_cfg_dir}{os.sep}{dir_cfg_kw}"
             blk_dir_cfg = f"{blk_cfg_dir}{os.sep}{dir_cfg_kw}{os.sep}DEFAULT"
             LOG.info("generating block config directory %s", blk_dir_cfg)
             if os.path.isdir(blk_dir_cfg):
                 # interactive confirmation before destroying an
                 # existing block-level config directory
                 LOG.info(
                     "block level config directory %s already exists, "
                     "please confirm to overwrite it", blk_dir_cfg)
                 pcom.cfm()
             shutil.rmtree(blk_dir_cfg, True)
             shutil.copytree(proj_dir_cfg, blk_dir_cfg)
             for blk_cfg in pcom.find_iter(blk_dir_cfg,
                                           "*.cfg",
                                           cur_flg=True):
                 with open(blk_cfg) as ocf:
                     blk_lines = pcom.gen_pcf_lst(ocf)
                 with open(blk_cfg, "w") as ncf:
                     for line in blk_lines:
                         ncf.write(line)
         proj_share_dir = os.path.expandvars(settings.PROJ_SHARE).rstrip(
             os.sep)
         proj_blk_cmn_dir = f"{proj_share_dir}{os.sep}block_common"
         blk_cmn_dir = f"{blk_root_dir}{os.sep}block_common"
         # block_common is optional at project level; skip silently if NA
         if not os.path.isdir(proj_blk_cmn_dir):
             continue
         if os.path.isdir(blk_cmn_dir):
             LOG.info(
                 "block level common directory %s already exists, "
                 "please confirm to overwrite it", blk_cmn_dir)
             pcom.cfm()
         shutil.rmtree(blk_cmn_dir, True)
         shutil.copytree(proj_blk_cmn_dir, blk_cmn_dir)
示例#4
0
 def release(self):
     """to generate release directory

     Consumes every pending release record (*.json) under the project
     release area: copies the matching flow output files into the
     per-block, per-date release tree, then removes the record.
     """
     env_boot.EnvBoot.__init__(self)
     self.boot_env()
     LOG.info(":: release content generating ...")
     for relz_json_file in pcom.find_iter(
             f"{self.ced['PROJ_RELEASE_TO_TOP']}{os.sep}.json", "*.json"):
         LOG.info("generating release of %s", relz_json_file)
         with open(relz_json_file) as rjf:
             relz_dic = json.load(rjf)
         # run directory of the released flow, reconstructed from the
         # record: <proj_root>/<block>/run/<rtl_netlist>/<flow>
         relz_path = os.sep.join([
             relz_dic["proj_root"], relz_dic["block"], "run",
             relz_dic["rtl_netlist"], relz_dic["flow"]
         ])
         relz_file_lst = list(pcom.find_iter(relz_path, "*"))
         # proj cfg "release" section maps category key -> file patterns
         for relz_k in self.cfg_dic["proj"]["release"]:
             for relz_v in pcom.rd_cfg(self.cfg_dic["proj"], "release",
                                       relz_k):
                 relz_pattern = (
                     f"{relz_path}{os.sep}*{relz_dic['stage']}*"
                     f"{os.path.splitext(relz_dic['sub_stage'])[0]}*{relz_v}"
                 )
                 match_relz_lst = fnmatch.filter(relz_file_lst,
                                                 relz_pattern)
                 if not match_relz_lst:
                     LOG.warning("no %s %s files found", relz_k, relz_v)
                 else:
                     LOG.info("copying %s %s files", relz_k, relz_v)
                 for relz_file in match_relz_lst:
                     dst_dir = os.sep.join([
                         self.ced["PROJ_RELEASE_TO_TOP"], relz_dic["block"],
                         self.ced["DATE"], relz_k
                     ])
                     pcom.mkdir(LOG, dst_dir)
                     if relz_v.endswith("/*"):
                         # directory pattern: keep the tail of the source
                         # path starting at the pattern's directory part,
                         # re-rooted under the destination block prefix
                         dst_file = re.sub(
                             rf".*?(?={relz_v[:-2]})",
                             f"{dst_dir}{os.sep}{relz_dic['block']}",
                             relz_file)
                     else:
                         # plain suffix pattern: flat destination name
                         dst_file = f"{dst_dir}{os.sep}{relz_dic['block']}{relz_v}"
                     pcom.mkdir(LOG, os.path.dirname(dst_file))
                     shutil.copyfile(relz_file, dst_file)
         # record consumed; remove it so the release is not repeated
         os.remove(relz_json_file)
示例#5
0
 def backup_date(self):
     """to backup date auto generation part of project"""
     LOG.info(":: backup date part")
     # destination root is stamped with today's date, e.g. 2020_01_31
     date_root = os.path.join(self.dst_root,
                              dt.datetime.now().strftime("%Y_%m_%d"))
     pcom.mkdir(LOG, date_root)
     work_dir = f"{self.src_root}{os.sep}WORK"
     for user_dir in pcom.find_iter(work_dir, "*", True, True):
         for blk_dir in pcom.find_iter(user_dir, "*", True, True, ["share"]):
             candidates = list(pcom.find_iter(blk_dir, "*"))
             for date_pat in backup_cfg.BACKUP_CFG_DATE_LST:
                 matched = fnmatch.filter(
                     candidates, os.path.join(blk_dir, date_pat))
                 stamped = [(os.path.getmtime(p), p) for p in matched]
                 if not stamped:
                     continue
                 # back up only the newest match (tuple max: mtime first,
                 # path as tie-breaker)
                 _, src_file = max(stamped)
                 dst_file = src_file.replace(self.src_root, date_root)
                 LOG.info("backup from %s to %s", src_file, dst_file)
                 pcom.mkdir(LOG, os.path.dirname(dst_file))
                 shutil.copyfile(src_file, dst_file)
示例#6
0
 def boot_cfg(self):
     """to process project and block global cfg dic used only by op

     Fills self.cfg_dic from plain *.cfg files (project level, overlaid
     with block level when in a block) and self.dir_cfg_dic from config
     sub-directories (DEFAULT = project, plus one entry per block
     instance directory).
     """
     base_proj_cfg_dir = os.path.expandvars(settings.PROJ_CFG_DIR).rstrip(
         os.sep)
     base_blk_cfg_dir = os.path.expandvars(settings.BLK_CFG_DIR).rstrip(
         os.sep)
     # plain config files directly under the project config directory
     for proj_cfg in pcom.find_iter(base_proj_cfg_dir,
                                    "*.cfg",
                                    cur_flg=True):
         cfg_kw = os.path.splitext(os.path.basename(proj_cfg))[0]
         # proj.cfg is handled elsewhere; presumably booted earlier —
         # NOTE(review): confirm against the enclosing class
         if cfg_kw == "proj":
             continue
         if self.blk_flg and cfg_kw not in settings.BLK_CFG_UNFILL_LST:
             # block context: overlay block config on top of project one
             blk_cfg = proj_cfg.replace(base_proj_cfg_dir, base_blk_cfg_dir)
             if not os.path.isfile(blk_cfg):
                 LOG.warning("block config file %s is NA", blk_cfg)
             self.cfg_dic[cfg_kw] = pcom.gen_cfg([proj_cfg, blk_cfg])
         else:
             self.cfg_dic[cfg_kw] = pcom.gen_cfg([proj_cfg])
     # config sub-directories (one level down), keyed by directory name
     for proj_cfg_dir in pcom.find_iter(base_proj_cfg_dir, "*", True, True):
         cfg_dir_kw = os.path.basename(proj_cfg_dir)
         self.dir_cfg_dic[cfg_dir_kw] = {"DEFAULT": {}}
         # DEFAULT instance holds the project-level configs
         for proj_cfg in pcom.find_iter(proj_cfg_dir, "*.cfg",
                                        cur_flg=True):
             cfg_kw = os.path.splitext(os.path.basename(proj_cfg))[0]
             self.dir_cfg_dic[cfg_dir_kw]["DEFAULT"][cfg_kw] = pcom.gen_cfg(
                 [proj_cfg])
         if not self.blk_flg:
             continue
         blk_cfg_dir = proj_cfg_dir.replace(base_proj_cfg_dir,
                                            base_blk_cfg_dir)
         if not os.path.isdir(blk_cfg_dir):
             continue
         # one extra instance per block-level sub-directory, each config
         # overlaid on its project-level counterpart
         for blk_ins in pcom.find_iter(blk_cfg_dir, "*", True, True):
             blk_ins_kw = os.path.basename(blk_ins)
             self.dir_cfg_dic[cfg_dir_kw][blk_ins_kw] = {}
             for blk_cfg in pcom.find_iter(blk_ins, "*.cfg", cur_flg=True):
                 cfg_kw = os.path.splitext(os.path.basename(blk_cfg))[0]
                 proj_cfg = f"{proj_cfg_dir}{os.sep}{os.path.basename(blk_cfg)}"
                 self.dir_cfg_dic[cfg_dir_kw][blk_ins_kw][
                     cfg_kw] = pcom.gen_cfg([proj_cfg, blk_cfg])
示例#7
0
 def test_find_iter(self):
     """test case"""
     # layout: base_dir/{test.log,.txt,.cfg} plus three sub-dirs
     # test1..test3, each with its own trio of files and a nested
     # empty "test" directory
     exts = ("log", "txt", "cfg")
     test_dir_tup = tuple(
         f"{self.base_dir}{os.sep}test{num}" for num in (1, 2, 3))
     test_tup = tuple(f"{self.base_dir}{os.sep}test.{ext}" for ext in exts)
     test1_tup = tuple(f"{test_dir_tup[0]}{os.sep}test1.{ext}" for ext in exts)
     test2_tup = tuple(f"{test_dir_tup[1]}{os.sep}test2.{ext}" for ext in exts)
     test3_tup = tuple(f"{test_dir_tup[2]}{os.sep}test3.{ext}" for ext in exts)
     test_test_dir_tup = tuple(f"{cc}{os.sep}test" for cc in test_dir_tup)
     for dir_path in test_dir_tup + test_test_dir_tup:
         os.makedirs(dir_path)
     for file_path in test_tup + test1_tup + test2_tup + test3_tup:
         open(file_path, "w").close()
     # recursive file search
     self.assertEqual(
         set(pcom.find_iter(self.base_dir, "*.log")),
         {test_tup[0], test1_tup[0], test2_tup[0], test3_tup[0]})
     self.assertEqual(set(pcom.find_iter(self.base_dir, "*test1*")),
                      set(test1_tup))
     # dir_flg=True matches directories only
     self.assertEqual(set(pcom.find_iter(self.base_dir, "*.log", True)),
                      set())
     self.assertEqual(set(pcom.find_iter(self.base_dir, "*test*", True)),
                      set(test_dir_tup + test_test_dir_tup))
     # cur_flg=True restricts the search to the top directory
     self.assertEqual(
         set(pcom.find_iter(self.base_dir, "*.log", cur_flg=True)),
         {test_tup[0]})
     self.assertEqual(
         set(pcom.find_iter(test_dir_tup[0], "*.log", cur_flg=True)),
         {test1_tup[0]})
     self.assertEqual(
         set(pcom.find_iter(self.base_dir, "*test*", True, True)),
         set(test_dir_tup))
示例#8
0
 def init(self, init_lst):
     """to perform flow initialization"""
     if not self.blk_flg:
         LOG.error("it's not in a block directory, please cd into one")
         raise SystemExit()
     for init_name in init_lst:
         LOG.info(":: initializing flow %s directories ...", init_name)
         # a flow may inherit from another block flow (pre_flow) or,
         # failing that, from the project share defaults
         base_flow = pcom.rd_cfg(
             self.cfg_dic.get("flow", {}), init_name, "pre_flow", True)
         if base_flow:
             src_dir = f"{self.ced['BLK_CFG_FLOW']}{os.sep}{base_flow}"
             LOG.info("inheriting from %s", base_flow)
         else:
             src_dir = f"{self.ced['PROJ_SHARE_CFG']}{os.sep}flow"
             LOG.info("inheriting from project share")
         dst_dir = f"{self.ced['BLK_CFG_FLOW']}{os.sep}{init_name}"
         if not os.path.isdir(src_dir):
             LOG.error("parent flow directory %s is NA", src_dir)
             raise SystemExit()
         if os.path.isdir(dst_dir):
             if self.cfm_yes:
                 LOG.warning(
                     "initializing flow directory %s already exists, "
                     "confirmed to overwrite the previous flow config and plugins", dst_dir)
             else:
                 LOG.info(
                     "initializing flow directory %s already exists, "
                     "please confirm to overwrite the previous flow config and plugins", dst_dir)
                 pcom.cfm()
             shutil.rmtree(dst_dir, True)
         shutil.copytree(src_dir, dst_dir)
         if base_flow:
             continue
         # flows taken from project share still carry project-level
         # config templates; render them into block-level form in place
         for flow_cfg in pcom.find_iter(dst_dir, "*.cfg", cur_flg=True):
             with open(flow_cfg) as ocf:
                 rendered_lines = pcom.gen_pcf_lst(ocf)
             with open(flow_cfg, "w") as ncf:
                 ncf.writelines(rendered_lines)
示例#9
0
 def gen_link_liblist(self, dst_root, lib_dir_cfg_dic):
     """to process project or block lib mapping links

     For each process section: link matching library files into their
     destination directories, dump the matched list as JSON, and emit a
     per-process liblist .tcl plus a combined liblist.json.

     :param dst_root: root directory for match lists and liblist output
     :param lib_dir_cfg_dic: dict-of-configs with "process", "search"
         and "liblist" entries (as parsed from lib/*.cfg)
     """
     LOG.info(":: library mapping of files linking ...")
     liblist_var_dic = collections.defaultdict(dict)
     for process, process_sec in lib_dir_cfg_dic.get("process", {}).items():
         if process == "DEFAULT":
             continue
         # phase 1: link candidate files per search section
         for lib_type, lib_type_sec in lib_dir_cfg_dic.get("search",
                                                           {}).items():
             if lib_type == "DEFAULT":
                 continue
             # search root, with process variables substituted
             can_root = pcom.ren_tempstr(
                 LOG, pcom.rd_sec(lib_type_sec, "src", True), process_sec)
             if not can_root:
                 LOG.warning(
                     "library mapping search root path option 'src' of %s "
                     "is not defined in lib/search.cfg, skipped", lib_type)
                 continue
             elif not os.path.isdir(can_root):
                 LOG.warning("library mapping search root path %s is NA",
                             can_root)
                 continue
             can_tar = pcom.rd_sec(lib_type_sec, "dst", True)
             if not can_tar:
                 LOG.warning(
                     "library mapping destination directory option 'dst' of %s "
                     "is not defined in lib/search.cfg, skipped", lib_type)
                 continue
             LOG.info("library mapping for part %s", lib_type)
             pcom.mkdir(LOG, can_tar)
             can_ignore_lst = pcom.rd_sec(lib_type_sec, "ignore")
             can_lst = list(
                 pcom.find_iter(can_root, "*", i_lst=can_ignore_lst))
             # every "pattern*" option is a glob template; {{var}}
             # placeholders are expanded over the process section values
             for lib_type_k in lib_type_sec:
                 if not lib_type_k.startswith("pattern"):
                     continue
                 pattern_search = pcom.rd_sec(lib_type_sec, lib_type_k,
                                              True)
                 var_set = set(re.findall(r"{{(.*?)}}", pattern_search))
                 for var_dic in pcom.prod_vs_iter(var_set, process_sec):
                     pattern_search_str = pcom.ren_tempstr(
                         LOG, pattern_search, var_dic)
                     for match_file in fnmatch.filter(
                             can_lst,
                             f"{can_root}{os.sep}{pattern_search_str}"):
                         # link_src_dst also records into self.match_lst
                         # — presumably; verify against the class
                         self.link_src_dst(match_file, can_tar, can_root)
         # phase 2: persist this process's matched file list
         match_lst_file = f"{dst_root}{os.sep}{process}.match_lst"
         LOG.info("generating library map list file %s", match_lst_file)
         with open(match_lst_file, "w") as mlf:
             json.dump(self.match_lst, mlf, indent=4)
         liblist_dir = f"{dst_root}{os.sep}liblist"
         pcom.mkdir(LOG, liblist_dir)
         var_name_line_dic = {}
         liblist_cfg = lib_dir_cfg_dic.get("liblist", {})
         # optional per-process custom overrides: section "custom:<process>"
         try:
             custom_dic = {
                 c_k: pcom.rd_cfg(liblist_cfg, f"custom:{process}", c_k)
                 for c_k in liblist_cfg[f"custom:{process}"]
             }
         except KeyError:
             custom_dic = {}
         if "var" not in liblist_cfg:
             LOG.error("var section is NA in lib/liblist.cfg")
             raise SystemExit()
         # phase 3: resolve each liblist variable to its file list;
         # custom entries are taken verbatim, others are matched against
         # the linked files
         for var_name in liblist_cfg["var"]:
             match_file_lst = []
             match_pattern_lst = (custom_dic[var_name] if var_name
                                  in custom_dic else pcom.rd_cfg(
                                      liblist_cfg, "var", var_name))
             for match_pattern in match_pattern_lst:
                 var_set = set(re.findall(r"{{(.*?)}}", match_pattern))
                 for var_dic in pcom.prod_vs_iter(var_set, process_sec):
                     fnmatch_lst = ([
                         pcom.ren_tempstr(LOG, match_pattern, var_dic)
                     ] if var_name in custom_dic else fnmatch.filter(
                         self.match_lst,
                         pcom.ren_tempstr(LOG, match_pattern, var_dic)))
                     match_file_lst.extend(fnmatch_lst)
             var_name_line_dic[var_name] = match_file_lst
         LOG.info("generating library liblist files in %s", liblist_dir)
         #file generation and liblist dic generation for templates
         tcl_line_lst = []
         for var_name, match_file_lst in var_name_line_dic.items():
             liblist_var_dic[process][var_name] = f" \\{os.linesep} ".join(
                 match_file_lst)
             tcl_value_str = f" \\{os.linesep}{' '*(6+len(var_name))}".join(
                 match_file_lst)
             tcl_line_lst.append(f'set {var_name} "{tcl_value_str}"')
         with open(f"{liblist_dir}{os.sep}{process}.tcl", "w") as lltf:
             lltf.write(os.linesep.join(tcl_line_lst))
         # reset for the next process section
         self.match_lst = []
     # NOTE(review): liblist_dir is bound inside the process loop; if the
     # "process" config has no non-DEFAULT sections this raises NameError
     # — confirm whether an empty process config is possible here
     with open(f"{liblist_dir}{os.sep}liblist.json", "w") as lljf:
         json.dump(liblist_var_dic, lljf, indent=4)