def proc_inst(self, multi_inst, inst_dic):
    """to process particular inst"""
    flow_root_dir = inst_dic["stage_dic"]["flow_root_dir"]
    stage_name = inst_dic["stage_dic"]["stage"]
    sub_stage_name = inst_dic["stage_dic"]["sub_stage"]
    if self.restore_dic:
        sub_stage_name = f".restore_{sub_stage_name}"
    dst_file = os.path.join(
        flow_root_dir, "scripts", stage_name, multi_inst, sub_stage_name
    ) if multi_inst else os.path.join(
        flow_root_dir, "scripts", stage_name, sub_stage_name)
    inst_dic["tmp_dic"]["local"]["_multi_inst"] = multi_inst
    pcom.mkdir(LOG, os.path.dirname(dst_file))
    LOG.info(":: generating file %s ...", dst_file)
    if os.path.isfile(dst_file):
        # keep a snapshot of the previously generated script for comparison
        shutil.copyfile(dst_file, f"{dst_file}.pre")
    pcom.ren_tempfile(LOG, inst_dic["tmp_file"], dst_file, inst_dic["tmp_dic"])
    if os.path.isfile(f"{dst_file}.pre"):
        # cmp is presumably filecmp.cmp: True when the .pre snapshot matches
        # the regenerated file
        pre_cur_same_flg = cmp(f"{dst_file}.pre", dst_file)
        if not pre_cur_same_flg:
            if self.cfm_yes or self.cfm_flg:
                LOG.warning(
                    "%s is modified, confirmed to re-run the stage",
                    os.path.join(multi_inst, sub_stage_name))
            else:
                LOG.info(
                    "%s is modified, "
                    "please confirm to re-run the stage and the following stages",
                    os.path.join(multi_inst, sub_stage_name))
                try:
                    pcom.cfm()
                    self.cfm_flg = True
                except:
                    # restore the previous script before re-raising
                    # (e.g. when the confirmation is declined)
                    shutil.copyfile(f"{dst_file}.pre", dst_file)
                    raise
def fill_blocks(self, blk_lst):
    """to fill blocks config dir after initialization"""
    env_boot.EnvBoot.__init__(self)
    self.boot_env()
    for blk_name in blk_lst:
        LOG.info(":: filling block %s ...", blk_name)
        os.environ["BLK_NAME"] = blk_name
        os.environ["BLK_ROOT"] = blk_root_dir = (
            f"{self.ced['PROJ_ROOT']}{os.sep}{blk_name}")
        pcom.mkdir(LOG, blk_root_dir)
        proj_cfg_dir = os.path.expandvars(settings.PROJ_CFG_DIR).rstrip(os.sep)
        blk_cfg_dir = os.path.expandvars(settings.BLK_CFG_DIR).rstrip(os.sep)
        for cfg_kw in self.cfg_dic:
            if cfg_kw in settings.BLK_CFG_UNFILL_LST:
                continue
            proj_cfg = f"{proj_cfg_dir}{os.sep}{cfg_kw}.cfg"
            blk_cfg = f"{blk_cfg_dir}{os.sep}{cfg_kw}.cfg"
            LOG.info("generating block config %s", blk_cfg)
            pcom.mkdir(LOG, os.path.dirname(blk_cfg))
            with open(proj_cfg) as pcf, open(blk_cfg, "w") as bcf:
                for line in pcom.gen_pcf_lst(pcf):
                    bcf.write(line)
        for dir_cfg_kw in self.dir_cfg_dic:
            if dir_cfg_kw == "lib":
                continue
            proj_dir_cfg = f"{proj_cfg_dir}{os.sep}{dir_cfg_kw}"
            blk_dir_cfg = f"{blk_cfg_dir}{os.sep}{dir_cfg_kw}{os.sep}DEFAULT"
            LOG.info("generating block config directory %s", blk_dir_cfg)
            if os.path.isdir(blk_dir_cfg):
                LOG.info(
                    "block level config directory %s already exists, "
                    "please confirm to overwrite it", blk_dir_cfg)
                pcom.cfm()
                shutil.rmtree(blk_dir_cfg, True)
            shutil.copytree(proj_dir_cfg, blk_dir_cfg)
            for blk_cfg in pcom.find_iter(blk_dir_cfg, "*.cfg", cur_flg=True):
                with open(blk_cfg) as ocf:
                    blk_lines = pcom.gen_pcf_lst(ocf)
                with open(blk_cfg, "w") as ncf:
                    for line in blk_lines:
                        ncf.write(line)
        proj_share_dir = os.path.expandvars(settings.PROJ_SHARE).rstrip(os.sep)
        proj_blk_cmn_dir = f"{proj_share_dir}{os.sep}block_common"
        blk_cmn_dir = f"{blk_root_dir}{os.sep}block_common"
        if not os.path.isdir(proj_blk_cmn_dir):
            continue
        if os.path.isdir(blk_cmn_dir):
            LOG.info(
                "block level common directory %s already exists, "
                "please confirm to overwrite it", blk_cmn_dir)
            pcom.cfm()
            shutil.rmtree(blk_cmn_dir, True)
        shutil.copytree(proj_blk_cmn_dir, blk_cmn_dir)
def fill_proj(self):
    """to fill project config and template dir after initialization"""
    LOG.info(":: filling project %s repo ...", self.repo_dic["init_proj_name"])
    proj_gi_file = f"{self.repo_dic['repo_dir']}{os.sep}.gitignore"
    with open(proj_gi_file, "w") as g_f:
        g_f.write(settings.GITIGNORE)
    proj_flg_file = f"{self.repo_dic['repo_dir']}{os.sep}{settings.FLG_FILE}"
    LOG.info("generating op project flag file %s", settings.FLG_FILE)
    with open(proj_flg_file, "w") as f_f:
        f_f.write(self.repo_dic["init_proj_name"])
    LOG.info("generating op project level configs and templates")
    env_boot.EnvBoot.__init__(self)
    suite_dict = dict(
        enumerate(os.listdir(os.path.expandvars(settings.OP_PROJ))))
    pcom.pp_list(suite_dict)
    index_str = (
        f"please input from {list(suite_dict)} to specify the suite for project")
    while True:
        index_rsp = input(f"{index_str}{os.linesep}--> ")
        if index_rsp.isdigit() and int(index_rsp) in suite_dict:
            break
        else:
            LOG.warning("please input a correct index")
    suite_name = suite_dict[int(index_rsp)]
    suite_dst_dir = os.path.expandvars(settings.PROJ_SHARE)
    if os.path.isdir(suite_dst_dir):
        LOG.info(
            "project share dir %s already exists, continuing to initialize the "
            "project will overwrite the current project configs, templates and plugins",
            suite_dst_dir)
        pcom.cfm()
        shutil.rmtree(suite_dst_dir, True)
    shutil.copytree(f"{settings.OP_PROJ}{os.sep}{suite_name}", suite_dst_dir)
    self.boot_env()
    utils_dst_dir = self.ced.get("PROJ_UTILS", "")
    if not utils_dst_dir:
        LOG.error("project level proj.cfg env PROJ_UTILS is not defined")
        raise SystemExit()
    if os.path.isdir(utils_dst_dir):
        LOG.info(
            "project utils dir %s already exists, continuing to initialize the "
            "project will overwrite the current project utils", utils_dst_dir)
        pcom.cfm()
        shutil.rmtree(utils_dst_dir, True)
    shutil.copytree(settings.OP_UTILS, utils_dst_dir)
    for prex_dir_k in (self.cfg_dic["proj"]["prex_admin_dir"]
                       if "prex_admin_dir" in self.cfg_dic["proj"] else {}):
        prex_dir = pcom.rd_cfg(
            self.cfg_dic["proj"], "prex_admin_dir", prex_dir_k, True)
        LOG.info("generating pre-set admin directory %s", prex_dir)
        pcom.mkdir(LOG, prex_dir)
    LOG.info("please perform the git commit and git push actions "
             "after project and block items are settled down")
def proc_release(self, flow_release_lst):
    """to process release info"""
    relz_blk_json_dir = f"{self.ced['PROJ_RELEASE_TO_TOP']}{os.sep}.json"
    pcom.mkdir(LOG, relz_blk_json_dir)
    for flow_release in flow_release_lst:
        flow_release = flow_release.strip("""'":""")
        err_log_str = (
            "begin sub-stage format is incorrect, "
            "it should be <flow>::<stage>:<sub_stage>")
        flow_relz_dic = {}
        try:
            flow_relz_dic["flow"], stage_str = flow_release.split("::")
            flow_relz_dic["stage"], flow_relz_dic["sub_stage"] = stage_str.split(":")
        except ValueError:
            LOG.error(err_log_str)
            raise SystemExit()
        if not all([bool(c_c) for c_c in flow_relz_dic.values()]):
            LOG.error(err_log_str)
            raise SystemExit()
        _, ver_dic, err_lst = self.exp_stages_misc(
            [], {}, [], self.cfg_dic.get("flow", {}), flow_relz_dic["flow"])
        if err_lst:
            LOG.error("flow %s has the following errors in flow.cfg",
                      flow_relz_dic["flow"])
            pcom.pp_list(err_lst)
            raise SystemExit()
        flow_relz_dic["user"] = self.ced["USER"]
        flow_relz_dic["block"] = self.ced["BLK_NAME"]
        flow_relz_dic["time"] = self.ced["DATETIME"].isoformat()
        flow_relz_dic["rtl_netlist"] = ver_dic.get("rtl_netlist", "")
        flow_relz_dic["proj_root"] = self.ced["PROJ_ROOT"]
        file_name = pcom.re_str("_".join(
            [flow_relz_dic["user"], flow_relz_dic["block"], flow_release]))
        relz_json_file = f"{relz_blk_json_dir}{os.sep}{file_name}.json"
        if os.path.isfile(relz_json_file):
            if self.cfm_yes:
                LOG.info(
                    "flow %s already released, confirmed to overwrite",
                    flow_release)
            else:
                LOG.info(
                    "flow %s already released, please confirm to overwrite",
                    flow_release)
                pcom.cfm()
        with open(relz_json_file, "w") as rjf:
            json.dump(flow_relz_dic, rjf)
        LOG.info("flow %s released", flow_release)
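# Illustration only (the value below is a made-up example, not taken from any
# real flow.cfg): a release item such as "syn::syn:syn_map" splits into
# flow "syn", stage "syn" and sub-stage "syn_map", which is the
# <flow>::<stage>:<sub_stage> format that proc_release() validates above.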
def git_proj(self, block_lst):
    """to check out project by using git"""
    try:
        self.repo_dic["repo"] = repo = git.Repo.init(self.repo_dic["repo_dir"])
    except PermissionError as err:
        LOG.error(err)
        raise SystemExit()
    repo.git.execute(["git", "config", "core.sparseCheckout", "true"])
    rmt = repo.remote() if repo.remotes else repo.create_remote(
        "origin", os.path.expandvars(self.repo_dic["repo_url"]))
    LOG.info("git pulling project %s from repository to %s",
             self.repo_dic["init_proj_name"], self.repo_dic["repo_dir"])
    pcom.cfm()
    rmt.fetch()
    sc_file = os.sep.join(
        [self.repo_dic["repo_dir"], ".git", "info", "sparse-checkout"])
    with open(sc_file, "w") as scf:
        if block_lst:
            scf.write(
                f"{os.sep}share{os.sep}{os.linesep}{os.sep}"
                f"{f'{os.sep}{os.linesep}{os.sep}'.join(block_lst)}{os.sep}")
        else:
            scf.write("*")
    try:
        # rmt.pull("master")
        repo.active_branch.checkout()
    except git.GitCommandError as err:
        if any([c_c in str(err) for c_c in settings.REPO_AUTH_ERR_STR_LST]):
            LOG.error("password (AD pwd) incorrect")
            raise SystemExit()
        elif any([c_c in str(err) for c_c in settings.REPO_BRANCH_ERR_STR_LST]):
            pass
        else:
            LOG.error(err)
            raise SystemExit()
    LOG.info("please run op cmds under the project dir %s",
             self.repo_dic["repo_dir"])
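# For illustration only, with a hypothetical block_lst = ["blk_a", "blk_b"] on
# a POSIX host (os.sep == "/", os.linesep == "\n"), git_proj() above writes the
# following patterns into .git/info/sparse-checkout, so only the shared area
# and the requested block directories are checked out:
#
#     /share/
#     /blk_a/
#     /blk_b/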
def init(self, init_lst):
    """to perform flow initialization"""
    if not self.blk_flg:
        LOG.error("it's not in a block directory, please cd into one")
        raise SystemExit()
    for init_name in init_lst:
        LOG.info(":: initializing flow %s directories ...", init_name)
        parent_flow = pcom.rd_cfg(
            self.cfg_dic.get("flow", {}), init_name, "pre_flow", True)
        if parent_flow:
            src_dir = f"{self.ced['BLK_CFG_FLOW']}{os.sep}{parent_flow}"
            LOG.info("inheriting from %s", parent_flow)
        else:
            src_dir = f"{self.ced['PROJ_SHARE_CFG']}{os.sep}flow"
            LOG.info("inheriting from project share")
        dst_dir = f"{self.ced['BLK_CFG_FLOW']}{os.sep}{init_name}"
        if not os.path.isdir(src_dir):
            LOG.error("parent flow directory %s is NA", src_dir)
            raise SystemExit()
        if os.path.isdir(dst_dir):
            if self.cfm_yes:
                LOG.warning(
                    "initializing flow directory %s already exists, "
                    "confirmed to overwrite the previous flow config and plugins",
                    dst_dir)
            else:
                LOG.info(
                    "initializing flow directory %s already exists, "
                    "please confirm to overwrite the previous flow config and plugins",
                    dst_dir)
                pcom.cfm()
            shutil.rmtree(dst_dir, True)
        shutil.copytree(src_dir, dst_dir)
        if not parent_flow:
            for blk_cfg in pcom.find_iter(dst_dir, "*.cfg", cur_flg=True):
                with open(blk_cfg) as ocf:
                    blk_lines = pcom.gen_pcf_lst(ocf)
                with open(blk_cfg, "w") as ncf:
                    for line in blk_lines:
                        ncf.write(line)
def proc_merge(self):
    """proc to merge with reference directory"""
    # set default src/dst directories
    if not self.src_dir:
        self.src_dir = os.path.expandvars(
            f"{settings.PROJ_CFG_DIR}{os.sep}flow")
    if not self.dst_dir:
        self.dst_dir = os.getcwd()
    # check src/dst directories
    if not os.path.isdir(self.src_dir) and not os.path.isfile(self.src_dir):
        LOG.error("source %s is not found", self.src_dir)
        raise SystemExit()
    if not os.path.isdir(self.dst_dir) and not os.path.isfile(self.dst_dir):
        LOG.error("destination %s is not found", self.dst_dir)
        raise SystemExit()
    if (os.path.isdir(self.src_dir) != os.path.isdir(self.dst_dir)
            or os.path.isfile(self.src_dir) != os.path.isfile(self.dst_dir)):
        LOG.error("invalid src/dst specification")
        raise SystemExit()
    self.src_dir = os.path.abspath(self.src_dir)
    self.dst_dir = os.path.abspath(self.dst_dir)
    # run merge
    if os.path.isdir(self.src_dir):
        LOG.info("src dir: %s", self.src_dir)
        LOG.info("dst dir: %s", self.dst_dir)
        LOG.info("please confirm to start merging")
        pcom.cfm()
        dcmp = dircmp(self.src_dir, self.dst_dir)
        self.merge_files(dcmp)
    else:
        LOG.info("src file: %s", self.src_dir)
        LOG.info("dst file: %s", self.dst_dir)
        LOG.info("please confirm to start merging")
        pcom.cfm()
        self.merge_file()
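# merge_files() is defined elsewhere in this class and is not shown here; as a
# hedged sketch only (names below are illustrative, not the actual
# implementation), a filecmp.dircmp result like dcmp above is typically walked
# recursively:
#
#     def walk_dcmp(dcmp):
#         for name in dcmp.left_only:       # present on the src side only
#             print("src only:", os.path.join(dcmp.left, name))
#         for name in dcmp.diff_files:      # present on both sides but different
#             print("differs :", os.path.join(dcmp.left, name))
#         for sub_dcmp in dcmp.subdirs.values():
#             walk_dcmp(sub_dcmp)           # recurse into common sub-directories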
def run_admin(args):
    """to run admin sub cmd"""
    admin_proc = AdminProc()
    if args.admin_list_proj:
        admin_proc.gen_all_proj()
        admin_proc.list_proj()
    elif args.admin_list_lab:
        admin_proc.gen_all_proj()
        admin_proc.list_lab()
    elif args.admin_proj_name:
        admin_proc.gen_all_proj()
        admin_proc.repo_proj(args.admin_proj_name)
        admin_proc.fill_proj()
    elif args.admin_block_lst:
        LOG.info(
            "generating block level directories and configs of %s, which will overwrite "
            "all the existing block level configs ...", args.admin_block_lst)
        pcom.cfm()
        admin_proc.fill_blocks(args.admin_block_lst)
        admin_proc.update_blocks(args.admin_block_lst)
    elif args.admin_update_blk is not None:
        LOG.info(
            "updating all block level directories according to RELEASE directory, "
            "which will overwrite the existing block files ...")
        pcom.cfm()
        admin_proc.update_blocks(args.admin_update_blk)
    elif args.admin_lib:
        LOG.info(
            "generating library mapping links and files, "
            "which will overwrite all the existing library mapping links and files ...")
        pcom.cfm()
        admin_proc.fill_lib()
    elif args.admin_release_check:
        admin_proc.check_release()
    elif args.admin_release:
        admin_proc.release()
    else:
        LOG.critical("no actions specified in op admin sub cmd")
        raise SystemExit()
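# Hypothetical wiring sketch only: the real "op admin" argument parser lives
# elsewhere in the op code base. The dest names below are inferred from the
# attributes read in run_admin() above, while the flag spellings are assumptions.
#
#     parser = argparse.ArgumentParser(prog="op")
#     sub_parsers = parser.add_subparsers()
#     admin_parser = sub_parsers.add_parser("admin")
#     admin_parser.add_argument("--list_proj", dest="admin_list_proj", action="store_true")
#     admin_parser.add_argument("--list_lab", dest="admin_list_lab", action="store_true")
#     admin_parser.add_argument("-p", dest="admin_proj_name")
#     admin_parser.add_argument("-b", dest="admin_block_lst", nargs="+")
#     admin_parser.set_defaults(func=run_admin)
#     args = parser.parse_args()
#     args.func(args)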