def load_config(self, arg_cfg_file):
    """Read and execute the given config file, then load its definitions.

    The config file is either a path relative to the launch directory or a
    fully qualified path; neither case requires path manipulation here.
    Returns True on success; re-raises any open/processing failure after
    logging it.
    """
    try:
        # Use a context manager so the handle is always closed; the original
        # called open(...).read() and leaked the file object.
        with open(arg_cfg_file) as my_cfg_file:
            my_content = my_cfg_file.read()
    except Exception:
        Msg.err("Unable to open Config File: %s" % (str(arg_cfg_file)))
        raise
    try:
        # exec_content runs the file as Python; my_glb/my_loc are its
        # global/local namespaces, and only the locals are consumed.
        my_glb, my_loc = SysUtils.exec_content(my_content)
        self.load_config_data(my_loc)
    except Exception as arg_ex:
        Msg.err("Unable to Process Config File, Message: %s" % (str(arg_ex)))
        raise
    return True
def _getWorkflow(self, aConfigPath): """Retrieve workflow from the provided path to add to aSysArgs to parse when building mCmdLineOpts later""" # Check workflow in config file first try: Msg.info("Using workflow from: %s" % aConfigPath) config_module = SourceFileLoader("config", aConfigPath).load_module() return ( config_module.single_run_app_opts, config_module.sequence_app_opts, ) except AttributeError: # aConfigPath is None or the specified config file does not # contain workflow if aConfigPath: Msg.info("Workflow improperly defined in %s" % aConfigPath) else: Msg.info("Config not specified.") # Check environment variable next try: Msg.info("Attempting to use MASTER_RUN_CONFIG environment variable.") config_module = SourceFileLoader( "config", os.environ.get("MASTER_RUN_CONFIG") ).load_module() return ( config_module.single_run_app_opts, config_module.sequence_app_opts, ) except AttributeError: if os.environ.get("MASTER_RUN_CONFIG"): Msg.info( "Workflow improperly defined in MASTER_RUN_CONFIG: " "%s" % os.environ.get("MASTER_RUN_CONFIG") ) else: Msg.info("MASTER_RUN_CONFIG environment variable is not set.") except FileNotFoundError: # MASTER_RUN_CONFIG environment variable # is set, but cannot be found Msg.err( "MASTER_RUN_CONFIG is currently set to %s. " "Please ensure that it exists." % os.environ.get("MASTER_RUN_CONFIG") ) sys.exit(1) # Assume typo so quit # Use default last try: default_config_file = "%s/config/%s" % ( sys.path[0], Defaults.fcfg_name, ) Msg.info("Using workflow from default config file: %s" % default_config_file) config_module = SourceFileLoader("config", default_config_file).load_module() return ( config_module.single_run_app_opts, config_module.sequence_app_opts, ) except FileNotFoundError: # default config file cannot be found Msg.err("Please ensure the default config file exists.") sys.exit(1) # Assume typo so quit
def process_group_items(self, arg_ofile, arg_items):
    """Sum instruction counts and elapsed times over arg_items.

    Writes one "Task: ..." summary line per item to arg_ofile and returns
    the (total instruction count, total elapsed time) pair for the group.
    Errors are logged and the partial totals are returned.
    """
    group_count = 0
    group_elapsed = 0
    try:
        for item in arg_items:
            item_elapsed = item.force_end - item.force_start
            item_count = item.total
            group_elapsed += item_elapsed
            group_count += item_count
            arg_ofile.write(
                "\nTask: %s, Instructions: %d, Elapsed: %0.3f\n"
                % (item.task_id, item_count, item_elapsed)
            )
    except Exception as group_err:
        Msg.error_trace()
        Msg.err("Error Processing Summary, Reason: %s" % (str(group_err)))
    return group_count, group_elapsed
def load(self):
    """Initialize the run and load the top-level control file.

    Builds the root FileController, seeds item_data with the control file
    name and user options, loads the initial control item, and wires the
    failure/termination callbacks. Raises LoadError if the control file
    cannot be loaded.
    """
    # Msg.user( "MasterRun::load" )
    self.init_all()
    # create the top level FileController
    self.fctrl = FileController(self.process_queue, self.m_app_info)
    # Msg.lout( self.options, "user", "Initial Option Values" )
    self.item_data[CtrlItmKeys.fname] = self.fctrl_name
    self.item_data[CtrlItmKeys.options] = self.options
    # only propagate the RTL configuration when one was supplied
    if self.rtl is not None:
        self.item_data["rtl"] = self.rtl
    try:
        self.ctrl_item.load(self.m_app_info, self.item_data)
    except BaseException:
        # log before re-raising so the failure is attributed to this step
        Msg.err("Unable to load initial control item.")
        raise
    # populate the controller
    if not self.fctrl.load(self.ctrl_item):
        raise LoadError("Unable to load initial Control File .... ")
    # initialize the callbacks, if these are None then these are ignored
    self.fctrl.set_on_fail_proc(self.on_fail_proc)
    self.fctrl.set_is_term_proc(self.is_term_proc)
def process_summary(self, sum_level=SummaryLevel.Fail):
    """Write the performance summary log for this run.

    Creates performance_summary.log in the summary directory with a
    date/time header, per-group results, and overall totals. Errors are
    logged rather than raised.

    NOTE(review): sum_level is currently unused here — confirm whether
    filtering was intended.
    """
    my_file_name = "%sperformance_summary.log" % (
        PathUtils().include_trailing_path_delimiter(self.summary_dir))
    Msg.dbg("Master Log File: %s" % (my_file_name))
    my_utcdt = DateTime.UTCNow()
    try:
        # The with-statement closes the file on every path. The original
        # additionally called my_ofile.close() in a finally block on a
        # handle initialized to None, which raised AttributeError whenever
        # open() itself failed.
        with open(my_file_name, "w") as my_ofile:
            my_ofile.write("Date: %s\n" % (DateTime.DateAsStr(my_utcdt)))
            my_ofile.write("Time: %s\n" % (DateTime.TimeAsStr(my_utcdt)))
            self.process_errors(my_ofile)
            my_total_count, my_total_elapsed = self.process_groups(my_ofile)
            Msg.blank("info")
            my_line = "Total Instructions Generated: %3d\n" % (my_total_count)
            my_line += "Total Elapsed Time: %0.3f\n" % (my_total_elapsed)
            my_line += "Overall Instructions per Second: %0.3f\n" % (
                # guard against division by zero when nothing was timed
                SysUtils.ifthen(bool(my_total_elapsed),
                                my_total_count / my_total_elapsed, 0))
            Msg.info(my_line)
            my_ofile.write(my_line)
    except Exception as arg_ex:
        Msg.error_trace()
        Msg.err("Error Processing Summary, " + str(arg_ex))
def run(self):
    """Execute one queued task on this worker thread.

    Sets up the work directory, launches the task via a launcher object,
    extracts its results into a SummaryQueueItem (or a
    SummaryErrorQueueItem on failure), and — unconditionally, in the
    finally block — enqueues that item, decrements the live-thread count,
    and releases the done semaphore so the dispatcher can proceed.
    """
    # Msg.user( "Worker Thread Instance Id: %s, (1)" % ( str( id( self ))) , "WORK-THREAD" )
    my_sum_qitem = None
    try:
        self.setupWorkDir()
        my_launcher = self.create_launcher()
        Msg.user("Launcher Id 1: %s" % (str(id(my_launcher))), "WORK-THREAD")
        # Msg.user( "Worker Thread Instance Id: %s, (2)" % ( str( id( self ))) , "WORK-THREAD" )
        my_launcher.launch()
        Msg.user("Launcher Id 2: %s" % (str(id(my_launcher))), "WORK-THREAD")
        # Msg.user( "Worker Thread Instance Id: %s, (3)" % ( str( id( self ))) , "WORK-THREAD" )
        my_process_result = my_launcher.extract_results()
        Msg.user("Process Result: %s" % (my_process_result), "WORK-THREAD")
        Msg.user("Launcher Id 3: %s" % (str(id(my_launcher))), "WORK-THREAD")
        # self.launcher = my_launcher
        Msg.user("Process Result: %s" % (my_process_result), "WORK-THREAD")
        my_sum_qitem = SummaryQueueItem(my_process_result)
        Msg.user("Created Summary Queue Item", "WORK-THREAD")
    except Exception as arg_ex:
        # convert any task failure into an error queue item so the summary
        # still records the task
        Msg.error_trace(str(arg_ex))
        Msg.err("Message: %s, Control File Path: %s" %
                (str(arg_ex), self.queue_item.work_dir))
        my_sum_qitem = SummaryErrorQueueItem({
            "error": arg_ex,
            "message": "Error Processing Task ...",
            "path": self.queue_item.ctrl_item.file_path(),
            "type": str(type(arg_ex))
        })
    finally:
        # my_launcher = None
        # retry the enqueue until the summary queue accepts the item,
        # sleeping between attempts and logging a heartbeat every 10 tries
        my_attempt = 0
        while (self.summary.queue.enqueue(my_sum_qitem) == False):
            SysUtils.sleep(100)  # heartbeat
            my_attempt += 1
            if (my_attempt % 10) == 0:
                Msg.dbg("Attempt %d to insert into summary queue" %
                        (my_attempt))
        # bookkeeping must happen even on failure so the dispatcher does
        # not wait forever
        self.thread_count.delta(-1)
        Msg.user("Thread Count Decremented", "WORK-THREAD")
        self.done_semaphore.release()
        Msg.user("Semaphore Released", "WORK-THREAD")
        Msg.user("Launcher Id 5: %s" % (str(id(self.launcher))), "WORK-THREAD")
def process_summary(self, arg_sum_level=SummaryLevel.Fail):
    """Write the regression summary log for this run.

    Creates regression_summary.log in the summary directory with a
    date/time header, per-task results filtered by arg_sum_level, and
    overall totals. Processing errors are logged; failures opening the
    file propagate to the caller (as in the original, where open() sat
    outside the try block).
    """
    # Msg.user( "process_summary()", "REG-SUMMARY" )
    my_utcdt = DateTime.UTCNow()
    my_file_name = "%sregression_summary.log" % (
        PathUtils().include_trailing_path_delimiter(self.summary_dir)
    )
    # Msg.user( "Master Log File: %s" % ( my_file_name ))
    # The with-statement closes the file on every path; the original also
    # closed it redundantly in a finally block and carried unused
    # my_ofile = None / myLines = [] initializers.
    with open(my_file_name, "w") as my_ofile:
        try:
            my_ofile.write("Date: %s\n" % (DateTime.DateAsStr(my_utcdt)))
            my_ofile.write("Time: %s\n" % (DateTime.TimeAsStr(my_utcdt)))
            self.process_errors(my_ofile)
            my_instr_count, my_cycle_count = self.process_summary_tasks(
                my_ofile, arg_sum_level
            )
            self.process_summary_totals(my_ofile, my_instr_count, my_cycle_count)
        except Exception as arg_ex:
            Msg.error_trace()
            Msg.err("Processing Summary, " + str(arg_ex))
def query_logs(self, arg_log, arg_elog):
    """Parse the result log and, when present, the error log.

    Returns (results, errors) where errors is None when arg_elog is None.
    Parse failures are logged and re-raised.
    """
    my_errors = None
    # The with-statements close each log on every path; the original also
    # closed the handles redundantly in finally blocks.
    with self.open_log_file(arg_log, "r") as my_hfile:
        Msg.dbg("File Open: %s" % arg_log)
        try:
            my_results = self.query_result_log(my_hfile)
        except Exception as arg_ex:
            # NOTE: Determine the possible errors and handle accordingly,
            # for now just keep processing
            Msg.error_trace()
            Msg.err(str(arg_ex))
            raise
    if arg_elog is not None:
        with self.open_log_file(arg_elog, "r") as my_hfile:
            Msg.dbg("File Open: %s" % arg_elog)
            try:
                my_errors = self.query_errors(my_hfile)
            except Exception as arg_ex:
                # NOTE: Determine the possible errors and handle
                # accordingly, for now just keep processing
                Msg.error_trace()
                Msg.err(str(arg_ex))
                raise
    return my_results, my_errors
def process_task(self, arg_task_file, aTaskDir):
    """Queue a single task for processing.

    Allocates the next indexed sub-directory under aTaskDir, points the
    control item at arg_task_file, prepares its content, and enqueues a
    ProcessQueueItem. Any failure is logged and re-raised so the task is
    not added to the summary.
    """
    try:
        # get the subdirectory index
        my_ndx = self.mAppsInfo.getNextIndex(aTaskDir)
        # form sub task directory (zero-padded, e.g. "00003")
        sub_task_dir = PathUtils.append_path(
            PathUtils.include_trailing_path_delimiter(aTaskDir),
            "%05d" % my_ndx)
        # save the task template file name with path to the control item
        self.ctrl_item.fname = arg_task_file
        # prepare control item content, TODO don't really need it.
        my_content = self.ctrl_item.prepare(self.mAppsInfo, arg_task_file)
        my_queue_item = ProcessQueueItem(sub_task_dir, self.ctrl_item,
                                         self.mAppsInfo, my_content)
        self.mProcessQueue.enqueue(my_queue_item)
    except Exception as arg_ex:
        Msg.error_trace()
        Msg.err(str(arg_ex))
        # reraise to prevent adding to summary instance
        raise
def remove(cls, arg_path, arg_force=False):
    """Delete the file at arg_path.

    Returns True on success, False on failure — matching the contract of
    the sibling helpers move()/rename()/copy_file(). The original fell off
    the end on success and returned None, so truthiness checks by callers
    always saw failure.

    NOTE(review): arg_force is currently unused — confirm whether forced
    removal semantics were intended here.
    """
    try:
        os.remove(arg_path)
        Msg.dbg("Success, File Removed: %s" % (arg_path))
        return True
    except OSError as arg_ex:
        Msg.err(str(arg_ex))
        return False
def execute( self ):
    """Build and run the RTL simulation command, reporting its results.

    Returns False when the command raises during setup/launch; otherwise
    returns test_passed, which is currently always True — the retcode-based
    success check is commented out below.

    NOTE(review): my_result is captured but never used for the return
    value; confirm whether the commented-out SysUtils.success() line should
    be restored.
    """
    my_result = None
    test_passed = True
    try:
        self.build_cmd()
        self.copyWavesFsdbDoFile()
        self.outputRerunScript()
        # report the command line
        Msg.info( "RTLCommand = " + str( { "rtl-command": self.rtl_cmd } ))
        # execute the simulation
        my_result = SysUtils.exec_process( self.rtl_cmd, self.rtl_log, self.rtl_log, self.ctrl_item.timeout, True )
        # NOTE(review): the two adjacent string literals below concatenate
        # to "RTLEX-RESULT" — the trailing "" looks like an editing leftover.
        Msg.user( "Results: %s" % ( str( my_result )), "RTLEX-RESULT" "")
        my_extract_results = self.extract_results( my_result, "./" + self.rtl_log, None )
        # report the results
        Msg.info( "RTLResult = " + str( my_extract_results ))
    except Exception as arg_ex:
        Msg.error_trace( "RTL Process Failure" )
        Msg.err( "RTL did not properly execute, Reason: %s" % ( str( arg_ex )))
        return False
    finally:
        pass
    #return SysUtils.success( int(my_result[ RtlResult.process_retcode ]) )
    return test_passed
def load_signaled(self, arg_dict):
    """Load the signal id and message from a signal-result dictionary.

    Expects arg_dict to carry "retcode" and "message" keys; on malformed
    input both attributes are set to sentinel values so later readers do
    not hit AttributeError.
    """
    Msg.user(str(arg_dict), "SIGNALED")
    try:
        self.signal_id = arg_dict["retcode"]
        self.signal_message = arg_dict["message"]
    except (KeyError, TypeError):
        # narrowed from the original bare BaseException, which also
        # swallowed KeyboardInterrupt/SystemExit
        self.signal_id = -1
        # the original left signal_message unset when "message" was the
        # missing key, breaking later formatting of the signal text
        self.signal_message = "Signal Info Corrupt"
        Msg.err("Signal Info Corrupt")
def move(arg_class, arg_src, arg_target="."):
    """Move arg_src to arg_target; return True on success, False on error."""
    Msg.dbg("PathUtils::move( %s, %s )" % (arg_src, arg_target))
    try:
        shutil.move(arg_src, arg_target)
        return True
    except shutil.Error as move_err:
        # log the failure and report it through the return value
        Msg.err(str(move_err))
        return False
def resolvePath(aClass, aFilePath):
    """Return the fully resolved form of aFilePath as a string.

    Exits the process when the path cannot be located, since continuing
    with a mis-specified file would only fail later.
    """
    try:
        resolved = Path(aFilePath).resolve()
    except FileNotFoundError:
        Msg.err('Unable to locate %s. Please ensure that it exists.' % aFilePath)
        # quit to prevent continuing with a file that was probably
        # improperly specified
        sys.exit(1)
    # return as a string instead of a Path object
    return str(resolved)
def rename(arg_class, arg_src, arg_tgt):
    """Rename arg_src to arg_tgt; return True on success, False on error."""
    Msg.dbg("PathUtils::rename( %s, %s )" % (arg_src, arg_tgt))
    try:
        os.rename(arg_src, arg_tgt)
        return True
    except Exception as rename_err:
        # log the failure and report it through the return value
        Msg.err(str(rename_err))
        return False
def process_summary_task(self, arg_ofile, arg_task, arg_sum_level):
    """Write gen/sim/rtl/trace-cmp summary lines for every item in one task.

    Each item's available result lines are written to arg_ofile, and echoed
    to the console when arg_sum_level meets the item's reporting level.
    Returns (total instruction count, total cycle count) accumulated from
    the simulate and rtl results. Per-item failures are logged and the
    remaining items are still processed.
    """
    my_instr_count = 0
    my_cycle_count = 0
    # Msg.lout( arg_task, "user", "process_summary_task" )
    for my_item in arg_task:
        try:
            if my_item.has_generate():
                # post the generate results
                my_outline = my_item.get_gen_line()
                if my_outline is not None:
                    arg_ofile.write(my_outline)
                    if arg_sum_level >= my_item.force_level:
                        Msg.info(my_outline)
            if my_item.has_simulate():
                # post the simulate results
                my_outline = my_item.get_iss_line()
                if my_outline is not None:
                    arg_ofile.write(my_outline)
                    if arg_sum_level >= my_item.iss_level:
                        Msg.info(my_outline)
                my_instr_count += my_item.instr_count
            if my_item.has_rtl():
                # post the rtl run results
                my_outline = my_item.get_rtl_line()
                if my_outline is not None:
                    arg_ofile.write(my_outline)
                    if arg_sum_level >= my_item.rtl_level:
                        Msg.info(my_outline)
                my_cycle_count += my_item.cycle_count
            try:
                if my_item.has_trace_cmp():
                    # post the trace-compare results
                    my_outline = my_item.get_trace_cmp_line()
                    if my_outline is not None:
                        arg_ofile.write(my_outline)
                        if arg_sum_level >= my_item.trace_cmp_level:
                            Msg.info(my_outline)
            except Exception:
                # narrowed from a bare except, which also caught
                # SystemExit/KeyboardInterrupt
                Msg.user("Exception Raised", "GOT HERE")
        except Exception:
            Msg.error_trace()
            Msg.err("Processing Task Index: %s" % (str(my_item.index)))
    return my_instr_count, my_cycle_count
def copy_file(arg_class, arg_src, arg_dest="."):
    """Copy arg_src to arg_dest; return True on success, False on error."""
    try:
        shutil.copy(arg_src, arg_dest)
        return True
    except (shutil.SameFileError, OSError) as copy_err:
        # same handling for both failure modes: log and signal failure
        Msg.err(str(copy_err))
        return False
def load(self, arg_queue_item):
    """Load the process, task, and log information for one queue item, then report.

    Failures are logged and swallowed so a bad item does not abort the run.

    NOTE(review): the BaseException handler logs a trace but — unlike the
    Exception handler — does not log the message, and it suppresses
    KeyboardInterrupt/SystemExit; confirm that is intentional.
    """
    try:
        self.load_process_info(arg_queue_item.process_info)
        self.load_task_info()
        self.load_process_log()
        self.report()
    except Exception as arg_ex:
        Msg.error_trace()
        Msg.err(str(arg_ex))
    except BaseException:
        Msg.error_trace()
def write_file(aClass, aFilePath, aContent, aFileType):
    """Write aContent to aFilePath.

    Returns True when at least one character was written, False otherwise
    (including empty content and any I/O error). Errors are logged with the
    human-readable aFileType label.
    """
    try:
        # The with-statement closes the file on every path; the original
        # also closed it redundantly in a finally block, and left open()
        # failures outside the logged-error/False contract.
        with open(aFilePath, "w") as my_ofile:
            # write() returns the character count, so empty content
            # deliberately yields False
            return my_ofile.write(aContent) > 0
    except Exception as arg_ex:
        Msg.error_trace()
        Msg.err("Error Writing %s File, %s" % (aFileType, str(arg_ex)))
    return False
def instruction_counts(self):
    """Extract instruction counts from this task's generator log.

    Scans the gen log for " Instructions Generated" lines and stores the
    Total/Secondary/Default counts on self.count / self.secondary /
    self.default. Returns 0; a missing-count log raises, and a malformed
    count is logged.
    """
    my_lines = None
    my_glog = "%s%s" % (
        PathUtils.include_trailing_path_delimiter(self.work_dir),
        self.force_log,
    )
    Msg.user("Path: %s" % my_glog)
    # the with-statement closes the log; the original also called
    # my_log.close() redundantly inside the block
    with open(my_glog, "r") as my_log:
        my_lines = my_log.readlines()
        Msg.dbg("Line %d: %s" % (len(my_lines), my_lines[-1]))
    try:
        my_results = [
            my_tmp for my_tmp in my_lines
            if re.search(" Instructions Generated", my_tmp)
        ]
        Msg.lout(my_results, "dbg")
        if not my_results:
            raise Exception('Instruction Count Not Found in "gen.log"')
        # ok there are instruction counts located
        for my_line in my_results:
            my_line = my_line.strip()
            # find the instruction type (Total, Default, Secondary),
            # i.e. the text between "]" and "Instr"
            my_lpos = my_line.find("]")
            my_rpos = my_line.find("Instr")
            my_type = PerformanceInstructionType.instruction_type(
                (my_line[my_lpos + 1:my_rpos - 1]).strip())
            # get the count for this instruction type
            my_lpos = my_line.find(":")
            my_count = int(my_line[my_lpos + 2:].strip())
            if my_type == PerformanceInstructionType.Total:
                self.count = my_count
            elif my_type == PerformanceInstructionType.Secondary:
                self.secondary = my_count
            elif my_type == PerformanceInstructionType.Default:
                self.default = my_count
    except ValueError:
        Msg.error_trace()
        # The original formatted the error with int(my_lines[-1]), which
        # itself raises ValueError on a log line and masked the report;
        # log the raw line instead.
        Msg.err("Unable to extract instruction count from %s" %
                (my_lines[-1]))
    return 0
def load_trace_cmp_result(self, arg_dict):
    """Load trace-compare results from a result dictionary.

    Stores the log name, retcode, and message on self. When a signal was
    recorded (self.signal_id is not None) the message notes the run was
    incomplete. A missing or non-numeric retcode is reported and stored
    as -1.
    """
    # Msg.lout( arg_dict, "user", "CMP Results Dictionary ... " )
    self.trace_cmp_log = arg_dict["trace-cmp-log"]
    try:
        self.trace_cmp_retcode = int(arg_dict["trace-cmp-retcode"])
        if self.signal_id is None:
            self.trace_cmp_msg = str(arg_dict["trace-cmp-msg"])
        else:
            self.trace_cmp_msg = "Incomplete, Signal Id: %s, %s " % (
                str(self.signal_id), str(self.signal_message))
    except (KeyError, TypeError, ValueError):
        # narrowed from a bare except, which also caught
        # SystemExit/KeyboardInterrupt
        self.trace_cmp_retcode = -1
        Msg.err("CMP Return Code in unrecognizable format")
def process(self):
    """Run the task list once per configured iteration, honoring shutdown."""
    for _ in range(self.ctrl_item.iterations):
        # a triggered shutdown stops all further iterations
        if self.is_terminated():
            return
        try:
            self.process_task_list()
        except Exception as iter_err:
            # log and continue with the next iteration
            Msg.error_trace()
            Msg.err(str(iter_err))
            Msg.blank()
def _getWorkflow(self, aConfigPath):
    """Retrieve the workflow option tuples for command-line parsing.

    Resolution order: the explicit config path, then the MASTER_RUN_CONFIG
    environment variable, then the default config file under sys.path[0].
    Returns (single_run_app_opts, sequence_app_opts) from the first config
    module defining both; exits the process when a configured file cannot
    be found.
    """
    # Check workflow in config file first
    try:
        Msg.info('Using workflow from: %s' % aConfigPath)
        # NOTE(review): load_module() is deprecated in importlib — confirm
        # the supported Python versions before modernizing.
        config_module = SourceFileLoader('config', aConfigPath).load_module()
        return (config_module.single_run_app_opts,
                config_module.sequence_app_opts)
    except AttributeError:
        # aConfigPath is None or the specified config file does not contain workflow
        if aConfigPath:
            Msg.info('Workflow improperly defined in %s' % aConfigPath)
        else:
            Msg.info('Config not specified.')
    # Check environment variable next
    try:
        Msg.info(
            'Attempting to use MASTER_RUN_CONFIG environment variable.')
        # when the variable is unset, get() yields None and the loader
        # raises, reported via the AttributeError branch below
        config_module = SourceFileLoader(
            'config', os.environ.get('MASTER_RUN_CONFIG')).load_module()
        return (config_module.single_run_app_opts,
                config_module.sequence_app_opts)
    except AttributeError:
        if os.environ.get('MASTER_RUN_CONFIG'):
            Msg.info(
                'Workflow improperly defined in MASTER_RUN_CONFIG: %s' %
                os.environ.get('MASTER_RUN_CONFIG'))
        else:
            Msg.info('MASTER_RUN_CONFIG environment variable is not set.')
    except FileNotFoundError:
        # MASTER_RUN_CONFIG environment variable is set, but cannot be found
        Msg.err(
            'MASTER_RUN_CONFIG is currently set to %s. Please ensure that it exists.' %
            os.environ.get('MASTER_RUN_CONFIG'))
        sys.exit(1)  # Assume typo so quit
    # Use default last
    try:
        default_config_file = '%s/config/%s' % (sys.path[0],
                                                Defaults.fcfg_name)
        Msg.info('Using workflow from default config file: %s' %
                 default_config_file)
        config_module = SourceFileLoader(
            'config', default_config_file).load_module()
        return (config_module.single_run_app_opts,
                config_module.sequence_app_opts)
    except FileNotFoundError:
        # default config file cannot be found
        Msg.err('Please ensure the default config file exists.')
        sys.exit(1)  # Assume typo so quit
def archive_dir(cls, arg_srcdir):
    """Archive arg_srcdir by renaming it to <dir>_<YMD>_<NNN>.

    The numeric suffix is one greater than the highest existing archive for
    today's date (000 when none exist). Returns the result of the move, or
    False on any failure.
    """
    try:
        # get the base name (today's date stamp)
        my_basename = "%s" % (str(DateTime.YMD()))
        Msg.dbg("Base Name: %s" % (my_basename))
        # set the directory mask
        my_srcdir = PathUtils.exclude_trailing_path_delimiter(arg_srcdir)
        my_srcmask = "%s_%s_???" % (my_srcdir, my_basename)
        Msg.dbg("Directory Mask: %s" % (my_srcmask))
        # list any previous copies
        my_dirlist = sorted(PathUtils.list_files(my_srcmask))
        Msg.lout(my_dirlist, "dbg")
        my_findex = 0
        # there are only two possiblities here
        # 1. there is only one match, in which case the mask does not
        # include a number
        # 2. there are more than one match in which case the last match
        # should contain a number
        if len(my_dirlist) > 0:
            # remove the wildcards so only the numeric suffix of the last
            # match remains after the replace below
            my_srcmask = my_srcmask.replace("???", "")
            Msg.dbg("New File Mask: %s" % (my_srcmask))
            my_tmp = my_dirlist[-1]
            Msg.dbg("Last Filename: %s" % (my_tmp))
            my_tmp = my_tmp.replace(my_srcmask, "")
            Msg.dbg("My Index Last Filename: %s" % (my_tmp))
            # next index is one past the highest existing archive
            my_findex = int(my_tmp) + 1
            Msg.dbg("My New Index Filename: %s" % (my_findex))
        # get the target name (zero-padded three-digit suffix)
        my_tgtdir = "%s_%s_%0.3d" % (my_srcdir, my_basename, my_findex)
        Msg.dbg("Target Directory: %s" % (my_tgtdir))
        return PathUtils.move(my_srcdir, my_tgtdir)
    except Exception as arg_ex:
        Msg.error_trace(str(arg_ex))
        Msg.err(str(arg_ex))
        return False
def rmdir(arg_class, arg_dir, arg_force=False):
    """Remove a directory; recursively when arg_force is True.

    Returns True when the directory was removed or did not exist, False
    when it still exists after a failed removal.
    """
    Msg.dbg("PathUtils.rmdir( %s, %s )" % (arg_dir, str(arg_force)))
    # recursive removal for forced deletes, empty-only otherwise
    remover = shutil.rmtree if arg_force else os.rmdir
    try:
        remover(arg_dir)
        Msg.dbg("Success: Directory Removed: %s" % (arg_dir))
    except OSError:
        if PathUtils.check_dir(arg_dir):
            Msg.err("Fail, Unable to Remove Directory: %s" % (arg_dir))
            return False
        Msg.warn("Directory does not exists, Remove Failed: %s " % (arg_dir))
    return True
def process_task_list(self):
    """Dispatch each task file in self.task_list from the current directory."""
    for task_file in self.task_list:
        # a triggered shutdown stops further task processing
        if self.is_terminated():
            return
        current_dir = PathUtils.current_dir()
        Msg.user("Task list current dir %s" % current_dir)
        try:
            self.process_task_file(task_file, current_dir)
        except Exception as task_err:
            # log and continue with the next task file
            Msg.error_trace()
            Msg.err(str(task_err))
            Msg.blank()
def process_summary_tasks(self, arg_ofile, arg_sum_level):
    """Write the per-task summary sections and accumulate overall totals.

    Iterates self.tasks, writing a "Task:" header and that task's summary
    lines to arg_ofile at the given reporting level. Stores and returns the
    (instruction, cycle) totals. A failing task is logged and skipped.
    """
    Msg.blank("info")
    my_instr_count = 0
    my_cycle_count = 0
    # get the outer task
    for my_key in self.tasks:
        try:
            arg_ofile.write("\nTask: %s\n" % (my_key))
            task_instr_count, task_cycle_count = self.process_summary_task(
                arg_ofile, self.tasks[my_key], arg_sum_level)
            my_instr_count += task_instr_count
            my_cycle_count += task_cycle_count
        except Exception:
            # narrowed from a bare except, which also caught
            # SystemExit/KeyboardInterrupt
            Msg.error_trace()
            Msg.err("Processing Task %s, Skipping Item ...." % (my_key))
    self.total_instruction_count = my_instr_count
    self.total_cycle_count = my_cycle_count
    return my_instr_count, my_cycle_count
def open_output(arg_class, arg_fout=None, arg_ferr=None):
    """Return a (stdout, stderr) handle pair for a child process.

    Each handle is an opened file when a path is given, otherwise PIPE.
    If a file cannot be opened the error is logged and PIPE is used.
    """
    Msg.dbg("arg_fout: %s, arg_ferr: %s" % (str(arg_fout), str(arg_ferr)))
    try:
        # A short-circuiting conditional is required here: the original used
        # SysUtils.ifthen, which evaluates BOTH branches eagerly, so
        # open(None, "w") was attempted on every no-path call and a spurious
        # TypeError was logged before falling back to PIPE.
        my_fout = PIPE if arg_fout is None else open(arg_fout, "w")
    except Exception as arg_ex:
        Msg.err("stdout: " + str(arg_ex))
        my_fout = PIPE
    try:
        my_ferr = PIPE if arg_ferr is None else open(arg_ferr, "w")
    except Exception as arg_ex:
        Msg.err("stderr: " + str(arg_ex))
        my_ferr = PIPE
    return (my_fout, my_ferr)
def process_groups(self, arg_ofile):
    """Write a summary section per task group and accumulate run totals.

    For each group in self.groups.task_groups(), writes begin/end markers,
    the per-item lines (via process_group_items), and the group's count,
    elapsed time, and instructions-per-second. Returns the overall
    (instruction count, elapsed time) pair; a failing group is logged and
    skipped without contributing to the totals.
    """
    my_total_count = 0
    my_total_elapsed = 0
    my_groups = self.groups.task_groups()
    # Msg.trace("PerformanceSummaryItem::process_groups")
    for my_group, my_items in my_groups.items():
        try:
            my_str = "\nBegin Group: %s\n" % (my_group)
            arg_ofile.write(my_str)
            Msg.blank("info")
            Msg.info(my_str)
            my_grp_count, my_grp_elapsed = self.process_group_items(
                arg_ofile, my_items
            )
            my_total_count += my_grp_count
            my_total_elapsed += my_grp_elapsed
            my_line = "\nGroup Instructions: %3d\n" % (my_grp_count)
            my_line += "Group Elapsed Time: %0.3f\n" % (my_grp_elapsed)
            my_line += "Group Instructions per Second: %0.3f\n" % (
                # guard against division by zero for an empty/instant group
                SysUtils.ifthen(
                    bool(my_grp_elapsed), my_grp_count / my_grp_elapsed, 0
                )
            )
            my_line += "End Group: %s\n" % (my_group)
            Msg.info(my_line)
            arg_ofile.write(my_line)
        except Exception as arg_ex:
            Msg.error_trace()
            # NOTE(review): this reports type(arg_ex) rather than the
            # message — confirm whether str(arg_ex) was intended.
            Msg.err(
                "Unable to process, Group: %s, Reason: %s"
                % (my_group, type(arg_ex))
            )
    return my_total_count, my_total_elapsed
def open_output(cls, arg_fout=None, arg_ferr=None):
    """Return a (stdout, stderr) handle pair for a child process.

    Each handle is an opened file when a path is given, otherwise
    subprocess.PIPE. If a file cannot be opened the error is logged and
    PIPE is used.
    """
    Msg.dbg("arg_fout: %s, arg_ferr: %s" % (str(arg_fout), str(arg_ferr)))
    try:
        # A short-circuiting conditional is required here: the original used
        # SysUtils.ifthen, which evaluates BOTH branches eagerly, so
        # open(None, "w") was attempted on every no-path call and a spurious
        # TypeError was logged before falling back to PIPE.
        my_fout = subprocess.PIPE if arg_fout is None else open(arg_fout, "w")
    except Exception as arg_ex:
        Msg.err("stdout: " + str(arg_ex))
        my_fout = subprocess.PIPE
    try:
        my_ferr = subprocess.PIPE if arg_ferr is None else open(arg_ferr, "w")
    except Exception as arg_ex:
        Msg.err("stderr: " + str(arg_ex))
        my_ferr = subprocess.PIPE
    return (my_fout, my_ferr)