def update_period(self, fName, blk, i):
    """Update the recorded begin/end period of the atomic variable fName.

    fName -- file name; its base and time range are extracted by
             qa_util.f_time_range().
    blk   -- list of log lines; blk[i+1] is expected to be a 'begin:' line
             and blk[i+2] an 'end:' line.
    i     -- index into blk of the current line.
    """
    # t_r contains ( fBase, StartTime, EndTime )
    t_r = qa_util.f_time_range(fName)

    try:
        ix = self.prd_name.index(t_r[0])
    except ValueError:  # was a bare except; list.index raises ValueError only
        # first occurrence of this base: register it and init the period
        # NOTE(review): assumes prd_beg/prd_end support assignment at a new
        # index (dict-like or pre-sized) — confirm against their declaration
        ix = len(self.prd_name)
        self.prd_name.append(t_r[0])
        self.prd_beg[ix] = 'x'  # greater than any date
        self.prd_end[ix] = ''   # smaller than any date

    # widen the period: earliest 'begin:' ...
    words = blk[i + 1].split()
    if words[0] == 'begin:' and words[1] < self.prd_beg[ix]:
        self.prd_beg[ix] = words[1]

    # ... and latest 'end:'
    # BUG FIX: original read 'blk[i+2].split' without calling the method,
    # which made words a bound method and words[0] raise TypeError
    words = blk[i + 2].split()
    if words[0] == 'end:' and words[1] > self.prd_end[ix]:
        self.prd_end[ix] = words[1]

    return
def update_period(self, fName, blk, i):
    """Track the min 'begin:' and max 'end:' dates for fName's file base.

    Looks up the base returned by qa_util.f_time_range(fName) in
    self.prd_name (registering it on first sight) and widens the stored
    period from the 'begin:'/'end:' lines at blk[i+1] and blk[i+2].
    """
    # t_r contains ( fBase, StartTime, EndTime )
    t_r = qa_util.f_time_range(fName)

    try:
        ix = self.prd_name.index(t_r[0])
    except ValueError:  # narrowed from bare except; .index raises ValueError
        # new base: initialise with sentinels that any real date replaces
        # NOTE(review): prd_beg/prd_end indexed at len(prd_name) — presumably
        # dict-like containers; verify where they are initialised
        ix = len(self.prd_name)
        self.prd_name.append(t_r[0])
        self.prd_beg[ix] = 'x'  # greater than any date
        self.prd_end[ix] = ''   # smaller than any date

    words = blk[i + 1].split()
    if words[0] == 'begin:' and words[1] < self.prd_beg[ix]:
        self.prd_beg[ix] = words[1]

    # BUG FIX: '.split' was missing the call parentheses, so words became a
    # bound method and the subscript below would raise TypeError
    words = blk[i + 2].split()
    if words[0] == 'end:' and words[1] > self.prd_end[ix]:
        self.prd_end[ix] = words[1]

    return
def run():
    # Drive the QA check: feed (data_path, file-list, t_vars) work items into
    # a queue consumed by one in-process QaExec (single thread) or a pool of
    # QaLauncher daemon threads.
    # NOTE(review): relies on module-level globals (qaConf, g_vars, log,
    # t_vars, getPaths, get_all_logfiles, get_next_variable) — not visible here.

    # SHOW_EXP: only list the log files, then quit
    if qaConf.isOpt("SHOW_EXP"):
        f_log = get_all_logfiles()
        sys.exit(0)

    # SHOW/NEXT imply serial processing
    if qaConf.isOpt('SHOW') or qaConf.isOpt('NEXT'):
        g_vars.thread_num = 1

    # the queue is two items longer than the number of threads
    queue = Queue(maxsize=g_vars.thread_num + 2)

    launch_list = []

    if g_vars.thread_num < 2:
        # a single thread: run the executor inline, no worker threads
        qaExec = QaExec(log, qaConf, g_vars)
        launch_list.append(qaExec)
    else:
        # one launcher per worker thread; daemon threads consume the queue
        for i in range(g_vars.thread_num):
            launch_list.append(QaLauncher(log, qaConf, g_vars))

        for i in range(g_vars.thread_num):
            t = Thread(target=launch_list[i].start, args=(queue, ))
            t.daemon = True
            t.start()

    # NEXT_VAR: process only the first next_var variables
    is_next_var = False
    if qaConf.isOpt('NEXT_VAR'):
        is_next_var = True
        next_var = qaConf.getOpt('NEXT_VAR')
        count_next_var = 0

    # producer loop: one iteration per variable (or explicit file)
    while True:
        if qaConf.isOpt('EXPLICIT_FILES'):
            if len(qaConf.dOpts['EXPLICIT_FILES']):
                # pop the next explicitly given file
                data_path, f = os.path.split(qaConf.dOpts['EXPLICIT_FILES'][0])
                t_r = qa_util.f_time_range(f)
                fBase = t_r[0]
                fNames = [f]

                del qaConf.dOpts['EXPLICIT_FILES'][0]
            else:
                # all explicit files consumed: signal end-of-queue to workers
                queue.put(('---EOQ---', '', t_vars), block=True)
                break
        else:
            if is_next_var:
                if count_next_var == next_var:
                    break
                count_next_var += 1

            try:
                # fBase: list of filename bases corresponding to variables;
                # usually a single one. F
                # fNames: corresponding sub-temporal files
                # NOTE(review): .next() is Python-2 iterator protocol
                data_path, fBase, fNames = getPaths.next()
            except StopIteration:
                queue.put(('---EOQ---', '', t_vars), block=True)
                break
            else:
                isNoPath = False

        # return a list of files which have not been processed, yet.
        # Thus, the list could be empty
        fL = get_next_variable(data_path, fBase, fNames)

        if len(fL) == 0:
            continue

        t_vars.fBase = fBase
        # deep copy so workers don't see later mutations of the shared t_vars
        #queue.put( (data_path, fL, t_vars), block=True)
        queue.put((data_path, fL, copy.deepcopy(t_vars)), block=True)

        if g_vars.thread_num < 2:
            # a single thread: process the just-queued item synchronously;
            # a False return requests termination
            if not launch_list[0].start(queue, ):
                break

    if g_vars.thread_num > 1:
        # wait until all queued work has been processed by the workers
        queue.join()

    if g_vars.thread_num < 2:
        launch_list[0].printStatusLine()
    else:
        launch_list[0].qa_exec.printStatusLine()

    return
def run(self, f_log):
    # Parse one QA check log-file: collect annotations and the atomic time
    # ranges of each variable, then finalize periods and merge annotations.
    # f_log -- path to a '<name>.log' file (the '.log' suffix is stripped).

    # extraction of annotations and atomic time ranges from log-files
    log_path, log_name = os.path.split(f_log)
    log_name = log_name[0:-4]  # strip the 4-char extension ('.log')

    # sub-directories in check_logs
    self.f_annot = os.path.join(log_path, 'Annotations')
    self.f_perd = os.path.join(log_path, 'Period')
    self.sum_dir = os.path.join(log_path, 'Summary', log_name)
    qa_util.mkdirP(self.f_annot)
    qa_util.mkdirP(self.f_perd)
    qa_util.mkdirP(self.sum_dir)

    self.f_perd = os.path.join(self.f_perd, log_name + '.period')

    # time range of atomic variables; in order to save mem,
    # beg and end, respectively, are linked to the name by the
    # index of the corresponding atomic variable name in var
    self.fName_ids=[]
    self.fName_dt_id={}  # each fName_id gets a list of dt ids
    self.path_ids=[]
    self.f_p_ids={}      # fName_id -> path_id (paths may hold several vars)
    self.f_items=[]
    self.p_items=[]
    self.p_drs=[]
    self.var_ids={}      # contains all [ids] with the same variable name in {}
    self.atomicBeg=[]    # atomic time interval
    self.atomicEnd=[]    # atomic time interval
    self.dt=[]           # time intervals of sub-temp files
    self.annot_capt=[]   # brief annotations
    self.annot_tag=[]    # corresponding impact-tag
    self.annot_scope=[]  # brief annotations
    self.annot_fName_id=[]     # for each var involved
    self.annot_path_id=[]      #
    self.annot_var_ix=[]       # only project variable names
    self.annot_fName_dt_id=[]  # for each time interval of each var involved

    # count total occurrences (good and bad)
    self.file_count=0
    self.var_dt_count=[]  # for all frequencies

    # reading and processing of the logfile
    with open(f_log, 'r') as fd:
        while True:
            # read the lines of the next check
            blk = self.get_next_blk(fd=fd)
            sz = len(blk) - 1

            if sz == -1:
                break  # end of file: get_next_blk returned an empty block

            isMissPeriod=True  # fx or segmentation fault
            i=-1
            while i < sz:
                i = i+1
                # split into keyword + remainder; leading ' -' is YAML-ish markup
                words = blk[i].lstrip(' -').split(None,1)

                if words[0] == 'file:':
                    # fse contains ( var, StartTime, EndTime ); the
                    # times could be empty strings or EndTime could be empty
                    fse = qa_util.f_time_range(words[1])
                    self.set_curr_dt(fse)
                    fName_id_ix = self.decomposition(words[1], self.f_items,
                                                     self.fName_ids, self.prj_fName_sep)
                    self.file_count += 1

                    # for counting atomic variable's sub_temps for all freqs
                    try:
                        self.fName_dt_id[fName_id_ix]
                    except:
                        self.fName_dt_id[fName_id_ix] = [self.dt_id]
                    else:
                        self.fName_dt_id[fName_id_ix].append(self.dt_id)

                    # resolve the variable name of this file, if decomposed
                    try:
                        vName = self.f_items[self.fName_ids[fName_id_ix][self.prj_var_ix]]
                    except:
                        pass
                    else:
                        # dict of varNames to contained var_ids
                        try:
                            self.var_ids[vName]
                        except:
                            self.var_ids[vName] = [fName_id_ix]
                        else:
                            # append only if not yet contained
                            try:
                                self.var_ids[vName].index(fName_id_ix)
                            except:
                                self.var_ids[vName].append(fName_id_ix)

                    if fName_id_ix > len(self.atomicBeg) - 1 :
                        # init for a new variable
                        self.atomicBeg.append('')  # greater than any date
                        self.atomicEnd.append('')  # smaller than any date

                elif words[0] == 'data_path:':
                    path_id = self.decomposition(words[1], self.p_items,
                                                 self.path_ids, self.prj_path_sep)
                    # in particular for paths with several variables
                    self.f_p_ids[fName_id_ix] = path_id

                elif words[0] == 'period:':
                    # time ranges of atomic variables
                    # indexed by self.curr_dt within the function
                    i = self.period_add(fName_id_ix, path_id, fse, blk, i)
                    isMissPeriod=False

                elif words[0] == 'event:':
                    # an event before any 'period:' line: fall back to the
                    # time range taken from the file name (if it has an end)
                    if isMissPeriod and len(fse[2]):
                        self.subst_period(fName_id_ix, path_id, fse)
                        isMissPeriod=False

                    # annotation and associated indices of properties
                    i = self.annotation_add(path_id, fName_id_ix, blk, i)

                elif words[0] == 'status:':
                    if isMissPeriod and len(fse[2]):
                        self.subst_period(fName_id_ix, path_id, fse)

    # test for ragged time intervals of atomic variables for given frequency
    self.period_final()

    self.annotation_merge()

    return
def run(self, f_log):
    """Parse one QA check log-file and build the summary structures.

    Collects annotations and atomic time ranges per (file, path) pair,
    then finalizes periods, synthesizes tags, merges annotations and
    sends the notification mail.

    f_log -- path to a '<name>.log' file; an empty string or a missing
             file makes the method return without doing anything.
    """
    if len(f_log) == 0:
        return

    if not os.path.isfile(f_log):
        print('qa_summary: ' + f_log + ' : no such file')
        return

    self.logfile = f_log

    # extraction of annotations and atomic time ranges from log-files
    self.log_path, self.log_name = os.path.split(f_log)
    self.log_name = self.log_name[0:-4]  # strip the 4-char '.log' suffix

    # sub-directories in check_logs
    self.f_annot = os.path.join(self.log_path, 'Annotations')
    self.f_period = os.path.join(self.log_path, 'Period')
    self.tag_dir = os.path.join(self.log_path, 'Tags', self.log_name)
    qa_util.mkdirP(self.f_annot)
    qa_util.mkdirP(self.f_period)
    #qa_util.mkdirP(self.tag_dir)

    # time range of atomic variables; in order to save mem,
    # beg and end, respectively, are linked to the name by the
    # index of the corresponding atomic variable name in var
    self.fName_ids = []
    self.fName_dt_id = {}   # each fName_id gets a list of dt ids
    self.path_ids = []
    self.fp_ids = []        # '<file_id>_<path_id>' keys
    self.f_items = []
    self.p_items = ['*']    # a placeholder
    self.atomicBeg = []     # atomic time interval: index by
    self.atomicEnd = []     # 'var_id'_'path_id'
    self.dt = []            # time intervals of sub-temp files
    self.annot_capt = []    # brief annotations
    self.annot_impact = []  # corresponding severity level
    self.annot_tag = []     # corresponding tag
    self.annot_fName_id = []        # for each var involved
    self.annot_path_id = []
    self.annot_fName_dt_id = []     # for each time interval of each var involved
    self.annot_example_capt = []    # example for grouped annotations
    self.annot_example_isGroup = []

    # count total occurrences (good and bad)
    self.file_count = 0

    # NOTE: a second os.path.isfile(f_log) guard stood here; removed as
    # redundant — the same check already returned above.

    line_num = 0
    isMissedStatus = False

    # reading and processing of the logfile
    with open(f_log, 'r') as fd:
        while True:
            if isMissedStatus:
                # previous block ended without a 'status:' line
                # BUG FIX: was a Python-2 print statement; the call form is
                # valid in both Python 2 and 3 and matches the usage above
                print('incomplete log-file at line ' + str(line_num))
                sys.exit(1)

            # read the lines of the next check
            blk, ln = self.get_next_blk(fd=fd)
            line_num += ln

            sz = len(blk) - 1
            if sz == -1:
                break  # end of file

            isMissedPeriod = True  # fx or segmentation fault
            isMissedStatus = True

            # note: i is pre-incremented, so blk[0] is intentionally skipped
            i = 0
            while i < sz:
                i += 1
                words = blk[i].lstrip(' -').split(None, 1)

                if len(words) == 0:
                    # a string of just '-' would result in this
                    words = ['-----------']

                if words[0] == 'file:' or words[0] == 'data-set:':
                    # fse contains ( var, StartTime, EndTime ); the
                    # times could be empty strings or EndTime could be empty
                    fse = qa_util.f_time_range(words[1])
                    self.set_curr_dt(fse)
                    file_id = self.decomposition(words[1], self.f_items,
                                                 self.fName_ids,
                                                 self.prj_fName_sep)
                    self.file_count += 1

                elif words[0] == 'data_path:':
                    # used later
                    path_id = self.decomposition(words[1], self.p_items,
                                                 self.path_ids,
                                                 self.prj_data_sep)

                    # (file, path) pair key; index into the atomic* lists
                    fp_id = str(file_id) + '_' + str(path_id)
                    try:
                        fp_ix = self.fp_ids.index(fp_id)
                    except ValueError:  # narrowed from bare except
                        fp_ix = len(self.fp_ids)
                        self.fp_ids.append(fp_id)

                    # for counting atomic variable's sub_temps for all freqs
                    try:
                        self.fName_dt_id[fp_ix]
                    except KeyError:  # narrowed from bare except
                        self.fName_dt_id[fp_ix] = [self.dt_id]
                    else:
                        self.fName_dt_id[fp_ix].append(self.dt_id)

                    if fp_ix > len(self.atomicBeg) - 1:
                        # init for a new variable
                        self.atomicBeg.append('')  # greater than any date
                        self.atomicEnd.append('')  # smaller than any date

                elif words[0] == 'period:':
                    # time ranges of atomic variables
                    # indexed by self.curr_dt within the function
                    i = self.period_add(fp_ix, fse, blk, i)
                    isMissedPeriod = False

                elif words[0] == 'event:':
                    # no 'period:' seen yet: substitute the file-name range
                    if isMissedPeriod and len(fse[2]):
                        self.subst_period(fp_ix, fse)
                        isMissedPeriod = False

                    # annotation and associated indices of properties
                    i = self.annotation_add(file_id, path_id, blk, i)

                elif words[0] == 'status:':
                    isMissedStatus = False
                    if isMissedPeriod and len(fse[2]):
                        self.subst_period(fp_ix, fse)

    if self.file_count == 0:
        return

    # test for ragged time intervals of atomic variables for given frequency
    self.period_final()
    self.annot_synthetic_tag()
    self.annotation_merge()
    self.sendMail()

    return