def get_failed_attributes(self,t=7200):
    """
    Return the sorted list of archived attributes that look broken:
    attributes whose last stored value is None although the attribute
    is currently readable, plus attributes whose last update is older
    than ``t`` seconds although the attribute is currently readable.

    :param t: staleness threshold in seconds (default 7200 = 2 hours)
    :returns: sorted list of attribute names
    """
    vals = self.load_last_values(self.get_attributes())
    # Attributes with no stored value at all, or a stored None value
    nones = [k for k,v in vals.items() if (not v or v[1] is None)]
    # Keep only those readable right now; an unreadable attribute is a
    # device problem, not an archiving problem
    nones = [k for k in nones if fn.read_attribute(k) is not None]
    limit = fn.now()-t  # hoisted: same threshold for every attribute
    # FIX: guard `v` before indexing v[0]; an attribute with an empty
    # value that was dropped from `nones` above (because it is not
    # readable) used to crash here with an IndexError/TypeError
    lost = [k for k,v in vals.items()
            if k not in nones and v and v[0] < limit]
    lost = [k for k in lost if fn.read_attribute(k) is not None]
    failed = nones+lost
    return sorted(failed)
def get_failed_attributes(self, t=7200):
    """
    Return the sorted list of attributes whose archiving looks broken.

    Two failure modes are reported, both restricted to attributes that
    are currently readable (unreadable ones are device failures, not
    archiving failures):
      * last stored value is missing or None ("nones")
      * last update is older than ``t`` seconds ("lost")

    :param t: staleness threshold in seconds (default 7200 = 2 hours)
    :returns: sorted list of attribute names
    """
    vals = self.load_last_values(self.get_attributes())
    nones = [k for k, v in vals.items() if (not v or v[1] is None)]
    nones = [k for k in nones if fn.read_attribute(k) is not None]
    limit = fn.now() - t
    # FIX: check `v` before indexing; attributes with an empty value
    # that were filtered out of `nones` (unreadable) previously raised
    # an IndexError/TypeError on v[0]
    lost = [
        k for k, v in vals.items()
        if k not in nones and v and v[0] < limit
    ]
    lost = [k for k in lost if fn.read_attribute(k) is not None]
    return sorted(nones + lost)
def apply_search(self):
    """
    Apply the text typed in the search box to the attribute form.

    The search text may carry an optional comparison suffix separated
    by ==, =, < or > (e.g. "some/dev/*/pressure > 1e-5"); in that case
    only attributes whose current reading satisfies the formula are
    kept in the model.
    """
    signs = "==|=|<|>"
    txt = str(self.search.text() or '*')
    if fandango.clsearch(signs, txt):
        # Split "pattern <op> value" into the name pattern and formula
        t = fandango.re.split(signs, txt, 1)[0]
        txt, formula = t, txt.replace(t, '')
    else:
        formula = ''
    txt = txt.replace(' ', '*').strip()
    txt = self.preffix + txt + self.suffix
    model = fandango.get_matching_attributes(txt)
    if self.labels and '/' in txt:
        # Also match against attribute labels, not only attribute names
        dev, attr = txt.rsplit('/', 1)
        model.extend(
            fandango.tango.get_matching_device_attribute_labels(
                dev, attr).keys())
    if formula and model:
        nm = []
        for m in model:
            try:
                # SECURITY: eval() on an operator typed by the user of
                # this GUI; acceptable for an interactive tool, do NOT
                # reuse this pattern on untrusted/remote input
                f = '%s %s' % (fandango.read_attribute(m), formula)
                if eval(f):
                    nm.append(m)
            # FIX: narrowed from bare `except:` so Ctrl-C and
            # SystemExit are no longer swallowed; unreadable or
            # non-numeric attributes are still skipped best-effort
            except Exception:
                pass
        model = nm
    self.form.setModel(model)
def check_attributes(self, attrs='', load=False, t0=0):
    """
    Classify attributes by their archiving state.

    :param attrs: dict {attr: values}, a filter string, or a list of
        attribute names (empty string means all attributes)
    :param load: if True, last values are (re)loaded from the database
    :param t0: reference time in seconds (defaults to now)
    :returns: dict with keys:
        'attrs'  - checked attribute names
        'vals'   - their last values
        'novals' - attributes with no stored value
        'nones'  - readable attributes whose stored value is None
        'down'   - attributes that are not readable
        'lost'   - attributes not updated in the last 2 hours
    """
    db, t0, result, vals = self, t0 or fn.now(), {}, {}
    print('Checking %s' % str(db))
    if fn.isDictionary(attrs):
        attrs, vals = attrs.keys(), attrs
        # Flatten {attr: {date: value}} into {attr: value} (py2 .values())
        if isinstance(vals.values()[0], dict):
            vals = dict((k, v.values()[0]) for k, v in vals.items())
    else:
        if fn.isString(attrs):
            attrs = fn.filtersmart(db.get_attributes(), attrs)
        load = True
    if load:
        for a in attrs:
            vals.update(db.load_last_values(a))
    print('\t%d attributes' % len(attrs))
    result['attrs'] = attrs
    result['vals'] = vals
    result['novals'] = [a for a, v in vals.items() if not v]
    result['nones'], result['down'], result['lost'] = [], [], []
    for a, v in vals.items():
        # FIX: was `if not v or [1] is None` — `[1]` is a literal list,
        # never None, so the 'nones' bucket was never populated
        if not v or v[1] is None:
            if not fn.read_attribute(a):  # USE read not check!!
                result['down'].append(a)
            elif v:
                # Readable, timestamp present, but the value is None
                result['nones'].append(a)
            # Readable with no stored value: already counted in
            # result['novals']; FIX: do not append it a second time
        elif v[0] < (t0 - 7200):
            result['lost'].append(a)
    print('\t%d attributes have no values' % len(result['novals']))
    print('\t%d attributes are not readable' % len(result['down']))
    print('\t%d attributes are not updated' % len(result['lost']))
    print('\t%d attributes have None values' % len(result['nones']))
    return result
def check_attributes(self,attrs = '', load = False, t0 = 0):
    """
    Classify attributes by their archiving state.

    :param attrs: dict {attr: values}, a filter string, or a list of
        attribute names (empty string means all attributes)
    :param load: if True, last values are (re)loaded from the database
    :param t0: reference time in seconds (defaults to now)
    :returns: dict with 'attrs', 'vals', 'novals' (no stored value),
        'nones' (stored value is None), 'down' (not readable) and
        'lost' (not updated in the last 2 hours) keys
    """
    db,t0,result,vals = self,t0 or fn.now(),{},{}
    print('Checking %s' % str(db))
    if fn.isDictionary(attrs):
        attrs,vals = attrs.keys(),attrs
        # Flatten {attr: {date: value}} into {attr: value} (py2 .values())
        if isinstance(vals.values()[0],dict):
            vals = dict((k,v.values()[0]) for k,v in vals.items())
    else:
        if fn.isString(attrs):
            attrs = fn.filtersmart(db.get_attributes(),attrs)
        load = True
    if load:
        for a in attrs:
            vals.update(db.load_last_values(a))
    print('\t%d attributes'%len(attrs))
    result['attrs'] = attrs
    result['vals'] = vals
    result['novals'] = [a for a,v in vals.items() if not v]
    result['nones'],result['down'],result['lost'] = [],[],[]
    for a,v in vals.items():
        # FIX: was `if not v or [1] is None` — `[1]` is a literal list,
        # never None, so the 'nones' bucket was never populated
        if not v or v[1] is None:
            if not fn.read_attribute(a):  #USE read not check!!
                result['down'].append(a)
            elif v:
                # Readable, timestamp present, but the value is None
                result['nones'].append(a)
            # Readable with no stored value: already counted in
            # result['novals']; FIX: do not append it a second time
        elif v[0] < (t0 - 7200):
            result['lost'].append(a)
    print('\t%d attributes have no values'%len(result['novals']))
    print('\t%d attributes are not readable'%len(result['down']))
    print('\t%d attributes are not updated'%len(result['lost']))
    print('\t%d attributes have None values'%len(result['nones']))
    return result
def check_archiving_schema(
        schema='hdb',
        # NOTE(review): mutable default arguments; `values` is mutated
        # below, so results leak between calls relying on the default
        attributes=[], values={},
        ti=None,
        period=7200,
        old_period=24*3600*90,
        exclude=['*/waveid', '*/wavename', '*/elotech-*'],
        use_index=True,
        loads=True,
        action=False,
        trace=True,
        export=None):
    """
    Check the health of an archiving schema: which attributes are
    archived, updated, lost, unreadable or recording empty values.

    :param schema: archiving schema name (e.g. 'hdb')
    :param attributes: regexp or list of attributes to check (all if empty)
    :param values: dict of preloaded last values, or a .pck/.json filename
    :param ti: reference time (now if None; date strings are converted)
    :param period: seconds without update before an attr is "not updated"
    :param old_period: age beyond which a non-archived attr counts as
        removed; interpreted as days if <1000, hours if <3600, else seconds
    :param exclude: wildcard patterns of attributes to skip
    :param use_index: use MySQL index tables instead of per-attr queries
    :param loads: if True, print per-archiver attribute loads
    :param action: optional action: 'start_devices' or 'restart_all'
    :param trace: kept for API compatibility (only used by dead code below)
    :param export: None, True (-> 'txt'), or CSV/list of
        json/pck/pickle/txt output formats written under /tmp
    :returns: fn.Struct with the check results, or None if nothing to check
    """
    # Normalize the reference time: accept None, a date string or a number
    ti = fn.now() if ti is None else str2time(ti) if isString(ti) else ti
    api = pta.api(schema)
    is_hpp = isinstance(api, pta.HDBpp)
    check = dict()
    # Interpret old_period units: <1000 means days, <3600 means hours
    old_period = 24*3600*old_period if old_period < 1000 \
        else (24*old_period if old_period < 3600 else old_period)

    allattrs = api.get_attributes() if hasattr(
        api, 'get_attributes') else api.keys()
    print('%s contains %d attributes' % (schema, len(allattrs)))

    if attributes:
        if fn.isString(attributes) and fn.isRegexp(attributes):
            tattrs = [a for a in allattrs if clsearch(attributes, a)]
        else:
            attributes = map(fn.tango.get_normal_name, fn.toList(attributes))
            # NOTE(review): `in allattrs` is trivially true here, so the
            # explicit attribute list does NOT filter anything; this
            # probably should be `... in attributes` — confirm
            tattrs = [
                a for a in allattrs
                if fn.tango.get_normal_name(a) in allattrs
            ]
    else:
        tattrs = allattrs

    excluded = [a for a in tattrs if any(fn.clmatch(e, a) for e in exclude)]
    tattrs = [a for a in tattrs if a not in excluded]
    print('%d attributes to check' % len(tattrs))
    if not len(tattrs):
        return
    if excluded:
        print('\t%d attributes excluded' % len(excluded))

    # Map each attribute to its archiver device (API flavor dependent)
    archived = {}
    for a in tattrs:
        if hasattr(api, 'get_attribute_archiver'):
            arch = api.get_attribute_archiver(a)
        else:
            arch = api[a].archiver
        if arch:
            archived[a] = arch

    print('\t%d attributes are archived' % len(archived))

    #Getting Tango devices currently not running
    # NOTE(review): `alldevs` is computed but only used by the
    # commented-out code below
    alldevs = set(t.rsplit('/', 1)[0] for t in tattrs)
    #tdevs = filter(fn.check_device,alldevs)
    #nodevs = [fn.tango.get_normal_name(d) for d in alldevs if d not in tdevs]
    #if nodevs:
    #print('\t%d devices are not running' % len(nodevs))

    archs = sorted(set(archived.values()))
    if loads:
        # `loads` is rebound from boolean flag to
        # {archiver_server: [attributes]} for reporting
        astor = fn.Astor()
        astor.load_from_devs_list(archs)
        loads = fn.defaultdict(list)
        for k, s in astor.items():
            for d in s.get_device_list():
                d = fn.tango.get_normal_name(d)
                for a in archived:
                    if fn.tango.get_normal_name(archived[a]) == d:
                        loads[k].append(a)
        for k, s in sorted(loads.items()):
            print('\t%s archives %d attributes' % (k, len(s)))

    noarchs = [
        fn.tango.get_normal_name(d) for d in archs if not fn.check_device(d)
    ]
    if noarchs:
        print('\t%d archivers are not running: %s' % (len(noarchs), noarchs))

    ###########################################################################
    # Obtain last values: from file, from database, or from index tables
    ###########################################################################

    if isString(values) and values.endswith('.pck'):
        print('\nLoading last values from %s file\n' % values)
        import pickle
        # NOTE(review): the open() handle is never closed, and `pickle`
        # is imported only in this branch although pickle.dump is also
        # used by the export code at the end — confirm a module-level
        # import exists, otherwise export='pck' raises NameError
        values = pickle.load(open(values))
    elif isString(values) and values.endswith('.json'):
        print('\nLoading last values from %s file\n' % values)
        values = fn.json2dict(values)
    elif not use_index or is_hpp:
        print('\nGetting last values ...\n')
        for a in tattrs:
            values[a] = api.load_last_values(a)
    else:
        print('\nGetting updated tables from database ...\n')
        tups = pta.utils.get_table_updates(schema)
        # Some tables do not update MySQL index tables
        # NOTE(review): `tarch` is never defined in this function, so
        # this branch raises NameError; also `check_attribute(a, ...)`
        # uses the stale loop variable `a` instead of `t` — confirm
        t0 = [a for a in tarch if a in tattrs and not tups[api[a].table]]
        check.update((t, check_attribute(a, readable=True))
                     for t in t0 if not check.get(t))
        t0 = [t for t in t0 if check[t]]
        print('%d/%d archived attributes have indexes not updated ...'
              % (len(t0), len(tarch)))
        if t0 and len(t0) < 100:
            vs = api.load_last_values(t0)
            tups.update((api[t].table, api[t].last_date) for t in t0)
        for a in tattrs:
            if a in tups:
                values[a] = [tups[api[a].table], 0]

    # Normalize values to [epoch_seconds, value] pairs (or []/None)
    for k, v in values.items():
        if (len(v) if isSequence(v) else v):
            if isinstance(v, dict):
                v = v.values()[0]
            if isSequence(v) and len(v) == 1:
                v = v[0]
            if v and not isNumber(v[0]):
                # Convert datetime-like timestamps to epoch seconds
                v = [date2time(v[0]), v[1]]
            values[k] = v
        else:
            values[k] = [] if isSequence(v) else None
    print('%d values obtained' % len(values))

    ###########################################################################
    # Classify attributes by update time and readability
    ###########################################################################

    now = fn.now()
    result = fn.Struct()
    times = [t[0] for t in values.values() if t]
    futures = [t for t in times if t > now]
    times = [t for t in times if t < now]
    tmiss = []
    tfutures = [k for k, v in values.items() if v and v[0] in futures]
    # NOTE(review): min()/max() raise ValueError if no valid times exist
    tmin, tmax = min(times), max(times)
    print('\toldest update was %s' % time2str(tmin))
    print('\tnewest update was %s' % time2str(tmax))
    if futures:
        print('\t%d attributes have values in the future!'
              % len(futures))

    tnovals = [a for a in archived if not values.get(a, None)]
    if tnovals:
        print('\t%d archived attributes have no values' % len(tnovals))
    try:
        tmiss = [
            a for a, v in values.items()
            if v and old_period < v[0] < ti - period and a not in archived
        ]
    except:
        print(values.items()[0])
    if tmiss:
        print('\t%d/%d attrs with values are not archived anymore' %
              (len(tmiss), len(tattrs)))

    result.Excluded = excluded
    result.Schema = schema
    result.All = tattrs
    result.Archived = values
    result.NoValues = tnovals
    result.MissingOrRemoved = tmiss
    result.TMin = tmin
    result.TMax = tmax
    result.Futures = tfutures

    # Updated = stored timestamp within `period` seconds of `ti`
    tup = sorted(a for a in values if values[a] and values[a][0] > ti - period)
    tok = [a for a in tup if values[a][1] not in (None, [])]
    print('\n%d/%d archived attributes are updated since %s - %s' %
          (len(tup), len(archived), ti, period))
    print('%d archived attributes are fully ok\n' % (len(tok)))

    tnotup = sorted(a for a in values
                    if values[a] and values[a][0] < ti - period)
    print('\t%d archived attrs are not updated' % len(tnotup))

    tupnoread = [
        a for a in tup if not values[a][1] and fn.read_attribute(a) is None
    ]

    reads = dict((a, fn.read_attribute(a)) for a in tnotup)
    tnotupread = [a for a in tnotup if reads[a] is not None]
    print('\t%d not updated attrs are readable (Lost)' % len(tnotupread))
    print('\t%d of them are not floats' %
          len([t for t in tnotupread if not isinstance(reads[t], float)]))
    print('\t%d of them are states' %
          len([t for t in tnotupread if t.lower().endswith('/state')]))
    print('\t%d of them seem motors' %
          len([t for t in tnotupread if t.lower().endswith('/position')]))

    tnotupevs = [a for a in tnotupread if fn.tango.check_attribute_events(a)]
    print('\t%d not updated attrs are readable and have events (LostEvents)' %
          len(tnotupevs))

    tnotupnotread = [a for a in tnotup if a not in tnotupread]
    print('\t%d not updated attrs are not readable' % len(tnotupnotread))

    result.Lost = tnotupread
    result.LostEvents = tnotupevs

    # HDB++ is event-based, so only readable attrs WITH events count as lost
    losts = (tnotupevs if is_hpp else
             tnotupread)

    # Compare stored value vs current reading for each lost attribute
    diffs = dict()
    for a in losts:
        try:
            v, vv = values.get(a, (None, ))[1], reads[a]
            if fn.isSequence(v):
                v = fn.toList(v)
            if fn.isSequence(vv):
                vv = fn.toList(vv)
            diffs[a] = v != vv
            if fn.isSequence(diffs[a]):
                diffs[a] = any(diffs[a])
            else:
                diffs[a] = bool(diffs[a])
        except:
            diffs[a] = None

    # Group not-updated attributes by "family" (two path members)
    fams = fn.defaultdict(list)
    for a in tnotupread:
        fams['/'.join(a.split('/')[-4:-2])].append(a)
    for f in sorted(fams):
        print('\t%s: %d attrs not updated' % (f, len(fams[f])))
    print()

    differ = [a for a in losts if diffs[a]]  #is True]
    print('\t%d/%d not updated attrs have also wrong values!!!' %
          (len(differ), len(losts)))

    rd = pta.Reader()
    only = [a for a in tnotupread if len(rd.is_attribute_archived(a)) == 1]
    print('\t%d/%d not updated attrs are archived only in %s' %
          (len(only), len(losts), schema))

    result.LostDiff = differ
    print()

    # Report which archiver servers hold the lost attributes
    archs = sorted(set(archived.values()))
    astor = fn.Astor()
    astor.load_from_devs_list(archs)
    badloads = fn.defaultdict(list)
    for k, s in astor.items():
        for d in s.get_device_list():
            d = fn.tango.get_normal_name(d)
            for a in losts:
                if fn.tango.get_normal_name(archived[a]) == d:
                    badloads[k].append(a)
    for k, s in badloads.items():
        if len(s):
            print('\t%s archives %d lost attributes' % (k, len(s)))

    print('\t%d updated attrs are not readable' % len(tupnoread))

    result.ArchivedAndReadable = tok
    result.Updated = tup
    result.NotUpdated = tnotup
    result.Unreadable = tnotupnotread
    #result.DeviceNotRunning = nodevs
    result.ArchiverNotRunning = noarchs
    result.LostFamilies = fams

    # Tnones is for readable attributes not being archived
    tnones = [
        a for a in archived
        if (a not in values or values[a] and values[a][1] in (None, []))
        and a not in tupnoread and a not in tnotupread
    ]
    tupnones = [a for a in tnones if a in tup]
    if tupnones:
        print('\t%d archived readable attrs record empty values' %
              len(tupnones))
    result.Nones = tnones

    if 0:
        # Dead code kept for reference: ratio/summary reporting that
        # depends on variables (readarch, tread, ...) not defined here
        get_ratio = lambda a, b: float(len(a)) / float(len(b))
        #result.ArchRatio = get_ratio([t for t in readarch if t not in tnotup],readarch)
        #result.ReadRatio = get_ratio(result.Readable,tattrs)
        #result.LostRatio = get_ratio([a for a in tread if a in tnotup],tread)
        #result.MissRatio = get_ratio([a for a in tread if a not in tarch],tread)
        #result.OkRatio = 1.0-result.LostRatio-result.MissRatio
        #result.Summary = '\n'.join((
        #('Checking archiving of %s attributes'%(len(attributes) if attributes else schema))
        #,('%d attributes in %s, %d are currently active'%(len(api),schema,len(tarch)))
        #,('%d devices with %d archived attributes are not running'%(len(nodevs),len([a for a in api if a.rsplit('/',1) in nodevs])))
        #,('%d archived attributes (%2.1f %%) are unreadable! (check and remove)'%(len(tnoread),1e2*get_ratio(tnoread,tarch)))
        #,('%d readable attributes are not archived'%(len(tmiss)))
        #,('%d attributes (readable or not) are updated (%2.1f %% of all readables)'%(len(tok),1e2*result.OkRatio))
        #,('-'*80)
        #,('%d archived attributes (readable or not) are not updated!'%len(tnotup))
        #,('%d archived and readable attributes are not updated! (check and restart?)'%len(treadnotup))
        #,('-'*80)
        #,('%d readable attributes have been removed in the last %d days!'%(len(removed),old_period/(24*3600)))
        #,('%d readable scalar attributes are not being archived (not needed anymore?)'%len(tmscalar))
        #,('%d readable array attributes are not being archived (Ok)'%len(tmarray))
        #,('%d readable array attributes are archived (Expensive)'%len(tarray))
        #,('')))
        #if trace: print(result.Summary)
        #print('%d readable lost,Ok = %2.1f%%, %2.1f %% over all Readables (%2.1f %% of total)'%\
        #(len(treadnotup),1e2*result.ArchRatio,1e2*result.OkRatio,1e2*result.ReadRatio))

    if action:
        if action == 'start_devices':
            print('Executing action %s' % action)
            api.start_devices()
        if action == 'restart_all':
            print('Executing action %s' % action)
            devs = api.get_archivers()
            astor = fn.Astor()
            # NOTE(review): single %d with a 2-tuple raises TypeError;
            # probably meant 'Restarting %d devs: %s' — confirm
            print('Restarting %d devs:' % (len(devs), devs))
            astor.load_from_devs_list(devs)
            astor.stop_servers()
            fn.wait(10.)
            astor.start_servers()

    #print('NO ACTIONS ARE GONNA BE EXECUTED, AS THESE ARE ONLY RECOMMENDATIONS')
    #print("""
    #api = PyTangoArchiving.HDBpp(schema)
    #api.start_devices()
    #or
    #api = PyTangoArchiving.ArchivingAPI('%s')
    #lostdevs = sorted(set(api[a].archiver for a in result.NotUpdated))
    #print(lostdevs)
    #if lostdevs < a_reasonable_number:
    #astor = fn.Astor()
    #astor.load_from_devs_list(lostdevs)
    #astor.stop_servers()
    #fn.time.sleep(10.)
    #astor.start_servers()
    #"""%schema)

    print('\nfinished in %d seconds\n\n' % (fn.now() - ti))

    if export is not None:
        if export is True:
            export = 'txt'
        for x in (export.split(',') if isString(export) else export):
            if x in ('json', 'pck', 'pickle', 'txt'):
                # Bare format names are written to /tmp/<schema>.<fmt>
                x = '/tmp/%s.%s' % (schema, x)
            print('Saving %s file with keys:\n%s' % (x, result.keys()))
            if 'json' in x:
                fn.dict2json(result.dict(), x)
            else:
                f = open(x, 'w')
                if 'pck' in x or 'pickle' in x:
                    pickle.dump(result.dict(), f)
                else:
                    f.write(fn.dict2str(result.dict()))
                f.close()

    return result
def get_attributes_errors(self, regexp='*', timeout=3*3600,
                          from_db=False, extend = False):
    """
    Returns a dictionary {attribute, error/last value}

    If from_db=True and extend=True, it performs a full attribute check

    :param regexp: attribute name filter ('*' resets self.status and
        prints a summary of all categories at the end)
    :param timeout: age in seconds beyond which a value counts as stale
    :param from_db: diagnose from last values stored in the database
    :param extend: additionally classify each stale attribute
        (Unreadable / Exception / NoEvents / ArchiverError / NotChanged)
    """
    if regexp == '*':
        self.status = fn.defaultdict(list)
    if from_db or extend:
        # From here on, `timeout` is an absolute epoch threshold
        timeout = fn.now()-timeout
        attrs = self.get_attributes(True)
        attrs = fn.filtersmart(attrs,regexp)
        print('get_attributes_errors([%d/%d])'
              % (len(attrs),len(self.attributes)))
        vals = self.load_last_values(attrs)
        # Updated attributes are classified and removed from `vals`;
        # whatever remains afterwards is stale.
        # NOTE(review): vals.pop(a) while iterating vals.items() is only
        # safe on python 2 (items() returns a list); it raises
        # RuntimeError on python 3 — confirm target interpreter
        for a,v in vals.items():
            if v and v[0] > timeout:
                self.status['Updated'].append(a)
                if v[1] is not None:
                    self.status['Readable'].append(a)
                else:
                    # Stored None: distinguish device-dead from real None
                    rv = fn.read_attribute(a)
                    if rv is not None:
                        self.status['WrongNone'].append(a)
                    else:
                        self.status['None'].append(a)
                vals.pop(a)
        if not extend:
            self.status['NotUpdated'] = vals.keys()
        else:
            # Diagnose WHY each remaining (stale) attribute is stale
            for a,v in vals.items():
                c = fn.check_attribute(a)
                if c is None:
                    vals[a] = 'Unreadable'
                    self.status['Unreadable'].append(a)
                elif isinstance(c,Exception):
                    vals[a] = str(c)
                    self.status['Exception'].append(a)
                else:
                    # Attribute is readable; check event configuration
                    ev = fn.tango.check_attribute_events(a)
                    if not ev:
                        vals[a] = 'NoEvents'
                        self.status['NoEvents'].append(a)
                    else:
                        # Events are fine; check the archiver itself
                        d = self.get_attribute_archiver(a)
                        e = self.get_archiver_errors(d)
                        if a in e:
                            vals[a] = e[a]
                            self.status['ArchiverError'].append(a)
                        else:
                            rv = fn.read_attribute(a)
                            if v and str(rv) == str(v[1]):
                                # Value simply did not change; archiving
                                # correctly recorded nothing new
                                vals[a] = 'NotChanged'
                                self.status['NotChanged'].append(a)
                            else:
                                self.status['NotUpdated'].append(a)
        if regexp == '*':
            for k,v in self.status.items():
                print('%s: %s' % (k,len(v)))
        return vals
    else:
        # Should inspect the Subscribers Error Lists
        vals = dict()
        for d in self.get_archivers():
            err = self.get_archiver_errors(d)
            for a,e in err.items():
                if fn.clmatch(regexp,a):
                    vals[a] = e
        return vals
def updateAttrDict(self):
    """
    Rebuild self._attrDict from the AttributeList device property.

    Each property item has the form "attr_name[;period=N]"; lines
    starting with '#' are skipped.  Unreadable attributes are either
    discarded (new) or removed (already known).  Readable attributes
    get a fresh bookkeeping dict or an updated period.  Finally the
    attribute/period lists are refreshed and pushed to the periodic
    archiving thread if one is running.
    """
    self.debug_stream("in updateAttrDict()")
    # Reset all bookkeeping containers before re-reading the property
    self._attrDict = {}
    self._attributesList = []
    self._periodList = []
    self._errorList = []
    self._avgPeriodList = []
    self._OKList = []
    self._OKNumber = 0
    self._errorNumber = 0
    self._discarded = []
    self.get_device_properties(self.get_device_class())
    for item in self.AttributeList:
        if item[0] == "#":
            # Commented-out entry in the property
            continue
        period = self.DefaultAttPeriod
        try:
            els = item.split(";")
            attribute = els[0].lower()
            # Look for an optional "period=N" modifier among the fields
            for el in els:
                if 'period=' in el:
                    try:
                        period = int(el.split("=")[1])
                    except:
                        continue
        except:
            attribute = item.lower()
        aux = {}
        # Probe readability with a short timeout (milliseconds)
        check = fn.read_attribute(attribute,timeout=100)
        if attribute in self._attrDict.keys():
            if check is None:
                # Known attribute became unreadable: drop it
                self._attrDict.pop(attribute)
            else:
                # Known and readable: keep its state, refresh the period
                aux = self._attrDict[attribute]
                aux['period'] = period
        else:
            if check is None:
                self.error_stream('%s discarded!' % attribute)
                self._discarded.append(attribute)
                continue
            else:
                # New readable attribute: initialize its bookkeeping
                aux['last_update'] = 0
                aux['average_period'] = 0
                aux['update'] = False
                aux['period'] = period
                aux['attempts'] = 0
                aux['started'] = True
                aux['error_st'] = False
                aux['avg_per_buffer'] = []
            # NOTE(review): indentation reconstructed from collapsed
            # source; the store is placed on this (new-attribute) path
            # so a just-popped unreadable attribute is not re-inserted
            # as an empty dict — confirm against the original file
            self._attrDict[attribute.lower()] = aux
    # Update attributes List
    self._attributesList = []
    self._periodList = []
    if self._attrDict != {}:
        # NOTE: iteritems() is python 2 only
        for att, item in self._attrDict.iteritems():
            self._attributesList.append(att)
            self._periodList.append(item['period'])
    # Update periodic Thread if enabled
    if self._periodicArch_thread is not None:
        self._periodicArch_thread.setAttributeDict(self._attrDict)
def get_attributes_errors(self, regexp='*', timeout=3 * 3600,
                          from_db=False, extend=False):
    """
    Returns a dictionary {attribute, error/last value}

    If from_db=True and extend=True, it performs a full attribute check

    :param regexp: attribute name filter ('*' resets self.status and
        prints a summary of all categories at the end)
    :param timeout: age in seconds beyond which a value counts as stale
    :param from_db: diagnose from last values stored in the database
    :param extend: additionally classify each stale attribute
        (Unreadable / Exception / NoEvents / ArchiverError / NotChanged)
    """
    if regexp == '*':
        self.status = fn.defaultdict(list)
    if from_db or extend:
        # From here on, `timeout` is an absolute epoch threshold
        timeout = fn.now() - timeout
        attrs = self.get_attributes(True)
        attrs = fn.filtersmart(attrs, regexp)
        print('get_attributes_errors([%d/%d])' %
              (len(attrs), len(self.attributes)))
        vals = self.load_last_values(attrs)
        # Updated attributes are classified and removed from `vals`;
        # whatever remains afterwards is stale.
        # FIX: iterate over a snapshot — the loop body calls
        # vals.pop(a), which breaks dict iteration (RuntimeError on
        # python 3, undefined behavior to rely on elsewhere)
        for a, v in list(vals.items()):
            if v and v[0] > timeout:
                self.status['Updated'].append(a)
                if v[1] is not None:
                    self.status['Readable'].append(a)
                else:
                    # Stored None: distinguish device-dead from real None
                    rv = fn.read_attribute(a)
                    if rv is not None:
                        self.status['WrongNone'].append(a)
                    else:
                        self.status['None'].append(a)
                vals.pop(a)
        if not extend:
            self.status['NotUpdated'] = vals.keys()
        else:
            # Diagnose WHY each remaining (stale) attribute is stale
            for a, v in vals.items():
                c = fn.check_attribute(a)
                if c is None:
                    vals[a] = 'Unreadable'
                    self.status['Unreadable'].append(a)
                elif isinstance(c, Exception):
                    vals[a] = str(c)
                    self.status['Exception'].append(a)
                else:
                    # Attribute is readable; check event configuration
                    ev = fn.tango.check_attribute_events(a)
                    if not ev:
                        vals[a] = 'NoEvents'
                        self.status['NoEvents'].append(a)
                    else:
                        # Events are fine; check the archiver itself
                        d = self.get_attribute_archiver(a)
                        e = self.get_archiver_errors(d)
                        if a in e:
                            vals[a] = e[a]
                            self.status['ArchiverError'].append(a)
                        else:
                            rv = fn.read_attribute(a)
                            if v and str(rv) == str(v[1]):
                                # Value did not change; archiving
                                # correctly recorded nothing new
                                vals[a] = 'NotChanged'
                                self.status['NotChanged'].append(a)
                            else:
                                self.status['NotUpdated'].append(a)
        if regexp == '*':
            for k, v in self.status.items():
                print('%s: %s' % (k, len(v)))
        return vals
    else:
        # Should inspect the Subscribers Error Lists
        vals = dict()
        for d in self.get_archivers():
            err = self.get_archiver_errors(d)
            for a, e in err.items():
                if fn.clmatch(regexp, a):
                    vals[a] = e
        return vals