def minimal_check(schema, interval=3 * 3600, exclude=".*(wavename|waveid)$", csvs=""):
    """Quick consistency check of an archiving schema.

    Compares the attributes that *should* be archived (from CSV config when
    ``csvs`` is given, otherwise the currently active list) against what is
    actually configured and updated in the database.

    :param schema: archiving schema name (e.g. 'hdb')
    :param interval: seconds without update before an attribute counts as stale
    :param exclude: regexp matched against lowercased names to ignore
    :param csvs: optional CSV config source passed to get_all_config_attrs()
    :returns: fun.Struct with ids/active/shouldbe/missing/polizon/notupdated/lost
    """
    import re, time
    from PyTangoArchiving import Reader
    from PyTangoArchiving.utils import check_attribute
    r = fun.Struct()
    rd = Reader(schema)
    db = rd.get_database()
    r.ids = db.get_attributes_IDs()
    r.active = db.get_attribute_names(active=True)
    r.shouldbe = get_all_config_attrs(schema, csvs) if csvs else r.active
    r.shouldbe = [a for a in r.shouldbe if not re.match(exclude, a.lower())]
    print('%d attributes are active' % len(r.active))
    print('%d attributes should be active' % len(r.shouldbe))
    # configured-but-absent and archived-but-unconfigured attributes
    r.missing = [a for a in r.shouldbe if a not in r.ids]
    r.polizon = [a for a in r.active if a not in r.shouldbe]
    print('%d attributes are archived but not configured' % len(r.polizon))
    r.updates = db.get_table_updates()
    r.notupdated = [
        a for a in r.active
        if r.updates[db.get_table_name(r.ids[a])] < time.time() - interval
    ]
    print('%d active attributes are not updated' % len(r.notupdated))
    print('%d shouldbe attributes are missing' % len(r.missing))
    r.lost = [
        a for a in r.shouldbe if a in r.ids
        and r.updates[db.get_table_name(r.ids[a])] < time.time() - interval
    ]
    # FIX: materialize the filter; on Python 3 filter() is lazy and the
    # len() below would raise TypeError (harmless no-op on Python 2).
    r.lost = list(filter(check_attribute, r.lost))
    print('%d shouldbe attributes are active but lost' % len(r.lost))
    return r
def set_push_events(filein, period=3000, diff=1e-5):
    """Configure polling and change events for matching device attributes.

    :param filein: device name pattern; if no live device matches, it is
        treated as the path of a pickled {device: Struct(attrs=[...])} dict
    :param period: polling period in ms (<= 0 skips event configuration)
    :param diff: relative change threshold for the change event
    """
    print('set_push_events(%s,%s,%s)' % (filein, period, diff))
    devs = fandango.get_matching_devices(filein)
    if devs:
        devs = dict(
            (d, fandango.Struct(
                {'attrs': fandango.get_device(d).get_attribute_list()}))
            for d in devs)
    else:
        # FIX: close the snapshot file instead of leaking the handle
        with open(filein) as f:
            devs = pickle.load(f)
    for d, t in sorted(devs.items()):
        print('Setting events (%s,%s) for %s' % (period, diff, d))
        dp = PyTango.DeviceProxy(d)
        for a in t.attrs:
            dp.poll_attribute(a, int(period))
            if period > 0:
                ac = dp.get_attribute_config(a)
                cei = PyTango.ChangeEventInfo()
                cei.rel_change = str(diff)
                ac.events.ch_event = cei
                try:
                    dp.set_attribute_config(ac)
                except Exception:
                    # best effort: some attributes reject event config
                    pass
    print('done')
def get_att_conf_table(self):
    """Load the att_conf/att_conf_data_type join and cache it in self.attributes.

    Returns the raw query rows (att_name, att_conf_id, data_type_id, data_type).
    """
    t0 = fn.now()
    query = ("select att_name,att_conf_id,att_conf.att_conf_data_type_id,data_type "
             " from att_conf, att_conf_data_type where "
             "att_conf.att_conf_data_type_id = att_conf_data_type.att_conf_data_type_id")
    rows = self.Query(query)
    for att_name, conf_id, type_id, data_type in rows:
        entry = fn.Struct()
        entry.id = conf_id
        entry.tid = type_id
        entry.type = data_type
        entry.table = 'att_' + data_type
        entry.modes = {'MODE_E': True}
        self.attributes[att_name] = entry
    return rows
def set_push_events(filein, period=3000, diff=1e-5):
    """Configure polling and change events, interactively skipping dead devices.

    :param filein: device name pattern; if no live device matches, it is
        treated as the path of a pickled {device: Struct(attrs=[...])} dict
    :param period: polling period in ms (<= 0 skips event configuration)
    :param diff: relative change threshold for the change event
    """
    print('set_push_events(%s,%s,%s)' % (filein, period, diff))
    devs = fd.get_matching_devices(filein)
    # iterate over a copy so removing dead devices is safe
    for d in devs[:]:
        if not check_device(d):
            q = raw_input('Unable to configure events for %s, '
                          'do you wish to continue?' % d).lower()
            if 'y' not in q:
                return
            devs.remove(d)
    if devs:
        devs = dict(
            (d, fd.Struct({'attrs': fd.get_device(d).get_attribute_list()}))
            for d in devs)
    else:
        # FIX: close the snapshot file instead of leaking the handle
        with open(filein) as f:
            devs = pickle.load(f)
    for d, t in sorted(devs.items()):
        print('Setting events (%s,%s) for %s' % (period, diff, d))
        try:
            dp = PyTango.DeviceProxy(d)
            for a in t.attrs:
                dp.poll_attribute(a, int(period))
                if period > 0:
                    ac = dp.get_attribute_config(a)
                    cei = PyTango.ChangeEventInfo()
                    cei.rel_change = str(diff)
                    ac.events.ch_event = cei
                    try:
                        dp.set_attribute_config(ac)
                    except Exception:
                        # best effort: some attributes reject event config
                        pass
        except Exception:
            q = raw_input('Unable to configure events for %s, '
                          'do you wish to continue?' % d)
            if 'y' not in q.lower():
                break
    print('done')
def unpack_window_message(msg, trace=False):
    """Split a raw framed message into address/window/command/payload/CRC.

    Frame layout (as consumed here): STX, address, [window(3)], command,
    payload..., ETX, CRC(2).  Raises AssertionError if the command byte
    is not ACK, reporting the matching ERRORS entry.
    """
    frame = list(msg)
    if TRACE or trace:
        print('unpack_window_message(%s[%d])' % (frame, len(frame)))
    frame.pop(0)            # STX
    address = frame.pop(0)  # address byte
    checksum, frame = frame[-2:], frame[:-2]
    frame.pop(-1)           # ETX (discarded)
    if len(frame) >= 3:
        window, frame = frame[:3], frame[3:]
        command = frame.pop(0) if frame else ''
    else:
        window = ''
        command = frame.pop(0)
    assert ord(command) == ACK, '%s Error Received!' % ERRORS.get(
        ord(command), 'Unknown')
    return fandango.Struct({
        'data': frame,
        'CRC': checksum,
        'command': command,
        'window': window,
        'address': address
    })
def check_archiving_performance(schema='hdb',attributes=[],period=24*3600*90,\
    exclude=['*/waveid','*/wavename','*/elotech-*'],action=False,trace=True):
    """Audit an archiving schema: readable vs archived vs updated attributes.

    :param schema: archiving schema name (e.g. 'hdb')
    :param attributes: list or regexp of attributes to check ([] = all)
    :param period: lookback window; values < 1000 are taken as days,
        < 3600 as hours, otherwise seconds
    :param exclude: glob patterns of attributes to skip
    :param action: if True, only prints recommended recovery commands
    :param trace: print the summary report
    :returns: fn.Struct with Archived/Readable/Updated/Lost/... lists and ratios
    """
    import PyTangoArchiving as pta
    import fandango as fn
    ti = fn.now()
    api = pta.api(schema)
    check = dict()
    period = 24*3600*period if period < 1000 else (24*period if period<3600 else period)
    # FIX: wrap map() in list(); on Python 3 a lazy map object would be
    # consumed by the first membership test below
    attributes = fn.get_matching_attributes(attributes) \
        if fn.isString(attributes) else list(map(str.lower,attributes))
    tattrs = [a for a in api if not attributes or a in attributes]
    excluded = [a for a in tattrs if any(fn.clmatch(e,a) for e in exclude)]
    tattrs = [a for a in tattrs if a not in excluded]

    # Getting Tango devices currently not running
    alldevs = set(t.rsplit('/',1)[0] for t in tattrs if api[t].archiver)
    # FIX: list() for the same lazy-filter reason as above
    tdevs = list(filter(fn.check_device,alldevs))
    nodevs = [d for d in alldevs if d not in tdevs]

    # Updating data from archiving config tables
    if not attributes:
        tattrs = sorted(a for a in api if a.rsplit('/',1)[0] in tdevs)
        tattrs = [a for a in tattrs if not any(fn.clmatch(e,a) for e in exclude)]
    print('%d attributes will not be checked (excluded or device not running)'
          %(len(api)-len(tattrs)))
    tarch = sorted(a for a in api if api[a].archiver)
    tnoread = sorted(t for t in tarch if t not in tattrs)
    check.update((t,None) for t in tnoread)

    # Getting attributes archived in the past and not currently active
    tmiss = [t for t in tattrs if not api[t].archiver]
    check.update((t,fn.check_attribute(t,readable=True)) for t in tmiss)
    tmiss = [t for t in tmiss if check[t]]
    tmarray = [t for t in tmiss
               if fn.isString(check[t].value) or fn.isSequence(check[t].value)]
    tmscalar = [t for t in tmiss if t not in tmarray]

    # Getting updated tables from database
    tups = pta.utils.get_table_updates(schema)
    # Some tables do not update MySQL index tables
    t0 = [a for a in tarch if a in tattrs and not tups[api[a].table]]
    # FIX: original read check_attribute(a,...) using the stale loop
    # variable 'a' from the comprehension above instead of 't'
    check.update((t,check_attribute(t,readable=True))
                 for t in t0 if not check.get(t))
    t0 = [t for t in t0 if check[t]]
    print('%d/%d archived attributes have indexes not updated ...'
          %(len(t0),len(tarch)))
    if t0 and len(t0)<100:
        vs = api.load_last_values(t0)
        tups.update((api[t].table,api[t].last_date) for t in t0)

    tnotup = [a for a in tarch if tups[api[a].table]<fn.now()-1800]
    check.update((t,1) for t in tarch if t not in tnotup)

    # Updating readable attributes (all updated are considered as readable)
    tread = sorted(t for t in tattrs if t not in tnoread)
    for t in tattrs:
        if t not in check:
            check[t] = fn.check_attribute(t,readable=True)
    tread = sorted(t for t in tattrs if check[t])
    tnoread.extend(t for t in tread if not check[t])
    tnoread = sorted(set(tnoread))

    # tread contains all readable attributes from devices with some attribute archived
    # tnoread contains all unreadable attributes from already archived
    # Calculating all final stats; tok = archivable attributes that are archived
    tok = [t for t in tarch if t not in tnotup]
    readarch = [a for a in tread if a in tarch]
    treadnotup = [t for t in readarch if t in tnotup] #tnotup contains only data from tarch
    tokread = [t for t in readarch if t not in tnotup] #Useless, all archived are considered readable
    tarray = [t for t in tarch if check[t] and get_attribute_pytype(t) in (str,list)]
    removed = [a for a in tattrs
               if not api[a].archiver and tups[api[a].table]>fn.now()-period]

    result = fn.Struct()
    result.Excluded = excluded
    result.Schema = schema
    result.All = api.keys()
    result.Archived = tarch
    result.Readable = tread
    result.ArchivedAndReadable = readarch
    result.Updated = tok #tokread
    result.Lost = treadnotup
    result.Removed = removed
    result.TableUpdates = tups
    result.NotUpdated = tnotup
    result.Missing = tmiss
    result.MissingScalars = tmscalar
    result.MissingArrays = tmarray
    result.ArchivedArray = tarray
    result.Unreadable = tnoread
    result.DeviceNotRunning = nodevs

    get_ratio = lambda a,b:float(len(a))/float(len(b))
    result.ArchRatio = get_ratio([t for t in readarch if t not in tnotup],readarch)
    result.ReadRatio = get_ratio(result.Readable,tattrs)
    result.LostRatio = get_ratio([a for a in tread if a in tnotup],tread)
    result.MissRatio = get_ratio([a for a in tread if a not in tarch],tread)
    result.OkRatio = 1.0-result.LostRatio-result.MissRatio

    result.Summary = '\n'.join((
        ('Checking archiving of %s attributes'
         %(len(attributes) if attributes else schema))
        ,('%d attributes in %s, %d are currently active'
          %(len(api),schema,len(tarch)))
        # FIX: original compared a.rsplit('/',1) (a list) against device
        # names, which is always False; [0] yields the device name
        ,('%d devices with %d archived attributes are not running'
          %(len(nodevs),len([a for a in api if a.rsplit('/',1)[0] in nodevs])))
        ,('%d archived attributes (%2.1f %%) are unreadable! (check and remove)'
          %(len(tnoread),1e2*get_ratio(tnoread,tarch)))
        ,('%d readable attributes are not archived'%(len(tmiss)))
        ,('%d attributes (readable or not) are updated (%2.1f %% of all readables)'
          %(len(tok),1e2*result.OkRatio))
        ,('-'*80)
        ,('%d archived attributes (readable or not) are not updated!'%len(tnotup))
        ,('%d archived and readable attributes are not updated! (check and restart?)'
          %len(treadnotup))
        ,('-'*80)
        ,('%d readable attributes have been removed in the last %d days!'
          %(len(removed),period/(24*3600)))
        ,('%d readable scalar attributes are not being archived (not needed anymore?)'
          %len(tmscalar))
        ,('%d readable array attributes are not being archived (Ok)'%len(tmarray))
        ,('%d readable array attributes are archived (Expensive)'%len(tarray))
        ,('')))
    if trace: print(result.Summary)
    print('%d readable lost,Ok = %2.1f%%, %2.1f %% over all Readables (%2.1f %% of total)'%\
        (len(treadnotup),1e2*result.ArchRatio,1e2*result.OkRatio,1e2*result.ReadRatio))

    if action:
        print('NO ACTIONS ARE GONNA BE EXECUTED, AS THESE ARE ONLY RECOMMENDATIONS')
        print("""
        api = PyTangoArchiving.ArchivingAPI('%s')
        lostdevs = sorted(set(api[a].archiver for a in result.NotUpdated))
        print(lostdevs)
        if lostdevs < a_reasonable_number:
            astor = fn.Astor()
            astor.load_from_devs_list(lostdevs)
            astor.stop_servers()
            fn.time.sleep(10.)
            astor.start_servers()
        """%schema)
    if trace: print('finished in %d seconds'%(fn.now()-ti))
    return result
def check_archiving_schema(
        schema='hdb',
        attributes=[], values=None,
        ti=None,
        period=7200,
        old_period=24*3600*90,
        exclude=['*/waveid','*/wavename','*/elotech-*'],
        use_index=True,
        loads=True,
        action=False,
        trace=True,
        export=None):
    """Check which attributes of a schema are archived, readable and updated.

    :param schema: archiving schema name
    :param attributes: list or regexp of attributes to check ([] = all)
    :param values: dict of pre-loaded last values, or path to a .pck/.json
        snapshot file; None (default) loads values from the database
    :param ti: reference time (None = now; string parsed via str2time)
    :param period: seconds back from ti within which a value counts as updated
    :param old_period: lookback for removed attributes; values < 1000 are
        taken as days, < 3600 as hours, otherwise seconds
    :param exclude: glob patterns of attributes to skip
    :param use_index: use MySQL table-update indexes instead of reading values
    :param loads: print per-archiver attribute loads
    :param action: 'start_devices' or 'restart_all' to act on the result
    :param trace: (kept for compatibility; summary block currently disabled)
    :param export: 'json'/'pck'/'txt' (or list/comma-separated) to dump result
    :returns: fn.Struct report (or None if no attribute matches)
    """
    # FIX: values={} was a mutable default mutated below, silently shared
    # between calls; create a fresh dict per call instead
    if values is None:
        values = {}
    ti = fn.now() if ti is None else str2time(ti) if isString(ti) else ti
    api = pta.api(schema)
    is_hpp = isinstance(api, pta.HDBpp)
    check = dict()
    old_period = 24*3600*old_period if old_period < 1000 \
        else (24*old_period if old_period < 3600 else old_period)

    allattrs = api.get_attributes() if hasattr(
        api, 'get_attributes') else api.keys()
    print('%s contains %d attributes' % (schema, len(allattrs)))

    if attributes:
        if fn.isString(attributes) and fn.isRegexp(attributes):
            tattrs = [a for a in allattrs if clsearch(attributes, a)]
        else:
            # FIX: list() so repeated membership tests work on Python 3,
            # and filter against the normalized *attributes* list; the
            # original tested "in allattrs", which matched everything
            attributes = list(map(fn.tango.get_normal_name,
                                  fn.toList(attributes)))
            tattrs = [
                a for a in allattrs
                if fn.tango.get_normal_name(a) in attributes
            ]
    else:
        tattrs = allattrs

    excluded = [a for a in tattrs if any(fn.clmatch(e, a) for e in exclude)]
    tattrs = [a for a in tattrs if a not in excluded]
    print('%d attributes to check' % len(tattrs))
    if not len(tattrs):
        return
    if excluded:
        print('\t%d attributes excluded' % len(excluded))

    archived = {}
    for a in tattrs:
        if hasattr(api, 'get_attribute_archiver'):
            arch = api.get_attribute_archiver(a)
        else:
            arch = api[a].archiver
        if arch:
            archived[a] = arch
    print('\t%d attributes are archived' % len(archived))

    # Getting Tango devices currently not running
    alldevs = set(t.rsplit('/', 1)[0] for t in tattrs)

    archs = sorted(set(archived.values()))
    if loads:
        astor = fn.Astor()
        astor.load_from_devs_list(archs)
        loads = fn.defaultdict(list)
        for k, s in astor.items():
            for d in s.get_device_list():
                d = fn.tango.get_normal_name(d)
                for a in archived:
                    if fn.tango.get_normal_name(archived[a]) == d:
                        loads[k].append(a)
        for k, s in sorted(loads.items()):
            print('\t%s archives %d attributes' % (k, len(s)))

    noarchs = [
        fn.tango.get_normal_name(d) for d in archs if not fn.check_device(d)
    ]
    if noarchs:
        print('\t%d archivers are not running: %s' % (len(noarchs), noarchs))

    ###########################################################################
    if isString(values) and values.endswith('.pck'):
        print('\nLoading last values from %s file\n' % values)
        import pickle
        # FIX: close the snapshot file instead of leaking the handle
        with open(values) as f:
            values = pickle.load(f)
    elif isString(values) and values.endswith('.json'):
        print('\nLoading last values from %s file\n' % values)
        values = fn.json2dict(values)
    elif not use_index or is_hpp:
        print('\nGetting last values ...\n')
        for a in tattrs:
            values[a] = api.load_last_values(a)
    else:
        print('\nGetting updated tables from database ...\n')
        tups = pta.utils.get_table_updates(schema)
        # Some tables do not update MySQL index tables
        # FIX: original referenced undefined 'tarch' (NameError) and the
        # stale loop variable 'a' in check_attribute; use archived/t
        t0 = [a for a in archived if a in tattrs and not tups[api[a].table]]
        check.update((t, check_attribute(t, readable=True))
                     for t in t0 if not check.get(t))
        t0 = [t for t in t0 if check[t]]
        print('%d/%d archived attributes have indexes not updated ...'
              % (len(t0), len(archived)))
        if t0 and len(t0) < 100:
            vs = api.load_last_values(t0)
            tups.update((api[t].table, api[t].last_date) for t in t0)
        # NOTE(review): this tests "a in tups" (table-keyed dict) for an
        # attribute name; looks suspicious but kept as in the original
        for a in tattrs:
            if a in tups:
                values[a] = [tups[api[a].table], 0]

    # Normalize values to [timestamp, value] pairs (or []/None when empty)
    for k, v in values.items():
        if (len(v) if isSequence(v) else v):
            if isinstance(v, dict):
                v = v.values()[0]
            if isSequence(v) and len(v) == 1:
                v = v[0]
            if v and not isNumber(v[0]):
                v = [date2time(v[0]), v[1]]
            values[k] = v
        else:
            values[k] = [] if isSequence(v) else None
    print('%d values obtained' % len(values))

    ###########################################################################
    now = fn.now()
    result = fn.Struct()
    times = [t[0] for t in values.values() if t]
    futures = [t for t in times if t > now]
    times = [t for t in times if t < now]
    tmiss = []
    tfutures = [k for k, v in values.items() if v and v[0] in futures]
    tmin, tmax = min(times), max(times)
    print('\toldest update was %s' % time2str(tmin))
    print('\tnewest update was %s' % time2str(tmax))
    if futures:
        print('\t%d attributes have values in the future!' % len(futures))

    tnovals = [a for a in archived if not values.get(a, None)]
    if tnovals:
        print('\t%d archived attributes have no values' % len(tnovals))
    try:
        tmiss = [
            a for a, v in values.items()
            if v and old_period < v[0] < ti - period and a not in archived
        ]
    except:
        print(values.items()[0])
    if tmiss:
        print('\t%d/%d attrs with values are not archived anymore'
              % (len(tmiss), len(tattrs)))

    result.Excluded = excluded
    result.Schema = schema
    result.All = tattrs
    result.Archived = values
    result.NoValues = tnovals
    result.MissingOrRemoved = tmiss
    result.TMin = tmin
    result.TMax = tmax
    result.Futures = tfutures

    tup = sorted(a for a in values if values[a] and values[a][0] > ti - period)
    tok = [a for a in tup if values[a][1] not in (None, [])]
    print('\n%d/%d archived attributes are updated since %s - %s'
          % (len(tup), len(archived), ti, period))
    print('%d archived attributes are fully ok\n' % (len(tok)))

    tnotup = sorted(a for a in values
                    if values[a] and values[a][0] < ti - period)
    print('\t%d archived attrs are not updated' % len(tnotup))
    tupnoread = [
        a for a in tup if not values[a][1] and fn.read_attribute(a) is None
    ]

    reads = dict((a, fn.read_attribute(a)) for a in tnotup)
    tnotupread = [a for a in tnotup if reads[a] is not None]
    print('\t%d not updated attrs are readable (Lost)' % len(tnotupread))
    print('\t%d of them are not floats'
          % len([t for t in tnotupread if not isinstance(reads[t], float)]))
    print('\t%d of them are states'
          % len([t for t in tnotupread if t.lower().endswith('/state')]))
    print('\t%d of them seem motors'
          % len([t for t in tnotupread if t.lower().endswith('/position')]))

    tnotupevs = [a for a in tnotupread if fn.tango.check_attribute_events(a)]
    print('\t%d not updated attrs are readable and have events (LostEvents)'
          % len(tnotupevs))

    tnotupnotread = [a for a in tnotup if a not in tnotupread]
    print('\t%d not updated attrs are not readable' % len(tnotupnotread))

    result.Lost = tnotupread
    result.LostEvents = tnotupevs
    losts = (tnotupevs if is_hpp else tnotupread)

    # Compare last archived value against the current read for lost attrs
    diffs = dict()
    for a in losts:
        try:
            v, vv = values.get(a, (None, ))[1], reads[a]
            if fn.isSequence(v):
                v = fn.toList(v)
            if fn.isSequence(vv):
                vv = fn.toList(vv)
            diffs[a] = v != vv
            if fn.isSequence(diffs[a]):
                diffs[a] = any(diffs[a])
            else:
                diffs[a] = bool(diffs[a])
        except:
            diffs[a] = None

    fams = fn.defaultdict(list)
    for a in tnotupread:
        fams['/'.join(a.split('/')[-4:-2])].append(a)
    for f in sorted(fams):
        print('\t%s: %d attrs not updated' % (f, len(fams[f])))
    print()

    differ = [a for a in losts if diffs[a]]  #is True]
    print('\t%d/%d not updated attrs have also wrong values!!!'
          % (len(differ), len(losts)))

    rd = pta.Reader()
    only = [a for a in tnotupread if len(rd.is_attribute_archived(a)) == 1]
    print('\t%d/%d not updated attrs are archived only in %s'
          % (len(only), len(losts), schema))
    result.LostDiff = differ
    print()

    archs = sorted(set(archived.values()))
    astor = fn.Astor()
    astor.load_from_devs_list(archs)
    badloads = fn.defaultdict(list)
    for k, s in astor.items():
        for d in s.get_device_list():
            d = fn.tango.get_normal_name(d)
            for a in losts:
                if fn.tango.get_normal_name(archived[a]) == d:
                    badloads[k].append(a)
    for k, s in badloads.items():
        if len(s):
            print('\t%s archives %d lost attributes' % (k, len(s)))

    print('\t%d updated attrs are not readable' % len(tupnoread))

    result.ArchivedAndReadable = tok
    result.Updated = tup
    result.NotUpdated = tnotup
    result.Unreadable = tnotupnotread
    result.ArchiverNotRunning = noarchs
    result.LostFamilies = fams

    # Tnones is for readable attributes not being archived
    tnones = [
        a for a in archived
        if (a not in values or values[a] and values[a][1] in (None, []))
        and a not in tupnoread and a not in tnotupread
    ]
    tupnones = [a for a in tnones if a in tup]
    if tupnones:
        print('\t%d archived readable attrs record empty values'
              % len(tupnones))
    result.Nones = tnones

    # Summary/ratio reporting was disabled in the original (dead 'if 0'
    # block copied from check_archiving_performance); intentionally removed.

    if action:
        if action == 'start_devices':
            print('Executing action %s' % action)
            api.start_devices()
        if action == 'restart_all':
            print('Executing action %s' % action)
            devs = api.get_archivers()
            astor = fn.Astor()
            # FIX: original format string had one placeholder for two
            # arguments and raised TypeError before restarting anything
            print('Restarting %d devs: %s' % (len(devs), devs))
            astor.load_from_devs_list(devs)
            astor.stop_servers()
            fn.wait(10.)
            astor.start_servers()

    print('\nfinished in %d seconds\n\n' % (fn.now() - ti))

    if export is not None:
        if export is True:
            export = 'txt'
        for x in (export.split(',') if isString(export) else export):
            if x in ('json', 'pck', 'pickle', 'txt'):
                x = '/tmp/%s.%s' % (schema, x)
            print('Saving %s file with keys:\n%s' % (x, result.keys()))
            if 'json' in x:
                fn.dict2json(result.dict(), x)
            else:
                # FIX: context manager closes the file even on error
                with open(x, 'w') as f:
                    if 'pck' in x or 'pickle' in x:
                        pickle.dump(result.dict(), f)
                    else:
                        f.write(fn.dict2str(result.dict()))

    return result
#!/usr/bin/python
"""Watchdog helpers to restart archiving servers of a PyTangoArchiving schema."""

import time, os, sys, traceback

sys.path.append('/homelocal/sicilia/lib/python/site-packages/')

import fandango as fun
import PyTangoArchiving as pta

V = fun.Struct({'torestart': []})  # shared scratch state for the watchdog loop
WAIT_TIME = 150     # seconds to wait after restarting a server
MAX_ERROR = 7200    # seconds; presumably the stale-data threshold — TODO confirm
STOP_WAIT = 30.     # seconds between stop and start
last_restart = 0    # timestamp of the last restart (module-wide)


def trace(msg):
    """Print msg prefixed with the current local time."""
    print('%s: %s' % (time.ctime(), msg))


def restart_server(ss, api='hdb', wait=WAIT_TIME):
    """Stop and restart archiving server(s) ss; returns ss.

    :param ss: server name(s) understood by api.servers
    :param api: schema name or an already-built archiving api object
    :param wait: seconds to sleep after starting (0 to skip)
    """
    # FIX: update the module-level timestamp; without 'global' the original
    # assigned a dead local and last_restart never changed
    global last_restart
    if fun.isString(api):
        # FIX: honour the schema argument; original hardcoded pta.api('hdb')
        api = pta.api(api)
    trace('restart_server(%s)' % ss)
    api.servers.stop_servers(ss)
    time.sleep(STOP_WAIT)
    api.servers.start_servers(ss, wait=0.1)
    last_restart = time.time()
    if wait:
        # single-arg print with parens is valid on both Python 2 and 3
        print('\tWaiting %s seconds' % wait)
        time.sleep(wait)
    return ss
from fandango.tango import PyTango, get_tango_host try: tango_host = get_tango_host().split(':')[0] except: tango_host = 'PyAlarm' #@TODO: ERROR as a positive value causes problems to Eval #(but it must be positive to appear first on lists?) #The higher the value, the higher it will appear on View lists AlarmStates = fd.Struct({ 'NORM': 0, #Normal state 'RTNUN': 1, #Active but returned to normal 'ACKED': 2, #Acknowledged by operator 'ACTIVE': 3, #UNACK alias 'UNACK': 4, #Active and unacknowledged 'ERROR': -1, #PyAlarm not working properly, exception on formula 'SHLVD': -2, #Silenced, hidden, ignored, (DEBUG), temporary state 'DSUPR': -3, #Disabled by a process condition (Enabled), failed not throwed 'OOSRV': -4, #Unconditionally disabled, Enable = False, Device is OFF }) ACTIVE_STATES = 'ACTIVE', 'UNACK', 'ACKED', 'RTNUN' DISABLED_STATES = 'ERROR', 'SHLVD', 'OOSRV', 'DSUPR' SORT_ORDER = ('Error', 'Active', '_State', 'Priority', 'Time') #@TODO: Rename to PRIORITIES, adapt to IEC Document SEVERITIES = { 'DEBUG': 0, 'INFO': 1,
def check_db_schema(schema, tref=None):
    """Classify the archived attributes of a schema by update status.

    :param schema: archiving schema name (e.g. 'hdb', 'tdb' or an hdb++ one)
    :param tref: reference time; defaults to one hour ago
    :returns: fn.Struct with attrs/on/off/dups/ok/nok/noev/stall/lost lists
    """
    r = fn.Struct()
    r.api = api = pta.api(schema)
    r.tref = fn.notNone(tref, fn.now() - 3600)

    r.attrs = api.keys()
    r.on = api.get_archived_attributes()
    r.off = [a for a in r.attrs if a not in r.on]
    if schema in ('tdb', 'hdb'):
        # MySQL schemas: use the per-table update index as "last value" time
        ups = api.db.get_table_updates()
        r.vals = dict((k, (ups[api[k].table], None)) for k in r.on)
    else:
        # hdb++: load actual last values; keep only the first (time, value) pair
        r.vals = dict(fn.kmap(api.load_last_values, r.on))
        r.vals = dict((k, v and v.values()[0]) for k, v in r.vals.items())

    # Detect attributes assigned to more than one dedicated archiver
    dups = fn.defaultdict(list)
    if getattr(api, 'dedicated', None):
        [
            dups[a].append(k)
            for a in r.on for k, v in api.dedicated.items() if a in v
        ]
    nups = [a for a, v in dups.items() if len(v) <= 1]
    [dups.pop(a) for a in nups]
    r.dups = dict(dups)

    # Get all updated attributes
    r.ok = [a for a, v in r.vals.items() if v and v[0] > r.tref]
    # Try to read not-updated attributes
    r.check = dict((a, fn.check_attribute(a)) for a in r.on if a not in r.ok)
    r.nok, r.stall, r.noev, r.lost, r.evs = [], [], [], [], {}
    # Method to compare numpy values (sequences compare element-wise)
    fbool = lambda x: all(x) if fn.isSequence(x) else bool(x)

    for a, v in r.check.items():
        # Get current value/timestamp
        vv, t = getattr(v, 'value', v), getattr(v, 'time', 0)
        t = t and fn.ctime2time(t)
        if isinstance(vv, (type(None), Exception)):
            # attribute is not readable
            r.nok.append(a)
        elif r.vals[a] and 0 < t <= r.vals[a][0]:
            # attribute timestamp doesnt change
            r.stall.append(a)
        elif r.vals[a] and fbool(vv == r.vals[a][1]):
            # attribute value doesnt change
            r.stall.append(a)
        else:
            r.evs[a] = fn.tango.check_attribute_events(a)
            if not r.evs[a]:
                # attribute doesnt send events
                r.noev.append(a)
            else:
                # archiving failure (events or polling)
                r.lost.append(a)

    # SUMMARY
    print(schema)
    for k in 'attrs on off dups ok nok noev stall lost'.split():
        print('\t%s:\t:%d' % (k, len(r.get(k))))
    return r
# archiving failure (events or polling) r.lost.append(a) # SUMMARY print(schema) for k in 'attrs on off dups ok nok noev stall lost'.split(): print('\t%s:\t:%d' % (k, len(r.get(k)))) return r CheckState = fn.Struct( on=0, # archived off=1, # not archived ok=2, # device up and running, values updated nok=3, # device not running stall=4, # value not changing noev=5, # not sending events lost=6, # value changed, but not updated in db ) def check_archived_attribute(): # Get current value/timestamp vv, t = getattr(v, 'value', v), getattr(v, 'time', 0) t = t and fn.ctime2time(t) if isinstance(vv, (type(None), Exception)): # attribute is not readable r.nok.append(a)
def check_db_schema(schema, attributes = None, values = None,
                    tref = -12*3600, n = 1, filters = '*', export = 'json',
                    restart = False, subscribe = False):
    """
    tref is the time that is considered updated (e.g. now()-86400)
    n is used to consider multiple values

    attrs: all attributes in db
    on: archived
    off: in db but not currently archived
    ok: updated

    known error causes (attrs not lost but not updated):

    nok: attributes are not currently readable
    noevs: attributes not sending events
    novals: attributes never recorded a value
    stall: not updated, but current value matches archiving
    lost: not updated, and values doesn't match with current
    """
    t0 = fn.now()

    if hasattr(schema, 'schema'):
        # an api object was passed instead of a schema name
        # FIX: original read 'api.schema' before 'api' was bound (NameError)
        api, schema = schema, schema.schema
    else:
        api = pta.api(schema)

    r = fn.Struct(api=api, schema=schema)
    if isString(tref):
        tref = fn.str2time(tref)
    # negative tref means "seconds before now"
    r.tref = fn.now()+tref if tref < 0 else tref
    r.attrs = [a for a in (attributes or api.get_attributes())
               if fn.clmatch(filters, a)]
    print('check_db_schema(%s,attrs[%s],tref="%s",export as %s)'
          % (schema, len(r.attrs), fn.time2str(r.tref), export))

    if restart and schema != 'hdbpc':
        # NOTE(review): 'hdbpc' looks like a typo for 'hdbpp' — confirm
        archs = [a for a in api.get_archivers() if not fn.check_device(a)]
        if archs:
            try:
                print('Restarting archivers: %s' % str(archs))
                astor = fn.Astor(archs)
                astor.stop_servers()
                astor.start_servers()
            except:
                traceback.print_exc()
        stopped = api.get_stopped_attributes()
        print('Restarting %d stopped attributes' % len(stopped))
        api.restart_attributes(stopped)

    r.on = [a for a in api.get_archived_attributes() if a in r.attrs]
    r.off = [a for a in r.attrs if a not in r.on]
    r.archs = fn.defaultdict(list)
    r.pers = fn.defaultdict(list)
    r.values = load_schema_values(api, r.on, values, n, tref=tref)

    if schema in ('tdb', 'hdb'):
        [r.archs[api[k].archiver].append(k) for k in r.on]
    else:
        # hdb++: keep raw values aside, compute per-attribute update frequency
        r.rvals = r.values
        r.freq, r.values = {}, {}
        for k, v in r.rvals.items():
            try:
                if n > 1:
                    v = v[0] if isSequence(v) and len(v) else v
                    r.values[k] = v[0] if isSequence(v) and len(v) else v
                    r.freq[k] = v and float(len(v))/abs(v[0][0]-v[-1][0])
                else:
                    r.values[k] = v
            except Exception as e:
                print(k, v)
                print(fn.except2str())
        for k in api.get_archivers():
            r.archs[k] = api.get_archiver_attributes(k)
        for k in api.get_periodic_archivers():
            r.pers[k] = api.get_periodic_archivers_attributes(k)

    # Get all updated attributes
    r.ok = [a for a, v in r.values.items() if v and v[0] > r.tref]
    # Try to read not-updated attributes
    r.check = dict((a, fn.check_attribute(a)) for a in r.on if a not in r.ok)
    r.nok, r.stall, r.noevs, r.lost, r.novals, r.evs, r.rem = \
        [], [], [], [], [], {}, []

    # Classify each unreadable/not-updated attribute into a single bucket
    for a, v in r.check.items():
        state = check_archived_attribute(
            a, v, default=CheckState.LOST, cache=r, tref=r.tref,
            check_events=subscribe and not api.is_periodic_archived(a))
        {
            #CheckState.ON : r.on,
            #CheckState.OFF : r.off,
            CheckState.OK: r.ok,            # Shouldn't be any ok in check list
            CheckState.NO_READ: r.nok,
            CheckState.STALL: r.stall,
            CheckState.NO_EVENTS: r.noevs,
            CheckState.LOST: r.lost,
            CheckState.UNK: r.novals,
        }[state].append(a)

    # SUMMARY
    r.summary = schema + '\n'
    r.summary += ','.join("""on: archived
off: not archived
ok: updated
nok: not readable
noevs: no events
novals: no values
stall: not changing
lost: not updated
""".split('\n')) + '\n'

    getline = lambda k, v, l: '\t%s:\t:%d\t(%s)' % (k, len(v), l)
    r.summary += '\n\t%s:\t:%d\tok+stall: %2.1f %%' % (
        'attrs', len(r.attrs),
        (100.*(len(r.ok)+len(r.stall))/(len(r.on) or 1e12)))
    r.summary += '\n\t%s/%s:\t:%d/%d' % (
        'on', 'off', len(r.on), len(r.off))
    r.summary += '\n\t%s/%s:\t:%d/%d' % (
        'ok', 'nok', len(r.ok), len(r.nok))
    if len(r.nok) > 10:
        r.summary += ' !!!'
    r.summary += '\n\t%s/%s:\t:%d/%d' % (
        'noevs', 'novals', len(r.noevs), len(r.novals))
    if len(r.novals) > 1:
        r.summary += ' !!!'
    r.summary += '\n\t%s/%s:\t:%d/%d' % (
        'lost', 'stall', len(r.lost), len(r.stall))
    if len(r.lost) > 1:
        r.summary += ' !!!'
    r.summary += '\n'

    # Per-archiver breakdown of novals/lost attributes
    r.archivers = dict.fromkeys(api.get_archivers())
    for d in sorted(r.archivers):
        r.archivers[d] = api.get_archiver_attributes(d)
        novals = [a for a in r.archivers[d] if a in r.novals]
        lost = [a for a in r.archivers[d] if a in r.lost]
        if (len(novals)+len(lost)) > 2:
            r.summary += ('\n%s (all/novals/lost): %s/%s/%s'
                          % (d, len(r.archivers[d]), len(novals), len(lost)))

    if hasattr(api, 'get_periodic_archivers'):
        r.periodics = dict.fromkeys(api.get_periodic_archivers())
        for d in sorted(r.periodics):
            r.periodics[d] = api.get_periodic_archiver_attributes(d)
            novals = [a for a in r.periodics[d] if a in r.novals]
            lost = [a for a in r.periodics[d] if a in r.lost]
            if len(novals)+len(lost) > 2:
                r.summary += ('\n%s (all/novals/lost): %s/%s/%s'
                              % (d, len(r.periodics[d]), len(novals), len(lost)))
        r.perattrs = [a for a in r.on if a in api.get_periodic_attributes()]
        r.notper = [a for a in r.on if a not in r.perattrs]

    r.summary += '\nfinished in %d seconds\n\n' % (fn.now()-t0)
    print(r.summary)

    if restart:
        try:
            retries = r.lost+r.novals+r.nok
            print('restarting %d attributes' % len(retries))
            api.restart_attributes(retries)
        except:
            traceback.print_exc()

    if export is not None:
        if export is True:
            export = 'txt'
        for x in (export.split(',') if isString(export) else export):
            if x in ('json', 'pck', 'pickle', 'txt'):
                x = '/tmp/%s.%s' % (schema, x)
            print('Saving %s file with keys:\n%s' % (x, r.keys()))
            if 'json' in x:
                fn.dict2json(r.dict(), x)
            else:
                # FIX: context manager closes the file even on error
                with open(x, 'w') as f:
                    if 'pck' in x or 'pickle' in x:
                        pickle.dump(r.dict(), f)
                    else:
                        f.write(fn.dict2str(r.dict()))

    for k, v in r.items():
        if fn.isSequence(v):
            r[k] = sorted(v)

    return r
try: v = [date2time(v[0]),v[1]] except: print('unable to parse %s' % str(v)) values[k] = v print('%d values obtained' % len(values)) return values CheckState = fn.Struct( ON = 0, # archived OFF = 1, # not archived OK = 2, # device up and running, values updated NO_READ = 3, # device not running STALL = 4, # value not changing NO_EVENTS = 5, # not sending events LOST = 6, # value changed, but not updated in db UNK = 7, # value cannot be evaluated ) def check_attribute_exists(model): model = fn.tango.parse_tango_model(model) alldevs = fn.tango.get_all_devices() device = fn.tango.get_normal_name(model.device) if device not in alldevs: return False #alldevs = fn.tango.get_all_devices(exported=True) #if not device in alldevs: #return True if not fn.tango.check_device(device):