def get_deactivated_attributes(api='hdb',
                               updates=None,
                               period=6 * 30 * 24 * 3600):
    # Returns the list of attributes that are not archived despite being readable and having data from the last months
    if fun.isString(api): api = pta.api(api)
    if updates is None: updates = get_table_updates(api)
    now = fun.time.time()
    return sorted(
        a for a, t in updates.items()
        if (now - period) < t < (now - 24 * 3600) and fun.check_attribute(a))
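
A minimal usage sketch, assuming this module already imports fandango as fun and PyTangoArchiving as pta as the snippets below do; the 90-day period is only an illustration:

    deactivated = get_deactivated_attributes('hdb', period=90 * 24 * 3600)
    print('%d readable attributes stopped being archived recently'
          % len(deactivated))
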
def check_archived_attribute(attribute, value = False, state = CheckState.OK, 
        default = CheckState.LOST, cache = None, tref = None, 
        check_events = True):
    """
    generic method to check the state of an attribute (readability/events)
    
    value = AttrValue object returned by check_attribute
    cache = result from check_db_schema containing archived values
    
    this method will not query the database; database values should be 
    given using the cache dictionary argument
    """
    # readable and/or no reason known for archiving failure
    state = default # do not remove this line
    
    # Get current value/timestamp
    if cache:
        stored = cache.values[attribute]
        #evs = cache.evs[attribute]
        if stored is None or (fn.isSequence(stored) and not len(stored)):
            return CheckState.UNK
        else:
            t,v = stored[0],stored[1]
            if t>=tref and not isinstance(v,(type(None),Exception)):
                print('%s should not be in check list! (%s,%s)' % (attribute,t,v))
                return CheckState.OK
        
    if value is False:
        value = fn.check_attribute(attribute, brief=False)
        
    vv,t = getattr(value,'value',value),getattr(value,'time',0)
    t = t and fn.ctime2time(t)
    
    # same comparison helper as used in check_db_schema below (collapses arrays to a bool)
    fbool = lambda x: all(x) if fn.isSequence(x) else bool(x)

    if isinstance(vv, (type(None), Exception)):
        # attribute is not readable
        state = CheckState.NO_READ
    elif cache and stored and 0 < t <= stored[0]:
        # attribute timestamp doesn't change
        state = CheckState.STALL
    elif cache and stored and fbool(vv == stored[1]):
        # attribute value doesn't change
        state = CheckState.STALL
    elif check_events:
        # READABLE NOT STORED WILL ARRIVE HERE
        evs = fn.tango.check_attribute_events(attribute)
        if cache:
            cache.evs[attribute] = evs
        if not evs:
            # attribute doesn't send events
            state = CheckState.NO_EVENTS

    return state
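
A hedged sketch of calling the check above without a cache; CheckState and fn are the same module-level names used inside the function, and the attribute name is a placeholder:

    state = check_archived_attribute('sys/tg_test/1/double_scalar',
                                     tref=fn.now() - 3600)
    if state == CheckState.NO_EVENTS:
        print('attribute is readable but pushes no events')
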
 def applyModes(self):
     self.logger().show()
     #Qt.QApplication.instance().setOverrideCursor(Qt.QCursor(Qt.Qt.WaitCursor))
     try:
         attr = self.getModel()
         v = F.check_attribute(attr, brief=True)
         if isinstance(v, (type(None), Exception)):
             Qt.QMessageBox.warning(
                 self, "Warning",
                 "%s is not readable nor archivable" % attr)
             self.logger().hide()
             return
         if fun.isSequence(v) or fun.isString(v):
             Qt.QMessageBox.warning(self, "Warning",
                                    "%s array type is not supported" % attr)
             self.logger().hide()
             return
         modes = self.getModes() or {'MODE_P': [60000]}
         schema = self.getSchema()
         print('%s: applyModes(%s)' % (fun.time2str(), modes))
         msg = 'Modes to be applied:\n'
         for m, v in modes.items():
             msg += '\t%s.%s: %s\n' % (schema, m, v)
         qm = Qt.QMessageBox(Qt.QMessageBox.Warning, 'Confirmation', msg,
                             Qt.QMessageBox.Ok | Qt.QMessageBox.Cancel)
         r = qm.exec_()
         if r == Qt.QMessageBox.Ok:
             if not self.api:
                 self.api = pta.api(self.getSchema().lower(),
                                    logger=self.logger())
             self.api.log = self.logger()
             #self.emit(Qt.SIGNAL('archive'),attr,modes)
             Qt.QApplication.instance().setOverrideCursor(
                 Qt.QCursor(Qt.Qt.WaitCursor))
             thr = threading.Thread(target=self.startArchiving,
                                    args=(attr, modes))
             # keep only the still-running worker threads, plus the new one
             QLoggerDialog._threads = [
                 t for t in self.threads() if t.is_alive()] + [thr]
             thr.start()
         else:
             self.logger().hide()
     except:
         self.logger().error(traceback.format_exc())
     print('%s: applyModes(...): running!' % (fun.time2str()))
def check_config_files(schema,restart=False,save='',email='',csvfolder=""):
    api = pta.api(schema)
    csv_ats = get_all_config_attrs(schema,csvfolder)

    active = api.get_archived_attributes()
    missing = [a for a in csv_ats if a not in active]
    missread = [a for a in missing if fun.check_attribute(a)]
    msg = '%d CSV(%s) attributes missing in %s (%d readable)' % (
        len(missing), schema, fun.tango.get_tango_host(), len(missread))
    print(msg)
    txt = '\n'.join(sorted(missread))
    print(txt)
    try:
        if save:
            trace('Saving results in %s'%save)
            import pickle
            pickle.dump(missing,open(save,'w'))
        if email and missing:
            fun.linos.sendmail(msg,txt,email) 
    except:
        print(traceback.format_exc())
    return missread
def check_config_files(schema, restart=False, save='', email=''):
    api = pta.api(schema)
    csv_ats = get_all_config_attrs(schema)

    active = api.get_archived_attributes()
    missing = [a for a in csv_ats if a not in active]
    missread = [a for a in missing if fun.check_attribute(a)]
    msg = '%d CSV(%s) attributes missing in %s (%d readable)' % (
        len(missing), schema, fun.tango.get_tango_host(), len(missread))
    print(msg)
    txt = '\n'.join(sorted(missread))
    print(txt)
    try:
        if save:
            trace('Saving results in %s' % save)
            import pickle
            pickle.dump(missing, open(save, 'w'))
        if email and missing:
            fun.linos.sendmail(msg, txt, email)
    except:
        print(traceback.format_exc())
    return missread
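
A usage sketch under the same assumptions as the functions above (pta/fun imported at module level; the pickle path is a placeholder):

    missread = check_config_files('hdb', save='/tmp/hdb_missing.pck')
    print('%d readable CSV attributes are not being archived' % len(missread))
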
def check_archiving_performance(schema='hdb',attributes=[],period=24*3600*90,\
    exclude=['*/waveid','*/wavename','*/elotech-*'],action=False,trace=True):
    import PyTangoArchiving as pta
    import fandango as fn

    ti = fn.now()
    api = pta.api(schema)
    check = dict()
    period = 24*3600*period if period < 1000 else (24*period if period<3600 else period)
    attributes = (fn.get_matching_attributes(attributes) if fn.isString(attributes)
                  else list(map(str.lower, attributes)))
    tattrs = [a for a in api if not attributes or a in attributes]
    excluded = [a for a in tattrs if any(fn.clmatch(e,a) for e in exclude)]
    tattrs = [a for a in tattrs if a not in excluded]

    #Getting Tango devices currently not running
    alldevs = set(t.rsplit('/',1)[0] for t in tattrs if api[t].archiver)
    tdevs = list(filter(fn.check_device, alldevs))
    nodevs = [d for d in alldevs if d not in tdevs]

    #Updating data from archiving config tables
    if not attributes:
      tattrs = sorted(a for a in api if a.rsplit('/',1)[0] in tdevs)
      tattrs = [a for a in tattrs if not any(fn.clmatch(e,a) for e in exclude)]
    print('%d attributes will not be checked (excluded or device not running)'%(len(api)-len(tattrs)))
    
    tarch = sorted(a for a in api if api[a].archiver)
    tnoread = sorted(t for t in tarch if t not in tattrs)
    check.update((t,None) for t in tnoread)

    #Getting attributes archived in the past and not currently active
    tmiss = [t for t in tattrs if not api[t].archiver]
    check.update((t,fn.check_attribute(t,readable=True)) for t in tmiss)
    tmiss = [t for t in tmiss if check[t]]
    tmarray = [t for t in tmiss if fn.isString(check[t].value) or fn.isSequence(check[t].value)]
    tmscalar = [t for t in tmiss if t not in tmarray]
    
    #Getting updated tables from database
    tups = pta.utils.get_table_updates(schema)
    # Some tables do not update MySQL index tables
    t0 = [a for a in tarch if a in tattrs and not tups[api[a].table]]
    check.update((t, fn.check_attribute(t, readable=True)) for t in t0 if not check.get(t))
    t0 = [t for t in t0 if check[t]]
    print('%d/%d archived attributes have indexes not updated ...'%(len(t0),len(tarch)))
    if t0 and len(t0)<100: 
      vs = api.load_last_values(t0);
      tups.update((api[t].table,api[t].last_date) for t in t0)
    tnotup = [a for a in tarch if tups[api[a].table]<fn.now()-1800]
    check.update((t,1) for t in tarch if t not in tnotup)
    
    #Updating readable attributes (all updated are considered as readable)
    tread = sorted(t for t in tattrs if t not in tnoread)
    for t in tattrs:
      if t not in check:
        check[t] = fn.check_attribute(t,readable=True)
    tread = sorted(t for t in tattrs if check[t])
    tnoread.extend(t for t in tread if not check[t])
    tnoread = sorted(set(tnoread))
          
    #tread contains all readable attributes from devices with some attribute archived
    #tnoread contains all unreadable attributes from already archived

    #Calculating all final stats
    #tok will be all archivable attributes that are archived
    #tnotup = [a for a in tnotup if check[a]]
    #tok = [t for t in tread if t in tarch and t not in tnotup]
    tok = [t for t in tarch if t not in tnotup]
    readarch = [a for a in tread if a in tarch]
    treadnotup = [t for t in readarch if t in tnotup] #tnotup contains only data from tarch
    tokread = [t for t in readarch if t not in tnotup] #Useless, all archived are considered readable
    tarray = [t for t in tarch if check[t] and get_attribute_pytype(t) in (str,list)]
    removed = [a for a in tattrs if not api[a].archiver and tups[api[a].table]>fn.now()-period]
    
    result = fn.Struct()
    result.Excluded = excluded
    result.Schema = schema
    result.All = api.keys()
    result.Archived = tarch
    result.Readable = tread
    result.ArchivedAndReadable = readarch
    result.Updated = tok #tokread
    result.Lost = treadnotup
    result.Removed = removed
    result.TableUpdates = tups
    result.NotUpdated = tnotup
    result.Missing = tmiss
    result.MissingScalars = tmscalar
    result.MissingArrays = tmarray
    result.ArchivedArray = tarray
    result.Unreadable = tnoread
    result.DeviceNotRunning = nodevs
    
    get_ratio = lambda a,b:float(len(a))/float(len(b))
    
    result.ArchRatio = get_ratio([t for t in readarch if t not in tnotup],readarch)
    result.ReadRatio = get_ratio(result.Readable,tattrs)
    result.LostRatio = get_ratio([a for a in tread if a in tnotup],tread)
    result.MissRatio = get_ratio([a for a in tread if a not in tarch],tread)
    result.OkRatio = 1.0-result.LostRatio-result.MissRatio
    
    result.Summary = '\n'.join((
      ('Checking archiving of %s attributes'%(len(attributes) if attributes else schema))
      ,('%d attributes in %s, %d are currently active'%(len(api),schema,len(tarch)))
      ,('%d devices with %d archived attributes are not running'%(len(nodevs),len([a for a in api if a.rsplit('/',1)[0] in nodevs])))
      ,('%d archived attributes (%2.1f %%) are unreadable! (check and remove)'%(len(tnoread),1e2*get_ratio(tnoread,tarch)))
      ,('%d readable attributes are not archived'%(len(tmiss)))
      ,('%d attributes (readable or not) are updated (%2.1f %% of all readables)'%(len(tok),1e2*result.OkRatio))
      ,('-'*80)
      ,('%d archived attributes (readable or not) are not updated!'%len(tnotup))
      ,('%d archived and readable attributes are not updated! (check and restart?)'%len(treadnotup))
      ,('-'*80)
      ,('%d readable attributes have been removed in the last %d days!'%(len(removed),period/(24*3600)))
      ,('%d readable scalar attributes are not being archived (not needed anymore?)'%len(tmscalar))
      ,('%d readable array attributes are not being archived (Ok)'%len(tmarray))
      ,('%d readable array attributes are archived (Expensive)'%len(tarray))
      ,('')))
    
    if trace: print(result.Summary)
    print('%d readable lost,Ok = %2.1f%%, %2.1f %% over all Readables (%2.1f %% of total)'%\
        (len(treadnotup),1e2*result.ArchRatio,1e2*result.OkRatio,1e2*result.ReadRatio))

    if action:
        print('NO ACTIONS ARE GONNA BE EXECUTED, AS THESE ARE ONLY RECOMMENDATIONS')
        print("""
        api = PyTangoArchiving.ArchivingAPI('%s')
        lostdevs = sorted(set(api[a].archiver for a in result.NotUpdated))
        print(lostdevs)
        if lostdevs < a_reasonable_number:
          astor = fn.Astor()
          astor.load_from_devs_list(lostdevs)
          astor.stop_servers()
          fn.time.sleep(10.)
          astor.start_servers()
        """%schema)
        
    if trace: print('finished in %d seconds'%(fn.now()-ti))
        
    return result 
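
A minimal sketch of inspecting the returned Struct; the field names come from the assignments above and the schema/period are placeholders:

    result = check_archiving_performance('hdb', period=30 * 24 * 3600,
                                         trace=True)
    # Lost = archived and readable, but its table is not being updated
    for attr in sorted(result.Lost):
        print(attr)
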
def check_schema_information(schema, restart=False, save='', email=''):
    trace('In check_schema_information(%s,restart=%s,save=%s)' %
          (schema, restart, save))

    api = pta.api(schema, load=True)
    active = api.get_archived_attributes()
    idle = get_idle_servers(api)
    updates = get_table_updates(api)
    get_time_limit = lambda attr: time.time() - max(
        (MAX_ERROR, api[attr].modes.get('MODE_P', [60000])[0] / 1000.
         if 'sqlserver' not in attr else 86400))
    exclude = [] if 'ctbl' in api.host else [
        'sys', 'setpoint', 'wavename', 'waveid', 'bpm-acq', 'elotech', 'bake',
        'temp', 'errorcode'
    ]
    tread, nread = 0, 0

    #Get all attributes updated in information_schema
    updated = [a for a in api if updates[api[a].table] > get_time_limit(a)]

    #Get values for attributes with no info
    attrs0 = [a for a in active if not updates[api[a].table]]
    if attrs0:
        trace('%s tables have no update_time' % len(attrs0))
        for s in (ss for ss in api.servers
                  if 'archiver' in ss.lower() and ss not in idle):
            devs = api.servers[s].get_device_list()
            for a in (aa for aa in attrs0 if api[aa].archiver in devs):
                t0 = time.time()
                api.load_last_values(a)
                tread += (time.time() - t0)
                nread += 1
                if api[a].last_date > get_time_limit(a):
                    updated.append(a)
                elif not any(e in a for e in exclude):
                    #Server already marked, so we won't need to continue querying
                    break

    if not nread:
        import random
        for i in range(100):
            t0 = time.time()
            api.load_last_values(active[random.randint(0, len(active) - 1)])
            tread += (time.time() - t0)
        nread = 100

    t1read = float(tread) / nread
    print('t1read: %f' % t1read)

    #BECAUSE ALL BPMS DEPRECATED ARE STILL DEDICATED
    excluded = [
        a for a in active if a not in updated and any(e in a for e in exclude)
    ]
    shouldbe = [
        a for a in get_assigned_attributes(api, dedicated=False)
        if a not in excluded
    ]

    if 'ctbl' in api.host:
        shouldbe = sorted(
            set(shouldbe + get_deactivated_attributes(api, updates)))

    lost = [a for a in shouldbe if a not in updated and fun.check_attribute(a)]
    depr = [a for a in lost if not api[a].archiver]
    msg = '%s: %d/%d/%d attributes updated (%s ignored, %s lost)' % (
        schema.upper(), len(updated), len(shouldbe), len(api), len(excluded),
        len(lost))
    trace(msg)
    marked = get_attributes_servers(lost, api=api)
    marked.update(idle)

    if excluded:
        print('ignored : %s' % ','.join(excluded))
    print('')

    txt = ''
    if depr:
        txt += ('%d attributes should be reloaded ...' % len(depr))
        txt += '\n' + ','.join(depr)

    if marked:
        txt += '%d servers should be restarted ...' % len(marked)
        txt += '\n'.join('\t%s:%s' % (s, ' '.join(marked[s]))
                         for s in sorted(marked))
    trace(txt)

    print('')
    result = {
        'updates': updates,
        'active': active,
        'shouldbe': shouldbe,
        'lost': lost,
        'marked': marked,
        'excluded': excluded,
        'tread': tread,
        'nread': nread,
        't1read': t1read
    }
    print('nread: %s, tread: %s, t1read: %s' % (nread, tread, t1read))

    try:
        if save:
            trace('Saving results in %s' % save)
            import pickle
            pickle.dump(result, open(save, 'w'))
        if email and len(lost) > 5:
            fun.linos.sendmail(msg, txt, email)
    except:
        print(traceback.format_exc())

    if restart:
        if depr:
            api.start_archiving(depr)
        for s in sorted(marked):
            restart_server(s, api=api)
        trace('Archiving check finished, %d servers restarted' % len(marked))
    return marked
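
A hedged example of acting on the returned dictionary of servers marked for restart (its shape is inferred from the join used in the report above):

    marked = check_schema_information('hdb', save='/tmp/hdb_check.pck')
    for server in sorted(marked):
        print('%s -> %s' % (server, ' '.join(marked[server])))
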
def check_db_schema(schema, tref=None):

    r = fn.Struct()
    r.api = api = pta.api(schema)
    r.tref = fn.notNone(tref, fn.now() - 3600)

    r.attrs = api.keys()
    r.on = api.get_archived_attributes()
    r.off = [a for a in r.attrs if a not in r.on]
    if schema in ('tdb', 'hdb'):
        ups = api.db.get_table_updates()
        r.vals = dict((k, (ups[api[k].table], None)) for k in r.on)
    else:
        r.vals = dict(fn.kmap(api.load_last_values, r.on))
        r.vals = dict((k, v and list(v.values())[0]) for k, v in r.vals.items())

    dups = fn.defaultdict(list)
    if getattr(api, 'dedicated', None):
        [
            dups[a].append(k) for a in r.on for k, v in api.dedicated.items()
            if a in v
        ]
        nups = [a for a, v in dups.items() if len(v) <= 1]
        [dups.pop(a) for a in nups]
    r.dups = dict(dups)

    # Get all updated attributes
    r.ok = [a for a, v in r.vals.items() if v and v[0] > r.tref]
    # Try to read not-updated attributes
    r.check = dict((a, fn.check_attribute(a)) for a in r.on if a not in r.ok)
    r.nok, r.stall, r.noev, r.lost, r.evs = [], [], [], [], {}
    # Method to compare numpy values
    fbool = lambda x: all(x) if fn.isSequence(x) else bool(x)

    for a, v in r.check.items():
        # Get current value/timestamp
        vv, t = getattr(v, 'value', v), getattr(v, 'time', 0)
        t = t and fn.ctime2time(t)

        if isinstance(vv, (type(None), Exception)):
            # attribute is not readable
            r.nok.append(a)
        elif r.vals[a] and 0 < t <= r.vals[a][0]:
            # attribute timestamp doesn't change
            r.stall.append(a)
        elif r.vals[a] and fbool(vv == r.vals[a][1]):
            # attribute value doesn't change
            r.stall.append(a)
        else:
            r.evs[a] = fn.tango.check_attribute_events(a)
            if not r.evs[a]:
                # attribute doesn't send events
                r.noev.append(a)
            else:
                # archiving failure (events or polling)
                r.lost.append(a)

    # SUMMARY
    print(schema)
    for k in 'attrs on off dups ok nok noev stall lost'.split():
        print('\t%s:\t:%d' % (k, len(r.get(k))))

    return r
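
A short sketch of reading the summary fields of the Struct returned above; the schema and the one-hour window are placeholders:

    r = check_db_schema('hdb', tref=fn.now() - 3600)
    print('%d archived, %d updated, %d stalled, %d unreadable'
          % (len(r.on), len(r.ok), len(r.stall), len(r.nok)))
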
def check_db_schema(schema, attributes = None, values = None,
                    tref = -12*3600, n = 1, filters = '*', export = 'json',
                    restart = False, subscribe = False):
    """
    tref is the time that is considered updated (e.g. now()-86400)
    n is used to consider multiple values
    
    attrs: all attributes in db
    on: archived
    off: in db but not currently archived
    ok: updated   
    
    known error causes (attrs not lost but not updated):
    
    nok: attributes are not currently readable
    noevs: attributes not sending events
    novals: attributes never recorded a value
    stall: not updated, but current value matches archiving
    lost: not updated, and values doesn't match with current
    """
    
    t0 = fn.now()
    if hasattr(schema,'schema'):
        api, schema = schema, schema.schema
    else:
        api = pta.api(schema)

    r = fn.Struct(api=api,schema=schema)    
    if isString(tref): 
        tref = fn.str2time(tref)
    r.tref = fn.now()+tref if tref < 0 else tref
    r.attrs = [a for a in (attributes or api.get_attributes())
                if fn.clmatch(filters,a)]
    print('check_db_schema(%s,attrs[%s],tref="%s",export as %s)' 
          % (schema,len(r.attrs),fn.time2str(r.tref),export))
    
    if restart and schema!='hdbpc':
        archs = [a for a in api.get_archivers() if not fn.check_device(a)]
        if archs:
            try:
                print('Restarting archivers: %s' % str(archs))
                astor = fn.Astor(archs)
                astor.stop_servers()
                astor.start_servers()
            except:
                traceback.print_exc()
        
        stopped = api.get_stopped_attributes()
        print('Restarting %d stopped attributes' % len(stopped))
        api.restart_attributes(stopped)
    
    r.on = [a for a in api.get_archived_attributes() if a in r.attrs]
    r.off = [a for a in r.attrs if a not in r.on]
    
    r.archs = fn.defaultdict(list)
    r.pers = fn.defaultdict(list)
    r.values = load_schema_values(api,r.on,values,n,tref=tref)
    
    if schema in ('tdb','hdb'):
        [r.archs[api[k].archiver].append(k) for k in r.on]
    else:
        r.rvals = r.values
        r.freq, r.values = {}, {}
        for k,v in r.rvals.items():
            try:
                if n > 1:
                    v = v[0] if isSequence(v) and len(v) else v
                    r.values[k] = v[0] if isSequence(v) and len(v) else v
                    r.freq[k] = v and float(len(v))/abs(v[0][0]-v[-1][0])
                else:
                    r.values[k] = v
            except Exception as e:
                print(k,v)
                print(fn.except2str())
                
        for k in api.get_archivers():
            r.archs[k] = api.get_archiver_attributes(k)
        for k in api.get_periodic_archivers():
            r.pers[k] = api.get_periodic_archivers_attributes(k)

    # Get all updated attributes
    r.ok = [a for a,v in r.values.items() if v and v[0] > r.tref]
    # Try to read not-updated attributes
    r.check = dict((a,fn.check_attribute(a)
                    ) for a in r.on if a not in r.ok)
    #r.novals = [a for a,v in r.values.items() if not v]
    r.nok, r.stall, r.noevs, r.lost, r.novals, r.evs, r.rem = [],[],[],[],[],{},[]
    # Method to compare numpy values
    
    for a,v in r.check.items():
        state = check_archived_attribute(a, v, default=CheckState.LOST, 
            cache=r, tref=r.tref, 
            check_events = subscribe and not api.is_periodic_archived(a))
        {
            #CheckState.ON : r.on,
            #CheckState.OFF : r.off,
            CheckState.OK : r.ok, #Shouldn't be any ok in check list               
            CheckState.NO_READ : r.nok,
            CheckState.STALL : r.stall,
            CheckState.NO_EVENTS : r.noevs,
            CheckState.LOST : r.lost,
            CheckState.UNK : r.novals,
         }[state].append(a)
                
    # SUMMARY
    r.summary = schema +'\n'
    r.summary += ','.join(
        """on: archived
        off: not archived
        ok: updated   
        nok: not readable
        noevs: no events
        novals: no values
        stall: not changing
        lost: not updated
        """.split('\n'))+'\n'
    
    getline = lambda k,v,l: '\t%s:\t:%d\t(%s)' % (k,len(v),l)
    
    r.summary += '\n\t%s:\t:%d\tok+stall: %2.1f %%' % (
        'attrs',len(r.attrs),
        (100.*(len(r.ok)+len(r.stall))/(len(r.on) or 1e12)))
    r.summary += '\n\t%s/%s:\t:%d/%d' % (
        'on','off',len(r.on),len(r.off))
    #if r.off > 20: r.summary+=' !!!'
    r.summary += '\n\t%s/%s:\t:%d/%d' % (
        'ok','nok',len(r.ok),len(r.nok))
    if len(r.nok) > 10: 
        r.summary+=' !!!'
    r.summary += '\n\t%s/%s:\t:%d/%d' % (
        'noevs','novals',len(r.noevs),len(r.novals))
    if len(r.novals) > 1: 
        r.summary+=' !!!'
    r.summary += '\n\t%s/%s:\t:%d/%d' % (
        'lost','stall',len(r.lost),len(r.stall))
    if len(r.lost) > 1: 
        r.summary+=' !!!'
    r.summary += '\n'
        
    r.archivers = dict.fromkeys(api.get_archivers())
    for d in sorted(r.archivers):
        r.archivers[d] = api.get_archiver_attributes(d)
        novals = [a for a in r.archivers[d] if a in r.novals]   
        lost = [a for a in r.archivers[d] if a in r.lost]
        if (len(novals)+len(lost)) > 2:
            r.summary += ('\n%s (all/novals/lost): %s/%s/%s' 
                % (d,len(r.archivers[d]),len(novals),len(lost)))
            
    if hasattr(api,'get_periodic_archivers'):
        r.periodics = dict.fromkeys(api.get_periodic_archivers())
        for d in sorted(r.periodics):
            r.periodics[d] = api.get_periodic_archiver_attributes(d)
            novals = [a for a in r.periodics[d] if a in r.novals]
            lost = [a for a in r.periodics[d] if a in r.lost]
            if len(novals)+len(lost) > 2:
                r.summary += ('\n%s (all/novals/lost): %s/%s/%s' % 
                    (d,len(r.periodics[d]),len(novals),len(lost)))
        
        r.perattrs = [a for a in r.on if a in api.get_periodic_attributes()]
        r.notper = [a for a in r.on if a not in r.perattrs]
        
        
    r.summary += '\nfinished in %d seconds\n\n'%(fn.now()-t0)
    print(r.summary)
    
    if restart:
        try:
            retries = r.lost+r.novals+r.nok
            print('restarting %d attributes' % len(retries))
            api.restart_attributes(retries)
        except:
            traceback.print_exc()
    
    if export is not None:
        if export is True:
            export = 'txt'
        for x in (export.split(',') if isString(export) else export):
            if x in ('json','pck','pickle','txt'):
                x = '/tmp/%s.%s' % (schema,x)
            print('Saving %s file with keys:\n%s' % (x,r.keys()))
            if 'json' in x:
                fn.dict2json(r.dict(),x)
            else:
                f = open(x,'w')
                if 'pck' in x or 'pickle' in x:
                    pickle.dump(r.dict(),f)
                else:
                    f.write(fn.dict2str(r.dict()))
                f.close()     
                
    for k,v in r.items():
        if fn.isSequence(v):
            r[k] = sorted(v)
                
    return r
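
A hedged call sketch for the extended checker above; the schema name is a placeholder and export='json' writes /tmp/<schema>.json as shown in the code:

    r = check_db_schema('hdbpp', tref=-12 * 3600, export='json',
                        restart=False, subscribe=False)
    print('%d lost, %d stalled, %d without events'
          % (len(r.lost), len(r.stall), len(r.noevs)))
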
    def get_attributes_errors(self,
                              regexp='*',
                              timeout=3 * 3600,
                              from_db=False,
                              extend=False):
        """
        Returns a dictionary {attribute, error/last value}
        
        If from_db=True and extend=True, it performs a full attribute check
        """
        if regexp == '*':
            self.status = fn.defaultdict(list)
        if from_db or extend:
            timeout = fn.now() - timeout
            attrs = self.get_attributes(True)
            attrs = fn.filtersmart(attrs, regexp)
            print('get_attributes_errors([%d/%d])' %
                  (len(attrs), len(self.attributes)))
            vals = self.load_last_values(attrs)
            # iterate over a copy; entries may be popped from vals below
            for a, v in list(vals.items()):
                if v and v[0] > timeout:
                    self.status['Updated'].append(a)
                    if v[1] is not None:
                        self.status['Readable'].append(a)
                    else:
                        rv = fn.read_attribute(a)
                        if rv is not None:
                            self.status['WrongNone'].append(a)
                        else:
                            self.status['None'].append(a)
                    vals.pop(a)

            if not extend:
                self.status['NotUpdated'] = vals.keys()
            else:
                for a, v in vals.items():
                    c = fn.check_attribute(a)
                    if c is None:
                        vals[a] = 'Unreadable'
                        self.status['Unreadable'].append(a)
                    elif isinstance(c, Exception):
                        vals[a] = str(c)
                        self.status['Exception'].append(a)
                    else:
                        ev = fn.tango.check_attribute_events(a)
                        if not ev:
                            vals[a] = 'NoEvents'
                            self.status['NoEvents'].append(a)
                        else:
                            d = self.get_attribute_archiver(a)
                            e = self.get_archiver_errors(d)
                            if a in e:
                                vals[a] = e[a]
                                self.status['ArchiverError'].append(a)
                            else:
                                rv = fn.read_attribute(a)
                                if v and str(rv) == str(v[1]):
                                    vals[a] = 'NotChanged'
                                    self.status['NotChanged'].append(a)
                                else:
                                    self.status['NotUpdated'].append(a)

            if regexp == '*':
                for k, v in self.status.items():
                    print('%s: %s' % (k, len(v)))

            return vals
        else:
            # Should inspect the Subscribers Error Lists
            vals = dict()
            for d in self.get_archivers():
                err = self.get_archiver_errors(d)
                for a, e in err.items():
                    if fn.clmatch(regexp, a):
                        vals[a] = e
            return vals
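
A usage sketch for the method above; api stands for whatever instance of this archiving reader class is already available (hypothetical name), and the attribute filter is a placeholder:

    errors = api.get_attributes_errors('*/vc/*', from_db=True, extend=True)
    for attr, err in sorted(errors.items()):
        print('%s: %s' % (attr, err))
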