Ejemplo n.º 1
0
def get_attributes_row_counts(db, attrs='*', start=-86400, stop=-1, limit=0):
    """
    DUPLICATED BY HDBPP.get_attribute_rows !!!

    Return a dict mapping each matching attribute to the number of rows it
    recorded within the [start, stop] period; only attributes with at least
    $limit rows are kept::

      countsrf = get_attributes_row_counts('hdbrf',start=-3*86400,limit=20000)
    """
    if fn.isString(db):
        db = pta.api(db)
    if not fn.isString(start):
        start = fn.time2str(start)
    if not fn.isString(stop):
        stop = fn.time2str(stop)

    if fn.isString(attrs):
        attrs = [a for a in db.get_attributes() if fn.clmatch(attrs, a)]

    counts = {}
    for attr in attrs:
        attr_id, _, table = db.get_attr_id_type_table(attr)
        rows = db.Query(
            "select count(*) from %s where att_conf_id = %d"
            " and data_time between '%s' and '%s'"
            % (table, attr_id, start, stop))
        n = rows[0][0] if len(rows) else 0
        if n >= limit:
            counts[attr] = n
    return counts
Ejemplo n.º 2
0
def get_attributes_row_counts(db,attrs='*',start=-86400,stop=-1,limit=0):
    """
    DUPLICATED BY HDBPP.get_attribute_rows !!!

    For every attribute matching *attrs*, count the rows archived between
    *start* and *stop*; keep only those with at least *limit* rows::

      countsrf = get_attributes_row_counts('hdbrf',start=-3*86400,limit=20000)
    """
    api = pta.api(db) if fn.isString(db) else db
    t0 = start if fn.isString(start) else fn.time2str(start)
    t1 = stop if fn.isString(stop) else fn.time2str(stop)

    if fn.isString(attrs):
        attrs = [a for a in api.get_attributes() if fn.clmatch(attrs, a)]

    result = {}
    for attr in attrs:
        aid, _type, table = api.get_attr_id_type_table(attr)
        query = ("select count(*) from %s where att_conf_id = %d"
                 " and data_time between '%s' and '%s'" % (table, aid, t0, t1))
        rows = api.Query(query)
        count = rows[0][0] if len(rows) else 0
        if count >= limit:
            result[attr] = count
    return result
Ejemplo n.º 3
0
 def getApi(k, schema):
     """
     Return an archiving API object for the given schema.

     '*' returns a generic PyTangoArchiving Reader; a schema name is
     resolved through k.getSchema() and its 'api' entry instantiated;
     any other object is assumed to already be an api and returned as-is.
     """
     if schema == '*':
         import PyTangoArchiving.reader
         return PyTangoArchiving.reader.Reader()
     if not fn.isString(schema):
         return schema
     cfg = k.getSchema(schema)
     if cfg is None:
         return None
     api = cfg.get('api', 'PyTangoArchiving.ArchivingAPI')
     if fn.isString(api):
         api = k._load_object(api, cfg)
     if isinstance(api, type):
         return api(cfg['schema'])
     return api
Ejemplo n.º 4
0
def mysqldump_by_date(schema, user, passwd, folder, start, stop,
                      compress = True, delete = True):
    """
    This method creates a backup between selected dates for each table 
    of the selected database.
    
    All dump files are exported to the same folder, and a compressed file
    is created at the end.
    
    Deleting of temporary files created (folder/*dmp) must be done manually.

    schema: database name; user/passwd: MySQL credentials
    folder: output directory (created when missing)
    start/stop: date strings or epoch timestamps delimiting the dumped rows
    compress: pack all .dmp files into a single tgz at the end
    delete: remove the .dmp files afterwards (only applied if compress)
    Returns the last filename written (the tgz when compress is True).
    """
    print('mysqldump_by_date(%s,,,folder=%s,%s,%s,compress=%s,delete=%s)'
          % (schema, folder, start, stop, compress, delete))
    db = FriendlyDB(schema,user=user,passwd=passwd)
    # debug echo of the raw start/stop arguments before normalization
    t,e = start,stop
    print(t,e)
    start = start if fn.isString(start) else fn.time2str(start)
    stop = stop if fn.isString(stop) else fn.time2str(stop)
    tables = db.getTables()

    print('mysqldump_by_date(%s): %d tables to backup between %s and %s' 
          % (schema,len(tables),start,stop))

    if not os.path.isdir(folder):
        print('mkdir %s' % folder)
        os.mkdir(folder)
        
    for t in tables:
        # one dump file per table, tagged with the date (not time) bounds
        filename = ('%s/%s-%s-%s-%s.dmp' 
            % (folder,schema,t,start.split()[0],stop.split()[0]))
        cols = db.getTableCols(t)
        # restrict rows by date only when the table has a time-like column
        col = [c for c in ('time','data_time') if c in cols] 
        if col:
            where = " %s >= '%s' and %s < '%s' " % (col[0],start,col[0],stop)
        else:
            where = ""
        mysqldump(schema,user,passwd,filename,t,where)
        
    # mark the archive as partial when the stop date lies in the future
    ext = ('part.' if fn.str2time(stop) > fn.now() else '') + 'tgz'
    if compress:
        filename = ('%s/%s-%s-%s.%s' 
            % (folder,schema,start.split()[0],stop.split()[0],ext))
        cmd = 'tar zcvf %s %s/*.dmp' % (filename,folder)
        print(cmd)
        fn.linos.shell_command(cmd)
    if compress and delete:
        cmd = 'rm -rf %s/*.dmp' % folder
        print(cmd)
        fn.linos.shell_command(cmd)
    return filename
Ejemplo n.º 5
0
    def getReader(k, schema, dct=None):
        """
        Build (but do not cache) a reader object from the Schema config.

        k: the Schemas container; schema: schema name or config dict;
        dct: optional pre-fetched schema config dictionary.
        Returns the reader instance, or None when instantiation failed.
        """
        # This method initializes a reader object from Schema config
        # It does not update the Schema object, just returns a reader

        dct = dct if dct is not None else k.getSchema(
            schema if fn.isString(schema) else schema.get('schema'))
        rd = dct.get('reader', dct.get('api'))

        if rd and isinstance(rd, str):
            try:
                #print('Schemas.getReader(%s): instantiating reader' % schema)

                rd = k._load_object(rd, dct)
                #print('getReader(%s): %s' % (schema,type(rd)))
                # patch in the minimal Reader API expected by callers
                if not hasattr(rd, 'is_attribute_archived'):
                    rd.is_attribute_archived = lambda *a, **k: True
                if not hasattr(rd, 'get_attributes'):
                    rd.get_attributes = lambda *a, **k: []
                if not hasattr(rd, 'get_attribute_values'):
                    # NOTE(review): dct['method'] raises KeyError when the key
                    # is missing; the except below swallows it and discards
                    # the whole reader -- confirm whether that is intended
                    if dct['method']:
                        rd.get_attribute_values = getattr(rd, dct['method'])
                if not hasattr(rd, 'schema'):
                    rd.schema = schema
            except:
                print('getReader(%s) failed!' % schema)
                #traceback.print_exc()
                rd = None

        return rd
Ejemplo n.º 6
0
def get_average_read_time(api='hdb',period=10*3600*24,N=100):
    """
    Measure average read performance of the archiving reader.

    Reads up to N randomly chosen archived attributes (skipping data
    types 1 and 8) over the last *period* seconds and returns the tuple
    (attributes_read, avg_values_per_attribute, avg_seconds_per_read);
    the averages are False when nothing could be read.
    """
    if fn.isString(api):
        import PyTangoArchiving
        api = PyTangoArchiving.ArchivingAPI(api)
    reader = api.get_reader()
    # candidate attributes, excluding data types 1/8
    # (presumably non-numeric types -- TODO confirm against the type enum)
    active = [a for a in api.get_archived_attributes() if api[a].data_type not in (1,8)]
    # shuffle and take twice N candidates, as some reads may fail/be empty
    target = [active[i] for i in fn.randomize(range(len(active)))][:int(2*N)]
    stats = []
    navg,tavg,count = 0,0,0
    print('testing %s %s attributes'%(len(target),api.schema))
    for t in target:
        if count == N: 
            break
        t0 = time.time()
        try: 
            vs = reader.get_attribute_values(t,time.time()-period-3600,time.time()-3600)
            if not len(vs): 
                continue
        except: 
            continue
        t1 = time.time()-t0
        if not count%10:
            print(count,':',t,len(vs),t1)
        navg += len(vs)
        tavg += t1
        count += 1
        stats.append((t1,len(vs),t))
    N = float(count)
    print('Worst tread were: \n%s'%'\n'.join(map(str,sorted(stats)[-10:])))
    return (N, (N>0 and navg/N),(N>0 and tavg/N))
Ejemplo n.º 7
0
 def get_submodules(self, module=None, nr=0):
     """
     Recursively collect the dotted names of the submodules of *module*.

     module: a module object or an importable name (defaults to self.module)
     nr: recursion depth guard; descent stops at depth 10

     Returns a set of 'package.submodule' names.

     BUGFIX: the original never returned the computed set (it implicitly
     returned None), which also made the recursive result.union() calls
     fail; py2-only syntax (print statement, 'except E, e') modernized.
     """
     print('get_submodules(%s)' % module)
     try:
         module = module or self.module
         if fn.isString(module):
             m = self.load_module(module)
         else:
             m, module = module, module.__name__
         result = set()
         l = getattr(m, '__test__', dir(m))
         print(m, l)
         l = list(l)
         for o in l:
             o = o.split('.')[-1]
             n = getattr(m, o)
             # only descend into real submodules of this same package
             if self.is_module(n) and module == n.__package__:
                 o = module + '.' + o
                 result.add(o)
                 if nr < 10:
                     result = result.union(self.get_submodules(n,
                                                               nr=nr + 1))
         return result
     except Exception:
         print('get_submodules(%s)' % module)
         traceback.print_exc()
         raise
Ejemplo n.º 8
0
def call(args=None,locals_=None):
    """
    Calls a method from local scope parsing a pipe-like argument list
    """
    if args is None:
        import sys
        args = sys.argv[1:]
    f, rest = args[0], args[1:]

    print(f, rest)

    if not isCallable(f):
        scope = locals_ or globals()
        if f == 'help':
            if rest and rest[0] in scope:
                name, obj = rest[0], scope[rest[0]]
                if hasattr(obj, 'func_code'):
                    name = name + str(obj.func_code.co_varnames)
                return '%s:\n%s' % (name, obj.__doc__)
            # no target given: list every callable in scope
            names = [k for k, v in scope.items() if isCallable(v)]
            return '\n'.join(sorted(names, key=str.lower))
        f = scope.get(f, None)
    if all(isString(a) for a in rest):
        rest = map(str2type, rest)
    return f(*rest)
Ejemplo n.º 9
0
def call(args=None, locals_=None):
    """
    Calls a method from local scope parsing a pipe-like argument list
    """
    if args is None:
        import sys
        args = sys.argv[1:]
    target, params = args[0], args[1:]

    print(target, params)

    if not isCallable(target):
        scope = locals_ if locals_ else globals()
        if target == 'help':
            if params and params[0] in scope:
                name = params[0]
                obj = scope[name]
                if hasattr(obj, 'func_code'):
                    name += str(obj.func_code.co_varnames)
                return '%s:\n%s' % (name, obj.__doc__)
            # no topic: list all callables found in the scope
            callables = sorted(
                (k for k, v in scope.items() if isCallable(v)),
                key=str.lower)
            return '\n'.join(callables)
        target = scope.get(target, None)
    if all(isString(p) for p in params):
        params = map(str2type, params)
    return target(*params)
Ejemplo n.º 10
0
def get_average_read_time(api='hdb',period=10*3600*24,N=100):
    """
    Measure average read performance of the archiving reader.

    Reads up to N randomly chosen archived attributes (skipping data
    types 1 and 8) over the last *period* seconds and returns the tuple
    (attributes_read, avg_values_per_attribute, avg_seconds_per_read);
    the averages are False when nothing could be read.
    """
    if fandango.isString(api):
        import PyTangoArchiving
        api = PyTangoArchiving.ArchivingAPI(api)
    reader = api.get_reader()
    # candidate attributes, excluding data types 1/8
    # (presumably non-numeric types -- TODO confirm against the type enum)
    active = [a for a in api.get_archived_attributes() if api[a].data_type not in (1,8)]
    # shuffle and take twice N candidates, as some reads may fail/be empty
    target = [active[i] for i in fandango.randomize(range(len(active)))][:int(2*N)]
    stats = []
    navg,tavg,count = 0,0,0
    print('testing %s %s attributes'%(len(target),api.schema))
    for t in target:
        if count == N: 
            break
        t0 = time.time()
        try: 
            vs = reader.get_attribute_values(t,time.time()-period-3600,time.time()-3600)
            if not len(vs): 
                continue
        except: 
            continue
        t1 = time.time()-t0
        if not count%10:
            print(count,':',t,len(vs),t1)
        navg += len(vs)
        tavg += t1
        count += 1
        stats.append((t1,len(vs),t))
    N = float(count)
    print('Worst tread were: \n%s'%'\n'.join(map(str,sorted(stats)[-10:])))
    return (N, (N>0 and navg/N),(N>0 and tavg/N))
Ejemplo n.º 11
0
 def load_from_device(self, device):
     """
     Load ModbusMap values from a running device server.

     device: a device name or an already created device proxy.
     """
     if fn.isString(device):
         device = fn.tango.get_device(device)
     self.load(fn.tango.get_device_property(device.name(), 'Mapping'))
     for key in self:
         self[key].set(list(getattr(device, key)))
Ejemplo n.º 12
0
    def load_last_values(self,
                         attributes=None,
                         n=1,
                         epoch=None,
                         tref=90 * 86400):
        """
        load_last_values provided to comply with Reader API
        get_last_attribute_values provided to comply with CommonAPI

        Returns a dictionary {attribute: (last_time, last_value)}.

        attributes: attribute name or list (all archived by default)
        n: number of last values to be returned per attribute
        tref: time from which to start searching values
        epoch: end of the window to search values (now by default)
        """
        if attributes is None:
            attributes = self.get_archived_attributes()

        kwargs = {}
        if epoch is not None:
            if fn.isString(epoch):
                epoch = fn.str2time(epoch)
            kwargs['epoch'] = epoch
            # tref above 1e9 is an absolute timestamp, otherwise a span
            kwargs['period'] = (epoch - tref) if tref > 1e9 else abs(tref)

        return dict(
            (a, self.get_last_attribute_values(a, n=n, **kwargs))
            for a in fn.toList(attributes))
def get_deactivated_attributes(api='hdb',
                               updates=None,
                               period=6 * 30 * 24 * 3600):
    """
    Return the attributes that are not being archived although they are
    readable and recorded data within the last *period* seconds (but not
    within the last day).
    """
    if fun.isString(api):
        api = pta.api(api)
    if updates is None:
        updates = get_table_updates(api)
    now = fun.time.time()
    return sorted(
        a for a, t in updates.items()
        if (now - period) < t < (now - 24 * 3600)
        and fun.check_attribute(a))
Ejemplo n.º 14
0
    def checkSchema(k, schema, attribute='', start=None, stop=None):
        """
        Evaluate the schema's 'check' expression to decide whether this
        schema applies to the given attribute and time window.

        Returns the evaluation result; False on error or unknown schema,
        True when the schema defines no check expression at all.
        """
        schema = k.getSchema(schema)
        if not schema: 
            return False
        
        f = schema.get('check')
        if not f: 
            print('%s has no check function' % str(schema))
            return True

        try:
            now = time.time()
            # normalize the window bounds: strings are parsed, None defaults
            # to (now-1, now)
            start = (str2time(start) if fn.isString(start) 
                     else fn.notNone(start,now-1))
            stop = (str2time(stop) if fn.isString(stop) 
                    else fn.notNone(stop,now))
            # expose helper symbols/values to the eval'd check expression
            k.LOCALS.update({'attribute':attribute.lower(),
                    'match':clmatch,'clmatch':clmatch,
                    'overlap':overlap,
                    'time2str':time2str,'str2time':str2time,
                    't2s':time2str,'s2t':str2time,
                    'start':start,'stop':stop,'now':now,
                    'begin':start,'end':stop,'NOW':now,
                    'reader':schema.get('reader',schema.get('api')),
                    'schema':schema.get('schema'),
                    'dbname':schema.get('dbname',schema.get('schema','')),
                    })
            # instantiate reader/api lazily, only if the expression uses them
            if 'reader' in f:
                k.getReader(schema)
            if 'api' in f:
                k.getApi(schema)
                
            #print('In reader.Schemas.checkSchema(%s,%s,%s,%s): %s'
                #% (schema,attribute,start,stop,f))                
            #print('(%s)%%(%s)'%(f,[t for t in k.LOCALS.items() if t[0] in f]))
            v =fn.evalX(f,k.LOCALS,k.MODULES)
        except:
            print('checkSchema(%s,%s) failed!' % (schema,attribute))
            traceback.print_exc()
            v = False

        #print('checkSchema(%s): %s'%(schema,v))
        return v
Ejemplo n.º 15
0
 def generate_partition_name_for_date(self, table, date):
     """
     Generate the matching partition name for the date given.

     date: a '%Y-%m-...' string or a timestamp; the partition name is the
     table prefix plus YYYYMM01. Returns None when the table has no
     configured partition prefix.
     """
     if not fn.isString(date):
         date = fn.time2str(date)
     prefix = partition_prefixes.get(table, None)
     if not prefix:
         return prefix
     year, month = date.split('-')[0:2]
     return prefix + year + month + '01'
def get_idle_servers(api='hdb'):
    """
    Return {server: [first_stopped_device]} for every archiver server
    that has at least one device not running.
    """
    idle = dict()
    if fun.isString(api):
        api = pta.api(api)
    for server, controller in api.servers.items():
        if 'archiver' not in server:
            continue
        for dev in controller.get_device_list():
            if not fun.check_device(dev):
                idle[server] = [dev]
                break
    trace('\t%d servers have idle devices' % len(idle))
    return idle
Ejemplo n.º 17
0
 def getWindow(klass,window):
     """
     Return the tracked window matching *window* (an object or a title);
     None when it is not registered in klass.WINDOWS.
     """
     if not fandango.isString(window):
         return window if window in klass.WINDOWS else None
     for w in klass.WINDOWS:
         try:
             if w.windowTitle() == window:
                 return w
         except:
             pass
     return None
def restart_server(ss, api='hdb', wait=WAIT_TIME):
    """
    Stop and restart the given archiving server(s).

    ss: server name (or list) understood by api.servers
    api: schema name or an already instantiated archiving api
    wait: seconds to sleep after issuing the start command
    Returns ss unchanged.
    """
    # BUGFIX: the original called pta.api('hdb'), ignoring the api argument
    if fun.isString(api):
        api = pta.api(api)
    trace('restart_server(%s)' % ss)
    api.servers.stop_servers(ss)
    time.sleep(STOP_WAIT)
    api.servers.start_servers(ss, wait=0.1)
    last_restart = time.time()  # kept for parity with original (unused here)
    if wait:
        print('\tWaiting %s seconds' % wait)
        time.sleep(wait)
    return ss
Ejemplo n.º 19
0
 def getWindow(klass,window):
     """
     Look up a tracked window either by instance or by its title string.
     Returns None when no registered window matches.
     """
     if fandango.isString(window):
         for candidate in klass.WINDOWS:
             try:
                 if candidate.windowTitle() == window:
                     return candidate
             except:
                 pass
         return None
     return window if window in klass.WINDOWS else None
def restart_server(ss, api='hdb', wait=WAIT_TIME):
    """
    Stop, then start the given archiving server(s) and optionally wait.

    ss: server name(s) understood by api.servers
    api: schema name or an already instantiated archiving api
    wait: seconds to sleep after issuing the start command
    Returns ss unchanged.
    """
    # BUGFIX: original ignored the api argument, always loading pta.api('hdb')
    if fun.isString(api):
        api = pta.api(api)
    trace('restart_server(%s)' % ss)
    api.servers.stop_servers(ss)
    time.sleep(STOP_WAIT)
    api.servers.start_servers(ss, wait=0.1)
    last_restart = time.time()  # kept for parity with original (unused here)
    if wait:
        print('\tWaiting %s seconds' % wait)
        time.sleep(wait)
    return ss
def get_idle_servers(api='hdb'):
    """
    Scan the archiver servers and return a dict {server: [device]} with
    the first non-running device found on each one.
    """
    if fun.isString(api):
        api = pta.api(api)
    idle = {}
    for name, srv in api.servers.items():
        if 'archiver' not in name:
            continue
        for device in srv.get_device_list():
            if not fun.check_device(device):
                idle[name] = [device]
                break
    trace('\t%d servers have idle devices' % len(idle))
    return idle
Ejemplo n.º 22
0
    def get_attr_id_type_table(self, attr):
        """
        Return (id, type, table) for an attribute; (None, None, '') when
        the attribute is unknown even after reloading the config table.
        """
        if fn.isString(attr):
            attr = fn.tango.get_full_name(attr, True).lower()

        try:
            entry = self.attributes[attr]
            return entry.id, entry.type, entry.table
        except:
            # cache miss: reload the att_conf table once and retry
            self.get_att_conf_table.cache.clear()
            self.get_att_conf_table()

            if attr not in self.attributes:
                return None, None, ''
            entry = self.attributes[attr]
            return entry.id, entry.type, entry.table
Ejemplo n.º 23
0
 def setModels(self, args):
     """
     Set the attribute models to display, creating one value widget per
     model. A single string argument is expanded via fn.find_attributes().
     """
     if len(args) == 1:
         args = args[0]
     if fn.isString(args):
         args = fn.find_attributes(args)
     print('setModels(%s)' % args)
     for model in args:
         widget = TaurusSingleValue()
         widget.setModel(model)
         self.main.layout().addWidget(widget)
         self.widgets.append(widget)
     self.setWidget(self.main)
     self.show()
Ejemplo n.º 24
0
def get_table_partitions(api, table, description=''):
    """
    DUPLICATED BY pta.dbs.get_partitions_from_query !!!

    Parse a table description text and return the tuple
    (partition_field, [(partition_name, previous_limit, limit), ...]).
    """
    if fn.isString(api):
        api = pta.api(api)
    if not description:
        description = get_table_description(api, table)
    lines = [ln for ln in description.split('\n') if 'partition' in ln.lower()]
    field = lines[0].split()[-1]
    data = (field, [])
    for i, ln in enumerate(lines[1:]):
        try:
            ln, prev = ln.split(), i and lines[i].split() or [0] * 6
            data[-1].append((ln[1], prev[5], ln[5]))
        except:
            print(fn.except2str())
            print(i, ln, prev)
    return data
Ejemplo n.º 25
0
def get_table_partitions(api,table,description=''):
    """
    DUPLICATED BY pta.dbs.get_partitions_from_query !!!

    Extract (partition_field, [(name, previous_limit, limit), ...]) from
    the textual description of a partitioned table.
    """
    if fn.isString(api):
        api = pta.api(api)
    description = description or get_table_description(api, table)
    part_lines = [row for row in description.split('\n')
                  if 'partition' in row.lower()]
    part_field = part_lines[0].split()[-1]
    result = (part_field, [])
    for idx, row in enumerate(part_lines[1:]):
        try:
            row, prev = row.split(), idx and part_lines[idx].split() or [0] * 6
            result[-1].append((row[1], prev[5], row[5]))
        except:
            print(fn.except2str())
            print(idx, row, prev)
    return result
Ejemplo n.º 26
0
 def get_last_attribute_values(self,table,n,check_table=False,epoch=fn.END_OF_TIME):
     """
     Return the last *n* rows of *table*, newest first.

     Check table set to False as sometimes order of insertion is not the
     same as expected, BE CAREFUL WITH THIS ARGUMENT!
     epoch limits the query to times before it (string or timestamp).
     """
     source, where = table, ''
     if check_table:
         size = self.getTableSize(table)
         if size > 1e3:
             # only scan a small window at the tail of a big table
             window = max((2 * n, 20))
             source = '(select * from %s limit %d,%d)' % (
                 table, size - window, window)
     if fn.isString(epoch):
         epoch = fn.str2time(epoch)
     if epoch not in (None, fn.END_OF_TIME):
         where = " where T.time < '%s' " % (fn.time2str(epoch))
     what = 'SELECT time'
     if 'read_value' in self.getTableCols(table):
         what += ',read_value'
     else:
         what += ',value'
     return self.Query('%s from %s T %s order by T.time desc limit %d' % (
         what, source, where, n))
def get_attributes_servers(attr_list, api='hdb'):
    """
    Group the attributes in attr_list by the server hosting their
    archiver (or dedicated archiver) device.

    Returns a defaultdict {server_name_lowercase: set(attributes)}.
    """
    if fun.isString(api):
        api = pta.api(api)
    api.load_dedicated_archivers(check=False)
    from collections import defaultdict
    devices = defaultdict(set)
    servers = defaultdict(set)
    # map each archiver/dedicated device to the attributes it hosts
    # (plain loops instead of the original side-effect list comprehensions)
    for attr in attr_list:
        for dev in (api[attr].archiver, api[attr].dedicated):
            if dev:
                devices[dev].add(attr)
    # then fold every device's attributes into its host server
    for dev in devices:
        server = api.servers.get_device_server(dev).lower()
        servers[server].update(devices[dev])
    return servers
def get_attributes_servers(attr_list, api='hdb', dedicated=False):
    """
    Group the attributes in attr_list by the server hosting their
    archiver (or dedicated archiver) device.

    dedicated: when True, (re)load the dedicated archivers mapping first.
    Returns a defaultdict {server_name_lowercase: set(attributes)}.
    """
    if fun.isString(api):
        api = pta.api(api)
    if dedicated:
        api.load_dedicated_archivers(check=False)

    devices = defaultdict(set)
    servers = defaultdict(set)

    # map each archiver/dedicated device to the attributes it hosts
    # (plain loops instead of the original side-effect list comprehensions)
    for attr in attr_list:
        for dev in (api[attr].archiver, api[attr].dedicated):
            if dev:
                devices[dev].add(attr)
    for dev in devices:
        servers[api.servers.get_device_server(dev).lower()].update(
            devices[dev])
    return servers
Ejemplo n.º 29
0
 def launch_app(self, app=None):
     """
     Launch the given application (a string, or a Qt item providing
     .text()) inside an xterm, appending the args taken from the dialog's
     args widget; the command is run in background via fn.shell_command.
     """
     args = self.args
     try:
         print('launch_app(%s)' % app)
         if not fn.isString(app):
             if hasattr(app, 'text'):
                 app = str(app.text()).strip()
             else:
                 app = str(app)
         print('launch_app(%s)' % app)
         args = str(args.text()).strip()
         #-hold
         app = "xterm -e %s" % app
         # NOTE(review): `check` is not defined in this scope -- presumably
         # a checkbox widget of the enclosing dialog; verify before reuse
         if check.isChecked():
             args = TF + " " + args
         c = "%s %s &" % (app, args)
         print('launch_app(%s)' % c)
         print('>' * 80)
         fn.shell_command(c)
     except:
         traceback.print_exc()
Ejemplo n.º 30
0
def generate_rest_files(module, path='source'):
    """
    Generate a Sphinx .rst stub for every submodule of *module* under
    *path*, then print the toctree entries to paste into index.rst.

    module: a module object or an importable module name.
    Existing .rst files are never overwritten.
    """
    print('\n' * 5)
    print('Writing documentation settings to %s/*rst' % (path))
    if fandango.isString(module):
        module = fandango.loadModule(module)
    submodules = [(o, v) for o, v in vars(module).items()
                  if inspect.ismodule(v)
                  and v.__name__.startswith(module.__name__)]

    for o, v in submodules:
        filename = path + '/' + o + '.rst'
        if not os.path.isfile(filename):
            print('writing %s' % filename)
            # with-statement closes the file (the original leaked the handle)
            with open(filename, 'w') as f:
                f.write(DEFAULT_MODULE % (
                    v.__name__, '=' * len(v.__name__), v.__name__))

    print('\nWrite this into index.rst:\n')
    print("""
  .. toctree::
     :maxdepth: 2
     
     """ + '\n     '.join([t[0] for t in submodules]))
Ejemplo n.º 31
0
def generate_rest_files(module, path='source'):
    """
    Generate a Sphinx .rst stub for every submodule of *module* under
    *path*, then print the toctree entries to paste into index.rst.

    module: a module object or an importable module name.
    Existing .rst files are never overwritten.
    """
    import fandango
    print('\n' * 5)
    print('Writing documentation settings to %s/*rst' % (path))
    if fandango.isString(module):
        module = fandango.loadModule(module)
    submodules = [(o, v) for o, v in vars(module).items()
                  if inspect.ismodule(v)
                  and v.__name__.startswith(module.__name__)]

    for o, v in submodules:
        filename = path + '/' + o + '.rst'
        if not os.path.isfile(filename):
            print('writing %s' % filename)
            # with-statement closes the file (the original leaked the handle)
            with open(filename, 'w') as f:
                f.write(DEFAULT_MODULE % (
                    v.__name__, '=' * len(v.__name__), v.__name__))

    print('\nWrite this into index.rst:\n')
    print("""
  .. toctree::
     :maxdepth: 2
     
     """ + '\n     '.join([t[0] for t in submodules]))
Ejemplo n.º 32
0
    def check_attributes(self, attrs='', load=False, t0=0):
        """
        Check the reading/updating state of the given attributes.

        attrs: filter string, list of attributes, or dict {attr: values}
        load: if True, load last values for each attribute from the db
        t0: reference timestamp (now by default)

        Returns a dict with keys: attrs, vals, novals, nones, down, lost.
        """
        db, t0, result, vals = self, t0 or fn.now(), {}, {}
        print('Checking %s' % str(db))

        if fn.isDictionary(attrs):
            attrs, vals = attrs.keys(), attrs
            # NOTE(review): vals.values()[0] is py2-only (py3 views are not
            # subscriptable) -- confirm the target interpreter
            if isinstance(vals.values()[0], dict):
                vals = dict((k, v.values()[0]) for k, v in vals.items())
        else:
            if fn.isString(attrs):
                attrs = fn.filtersmart(db.get_attributes(), attrs)
                load = True

        if load:
            [vals.update(db.load_last_values(a)) for a in attrs]

        print('\t%d attributes' % len(attrs))
        result['attrs'] = attrs
        result['vals'] = vals
        result['novals'] = [a for a, v in vals.items() if not v]
        result['nones'], result['down'], result['lost'] = [], [], []
        for a, v in vals.items():
            # BUGFIX: original tested `[1] is None` (always False); the
            # intention was to detect readings whose value is None
            if not v or v[1] is None:
                if not fn.read_attribute(a):  #USE read not check!!
                    result['down'].append(a)
                else:
                    result['novals' if not v else 'nones'].append(a)
            elif v[0] < (t0 - 7200):
                result['lost'].append(a)

        print('\t%d attributes have no values' % len(result['novals']))
        print('\t%d attributes are not readable' % len(result['down']))
        print('\t%d attributes are not updated' % len(result['lost']))
        print('\t%d attributes have None values' % len(result['nones']))

        return result
Ejemplo n.º 33
0
    def check_attributes(self, attrs='', load=False, t0=0):
        """
        Check the reading/updating state of the given attributes.

        attrs: filter string, list of attributes, or dict {attr: values}
        load: if True, load last values for each attribute from the db
        t0: reference timestamp (now by default)

        Returns a dict with keys: attrs, vals, novals, nones, down, lost.
        """
        db, t0, result, vals = self, t0 or fn.now(), {}, {}
        print('Checking %s' % str(db))

        if fn.isDictionary(attrs):
            attrs, vals = attrs.keys(), attrs
            # NOTE(review): vals.values()[0] is py2-only (py3 views are not
            # subscriptable) -- confirm the target interpreter
            if isinstance(vals.values()[0], dict):
                vals = dict((k, v.values()[0]) for k, v in vals.items())
        else:
            if fn.isString(attrs):
                attrs = fn.filtersmart(db.get_attributes(), attrs)
                load = True

        if load:
            [vals.update(db.load_last_values(a)) for a in attrs]

        print('\t%d attributes' % len(attrs))
        result['attrs'] = attrs
        result['vals'] = vals
        result['novals'] = [a for a, v in vals.items() if not v]
        result['nones'], result['down'], result['lost'] = [], [], []
        for a, v in vals.items():
            # BUGFIX: original tested `[1] is None` (always False); the
            # intention was to detect readings whose value is None
            if not v or v[1] is None:
                if not fn.read_attribute(a):  #USE read not check!!
                    result['down'].append(a)
                else:
                    result['novals' if not v else 'nones'].append(a)
            elif v[0] < (t0 - 7200):
                result['lost'].append(a)

        print('\t%d attributes have no values' % len(result['novals']))
        print('\t%d attributes are not readable' % len(result['down']))
        print('\t%d attributes are not updated' % len(result['lost']))
        print('\t%d attributes have None values' % len(result['nones']))

        return result
Ejemplo n.º 34
0
 def get_submodules(self, module=None, nr=0):
     """
     Recursively collect the dotted names of the submodules of *module*.

     module: a module object or an importable name (defaults to self.module)
     nr: recursion depth guard; descent stops at depth 10

     Returns a set of 'package.submodule' names.

     BUGFIX: the original never returned the computed set (it implicitly
     returned None), which also made the recursive result.union() calls
     fail; py2-only syntax (print statement, 'except E, e') modernized.
     """
     print('get_submodules(%s)' % module)
     try:
         module = module or self.module
         if fn.isString(module):
             m = self.load_module(module)
         else:
             m, module = module, module.__name__
         result = set()
         l = getattr(m, '__test__', dir(m))
         print(m, l)
         l = list(l)
         for o in l:
             o = o.split('.')[-1]
             n = getattr(m, o)
             # only descend into real submodules of this same package
             if self.is_module(n) and module == n.__package__:
                 o = module + '.' + o
                 result.add(o)
                 if nr < 10:
                     result = result.union(self.get_submodules(n, nr=nr + 1))
         return result
     except Exception:
         print('get_submodules(%s)' % module)
         traceback.print_exc()
         raise
Ejemplo n.º 35
0
def check_archiving_performance(schema='hdb',attributes=[],period=24*3600*90,\
    exclude=['*/waveid','*/wavename','*/elotech-*'],action=False,trace=True):
    """
    Audit the archiving status of a schema and print a summary report.

    schema: archiving schema name ('hdb', 'tdb', ...)
    attributes: regexp or list restricting the audit (all by default)
    period: look-back window; values below 1000 are taken as days,
        below 3600 as hours, otherwise seconds
    exclude: glob-like patterns of attributes to skip
    action: only prints suggested recovery commands (nothing is executed)
    trace: print the summary and timing at the end

    Returns a fn.Struct with the computed attribute lists and ratios.

    NOTE(review): mutable default arguments (attributes, exclude) are
    shared between calls; the code also relies on py2 semantics
    (filter/map returning lists) -- confirm the target interpreter.
    """
    import PyTangoArchiving as pta
    import fandango as fn

    ti = fn.now()
    api = pta.api(schema)
    check = dict()
    period = 24*3600*period if period < 1000 else (24*period if period<3600 else period)
    attributes = fn.get_matching_attributes(attributes) if fn.isString(attributes) else map(str.lower,attributes)
    tattrs = [a for a in api if not attributes or a in attributes]
    excluded = [a for a in tattrs if any(fn.clmatch(e,a) for e in exclude)]
    tattrs = [a for a in tattrs if a not in excluded]

    #Getting Tango devices currently not running
    alldevs = set(t.rsplit('/',1)[0] for t in tattrs if api[t].archiver)
    tdevs = filter(fn.check_device,alldevs)
    nodevs = [d for d in alldevs if d not in tdevs]

    #Updating data from archiving config tables
    if not attributes:
      tattrs = sorted(a for a in api if a.rsplit('/',1)[0] in tdevs)
      tattrs = [a for a in tattrs if not any(fn.clmatch(e,a) for e in exclude)]
    print('%d attributes will not be checked (excluded or device not running)'%(len(api)-len(tattrs)))
    
    tarch = sorted(a for a in api if api[a].archiver)
    tnoread = sorted(t for t in tarch if t not in tattrs)
    check.update((t,None) for t in tnoread)

    #Getting attributes archived in the past and not currently active
    tmiss = [t for t in tattrs if not api[t].archiver]
    check.update((t,fn.check_attribute(t,readable=True)) for t in tmiss)
    tmiss = [t for t in tmiss if check[t]]
    tmarray = [t for t in tmiss if fn.isString(check[t].value) or fn.isSequence(check[t].value)]
    tmscalar = [t for t in tmiss if t not in tmarray]
    
    #Getting updated tables from database
    tups = pta.utils.get_table_updates(schema)
    # Some tables do not update MySQL index tables
    t0 = [a for a in tarch if a in tattrs and not tups[api[a].table]]
    # NOTE(review): 'a' below is a leftover variable from the previous
    # comprehension; 'check_attribute(a,...)' most likely should be
    # fn.check_attribute(t,...) -- as written, every t gets the same value
    check.update((t,check_attribute(a,readable=True)) for t in t0 if not check.get(t))
    t0 = [t for t in t0 if check[t]]
    print('%d/%d archived attributes have indexes not updated ...'%(len(t0),len(tarch)))
    if t0 and len(t0)<100: 
      vs = api.load_last_values(t0);
      tups.update((api[t].table,api[t].last_date) for t in t0)
    tnotup = [a for a in tarch if tups[api[a].table]<fn.now()-1800]
    check.update((t,1) for t in tarch if t not in tnotup)
    
    #Updating readable attributes (all updated are considered as readable)
    tread = sorted(t for t in tattrs if t not in tnoread)
    for t in tattrs:
      if t not in check:
        check[t] = fn.check_attribute(t,readable=True)
    tread = sorted(t for t in tattrs if check[t])
    tnoread.extend(t for t in tread if not check[t])
    tnoread = sorted(set(tnoread))
          
    #tread contains all readable attributes from devices with some attribute archived
    #tnoread contains all unreadable attributes from already archived

    #Calcullating all final stats
    #tok will be all archivable attributes that are archived
    #tnotup = [a for a in tnotup if check[a]]
    #tok = [t for t in tread if t in tarch and t not in tnotup]
    tok = [t for t in tarch if t not in tnotup]
    readarch = [a for a in tread if a in tarch]
    treadnotup = [t for t in readarch if t in tnotup] #tnotup contains only data from tarch
    tokread = [t for t in readarch if t not in tnotup] #Useless, all archived are considered readable
    tarray = [t for t in tarch if check[t] and get_attribute_pytype(t) in (str,list)]
    removed = [a for a in tattrs if not api[a].archiver and tups[api[a].table]>fn.now()-period]
    
    result = fn.Struct()
    result.Excluded = excluded
    result.Schema = schema
    result.All = api.keys()
    result.Archived = tarch
    result.Readable = tread
    result.ArchivedAndReadable = readarch
    result.Updated = tok #tokread
    result.Lost = treadnotup
    result.Removed = removed
    result.TableUpdates = tups
    result.NotUpdated = tnotup
    result.Missing = tmiss
    result.MissingScalars = tmscalar
    result.MissingArrays = tmarray
    result.ArchivedArray = tarray
    result.Unreadable = tnoread
    result.DeviceNotRunning = nodevs
    
    get_ratio = lambda a,b:float(len(a))/float(len(b))
    
    result.ArchRatio = get_ratio([t for t in readarch if t not in tnotup],readarch)
    result.ReadRatio = get_ratio(result.Readable,tattrs)
    result.LostRatio = get_ratio([a for a in tread if a in tnotup],tread)
    result.MissRatio = get_ratio([a for a in tread if a not in tarch],tread)
    result.OkRatio = 1.0-result.LostRatio-result.MissRatio
    
    result.Summary = '\n'.join((
      ('Checking archiving of %s attributes'%(len(attributes) if attributes else schema))
      ,('%d attributes in %s, %d are currently active'%(len(api),schema,len(tarch)))
      # NOTE(review): a.rsplit('/',1) returns a list, which is never a member
      # of nodevs (a list of strings); a.rsplit('/',1)[0] was probably
      # intended, so this count always reads as 0
      ,('%d devices with %d archived attributes are not running'%(len(nodevs),len([a for a in api if a.rsplit('/',1) in nodevs])))
      ,('%d archived attributes (%2.1f %%) are unreadable! (check and remove)'%(len(tnoread),1e2*get_ratio(tnoread,tarch)))
      ,('%d readable attributes are not archived'%(len(tmiss)))
      ,('%d attributes (readable or not) are updated (%2.1f %% of all readables)'%(len(tok),1e2*result.OkRatio))
      ,('-'*80)
      ,('%d archived attributes (readable or not) are not updated!'%len(tnotup))
      ,('%d archived and readable attributes are not updated! (check and restart?)'%len(treadnotup))
      ,('-'*80)
      ,('%d readable attributes have been removed in the last %d days!'%(len(removed),period/(24*3600)))
      ,('%d readable scalar attributes are not being archived (not needed anymore?)'%len(tmscalar))
      ,('%d readable array attributes are not being archived (Ok)'%len(tmarray))
      ,('%d readable array attributes are archived (Expensive)'%len(tarray))
      ,('')))
    
    if trace: print(result.Summary)
    print('%d readable lost,Ok = %2.1f%%, %2.1f %% over all Readables (%2.1f %% of total)'%\
        (len(treadnotup),1e2*result.ArchRatio,1e2*result.OkRatio,1e2*result.ReadRatio))

    if action:
        print('NO ACTIONS ARE GONNA BE EXECUTED, AS THESE ARE ONLY RECOMMENDATIONS')
        print("""
        api = PyTangoArchiving.ArchivingAPI('%s')
        lostdevs = sorted(set(api[a].archiver for a in result.NotUpdated))
        print(lostdevs)
        if lostdevs < a_reasonable_number:
          astor = fn.Astor()
          astor.load_from_devs_list(lostdevs)
          astor.stop_servers()
          fn.time.sleep(10.)
          astor.start_servers()
        """%schema)
        
    if trace: print('finished in %d seconds'%(fn.now()-ti))
        
    return result 
Ejemplo n.º 36
0
 def check(self, method=None):
     """
     Run a sanity check; *method* may be an SQL string (executed via
     self.Query) or a callable; defaults to self.get_data_types.
     """
     method = method or self.get_data_types
     if not fn.isString(method):
         return method()
     return self.Query(method)
Ejemplo n.º 37
0
def check_archiving_schema(
        schema='hdb',
        attributes=[],values={},
        ti = None,
        period = 7200,
        old_period=24*3600*90,\
        exclude=['*/waveid','*/wavename','*/elotech-*'],
        use_index = True,
        loads = True,
        action=False,
        trace=True,
        export=None):
    """
    Audit the health of an archiving schema and print a detailed report.

    Inspects which attributes are archived, which archiver devices are
    running, and which attributes stopped updating, then returns an
    fn.Struct summarizing the findings (Lost, LostEvents, Updated,
    NotUpdated, Unreadable, ...).

    :param schema: archiving schema name ('hdb', 'tdb', ...)
    :param attributes: optional list or regexp restricting the attributes checked
    :param values: dict of last values, or a .pck/.json filename to load them from
    :param ti: reference time of the check (epoch float or date string; defaults to now)
    :param period: seconds without updates after which an attribute counts as not updated
    :param old_period: look-back window; interpreted as days if <1000, hours if <3600, else seconds
    :param exclude: glob patterns of attributes to skip
    :param use_index: use the MySQL table-update index instead of per-attribute queries
    :param loads: if True, print how many attributes each archiver instance holds
    :param action: '' / 'start_devices' / 'restart_all' corrective action to execute
    :param export: 'json'/'pck'/'txt' (or an explicit filename) to dump the result to disk
    :returns: fn.Struct with the report fields, or None if nothing to check

    NOTE(review): mutable defaults (attributes=[], values={}) are shared
    between calls; callers relying on the defaults accumulate state.
    """
    # Normalize the reference time to an epoch float
    ti = fn.now() if ti is None else str2time(ti) if isString(ti) else ti

    api = pta.api(schema)
    is_hpp = isinstance(api, pta.HDBpp)
    check = dict()
    # Interpret small old_period values as days (<1000) or hours (<3600)
    old_period = 24*3600*old_period if old_period < 1000 \
        else (24*old_period if old_period<3600 else old_period)

    allattrs = api.get_attributes() if hasattr(
        api, 'get_attributes') else api.keys()
    print('%s contains %d attributes' % (schema, len(allattrs)))

    if attributes:
        if fn.isString(attributes) and fn.isRegexp(attributes):
            tattrs = [a for a in allattrs if clsearch(attributes, a)]
        else:
            attributes = map(fn.tango.get_normal_name, fn.toList(attributes))
            # NOTE(review): membership is tested against allattrs, so the
            # requested attribute list never actually filters anything;
            # "in attributes" looks intended -- confirm before fixing.
            tattrs = [
                a for a in allattrs if fn.tango.get_normal_name(a) in allattrs
            ]

    else:
        tattrs = allattrs

    # Drop attributes matching any exclude pattern
    excluded = [a for a in tattrs if any(fn.clmatch(e, a) for e in exclude)]
    tattrs = [a for a in tattrs if a not in excluded]

    print('%d attributes to check' % len(tattrs))
    if not len(tattrs):
        return

    if excluded:
        print('\t%d attributes excluded' % len(excluded))

    # Map attribute -> archiver device currently assigned to it
    archived = {}
    for a in tattrs:
        if hasattr(api, 'get_attribute_archiver'):
            arch = api.get_attribute_archiver(a)
        else:
            arch = api[a].archiver
        if arch:
            archived[a] = arch

    print('\t%d attributes are archived' % len(archived))

    #Getting Tango devices currently not running
    alldevs = set(t.rsplit('/', 1)[0] for t in tattrs)
    #tdevs = filter(fn.check_device,alldevs)
    #nodevs = [fn.tango.get_normal_name(d) for d in alldevs if d not in tdevs]
    #if nodevs:
    #print('\t%d devices are not running' % len(nodevs))

    archs = sorted(set(archived.values()))
    if loads:
        # Report how many attributes each archiver server instance holds
        astor = fn.Astor()
        astor.load_from_devs_list(archs)
        loads = fn.defaultdict(list)
        for k, s in astor.items():
            for d in s.get_device_list():
                d = fn.tango.get_normal_name(d)
                for a in archived:
                    if fn.tango.get_normal_name(archived[a]) == d:
                        loads[k].append(a)
        for k, s in sorted(loads.items()):
            print('\t%s archives %d attributes' % (k, len(s)))

    # Archiver devices that are assigned but not responding
    noarchs = [
        fn.tango.get_normal_name(d) for d in archs if not fn.check_device(d)
    ]
    if noarchs:
        print('\t%d archivers are not running: %s' % (len(noarchs), noarchs))

    ###########################################################################
    # Obtain last values: from file, from the database, or from index tables

    if isString(values) and values.endswith('.pck'):
        print('\nLoading last values from %s file\n' % values)
        import pickle
        values = pickle.load(open(values))

    elif isString(values) and values.endswith('.json'):
        print('\nLoading last values from %s file\n' % values)
        values = fn.json2dict(values)

    elif not use_index or is_hpp:
        print('\nGetting last values ...\n')
        for a in tattrs:
            values[a] = api.load_last_values(a)

    else:
        print('\nGetting updated tables from database ...\n')
        tups = pta.utils.get_table_updates(schema)
        # Some tables do not update MySQL index tables
        # NOTE(review): 'tarch' is not defined anywhere in this function,
        # so this branch raises NameError; also 'a' inside check_attribute
        # is a leftover loop variable -- check_attribute(t, ...) looks
        # intended. Confirm before fixing.
        t0 = [a for a in tarch if a in tattrs and not tups[api[a].table]]
        check.update((t, check_attribute(a, readable=True)) for t in t0
                     if not check.get(t))
        t0 = [t for t in t0 if check[t]]
        print('%d/%d archived attributes have indexes not updated ...' %
              (len(t0), len(tarch)))
        if t0 and len(t0) < 100:
            vs = api.load_last_values(t0)
            tups.update((api[t].table, api[t].last_date) for t in t0)

        for a in tattrs:
            if a in tups:
                values[a] = [tups[api[a].table], 0]

    # Normalize every entry to a [epoch, value] pair (or None/[] when empty)
    for k, v in values.items():
        if (len(v) if isSequence(v) else v):
            if isinstance(v, dict):
                # NOTE(review): dict.values()[0] is Python 2 only; under
                # Python 3 this needs list(v.values())[0].
                v = v.values()[0]
            if isSequence(v) and len(v) == 1:
                v = v[0]
            if v and not isNumber(v[0]):
                v = [date2time(v[0]), v[1]]
            values[k] = v
        else:
            values[k] = [] if isSequence(v) else None

    print('%d values obtained' % len(values))

    ###########################################################################
    # Classify attributes by freshness of their last recorded value

    now = fn.now()
    result = fn.Struct()
    times = [t[0] for t in values.values() if t]
    futures = [t for t in times if t > now]
    times = [t for t in times if t < now]
    tmiss = []
    tfutures = [k for k, v in values.items() if v and v[0] in futures]
    tmin, tmax = min(times), max(times)
    print('\toldest update was %s' % time2str(tmin))
    print('\tnewest update was %s' % time2str(tmax))
    if futures:
        print('\t%d attributes have values in the future!' % len(futures))

    tnovals = [a for a in archived if not values.get(a, None)]
    if tnovals:
        print('\t%d archived attributes have no values' % len(tnovals))
    try:
        # Attributes with recent-enough data that are no longer archived.
        # NOTE(review): old_period is a duration while v[0] is an epoch
        # timestamp; "ti - old_period < v[0]" looks intended -- confirm.
        tmiss = [
            a for a, v in values.items()
            if v and old_period < v[0] < ti - period and a not in archived
        ]
    except:
        # NOTE(review): values.items()[0] is Python 2 only (dict_items is
        # not subscriptable under Python 3).
        print(values.items()[0])
    if tmiss:
        print('\t%d/%d attrs with values are not archived anymore' %
              (len(tmiss), len(tattrs)))

    result.Excluded = excluded
    result.Schema = schema
    result.All = tattrs
    result.Archived = values

    result.NoValues = tnovals
    result.MissingOrRemoved = tmiss

    result.TMin = tmin
    result.TMax = tmax
    result.Futures = tfutures

    # Updated within the check period vs. fully OK (updated AND non-empty)
    tup = sorted(a for a in values if values[a] and values[a][0] > ti - period)
    tok = [a for a in tup if values[a][1] not in (None, [])]
    print('\n%d/%d archived attributes are updated since %s - %s' %
          (len(tup), len(archived), ti, period))
    print('%d archived attributes are fully ok\n' % (len(tok)))

    tnotup = sorted(a for a in values
                    if values[a] and values[a][0] < ti - period)
    print('\t%d archived attrs are not updated' % len(tnotup))
    # Updated entries whose value is empty and that cannot be read live
    tupnoread = [
        a for a in tup if not values[a][1] and fn.read_attribute(a) is None
    ]

    # Live-read every stale attribute to separate lost from dead ones
    reads = dict((a, fn.read_attribute(a)) for a in tnotup)
    tnotupread = [a for a in tnotup if reads[a] is not None]
    print('\t%d not updated attrs are readable (Lost)' % len(tnotupread))
    print('\t%d of them are not floats' %
          len([t for t in tnotupread if not isinstance(reads[t], float)]))
    print('\t%d of them are states' %
          len([t for t in tnotupread if t.lower().endswith('/state')]))
    print('\t%d of them seem motors' %
          len([t for t in tnotupread if t.lower().endswith('/position')]))

    tnotupevs = [a for a in tnotupread if fn.tango.check_attribute_events(a)]
    print('\t%d not updated attrs are readable and have events (LostEvents)' %
          len(tnotupevs))

    tnotupnotread = [a for a in tnotup if a not in tnotupread]
    print('\t%d not updated attrs are not readable' % len(tnotupnotread))

    result.Lost = tnotupread
    result.LostEvents = tnotupevs

    # HDB++ relies on events, so only event-capable attrs count as lost there
    losts = (tnotupevs if is_hpp else tnotupread)

    # Compare last archived value against the current live value
    diffs = dict()
    for a in losts:
        try:
            v, vv = values.get(a, (None, ))[1], reads[a]
            if fn.isSequence(v): v = fn.toList(v)
            if fn.isSequence(vv): vv = fn.toList(vv)
            diffs[a] = v != vv
            if fn.isSequence(diffs[a]):
                diffs[a] = any(diffs[a])
            else:
                diffs[a] = bool(diffs[a])
        except:
            diffs[a] = None

    # Group lost attributes by device family (3rd/4th path members)
    fams = fn.defaultdict(list)
    for a in tnotupread:
        fams['/'.join(a.split('/')[-4:-2])].append(a)
    for f in sorted(fams):
        print('\t%s: %d attrs not updated' % (f, len(fams[f])))

    print()

    differ = [a for a in losts if diffs[a]]  #is True]
    print('\t%d/%d not updated attrs have also wrong values!!!' %
          (len(differ), len(losts)))

    # Lost attributes archived ONLY in this schema have no backup elsewhere
    rd = pta.Reader()
    only = [a for a in tnotupread if len(rd.is_attribute_archived(a)) == 1]
    print('\t%d/%d not updated attrs are archived only in %s' %
          (len(only), len(losts), schema))
    result.LostDiff = differ
    print()

    # Map each archiver server instance to the lost attributes it holds
    archs = sorted(set(archived.values()))
    astor = fn.Astor()
    astor.load_from_devs_list(archs)
    badloads = fn.defaultdict(list)
    for k, s in astor.items():
        for d in s.get_device_list():
            d = fn.tango.get_normal_name(d)
            for a in losts:
                if fn.tango.get_normal_name(archived[a]) == d:
                    badloads[k].append(a)
    for k, s in badloads.items():
        if len(s):
            print('\t%s archives %d lost attributes' % (k, len(s)))

    print('\t%d updated attrs are not readable' % len(tupnoread))

    result.ArchivedAndReadable = tok
    result.Updated = tup
    result.NotUpdated = tnotup
    result.Unreadable = tnotupnotread
    #result.DeviceNotRunning = nodevs
    result.ArchiverNotRunning = noarchs

    result.LostFamilies = fams

    # Tnones is for readable attributes not being archived
    tnones = [
        a for a in archived
        if (a not in values or values[a] and values[a][1] in (None, []))
        and a not in tupnoread and a not in tnotupread
    ]
    tupnones = [a for a in tnones if a in tup]

    if tupnones:
        print('\t%d archived readable attrs record empty values' %
              len(tupnones))

    result.Nones = tnones

    # Disabled legacy summary code, kept for reference
    if 0:

        get_ratio = lambda a, b: float(len(a)) / float(len(b))

        #result.ArchRatio = get_ratio([t for t in readarch if t not in tnotup],readarch)
        #result.ReadRatio = get_ratio(result.Readable,tattrs)
        #result.LostRatio = get_ratio([a for a in tread if a in tnotup],tread)
        #result.MissRatio = get_ratio([a for a in tread if a not in tarch],tread)
        #result.OkRatio = 1.0-result.LostRatio-result.MissRatio

        #result.Summary = '\n'.join((
        #('Checking archiving of %s attributes'%(len(attributes) if attributes else schema))
        #,('%d attributes in %s, %d are currently active'%(len(api),schema,len(tarch)))
        #,('%d devices with %d archived attributes are not running'%(len(nodevs),len([a for a in api if a.rsplit('/',1) in nodevs])))
        #,('%d archived attributes (%2.1f %%) are unreadable! (check and remove)'%(len(tnoread),1e2*get_ratio(tnoread,tarch)))
        #,('%d readable attributes are not archived'%(len(tmiss)))
        #,('%d attributes (readable or not) are updated (%2.1f %% of all readables)'%(len(tok),1e2*result.OkRatio))
        #,('-'*80)
        #,('%d archived attributes (readable or not) are not updated!'%len(tnotup))
        #,('%d archived and readable attributes are not updated! (check and restart?)'%len(treadnotup))
        #,('-'*80)
        #,('%d readable attributes have been removed in the last %d days!'%(len(removed),old_period/(24*3600)))
        #,('%d readable scalar attributes are not being archived (not needed anymore?)'%len(tmscalar))
        #,('%d readable array attributes are not being archived (Ok)'%len(tmarray))
        #,('%d readable array attributes are archived (Expensive)'%len(tarray))
        #,('')))

        #if trace: print(result.Summary)
        #print('%d readable lost,Ok = %2.1f%%, %2.1f %% over all Readables (%2.1f %% of total)'%\
        #(len(treadnotup),1e2*result.ArchRatio,1e2*result.OkRatio,1e2*result.ReadRatio))

    if action:
        if action == 'start_devices':
            print('Executing action %s' % action)
            api.start_devices()

        if action == 'restart_all':
            print('Executing action %s' % action)
            devs = api.get_archivers()
            astor = fn.Astor()
            # NOTE(review): single %d with a 2-tuple raises TypeError here;
            # '%d devs: %s' % (len(devs), devs) looks intended.
            print('Restarting %d devs:' % (len(devs), devs))
            astor.load_from_devs_list(devs)
            astor.stop_servers()
            fn.wait(10.)
            astor.start_servers()

        #print('NO ACTIONS ARE GONNA BE EXECUTED, AS THESE ARE ONLY RECOMMENDATIONS')
        #print("""
        #api = PyTangoArchiving.HDBpp(schema)
        #api.start_devices()

        #or

        #api = PyTangoArchiving.ArchivingAPI('%s')
        #lostdevs = sorted(set(api[a].archiver for a in result.NotUpdated))
        #print(lostdevs)
        #if lostdevs < a_reasonable_number:
        #astor = fn.Astor()
        #astor.load_from_devs_list(lostdevs)
        #astor.stop_servers()
        #fn.time.sleep(10.)
        #astor.start_servers()
        #"""%schema)

    print('\nfinished in %d seconds\n\n' % (fn.now() - ti))

    if export is not None:
        if export is True:
            export = 'txt'
        for x in (export.split(',') if isString(export) else export):
            if x in ('json', 'pck', 'pickle', 'txt'):
                x = '/tmp/%s.%s' % (schema, x)
            print('Saving %s file with keys:\n%s' % (x, result.keys()))
            if 'json' in x:
                fn.dict2json(result.dict(), x)
            else:
                f = open(x, 'w')
                if 'pck' in x or 'pickle' in x:
                    # NOTE(review): pickle is only imported inside the
                    # '.pck' values branch above; exporting pickle without
                    # loading from pickle raises NameError -- confirm a
                    # module-level import exists elsewhere in the file.
                    pickle.dump(result.dict(), f)
                else:
                    f.write(fn.dict2str(result.dict()))
                f.close()

    return result
Ejemplo n.º 38
0
def get_table_description(api,table):
    """Return the CREATE TABLE statement text for *table*."""
    db = pta.api(api) if fn.isString(api) else api
    return db.Query('show create table %s' % table)[-1][-1]
Ejemplo n.º 39
0
def mysqldump_by_date(schema, user, passwd, folder, start, stop, options = '',
                      tables = None, compress = True, delete = True):
    """
    This method creates a backup between selected dates for each table 
    of the selected database.
    
    All dump files are exported to the same folder, and a compressed file
    is created at the end.
    
    Deleting of temporary files created (folder/*dmp) must be done manually.

    :param schema: database/schema name to back up
    :param user: MySQL user name
    :param passwd: MySQL password
    :param folder: destination folder (created if missing)
    :param start: begin date (epoch or string, normalized to string)
    :param stop: end date (epoch or string, normalized to string)
    :param options: extra mysqldump options applied to every table
    :param tables: explicit table list; defaults to all tables in the schema
    :param compress: tar/gzip all dumps into a single .tgz at the end
    :param delete: remove the per-table .dmp files after compression
    :returns: path of the .tgz archive when compress is True; otherwise
        the last per-table dump filename written.
    """
    print('mysqldump_by_date(%s,,,folder=%s,%s,%s,compress=%s,delete=%s)'
          % (schema, folder, start, stop, compress, delete))
    db = FriendlyDB(schema, user=user, passwd=passwd)
    t, e = start, stop
    print(t, e)
    # Normalize both boundaries to date strings ('YYYY-MM-DD hh:mm:ss')
    start = start if fn.isString(start) else fn.time2str(start)
    stop = stop if fn.isString(stop) else fn.time2str(stop)
    tables = tables or db.getTables()

    print('mysqldump_by_date(%s): %d tables to backup between %s and %s'
          % (schema, len(tables), start, stop))

    if not os.path.isdir(folder):
        print('mkdir %s' % folder)
        os.mkdir(folder)

    t0 = fn.now()
    filenames = []

    for t in sorted(tables):
        currops = options
        filename = ('%s/%s-%s-%s-%s.dmp'
            % (folder, schema, t, start.split()[0], stop.split()[0]))
        cols = db.getTableCols(t)
        # Prefer int_time (epoch column) over string timestamp columns
        col = [c for c in ('int_time', 'time', 'data_time') if c in cols]
        if col and col[0] == 'int_time':
            where = " %s >= %s and %s < %s " % (
                col[0], fn.str2time(start), col[0], fn.str2time(stop))
        elif col:
            where = " %s >= '%s' and %s < '%s' " % (col[0], start, col[0], stop)
        else:
            # No time column: dump the whole table
            where = ""
        if t in CONFIG_TABLES:
            # Config tables are fully replaced on restore
            currops += " --add-drop-table "
        mysqldump(schema, user, passwd, filename, t, where, options=currops)
        filenames.append(filename)

    t1 = fn.now()

    # Mark the archive as partial if the range extends into the future
    ext = ('part.' if fn.str2time(stop) > fn.now() else '') + 'tgz'
    dext = '.dmp'
    if compress:
        filename = ('%s/%s-%s-%s.%s'
            % (folder, schema, start.split()[0], stop.split()[0], ext))
        cmd = 'tar zcvf %s %s/*%s' % (filename, folder, dext)
        print(cmd)
        fn.linos.shell_command(cmd)

    if compress and delete:
        cmd = 'rm -rf %s/*.dmp*' % folder
        print(cmd)
        fn.linos.shell_command(cmd)

    t2 = fn.now()
    print('Backup took %d seconds' % int(t1 - t0))
    print('Compression took %d seconds' % int(t2 - t1))

    return filename
Ejemplo n.º 40
0
 def getApi(k, schema):
     """Resolve and return the archiving API object declared by *schema*.

     Returns None when the schema cannot be resolved; instantiates the
     API class when a class (not an instance) was loaded.
     """
     sch = k.getSchema(schema)
     if sch is not None:
         api = sch.get('api', 'PyTangoArchiving.ArchivingAPI')
         if fn.isString(api):
             api = k._load_object(api, sch)
         if isinstance(api, type):
             return api(sch['schema'])
         return api
def get_assigned_attributes(api='hdb',dedicated=False):
    """Return the sorted list of attributes that have an archiver
    assigned (or, with dedicated=True, a dedicated archiver).
    """
    if fun.isString(api):
        api = pta.api(api)
    if dedicated:
        api.load_dedicated_archivers(check=False)
    assigned = set()
    for a in api:
        if api[a].archiver or (dedicated and api[a].dedicated):
            assigned.add(a)
    return sorted(assigned)
def get_assigned_attributes(api='hdb', dedicated=False):
    """Return sorted attributes that have an archiver (or, with
    dedicated=True, a dedicated archiver) assigned.

    NOTE(review): this redefinition shadows an earlier
    get_assigned_attributes in this file and unconditionally loads the
    dedicated archivers.
    """
    if fun.isString(api):
        api = pta.api(api)
    api.load_dedicated_archivers(check=False)
    return sorted({a for a in api
                   if api[a].archiver or (dedicated and api[a].dedicated)})
Ejemplo n.º 43
0
def get_table_description(api, table):
    """Return the CREATE TABLE statement text for *table*."""
    if fn.isString(api):
        api = pta.api(api)
    rows = api.Query('show create table %s' % table)
    return rows[-1][-1]
def get_deactivated_attributes(api='hdb',updates=None,period=6*30*24*3600):
    """Return attributes that are no longer archived despite being
    readable: their tables last updated between *period* seconds ago
    and one day ago.
    """
    if fun.isString(api):
        api = pta.api(api)
    if updates is None:
        updates = get_table_updates(api)
    now = fun.time.time()
    lower, upper = now - period, now - 24*3600
    stale = [a for a, t in updates.items()
             if lower < t < upper and fun.check_attribute(a)]
    return sorted(stale)
Ejemplo n.º 45
0
    def checkSchema(k, schema, attribute='', start=None, stop=None):
        """Evaluate the schema's 'check' formula for *attribute* over the
        [start, stop] interval.

        :param schema: SchemaDict or schema name resolvable via k.getSchema
        :param attribute: attribute name exposed to the formula (lowercased)
        :param start: interval begin; string date or epoch (defaults to now-1)
        :param stop: interval end; string date or epoch (defaults to now)
        :returns: the formula result (truthy when the schema applies),
            True when the schema declares no 'check', False on any error.
        """
        if not isinstance(schema, SchemaDict):
            schema = k.getSchema(schema)
        if not schema:
            return False

        f = schema.get('check')
        if not f:
            print('%s has no check function' % str(schema))
            return True

        try:
            now = time.time()
            # Accept both date strings and epoch floats for the interval
            start = (str2time(start) if fn.isString(start) else fn.notNone(
                start, now - 1))
            stop = (str2time(stop) if fn.isString(stop) else fn.notNone(
                stop, now))
            xmatch = lambda e, a: clmatch(e, a, extend=True)
            # Names made available to the schema's check formula
            k.LOCALS.update({
                'attr': attribute.lower(),
                'attribute': attribute.lower(),
                'device': attribute.lower().rsplit('/', 1)[0],
                'match': lambda r: xmatch(r, attribute),
                'clmatch': xmatch,
                'overlap': overlap,
                'time2str': time2str,
                'str2time': str2time,
                't2s': time2str,
                's2t': str2time,
                'start': start,
                'stop': stop,
                'now': now,
                'begin': start,
                'end': stop,
                'NOW': now,
                'reader': schema.get('reader', schema.get('api')),
                'schema': schema.get('schema'),
                'dbname': schema.get('dbname',
                           schema.get('db_name', schema.get('schema', ''))),
            })
            # Pre-load reader/api objects only if the formula references them
            if 'reader' in f:
                k.getReader(schema.get('schema'))
            if 'api' in f:
                k.getApi(schema.get('schema'))

            # SECURITY NOTE: evalX executes the schema-provided formula;
            # schema definitions must come from trusted configuration only.
            v = fn.evalX(f, k.LOCALS, k.MODULES)
        except Exception:
            # Was a bare except; narrowed so KeyboardInterrupt/SystemExit
            # are no longer swallowed.
            print('checkSchema(%s,%s) failed!' % (schema, attribute))
            traceback.print_exc()
            v = False

        return v