Example #1
0
    def get_archiver_attributes(self, archiver, from_db=False, full=False):
        """
        Return the AttributeList of an archiver device.

        The list is read from the TangoDB property when from_db or full
        is True, or when the device is not running; otherwise it is read
        from the running device itself.

        :param archiver: archiver device name
        :param from_db: force reading from the Tango database
        :param full: return the full config strings instead of names only
        :return: list of attribute names (or 'attr;params' strings if full)
        """
        if full or from_db or not check_device_cached(archiver):
            self.debug('getting %s attributes from database' % archiver)
            attrs = [
                str(l) for l in fn.toList(
                    get_device_property(archiver, 'AttributeList'))
            ]
            if not full:
                # each property line is 'attribute;params'; keep the name only
                attrs = [str(l).split(';')[0] for l in attrs]
        else:
            try:
                attrs = get_device(archiver, keep=True).AttributeList or []
            except Exception:
                # narrowed from bare except: so SystemExit/KeyboardInterrupt
                # are not swallowed; device read failures fall back to []
                print('Unable to get %s attributes' % archiver)
                traceback.print_exc()
                attrs = []

        self.debug('%s archives %d attributes' % (archiver, len(attrs)))
        # keep a per-archiver cache of the assigned attributes
        self.dedicated[archiver] = attrs

        return attrs
Example #2
0
    def addXYModels(self, attrs, t0=None, t1=None):
        """
        Add XY plot models for the given attributes and time window.

        Each attribute is converted into a query URL like:
        'tgarch://alba03.cells.es:10000/sr/id/scw01/pressure?db=*;t0=2019-11-11T11:41:59;t1=2020-01-03T09:03:03;ts',
        """
        prev_cursor = self.cursor()
        self.setCursor(Qt.Qt.WaitCursor)
        attrs = fn.toList(attrs)

        # no window given anywhere: fall back to the time chooser widget
        if not (t0 or t1 or self.t0 or self.t1):
            t0, t1 = self.tc.getTimes()

        if t0 and t1:
            if not fn.isNumber(t0):
                t0 = fn.str2time(t0, relative=True)
            if not fn.isNumber(t1):
                t1 = fn.str2time(t1, relative=True)
            # store as ISO strings with 'T' separator, as the scheme expects
            self.t0 = fn.time2str(t0, iso=1).replace(' ', 'T')
            self.t1 = fn.time2str(t1, iso=1).replace(' ', 'T')

        models = []
        for attr in attrs:
            full_name = fn.tango.get_full_name(attr, fqdn=True)
            full_name = full_name.replace('tango://', '')
            query = 'tgarch://%s?db=*;t0=%s;t1=%s' % (
                full_name, self.t0, self.t1)
            models.append((query + ';ts', query))

        self.plot.onAddXYModel(models)
        self.setCursor(prev_cursor)
Example #3
0
    def load_last_values(self,
                         attributes=None,
                         n=1,
                         epoch=None,
                         tref=90 * 86400):
        """
        Return a dictionary {attr: (last_time, last_value)}.

        load_last_values is provided to comply with the Reader API;
        get_last_attribute_values complies with CommonAPI.

        attributes: attribute name or list; all archived attrs if None
        n: the number of last values to be returned per attribute
        tref: start of the search window; an absolute timestamp if > 1e9,
              otherwise a period in seconds (90 days by default)
        epoch: end of the window to search values (now by default)
        """
        if attributes is None:
            attributes = self.get_archived_attributes()

        kwargs = {}
        if epoch is not None:
            if fn.isString(epoch):
                epoch = fn.str2time(epoch)
            # tref above 1e9 is treated as an absolute start time,
            # otherwise as a relative period before epoch
            period = (epoch - tref) if tref > 1e9 else abs(tref)
            kwargs = {'epoch': epoch, 'period': period}

        return dict(
            (attr, self.get_last_attribute_values(attr, n=n, **kwargs))
            for attr in fn.toList(attributes))
Example #4
0
 def get_periodic_archivers(self):
     """
     Return the sorted list of periodic archivers declared in the
     manager's PeriodicArchivers property ([] if empty or unreadable).
     """
     #archs = fn.tango.get_class_devices('PyHdbppPeriodicArchiver')
     archivers = fn.tango.get_device_property(self.manager,
                                              'PeriodicArchivers')
     if not fn.isSequence(archivers):
         archivers = fn.toList(archivers)
     try:
         return sorted(archivers)
     except Exception:
         # narrowed from bare except:; property may be None/unsortable
         return []
def export(devices=[dd, sd], suffix='.json', preffix=''):
    """
    Save devices in .json files, one file per device.

    :param devices: device name or list of names
    :param suffix: output file extension
    :param preffix: output path prefix (a '/' is appended if missing)
    :return: list of written filenames
    """
    devices = fn.toList(devices)
    files = []
    if preffix and not preffix.endswith('/'):
        preffix += '/'
    for d in devices:
        values = ft.export_device_to_dict(d)
        files.append(preffix + dev2file(d, suffix))
        # context manager closes the file; the original leaked the handle
        with open(files[-1], 'w') as f:
            json.dump(values, f)
    return files
Example #6
0
 def load_last_values(self,attributes=None,n=1,epoch=None):
     """
     Return {attribute: last value(s)} and refresh the cached
     last_date/last_value fields in self.attributes.

     attributes: name or list; defaults to all archived attributes
     n: number of values requested per attribute
     epoch: upper time limit passed to get_last_attribute_values
     """
     if attributes is None:
         attributes = self.get_archived_attributes()
     vals = dict((a,self.get_last_attribute_values(a,n=n,epoch=epoch)) 
                 for a in fn.toList(attributes))
     for a,v in vals.items():
         if n!=1:
             # presumably a list of (time, value) pairs when n != 1;
             # keep the first entry for the cache -- TODO confirm that
             # results are ordered newest-first
             v = v and v[0]
         # v is expected to be a (time, value) pair at this point
         self.attributes[a].last_date = v and v[0]
         self.attributes[a].last_value = v and v[1]
         
     return vals
Example #7
0
    def test(self, tests=[]):
        """
        Run a sequence of tests against self.test_object.

        Tests may be a list of (name, result, args, kwargs) values, a
        single such tuple, or a mapping {name: {'result':..., 'args':...,
        'kwargs':...}}. Missing positions default to None/[]/{}.

        :return: number of tests passed
        """
        print('test(', tests, ')')
        # initialized before the try so the final return never hits an
        # unbound local if an early statement raises
        passed = 0
        try:
            tests = tests or self.tests
            if not fn.isSequence(tests): tests = [tests]
            if fn.isMapping(tests):
                # fixed: bare isMapping was a NameError, and kwargs must
                # default to {} (not []) to be usable with **
                tests = [
                    [k] +
                    list(t if not fn.isMapping(t) else (t.get('result', None),
                                                        t.get('args', []),
                                                        t.get('kwargs', {})))
                    for k, t in tests.items()
                ]
            for t in tests:
                t = fn.toList(t)
                print(t)
                # pad to (name, result, args, kwargs); the original
                # assigned t[1..3] in place, raising IndexError for any
                # list shorter than 4 elements
                t += [None, [], {}][len(t) - 1:]
                v = self.test_object(t[0], t[1], *t[2], **t[3])
                if v: passed += 1
                self.results[t[0]] = v

            print('-' * 80)
            for t in tests:
                v = self.results[fn.toList(t)[0]]
                print('%s testing: %s : %s' %
                      (self.module, t, ['Failed', 'Ok'][bool(v)]))

            print('%s : %d / %d tests passed' %
                  (self.module, passed, len(tests)))
        except Exception:
            traceback.print_exc()
            print(tests)
        return passed
Example #8
0
 def test(self,tests=[]):
   """
   Run a sequence of tests against self.test_object.

   Tests may be a list of (name,result,args,kwargs) values, a single
   tuple, or a mapping {name: {'result':...,'args':...,'kwargs':...}}.
   Missing positions default to None/[]/{}.
   Returns the number of tests passed.
   """
   print('test(',tests,')')
   # initialized before the try so return never hits an unbound local
   passed = 0
   try:
     tests = tests or self.tests
     if not fn.isSequence(tests): tests = [tests]
     if fn.isMapping(tests):
       # fixed: bare isMapping was a NameError, and kwargs must default
       # to {} (not []) to be usable with **
       tests = [
         [k]+list(t if not fn.isMapping(t) else 
           (t.get('result',None),t.get('args',[]),t.get('kwargs',{}))
           )
         for k,t in tests.items()]
     for t in tests:
       t = fn.toList(t)
       print(t)
       # pad to (name,result,args,kwargs); the original assigned t[1..3]
       # in place, raising IndexError for lists shorter than 4 elements
       t += [None,[],{}][len(t)-1:]
       v = self.test_object(t[0],t[1],*t[2],**t[3])
       if v: passed += 1
       self.results[t[0]] = v
       
     print('-'*80)
     for t in tests:
       v = self.results[fn.toList(t)[0]]
       print('%s testing: %s : %s' % (self.module,t,['Failed','Ok'][bool(v)]))
       
     print('%s : %d / %d tests passed'%(self.module,passed,len(tests)))
   except Exception:
     traceback.print_exc()
     print(tests)
   return passed
Example #9
0
def export(devices, suffix='.json', preffix=''):
    """
    Save devices in .json files, one per device; devices that fail to
    export are reported and skipped.

    :param devices: device name or list of names
    :param suffix: output file extension
    :param preffix: output path prefix (a '/' is appended if missing)
    :return: list of written filenames
    """
    devices = fn.toList(devices)
    files = []
    if preffix and not preffix.endswith('/'):
        preffix += '/'
    for d in devices:
        try:
            values = ft.export_device_to_dict(d)
            files.append(preffix + dev2file(d, suffix))
            # context manager closes the file; the original leaked it
            with open(files[-1], 'w') as f:
                json.dump(dict2json(values), f)
        except Exception:
            # narrowed from bare except:; keep best-effort per device
            print('%s failed' % d)
            traceback.print_exc()
    return files
Example #10
0
def export(devices,suffix='.json',preffix=''):
  """
  Save devices in .json files, one per device; devices that fail to
  export are reported and skipped. Returns the written filenames.
  """
  devices = fn.toList(devices)
  files = []
  if preffix and not preffix.endswith('/'):
    preffix+='/'
  for d in devices:
    try:
      values = ft.export_device_to_dict(d)
      files.append(preffix+dev2file(d,suffix))
      # context manager closes the file; the original leaked the handle
      with open(files[-1],'w') as f:
        json.dump(dict2json(values),f)
    except Exception:
      # narrowed from bare except:; keep best-effort per device
      print('%s failed'%d)
      traceback.print_exc()
  return files
def load(instance, devices=[df, sf]):
    """
    Load .json files into simulated devices.

    :param instance: server instance name to create the devices under
    :param devices: device names and/or .json file paths
    :return: list of device names created
    :raises AssertionError: if a device already exists
    """
    done = []
    devices = fn.toList(devices)
    for dd in devices:
        # accept either a filename or a device name
        if os.path.isfile(dd):
            df, dd = dd, file2dev(dd)
        else:
            df, dd = dev2file(dd), dd

        # context manager closes the file; json.load(open(df)) leaked it
        with open(df) as f:
            data = json.load(f)

        if data['dev_class'] == 'PyAlarm':
            props = data['properties']
            # list(map(...)) so the values are real lists on Python 3 too
            props = dict((str(k), list(map(str, v))) for k, v in props.items())

            assert not ft.get_matching_devices(dd), Exception(
                'Device %s Already Exists!!!' % dd)
            ft.add_new_device('PyAlarm/' + instance, 'PyAlarm', dd)
            ft.put_device_property(dd, props)

        else:
            vals = dict(
                (str(k), v['value']) for k, v in data['attributes'].items())
            dynattrs = []
            for k, v in sorted(vals.items()):
                if k.lower() in ('state', 'status'):
                    continue
                t = type(v).__name__
                if t == 'unicode': t = 'str'
                # render the value as a literal for the dynamic attr formula
                v = str(v) if t != 'str' else "'%s'" % v
                dynattrs.append('%s = %s(%s)' % (k, t, v))

            assert not ft.get_matching_devices(dd), Exception(
                'Device %s Already Exists!!!' % dd)
            ft.add_new_device('PyAttributeProcessor/' + instance,
                              'PyAttributeProcessor', dd)
            ft.put_device_property(dd, 'DynamicAttributes', dynattrs)

        done.append(dd)

    return done
def extract(target):
    """ Extract devices used in alarms """
    import panic
    api = panic.api()
    alarms, devs = [], []

    for item in fn.toList(target):
        # target entries may be alarm names or alarm device names
        if item in api:
            alarms.append(item)
        if item in api.devices:
            alarms.extend(api.devices[item].alarms.keys())

    for alarm in alarms:
        devs.append(api[alarm].device)
        devs.extend(attr.rsplit('/', 1)[0]
                    for attr in api.parse_attributes(alarm))

    return sorted(set(map(str.lower, devs)))
Example #13
0
def export_attributes_to_pck(filein='ui_exported_devices.txt',
                             fileout='ui_attribute_values.pck'):
    """
    Export device configuration and attribute values to a pickle file.

    :param filein: device name(s) already in the DB, or sources to scan
    :param fileout: output filename, must end with .pck
    :return: the output filename
    """
    print('export_attributes from:' + str((filein, fileout)))
    assert fileout.endswith('.pck'), 'output must be a pickle file!'

    all_devs = fd.tango.get_all_devices()
    filein = fd.toList(filein)
    if all(d.lower() in all_devs for d in filein):
        devs = filein
    else:
        devs = export_devices_from_sources(*filein, check=True)

    print('devices to export: %s' % str(devs))

    proxies = dict((d, PyTango.DeviceProxy(d)) for d in devs)
    devs = defaultdict(Struct)

    for d, dp in sorted(proxies.items()):
        print('%s (%d/%d)' % (d, 1 + len(devs), len(proxies)))
        obj = devs[d]
        obj.dev_class, obj.attrs, obj.comms = '', defaultdict(Struct), {}
        # device properties; DB vector types are converted to plain lists
        obj.props = dict(
            (k, v if not 'vector' in str(type(v)).lower() else list(v))
            for k, v in fd.tango.get_matching_device_properties(
                d, '*').items() if 'dynamicattributes' not in k.lower())
        if fd.check_device(d):
            devs[d].name = d
            devs[d].dev_class = dp.info().dev_class
            for c in dp.command_list_query():
                if c.cmd_name.lower() not in ('state', 'status', 'init'):
                    obj.comms[c.cmd_name] = (str(c.in_type), str(c.out_type))
            for a in dp.get_attribute_list():
                if a.lower() == 'status':
                    continue
                obj.attrs[a] = fd.tango.export_attribute_to_dict(
                    d, a, as_struct=True)

    # binary mode is required by pickle; the context manager also fixes
    # the leaked file handle of the original open(fileout, 'w')
    with open(fileout, 'wb') as f:
        pickle.dump(devs, f)
    return fileout
Example #14
0
 def load_last_values(self, attributes, n=1):
     """Return {attribute: last n values} for each requested attribute."""
     result = {}
     for attr in fn.toList(attributes):
         result[attr] = self.get_last_attribute_values(attr, n=n)
     return result
Example #15
0
 def get_periodic_archiver_attributes(self, archiver):
     """Return {attribute: periodic config} parsed from AttributeList."""
     lines = fn.tango.get_device_property(archiver, 'AttributeList')
     pairs = (entry.lower().split(';', 1)
              for entry in fn.toList(lines) if entry.strip())
     return dict(pairs)
Example #16
0
def check_archiving_schema(
        schema='hdb',
        attributes=[],values={},
        ti = None,
        period = 7200,
        old_period=24*3600*90,\
        exclude=['*/waveid','*/wavename','*/elotech-*'],
        use_index = True,
        loads = True,
        action=False,
        trace=True,
        export=None):
    """
    Check the archiving state of a schema (or a subset of its attributes).

    Collects the last archived values, compares them with the running
    devices and prints a report; returns a fn.Struct with the lists of
    updated, lost, unreadable and not-archived attributes.

    schema: archiving schema name ('hdb' by default)
    attributes: list or regexp of attributes to check (all if empty)
    values: pre-loaded values dict, or a .pck/.json filename to load
    ti: reference time for the check (now by default)
    period: attrs not updated since ti - period are considered stale
    old_period: ignore attrs last updated before this long ago;
        small values are interpreted as days/hours (see normalization)
    exclude: attribute patterns excluded from the check
    use_index: use MySQL index tables to obtain update times if possible
    loads: print the per-archiver attribute load
    action: optional action, 'start_devices' or 'restart_all'
    trace: verbosity flag (unused in the active code paths)
    export: 'json'/'pck'/'txt' (or a filename) to export the result
    """

    ti = fn.now() if ti is None else str2time(ti) if isString(ti) else ti

    api = pta.api(schema)
    is_hpp = isinstance(api, pta.HDBpp)
    check = dict()
    # normalize old_period: values < 1000 are taken as days,
    # values < 3600 as hours, anything else as seconds
    old_period = 24*3600*old_period if old_period < 1000 \
        else (24*old_period if old_period<3600 else old_period)

    allattrs = api.get_attributes() if hasattr(
        api, 'get_attributes') else api.keys()
    print('%s contains %d attributes' % (schema, len(allattrs)))

    if attributes:
        if fn.isString(attributes) and fn.isRegexp(attributes):
            tattrs = [a for a in allattrs if clsearch(attributes, a)]
        else:
            attributes = map(fn.tango.get_normal_name, fn.toList(attributes))
            # NOTE(review): this tests membership against allattrs, which
            # is always true -- looks like it should be "in attributes";
            # confirm before relying on explicit attribute lists
            tattrs = [
                a for a in allattrs if fn.tango.get_normal_name(a) in allattrs
            ]

    else:
        tattrs = allattrs

    excluded = [a for a in tattrs if any(fn.clmatch(e, a) for e in exclude)]
    tattrs = [a for a in tattrs if a not in excluded]

    print('%d attributes to check' % len(tattrs))
    if not len(tattrs):
        return

    if excluded:
        print('\t%d attributes excluded' % len(excluded))

    # map each checked attribute to its assigned archiver device
    archived = {}
    for a in tattrs:
        if hasattr(api, 'get_attribute_archiver'):
            arch = api.get_attribute_archiver(a)
        else:
            arch = api[a].archiver
        if arch:
            archived[a] = arch

    print('\t%d attributes are archived' % len(archived))

    #Getting Tango devices currently not running
    alldevs = set(t.rsplit('/', 1)[0] for t in tattrs)
    #tdevs = filter(fn.check_device,alldevs)
    #nodevs = [fn.tango.get_normal_name(d) for d in alldevs if d not in tdevs]
    #if nodevs:
    #print('\t%d devices are not running' % len(nodevs))

    archs = sorted(set(archived.values()))
    if loads:
        # report how many attributes each archiver server is loading
        astor = fn.Astor()
        astor.load_from_devs_list(archs)
        loads = fn.defaultdict(list)
        for k, s in astor.items():
            for d in s.get_device_list():
                d = fn.tango.get_normal_name(d)
                for a in archived:
                    if fn.tango.get_normal_name(archived[a]) == d:
                        loads[k].append(a)
        for k, s in sorted(loads.items()):
            print('\t%s archives %d attributes' % (k, len(s)))

    noarchs = [
        fn.tango.get_normal_name(d) for d in archs if not fn.check_device(d)
    ]
    if noarchs:
        print('\t%d archivers are not running: %s' % (len(noarchs), noarchs))

    ###########################################################################
    # Obtain last values, either from file, from devices or from index tables

    if isString(values) and values.endswith('.pck'):
        print('\nLoading last values from %s file\n' % values)
        import pickle
        values = pickle.load(open(values))

    elif isString(values) and values.endswith('.json'):
        print('\nLoading last values from %s file\n' % values)
        values = fn.json2dict(values)

    elif not use_index or is_hpp:
        print('\nGetting last values ...\n')
        for a in tattrs:
            values[a] = api.load_last_values(a)

    else:
        print('\nGetting updated tables from database ...\n')
        tups = pta.utils.get_table_updates(schema)
        # Some tables do not update MySQL index tables
        # NOTE(review): 'tarch' is not defined anywhere in this function;
        # this branch would raise NameError (probably meant the archived
        # attribute list) -- confirm before using use_index on non-HDB++
        t0 = [a for a in tarch if a in tattrs and not tups[api[a].table]]
        check.update((t, check_attribute(a, readable=True)) for t in t0
                     if not check.get(t))
        t0 = [t for t in t0 if check[t]]
        print('%d/%d archived attributes have indexes not updated ...' %
              (len(t0), len(tarch)))
        if t0 and len(t0) < 100:
            vs = api.load_last_values(t0)
            tups.update((api[t].table, api[t].last_date) for t in t0)

        for a in tattrs:
            if a in tups:
                values[a] = [tups[api[a].table], 0]

    # normalize every entry to a [timestamp, value] pair (or []/None)
    for k, v in values.items():
        if (len(v) if isSequence(v) else v):
            if isinstance(v, dict):
                # NOTE(review): Python 2 only -- dict.values() is not
                # indexable on Python 3
                v = v.values()[0]
            if isSequence(v) and len(v) == 1:
                v = v[0]
            if v and not isNumber(v[0]):
                v = [date2time(v[0]), v[1]]
            values[k] = v
        else:
            values[k] = [] if isSequence(v) else None

    print('%d values obtained' % len(values))

    ###########################################################################
    # Classify attributes by update time and readability

    now = fn.now()
    result = fn.Struct()
    times = [t[0] for t in values.values() if t]
    futures = [t for t in times if t > now]
    times = [t for t in times if t < now]
    tmiss = []
    tfutures = [k for k, v in values.items() if v and v[0] in futures]
    tmin, tmax = min(times), max(times)
    print('\toldest update was %s' % time2str(tmin))
    print('\tnewest update was %s' % time2str(tmax))
    if futures:
        print('\t%d attributes have values in the future!' % len(futures))

    tnovals = [a for a in archived if not values.get(a, None)]
    if tnovals:
        print('\t%d archived attributes have no values' % len(tnovals))
    try:
        # attributes with recent-enough values that are no longer archived
        tmiss = [
            a for a, v in values.items()
            if v and old_period < v[0] < ti - period and a not in archived
        ]
    except:
        # NOTE(review): Python 2 only -- items()[0] fails on a Py3 view
        print(values.items()[0])
    if tmiss:
        print('\t%d/%d attrs with values are not archived anymore' %
              (len(tmiss), len(tattrs)))

    result.Excluded = excluded
    result.Schema = schema
    result.All = tattrs
    result.Archived = values

    result.NoValues = tnovals
    result.MissingOrRemoved = tmiss

    result.TMin = tmin
    result.TMax = tmax
    result.Futures = tfutures

    # tup: updated within the window; tok: updated AND with a real value
    tup = sorted(a for a in values if values[a] and values[a][0] > ti - period)
    tok = [a for a in tup if values[a][1] not in (None, [])]
    print('\n%d/%d archived attributes are updated since %s - %s' %
          (len(tup), len(archived), ti, period))
    print('%d archived attributes are fully ok\n' % (len(tok)))

    tnotup = sorted(a for a in values
                    if values[a] and values[a][0] < ti - period)
    print('\t%d archived attrs are not updated' % len(tnotup))
    tupnoread = [
        a for a in tup if not values[a][1] and fn.read_attribute(a) is None
    ]

    reads = dict((a, fn.read_attribute(a)) for a in tnotup)
    tnotupread = [a for a in tnotup if reads[a] is not None]
    print('\t%d not updated attrs are readable (Lost)' % len(tnotupread))
    print('\t%d of them are not floats' %
          len([t for t in tnotupread if not isinstance(reads[t], float)]))
    print('\t%d of them are states' %
          len([t for t in tnotupread if t.lower().endswith('/state')]))
    print('\t%d of them seem motors' %
          len([t for t in tnotupread if t.lower().endswith('/position')]))

    tnotupevs = [a for a in tnotupread if fn.tango.check_attribute_events(a)]
    print('\t%d not updated attrs are readable and have events (LostEvents)' %
          len(tnotupevs))

    tnotupnotread = [a for a in tnotup if a not in tnotupread]
    print('\t%d not updated attrs are not readable' % len(tnotupnotread))

    result.Lost = tnotupread
    result.LostEvents = tnotupevs

    # for HDB++ only event-enabled attrs can be lost by the archiver
    losts = (tnotupevs if is_hpp else tnotupread)

    # compare last archived value with the current device reading
    diffs = dict()
    for a in losts:
        try:
            v, vv = values.get(a, (None, ))[1], reads[a]
            if fn.isSequence(v): v = fn.toList(v)
            if fn.isSequence(vv): vv = fn.toList(vv)
            diffs[a] = v != vv
            if fn.isSequence(diffs[a]):
                diffs[a] = any(diffs[a])
            else:
                diffs[a] = bool(diffs[a])
        except:
            # comparison failed (e.g. incompatible types); mark as unknown
            diffs[a] = None

    # group the lost attributes by device family (domain/family)
    fams = fn.defaultdict(list)
    for a in tnotupread:
        fams['/'.join(a.split('/')[-4:-2])].append(a)
    for f in sorted(fams):
        print('\t%s: %d attrs not updated' % (f, len(fams[f])))

    print()

    differ = [a for a in losts if diffs[a]]  #is True]
    print('\t%d/%d not updated attrs have also wrong values!!!' %
          (len(differ), len(losts)))

    rd = pta.Reader()
    only = [a for a in tnotupread if len(rd.is_attribute_archived(a)) == 1]
    print('\t%d/%d not updated attrs are archived only in %s' %
          (len(only), len(losts), schema))
    result.LostDiff = differ
    print()

    # report which archiver servers hold the lost attributes
    archs = sorted(set(archived.values()))
    astor = fn.Astor()
    astor.load_from_devs_list(archs)
    badloads = fn.defaultdict(list)
    for k, s in astor.items():
        for d in s.get_device_list():
            d = fn.tango.get_normal_name(d)
            for a in losts:
                if fn.tango.get_normal_name(archived[a]) == d:
                    badloads[k].append(a)
    for k, s in badloads.items():
        if len(s):
            print('\t%s archives %d lost attributes' % (k, len(s)))

    print('\t%d updated attrs are not readable' % len(tupnoread))

    result.ArchivedAndReadable = tok
    result.Updated = tup
    result.NotUpdated = tnotup
    result.Unreadable = tnotupnotread
    #result.DeviceNotRunning = nodevs
    result.ArchiverNotRunning = noarchs

    result.LostFamilies = fams

    # Tnones is for readable attributes not being archived
    tnones = [
        a for a in archived
        if (a not in values or values[a] and values[a][1] in (None, []))
        and a not in tupnoread and a not in tnotupread
    ]
    tupnones = [a for a in tnones if a in tup]

    if tupnones:
        print('\t%d archived readable attrs record empty values' %
              len(tupnones))

    result.Nones = tnones

    # disabled legacy ratio/summary computations, kept for reference
    if 0:

        get_ratio = lambda a, b: float(len(a)) / float(len(b))

        #result.ArchRatio = get_ratio([t for t in readarch if t not in tnotup],readarch)
        #result.ReadRatio = get_ratio(result.Readable,tattrs)
        #result.LostRatio = get_ratio([a for a in tread if a in tnotup],tread)
        #result.MissRatio = get_ratio([a for a in tread if a not in tarch],tread)
        #result.OkRatio = 1.0-result.LostRatio-result.MissRatio

        #result.Summary = '\n'.join((
        #('Checking archiving of %s attributes'%(len(attributes) if attributes else schema))
        #,('%d attributes in %s, %d are currently active'%(len(api),schema,len(tarch)))
        #,('%d devices with %d archived attributes are not running'%(len(nodevs),len([a for a in api if a.rsplit('/',1) in nodevs])))
        #,('%d archived attributes (%2.1f %%) are unreadable! (check and remove)'%(len(tnoread),1e2*get_ratio(tnoread,tarch)))
        #,('%d readable attributes are not archived'%(len(tmiss)))
        #,('%d attributes (readable or not) are updated (%2.1f %% of all readables)'%(len(tok),1e2*result.OkRatio))
        #,('-'*80)
        #,('%d archived attributes (readable or not) are not updated!'%len(tnotup))
        #,('%d archived and readable attributes are not updated! (check and restart?)'%len(treadnotup))
        #,('-'*80)
        #,('%d readable attributes have been removed in the last %d days!'%(len(removed),old_period/(24*3600)))
        #,('%d readable scalar attributes are not being archived (not needed anymore?)'%len(tmscalar))
        #,('%d readable array attributes are not being archived (Ok)'%len(tmarray))
        #,('%d readable array attributes are archived (Expensive)'%len(tarray))
        #,('')))

        #if trace: print(result.Summary)
        #print('%d readable lost,Ok = %2.1f%%, %2.1f %% over all Readables (%2.1f %% of total)'%\
        #(len(treadnotup),1e2*result.ArchRatio,1e2*result.OkRatio,1e2*result.ReadRatio))

    if action:
        if action == 'start_devices':
            print('Executing action %s' % action)
            api.start_devices()

        if action == 'restart_all':
            print('Executing action %s' % action)
            devs = api.get_archivers()
            astor = fn.Astor()
            print('Restarting %d devs:' % (len(devs), devs))
            astor.load_from_devs_list(devs)
            astor.stop_servers()
            fn.wait(10.)
            astor.start_servers()

        #print('NO ACTIONS ARE GONNA BE EXECUTED, AS THESE ARE ONLY RECOMMENDATIONS')
        #print("""
        #api = PyTangoArchiving.HDBpp(schema)
        #api.start_devices()

        #or

        #api = PyTangoArchiving.ArchivingAPI('%s')
        #lostdevs = sorted(set(api[a].archiver for a in result.NotUpdated))
        #print(lostdevs)
        #if lostdevs < a_reasonable_number:
        #astor = fn.Astor()
        #astor.load_from_devs_list(lostdevs)
        #astor.stop_servers()
        #fn.time.sleep(10.)
        #astor.start_servers()
        #"""%schema)

    print('\nfinished in %d seconds\n\n' % (fn.now() - ti))

    # optionally export the result Struct to json/pickle/text files
    if export is not None:
        if export is True:
            export = 'txt'
        for x in (export.split(',') if isString(export) else export):
            if x in ('json', 'pck', 'pickle', 'txt'):
                x = '/tmp/%s.%s' % (schema, x)
            print('Saving %s file with keys:\n%s' % (x, result.keys()))
            if 'json' in x:
                fn.dict2json(result.dict(), x)
            else:
                f = open(x, 'w')
                if 'pck' in x or 'pickle' in x:
                    pickle.dump(result.dict(), f)
                else:
                    f.write(fn.dict2str(result.dict()))
                f.close()

    return result
Example #17
0
 def setCommand(self, cmd):
     """Store the default command, coercing a scalar into a list."""
     if fandango.isSequence(cmd):
         self._default_cmd = cmd
     else:
         self._default_cmd = fandango.toList(cmd)
Example #18
0
def load(tango_host,
         instance,
         devices,
         replace={},
         overwrite=False,
         def_class='SimulatorDS'):
    """
    Load .json files into simulated devices, one .json file per device.

    The tango_host argument must match the current tango host; it is
    used to avoid accidentally loading into the wrong database.

    :param tango_host: must equal fn.get_tango_host()
    :param instance: server instance name for the created devices
    :param devices: list of devices, list of files, or {device: filename}
    :param replace: {regexp: replacement} applied to names and properties
    :param overwrite: allow overwriting existing devices
    :param def_class: device class used for non-PyAlarm devices
    :return: list of device names loaded
    :raises Exception: if a device exists and overwrite is False
    """
    assert tango_host == fn.get_tango_host()

    done = []
    if isMapping(devices):
        filenames = devices
    else:
        devices = fn.toList(devices)
        filenames = {}
        for dd in devices:
            if os.path.isfile(dd):
                df, dd = dd, file2dev(dd)
            else:
                df, dd = dev2file(dd), dd
            # BUGFIX: this assignment used to sit outside the loop, so
            # only the LAST device of the list was ever recorded
            filenames[dd] = df

    for dd, df in filenames.items():

        exists = ft.get_matching_devices(dd)
        if exists and not overwrite:
            raise Exception('Device %s Already Exists!!!' % dd)

        # context manager closes the file; json.load(open(df)) leaked it
        with open(df) as f:
            data = json.load(f)
        props = data['properties']
        # list(map(...)) so values are real lists on Python 3 too
        props = dict((str(k), list(map(str, v))) for k, v in props.items())

        # apply the regexp replacements to device name and property values
        for r, rr in replace.items():
            dd = clsub(r, rr, dd)
            for p, pp in props.items():
                for i, l in enumerate(pp):
                    props[p][i] = clsub(r, rr, l)

        if overwrite:
            # reset polling so stale polled lists are not restored
            props['polled_attr'] = []
            props['polled_cmd'] = []

        if data['dev_class'] == 'PyAlarm':
            if not exists:
                ft.add_new_device('PyAlarm/' + instance, 'PyAlarm', dd)
            # never restore notification receivers on a simulated setup
            props['AlarmReceivers'] = []
            ft.put_device_property(dd, props)

        else:
            if not exists:
                ft.add_new_device(def_class + '/' + instance, def_class, dd)

            ft.put_device_property(dd, props)

            #if data['dev_class'] not in ('PySignalSimulator','PyAttributeProcessor','PyStateComposer','CopyCatDS'):

            vals = dict((str(k), v['value'] if v else 0)
                        for k, v in data['attributes'].items())
            dynattrs = []
            attrprops = fn.dicts.defaultdict(dict)

            for k, v in sorted(vals.items()):
                if k.lower() in ('state', 'status'):
                    continue
                if v is None:
                    continue

                t = type(v).__name__
                if t == 'unicode': t = 'str'
                # render the value as a literal for the dynamic attr formula
                v = str(v) if t != 'str' else "'%s'" % v

                # declare a writable dynamic attribute with a default value
                dynattrs.append(
                    '%s = %s(VAR(%s,default=%s,WRITE=True)) #%s' %
                    (k, k, t, v, data['attributes'][k]['data_type']))

                attrprops[dd][k] = dict((p, data['attributes'][k].get(p, ''))
                                        for p in ('format', 'label', 'unit',
                                                  'min_alarm', 'max_alarm'))

            ft.put_device_property(dd, 'DynamicAttributes', dynattrs)
            try:
                ft.get_database().put_device_attribute_property(
                    dd, dict((k, v) for k, v in attrprops[dd].items() if v))
            except Exception:
                # best-effort: give the database time to recover, do not
                # abort the whole load for attribute properties
                fn.time.sleep(3.)

        done.append(dd)

    return done
Example #19
0
def load(tango_host,instance,devices,replace={},overwrite=False):
  """
  Load .json files into simulated devices.

  The tango_host argument must match the current tango host; it is used
  to avoid accidental loading into the wrong database.
  devices may be a list of devices, a list of files or a dictionary
  {device:filename}. Returns the list of device names loaded.
  """
  assert tango_host == fn.get_tango_host()
  
  done = []
  if isMapping(devices):
    filenames = devices
  else:
    devices = fn.toList(devices)
    filenames = {}
    for dd in devices:
      if os.path.isfile(dd):
        df,dd = dd,file2dev(dd)
      else:
        df,dd = dev2file(dd),dd
      # BUGFIX: this assignment used to sit outside the loop, so only
      # the LAST device of the list was ever recorded
      filenames[dd] = df
      
  for dd,df in filenames.items():
    
    exists =  ft.get_matching_devices(dd)
    if exists and not overwrite:
      raise Exception('Device %s Already Exists!!!'%dd)
    
    # context manager closes the file; json.load(open(df)) leaked it
    with open(df) as f:
      data = json.load(f)
    props = data['properties']
    # list(map(...)) so values are real lists on Python 3 too
    props = dict((str(k),list(map(str,v))) for k,v in props.items())
    
    # apply the regexp replacements to device name and property values
    for r,rr in replace.items():
      dd = clsub(r,rr,dd)
      for p,pp in props.items():
        for i,l in enumerate(pp):
          props[p][i] = clsub(r,rr,l)
          
    if overwrite:
      # reset polling so stale polled lists are not restored
      props['polled_attr'] = []
      props['polled_cmd'] = []
    
    if data['dev_class'] == 'PyAlarm':
      if not exists: ft.add_new_device('PyAlarm/'+instance,'PyAlarm',dd)
      # never restore notification receivers on a simulated setup
      props['AlarmReceivers'] = []
      ft.put_device_property(dd,props)
      
    else:
      if not exists: ft.add_new_device('PyAttributeProcessor/'+instance,'PyAttributeProcessor',dd)
        
      ft.put_device_property(dd,props)
      
      #if data['dev_class'] not in ('PySignalSimulator','PyAttributeProcessor','PyStateComposer','CopyCatDS'):
        
      vals = dict((str(k),v['value'] if v else 0) for k,v in data['attributes'].items())
      dynattrs = []
      attrprops = fn.dicts.defaultdict(dict)

      for k,v in sorted(vals.items()):
        if k.lower() in ('state','status'):
          continue
        if v is None:
          continue

        t = type(v).__name__
        if t == 'unicode': t = 'str'
        # render the value as a literal for the dynamic attr formula
        v = str(v) if t!='str' else "'%s'"%v

        # declare a writable dynamic attribute with a default value
        dynattrs.append(
            '%s = %s(VAR(%s,default=%s,WRITE=True)) #%s'%(k,
            k,t,v,data['attributes'][k]['data_type']))
        
        attrprops[dd][k] = dict((p,data['attributes'][k].get(p,'')) for p in 
            ('format','label','unit','min_alarm','max_alarm'))
      
      ft.put_device_property(dd,'DynamicAttributes',dynattrs)
      try:
        ft.get_database().put_device_attribute_property(dd,
          dict((k,v) for k,v in attrprops[dd].items() if v))
      except Exception:
        # best-effort: give the database time to recover, do not abort
        # the whole load for attribute properties
        fn.time.sleep(3.)
    
    done.append(dd)

  return done