Esempio n. 1
0
 def addPV(label):
     """Create a default-handled unsigned-int PV named '<name>:<label>'."""
     shared = SharedPV(handler=DefaultPVHandler(),
                       initial=NTScalar('I').wrap(0))
     provider.add('{}:{}'.format(name, label), shared)
     return shared
Esempio n. 2
0
class GroupSetup(object):
    """PV interface for configuring one XPM readout group.

    Creates PVs under the prefix *name* for L0 rate/destination selection,
    run enable, master arbitration, message injection and step control, and
    translates puts on those PVs into register writes on *app*.
    """

    def __init__(self, name, app, group, stats, init=None):
        """Register all group PVs and push initial values to hardware.

        name  : PV name prefix for this group
        app   : XPM application register tree
        group : readout group index this instance controls
        stats : statistics holder (its _master field mirrors master state)
        init  : optional configdb dict; supplies the starting L0Delay value
        """
        self._group = group
        self._app   = app
        self._stats = stats

        # PVs whose puts invoke a bound-method callback.
        def addPV(label,cmd,init=0,set=False):
            pv = SharedPV(initial=NTScalar('I').wrap(init), 
                          handler=PVHandler(cmd))
            provider.add(name+':'+label,pv)
            if set:
                # run the callback once so hardware matches the initial value
                cmd(pv,init)
            return pv

        self._pv_L0Select   = addPV('L0Select'               ,self.put)
        self._pv_FixedRate  = addPV('L0Select_FixedRate'     ,self.put)
        self._pv_ACRate     = addPV('L0Select_ACRate'        ,self.put)
        self._pv_ACTimeslot = addPV('L0Select_ACTimeslot'    ,self.put)
        self._pv_EventCode  = addPV('L0Select_EventCode'     ,self.put)
        self._pv_Sequence   = addPV('L0Select_Sequence'      ,self.put)
        self._pv_SeqBit     = addPV('L0Select_SeqBit'        ,self.put)
        self._pv_DstMode    = addPV('DstSelect'              ,self.put, 1)
        self._pv_DstMask    = addPV('DstSelect_Mask'         ,self.put)
        self._pv_Run        = addPV('Run'                    ,self.run   , set=True)
        self._pv_Master     = addPV('Master'                 ,self.master, set=True)

        # StepDone is updated by the server side only -> default handler.
        self._pv_StepDone   = SharedPV(initial=NTScalar('I').wrap(0), handler=DefaultPVHandler())
        provider.add(name+':StepDone', self._pv_StepDone)

        self._pv_StepGroups = addPV('StepGroups'            ,self.stepGroups, set=True)
        self._pv_StepEnd    = addPV('StepEnd'               ,self.stepEnd   , set=True)

        # Rebind addPV: these PVs map directly onto indexed registers.
        def addPV(label,reg,init=0,set=False):
            pv = SharedPV(initial=NTScalar('I').wrap(init), 
                          handler=IdxRegH(reg,self._app.partition,group))
            provider.add(name+':'+label,pv)
            if set:
                self._app.partition.set(group)
                reg.set(init)
            return pv

        self._pv_MsgHeader  = addPV('MsgHeader' , app.msgHdr ,  0, set=True)
        self._pv_MsgPayload = addPV('MsgPayload', app.msgPayl,  0, set=True)

        # Rebind addPV again: L0Delay writes go through a delay->pipeline-depth
        # conversion before reaching the register.
        def addPV(label,reg,init=0,set=False):
            pv = SharedPV(initial=NTScalar('I').wrap(init), 
                          handler=L0DelayH(reg,self._app.partition,group))
            provider.add(name+':'+label,pv)
            if set:
                self._app.partition.set(group)
                reg.set(pipelinedepth_from_delay(init))
            return pv

        self._pv_L0Delay    = addPV('L0Delay'   , app.pipelineDepth, init['L0Delay'][group] if init else 90, set=True)

        #  initialize
        self.put(None,None)

        # Final rebinding: a plain read/write PV with no hardware side effect.
        def addPV(label):
            pv = SharedPV(initial=NTScalar('I').wrap(0), 
                          handler=DefaultPVHandler())
            provider.add(name+':'+label,pv)
            return pv

        self._pv_MsgConfigKey = addPV('MsgConfigKey')

        # One inhibit-control block per inhibit register.
        self._inhibits = []
        self._inhibits.append(PVInhibit(name, app, app.inh_0, group, 0))
        self._inhibits.append(PVInhibit(name, app, app.inh_1, group, 1))
        self._inhibits.append(PVInhibit(name, app, app.inh_2, group, 2))
        self._inhibits.append(PVInhibit(name, app, app.inh_3, group, 3))

    def dump(self):
        """Print the group's current master/rate/destination/enable registers."""
        print('Group: {}  Master: {}  RateSel: {:x}  DestSel: {:x}  Ena: {}'
              .format(self._group, self._app.l0Master.get(), self._app.l0RateSel.get(), self._app.l0DestSel.get(), self._app.l0En.get()))

    def setFixedRate(self):
        """Program l0RateSel for fixed-rate mode (selector 0 in bits 15:14)."""
        rateVal = (0<<14) | (self._pv_FixedRate.current()['value']&0xf)
        self._app.l0RateSel.set(rateVal)

    def setACRate(self):
        """Program l0RateSel for AC-rate mode (selector 1) with timeslot mask."""
        acRate = self._pv_ACRate    .current()['value']
        acTS   = self._pv_ACTimeslot.current()['value']
        rateVal = (1<<14) | ((acTS&0x3f)<<3) | (acRate&0x7)
        self._app.l0RateSel.set(rateVal)

    def setEventCode(self):
        """Program l0RateSel for event-code mode (selector 2)."""
        code   = self._pv_EventCode.current()['value']
        rateVal = (2<<14) | ((code&0xf0)<<4) | (code&0xf)
        self._app.l0RateSel.set(rateVal)

    def setSequence(self):
        """Program l0RateSel for sequencer mode with the sequence index in
        bits 13:8.  NOTE(review): uses selector 2 like event-code mode --
        confirm against the firmware register map."""
        seqIdx = self._pv_Sequence.current()['value']
        seqBit = self._pv_SeqBit  .current()['value']
        rateVal = (2<<14) | ((seqIdx&0x3f)<<8) | (seqBit&0xf)
        self._app.l0RateSel.set(rateVal)

    def setDestn(self):
        """Program l0DestSel from the destination mode and mask PVs."""
        mode = self._pv_DstMode.current()['value']
        mask = self._pv_DstMask.current()['value']
        destVal  = (mode<<15) | (mask&0x7fff)
        self._app.l0DestSel.set(destVal)

    def master(self, pv, val):
        """Handle puts to the Master PV: claim or release group mastership.

        Releasing mastership also disables L0 and forces the Run PV to 0.
        The lock is taken as a context manager so it is released even if a
        register access raises (the previous acquire/release pair could
        leave the lock held on an exception).
        """
        with lock:
            self._app.partition.set(self._group)
            forceUpdate(self._app.l0Master)

            if val==0:
                self._app.l0Master.set(0)
                self._app.l0En    .set(0)
                self._stats._master = 0

                curr = self._pv_Run.current()
                curr['value'] = 0
                self._pv_Run.post(curr)
            else:
                self._app.l0Master.set(1)
                self._stats._master = 1

    def put(self, pv, val):
        """Handle puts to any L0Select/DstSelect PV: reprogram the rate and
        destination selection registers from the current PV values."""
        with lock:
            self._app.partition.set(self._group)
            forceUpdate(self._app.l0RateSel)
            mode = self._pv_L0Select.current()['value']
            if mode == RateSel.FixedRate:
                self.setFixedRate()
            elif mode == RateSel.ACRate:
                self.setACRate()
            elif mode == RateSel.EventCode:
                self.setEventCode()
            elif mode == RateSel.Sequence:
                self.setSequence()
            else:
                print('L0Select mode invalid {}'.format(mode))

            forceUpdate(self._app.l0DestSel)
            self.setDestn()
            self.dump()

    def stepGroups(self, pv, val):
        """Write this group's step-group mask register."""
        getattr(self._app,'stepGroup%i'%self._group).set(val)

    def stepEnd(self, pv, val):
        """Clear StepDone, then write this group's step-end count register."""
        self.stepDone(False)
        getattr(self._app,'stepEnd%i'%self._group).set(val)

    def stepDone(self, val):
        """Post StepDone = 1/0 with the current wall-clock timestamp."""
        value = self._pv_StepDone.current()
        value['value'] = 1 if val else 0
        timev = divmod(float(time.time_ns()), 1.0e9)
        value['timeStamp.secondsPastEpoch'], value['timeStamp.nanoseconds'] = timev
        self._pv_StepDone.post(value)

    def run(self, pv, val):
        """Handle puts to the Run PV: enable or disable L0 for this group."""
        with lock:
            self._app.partition.set(self._group)
            forceUpdate(self._app.l0En)
            enable = 1 if val else 0
            self._app.l0En.set(enable)
            self.dump()
Esempio n. 3
0
    def __init__(self, name, app, group, stats, init=None):
        """Register all PVs for one readout group under the prefix *name*.

        name  : PV name prefix
        app   : XPM application register tree
        group : readout group index
        stats : statistics holder used by the run/master callbacks
        init  : optional configdb dict; supplies the starting L0Delay value
        """
        self._group = group
        self._app   = app
        self._stats = stats

        # PVs whose puts invoke a bound-method callback.
        def addPV(label,cmd,init=0,set=False):
            pv = SharedPV(initial=NTScalar('I').wrap(init), 
                          handler=PVHandler(cmd))
            provider.add(name+':'+label,pv)
            if set:
                # run the callback once so hardware matches the initial value
                cmd(pv,init)
            return pv

        self._pv_L0Select   = addPV('L0Select'               ,self.put)
        self._pv_FixedRate  = addPV('L0Select_FixedRate'     ,self.put)
        self._pv_ACRate     = addPV('L0Select_ACRate'        ,self.put)
        self._pv_ACTimeslot = addPV('L0Select_ACTimeslot'    ,self.put)
        self._pv_EventCode  = addPV('L0Select_EventCode'     ,self.put)
        self._pv_Sequence   = addPV('L0Select_Sequence'      ,self.put)
        self._pv_SeqBit     = addPV('L0Select_SeqBit'        ,self.put)
        self._pv_DstMode    = addPV('DstSelect'              ,self.put, 1)
        self._pv_DstMask    = addPV('DstSelect_Mask'         ,self.put)
        self._pv_Run        = addPV('Run'                    ,self.run   , set=True)
        self._pv_Master     = addPV('Master'                 ,self.master, set=True)

        # StepDone is updated by the server side only -> default handler.
        self._pv_StepDone   = SharedPV(initial=NTScalar('I').wrap(0), handler=DefaultPVHandler())
        provider.add(name+':StepDone', self._pv_StepDone)

        self._pv_StepGroups = addPV('StepGroups'            ,self.stepGroups, set=True)
        self._pv_StepEnd    = addPV('StepEnd'               ,self.stepEnd   , set=True)

        # Rebind addPV: these PVs write directly to indexed registers.
        def addPV(label,reg,init=0,set=False):
            pv = SharedPV(initial=NTScalar('I').wrap(init), 
                          handler=IdxRegH(reg,self._app.partition,group))
            provider.add(name+':'+label,pv)
            if set:
                self._app.partition.set(group)
                reg.set(init)
            return pv

        self._pv_MsgHeader  = addPV('MsgHeader' , app.msgHdr ,  0, set=True)
        self._pv_MsgPayload = addPV('MsgPayload', app.msgPayl,  0, set=True)

        # Rebind addPV again: L0Delay writes go through a delay->pipeline-depth
        # conversion before reaching the register.
        def addPV(label,reg,init=0,set=False):
            pv = SharedPV(initial=NTScalar('I').wrap(init), 
                          handler=L0DelayH(reg,self._app.partition,group))
            provider.add(name+':'+label,pv)
            if set:
                self._app.partition.set(group)
                reg.set(pipelinedepth_from_delay(init))
            return pv

        self._pv_L0Delay    = addPV('L0Delay'   , app.pipelineDepth, init['L0Delay'][group] if init else 90, set=True)

        #  initialize
        self.put(None,None)

        # Final rebinding: a plain read/write PV with no hardware side effect.
        def addPV(label):
            pv = SharedPV(initial=NTScalar('I').wrap(0), 
                          handler=DefaultPVHandler())
            provider.add(name+':'+label,pv)
            return pv

        self._pv_MsgConfigKey = addPV('MsgConfigKey')

        # One inhibit-control block per inhibit register.
        self._inhibits = []
        self._inhibits.append(PVInhibit(name, app, app.inh_0, group, 0))
        self._inhibits.append(PVInhibit(name, app, app.inh_1, group, 1))
        self._inhibits.append(PVInhibit(name, app, app.inh_2, group, 2))
        self._inhibits.append(PVInhibit(name, app, app.inh_3, group, 3))
Esempio n. 4
0
 def addPV(label, reg, init=0):
     """Expose register *reg* as PV '<name>:<label><link>' and write *init* to it."""
     shared = SharedPV(handler=IdxRegH(reg, linkreg, link),
                       initial=NTScalar('I').wrap(init))
     provider.add('{}:{}{:d}'.format(name, label, link), shared)
     reg.set(init)  # push the initial value to hardware
     return shared
Esempio n. 5
0
    def __init__(self,
                 p,
                 m,
                 name=None,
                 ip=None,
                 xpm=None,
                 stats=None,
                 handle=None,
                 db=None,
                 cuInit=False,
                 fidPrescale=200,
                 fidPeriod=1400 / 1.3):
        """Set up all XPM control PVs and start the notify thread.

        p   : PV provider shared by every handler (stored in a module global)
        m   : lock serializing register access (stored in a module global)
        name: PV name prefix, ending in ':<xpm number>'
        ip  : dotted-quad IP of the XPM, used to derive the transmit link ID
        db  : 'url,name,instrument,alias' configdb specification, or None
        stats/handle/cuInit/fidPrescale/fidPeriod: stored or forwarded to the
            subordinate controls; fidPrescale/fidPeriod are kept as globals.
        """
        global provider
        provider = p
        global lock
        lock = m
        global _fidPrescale
        _fidPrescale = fidPrescale
        global _fidPeriod
        _fidPeriod = fidPeriod

        # Assign transmit link ID
        ip_comp = ip.split('.')
        xpm_num = name.rsplit(':', 1)[1]
        v = 0xff00000 | ((int(xpm_num) & 0xf) << 16) | (
            (int(ip_comp[2]) & 0xf) << 12) | ((int(ip_comp[3]) & 0xff) << 4)
        xpm.XpmApp.paddr.set(v)
        print('Set PADDR to 0x{:x}'.format(v))

        self._name = name
        self._ip = ip
        self._xpm = xpm
        self._db = db
        self._handle = handle

        init = None
        try:
            # maxsplit=3 yields exactly the four expected fields; the previous
            # value of 4 could produce a fifth field and break the unpack.
            db_url, db_name, db_instrument, db_alias = db.split(',', 3)
            print('db {:}'.format(db))
            print('url {:}  name {:}  instr {:}  alias {:}'.format(
                db_url, db_name, db_instrument, db_alias))
            print('device {:}'.format(name))
            init = get_config_with_params(db_url, db_instrument, db_name,
                                          db_alias, name)
            print('cfg {:}'.format(init))
        except Exception:
            # A missing/malformed db is non-fatal: fall back to defaults.
            # (Was a bare except; narrowed so KeyboardInterrupt/SystemExit
            # are no longer swallowed.)
            print('Caught exception reading configdb [{:}]'.format(db))

        self._links = []
        for i in range(24):
            self._links.append(LinkCtrls(name, xpm, i))

        app = xpm.XpmApp

        # One PLL-dump command PV per AMC card.
        self._pv_amcDumpPLL = []
        for i in range(2):
            pv = SharedPV(initial=NTScalar('I').wrap(0),
                          handler=IdxCmdH(app.amcPLL.Dump, app.amc, i))
            provider.add(name + ':DumpPll%d' % i, pv)
            self._pv_amcDumpPLL.append(pv)

        self._cu = CuGenCtrls(name + ':XTPG', xpm, dbinit=init)
        #self._cu   = None

        self._group = GroupCtrls(name, app, stats, init=init)

        self._pv_usRxReset = SharedPV(initial=NTScalar('I').wrap(0),
                                      handler=CmdH(xpm.UsTiming.C_RxReset))
        provider.add(name + ':Us:RxReset', self._pv_usRxReset)

        self._pv_cuRxReset = SharedPV(initial=NTScalar('I').wrap(0),
                                      handler=CmdH(xpm.CuTiming.C_RxReset))
        provider.add(name + ':Cu:RxReset', self._pv_cuRxReset)

        self._pv_l0HoldReset = SharedPV(initial=NTScalar('I').wrap(0),
                                        handler=RegH(app.l0HoldReset,
                                                     archive=False))
        provider.add(name + ':L0HoldReset', self._pv_l0HoldReset)

        # Background thread pushing status updates to clients.
        self._thread = threading.Thread(target=self.notify)
        self._thread.start()

        print('monStreamPeriod {}'.format(app.monStreamPeriod.get()))
        app.monStreamPeriod.set(125000000)
        app.monStreamEnable.set(1)
Esempio n. 6
0
            self.server.process(0.1)

            while (sim_pv_state != self.input_pv_state):

                sim_pv_state = copy.deepcopy(self.input_pv_state)
                output_pv_state = self.model.run(self.input_pv_state,
                                                 verbose=True)
                self.driver.set_output_pvs(output_pv_state)

    def stop_server(self):
        """Signal the serving loop to exit by clearing its run flag."""
        self.serve_data = False


if __name__ == '__main__':

    pv = SharedPV(MyHandler())

    sm = SurrogateModel(model_file='model_weights.h5')

    cmd_pvdb = {}
    for ii, input_name in enumerate(sm.input_names):
        cmd_pvdb[input_name] = {'type': 'd', 'value': sm.input_ranges[ii][0]}

    sim_pvdb = {}
    for ii, output_name in enumerate(sm.output_names):
        sim_pvdb[output_name] = {'type': 'd', 'value': 0}

    server = PVServer(cmd_pvdb)
    server.start_server()

    # Add in noise for fun
Esempio n. 7
0
class PVCtrls(threading.Thread):
    """Daemon thread serving the HPS field-selection PVs plus a PAYLOAD PV
    whose structure is rebuilt to match the current field mask."""

    def __init__(self, prefix, app):
        threading.Thread.__init__(self, daemon=True)
        # ':'-terminated PV name prefix
        self.prefix = prefix + ':'
        self.app = app

    def run(self):
        """Create the PVs, apply the initial mask, then serve forever."""
        self.provider = StaticProvider(__name__)

        # 31 default field names: 'pid00'..'pid1e'
        self.fieldNames = SharedPV(initial=NTScalar('as').wrap(
            {'value': ['pid%02x' % i for i in range(31)]}),
                                   handler=DefaultPVHandler(self))

        # 'i' (integer) or 'f' (float)
        self.fieldTypes = SharedPV(initial=NTScalar('aB').wrap(
            {'value': [ord('i')] * 31}),
                                   handler=DefaultPVHandler(self))

        # bitmask selecting which of the 31 fields appear in PAYLOAD
        self.fieldMask = SharedPV(initial=NTScalar('I').wrap({'value':
                                                              0x8000}),
                                  handler=DefaultPVHandler(self))

        # placeholder; replaced with a properly typed structure in update()
        self.payload = SharedPV(initial=Value(Type([]), {}),
                                handler=DefaultPVHandler(self))

        print('Hosting {:}HPS:FIELDMASK'.format(self.prefix))
        self.provider.add(self.prefix + 'HPS:FIELDNAMES', self.fieldNames)
        self.provider.add(self.prefix + 'HPS:FIELDTYPES', self.fieldTypes)
        self.provider.add(self.prefix + 'HPS:FIELDMASK', self.fieldMask)
        self.provider.add(self.prefix + 'PAYLOAD', self.payload)
        self.update()

        try:
            Server.forever(providers=[self.provider])
        except:
            # NOTE(review): bare except also catches KeyboardInterrupt --
            # presumably so Ctrl-C prints this message; confirm intended.
            print('Server exited')

    def update(self):
        """Rebuild PAYLOAD's structure from mask/names/types and re-enable
        the firmware channel mask when any mask bits are set."""
        self.app.Enable.set(0)  # quiesce while the payload type changes

        mask = self.fieldMask.current().get('value')
        names = self.fieldNames.current().get('value')
        types = self.fieldTypes.current().get('value')
        oid = self.payload.current().getID()
        nid = str(mask)

        print('PVCtrls.update mask[{:x}] oid[{:}]'.format(mask, oid))

        # the new structure ID must differ from the old one
        if nid == oid:
            nid += 'a'
        ntypes = []
        nvalues = {}
        ntypes.append(('valid', 'i'))
        nvalues['valid'] = 0
        # one (name, type-code) pair per set mask bit, low bits first
        mmask = mask
        for i in range(31):
            if mmask & 1:
                ntypes.append((names[i], chr(types[i])))
                nvalues[names[i]] = 0
            mmask >>= 1

        # replace the PAYLOAD PV so clients see the re-typed structure
        pvname = self.prefix + 'PAYLOAD'
        self.provider.remove(pvname)
        self.payload = SharedPV(initial=Value(Type(ntypes, id=nid), nvalues),
                                handler=DefaultPVHandler(self))
        print('Payload struct ID %s' % self.payload.current().getID())
        self.provider.add(pvname, self.payload)

        if mask:
            self.app.channelMask.set(mask)
            self.app.Enable.set(1)
Esempio n. 8
0
 def addPV(label, ctype, init, cmd):
     """Register '<name>:<label>' as a scalar of type *ctype* whose puts invoke *cmd*."""
     shared = SharedPV(handler=PVHandler(cmd),
                       initial=NTScalar(ctype).wrap(init))
     provider.add('{}:{}'.format(name, label), shared)
     return shared
Esempio n. 9
0
    'enum': NTEnum().wrap(0),
}

# Guards concurrent creation/removal of the dynamic PVs below.
pvs_lock = Lock()
# Table of dynamically created mailbox PVs, keyed by PV name.
pvs = {}

# Provider through which all PVs (static and dynamic) are served.
provider = StaticProvider('dynamicbox')


class MailboxHandler(object):
    """Minimal mailbox put handler: echo the written value back to the PV."""

    def put(self, pv, op):
        """Accept any put: post the client's value, then complete the operation."""
        incoming = op.value()
        pv.post(incoming)
        op.done()


# Control PVs: the scalar value is informational only; both are driven via RPC.
addpv = SharedPV(initial=NTScalar('s').wrap('Only RPC'))
delpv = SharedPV(initial=NTScalar('s').wrap('Only RPC'))
# List of the currently existing dynamic PV names.
listpv = SharedPV(nt=list_type, initial=[])

provider.add(prefix + "add", addpv)
provider.add(prefix + "del", delpv)
provider.add(prefix + "list", listpv)
_log.info("add with %s, remove with %s, list with %s", prefix + "add",
          prefix + "del", prefix + "list")


@addpv.rpc
def adder(pv, op):
    name = op.value().query.name
    type = op.value().query.get('type', 'int')
Esempio n. 10
0
    P.add_argument('pvname')
    P.add_argument('-g',
                   '--gray',
                   action='store_const',
                   const=True,
                   default=True)
    P.add_argument('-C',
                   '--color',
                   action='store_const',
                   const=False,
                   dest='gray')
    P.add_argument('-d',
                   '--debug',
                   action='store_const',
                   const=logging.DEBUG,
                   default=logging.INFO)
    return P.parse_args()


args = getargs()

logging.basicConfig(level=args.debug)

# Serve one image PV; `face` is presumably SciPy's sample image -- TODO
# confirm.  Grayscale unless -C/--color was given.
pv = SharedPV(nt=NTNDArray(), initial=face(gray=args.gray))

print('serving pv:', args.pvname)

# Block forever serving the single PV under the requested name.
Server.forever(providers=[{
    args.pvname: pv,
}])
Esempio n. 11
0
 def addPV(label, ctype='I', init=0):
     """Register '<name>:<label>' as a default-handled scalar of type *ctype*."""
     shared = SharedPV(handler=DefaultPVHandler(),
                       initial=NTScalar(ctype).wrap(init))
     provider.add('{}:{}'.format(name, label), shared)
     return shared
Esempio n. 12
0
# Both datasets must have been loaded before this point.
if rmat_data is None or twiss_data is None:
    raise Exception("Could not load saved rmat or twiss data.")

# NTTable column schema for the twiss parameters ('d' = double, 's' = string).
twiss_cols = [('ORDINAL', 'd'), ('ELEMENT_NAME', 's'),
              ('EPICS_CHANNEL_ACCESS_NAME', 's'), ('POSITION_INDEX', 's'),
              ('LEFF', 'd'), ('TOTAL_ENERGY', 'd'), ('PSI_X', 'd'),
              ('BETA_X', 'd'), ('ALPHA_X', 'd'),
              ('ETA_X', 'd'), ('ETAP_X', 'd'), ('PSI_Y', 'd'), ('BETA_Y', 'd'),
              ('ALPHA_Y', 'd'), ('ETA_Y', 'd'), ('ETAP_Y', 'd')]
twiss_table = NTTable(twiss_cols)
# Re-shape the saved column-oriented data into one dict per table row.
rows = [{key: twiss_data['value'][key][i]
         for key, _ in twiss_cols}
        for i in range(0, len(twiss_data['value']['ELEMENT_NAME']))]
twiss_vals = twiss_table.wrap(rows)
twiss_pv = SharedPV(nt=twiss_table, initial=twiss_vals)

# NTTable column schema for the transport matrices: element identification
# plus the 36 entries R11..R66 of each 6x6 R matrix.
rmat_cols = [('ORDINAL', 'd'), ('ELEMENT_NAME', 's'),
             ('EPICS_CHANNEL_ACCESS_NAME', 's'), ('POSITION_INDEX', 's'),
             ('Z_POSITION', 'd'), ('R11', 'd'), ('R12', 'd'), ('R13', 'd'),
             ('R14', 'd'), ('R15', 'd'), ('R16', 'd'), ('R21', 'd'),
             ('R22', 'd'), ('R23', 'd'), ('R24', 'd'), ('R25', 'd'),
             ('R26', 'd'), ('R31', 'd'), ('R32', 'd'), ('R33', 'd'),
             ('R34', 'd'), ('R35', 'd'), ('R36', 'd'), ('R41', 'd'),
             ('R42', 'd'), ('R43', 'd'), ('R44', 'd'), ('R45', 'd'),
             ('R46', 'd'), ('R51', 'd'), ('R52', 'd'), ('R53', 'd'),
             ('R54', 'd'), ('R55', 'd'), ('R56', 'd'), ('R61', 'd'),
             ('R62', 'd'), ('R63', 'd'), ('R64', 'd'), ('R65', 'd'),
             ('R66', 'd')]
rmat_table = NTTable(rmat_cols)
rows = [{key: rmat_data['value'][key][i]
Esempio n. 13
0
    def __init__(self, provider, name, dev):
        """Register a read/write float PV '<name>:CuPhase' and remember *dev*."""
        self._dev = dev

        phase_pv = SharedPV(handler=DefaultPVHandler(),
                            initial=NTScalar('f').wrap(0))
        provider.add(name + ':CuPhase', phase_pv)
        self._pv = phase_pv
Esempio n. 14
0
from p4p.nt import NTScalar
from p4p.server import Server
from p4p.server.thread import SharedPV

# Three independent scalar (double) PVs with distinct initial values.
pv1 = SharedPV(nt=NTScalar('d'), initial=0.0)
pv2 = SharedPV(nt=NTScalar('d'), initial=4.2)
pv3 = SharedPV(nt=NTScalar('d'), initial=24.2346692)


# Attach a put handler to pv1 only; pv2/pv3 get no put handler here.
@pv1.put
#@pv2.put
#@pv3.put
def handleput(pv, op):
    """Report the old and new values, then accept the put."""
    print(
        f"You changed my value to: {op.value().raw['value']}, I used to be: {pv.current().raw['value']}"
    )
    pv.post(op.value())
    op.done()


print("Server running....")
# Serve the three PVs until interrupted.
Server.forever(providers=[{
    'p4p:pv1': pv1,
    'p4p:pv2': pv2,
    'p4p:pv3': pv3,
}])
Esempio n. 15
0
 def addPV(label,reg):
     """Register '<name>:<label>' backed by register *reg*, starting at 0."""
     shared = SharedPV(handler=RegH(reg),
                       initial=NTScalar('I').wrap(0))
     provider.add('{}:{}'.format(name, label), shared)
     return shared
Esempio n. 16
0
def MySharedPV(nt, cb=None):
    """Build a SharedPV whose structure is derived from *nt* via pvTypes/pvValues,
    dispatching puts to the optional callback *cb*."""
    structure = Value(Type(pvTypes(nt)), pvValues(nt))
    return SharedPV(initial=structure, handler=DefaultPVHandler(cb))
Esempio n. 17
0
    def __init__(self, p, m, name=None, ip=None, xpm=None, stats=None, db=None, cuInit=False):
        """Set up all XPM control PVs and start the notify thread.

        p     : PV provider shared by all handlers (stored in a module global)
        m     : lock serializing register access (stored in a module global)
        name  : PV name prefix ending in ':<xpm number>'
        ip    : dotted-quad IP address, used to derive the transmit link ID
        xpm   : XPM register tree
        stats : statistics holder passed to the group controls
        db    : 'url,name,instrument,alias' configdb specification, or None
        cuInit: skip sequence-engine setup when True
        """
        global provider
        provider = p
        global lock
        lock     = m

        # Assign transmit link ID
        ip_comp = ip.split('.')
        xpm_num = name.rsplit(':',1)[1]
        v = 0xff00000 | ((int(xpm_num)&0xf)<<16) | ((int(ip_comp[2])&0xf)<<12) | ((int(ip_comp[3])&0xff)<< 4)
        xpm.XpmApp.paddr.set(v)
        print('Set PADDR to 0x{:x}'.format(v))

        self._name  = name
        self._ip    = ip
        self._xpm   = xpm
        self._db    = db

        init = None
        try:
            # maxsplit=3 yields exactly the four expected fields; the previous
            # value of 4 could produce a fifth field and break the unpack.
            db_url, db_name, db_instrument, db_alias = db.split(',',3)
            print('db {:}'.format(db))
            print('url {:}  name {:}  instr {:}  alias {:}'.format(db_url,db_name,db_instrument,db_alias))
            print('device {:}'.format(name))
            init = get_config_with_params(db_url, db_instrument, db_name, db_alias, name)
            print('cfg {:}'.format(init))
        except Exception:
            # A missing/malformed db is non-fatal: fall back to defaults.
            # (Was a bare except; narrowed so KeyboardInterrupt/SystemExit
            # are no longer swallowed.)
            print('Caught exception reading configdb [{:}]'.format(db))

        self._links = []
        for i in range(24):
            self._links.append(LinkCtrls(name, xpm, i))

        app = xpm.XpmApp

        # One PLL-dump command PV per AMC card.
        self._pv_amcDumpPLL = []
        for i in range(2):
            pv = SharedPV(initial=NTScalar('I').wrap(0), 
                          handler=IdxCmdH(app.amcPLL.Dump,app.amc,i))
            provider.add(name+':DumpPll%d'%i,pv)
            self._pv_amcDumpPLL.append(pv)

        self._cu    = CuGenCtrls(name+':XTPG', xpm, dbinit=init)

        self._group = GroupCtrls(name, app, stats, init=init)

        #  The following section will throw an exception if the CuInput PV is not set properly
        if not cuInit:
            self._seq = PVSeq(provider, name+':SEQENG:0', ip, Engine(0, xpm.SeqEng_0))

            self._pv_dumpSeq = SharedPV(initial=NTScalar('I').wrap(0), 
                                        handler=CmdH(self._seq._eng.dump))
            provider.add(name+':DumpSeq',self._pv_dumpSeq)

        self._pv_usRxReset = SharedPV(initial=NTScalar('I').wrap(0),
                                      handler=CmdH(xpm.UsTiming.C_RxReset))
        provider.add(name+':Us:RxReset',self._pv_usRxReset)

        self._pv_cuRxReset = SharedPV(initial=NTScalar('I').wrap(0),
                                      handler=CmdH(xpm.CuTiming.C_RxReset))
        provider.add(name+':Cu:RxReset',self._pv_cuRxReset)

        # Background thread pushing status updates to clients.
        self._thread = threading.Thread(target=self.notify)
        self._thread.start()
Esempio n. 18
0
    def main(self):
        """Create the output PVs, then serve them while monitoring the input
        image PV; parks the calling thread until SIGINT."""
        cli = Context()

        pvs = {}
        # table of detected "features"
        self.features = pvs[args.output + 'features'] = SharedPV(
            nt=NTTable(columns=[
                ('X', 'd'),
                ('Y', 'd'),
                ('W', 'd'),
                ('H', 'd'),
                ('idx', 'd'),
            ]),
            initial=[])
        # output image (example)
        self.imgOut = pvs[args.output + 'img'] = SharedPV(nt=NTNDArray(),
                                                          initial=np.zeros(
                                                              (0, 0),
                                                              dtype='u1'))
        # display execution time
        self.execTime = pvs[args.output + 'etime'] = SharedPV(
            nt=NTScalar('d', display=True),
            initial={
                'value': 0.0,
                'display.units': 's',
            })
        # background threshold level
        bg = pvs[args.output + 'bg'] = SharedPV(nt=NTScalar('I', display=True),
                                                initial={
                                                    'value': self.bgLvl,
                                                    'display.units': 'px',
                                                })

        @bg.put
        def set_bg(pv, op):
            # clamp to >= 1, echo the accepted value, acknowledge the put
            self.bgLvl = max(1, int(op.value()))
            pv.post(self.bgLvl)
            op.done()

        # image flattening mode
        imode = pvs[args.output + 'imode'] = SharedPV(
            nt=NTEnum(), initial={'choices': [e.name for e in ImageMode]})

        @imode.put
        def set_imode(pv, op):
            self.imode = ImageMode(op.value())
            pv.post(self.imode)
            op.done()

        # separately publish info of largest feature
        self.X = pvs[args.output + 'x'] = SharedPV(nt=NTScalar('d'),
                                                   initial=0.0)
        self.Y = pvs[args.output + 'y'] = SharedPV(nt=NTScalar('d'),
                                                   initial=0.0)
        self.W = pvs[args.output + 'w'] = SharedPV(nt=NTScalar('d'),
                                                   initial=0.0)
        self.H = pvs[args.output + 'h'] = SharedPV(nt=NTScalar('d'),
                                                   initial=0.0)

        print("Output PVs", list(pvs.keys()))

        # NOTE(review): output names use a module-global `args` while the
        # input uses `self.args` -- confirm both refer to the same namespace.
        # subscribe to input image PV and run local server
        with cli.monitor(self.args.input,
                         self.on_image,
                         request='record[pipeline=true,queueSize=2]'), Server(
                             providers=[pvs]):
            # park while work happens in other tasks
            done = threading.Event()
            signal.signal(signal.SIGINT, lambda x, y: done.set())
            done.wait()
Esempio n. 19
0
    def __init__(
        self,
        model_class,
        model_kwargs: dict,
        in_pvdb: Dict[str, dict],
        out_pvdb: Dict[str, dict],
        prefix: str,
    ) -> None:
        """
        Initialize the global process-variable state, run the model once on
        the main thread to obtain starting outputs, and build the input and
        output SharedPVs.

        Parameters
        ----------
        model_class: class
            Model class to be instantiated

        model_kwargs: dict
            kwargs for initialization

        in_pvdb: dict
            Maps each input process-variable name to its type (str), prec
            (precision), value (float), units (str), range (List[float])

        out_pvdb: dict
            Maps each output process-variable name to its type (str), prec
            (precision), value (float), units (str), range (List[float])

        prefix: str
            Prefix to use when serving
        """
        # need these to be global to access from threads
        global providers
        global input_pvs
        global model_loader
        providers = {}
        input_pvs = {}

        # initialize loader for model
        model_loader = ModelLoader(model_class, model_kwargs)

        # these aren't currently used; but, probably not a bad idea to have around
        # for introspection
        self.in_pvdb = in_pvdb
        self.out_pvdb = out_pvdb

        # seed the global input state with each input's starting value
        for in_pv in in_pvdb:
            input_pvs[in_pv] = in_pvdb[in_pv]["value"]

        # use main thread loaded model to do initial model run
        starting_output = model_loader.model.run(input_pvs)

        # create PVs for model inputs
        for in_pv in in_pvdb:
            pvname = f"{prefix}:{in_pv}"
            pv = SharedPV(
                handler=InputHandler(
                    prefix),  # Use InputHandler class to handle callbacks
                nt=NTScalar("d"),
                initial=in_pvdb[in_pv]["value"],
            )
            providers[pvname] = pv

        # use default handler for the output process variables
        # updates to output pvs are handled from post calls within the input update
        for out_pv, value in starting_output.items():
            pvname = f"{prefix}:{out_pv}"
            if out_pv not in ARRAY_PVS:
                # scalar output
                pv = SharedPV(nt=NTScalar(), initial=value)
            else:
                # array output served as an NTNDArray
                # (was a redundant `elif out_pv in ARRAY_PVS`, the exact
                # complement of the `if` above)
                pv = SharedPV(nt=NTNDArray(), initial=value)

            providers[pvname] = pv
        # (removed a dead trailing `for/else` whose body was only `pass`)
Esempio n. 20
0
 def addPV(label, init, reg, archive):
     """Register '<name>:<label>' bound to *reg* and write *init* to hardware."""
     shared = SharedPV(handler=RegH(reg, archive=archive),
                       initial=NTScalar('I').wrap(init))
     provider.add('{}:{}'.format(name, label), shared)
     reg.set(init)  # push the initial value to hardware
     return shared
Esempio n. 21
0
def addPV(name,ctype,init=0):
    """Register the PV *name* as a default-handled scalar of type *ctype*."""
    shared = SharedPV(handler=DefaultPVHandler(),
                      initial=NTScalar(ctype).wrap(init))
    provider.add(name, shared)
    return shared
Esempio n. 22
0
 def addPV(label,cmd,init):
     """Register '<name>:<label><idx>' handled by *cmd*, invoking it once to initialize."""
     shared = SharedPV(handler=PVHandler(cmd),
                       initial=NTScalar('I').wrap(init))
     provider.add('{}:{}{:d}'.format(name, label, idx), shared)
     cmd(shared, init)  # initialize
     return shared
Esempio n. 23
0
 def addPV(label, init, handler):
     """Register '<name>:<label><link>' using the supplied *handler*."""
     shared = SharedPV(initial=NTScalar('I').wrap(init),
                       handler=handler)
     provider.add('{}:{}{:d}'.format(name, label, link), shared)
     return shared
Esempio n. 24
0
    def setup_server(self) -> None:
        """Configure and start the pvAccess server.

        Builds a SharedPV for every input variable (writes handled by
        PVAccessInputHandler) and every output variable (default handler),
        then starts the P4P server and copies its configuration into
        self._conf.

        Raises
        ------
        ValueError
            If a variable has an unsupported variable_type.
        """
        # ignore interrupt in subprocess
        signal.signal(signal.SIGINT, signal.SIG_IGN)

        logger.info("Initializing pvAccess server")
        # initialize global inputs
        for variable in self._input_variables.values():
            pvname = f"{self._prefix}:{variable.name}"

            # prepare scalar variable types
            if variable.variable_type == "scalar":
                nt = NTScalar("d")
                initial = variable.value
            # prepare image variable types
            elif variable.variable_type == "image":
                nd_array = variable.value.view(NTNDArrayData)
                nd_array.attrib = {
                    "x_min": variable.x_min,
                    "y_min": variable.y_min,
                    "x_max": variable.x_max,
                    "y_max": variable.y_max,
                }
                nt = NTNDArray()
                initial = nd_array

            elif variable.variable_type == "array":
                # NOTE(review): inputs test value_type == "str" but outputs
                # below test "string" -- confirm which spelling is canonical.
                if variable.value_type == "str":
                    nt = NTScalar("as")
                    initial = variable.value

                else:
                    nd_array = variable.value.view(NTNDArrayData)
                    nt = NTNDArray()
                    initial = nd_array

            else:
                # was ValueError("...: %s", x), which passed the type as a
                # second argument and never formatted it into the message
                raise ValueError("Unsupported variable type provided: %s" %
                                 variable.variable_type)

            handler = PVAccessInputHandler(pvname=pvname,
                                           is_constant=variable.is_constant,
                                           server=self)
            pv = SharedPV(handler=handler, nt=nt, initial=initial)
            self._providers[pvname] = pv

        # use default handler for the output process variables
        # updates to output pvs are handled from post calls within the input
        # update
        for variable in self._output_variables.values():
            pvname = f"{self._prefix}:{variable.name}"
            if variable.variable_type == "scalar":
                nt = NTScalar()
                initial = variable.value

            elif variable.variable_type == "image":
                nd_array = variable.value.view(NTNDArrayData)

                # get image limits from model output
                nd_array.attrib = {
                    "x_min": np.float64(variable.x_min),
                    "y_min": np.float64(variable.y_min),
                    "x_max": np.float64(variable.x_max),
                    "y_max": np.float64(variable.y_max),
                }

                nt = NTNDArray()
                initial = nd_array

            elif variable.variable_type == "array":

                if variable.value_type == "string":
                    nt = NTScalar("as")
                    initial = variable.value

                else:
                    nd_array = variable.value.view(NTNDArrayData)
                    nt = NTNDArray()
                    initial = nd_array
            else:
                raise ValueError("Unsupported variable type provided: %s" %
                                 variable.variable_type)

            pv = SharedPV(nt=nt, initial=initial)
            self._providers[pvname] = pv
        # (removed a dead trailing `for/else` whose body was only `pass`)

        # initialize pva server
        self.pva_server = P4PServer(providers=[self._providers])

        # update configuration
        for key in self.pva_server.conf():
            self._conf[key] = self.pva_server.conf()[key]

        logger.info("pvAccess server started")