Example 1
def isAlive(ds):
    # Attempt to get the detector
    try:
        online.getSomeDetector(ds,
                               spec0='SQS_DPU_LIC/CAM/YAG_UPSTR:daqOutput',
                               spec1='data.image.pixels')
        #online.getSomeDetector(ds, spec0='SQS_DPU_LIC/CAM/YAG_UPSTR:output', spec1='data.image.data')
    except Exception as exc:
        print(str(exc))
        print('MCP DATA SOURCE CHANGED!!!')
        print('MCP DATA SOURCE CHANGED!!!')
        print('MCP DATA SOURCE CHANGED!!!')
        return False

    # Attempt to get the TOF
    try:
        online.getSomeDetector(ds,
                               spec0='SQS_DIGITIZER_UTC1/ADC/1:network',
                               spec1='digitizers.channel_1_A.raw.samples')
    except Exception as exc:
        print(str(exc))
        print('TOF DATA SOURCE CHANGED!!!')
        print('TOF DATA SOURCE CHANGED!!!')
        print('TOF DATA SOURCE CHANGED!!!')
        return False

    # If both succeed return true
    return True
def makeDatastreamPipeline(source):
    ds = online.servedata(source) #get the datastream
    ds = online.getTof(ds) #get the tofs
    ds = processTofs(ds) #treat the tofs
    ds = online.getSomeDetector(ds, name='tid', spec0='SQS_DIGITIZER_UTC1/ADC/1:network', spec1='digitizers.trainId') #get current trainids from digitizer property
    #ds = online.getSomeDetector(ds, name='tid', spec0='SA3_XTD10_XGM/XGM/DOOCS:output', spec1='timestamp.tid', readFromMeta=True) #get current trainids from gmd property
    if pnCCD_in_stream:
        ds = online.getSomePnCCD(ds, name='pnCCD', spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output', spec1='data.image') #get pnCCD
        #ds = online.getSomeDetector(ds, name='tid', spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output', spec1='timestamp.tid', readFromMeta=True) #get current trainids from gmd property

    ds = online.getSomeDetector(ds, name='gmd', spec0='SA3_XTD10_XGM/XGM/DOOCS:output', spec1='data.intensitySa3TD') #get GMD
    return ds
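
processTofs() is used in several of these pipelines but never defined in the examples. The sketch below is a hypothetical generator stage, assuming the stream yields per-train dicts with a 'tof' entry and a negative-going digitizer trace; the real beamline helper may differ.

import numpy as np

def processTofs(ds):
    # Hypothetical stage: baseline-subtract and invert each TOF trace.
    # The baseline window and sign convention are assumptions, not taken
    # from the original code.
    for data in ds:
        tof = np.asarray(data['tof'], dtype=float).squeeze()
        tof -= np.mean(tof[:1000])  # assumed baseline region
        data['tof'] = -tof          # assumed negative-going MCP signal
        yield data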
Example 3
def pipeline_XGM(d):
    d = online.getSomeDetector(d,
                               name='gmd',
                               spec0='SA3_XTD10_XGM/XGM/DOOCS:output',
                               spec1='data.intensitySa3TD')
    d['gmd_0'] = d['gmd'][0]
    return d
Example 4
def makeDatastreamPipeline(source):
    '''
    build the datastream pipeline
    '''
    # get the data stream
    ds = online.servedata(source)

    # get the pnccds from datastream
    if pnCCD_in_stream:
        ds = online.getSomePnCCD(
            ds,
            name='pnCCD',
            spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output',
            spec1='data.image')  #get pnCCD
        ds = online.getSomeDetector(
            ds,
            name='tid',
            spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output',
            spec1='timestamp.tid',
            readFromMeta=True)  #get current train IDs from the pnCCD property
        ds = processPnCCDs(ds)  # convert pnccd adu to photon count
    # get the TOFs from the datastream; the idx_range cut is applied directly on retrieval
    if tof_in_stream:
        if not set_tof_trace_to_zero:
            ds = online.getTof(
                ds, idx_range=[start_tof, end_tof], baselineTo=1000
            )  #get the tofs, uses element specs from experiment defaults
        ds = processTofs(ds)  #treat the tofs
        ds = online.getSomeDetector(
            ds,
            name='tid',
            spec0='SQS_DIGITIZER_UTC1/ADC/1:network',
            spec1='digitizers.trainId'
        )  #get current trainids from digitizer property
    # get gmd data from datastream
    if gmd_in_stream:
        ds = online.getSomeDetector(
            ds,
            name='gmd',
            spec0='SA3_XTD10_XGM/XGM/DOOCS:output',
            spec1='data.intensitySa3TD'
        )  #get GMD pulse energies: returns an array with one pulse energy per bunch
        #~ ds = online.getSomeDetector(ds, name='gmd_x', spec0='SA3_XTD10_XGM/XGM/DOOCS:output', spec1='data.xSa3TD') #get GMD pointing x
        #~ ds = online.getSomeDetector(ds, name='gmd_y', spec0='SA3_XTD10_XGM/XGM/DOOCS:output', spec1='data.ySa3TD') #get GMD pointing y
        #~ ds = online.getSomeDetector(ds, name='tid', spec0='SA3_XTD10_XGM/XGM/DOOCS:output', spec1='timestamp.tid', readFromMeta=True)
    return ds
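
The flags and bounds referenced above (pnCCD_in_stream, tof_in_stream, gmd_in_stream, set_tof_trace_to_zero, start_tof, end_tof) are module-level settings that are not part of the example. A hypothetical configuration block, with placeholder values:

# Hypothetical module-level configuration; only the names come from the code
# above, the values are placeholders.
pnCCD_in_stream = True
tof_in_stream = True
gmd_in_stream = True
set_tof_trace_to_zero = False
start_tof, end_tof = 200000, 260000  # placeholder sample window for the digitizer trace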
Example 5
def makeDatastreamPipeline(source):
    ds = online.servedata(source)  #get the datastream
    if pnCCD_in_stream:
        ds = online.getSomePnCCD(
            ds,
            name='pnCCD',
            spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output',
            spec1='data.image')  #get pnCCD
        ds = online.getSomeDetector(
            ds,
            name='tid',
            spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output',
            spec1='timestamp.tid',
            readFromMeta=True)  #get current train IDs from the pnCCD property
        ds = processPnCCDs(ds)  # convert pnccd adu to photon count
    if gmd_in_stream:
        ds = online.getSomeDetector(ds,
                                    name='gmd',
                                    spec0='SA3_XTD10_XGM/XGM/DOOCS:output',
                                    spec1='data.intensitySa3TD')  #get GMD
        #~ ds = online.getSomeDetector(ds, name='gmd_x', spec0='SA3_XTD10_XGM/XGM/DOOCS:output', spec1='data.xSa3TD') #get GMD
        #~ ds = online.getSomeDetector(ds, name='gmd_y', spec0='SA3_XTD10_XGM/XGM/DOOCS:output', spec1='data.ySa3TD') #get GMD
        #~ ds = online.getSomeDetector(ds, name='tid', spec0='SA3_XTD10_XGM/XGM/DOOCS:output', spec1='timestamp.tid', readFromMeta=True)
    return ds
Example 6
def pipeline_TOF(d, tof_prop):
    # Load
    d['tof_x'] = tof_prop['tof_x']
    d = online.getTof(d,
                      idx_range=[tof_prop['start_tof'], tof_prop['end_tof']],
                      baselineTo=tof_prop['baseline_to'])
    d = online.getSomeDetector(d,
                               name='tid',
                               spec0=defaultConf['tofDevice'],
                               spec1='digitizers.trainId'
                               )  #get current trainids from digitizer property
    # Process
    d['tof_integral'] = abs(np.sum(d['tof']))
    d['tof_height'] = abs(np.min(d['tof']))

    return d
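
pipeline_TOF() expects a tof_prop dict and a global defaultConf that are not shown here. The following hypothetical example uses the digitizer source name from the other examples for 'tofDevice'; all numeric values are placeholders.

import numpy as np

# Hypothetical configuration for pipeline_TOF(); the keys come from the code
# above, the numeric values are placeholders.
defaultConf = {'tofDevice': 'SQS_DIGITIZER_UTC1/ADC/1:network'}
tof_prop = {
    'start_tof': 200000,                 # placeholder start index
    'end_tof': 260000,                   # placeholder end index
    'baseline_to': 1000,                 # same value as baselineTo in Example 4
    'tof_x': np.arange(200000, 260000),  # assumed sample axis for plotting
}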
Example 7
def makeDatastreamPipeline(source):
    ds = online.servedata(source)  #get the datastream
    ds = online.getTof(ds, idx_range=[start_tof, end_tof])  #get the tofs
    ds = processTofs(ds)  #treat the tofs
    ds = online.getSomeDetector(
        ds,
        name='tid',
        spec0=defaultConf['tofDevice'],
        spec1='digitizers.trainId'
    )  #get current trainids from digitizer property
    #ds = online.getSomeDetector(ds, name='tid', spec0='SA3_XTD10_XGM/XGM/DOOCS:output', spec1='timestamp.tid', readFromMeta=True) #get current trainids from gmd property
    #if pnCCD_in_stream:
    #ds = online.getSomePnCCD(ds, name='pnCCD', spec0='SQS_NQS_PNCCD1MP/CAL/CORR_CM:output', spec1='data.image') #get pnCCD
    #ds = online.getSomeDetector(ds, name='tid', spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output', spec1='timestamp.tid', readFromMeta=True) #get current trainids from gmd property

    #ds = online.getSomeDetector(ds, name='gmd', spec0='SA3_XTD10_XGM/XGM/DOOCS:output', spec1='data.intensitySa3TD') #get GMD
    return ds
Example 8
def makeSomeData():
    #source = 'tcp://10.253.0.142:6666'  # beamline address (overridden below by the local test address)
    source = 'tcp://127.0.0.1:8010'
    ds = online.servedata(source)  #get the datastream
    ds = online.getTof(ds)  #get the tofs
    ds = foldTofs(ds)
    print("get some detector")
    ds = online.getSomeDetector(ds,
                                name='tid',
                                spec0='SQS_DIGITIZER_UTC1/ADC/1:network',
                                spec1='digitizers.trainId')
    print("done")
    # initialize variables for performance monitoring
    t_start = time.time()
    for_loop_step_dur = 0
    n = -1
    freq_avg = 0
    dt_avg = 0
    trainId = 0
    trainId_old = -1
    skip_count = 0
    # start with for loop
    print("Start With For Loop")
    for data in ds:
        # performance monitor - frequency of displaying data + loop duration
        n += 1
        dt = (time.time() - t_start)
        t_start = time.time()
        freq = 1 / dt
        if n > 0:
            dt_avg = (dt_avg * (n - 1) + dt) / n
            freq_avg = 1 / dt_avg
            loop_classification_percent = for_loop_step_dur / 0.1 * 100  # fraction of the 10 Hz train period (0.1 s)
            if loop_classification_percent < 100:
                loop_classification_msg = "OK"
            else:
                loop_classification_msg = "TOO LONG!!!"
            print("Frequency: " + str(round(freq_avg, 2)) +
                  " Hz  |  skipped: " + str(skip_count) + " ( " +
                  str(round(skip_count / n * 100, 1)) + " %)  |  n: " +
                  str(n) + "/" + str(trainId) + "  |  Loop benchmark: " +
                  str(round(loop_classification_percent, 1)) +
                  " % (OK if <100%) - " + loop_classification_msg)

        t_1 = time.time()
        # extract TOF data
        #tof = data['tof']
        t_2 = time.time()
        #tof = tof[200000:200000+N_datapts]
        # print(tof.shape)    # just in case debugging because of shape errors
        #N_samples = len(tof)
        #print(data['tof'].shape)
        x = np.arange(N_datapts)
        y = np.squeeze(data['tof'])
        t_3 = time.time()

        # Train ID Data
        trainId_old = trainId
        trainId = str(data['tid'])
        if int(trainId) - int(trainId_old) != 1:
            #print('SKIP')
            skip_count += 1

        # update from callback
        doc.add_next_tick_callback(partial(update, x=x, y=y, tid=trainId))

        # monitor for loop step duration
        for_loop_step_dur = time.time() - t_start
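
The loop above hands data to a Bokeh document via doc.add_next_tick_callback(), but doc and update() are defined elsewhere in the script. A minimal hypothetical Bokeh-side counterpart (names and figure layout are assumptions, not taken from the original):

from bokeh.io import curdoc
from bokeh.models import ColumnDataSource
from bokeh.plotting import figure

doc = curdoc()
_source = ColumnDataSource(data=dict(x=[], y=[]))
_fig = figure(title='TOF live')
_fig.line('x', 'y', source=_source)
doc.add_root(_fig)

def update(x, y, tid):
    # Runs inside the Bokeh event loop when scheduled via add_next_tick_callback().
    _source.data = dict(x=x, y=y)
    _fig.title.text = 'TOF live - train {}'.format(tid)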
Example 9
def makeDatastreamPipeline(source):
    ds = online.servedata(source) #get the datastream
    ds = online.getTof(ds) #get the tofs
    ds = processTofs(ds) #treat the tofs
    ds = online.getSomeDetector(ds, name='tid', spec0='SQS_DIGITIZER_UTC1/ADC/1:network', spec1='digitizers.trainId') #get current trainids from digitizer property
    return ds
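
A hypothetical consumer loop for the pipeline above; the address is the local test address used in Example 8, not necessarily the live one.

ds = makeDatastreamPipeline('tcp://127.0.0.1:8010')
for data in ds:
    print(data['tid'])  # train ID of the current record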
Example 10

def integral(d, int_range=[400000, 450000]):
    int_val = np.sum(d[int_range[0]:int_range[1]])
    return int_val


## initialize plot windows
### tofplot
_tofplot = pg.plot(title='ToF Simple Live {}'.format(tools.__version__))

## initialize some global lists
_int_value_list = list()
_scan_paramter_list = list()
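
## hypothetical helper: plotTOF() is called in the loop below but not defined
## in this snippet; a minimal sketch using the _tofplot window created above
def plotTOF(tof):
    # redraw the current trace in the pyqtgraph plot window (assumed behaviour)
    _tofplot.plot(np.squeeze(tof), clear=True)
    pg.QtCore.QCoreApplication.processEvents()  # keep the GUI responsive in the blocking loop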

## setup the pipeline
ds = online.servedata(source)  #get the datastream
ds = online.getTof(ds, idx_range=[0, 1200000])  #get the tofs
ds = online.getSomeDetector(
    ds,
    name='parker',
    spec0='SQS_NQS_CRSC/TSYS/PARKER_TRIGGER',
    spec1='actualDelay.value')  #get a random piece of data

## start pulling the data
for data in ds:  #this could be made into a pipeline maybe
    tof = data['tof']
    parker = data['parker']
    print(parker)
    plotTOF(tof)
    #_int_value_list.append(integral(tof, int_range=[400000,450000]))