def main(source, tofbg=None):
    '''
    Iterate over the ToF datastream served by source and update the plots.

    Input:
        source: ip address as string
        tofbg: optional path to a .npz background file; 'arr_0' is loaded
               and the slice [262000:290000] is subtracted from every
               incoming trace
    Output:
        none, updates plots
    '''
    # hoisted out of the loop: the original re-ran `import gc` on every
    # 100th iteration, which is wasted work
    import gc

    if tofbg is not None:
        print('tofbg "{}" loaded...'.format(tofbg))
        tofbg = np.load(tofbg)['arr_0'][262000:290000]
        print('tofbg average is {}'.format(np.mean(tofbg)))

    # NOTE(review): the background slice is 28000 samples while the trace
    # window below is 400000 samples — confirm the subtraction broadcasts
    # as intended (it looks shape-mismatched from here).
    for i, tof in enumerate(xfel.getTof(xfel.servedata(source), idx_range=[100000, 500000])):
        if tofbg is not None:
            tof['tof'] = tof['tof'] - tofbg
        plottofavg(tof['tof'])
        plotintegral(tof['tof'])
        if i % 100 == 0:
            # periodic manual collection keeps long live sessions from
            # accumulating memory between plot updates
            print('running gc...')
            print(tof['tof'].shape)
            gc.collect()
Esempio n. 2
0
def main(source):
    '''
    Iterate over the datastream served by source
    Input:
        source: ip address as string
    Output:
        none, updates plots
    '''
    # the loop index was never used, so plain iteration suffices
    for packet in online.servedata(source):
        plotdaqalive(isAlive(packet))
Esempio n. 3
0
def buildDataStreamPipeline():
    """Assemble the datastream, chaining each enabled sub-pipeline onto it."""
    stream = online.servedata(source)

    # each flag attaches one detector-specific processing stage
    if _inStream_TOF:
        stream = pipeline_TOF(stream, tof_prop)
    if _inStream_pnCCD:
        stream = pipeline_pnCCD(stream, pnCCD_prop)
    if _inStream_XGM:
        stream = pipeline_XGM(stream)

    return stream
def makeDatastreamPipeline(source):
    """Build the datastream: ToF traces, ToF treatment, train ids,
    optional pnCCD frames, and GMD pulse energies."""
    stream = online.servedata(source)      # raw datastream
    stream = online.getTof(stream)         # attach the ToF traces
    stream = processTofs(stream)           # treat the tofs
    # current train ids, read from the digitizer device
    stream = online.getSomeDetector(
        stream,
        name='tid',
        spec0='SQS_DIGITIZER_UTC1/ADC/1:network',
        spec1='digitizers.trainId')
    if pnCCD_in_stream:
        stream = online.getSomePnCCD(
            stream,
            name='pnCCD',
            spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output',
            spec1='data.image')

    # GMD data
    stream = online.getSomeDetector(
        stream,
        name='gmd',
        spec0='SA3_XTD10_XGM/XGM/DOOCS:output',
        spec1='data.intensitySa3TD')
    return stream
def main(source):
    '''
    Iterate over the datastream served by source
    Input:
        source: ip address as string
    Output:
        none, updates plots
    '''
    stream = xfel.getImageandTof(xfel.servedata(source))
    for item in stream:
        plotbrightest(item['image'], tid=item['tid'], tof=item['tof'])
Esempio n. 6
0
def main(source):
    '''
    Iterate over the datastream served by source
    Input:
        source: ip address as string
    Output:
        none, updates plots
    '''
    stream = online.servedata(source)
    # cut each trace to the configured index range
    stream = online.getTof(stream, idx_range=tof_range)

    tof_window = pg.plot(title='ToF Simple Live {}'.format(tools.__version__))

    for item in stream:
        tof_window.plot(item['tof'].flatten(), clear=True)
        # keep the Qt event loop responsive between updates
        pg.QtGui.QApplication.processEvents()
Esempio n. 7
0
def makeDatastreamPipeline(source):
    '''
    build the datastream pipeline
    '''
    stream = online.servedata(source)

    if pnCCD_in_stream:
        # pnCCD frames, their train ids, then ADU -> photon conversion
        stream = online.getSomePnCCD(
            stream, name='pnCCD',
            spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output',
            spec1='data.image')
        stream = online.getSomeDetector(
            stream, name='tid',
            spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output',
            spec1='timestamp.tid',
            readFromMeta=True)
        stream = processPnCCDs(stream)

    if tof_in_stream:
        if not set_tof_trace_to_zero:
            # cut the trace to [start_tof, end_tof]; element specs come
            # from the experiment defaults
            stream = online.getTof(
                stream, idx_range=[start_tof, end_tof], baselineTo=1000)
        stream = processTofs(stream)
        # train ids read from the digitizer device
        stream = online.getSomeDetector(
            stream, name='tid',
            spec0='SQS_DIGITIZER_UTC1/ADC/1:network',
            spec1='digitizers.trainId')

    if gmd_in_stream:
        # GMD pulse energies: one array of per-bunch energies per train
        stream = online.getSomeDetector(
            stream, name='gmd',
            spec0='SA3_XTD10_XGM/XGM/DOOCS:output',
            spec1='data.intensitySa3TD')

    return stream
Esempio n. 8
0
def makeDatastreamPipeline(source):
    """Datastream with cut ToF traces and digitizer train ids attached."""
    stream = online.servedata(source)
    stream = online.getTof(stream, idx_range=[start_tof, end_tof])
    stream = processTofs(stream)
    # train ids read from the configured digitizer device
    stream = online.getSomeDetector(stream,
                                    name='tid',
                                    spec0=defaultConf['tofDevice'],
                                    spec1='digitizers.trainId')
    return stream
def main(source):
    '''
    Iterate over the datastream served by source
    Input:
        source: ip address as string
    Output:
        none, updates plots
    '''
    # everything is done in pipeline form on the datastream
    stream = online.servedata(source)
    stream = online.getTof(stream, idx_range=tof_range)

    # run the live analysis
    for item in stream:
        plotHits(item)
Esempio n. 10
0
def makeDatastreamPipeline(source):
    """Datastream with optional pnCCD (converted to photon counts) and GMD."""
    stream = online.servedata(source)

    if pnCCD_in_stream:
        stream = online.getSomePnCCD(
            stream, name='pnCCD',
            spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output',
            spec1='data.image')
        stream = online.getSomeDetector(
            stream, name='tid',
            spec0='SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output',
            spec1='timestamp.tid',
            readFromMeta=True)
        stream = processPnCCDs(stream)  # ADU -> photon count

    if gmd_in_stream:
        stream = online.getSomeDetector(
            stream, name='gmd',
            spec0='SA3_XTD10_XGM/XGM/DOOCS:output',
            spec1='data.intensitySa3TD')

    return stream
Esempio n. 11
0
def makeSomeData():
    """
    Pull ToF traces and train ids from the datastream and hand each trace
    to the Bokeh document via ``update``, printing loop-rate diagnostics.

    Relies on module globals: ``online``, ``foldTofs``, ``N_datapts``,
    ``doc``, ``update``, ``partial``, ``time`` and ``np``.
    """
    source = 'tcp://10.253.0.142:6666'
    source = 'tcp://127.0.0.1:8010'  # local test server overrides the DAQ address
    ds = online.servedata(source)  #get the datastream
    ds = online.getTof(ds)  #get the tofs
    ds = foldTofs(ds)
    print("get some detector")
    ds = online.getSomeDetector(ds,
                                name='tid',
                                spec0='SQS_DIGITIZER_UTC1/ADC/1:network',
                                spec1='digitizers.trainId')
    print("done")

    # performance-monitoring state
    t_start = time.time()
    for_loop_step_dur = 0
    n = -1
    freq_avg = 0
    dt_avg = 0
    trainId = 0
    trainId_old = -1
    skip_count = 0

    print("Start With For Loop")
    for data in ds:
        # running average of loop frequency / duration
        n += 1
        dt = (time.time() - t_start)
        t_start = time.time()
        if n > 0:
            dt_avg = (dt_avg * (n - 1) + dt) / n
            freq_avg = 1 / dt_avg
            # one loop step is "OK" while it stays under the 0.1 s budget
            loop_classification_percent = for_loop_step_dur / 0.1 * 100
            if loop_classification_percent < 100:
                loop_classification_msg = "OK"
            else:
                loop_classification_msg = "TOO LONG!!!"
            print("Frequency: " + str(round(freq_avg, 2)) +
                  " Hz  |  skipped: " + str(skip_count) + " ( " +
                  str(round(skip_count / n * 100, 1)) + " %)  |  n: " +
                  str(n) + "/" + str(trainId) + "  |  Loop benchmark: " +
                  str(round(loop_classification_percent, 1)) +
                  " % (OK if <100%) - " + loop_classification_msg)

        # extract TOF data for the plot
        x = np.arange(N_datapts)
        y = np.squeeze(data['tof'])

        # count non-consecutive train ids as skipped trains.
        # BUG FIX: the original used ``is not 1`` — an identity comparison
        # on an int, which is a SyntaxWarning on Python >= 3.8 and only
        # works by accident of CPython's small-int cache; ``!= 1`` is the
        # correct value comparison.
        trainId_old = trainId
        trainId = str(data['tid'])
        if int(trainId) - int(trainId_old) != 1:
            skip_count += 1

        # hand the new trace to the Bokeh document thread
        doc.add_next_tick_callback(partial(update, x=x, y=y, tid=trainId))

        # duration of this loop step, reported on the next iteration
        for_loop_step_dur = time.time() - t_start
Esempio n. 12
0
def makeDatastreamPipeline(source):
    """ToF datastream: raw traces, treatment, and digitizer train ids."""
    stream = online.servedata(source)
    stream = online.getTof(stream)
    stream = processTofs(stream)
    stream = online.getSomeDetector(stream, name='tid',
                                    spec0='SQS_DIGITIZER_UTC1/ADC/1:network',
                                    spec1='digitizers.trainId')
    return stream
Esempio n. 13
0

def integral(d, int_range=(400000, 450000)):
    """
    Return the sum of ``d`` over the index window
    ``int_range[0]:int_range[1]``.

    BUG FIXES:
    - the original computed the sum but never returned it, so the
      commented caller below (``_int_value_list.append(integral(...))``)
      would have stored ``None``;
    - the mutable-list default argument was replaced by a tuple to avoid
      the shared-mutable-default pitfall (indexing is unchanged, so
      callers passing lists still work).
    """
    lo, hi = int_range
    return np.sum(d[lo:hi])


## initialize plot windows
### tofplot — live ToF display, titled with the tools package version
_tofplot = pg.plot(title='ToF Simple Live {}'.format(tools.__version__))

## initialize some global lists (accumulators for a scan; currently only
## filled by the commented-out append at the bottom)
# NOTE(review): "_scan_paramter_list" is misspelled ("parameter") — kept
# as-is because it is a module-level name other code may reference.
_int_value_list = list()
_scan_paramter_list = list()

## setup the pipeline: serve data, attach ToF traces, and pull the
## Parker trigger delay as an extra per-train value
ds = online.servedata(source)  #get the datastream
ds = online.getTof(ds, idx_range=[0, 1200000])  #get the tofs
ds = online.getSomeDetector(
    ds,
    name='parker',
    spec0='SQS_NQS_CRSC/TSYS/PARKER_TRIGGER',
    spec1='actualDelay.value')  #get a random piece of data

## start pulling the data — runs until the stream ends or is interrupted
for data in ds:  #this could be made into a pipeline maybe
    tof = data['tof']
    parker = data['parker']
    print(parker)
    plotTOF(tof)
    #_int_value_list.append(integral(tof, int_range=[400000,450000]))