Example #1
def test_basic_usage():
    for i in range(5):
        insert_run_start(time=float(i), scan_id=i + 1,
                         owner='nedbrainard', beamline_id='example',
                         uid=str(uuid.uuid4()))
    header_1 = db[-1]

    header_ned = db(owner='nedbrainard')
    header_ned = db.find_headers(owner='nedbrainard')  # deprecated API
    header_null = db(owner='this owner does not exist')
    # smoke test
    db.fetch_events(header_1)
    db.fetch_events(header_ned)
    db.fetch_events(header_null)
    list(get_events(header_1))
    list(get_events(header_null))
    get_table(header_1)
    get_table(header_ned)
    get_table(header_null)

    # get events for multiple headers
    list(get_events(db[-2:]))

    # test time shift issue GH9
    table = get_table(db[105])
    assert table.notnull().all().all()
Example #2
def test_configuration():
    rs = insert_run_start(
        time=ttime.time(), scan_id=105, owner="stepper", beamline_id="example", uid=str(uuid.uuid4()), cat="meow"
    )
    step_scan.run(run_start_uid=rs)
    h = db[rs]
    # check that config is not included by default
    ev = next(get_events(h))
    assert set(ev["data"].keys()) == set(["Tsam", "point_det"])
    # find config in descriptor['configuration']
    ev = next(get_events(h, fields=["Tsam", "exposure_time"]))
    assert "exposure_time" in ev["data"]
    assert ev["data"]["exposure_time"] == 5
    assert "exposure_time" in ev["timestamps"]
    assert ev["timestamps"]["exposure_time"] == 0.0
    # find config in start doc
    ev = next(get_events(h, fields=["Tsam", "cat"]))
    assert "cat" in ev["data"]
    assert ev["data"]["cat"] == "meow"
    assert "cat" in ev["timestamps"]
    # find config in stop doc
    ev = next(get_events(h, fields=["Tsam", "exit_status"]))
    assert "exit_status" in ev["data"]
    assert ev["data"]["exit_status"] == "success"
    assert "exit_status" in ev["timestamps"]
Example #3
def test_configuration():
    rs = insert_run_start(time=ttime.time(), scan_id=105,
                          owner='stepper', beamline_id='example',
                          uid=str(uuid.uuid4()), cat='meow')
    step_scan.run(run_start_uid=rs)
    h = db[rs]
    # check that config is not included by default
    ev = next(get_events(h))
    assert set(ev['data'].keys()) == set(['Tsam', 'point_det'])
    # find config in descriptor['configuration']
    ev = next(get_events(h, fields=['Tsam', 'exposure_time']))
    assert 'exposure_time' in ev['data']
    assert ev['data']['exposure_time'] == 5
    assert 'exposure_time' in ev['timestamps']
    assert ev['timestamps']['exposure_time'] == 0.
    # find config in start doc
    ev = next(get_events(h, fields=['Tsam', 'cat']))
    assert 'cat' in ev['data']
    assert ev['data']['cat'] == 'meow'
    assert 'cat' in ev['timestamps']
    # find config in stop doc
    ev = next(get_events(h, fields=['Tsam', 'exit_status']))
    assert 'exit_status' in ev['data']
    assert ev['data']['exit_status'] == 'success'
    assert 'exit_status' in ev['timestamps']
Example #4
def get_fastccd_timestamps(header, tag='fccd_image'):
    """Return the FastCCD timestamps from the Areadetector Data File

    Return a list of numpy arrays of the timestamps for the images as
    recorded in the datafile.

    Parameters
    ----------
    header : databroker header
        This header defines the run
    tag : string
        This is the tag or name of the fastccd.

    Returns
    -------
        list of arrays of the timestamps

    """
    hover = {tag: AreaDetectorHDF5SWMRTimestampHandler}
    events = list(get_events(header, [tag], handler_overrides=hover))

    timestamps = [ev['data'][tag] for ev in events if tag in ev['data']]

    return timestamps
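A minimal usage sketch for the helper above (assumes a configured broker `db`; the run index is arbitrary):

header = db[-1]
timestamps = get_fastccd_timestamps(header)  # default tag is 'fccd_image'
print(len(timestamps), 'events with FastCCD timestamps')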
Example #5
def test_hdf_io():
    rows, cols = 1, 5
    rs_uid, ev_uids = hdf_data_io(rows, cols)
    h = db[rs_uid]
    for e in get_events(h, fill=True):
        _retrieve_data_helper(e, cols)
        assert e['uid'] in ev_uids
Example #6
def textout(scan=-1, header=[], userheader={}, column=[], output=True):
    '''
    scan: can be scan_id (integer) or uid (string). default = -1 (last scan run)
    '''
    scanh = db[scan]
    print(scanh.start)
    events = list(get_events(scanh))

    #convert time stamp to localtime
    #timestamp=scanhh.start['time']
    #scantime=time.localtime(timestamp)

    filedir = '/nfs/xf05id1/userdata/2016cycle2/300265_inhouse/'
    filename = 'scan_' + str(scanh.start['scan_id'])
    f = open(filedir + filename, 'w')

    staticheader = '# XDI/1.0 MX/2.0\n' \
              +'# Beamline.name: '+scanh.start.beamline_id+'\n'  \
              +'# Facility.name: NSLS-II\n'  \
              +'# Scan.start.uid: '+scanh.start.uid+'\n'  \
              +'# Scan.start.time: '+str(scanh.start.time)+'\n'  \
              +'# Scan.start.ctime: '+time.ctime(scanh.start.time)+'\n'  \
              +'# Mono.name: Si 111\n'

    f.write(staticheader)

    for item in header:
        if item in events[0].data.keys():
            f.write('# ' + item + ': ' + str(events[0]['data'][item]) + '\n')
            if output is True:
                print(item + ' is written')
        else:
            print(item + ' is not in the scan')

    for key in userheader:
        f.write('# ' + key + ': ' + str(userheader[key]) + '\n')
        if output is True:
            print(key + ' is written')

    for idx, item in enumerate(column):
        if item in events[0].data.keys():
            f.write('# Column.' + str(idx + 1) + ': ' + item + '\n')

    f.write('# ')
    for item in column:
        if item in events[0].data.keys():
            f.write(str(item) + '\t')
    f.write('\n')

    for event in events:
        for item in column:
            if item in events[0].data.keys():
                f.write(str(event['data'][item]) + '\t')
        f.write('\n')

    f.close()
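A hedged usage sketch for textout; the field names below are hypothetical placeholders for keys that would exist in the event data:

textout(scan=-1,                                 # last run
        header=['energy_setpoint'],              # hypothetical event-data key
        userheader={'Sample.name': 'test'},
        column=['energy_setpoint', 'I0', 'If'])  # hypothetical detector columns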
Example #7
def test_handler_options(image_example_uid):
    h = db[image_example_uid]
    list(get_events(h))
    list(get_table(h))
    list(get_images(h, "img"))
    res = list(get_events(h, fields=["img"], fill=True, handler_registry={"npy": DummyHandler}))
    res = [ev for ev in res if "img" in ev["data"]]
    res[0]["data"]["img"] == "dummy"
    res = list(get_events(h, fields=["img"], fill=True, handler_overrides={"image": DummyHandler}))
    res = [ev for ev in res if "img" in ev["data"]]
    res[0]["data"]["img"] == "dummy"
    res = get_table(h, ["img"], fill=True, handler_registry={"npy": DummyHandler})
    assert res["img"].iloc[0] == "dummy"
    res = get_table(h, ["img"], fill=True, handler_overrides={"img": DummyHandler})
    assert res["img"].iloc[0] == "dummy"
    res = get_images(h, "img", handler_registry={"npy": DummyHandler})
    assert res[0] == "dummy"
    res = get_images(h, "img", handler_override=DummyHandler)
    assert res[0] == "dummy"
Example #8
def dark_sub(self, header):
    """ public method; operates at the header level """
    img_list = []
    timestamp_list = []
    dark_img, dark_time_stamp = self.pull_dark(header)
    for ev in get_events(header, fill=True):
        sub_img, timestamp, ind, dark_sub = self._dark_sub(ev, dark_img)
        img_list.append(sub_img)
        timestamp_list.append(timestamp)
    return img_list, timestamp_list, dark_img, header.start
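A sketch of calling this method; `proc` stands in for an instance of the surrounding processing class (the constructor name is hypothetical):

proc = XpdDataProc()  # hypothetical constructor for the enclosing class
imgs, stamps, dark_img, start_md = proc.dark_sub(db[-1])
print(len(imgs), 'dark-subtracted images')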
Example #9
def test_handler_options(image_example_uid):
    h = db[image_example_uid]
    list(get_events(h))
    list(get_table(h))
    list(get_images(h, 'img'))
    res = list(get_events(h, fields=['img'], fill=True,
                          handler_registry={'npy': DummyHandler}))
    res = [ev for ev in res if 'img' in ev['data']]
    assert res[0]['data']['img'] == 'dummy'
    res = list(get_events(h, fields=['img'], fill=True,
                          handler_overrides={'image': DummyHandler}))
    res = [ev for ev in res if 'img' in ev['data']]
    assert res[0]['data']['img'] == 'dummy'
    res = get_table(h, ['img'], fill=True,
                    handler_registry={'npy': DummyHandler})
    assert res['img'].iloc[0] == 'dummy'
    res = get_table(h, ['img'], fill=True,
                    handler_overrides={'img': DummyHandler})
    assert res['img'].iloc[0] == 'dummy'
    res = get_images(h, 'img', handler_registry={'npy': DummyHandler})
    assert res[0] == 'dummy'
    res = get_images(h, 'img', handler_override=DummyHandler)
    assert res[0] == 'dummy'
Example #10
def f(name, stop_doc):
    if name != 'stop':
        return
    uid = stop_doc['run_start']
    start = run_start_given_uid(uid)
    descriptors = descriptors_by_start(uid)
    # For convenience, I'll rely on the broker to get Events.
    header = db[uid]
    events = get_events(header)
    callback('start', start)
    for d in descriptors:
        callback('descriptor', d)
    for e in events:
        callback('event', e)
    callback('stop', stop_doc)
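One way this callback might be wired up, assuming a bluesky RunEngine `RE` and a `callback` function already defined in the enclosing scope:

RE.subscribe(f, 'stop')  # deliver only stop documents; f then replays the whole run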
Example #12
def scantime(scanid, printresults=True):
    '''
    input: scanid
    return: start and stop times as floats, plus formatted strings
    '''
    h = db[scanid]
    start_str = 'scan start: ' + time.ctime(h.start['time'])
    stop_str = 'scan stop : ' + time.ctime(h.stop['time'])
    totaltime = h.stop['time'] - h.start['time']
    scannumpt = len(list(get_events(h)))

    if printresults is True:
        print(start_str)
        print(stop_str)
        print('total time:', totaltime, 's')
        print('number of points:', scannumpt)
        print('scan time per point:', totaltime / scannumpt, 's')
    return h.start['time'], h.stop['time'], start_str, stop_str
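An example call (the scan id is hypothetical):

t_start, t_stop, start_str, stop_str = scantime(12345, printresults=False)
print(start_str)
print(stop_str)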
Example #13
def verify_files_accessible(name, doc):
    "This is a brute-force approach. We retrieve all the data."
    ttime.sleep(0.1)  # Wait for data to be saved.
    if name != 'stop':
        return
    print("  Verifying that run was saved to broker...")
    try:
        header = db[doc['run_start']]
    except Exception as e:
        print("  Verification Failed! Error: {0}".format(e))
        return
    else:
        print('\x1b[1A\u2713')
    print("  Verifying that all data is accessible on the disk...")
    try:
        list(get_events(header, fill=True))
    except Exception as e:
        print("  Verification Failed! Error: {0}".format(e))
    else:
        print('\x1b[1A\u2713')
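A sketch of attaching this verifier to a bluesky RunEngine (assumes `RE` exists):

RE.subscribe(verify_files_accessible, 'stop')  # runs once per completed run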
Example #15
def test_basic_usage():
    for i in range(5):
        insert_run_start(time=float(i), scan_id=i + 1,
                         owner='nedbrainard', beamline_id='example',
                         uid=str(uuid.uuid4()))
    header_1 = db[-1]

    header_ned = db(owner='nedbrainard')
    header_ned = db.find_headers(owner='nedbrainard')  # deprecated API
    header_null = db(owner='this owner does not exist')
    # smoke test
    db.fetch_events(header_1)
    db.fetch_events(header_ned)
    db.fetch_events(header_null)
    list(get_events(header_1))
    list(get_events(header_ned))
    list(get_events(header_null))
    get_table(header_1)
    get_table(header_ned)
    get_table(header_null)

    # get events for multiple headers
    list(get_events([header_1, header_ned]))
Example #16
def test_legacy_config_warnings(RE):
    name = databroker.databroker.SPECIAL_NAME
    assert 'test' in name
    path = os.path.join(os.path.expanduser('~'), '.config', 'databroker',
                        name + '.yml')
    ensure_path_exists(os.path.dirname(path))
    with open(path, 'w') as f:
        yaml.dump(EXAMPLE, f)

    imp.reload(databroker.databroker)
    imp.reload(databroker)
    from databroker import db, DataBroker, get_table, get_events

    RE.subscribe(db.insert)
    uid, = RE(count([det]))
    with pytest.warns(UserWarning):
        assert len(get_table(db[uid]))
    with pytest.warns(UserWarning):
        assert list(get_events(db[uid]))

    # Clean up
    os.remove(path)
Example #17
def textout(scan=-1, header=[], userheader={}, column=[], usercolumn={}, usercolumnname=[], output=True, filename_add=''):
    '''
    scan: can be scan_id (integer) or uid (string). default = -1 (last scan run)
    header: a list of items that exist in the event data to be put into the header
    userheader: a dictionary defined by the user to put into the header
    column: a list of items that exist in the event data to be put into the column data
    output: print all header fields. if output = False, only print the ones that were able to be written
            default = True

    '''
    scanh = db[scan]
    print(scanh.start)
    events = list(get_events(scanh))

 
    # convert time stamp to localtime
    # timestamp = scanh.start['time']
    # scantime = time.localtime(timestamp)

    filedir = '/nfs/xf05id1/userdata/2016cycle2/300265_inhouse/'

    if filename_add != '':
        filename = 'scan_' + str(scanh.start['scan_id']) + '_' + filename_add
    else:
        filename = 'scan_' + str(scanh.start['scan_id'])

    f = open(filedir + filename, 'w')

    staticheader = '# XDI/1.0 MX/2.0\n' \
              +'# Beamline.name: '+scanh.start.beamline_id+'\n'  \
              +'# Facility.name: NSLS-II\n'  \
              +'# Facility.ring_current:' + str(events[0]['data']['ring_current'])+'\n' \
              +'# Scan.start.uid: '+scanh.start.uid+'\n'  \
              +'# Scan.start.time: '+str(scanh.start.time)+'\n'  \
              +'# Scan.start.ctime: '+time.ctime(scanh.start.time)+'\n'  \
              +'# Mono.name: Si 111\n'
              # +'# bpm.cam.exposure_time: '+str(events[0].descriptor.configuration['bpmAD']['data']['bpmAD_cam_acquire_time'])+'\n'
              # +'# Undulator.elevation: '+str(scanh.start.undulator_setup['elevation'])+'\n'
              # +'# Undulator.tilt: '+str(scanh.start.undulator_setup['tilt'])+'\n'
              # +'# Undulator.taper: '+str(scanh.start.undulator_setup['taper'])+'\n'

    f.write(staticheader)

    for item in header:
        if item in events[0].data.keys():
            f.write('# '+item+': '+str(events[0]['data'][item])+'\n')
            if output is True:
                print(item+' is written')
        else: 
            print(item+' is not in the scan')
           
    for key in userheader:
        f.write('# '+key+': '+str(userheader[key])+'\n')
        if output is True:
            print(key+' is written')                   

    for idx, item in enumerate(column): 
        if item in events[0].data.keys():        
            f.write('# Column.'+str(idx+1)+': '+item+'\n')


    f.write('# ') 
    for item in column: 
        if item in events[0].data.keys():        
            f.write(str(item)+'\t')

    for item in usercolumnname: 
        f.write(item+'\t')
            
    f.write('\n')
    
    idx = 0
    for event in events:
        for item in column: 
            if item in events[0].data.keys():        
                f.write(str(event['data'][item])+'\t')
        for item in usercolumnname:
            f.write(str(usercolumn[item][idx])+'\t')

        idx = idx + 1
        f.write('\n')
        
    f.close()
Example #18
def xanes_textout(scan=-1,
                  header=[],
                  userheader={},
                  column=[],
                  usercolumn={},
                  usercolumnname=[],
                  output=True,
                  filename_add='',
                  filedir=None):
    '''
    scan: can be scan_id (integer) or uid (string). default = -1 (last scan run)
    header: a list of items that exist in the event data to be put into the header
    userheader: a dictionary defined by the user to put into the header
    column: a list of items that exist in the event data to be put into the column data
    output: print all header fields. if output = False, only print the ones that were able to be written
            default = True

    '''
    if (filedir is None):
        filedir = userdatadir
    h = db[scan]
    # get events using fill=False so it does not look for the metadata in filestorage with reference (hdf5 here)
    events = list(get_events(h, fill=False, stream_name='primary'))

    if (filename_add != ''):
        filename = 'scan_' + str(h.start['scan_id']) + '_' + filename_add
    else:
        filename = 'scan_' + str(h.start['scan_id'])

    f = open(filedir + filename, 'w')

    staticheader = '# XDI/1.0 MX/2.0\n' \
              + '# Beamline.name: ' + h.start['beamline_id'] + '\n' \
              + '# Facility.name: NSLS-II\n' \
              + '# Facility.ring_current:' + str(events[0]['data']['ring_current']) + '\n' \
              + '# Scan.start.uid: ' + h.start['uid'] + '\n' \
              + '# Scan.start.time: '+ str(h.start['time']) + '\n' \
              + '# Scan.start.ctime: ' + ttime.ctime(h.start['time']) + '\n' \
              + '# Mono.name: Si 111\n'

    f.write(staticheader)

    for item in header:
        if (item in events[0].data.keys()):
            f.write('# ' + item + ': ' + str(events[0]['data'][item]) + '\n')
            if (output is True):
                print(item + ' is written')
        else:
            print(item + ' is not in the scan')

    for key in userheader:
        f.write('# ' + key + ': ' + str(userheader[key]) + '\n')
        if (output is True):
            print(key + ' is written')

    for idx, item in enumerate(column):
        if (item in events[0].data.keys()):
            f.write('# Column.' + str(idx + 1) + ': ' + item + '\n')

    f.write('# ')
    for item in column:
        if (item in events[0].data.keys()):
            f.write(str(item) + '\t')

    for item in usercolumnname:
        f.write(item + '\t')

    f.write('\n')
    f.flush()

    idx = 0
    for event in events:
        for item in column:
            if (item in events[0].data.keys()):
                f.write('{0:8.6g}  '.format(event['data'][item]))
        for item in usercolumnname:
            try:
                f.write('{0:8.6g}  '.format(usercolumn[item][idx]))
            except KeyError:
                idx += 1
                f.write('{0:8.6g}  '.format(usercolumn[item][idx]))
        idx = idx + 1
        f.write('\n')

    f.close()
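A hedged example call; the column names are hypothetical event-data keys:

xanes_textout(scan=-1,
              header=['energy_setpoint'],        # hypothetical key
              column=['energy_setpoint', 'I0'],  # hypothetical keys
              filename_add='trial1')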
Example #19
def test_get_events_bad_key():
    hdr = db[-1]
    with pytest.raises(ValueError):
        list(get_events(hdr, fields=['abcd123']))
Example #20
# For each sample plot the intra sample temperature curve
for i in ns:
    print(i)
    save_folder = '../S{}'.format(i)

    # Get the folder where the data is
    folder = '/mnt/bulk-data/research_data/USC_beamtime/APS_March_2016/S' \
             + str(i) + '/temp_exp'

    # Get the run header associated with that folder
    hdr = db(run_folder=folder)[0]

    # Mux the data so that we have the correct Temp->data relationship
    dm = DataMuxer()
    dm.append_events(get_events(hdr))
    df = dm.to_sparse_dataframe()
    print(df.keys())
    binned = dm.bin_on('img', interpolation={'T': 'linear'})

    for plot_type in [
        'gr',
        'chi'
    ]:
        if plot_type == 'gr':
            # Only do the G(r)
            key_list = [f for f in os.listdir(folder) if
                        f.endswith('.gr') and not f.startswith('d')]
            # If we are working with G(r) files use these offset and read parameters
            offset = .1
            skr = 0
Example #21
def textout(scan=-1,
            header=[],
            userheader={},
            column=[],
            usercolumn={},
            usercolumnname=[],
            output=True,
            filename_add='',
            filedir=None):
    '''
    scan: can be scan_id (integer) or uid (string). default = -1 (last scan run)
    header: a list of items that exist in the event data to be put into the header
    userheader: a dictionary defined by the user to put into the header
    column: a list of items that exist in the event data to be put into the column data
    output: print all header fields. if output = False, only print the ones that were able to be written
            default = True

    '''
    if filedir is None:
        filedir = _DEFAULT_FILEDIR
    scanh = db[scan]
    #    print(scanh.start)
    events = list(
        get_events(scanh, fill=False, stream_name='primary')
    )  #fill=False so it does not look for the metadata in filestorage with reference (hdf5 here)

    #convert time stamp to localtime
    #timestamp=scanhh.start['time']
    #scantime=time.localtime(timestamp)

    #filedir=userdatadir

    if filename_add != '':
        filename = 'scan_' + str(scanh.start['scan_id']) + '_' + filename_add
    else:
        filename = 'scan_' + str(scanh.start['scan_id'])

#    print(filedir)
#    print(filename)

    f = open(filedir + filename, 'w')

    staticheader = '# XDI/1.0 MX/2.0\n' \
              +'# Beamline.name: '+scanh.start.beamline_id+'\n'  \
              +'# Facility.name: NSLS-II\n'  \
              +'# Facility.ring_current:' + str(events[0]['data']['ring_current'])+'\n' \
              +'# Scan.start.uid: '+scanh.start.uid+'\n'  \
              +'# Scan.start.time: '+str(scanh.start.time)+'\n'  \
              +'# Scan.start.ctime: '+time.ctime(scanh.start.time)+'\n'  \
              +'# Mono.name: Si 111\n'
              #+'# bpm.cam.exposure_time: '+str(events[0].descriptor.configuration['bpmAD']['data']['bpmAD_cam_acquire_time'])+'\n'  \

    #+'# Undulator.elevation: '+str(scanh.start.undulator_setup['elevation'])+'\n'  \
    #+'# Undulator.tilt: '+str(scanh.start.undulator_setup['tilt'])+'\n'  \
    #+'# Undulator.taper: '+str(scanh.start.undulator_setup['taper'])+'\n'

    f.write(staticheader)

    for item in header:
        if item in events[0].data.keys():
            f.write('# ' + item + ': ' + str(events[0]['data'][item]) + '\n')
            if output is True:
                print(item + ' is written')
        else:
            print(item + ' is not in the scan')

    for key in userheader:
        f.write('# ' + key + ': ' + str(userheader[key]) + '\n')
        if output is True:
            print(key + ' is written')

    for idx, item in enumerate(column):
        if item in events[0].data.keys():
            f.write('# Column.' + str(idx + 1) + ': ' + item + '\n')

    f.write('# ')
    for item in column:
        if item in events[0].data.keys():
            f.write(str(item) + '\t')

    for item in usercolumnname:
        f.write(item + '\t')

    f.write('\n')
    f.flush()

    idx = 0
    for event in events:
        for item in column:
            if item in events[0].data.keys():
                #f.write(str(event['data'][item])+'\t')
                f.write('{0:8.6g}  '.format(event['data'][item]))
        for item in usercolumnname:
            try:
                #f.write(str(usercolumn[item][idx])+'\t')
                f.write('{0:8.6g}  '.format(usercolumn[item][idx]))
            except KeyError:
                idx += 1
                f.write('{0:8.6g}  '.format(usercolumn[item][idx]))
        idx = idx + 1
        f.write('\n')

    f.close()
Example #22
def save_tiff(headers,
              dark_sub=True,
              max_count=None,
              dryrun=False,
              handler=xpd_data_proc):
    """ save images obtained from dataBroker as tiff format files.

    Parameters
    ----------
    headers : list
        a list of header objects obtained from a query to dataBroker.

    dark_sub : bool, optional
        Default is True, which applies dark/background subtraction
        before saving each image. If the header doesn't contain the
        information needed to perform dark subtraction, the uncorrected
        image is saved instead.

    max_count : int, optional
        The maximum number of events to process per-run.  This can be
        useful to 'preview' an export or if there are corrupted files
        in the data stream (ex from the IOC crashing during data
        acquisition).

    dryrun : bool, optional
        if set to True, files won't be saved. default is False

    handler : object, optional
        instance of the class that handles data processing; don't
        change it unless needed.
    """
    # normalize list
    header_list = _prepare_header_list(headers)

    for header in header_list:
        # create root_dir
        root = header.start.get(handler.root_dir_name, None)
        if root is not None:
            root_dir = os.path.join(W_DIR, root)
            os.makedirs(root_dir, exist_ok=True)
        else:
            root_dir = W_DIR
        # dark logic
        dark_img, dark_time = handler.pull_dark(header)
        if not dark_sub:
            dark_img = None  # no sub
        # event
        for event in get_events(header, fill=True):
            img, event_timestamp, ind, dark_sub = handler._dark_sub(
                event, dark_img)
            f_name = handler._file_name(event, event_timestamp, ind)
            if dark_sub:
                f_name = 'sub_' + f_name
            # save tif
            w_name = os.path.join(root_dir, f_name)
            if not dryrun:
                tif.imsave(w_name, img)
                if os.path.isfile(w_name):
                    print('image "%s" has been saved at "%s"' %
                          (f_name, root_dir))
                else:
                    print('Sorry, something went wrong with your tif saving')
                    return
            # dryrun : print
            else:
                print("dryrun: image {} has been saved at {}".format(
                    f_name, root_dir))
            if max_count is not None and ind >= max_count:
                # break the loop if max_count reached, move to next header
                break

        # save run_start
        stem, ext = os.path.splitext(w_name)
        config_name = w_name.replace(ext, '.yaml')
        with open(config_name, 'w') as f:
            #yaml.dump(header.start['sc_calibration_md'], f)
            yaml.dump(header.start, f)  # save all md in start

    print(" *** {} *** ".format('Saving process finished'))
Example #23
def integrate(headers,
              polarization_factor=0.99,
              root_dir=None,
              config_dict=None,
              handler=xpd_data_proc):
    """ integrate dark subtracted image for given list of headers

        Parameters
        ----------
        headers : list
            a list of header objects obtained from a query to
            dataBroker.

        polarization_factor : float, optional
            polarization correction factor, ranging from -1 (vertical)
            to +1 (horizontal). default is 0.99. set to None for no
            correction.

        root_dir : str, optional
            path of chi files that are going to be saved. default is
            xpdUser/userAnalysis/

        config_dict : dict, optional
            dictionary storing integration parameters for the pyFAI
            azimuthal integrator. default is the most recent parameters
            saved in xpdUser/config_base

        handler : object, optional
            instance of the class that handles data processing; don't
            change it unless needed.
    """
    # normalize list
    header_list = _prepare_header_list(headers)

    # config_dict
    if config_dict is None:
        config_dict = _load_config()  # default one
    ai.setPyFAI(**config_dict)
    npt = _npt_cal(config_dict)

    # iterate over header
    total_rv_list = []
    for header in header_list:
        root = header.start.get(handler.root_dir_name, None)
        if root is not None:
            root_dir = os.path.join(W_DIR, root)
            os.makedirs(root_dir, exist_ok=True)
        else:
            root_dir = W_DIR
        header_rv_list = []
        # dark logic
        dark_img, dark_time = handler.pull_dark(header)
        for event in get_events(header, fill=True):
            img, event_timestamp, ind, dark_sub = handler._dark_sub(
                event, dark_img)
            f_name = handler._file_name(event, event_timestamp, ind)
            if dark_sub:
                f_name = 'sub_' + f_name
            stem, ext = os.path.splitext(f_name)
            chi_name = stem + '.chi'
            integration_dict = {
                'filename': os.path.join(root_dir, chi_name),
                'polarization_factor': polarization_factor
            }
            print("INFO: integrating image: {}".format(f_name))
            rv = ai.integrate1d(img, npt, **integration_dict)
            header_rv_list.append(rv)
            print("INFO: save chi file: {}".format(chi_name))
        total_rv_list.append(header_rv_list)
        # each header generates a list of rv

    print(" *** {} *** ".format('Integration process finished'))
    print("INFO: chi files are saved at {}".format(root_dir))
    return total_rv_list
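A minimal call sketch, relying on the defaults documented above:

rv_lists = integrate(db[-1], polarization_factor=0.99)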
Example #24
hdr = hdrs[0]
print(hdr['start']['run_folder'], hdr['start']['uid'])

# Get calibrations
if not hdr['start']['is_calibration']:
    cals = [db[u]['start']['poni'][0] for u in
            hdr['start']['calibration']]
else:
    cals = [p for p in hdr['start']['poni']]

geos = [retrieve(p) for p in cals]
cal_dists = np.asarray(
    [g.dist for g in geos]) * 100  # convert to meters

events = get_events(hdr)
ev0 = next(events)
detz = ev0['data']['detz']
cal_idx = np.argmin((detz - cal_dists) ** 2)
geo = geos[cal_idx]
img = retrieve(ev0['data']['img'])
# Correct for polarization
img /= geo.polarization(img.shape, .95)

r = geo.rArray(img.shape)
q = geo.qArray(img.shape) / 10  # pyFAI works in nm**-1, we want A**-1
fq = geo.qArray(img.shape).ravel()
fimg = img.ravel()
bins = generate_q_bins(np.max(r) - .5 * geo.pixel1,
                       geo.pixel1, geo.dist, geo.wavelength * 10**10)
x = bin_edges_to_centers(bins)
from metadatastore.api import db_connect as mds_db_connect
from filestore.api import db_connect as fs_db_connect
fs_db_connect(**{'database': 'data-processing-dev', 'host': 'localhost', 'port': 27017})
mds_db_connect(**{'database': 'data-processing-dev', 'host': 'localhost', 'port': 27017})

# Get headers of interest
hdrs = [db[-1]]
for hdr in hdrs:
    time_dept_bg = True

    # Get calibrations
    geos = [retrieve(p) for p in hdr['start']['poni']]
    cal_dists = np.asarray(
        [g.dist for g in geos]) * 10  # pyFAI reports in meters
    # Get starting masks
    # start_masks = [retrieve(p) for p in hdr['start']['mask']]
    for event in get_events(hdr):
        # Pull relevant data into local vars
        data = event['data']
        img = data['img']
        detz = data['detz']

        # Find the correct calibration file, it's the one with the dist close
        # to the recorded detector dist
        cal_idx = np.argmin((detz - cal_dists) ** 2)
        geo = geos[cal_idx]
        # start_mask = start_masks[cal_idx]
        start_mask = np.zeros(img.shape, dtype=int)
        r = geo.rArray(img.shape)
        q = geo.qArray(img.shape)

        fr = r.ravel()