Example #1
def get_data(scan_id, field='ivu_gap', intensity_field='elm_sum_all', det=None, debug=False):
    """Get data from the scan stored in the table.
from Maksim
    :param scan_id: scan id from bluesky.
    :param field: visualize the intensity vs. this field.
    :param intensity_field: the name of the intensity field.
    :param det: the name of the detector.
    :param debug: a debug flag.
    :return: a tuple of X, Y and timestamp values.
    """
    scan, t = get_scan(scan_id)
    if det:
        imgs = get_images(scan, det)
        im = imgs[-1]
        if debug:
            print(im)

    table = get_table(scan)
    fields = get_fields(scan)

    if debug:
        print(table)
        print(fields)
    x = table[field]
    y = table[intensity_field]

    return x, y, t
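
A minimal usage sketch (not part of the original example), assuming the helpers above are importable and matplotlib is installed; the scan id and plotting calls are illustrative placeholders.

# Illustrative only: plot the intensity field against the scan field
# returned by get_data; -1 stands in for a real bluesky scan id.
import matplotlib.pyplot as plt

x, y, t = get_data(-1, field='ivu_gap', intensity_field='elm_sum_all')

fig, ax = plt.subplots()
ax.plot(x, y, marker='o')
ax.set_xlabel('ivu_gap')
ax.set_ylabel('elm_sum_all')
ax.set_title('scan timestamp: {}'.format(t))
plt.show()
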
Example #2
def _get_images(header, tag):
    t = ttime.time()
    images = get_images(header, tag)
    t = ttime.time() - t
    logger.info("Took {:.3}s to read data using get_images".format(t))

    # Convert the lazy image sequence into a single uint16 numpy array.
    return np.array([np.asarray(im, dtype=np.uint16) for im in images])
Example #3
def _get_images(header, tag):
    t = ttime.time()
    images = get_images(header, tag)
    t = ttime.time() - t
    logger.info("Took {:.3}s to read data using get_images".format(t))

    for im in images:
        yield np.asarray(im, dtype=np.uint16)
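
Unlike Example #2, which materializes the whole stack as one array, this variant yields frames lazily. A sketch (not from the source) of consuming the generator one frame at a time:

# Sum the frames without ever holding the full image stack in memory.
import numpy as np

def sum_frames(header, tag):
    total = None
    for frame in _get_images(header, tag):
        frame = frame.astype(np.float64)   # widen to avoid uint16 overflow
        total = frame if total is None else total + frame
    return total
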
Example #4
def test_handler_options(image_example_uid):
    h = db[image_example_uid]
    list(get_events(h))
    list(get_table(h))
    list(get_images(h, "img"))
    res = list(get_events(h, fields=["img"], fill=True, handler_registry={"npy": DummyHandler}))
    res = [ev for ev in res if "img" in ev["data"]]
    res[0]["data"]["img"] == "dummy"
    res = list(get_events(h, fields=["img"], fill=True, handler_overrides={"image": DummyHandler}))
    res = [ev for ev in res if "img" in ev["data"]]
    res[0]["data"]["img"] == "dummy"
    res = get_table(h, ["img"], fill=True, handler_registry={"npy": DummyHandler})
    assert res["img"].iloc[0] == "dummy"
    res = get_table(h, ["img"], fill=True, handler_overrides={"img": DummyHandler})
    assert res["img"].iloc[0] == "dummy"
    res = get_images(h, "img", handler_registry={"npy": DummyHandler})
    assert res[0] == "dummy"
    res = get_images(h, "img", handler_override=DummyHandler)
    assert res[0] == "dummy"
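
DummyHandler is a test fixture that is not shown here. A hypothetical sketch of such a handler (the real fixture may differ), following the filestore/databroker convention of constructing a handler with the resource and returning data when called with datum kwargs:

# Hypothetical stand-in handler: ignores whatever is on disk and always
# returns the string "dummy", which is what the assertions above check for.
class DummyHandler:
    def __init__(self, resource_path, **resource_kwargs):
        self.resource_path = resource_path

    def __call__(self, **datum_kwargs):
        return "dummy"
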
Example #5
def _get_images(header, tag, roi=None, handler_override=None):
    t = ttime.time()
    images = get_images(header, tag, handler_override=handler_override)
    t = ttime.time() - t
    logger.info("Took %.3f seconds to read data using get_images", t)

    if roi is not None:
        images = _crop_images(images, roi)

    return images
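
The _crop_images helper is referenced but not shown. A hypothetical implementation, assuming roi is a (ymin, ymax, xmin, xmax) tuple in pixel coordinates (the actual signature in the source may differ):

# Hypothetical helper: crop every frame in the stack to a rectangular ROI.
import numpy as np

def _crop_images(images, roi):
    ymin, ymax, xmin, xmax = roi
    return np.asarray(images)[:, ymin:ymax, xmin:xmax]
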
Example #6
 def pull_dark(self, header):
     dark_uid = header.start.get(an_glbl.dark_field_key, None)
     if dark_uid is None:
         print("INFO: no dark frame is associated in this header, "
               "subrraction will not be processed")
         return None
     else:
         dark_search = {'group': 'XPD', 'uid': dark_uid}
         dark_header = db(**dark_search)
         dark_img = np.asarray(get_images(dark_header,
                                          self.image_field)).squeeze()
     return dark_img, dark_header[0].start.time
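
An illustrative use of pull_dark (not from the source); 'processor' and 'light_header' are placeholders for an instance of the class this method belongs to and the header of a light scan:

# Subtract the associated dark frame, if any, from each light image.
import numpy as np

result = processor.pull_dark(light_header)
if result is not None:
    dark_img, dark_time = result
    corrected = [np.asarray(img, dtype=float) - dark_img
                 for img in get_images(light_header, processor.image_field)]
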
Example #7
def test_handler_options(image_example_uid):
    h = db[image_example_uid]
    list(get_events(h))
    list(get_table(h))
    list(get_images(h, 'img'))
    res = list(get_events(h, fields=['img'], fill=True,
                          handler_registry={'npy': DummyHandler}))
    res = [ev for ev in res if 'img' in ev['data']]
    assert res[0]['data']['img'] == 'dummy'
    res = list(get_events(h, fields=['img'], fill=True,
                          handler_overrides={'image': DummyHandler}))
    res = [ev for ev in res if 'img' in ev['data']]
    assert res[0]['data']['img'] == 'dummy'
    res = get_table(h, ['img'], fill=True,
                    handler_registry={'npy': DummyHandler})
    assert res['img'].iloc[0] == 'dummy'
    res = get_table(h, ['img'], fill=True,
                    handler_overrides={'img': DummyHandler})
    assert res['img'].iloc[0] == 'dummy'
    res = get_images(h, 'img', handler_registry={'npy': DummyHandler})
    assert res[0] == 'dummy'
    res = get_images(h, 'img', handler_override=DummyHandler)
    assert res[0] == 'dummy'
Example #8
 def f(h, dryrun=False):
     imgs = get_images(h, field)
     # Fill in h, defer filling in N.
     _template = template.format(h=h, N='{N}')
     filenames = [_template.format(N=i) for i in range(len(imgs))]
     # First check that none of the filenames exist.
     for filename in filenames:
         if os.path.isfile(filename):
             raise FileExistsError("There is already a file at {}. Delete "
                                   "it and try again.".format(filename))
     if not dryrun:
         # Write files.
         for filename, img in zip(filenames, imgs):
             tifffile.imsave(filename, np.asarray(img))
     return filenames
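
Here template and field are free variables closed over from an enclosing export helper that is not shown. A sketch of what such a factory might look like (assumed, not taken from the source):

# Hypothetical factory that supplies the 'field' and 'template' closure
# variables and returns an exporter with the same behavior as f above.
import os
import numpy as np
import tifffile

def make_tiff_exporter(field, template):
    def f(h, dryrun=False):
        imgs = get_images(h, field)
        # Fill in h, defer filling in N.
        _template = template.format(h=h, N='{N}')
        filenames = [_template.format(N=i) for i in range(len(imgs))]
        # Refuse to overwrite existing files.
        for filename in filenames:
            if os.path.isfile(filename):
                raise FileExistsError(
                    "There is already a file at {}.".format(filename))
        if not dryrun:
            for filename, img in zip(filenames, imgs):
                tifffile.imsave(filename, np.asarray(img))
        return filenames
    return f

# e.g. export = make_tiff_exporter('pe1_image', '/tmp/scan_{N}.tiff')
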
Example #9
def run_calibration(exposure=60, calibrant_file=None, wavelength=None,
                    detector=None, gaussian=None):
    """ function to run entire calibration process.

    Entire process includes: collect calibration image, trigger pyFAI 
    calibration process, store calibration parameters as a yaml file 
    under xpdUser/config_base/ and inject uid of calibration image to
    following scans, until this function is run again.

    Parameters
    ----------
    exposure : int, optional
        total exposure time in sec. Default is 60s
    calibrant_name : str, optional
        name of calibrant used, different calibrants correspond to 
        different d-spacing profiles. Default is 'Ni'. User can assign 
        different calibrant, given d-spacing file path presents
    wavelength : flot, optional
        current of x-ray wavelength, in angstrom. Default value is 
        read out from existing xpdacq.Beamtime object
    detector : pyfai.detector.Detector, optional.
        instance of detector which defines pxiel size in x- and
        y-direction. Default is set to Perkin Elmer detector
    gaussian : int, optional
        gaussian width between rings, Default is 100.
    """
    # default params
    interactive = True
    dist = 0.1

    _check_obj(_REQUIRED_OBJ_LIST)
    ips = get_ipython()
    bto = ips.ns_table['user_global']['bt']
    prun = ips.ns_table['user_global']['prun']
    # print('*** current beamtime info = {} ***'.format(bto.md))
    calibrant = Calibrant()
    # d-spacing
    if calibrant_file is not None:
        calibrant.load_file(calibrant_file)
        calibrant_name = os.path.split(calibrant_file)[1]
        calibrant_name = os.path.splitext(calibrant_name)[0]
    else:
        calibrant.load_file(os.path.join(glbl.usrAnalysis_dir, 'Ni24.D'))
        calibrant_name = 'Ni'
    # wavelength
    if wavelength is None:
        _wavelength = bto['bt_wavelength']
    else:
        _wavelength = wavelength
    calibrant.wavelength = _wavelength * 10 ** (-10)
    # detector
    if detector is None:
        detector = Perkin()
    # scan
    # simplified version of Sample object
    calib_collection_uid = str(uuid.uuid4())
    calibration_dict = {'sample_name': calibrant_name,
                        'sample_composition': {calibrant_name: 1},
                        'is_calibration': True,
                        'calibration_collection_uid': calib_collection_uid}
    prun_uid = prun(calibration_dict, ScanPlan(bto, ct, exposure))
    light_header = glbl.db[prun_uid[-1]]  # last one is always light
    dark_uid = light_header.start['sc_dk_field_uid']
    dark_header = glbl.db[dark_uid]
    # unknown signature of get_images
    dark_img = np.asarray(
        get_images(dark_header, glbl.det_image_field)).squeeze()
    # dark_img = np.asarray(glbl.get_images(dark_header, glbl.det_image_field)).squeeze()
    for ev in glbl.get_events(light_header, fill=True):
        img = ev['data'][glbl.det_image_field]
        img -= dark_img
    # calibration
    timestr = _timestampstr(time.time())
    basename = '_'.join(['pyFAI_calib', calibrant_name, timestr])
    w_name = os.path.join(glbl.config_base, basename)  # poni name
    c = Calibration(wavelength=calibrant.wavelength,
                    detector=detector,
                    calibrant=calibrant,
                    gaussianWidth=gaussian)
    c.gui = interactive
    c.basename = w_name
    c.pointfile = w_name + ".npt"
    c.ai = AzimuthalIntegrator(dist=dist, detector=detector,
                               wavelength=calibrant.wavelength)
    c.peakPicker = PeakPicker(img, reconst=True, mask=detector.mask,
                              pointfile=c.pointfile, calibrant=calibrant,
                              wavelength=calibrant.wavelength)
    # method=method)
    if gaussian is not None:
        c.peakPicker.massif.setValleySize(gaussian)
    else:
        c.peakPicker.massif.initValleySize()

    if interactive:
        c.peakPicker.gui(log=True, maximize=True, pick=True)
        update_fig(c.peakPicker.fig)
    c.gui_peakPicker()
    c.ai.setPyFAI(**c.geoRef.getPyFAI())
    c.ai.wavelength = c.geoRef.wavelength
    # update until next time
    glbl.calib_config_dict = c.ai.getPyFAI()
    Fit2D_dict = c.ai.getFit2D()
    glbl.calib_config_dict.update(Fit2D_dict)
    glbl.calib_config_dict.update({'file_name': basename})
    glbl.calib_config_dict.update({'time': timestr})
    # FIXME: need a solution for selecting desired calibration image
    # based on calibration_collection_uid later
    glbl.calib_config_dict.update({'calibration_collection_uid':
                                   calib_collection_uid})
    # write yaml
    yaml_name = glbl.calib_config_name
    with open(os.path.join(glbl.config_base, yaml_name), 'w') as f:
        yaml.dump(glbl.calib_config_dict, f)

    return c.ai
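
An illustrative read-back of the saved calibration file (not part of the original example); glbl refers to the same configuration object used above:

# Load the calibration parameters that run_calibration dumped to
# xpdUser/config_base/ and inspect a couple of the stored keys.
import os
import yaml

with open(os.path.join(glbl.config_base, glbl.calib_config_name)) as f:
    calib_config = yaml.safe_load(f)
print(calib_config['file_name'], calib_config['time'])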