def test_hdf5_mapped_ingestor(sample_file, tmp_path):
    """End-to-end test of MappedHD5Ingestor's document stream.

    Consumes the full (name, doc) stream, checks start metadata and
    projections, counts descriptors/datums/events, verifies the stop
    document's event counts, and confirms a thumbnail PNG named after
    the run uid was written under ``tmp_path``.
    """
    ingestor = MappedHD5Ingestor(Mapping(**mapping_dict),
                                 sample_file,
                                 "test_root",
                                 thumbs_root=tmp_path)
    run_cache = SingleRunCache()
    descriptors = []
    result_events = []
    result_datums = []
    start_found = False
    stop_found = False
    run_uid = ""
    for name, doc in ingestor.generate_docstream():
        # Feed every document to the cache so the run can be retrieved later.
        run_cache.callback(name, doc)
        if name == "start":
            assert doc[
                ":measurement:sample:name"] == "my sample", "metadata in start doc"
            assert doc["projections"][0][
                'name'] == "foo_bar", "projection is in start doc"
            start_found = True
            run_uid = doc['uid']
        elif name == "descriptor":
            descriptors.append(doc)
        elif name == "resource":
            # BUG FIX: original used a bare `==` (no-op expression); an
            # assertion was clearly intended.  Also removed a second,
            # unreachable `resource` branch that appended to result_events.
            assert doc["spec"] == mapping_dict["resource_spec"]
        elif name == "datum":
            result_datums.append(doc)
        elif name == "event":
            result_events.append(doc)
        elif name == "stop":
            stop_found = True
            assert doc["num_events"]["primary"] == num_frames_primary
            # assert doc["num_events"]["darks"] == num_frames_darks

    assert start_found, "a start document was produced"
    assert stop_found, "a stop document was produced"

    assert len(descriptors) == 2, "return two descriptors"
    assert descriptors[0]["name"] == "primary", "first descriptor is primary"
    assert descriptors[1]["name"] == "darks", "second descriptor is darks"
    assert len(
        descriptors[0]["data_keys"].keys()) == 2, "primary has two data_keys"

    assert len(result_datums) == num_frames_primary + num_frames_darks
    assert len(result_events) == num_frames_primary + num_frames_darks

    run = run_cache.retrieve()
    stream = run["primary"].to_dask()
    assert stream
    # One thumbnail per run, named <run_uid>.png, under thumbs_root.
    thumb_dir = Path(tmp_path)
    thumb_file = run_uid + ".png"
    assert Path(thumb_dir / thumb_file).exists()
# Example 2
def factory(name, doc):
    """Build the callback pair for a new run.

    Returns ([subscribe_callbacks], []): the cache's callback plus a
    stop-document hook that exports amptek spectra to CSV when the run
    used the amptek detector.
    """
    cache = SingleRunCache()

    def _export_if_amptek(doc_name, stop_doc):
        # Only act once the run has fully completed.
        if doc_name != "stop":
            return
        run = cache.retrieve()
        start_md = run.metadata['start']
        if 'amptek' in start_md['detectors']:
            export_spectra_to_csv(
                run,
                f"/tmp/CSV-export/{start_md['uid']}.csv",
                ['amptek_energy_channels', 'amptek_mca_spectrum'])

    return [cache.callback, _export_if_amptek], []
# Example 3
    def plan(num_points=11):
        """Run a relative scan and assert the cached run has num_points rows."""
        cache = SingleRunCache()

        @bluesky.preprocessors.subs_decorator(cache.callback)
        def _scan_and_verify():
            yield from bluesky.plans.rel_scan(
                [detector], hw.motor, -1, 1, num_points)
            # The scan's documents were cached as they were emitted; pull the
            # run back out and check the motor column has the expected length.
            cached_run = cache.retrieve()
            motor_df = cached_run.primary.read()['motor'].to_dataframe()
            assert len(motor_df) == num_points

        yield from _scan_and_verify()
# Example 4
def rocking_curve(start=-0.10, stop=0.10, nsteps=101, choice="peak"):
    """Perform a relative scan of the DCM 2nd crystal pitch around the current
    position to find the peak of the crystal rocking curve.  Begin by opening
    the hutch slits to 3 mm. At the end, move to the position of maximum
    intensity on I0, then return the hutch slits to their original height.
    Input:
      start:    (float)  starting position relative to current  [-0.1]
      stop:     (float)  ending position relative to current    [0.1]
      nsteps:   (int)    number of steps                        [101]
      choice:   (string) 'peak', 'fit' or 'com' (center of mass) ['peak']
    If choice is fit, the fit is performed using the
    SkewedGaussianModel from lmfit, which works pretty well for this
    measurement at BMM.  The line shape is a bit skewed due to the
    convolution with the slightly misaligned entrance slits.
    """

    # Cache the data here as it is collected so we can examine it here and use
    # it to make decisions.
    src = SingleRunCache()

    @subs_decorator(LivePlot("I0", pitch.name, ax=plt.gca()))
    @subs_decorator(src.callback)
    def scan_dcm_pitch():
        # Summary line logged after the scan: motor, signal, scan parameters,
        # and the pitch readback at the moment the scan started.
        line1 = "%s, %s, %.3f, %.3f, %d -- starting at %.3f\n" % (
            pitch.name,
            "I0",
            start,
            stop,
            nsteps,
            pitch.readback.get(),
        )
        uid = yield from rel_scan([I0], pitch, start, stop, nsteps)

        # The data that we just acquired has been cached in memory by src.
        # Access it as a pandas DataFrame so that we can conveniently do some
        # math on it.
        run = src.retrieve()
        t = run.primary.read().to_dataframe()
        if choice.lower() == "com":
            # Center of mass of the I0 signal, mapped back to a pitch value.
            signal = numpy.array(t["I0"])
            position = com(signal)
            top = t['pitch'].iloc[position]
        elif choice.lower() == "fit":
            # Fit a skewed Gaussian; the fitted center is the target position.
            signal = numpy.array(t["I0"])
            pitch_ = numpy.array(t["pitch"])
            mod = SkewedGaussianModel()
            pars = mod.guess(signal, x=pitch_)
            out = mod.fit(signal, pars, x=pitch_)
            print(out.fit_report(min_correl=0))
            out.plot()
            top = out.params["center"].value
        else:
            # Default 'peak': take the pitch at the maximum of the I0 signal.
            signal = t['I0']
            position = peak(signal)
            top = t[pitch.name][position]

        print(
            "rocking curve scan: %s\tuid = %s, scan_id = %d"
            % (line1, uid, run.metadata["start"]["scan_id"])
        )
        print(f"Found and moved to peak at {top:.3} via method {choice}")
        # Move the pitch motor to the position chosen above.
        yield from mv(pitch, top)

    yield from scan_dcm_pitch()