def test_dbfriendly(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    z = t.map(op.add, 1)
    n = ToEventStream(z, "out").DBFriendly()
    d = n.pluck(1).sink_to_list()
    RE.subscribe(unstar(source.emit))

    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    # The start document carries a serialized graph and its hash
    assert isinstance(d[0]["graph"], dict)
    h1 = d[0].get("graph_hash")
    assert h1
    d.clear()

    # Re-running the same pipeline reproduces the same hash
    RE(scan([hw.motor], hw.motor, 0, 9, 10))
    h2 = d[0].get("graph_hash")
    assert h1 == h2
    assert len(d) == 10 + 3  # 10 events + start, descriptor, stop
    d.clear()

    # Changing a node's arguments changes the hash
    z.args = (2,)
    RE(scan([hw.motor], hw.motor, 0, 9, 10))
    h2 = d[0].get("graph_hash")
    assert h1 != h2
    assert len(d) == 10 + 3
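# Note: these tests assume `op` is the stdlib `operator` module, `scan` comes
# from bluesky.plans, and `unstar` is shed's helper for adapting a
# one-argument callable (like Stream.emit) to the RunEngine's
# callback(name, doc) signature. A minimal sketch of `unstar`, consistent
# with how it is used here (an illustration, not necessarily shed's exact
# implementation):
from functools import wraps


def unstar(func):
    @wraps(func)
    def wrapped(*args):
        # Pack the positional arguments (name, doc) into a single tuple so
        # a one-argument function can consume them as one item.
        return func(args)

    return wrapped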
def test_db_insertion(RE, hw):
    db = Broker.named("temp")
    source = Stream()
    n0 = FromEventStream("event", ("data", "motor"), source, principle=True)
    n1 = ToEventStream(n0, "motor")
    n1.DBFriendly().starsink(db.v1.insert)
    RE.subscribe(lambda *x: source.emit(x))

    RE(scan([hw.motor], hw.motor, 0, 1, 2))

    assert db[-1]
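# Once inserted, the run can be read back out of the temporary Broker; a
# quick sketch using the databroker v1 API (illustrative usage, not part of
# the original test):
hdr = db[-1]          # Header for the most recent run
print(hdr.start)      # the translated start document
print(hdr.table())    # events as a pandas DataFrame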
def test_walk_up_partial():
    raw = Stream()
    a_translation = FromEventStream("start", ("time",), raw, principle=True)
    b_translation = FromEventStream("event", ("data", "pe1_image"), raw)
    d = b_translation.zip_latest(a_translation)
    ddd = ToEventStream(d, ("data",))
    dd = d.map(op.truediv)
    e = ToEventStream(dd, ("data",))

    g = nx.DiGraph()
    walk_to_translation(e, g)
    att = []
    for node, attrs in g.nodes.items():
        att.append(attrs["stream"])
    s = {ddd, dd, e, d}
    assert s == set(att)
    assert {_hash_or_uid(k) for k in s} == set(g.nodes)
def test_merkle_hash():
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    assert t.principle

    n = ToEventStream(t, ("ct",), data_key_md={"ct": {"units": "arb"}})
    h = merkle_hash(n)
    assert h

    # An identically built pipeline hashes identically
    tt = FromEventStream("event", ("data", "motor"), source, principle=True)
    nn = ToEventStream(tt, ("ct",), data_key_md={"ct": {"units": "arb"}})
    assert h == merkle_hash(nn)
    assert h != merkle_hash(tt)

    # The order in which branches are created changes the hash
    tt = FromEventStream("event", ("data", "motor"), source, principle=True)
    z = tt.map(op.add, 1)
    zz = tt.map(op.sub, 1)
    j = z.zip(zz)
    nn = ToEventStream(j, ("ct",), data_key_md={"ct": {"units": "arb"}})
    order_1_hash = merkle_hash(nn)

    tt = FromEventStream("event", ("data", "motor"), source, principle=True)
    zz = tt.map(op.sub, 1)
    z = tt.map(op.add, 1)
    j = z.zip(zz)
    nn = ToEventStream(j, ("ct",), data_key_md={"ct": {"units": "arb"}})
    order_2_hash = merkle_hash(nn)
    assert order_1_hash != order_2_hash

    # The order of inputs to a node (here zip) also changes the hash
    tt = FromEventStream("event", ("data", "motor"), source, principle=True)
    z = tt.map(op.add, 1)
    zz = tt.map(op.sub, 1)
    j = zz.zip(z)
    nn = ToEventStream(j, ("ct",), data_key_md={"ct": {"units": "arb"}})
    order_3_hash = merkle_hash(nn)
    assert order_1_hash != order_3_hash
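# The idea behind a Merkle-style graph hash: each node is hashed together
# with the hashes of its inputs, so any change anywhere upstream propagates
# into the final hash. A simplified sketch over streamz-like nodes (it
# captures the input-order case above; shed's actual merkle_hash is also
# sensitive to the order in which branches are attached, which this sketch
# is not):
import hashlib


def merkle_hash_sketch(node):
    parent_hashes = [
        merkle_hash_sketch(p)
        for p in getattr(node, "upstreams", [])
        if p is not None
    ]
    payload = repr(
        (
            type(node).__name__,
            getattr(node, "args", ()),
            getattr(node, "kwargs", {}),
            parent_hashes,
        )
    )
    return hashlib.sha256(payload.encode()).hexdigest()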
def test_to_event_model(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    assert t.principle

    n = ToEventStream(t, ("ct",))
    tt = t.sink_to_list()
    p = n.pluck(0).sink_to_list()
    d = n.pluck(1).sink_to_list()

    RE.subscribe(unstar(source.emit))
    RE.subscribe(print)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    assert tt
    assert set(p) == {"start", "stop", "event", "descriptor"}
    assert d[1]["hints"] == {"analyzer": {"fields": ["ct"]}}
    assert d[-1]["run_start"]
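# For reference: each item ToEventStream emits is a (name, document) pair,
# which is why pluck(0) collects document names and pluck(1) the documents
# themselves. The assertions above rely on shapes like these (illustrative,
# not literal output):
#   ("start",      {"uid": ..., ...})
#   ("descriptor", {"hints": {"analyzer": {"fields": ["ct"]}}, ...})
#   ("event",      {"data": {"ct": ...}, "seq_num": ..., ...})
#   ("stop",       {"run_start": <uid of the start document>, ...})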
def test_no_stop(hw, RE):
    # With "stop" documents filtered out of the input, ToEventStream must
    # still close out the first run when the next one starts.
    source = Stream().filter(lambda x: x[0] != "stop")
    t = FromEventStream("event", ("data",), source, principle=True)

    n = ToEventStream(t)
    p = n.pluck(0).sink_to_list()
    d = n.pluck(1).sink_to_list()

    RE.subscribe(unstar(source.emit))
    RE.subscribe(print)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))
    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    assert set(p) == {"start", "stop", "event", "descriptor"}
    assert d[1]["hints"] == {
        "analyzer": {"fields": ["motor", "motor_setpoint"]}
    }
    assert d[2]["data"] == {"motor_setpoint": 0, "motor": 0}
def test_to_event_model_dict(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data",), source, principle=True)

    n = ToEventStream(t)
    p = n.pluck(0).sink_to_list()
    d = n.pluck(1).sink_to_list()
    n.sink(print)

    RE.subscribe(unstar(source.emit))
    RE.subscribe(print)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    assert set(p) == {"start", "stop", "event", "descriptor"}
    assert d[1]["hints"] == {
        "analyzer": {"fields": ["motor", "motor_setpoint"]}
    }
    assert d[2]["data"] == {"motor_setpoint": 0, "motor": 0}
    assert d[-1]["run_start"]
from xpdan.pipelines.main import *  # noqa: F403, F401
from bluesky.callbacks.broker import LiveImage
from shed.translation import ToEventStream
from xpdtools.pipelines.extra import z_score

# Z-score visualization
z_score_plot = ToEventStream(z_score, ('z_score',)).starsink(
    LiveImage('z_score', cmap='viridis', window_title='z score',
              limit_func=lambda im: (-2, 2)),
    stream_name='z score vis')
# Create a graph
source = Stream()
# Convert from the raw event model to data
fes = FromEventStream('event', ('data', 'noisy_det'), source, principle=True)

# Averaging graph
adder = fes.accumulate(lambda x, y: x + y)
counter = fes.accumulate(lambda s, x: s + 1, start=0)
averager = adder.zip(counter).map(lambda x: x[0] / x[1])

# Binned averaging
sw = fes.sliding_window(2).map(sum).map(lambda x: x / 2)

# Convert back to the event model
tes1 = ToEventStream(averager, ('average',))
tes2 = ToEventStream(sw, ('binned',))

# Sink to plotting
tes1.sink(lambda x: lp(*x))
tes2.sink(lambda x: lp2(*x))

# Run the scan
RE = RunEngine()
t = RE.subscribe(lambda *x: source.emit(x))
# RE.subscribe(lp3)
# RE.subscribe(print)
source.visualize(source_node=True)
RE(count([hw().noisy_det], 100))
plt.show()
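# This script assumes lp, lp2, hw, RunEngine, count, Stream, FromEventStream,
# ToEventStream, and plt are already in scope. A hypothetical setup
# consistent with the bluesky/ophyd/shed ecosystem (the LivePlot field names
# below are assumptions, not part of the original script):
import matplotlib.pyplot as plt
from bluesky import RunEngine
from bluesky.callbacks.mpl_plotting import LivePlot
from bluesky.plans import count
from ophyd.sim import hw
from shed.translation import FromEventStream, ToEventStream
from streamz import Stream  # some shed versions use rapidz instead

lp = LivePlot('average')  # live plot of the running average
lp2 = LivePlot('binned')  # live plot of the pairwise-binned average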
from bluesky.callbacks.broker import LiveImage
from shed.translation import ToEventStream
from xpdan.pipelines.main import *  # noqa: F403, F401
from xpdview.callbacks import LiveWaterfall

# Visualization
# Background-corrected image
ToEventStream(bg_corrected_img, ('image',)).starsink(
    LiveImage('image', window_title='Background_corrected_img',
              cmap='viridis'))

# Polarization-corrected image with the mask overlaid
ToEventStream(
    pol_corrected_img.combine_latest(mask).starmap(overlay_mask),
    ('image',)).starsink(
        LiveImage('image', window_title='final img',
                  limit_func=lambda im: (np.nanpercentile(im, 2.5),
                                         np.nanpercentile(im, 97.5)),
                  cmap='viridis'))

# Integrated intensities
iq_em = (ToEventStream(mean.combine_latest(q, emit_on=0), ('iq', 'q'))
         .starsink(LiveWaterfall('q', 'iq', units=('1/A', 'Intensity'),
                                 window_title='{} vs {}'.format('iq', 'q')),
                   stream_name='{} {} vis'.format('q', 'iq')))
# Excerpt from a scatter-plot callback's update method: autoscale the axes
# and color limits as new data arrives.
if self.xlim is None:
    minx = np.minimum(np.min(x), self._minx)
    maxx = np.maximum(np.max(x), self._maxx)
    self.ax.set_xlim(minx, maxx)
if self.ylim is None:
    miny = np.minimum(np.min(y), self._miny)
    maxy = np.maximum(np.max(y), self._maxy)
    self.ax.set_ylim(miny, maxy)
if self.clim is None:
    clim = np.nanmin(self._Idata), np.nanmax(self._Idata)
    self.sc.set_clim(*clim)

# Align the peak-position stream with the raw stream and the intensity
# streams, then feed the merged events to two LiveMultiScatter plots.
z = ToEventStream(q_peak_pos, ('q_peaks',)).AlignEventStreams(
    raw_source,
    ToEventStream(mean_intensity, ('mean_I',)),
    ToEventStream(pdf_intensity, ('pdf_I',)),
    ToEventStream(r_peak_pos, ('r_peaks',)))

lms = LiveMultiScatter('q_peaks', 'temperature', 'mean_I')
lms.ax.set_aspect('auto')
lms._norm = SymLogNorm(.001)
lms2 = LiveMultiScatter('r_peaks', 'temperature', 'pdf_I')
lms2.ax.set_aspect('auto')
# z.sink(pprint)
z.starsink(lms)
z.starsink(lms2)