Example #1
import asyncio

from bluesky.callbacks import best_effort
from bluesky.run_engine import RunEngine


def RE(request):
    # Build a RunEngine on a dedicated event loop with asyncio debug mode enabled.
    loop = asyncio.new_event_loop()
    loop.set_debug(True)
    RE = RunEngine({}, loop=loop)

    bec = best_effort.BestEffortCallback()
    RE.subscribe(bec)
    return RE
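
For reference, a minimal sketch of how a RunEngine built by this factory could be exercised; the simulated detector from ophyd.sim is an assumption, and the pytest-style request argument is passed as None since the snippet never uses it:

# sketch only: drive the returned RunEngine with a simple count plan
import bluesky.plans as bp
from ophyd.sim import det  # simulated detector, purely for illustration

engine = RE(None)
engine(bp.count([det], num=3))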
Example #2
import datetime

import databroker
from bluesky.callbacks import best_effort
from bluesky.run_engine import RunEngine
from databroker import Broker
from ophyd.utils import make_dir_tree

from sirepo_bluesky.srw_handler import SRWFileHandler


def bluesky_utils():
    # setup run engine
    RE = RunEngine({})
    bec = best_effort.BestEffortCallback()
    bec.disable_plots()
    RE.subscribe(bec)
    # setup databroker and mongo
    db = Broker.named("local")
    try:
        databroker.assets.utils.install_sentinels(db.reg.config, version=1)
    except Exception:
        pass
    # setup file handler
    RE.subscribe(db.insert)
    db.reg.register_handler("srw", SRWFileHandler, overwrite=True)
    db.reg.register_handler("SIREPO_FLYER", SRWFileHandler, overwrite=True)
    return RE, db
def utils():
    # bluesky RunEngine
    RE = RunEngine({})
    bec = best_effort.BestEffortCallback()
    RE.subscribe(bec)

    # Mongo Backend
    db = Broker.named("local")
    try:
        databroker.assets.utils.install_sentinels(db.reg.config, version=1)
    except Exception:
        pass
    # update database info
    RE.subscribe(db.insert)
    db.reg.register_handler("srw", SRWFileHandler, overwrite=True)

    # store data
    root_dir = "/tmp/sirepo_flyer_data"
    _ = make_dir_tree(datetime.datetime.now().year, base_path=root_dir)

    return RE, db
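
A short usage sketch for the helpers above; the simulated detector from ophyd.sim and the call to utils() are illustrative assumptions:

# sketch only: run a plan and read the results back through the Broker
import bluesky.plans as bp
from ophyd.sim import det  # simulated detector stand-in

RE, db = utils()
RE(bp.count([det], num=5))
print(db[-1].table())  # table of the most recent run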
Example #4
# Register bluesky IPython magics.
#from bluesky.magics import BlueskyMagics
#get_ipython().register_magics(BlueskyMagics)

#nslsii.configure_base(get_ipython().user_ns, 'amx')
import os

import bluesky.plans as bp

from bluesky.run_engine import RunEngine
from bluesky.utils import get_history
RE = RunEngine(get_history())
beamline = os.environ["BEAMLINE_ID"]
from databroker import Broker
db = Broker.named(beamline)

RE.subscribe(db.insert)

# from bluesky.callbacks.best_effort import BestEffortCallback
# bec = BestEffortCallback()
# RE.subscribe(bec)


# convenience imports
# from ophyd.commands import *
from bluesky.callbacks import *
# from bluesky.spec_api import *
# from bluesky.global_state import gs, abort, stop, resume
# from databroker import (DataBroker as db, get_events, get_images,
#                                                 get_table, get_fields, restream, process)
from time import sleep
import numpy as np
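
A brief, hedged sketch of how this startup script would typically be driven from the session it configures; the simulated detector is an assumption standing in for a real beamline device:

# sketch only: take a quick count and confirm the documents reached the Broker
from ophyd.sim import det  # stand-in for a real detector

RE(bp.count([det]))
hdr = db[-1]
print(hdr.table())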
Example #5
        # (this excerpt begins mid-file: the return below closes a function defined above it)
        return np.squeeze(out)


f = FullField()
# det = SynSignal(f, name="img", labels={"detectors"})
det = SynSignalWithRegistry(f, name="img", labels={"detectors"},)
det.kind = "hinted"

#g = Pencil()
#det2 = SynSignal(g, name="img", labels={"detectors"})
#det2.kind = "hinted"

RE = RunEngine()
RE.md['analysis_stage'] = 'raw'
p = Publisher(glbl_dict["inbound_proxy_address"], prefix=b"raw")
t = RE.subscribe(p)
# RE.subscribe(print)

# Build scan
l = [0, 90]
for i in range(8):
    ll = l.copy()
    interval = sorted(set(ll))[1] / 2
    for lll in ll:
        j = lll + interval
        j = round(j, 0)
        if j not in l and j < 180:
            l.append(j)
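# Each pass above halves the current finest spacing and appends the new midpoints
# to the end of l, so the angle list is ordered coarse-to-fine: [0, 90], then 45
# and 135, then roughly 22-degree midpoints, and so on (always staying below 180).
# Earlier frames therefore already sample the full angular range.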
# Run Full Field Scans, each scan has more slices, showing how we can minimize
# the number of slices by interleaving them by half
for i in [180]:
Example #6
import json  # noqa F401

import databroker
import matplotlib.pyplot as plt
import numpy as np  # noqa F401
from bluesky.callbacks import best_effort
from bluesky.run_engine import RunEngine
from databroker import Broker
from ophyd.utils import make_dir_tree

from sirepo_bluesky.shadow_handler import ShadowFileHandler
from sirepo_bluesky.srw_handler import SRWFileHandler

RE = RunEngine({})
bec = best_effort.BestEffortCallback()
RE.subscribe(bec)

# MongoDB backend:
db = Broker.named('local')
try:
    databroker.assets.utils.install_sentinels(db.reg.config, version=1)
except Exception:
    pass

RE.subscribe(db.insert)
db.reg.register_handler('srw', SRWFileHandler, overwrite=True)
# db.reg.register_handler('shadow', ShadowFileHandler, overwrite=True)
db.reg.register_handler('SIREPO_FLYER', SRWFileHandler, overwrite=True)

plt.ion()
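
As a follow-up, a hedged sketch of reading data back through the handlers registered above; it assumes a run has already been executed with a detector whose external files use the 'srw' spec:

# sketch only: fill external SRW data via the registered handler
hdr = db[-1]                # most recent run
tbl = hdr.table(fill=True)  # fill=True loads external files through SRWFileHandler
print(tbl.columns)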
Example #8
File: average.py  Project: zthatch/SHED
# Imports needed by this excerpt. Stream, FromEventStream and ToEventStream come from
# the streamz/rapidz and SHED packages (exact import paths depend on the SHED version),
# and lp / lp2 are live-plot callbacks defined elsewhere in average.py (not shown here).
import matplotlib.pyplot as plt
from bluesky.plans import count
from bluesky.run_engine import RunEngine
from ophyd.sim import hw

# Create a graph
source = Stream()
# Convert from raw event model to data
fes = FromEventStream('event', ('data', 'noisy_det'), source, principle=True)

# Averaging graph
adder = fes.accumulate(lambda x, y: x + y)
counter = fes.accumulate(lambda s, x: s + 1, start=0)
averager = adder.zip(counter).map(lambda x: x[0] / x[1])

# Binned averaging
sw = fes.sliding_window(2).map(sum).map(lambda x: x / 2)

# Convert back to Event Model
tes1 = ToEventStream(averager, ('average', ))
tes2 = ToEventStream(sw, ('binned', ))

# sink to plotting
tes1.sink(lambda x: lp(*x))
tes2.sink(lambda x: lp2(*x))

# Run the scan
RE = RunEngine()
t = RE.subscribe(lambda *x: source.emit(x))
# RE.subscribe(lp3)
# RE.subscribe(print)
source.visualize(source_node=True)
RE(count([hw().noisy_det], 100))
plt.show()
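
To make the averaging branch above concrete, here is the same running mean written out imperatively; the numbers are illustrative only:

# what adder / counter / averager compute, step by step
total, n = 0.0, 0
for x in [1.0, 2.0, 3.0]:
    total += x           # adder: cumulative sum of readings
    n += 1               # counter: number of readings seen
    print(total / n)     # averager: running mean -> 1.0, 1.5, 2.0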