# Following scripts to be run as ipython startup scripts.
# Add contents to ~/.ipython/yourprofile/startup
"""connect with Bluesky"""

from bluesky import RunEngine
from bluesky.utils import PersistentDict

# RunEngine executes plans; its metadata dict is persisted across
# sessions via PersistentDict.
RE = RunEngine()
RE.md = PersistentDict('./metadata.md')

# from ssrltools.utils import setup_user_metadata
# user_md = setup_user_metadata()
# RE.md.update(user_md)

# Import matplotlib and put it in interactive mode so plots refresh
# live while scans run.
import matplotlib.pyplot as plt

plt.ion()

# Optional: set any metadata that rarely changes. in 60-metadata.py

# convenience imports
from bluesky.callbacks import *
import bluesky.plans as bp
import bluesky.plan_stubs as bps
import bluesky.preprocessors as bpp
from time import sleep
import numpy as np
import bluesky.magics

# diagnostics
# Migrate RE.md storage from the legacy get_history() store to a
# PersistentDict kept in md_path, then wire up databroker.
# NOTE(review): get_md_path, os, and logger are assumed to be provided
# by an earlier startup file in this profile — confirm.
old_md = None
md_path = get_md_path()
if not os.path.exists(md_path):
    logger.info(
        "New directory to store RE.md between sessions: %s", md_path)
    # exist_ok=True closes the race between the existence check above
    # and the creation here (another process may create it in between).
    os.makedirs(md_path, exist_ok=True)
    # First run with the new storage: pull the old metadata so it can
    # be migrated below.
    from bluesky.utils import get_history
    old_md = get_history()

from bluesky import RunEngine

RE = RunEngine()

from bluesky.utils import PersistentDict

RE.md = PersistentDict(md_path)
if old_md is not None:
    logger.info('migrating RE.md storage to PersistentDict')
    RE.md.update(old_md)

# keep track of callback subscriptions
callback_db = {}

# set up databroker: every document emitted by the RunEngine is
# inserted into the named mongo-backed catalog.
import databroker

db = databroker.Broker.named('mongoCat')
callback_db['Broker'] = RE.subscribe(db.insert)

# Set up SupplementalData.
from bluesky import SupplementalData

sd = SupplementalData()
'bp', 'bps', 'bpp',
    'summarize_plan', 'np',
    'callback_db',
]

from ..session_logs import logger
logger.info(__file__)

from bluesky import RunEngine

# Set up a RunEngine; metadata is kept in a plain in-memory dict here
# (nothing persists between sessions).
RE = RunEngine({})
RE.md = {}

# keep track of callback subscriptions
callback_db = {}

# Set up a Broker (temporary, in-memory catalog).
import databroker
db = databroker.Broker.named('temp')

# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.
callback_db['db'] = RE.subscribe(db.insert)

# Set up SupplementalData.
from bluesky import SupplementalData
sd = SupplementalData()
# Shared imports for the rest of the startup files.
import warnings

from IPython import get_ipython

import databroker
import ophyd
from ophyd.signal import EpicsSignalBase

# convenience imports
import numpy as np
import bluesky.plan_stubs as bps
import bluesky.plans as bp
import bluesky.preprocessors as bpp
from bluesky.simulators import summarize_plan

# RunEngine with a plain-dict metadata store.
# NOTE(review): RunEngine itself is assumed to be imported by an
# earlier startup file — confirm.
RE = RunEngine({})
RE.md = {}  # empty dict for lessons
# RE.md = PersistentDict(  # beam line use
#     os.path.join(os.environ["HOME"], ".config", "Bluesky_RunEngine_md")
# )

# keep track of callback subscriptions
callback_db = {}

# Temporary (in-memory) Broker, exposed through the v1 API.
db = databroker.temp().v1

# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.
callback_db['db'] = RE.subscribe(db.insert)

# Set up SupplementalData.
from bluesky.plans import *
import numpy as np
from pyOlog.ophyd_tools import *

# Uncomment the following lines to turn on verbose messages for
# debugging.
# import logging
# ophyd.logger.setLevel(logging.DEBUG)
# logging.basicConfig(level=logging.DEBUG)

from pathlib import Path
from historydict import HistoryDict

# Prefer the shared GPFS metadata store; if it is unreachable for any
# reason, fall back to a per-user local history file (best effort, so
# the broad except is intentional).
try:
    RE.md = HistoryDict('/nsls2/xf08id/metadata/bluesky_history.db')
    print('gpfs')
except Exception as exc:
    print('local')
    print(exc)
    fallback = f'{Path.home()}/.config/bluesky/bluesky_history.db'
    RE.md = HistoryDict(fallback)

RE.is_aborted = False

#mds = MDS({'host': 'xf08id-ca1.cs.nsls2.local', 'port': 7770,'timezone': 'US/Eastern'})
#db = Broker(mds, FileStore({'host':'xf08id-ca1.cs.nsls2.local', 'port': 27017, 'database':'filestore'}))
# register_builtin_handlers(db.fs)

start = timer()
# Wire a live-visualization and a databroker-backed results router into
# an xpdAn-style analysis pipeline, then feed it RunEngine documents.
# NOTE(review): create_rr, create_db_rr, create_analysis_pipeline,
# order, cal_params, RE, and np are assumed to come from earlier
# startup files — confirm.
viz_rr = create_rr()
db_rr = create_db_rr(
    '/media/christopher/DATA/Research/Columbia/data/tomo_sim_db')
# Skip the listed analysis stages; mask only on the first frame.
ns = create_analysis_pipeline(order=order,
                              image_names=['dexela'],
                              publisher=db_rr,
                              mask_setting={'setting': 'first'},
                              stage_blacklist=[
                                  'fq', 'sq', 'pdf', 'mask',
                                  'mask_overlay', 'calib', 'dark_sub',
                                  'bg_sub'
                              ])
# Forward every (name, doc) pair emitted by the RunEngine into the
# pipeline's raw source.
RE.subscribe(lambda *x: ns['raw_source'].emit(x))
RE.md = dict(calibration_md=cal_params,
             composition_string='Ni',
             bt_wavelength=.1899,
             analysis_stage='raw')

# important config things!
# TODO: run as pchain
# Build a square phantom array: 1 mm capillary in an 11 mm holder.
pixel_size = .0002
holder_size = 11e-3  # m
array_size = int(holder_size / pixel_size) + 1
arr = np.zeros((array_size, ) * 2)
# 1 mm capilary inside {holder_size} mm holder offset near the front
arr[1:7, array_size // 2 - 3:array_size // 2 + 3] = 1
'''
# important config things!
# TODO: run as pchain
# 10 mm square inside {holder_size} mm holder offset near the front
# Subscribe a BestEffortCallback for peak statistics only (plots and
# tables are disabled).
bec = BestEffortCallback()
bec.disable_plots()
bec.disable_table()
RE.subscribe(bec)
peaks = bec.peaks  # just as alias for less typing

# Make plots update live while scans run.
from bluesky.utils import install_qt_kicker

install_qt_kicker()

from pathlib import Path
from historydict import HistoryDict

# Shared GPFS metadata store first; per-user local file as a best-effort
# fallback when the shared filesystem is unavailable.
try:
    RE.md = HistoryDict('/nsls2/xf08id/metadata/bluesky_history.db')
    print('gpfs')
except Exception as exc:
    print('local')
    print(exc)
    fallback = f'{Path.home()}/.config/bluesky/bluesky_history.db'
    RE.md = HistoryDict(fallback)

RE.is_aborted = False
start = timer()


def ensure_proposal_id(md):
    """Raise ValueError unless *md* carries a 'proposal_id' key."""
    if 'proposal_id' not in md:
        raise ValueError("You forgot the proposal_id.")


# Set up default metadata.
RE.md['group'] = 'iss'
# stdlib
import os
import uuid
from pathlib import Path

# third-party
import numpy as np
from IPython import get_ipython
from ophyd import Device, Component, EpicsSignal
from ophyd.signal import EpicsSignalBase
from ophyd.areadetector.filestore_mixins import resource_factory

# Set up a RunEngine whose metadata survives between sessions, backed
# by a file under the user's home directory.
from bluesky import RunEngine
from bluesky.utils import PersistentDict

RE = RunEngine({})
_history_path = Path("~/.bluesky_history").expanduser()
RE.md = PersistentDict(str(_history_path))

# Set up SupplementalData (baseline readings, monitors, etc.).
from bluesky import SupplementalData

sd = SupplementalData()
RE.preprocessors.append(sd)

# Temporary (in-memory) Broker; subscribe it so every document the
# RunEngine emits gets inserted.
from databroker import Broker

db = Broker.named("temp")
RE.subscribe(db.insert)
import os
import warnings

# convenience imports
import bluesky.plans as bp
import bluesky.plan_stubs as bps
import bluesky.preprocessors as bpp
import numpy as np

from ..session_logs import logger
logger.info(__file__)

# Set up a RunEngine and use metadata-backed PersistentDict.
# NOTE(review): RunEngine, PersistentDict, databroker, and
# SupplementalData are assumed to come from earlier startup files —
# confirm.
RE = RunEngine({})
# os.path.expanduser("~") is portable: unlike os.environ["HOME"] it
# does not raise KeyError when HOME is unset and also works on Windows.
RE.md = PersistentDict(
    os.path.join(os.path.expanduser("~"), ".config", "Bluesky_RunEngine_md")
)

# keep track of callback subscriptions
callback_db = {}

# Connect with mongodb database.
db = databroker.catalog["mongodb_config"]

# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.
callback_db["db"] = RE.subscribe(db.v1.insert)

# Set up SupplementalData.
sd = SupplementalData()
RE.preprocessors.append(sd)