def make_and_start_dispatcher(queue):
    def put_in_queue(name, doc):
        print('putting ', name, 'in queue')
        queue.put((name, doc))

    d = RemoteDispatcher('127.0.0.1:5568')
    d.subscribe(put_in_queue)
    print("REMOTE IS READY TO START")
    d.loop.call_later(9, d.stop)
    d.start()
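A minimal sketch (not part of the original source) of how a helper like this is typically driven: a 0MQ proxy is assumed to be running with in-port 5567 and out-port 5568, a Publisher feeds it from a RunEngine, and the dispatcher runs in a child process. The function name, plan, and simulated detector below are illustrative assumptions.

import multiprocessing
import time

from bluesky import RunEngine
from bluesky.callbacks.zmq import Publisher
from bluesky.plans import count
from ophyd.sim import det


def example_usage():
    queue = multiprocessing.Queue()
    # Run the dispatcher in its own process; d.start() blocks until d.stop().
    proc = multiprocessing.Process(target=make_and_start_dispatcher,
                                   args=(queue,), daemon=True)
    proc.start()
    time.sleep(1)  # crude wait for the dispatcher to connect

    RE = RunEngine({})
    RE.subscribe(Publisher('127.0.0.1:5567'))  # the proxy's "in" port (assumed)
    RE(count([det]))

    name, doc = queue.get(timeout=10)
    assert name == 'start'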
class RunProcess(QRunnable):
    def __init__(self, *args, path, plan_name, axes, **kwargs):
        super().__init__()  # QRunnable takes no constructor arguments
        self.axes = axes
        self.signals = ProcessLine()

    @pyqtSlot()
    def run(self):
        # self.lp = LivePlot(self.device.current.name, x=self.device.offset.name,
        #                    ax=self.axes, marker='.')
        self.plan = Custom_Plan()
        self.plan.PRINT_CALLBACK = self.signals.signal.emit
        self.plan.AXIS = self.axes
        in_q = mp.Queue()
        out_q = mp.Queue()
        # Run the RunEngine in a separate process, talking over the queues.
        self.p_scan = mp.Process(target=execute_RE, args=[self.plan, out_q, in_q])
        self.p_scan.start()
        self.plan.NAMES = out_q.get()
        print(self.plan.NAMES)
        self.plan.describe_printers()
        self.plan.describe_plots()
        self.remote()
        self.p_proxy = mp.Process(target=proxy)
        self.p_proxy.start()
        time.sleep(2)
        in_q.put('start scan')
        self.p_scan.join()
        self.terminate()

    def remote(self):
        self.remote_dispatcher = RemoteDispatcher(('localhost', 5568))
        for printer in self.plan.PRINTERS:
            self.remote_dispatcher.subscribe(printer)
        for plot in self.plan.PLOTS:
            self.remote_dispatcher.subscribe(plot)
        # db = Broker.named('light')
        cat = catalog['light']
        self.remote_dispatcher.subscribe(cat.v1.insert)
        t1 = threading.Thread(target=self.remote_dispatcher.start, daemon=True)
        t1.start()

    def terminate(self):
        self.p_scan.terminate()
        self.p_proxy.terminate()

    def close_worker(self):
        self.remote_dispatcher.stop()
        self.terminate()
        print('things')
class ConsumerThread(QThread):
    documents = Signal([tuple])

    def __init__(self, *args, zmq_address, **kwargs):
        super().__init__(*args, **kwargs)
        self.dispatcher = RemoteDispatcher(zmq_address)

        def callback(name, doc):
            if name == 'start':
                log.debug("New streaming Run: uid=%r", doc['uid'])
            self.documents.emit((name, doc))

        self.dispatcher.subscribe(callback)

    def run(self):
        self.dispatcher.start()
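A hypothetical way to wire this thread into a Qt application; the slot name, the address, and the qtpy import are assumptions, not part of the original snippet.

from qtpy.QtWidgets import QApplication

app = QApplication([])


def on_document(payload):
    name, doc = payload  # the thread emits (name, doc) tuples
    print('received', name)


consumer = ConsumerThread(zmq_address=('localhost', 5568))
consumer.documents.connect(on_document)
consumer.start()  # QThread.start() runs ConsumerThread.run() in a new thread
app.exec_()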
def start_dispatcher(host, port, logfile=None):
    """The dispatcher function

    Parameters
    ----------
    logfile : string
        Deprecated; must be None. Previously took a file name from the
        user command line, e.g. --logfile=temp.log gave "temp.log".
    """
    dispatcher = RemoteDispatcher((host, port))
    if logfile is not None:
        raise ValueError(
            "Parameter 'logfile' is deprecated and will be removed in future "
            "releases. Currently it has no effect. Call the function with "
            "'logfile=None'."
        )

    def log_writer(name, doc):
        """Subscribe-compatible wrapper around the logger.

        It takes the two arguments that .subscribe() expects and writes
        each document (a dict) to the log as a string.
        """
        if name in ('start', 'stop'):
            logger.info("%s: %r", name, doc)
        else:
            logger.debug("%s: %r", name, doc)

    dispatcher.subscribe(log_writer)  # subscribe the log writer
    dispatcher.start()
def start_dispatcher(host, port, logfile):
    """The dispatcher function

    Parameters
    ----------
    logfile : string
        File name taken from the user command line, e.g. --logfile=temp.log
        gives "temp.log". May be empty.
    """
    dispatcher = RemoteDispatcher((host, port))
    if logfile:
        set_handler(file=logfile)

    def log_writer(name, doc):
        """Subscribe-compatible wrapper around the logger.

        It takes the two arguments that .subscribe() expects and writes
        each document (a dict) to the log as a string.
        """
        if name in ('start', 'stop'):
            logger.info("%s: %r", name, doc)
        else:
            logger.debug("%s: %r", name, doc)

    dispatcher.subscribe(log_writer)  # subscribe the log writer
    dispatcher.start()
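For illustration, a wrapper like this would typically be invoked from a small CLI entry point; the host, port, and file name below are assumptions.

if __name__ == '__main__':
    start_dispatcher('localhost', 5578, logfile='dispatcher.log')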
def test_zmq_components():
    # The test `test_zmq` runs Proxy and RemoteDispatcher in a separate
    # process, which coverage misses.
    pid = os.getpid()

    def delayed_sigint(delay):
        time.sleep(delay)
        os.kill(os.getpid(), signal.SIGINT)

    proxy = Proxy(5567, 5568)
    assert not proxy.closed
    threading.Thread(target=delayed_sigint, args=(5,)).start()
    try:
        proxy.start()  # delayed_sigint stops the proxy
    except KeyboardInterrupt:
        ...
    assert proxy.closed
    with pytest.raises(RuntimeError):
        proxy.start()

    proxy = Proxy()  # random port
    threading.Thread(target=delayed_sigint, args=(5,)).start()
    try:
        proxy.start()  # delayed_sigint stops the proxy
    except KeyboardInterrupt:
        ...

    repr(proxy)

    # test that two ways of specifying address are equivalent
    d = RemoteDispatcher('localhost:5555')
    assert d.address == ('localhost', 5555)
    d = RemoteDispatcher(('localhost', 5555))
    assert d.address == ('localhost', 5555)
    repr(d)
def make_and_start_dispatcher(queue):
    def put_in_queue(name, doc):
        print('putting ', name, 'in queue')
        queue.put((name, doc))

    d = RemoteDispatcher('127.0.0.1:5568', deserializer=cloudpickle.loads)
    d.subscribe(put_in_queue)
    print("REMOTE IS READY TO START")
    d.loop.call_later(9, d.stop)
    d.start()
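The publishing side matching this cloudpickle variant would use the corresponding serializer; a sketch, where the 5567 "in" port is an assumption about the usual proxy setup.

import cloudpickle
from bluesky.callbacks.zmq import Publisher

# The serializer and deserializer must agree on both ends of the proxy.
publisher = Publisher('127.0.0.1:5567', serializer=cloudpickle.dumps)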
def make_and_start_dispatcher(queue):
    def put_in_queue(name, doc):
        print('putting ', name, 'in queue')
        queue.put((name, doc))

    d = RemoteDispatcher('127.0.0.1:5568')
    # Apparently an older bluesky API: subscribe() took the document name
    # first, and the event loop was the private attribute ``_loop``.
    d.subscribe('all', put_in_queue)
    print("REMOTE IS READY TO START")
    d._loop.call_later(9, d.stop)
    d.start()
def make_and_start_dispatcher(queue):
    def put_in_queue(name, doc):
        print("putting ", name, "in queue")
        queue.put((name, doc))

    d = RemoteDispatcher("127.0.0.1:5568")
    d.subscribe(put_in_queue)
    print("REMOTE IS READY TO START")
    d.loop.call_later(9, d.stop)
    d.start()
def run(self):
    def put_in_queue(name, doc):
        print("putting ", name, "in queue")
        self._queue.put((name, doc))

    from bluesky.callbacks.zmq import RemoteDispatcher

    d = RemoteDispatcher("127.0.0.1:5568")
    d.subscribe(put_in_queue)
    print("REMOTE IS READY TO START")
    d.loop.call_later(9, d.stop)
    d.start()
def main():
    parser = argparse.ArgumentParser(
        description='Listen for documents over 0MQ and validate Resources.')
    parser.add_argument(
        'proxy_address', type=str,
        help="bluesky-0MQ-proxy out address, given as in localhost:5578")
    parser.add_argument(
        '--emails', required=False, nargs='*',
        help="space-separated list of email addresses")
    args = parser.parse_args()

    log_handler = logging.StreamHandler()  # stderr
    log_handler.setFormatter(LogFormatter())
    logger.setLevel('INFO')
    logger.addHandler(log_handler)

    if args.emails:
        server_name = socket.gethostname()
        smtp_handler = SMTPHandler(
            mailhost='localhost',
            fromaddr=f'Resource Health Check <noreply@{server_name}>',
            toaddrs=args.emails,
            subject=(f'Error report from resource health check on '
                     f'{server_name}')
        )
        smtp_handler.setFormatter(LogFormatter(color=False))
        smtp_handler.setLevel('WARNING')
        # Use QueueHandler in case sending email is slow. LogRecords flow
        # from QueueHandler -> Queue -> QueueListener -> SMTPHandler.
        cleanup_listener = True
        que = queue.Queue()
        queue_handler = QueueHandler(que)
        queue_listener = QueueListener(que, smtp_handler,
                                       respect_handler_level=True)
        logger.addHandler(queue_handler)
        queue_listener.start()
    else:
        cleanup_listener = False

    rr = RunRouter([validator_factory])
    rd = RemoteDispatcher(args.proxy_address)
    rd.subscribe(rr)
    logger.info(f'Listening to {args.proxy_address}')
    try:
        rd.start()  # runs forever
    finally:
        if cleanup_listener:
            queue_listener.stop()
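The validator_factory referenced above is not shown in this snippet; a minimal hypothetical stand-in with the RunRouter factory signature, logging each Resource document it sees, might look like the following (it relies on the module-level logger).

def validator_factory(name, start_doc):
    def validate(name, doc):
        if name == 'resource':
            # Real validation would check that doc['root'] joined with
            # doc['resource_path'] actually exists and is readable.
            logger.info('resource %s: %r', doc['uid'], doc['resource_path'])
    return [validate], []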
def main():
    from bluesky.callbacks.zmq import Publisher, RemoteDispatcher

    parser = argparse.ArgumentParser(
        description="Listen for unfilled documents over 0MQ and emit filled ones.")
    parser.add_argument(
        "receive_from", type=str,
        help="bluesky-0MQ-proxy out address, given as in localhost:5578",
    )
    parser.add_argument(
        "send_to", type=str,
        help="bluesky-0MQ-proxy in address, given as in localhost:5578",
    )
    args = parser.parse_args()

    # Data flows through:
    # * RemoteDispatcher (0MQ)
    # * Accumulator (caches until stop doc is received)
    # * EmittingFiller (fills external data)
    # * Publisher (0MQ)
    publisher = Publisher(args.send_to)
    handler_registry = discover_handlers()

    def factory(name, doc):
        filler = EmittingFiller(handler_registry, inplace=False,
                                callback=publisher, coerce="force_numpy")
        accumulator = Accumulator(callback=filler)
        return [accumulator], []

    rr = RunRouter([factory])
    rd = RemoteDispatcher(args.receive_from)
    rd.subscribe(rr)
    print(f"Listening to {args.receive_from}")
    try:
        rd.start()  # runs forever
    except KeyboardInterrupt:
        print("Terminated by user; exiting")
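EmittingFiller and Accumulator are defined elsewhere in the original script. A rough reconstruction of the EmittingFiller idea, assuming it wraps event_model.Filler and forwards each filled document to a callback; this is a hedged sketch, not the original code.

import event_model


class EmittingFiller(event_model.Filler):
    """A Filler that forwards each (name, filled_doc) pair to a callback."""

    def __init__(self, *args, callback, **kwargs):
        super().__init__(*args, **kwargs)
        self._callback = callback

    def __call__(self, name, doc, **kwargs):
        # Filler.__call__ returns the (name, output_doc) pair.
        name, filled_doc = super().__call__(name, doc, **kwargs)
        self._callback(name, filled_doc)
        return name, filled_doc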
def start_callback():
    from bluesky.callbacks.zmq import RemoteDispatcher
    from SciStreams.config import config as configd
    from SciStreams.interfaces.databroker.databases import databases

    cmsdb = databases['cms:data']
    # db needed to fill events
    stream_buffer = BufferStream(db=cmsdb)

    def callback(*nds):
        nds = stream_buffer(nds)
        stream_input(*nds)

    # get the dispatcher port for the bluesky zeromq process
    ipstring = "localhost:{:4d}".format(configd['bluesky']['port'])
    d = RemoteDispatcher(ipstring)
    d.subscribe(callback)
    # d.subscribe(print)

    # when done subscribing things and ready to use:
    d.start()  # runs event loop forever
# The following fragment comes from inside a class method (note the ``self``).
stream_source = self.descriptor_doc['data_keys'][stream_name]['source']
if stream_source == 'pizzabox-di-file':
    data = load_trig_trace(raw_data)
if stream_source == 'pizzabox-adc-file':
    data = load_adc_trace(raw_data)
    # Apply per-stream offset and gain corrections recorded in the start doc.
    stream_offset = f'{stream_name} offset'
    if stream_offset in self.start_doc:
        data.iloc[:, 1] -= self.start_doc[stream_offset]
    stream_gain = f'{stream_name} gain'
    if stream_gain in self.start_doc:
        data.iloc[:, 1] /= 10 ** self.start_doc[stream_gain]
if stream_source == 'pizzabox-enc-file':
    data = load_enc_trace(raw_data)
    if dev_name == 'hhm_theta':
        # Convert encoder counts to energy.
        data.iloc[:, 1] = xas.xray.encoder2energy(
            data['encoder'], 360000,
            -float(self.start_doc['angle_offset']))
        dev_name = 'energy'

self._preprocessed_data[dev_name] = data


dispatcher = RemoteDispatcher('localhost:5578', prefix=b'raw')

if __name__ == '__main__':
    dispatcher.subscribe(Interpolator(handler_registry))
    dispatcher.start()
     WindowsPath('//XF07ID1-WS17/RSoXS Documents/images/users/Eliot/NIST-Eph=459.9903474-40-primary-sw_det_saxs_image-1.tiff'),
     WindowsPath('//XF07ID1-WS17/RSoXS Documents/images/users/Eliot/NIST-Eph=459.9903474-40-primary-sw_det_waxs_image-1.tiff'),
     WindowsPath('//XF07ID1-WS17/RSoXS Documents/images/users/Eliot/NIST-Eph=460.0084854-40-primary-sw_det_saxs_image-2.tiff'),
     WindowsPath('//XF07ID1-WS17/RSoXS Documents/images/users/Eliot/NIST-Eph=460.0084854-40-primary-sw_det_waxs_image-2.tiff')]}
"""
from event_model import RunRouter
from suitcase import tiff_series, csv
import suitcase.jsonl
import datetime
from bluesky_darkframes import DarkSubtraction
from bluesky.callbacks.zmq import RemoteDispatcher
import databroker.assets.handlers

USERDIR = '/DATA/users/'

dispatcher = RemoteDispatcher('localhost:5578')


def factory(name, start_doc):
    dt = datetime.datetime.now()
    formatted_date = dt.strftime('%Y-%m-%d')
    with suitcase.jsonl.Serializer(file_prefix=('{cycle}/'
                                                '{cycle}_'
                                                '{institution}_'
                                                '{user_name}/'
                                                '{project_name}/'
                                                f'{formatted_date}/'
                                                '{scan_id}/'
                                                '{scan_id}-'
                                                '{sample_name}-'),
                                   directory=USERDIR,
# talk to redis at XPD on xf28id2-srv1:6379
arg_parser.add_argument("--redis-host", type=str, default="xf28id2-srv1")
arg_parser.add_argument("--redis-port", type=int, default=6379)

# subscribe to 0MQ messages at XPD from xf28id2-ca1:5578
arg_parser.add_argument("--zmq-host", type=str, default="xf28id2-ca1")
arg_parser.add_argument("--zmq-subscribe-port", type=int, default=5578)
arg_parser.add_argument("--zmq-subscribe-prefix", type=str, default="rr")

args = arg_parser.parse_args()
pprint.pprint(vars(args))

# this process listens for 0MQ messages with prefix "rr" (roi-reduced)
zmq_dispatcher = ZmqRemoteDispatcher(
    address=(args.zmq_host, args.zmq_subscribe_port),
    prefix=args.zmq_subscribe_prefix.encode(),
)

redis_queue = RedisQueue(
    redis.StrictRedis(host=args.redis_host, port=args.redis_port, db=0))

gpopt = gp_optimizer.GPOptimizer(
    input_space_dimension=3,
    output_space_dimension=1,
    output_number=1,
    index_set_bounds=[[16, 81], [7.5, 60], [340, 460], [0, 1]],
    hyperparameter_bounds=[[0.1, 100], [0.1, 100], [0.1, 100], [0.1, 100]],
)

gpcam_recommender_run_router, _ = recommender_factory(
    gp_optimizer_obj=gpopt,
def start_analysis(
    mask_kwargs=None,
    pdf_kwargs=None,
    fq_kwargs=None,
    mask_setting=None,
    save_kwargs=None,
    # pdf_argrelmax_kwargs=None,
    # mean_argrelmax_kwargs=None
):
    """Start analysis pipeline

    Parameters
    ----------
    mask_kwargs : dict
        The kwargs passed to the masking, see xpdtools.tools.mask_img
    pdf_kwargs : dict
        The kwargs passed to the pdf generator, see xpdtools.tools.pdf_getter
    fq_kwargs : dict
        The kwargs passed to the fq generator, see xpdtools.tools.fq_getter
    mask_setting : dict
        The setting of the mask
    save_kwargs : dict
        The kwargs passed to the main formatting node (mostly the filename
        template)
    """
    # if pdf_argrelmax_kwargs is None:
    #     pdf_argrelmax_kwargs = {}
    # if mean_argrelmax_kwargs is None:
    #     mean_argrelmax_kwargs = {}
    d = RemoteDispatcher(glbl_dict['proxy_address'])
    install_qt_kicker(loop=d.loop)  # This may need to be d._loop depending on tag
    if mask_setting is None:
        mask_setting = {}
    if fq_kwargs is None:
        fq_kwargs = {}
    if pdf_kwargs is None:
        pdf_kwargs = {}
    if mask_kwargs is None:
        mask_kwargs = {}
    if save_kwargs is None:
        save_kwargs = {}
    for a, b in zip(
        [
            mask_kwargs,
            pdf_kwargs,
            fq_kwargs,
            mask_setting,
            save_kwargs,
            # pdf_argrelmax_kwargs,
            # mean_argrelmax_kwargs
        ],
        [
            _mask_kwargs,
            _pdf_kwargs,
            _fq_kwargs,
            _mask_setting,
            _save_kwargs,
            # _pdf_argrelmax_kwargs,
            # _mean_argrelmax_kwargs
        ],
    ):
        if a:
            b.update(a)

    d.subscribe(lambda *x: raw_source.emit(x))
    print('Starting Analysis Server')
    d.start()
from bluesky.callbacks.zmq import RemoteDispatcher


def echo(name, doc):
    print(f'got a {name} document')


d = RemoteDispatcher('127.0.0.1:5678')
d.subscribe(echo)
print("REMOTE IS READY TO START")
d.start()
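For completeness, a sketch of the publishing side that would feed this echo consumer; the RunEngine, the simulated detector, and the proxy "in" port 5677 are assumptions, not part of the original snippet.

from bluesky import RunEngine
from bluesky.callbacks.zmq import Publisher
from bluesky.plans import count
from ophyd.sim import det

RE = RunEngine({})
RE.subscribe(Publisher('127.0.0.1:5677'))  # proxy "in" port (assumed)
RE(count([det], num=3))  # the echo consumer prints one line per document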
import json
import pprint

import redis
from bluesky.callbacks.zmq import RemoteDispatcher as ZmqRemoteDispatcher
from bluesky_adaptive import recommendations
from bluesky_adaptive import per_start
from event_model import RunRouter

# xpdan publishes 0MQ messages with prefix "an"
zmq_listening_prefix = b"an"

zmq_dispatcher = ZmqRemoteDispatcher(address=("127.0.0.1", 5678),
                                     prefix=zmq_listening_prefix)


class RedisQueue:
    "fake just enough of the queue.Queue API on top of redis"

    def __init__(self, client):
        self.client = client

    def put(self, value):
        print(f"pushing to redis queue: {value}")
        self.client.lpush("adaptive", json.dumps(value))


redis_queue = RedisQueue(redis.StrictRedis(host="localhost", port=6379, db=0))
from pymongo import MongoClient
from bluesky.callbacks.zmq import RemoteDispatcher
from suitcase.mongo_layout1 import Serializer

# This listens to a lightweight (0MQ-based) message bus for Documents
# and inserts them into MongoDB.
dispatcher = RemoteDispatcher('localhost:5578')
client = MongoClient('localhost:27017')
serializer = Serializer(client['mds'], client['assets'])
print(client.address)
dispatcher.subscribe(serializer)
dispatcher.start()
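A quick, hypothetical sanity check once some documents have been inserted; the collection names depend on the suitcase layout, so this just lists whatever the serializer has created.

print(client['mds'].list_collection_names())
print(client['assets'].list_collection_names())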
import sys

from bluesky.callbacks.zmq import RemoteDispatcher

# listen for 0MQ messages from xf28id2-ca1:5578; the prefix to subscribe to
# is taken from the first command-line argument, e.g. `python echo.py an`
zmq_server = "xf28id2-ca1:5578"
zmq_prefix = sys.argv[1].encode()


def echo(name, doc):
    print(f"got a {name} document with 0MQ prefix {zmq_prefix}")
    if name == "start":
        print(f"  start id {doc['uid']}")
    elif name == "descriptor":
        print(f"  start id {doc['run_start']}")
        print(f"  uid {doc['uid']}")
    elif name == "event":
        print(f"  descriptor id {doc['descriptor']}")
        print(f"  uid {doc['uid']}")


d = RemoteDispatcher(zmq_server, prefix=zmq_prefix)
d.subscribe(echo)
print("ZMQ ECHO CONSUMER IS RUNNING")
d.start()
db = Broker(mds=mds, reg=fs)
td = TemporaryDirectory()
source = conf_main_pipeline(db, td.name,
                            # vis=False,
                            write_to_disk=False,
                            # mask_setting=None
                            )
# a = LiveImage('pe1_image')
loop = zmq_asyncio.ZMQEventLoop()
install_qt_kicker(loop=loop)


def put_in_queue(nd):
    if nd[0] == 'event':
        nd[1]['data']['pe1_image'] = np.asarray(nd[1]['data']['pe1_image'])
    # if nd[0] == 'event':
    #     db.fill_event(nd[1])
    # print(nd)
    # source.emit(nd)
    a(*nd)
    plt.pause(.1)


disp = RemoteDispatcher('127.0.0.1:5568', loop=loop)
# disp.subscribe(istar(put_in_queue))
# disp.subscribe(a)
disp.subscribe(istar(source.emit))
print("REMOTE IS READY TO START")
# disp._loop.call_later(60, disp.stop)
disp.start()
def main():
    rr = RunRouter([factory])
    dispatcher = RemoteDispatcher('localhost:5578')
    dispatcher.subscribe(rr)
    dispatcher.start()
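The factory here is defined elsewhere; a minimal hypothetical example with the RunRouter factory signature, serializing each run to newline-delimited JSON with suitcase (the output directory name is an assumption).

import suitcase.jsonl


def factory(name, start_doc):
    # One Serializer per run; RunRouter routes this run's documents to it.
    serializer = suitcase.jsonl.Serializer('exported_runs')
    return [serializer], []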
import json

import redis
from bluesky.callbacks.zmq import RemoteDispatcher
from bluesky_adaptive import recommendations
from bluesky_adaptive import per_start

d = RemoteDispatcher("127.0.0.1:5678", prefix=b"adaptive")


class RedisQueue:
    "fake just enough of the queue.Queue API on top of redis"

    def __init__(self, client):
        self.client = client

    def put(self, value):
        print(f"pushing {value}")
        self.client.lpush("adaptive", json.dumps(value))


rq = RedisQueue(redis.StrictRedis(host="localhost", port=6379, db=0))

adaptive_obj = recommendations.StepRecommender(1.5)
independent_keys = ["motor"]
dependent_keys = ["det"]
queue = rq
max_count = 15
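Presumably these values feed a per-start recommender run router that is then subscribed to the dispatcher; a sketch of that wiring follows, with the caveat that the exact recommender_factory signature may vary between bluesky-adaptive versions.

# Wiring sketch (argument names assumed from the variables defined above).
rr, _ = per_start.recommender_factory(
    adaptive_obj, independent_keys, dependent_keys,
    max_count=max_count, queue=queue,
)
d.subscribe(rr)
d.start()  # runs forever, pushing recommendations into the redis queue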
import pprint

from bluesky.callbacks.zmq import RemoteDispatcher as ZmqRemoteDispatcher
from bluesky.callbacks.zmq import Publisher as ZmqPublisher
from event_model import RunRouter

zmq_listening_prefix = b"raw"

zmq_dispatcher = ZmqRemoteDispatcher(
    address=("127.0.0.1", 5678),
    prefix=zmq_listening_prefix
)

# publish 0MQ messages with xpdan's prefix
zmq_publishing_prefix = b"an"
zmq_analysis_publisher = ZmqPublisher(
    address=("127.0.0.1", 4567),
    prefix=zmq_publishing_prefix
)


def zmq_publish_from_analysis_factory(start_name, start_doc):
    print(
        f"zmq_publish_from_analysis_factory called with {start_name}:\n"
        f"{pprint.pformat(start_doc)}\n"
    )

    def zmq_publish_from_analysis(name, doc):
        if name == "start":
            # add batch_count
            print("adding batch_count=1")
            doc["batch_count"] = 1
        print(f"analysis consumer publishing {name}:\n{pprint.pformat(doc)}\n")
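The inner callback is truncated above. Downstream, a factory like this is typically handed to a RunRouter and subscribed to the dispatcher, in the same pattern as the other consumers in this collection; this is a sketch, not the original code.

rr = RunRouter([zmq_publish_from_analysis_factory])
zmq_dispatcher.subscribe(rr)
zmq_dispatcher.start()  # runs forever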