def run(run_start=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)
    rs = np.random.RandomState(5)
    point_det_data = rs.randn(num_exposures)

    # Create Event Descriptors
    data_keys1 = {'point_det': dict(source='PV:ES:PointDet', dtype='number')}
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number'),
                  'Troom': dict(source='PV:ES:Troom', dtype='number')}
    ev_desc1_uid = insert_descriptor(run_start=run_start,
                                     data_keys=data_keys1,
                                     time=common.get_time(),
                                     uid=str(uuid.uuid4()))
    ev_desc2_uid = insert_descriptor(run_start=run_start,
                                     data_keys=data_keys2,
                                     time=common.get_time(),
                                     uid=str(uuid.uuid4()))
    print('event descriptor 1 uid = {0!s}'.format(ev_desc1_uid))
    print('event descriptor 2 uid = {0!s}'.format(ev_desc2_uid))

    # Create Events.
    events = []

    # Point Detector Events
    base_time = common.get_time()
    for i in range(num_exposures):
        time = float(i + 0.5 * rs.randn()) + base_time
        data = {'point_det': point_det_data[i]}
        timestamps = {'point_det': time}
        event_uid = insert_event(descriptor=ev_desc1_uid, seq_num=i,
                                 time=time, data=data,
                                 uid=str(uuid.uuid4()),
                                 timestamps=timestamps)
        event, = find_events(uid=event_uid)
        events.append(event)

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time) + base_time
        data = {'Tsam': temp, 'Troom': temp + 10}
        timestamps = {'Tsam': time, 'Troom': time}
        event_uid = insert_event(descriptor=ev_desc2_uid, time=time,
                                 data=data, seq_num=i,
                                 uid=str(uuid.uuid4()),
                                 timestamps=timestamps)
        event, = find_events(uid=event_uid)
        events.append(event)

    return events
def run(run_start_uid=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)
    rs = np.random.RandomState(5)
    point_det_data = rs.randn(num_exposures) + np.arange(num_exposures)

    # Create Event Descriptors
    data_keys1 = {'point_det': dict(source='PV:ES:PointDet', dtype='number')}
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number')}
    ev_desc1_uid = insert_descriptor(run_start=run_start_uid,
                                     data_keys=data_keys1,
                                     time=common.get_time(),
                                     uid=str(uuid.uuid4()),
                                     name='primary')
    ev_desc2_uid = insert_descriptor(run_start=run_start_uid,
                                     data_keys=data_keys2,
                                     time=common.get_time(),
                                     uid=str(uuid.uuid4()),
                                     name='baseline')

    # Create Events.
    events = []

    # Point Detector Events
    base_time = common.get_time()
    for i in range(num_exposures):
        time = float(2 * i + 0.5 * rs.randn()) + base_time
        data = {'point_det': point_det_data[i]}
        timestamps = {'point_det': time}
        event_dict = dict(descriptor=ev_desc1_uid, seq_num=i, time=time,
                          data=data, timestamps=timestamps,
                          uid=str(uuid.uuid4()))
        event_uid = insert_event(**event_dict)
        # grab the actual event from metadatastore
        event, = find_events(uid=event_uid)
        events.append(event)
        assert event['data'] == event_dict['data']

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time) + base_time
        data = {'Tsam': temp}
        timestamps = {'Tsam': time}
        event_dict = dict(descriptor=ev_desc2_uid, time=time, data=data,
                          timestamps=timestamps, seq_num=i,
                          uid=str(uuid.uuid4()))
        event_uid = insert_event(**event_dict)
        event, = find_events(uid=event_uid)
        events.append(event)
        assert event['data'] == event_dict['data']

    return events
def run(run_start=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)
    rs = np.random.RandomState(5)
    point_det_data = rs.randn(num_exposures)

    # Create Event Descriptors
    data_keys1 = {'point_det': dict(source='PV:ES:PointDet', dtype='number')}
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number'),
                  'Troom': dict(source='PV:ES:Troom', dtype='number')}
    ev_desc1_uid = insert_event_descriptor(run_start=run_start,
                                           data_keys=data_keys1,
                                           time=common.get_time(),
                                           uid=str(uuid.uuid4()))
    ev_desc2_uid = insert_event_descriptor(run_start=run_start,
                                           data_keys=data_keys2,
                                           time=common.get_time(),
                                           uid=str(uuid.uuid4()))
    print('event descriptor 1 uid = %s' % ev_desc1_uid)
    print('event descriptor 2 uid = %s' % ev_desc2_uid)

    # Create Events.
    events = []

    # Point Detector Events
    base_time = common.get_time()
    for i in range(num_exposures):
        time = float(i + 0.5 * rs.randn()) + base_time
        data = {'point_det': point_det_data[i]}
        timestamps = {'point_det': time}
        event_uid = insert_event(descriptor=ev_desc1_uid, seq_num=i,
                                 time=time, data=data,
                                 uid=str(uuid.uuid4()),
                                 timestamps=timestamps)
        event, = find_events(uid=event_uid)
        events.append(event)

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time) + base_time
        data = {'Tsam': temp, 'Troom': temp + 10}
        timestamps = {'Tsam': time, 'Troom': time}
        event_uid = insert_event(descriptor=ev_desc2_uid, time=time,
                                 data=data, seq_num=i,
                                 uid=str(uuid.uuid4()),
                                 timestamps=timestamps)
        event, = find_events(uid=event_uid)
        events.append(event)

    return events
def fetch_events(cls, headers, fill=True):
    """
    Get Events from given run(s).

    Parameters
    ----------
    headers : RunHeader or iterable of RunHeader
        The headers to fetch the events for
    fill : bool, optional
        Whether to fill in non-scalar data. Defaults to True.

    Yields
    ------
    event : Event
        The event, optionally with non-scalar data filled in
    """
    # A single RunHeader is dict-like and has .items(); wrap it in a list
    # so the loop below always iterates over a collection of headers.
    try:
        headers.items()
    except AttributeError:
        pass
    else:
        headers = [headers]

    for header in headers:
        descriptors = find_event_descriptors(run_start=header.run_start_uid)
        for descriptor in descriptors:
            for event in find_events(descriptor=descriptor):
                if fill:
                    fill_event(event)
                yield event
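# Hypothetical usage sketch for fetch_events (not part of the snippet above).
# `DataBroker` is an assumed name for the class that owns this classmethod,
# and `get_recent_headers()` is an assumed helper returning RunHeader
# objects; only the call pattern and the `fill` flag come from the docstring.
headers = get_recent_headers()
for event in DataBroker.fetch_events(headers, fill=False):
    # each yielded `event` is an Event document, non-scalar data left unfilled
    print(event['seq_num'], event['data'])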
def run(run_start=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)

    # Create Event Descriptors
    data_keys = {'Tsam': dict(source='PV:ES:Tsam', dtype='number'),
                 'point_det': dict(source='PV:ES:point_det', dtype='number')}
    ev_desc = insert_descriptor(run_start=run_start, data_keys=data_keys,
                                time=0., uid=str(uuid.uuid4()))

    # Create Events.
    events = []

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time)
        point_det = np.random.randn()
        data = {'Tsam': temp, 'point_det': point_det}
        timestamps = {'Tsam': time, 'point_det': time}
        event_uid = insert_event(descriptor=ev_desc, time=time, data=data,
                                 seq_num=i, timestamps=timestamps,
                                 uid=str(uuid.uuid4()))
        event, = find_events(uid=event_uid)
        events.append(event)

    return events
def hdf_data_io():
    """
    Save data to db and run test when data is retrieved.
    """
    blc = insert_beamline_config({'cfg1': 1}, 0.0)
    run_start_uid = insert_run_start(time=0., scan_id=1, beamline_id='csx',
                                     uid=str(uuid.uuid4()),
                                     beamline_config=blc)

    # data keys entry
    data_keys = {'x_pos': dict(source='MCA:pos_x', dtype='number'),
                 'y_pos': dict(source='MCA:pos_y', dtype='number'),
                 'xrf_spectrum': dict(source='MCA:spectrum', dtype='array',
                                      # shape=(5,),
                                      external='FILESTORE:')}

    # save the event descriptor
    descriptor_uid = insert_event_descriptor(run_start=run_start_uid,
                                             data_keys=data_keys, time=0.,
                                             uid=str(uuid.uuid4()))

    # number of positions to record, basically along a horizontal line
    num = 5

    events = []
    for i in range(num):
        v_pos = 0
        h_pos = i

        spectrum_uid = get_data(v_pos, h_pos)

        # Put in actual ndarray data, as broker would do.
        # Map the horizontal/vertical positions onto the declared
        # 'x_pos'/'y_pos' data keys so the event matches the descriptor.
        data1 = {'xrf_spectrum': spectrum_uid,
                 'x_pos': h_pos,
                 'y_pos': v_pos}
        timestamps1 = {k: noisy(i) for k in data1}

        event_uid = insert_event(descriptor=descriptor_uid, seq_num=i,
                                 time=noisy(i), data=data1,
                                 uid=str(uuid.uuid4()),
                                 timestamps=timestamps1)
        event, = find_events(uid=event_uid)
        # test on retrieve data for all data sets
        events.append(event)

    return events
def update(self):
    """Obtain a fresh list of the relevant Events."""
    # like fetch_events, but we don't fill in the data right away
    events = []
    for header in self.headers:
        descriptors = find_event_descriptors(run_start=header.run_start_uid)
        for descriptor in descriptors:
            events.extend(list(find_events(descriptor=descriptor)))
    if not events:
        return

    new_events = []
    for event in events:
        if event.uid not in self._known_uids:
            new_events.append(event)
            self._known_uids.add(event.uid)

    # The major performance savings is here: only fill the new events.
    for event in new_events:
        fill_event(event)

    self._queue.append(new_events)  # the entry can be an empty list
def run(run_start=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)

    # Create Event Descriptors
    data_keys = {
        "Tsam": dict(source="PV:ES:Tsam", dtype="number"),
        "point_det": dict(source="PV:ES:point_det", dtype="number"),
    }
    conf = {
        "point_det": {
            "data_keys": {"exposure_time": {"source": "PV:ES:point_det_exp"}},
            "data": {"exposure_time": 5},
            "timestamps": {"exposure_time": 0.0},
        }
    }
    ev_desc = insert_descriptor(
        run_start=run_start,
        data_keys=data_keys,
        time=0.0,
        uid=str(uuid.uuid4()),
        configuration=conf,
    )

    # Create Events.
    events = []

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time)
        point_det = np.random.randn()
        data = {"Tsam": temp, "point_det": point_det}
        timestamps = {"Tsam": time, "point_det": time}
        event_uid = insert_event(
            descriptor=ev_desc,
            time=time,
            data=data,
            seq_num=i,
            timestamps=timestamps,
            uid=str(uuid.uuid4()),
        )
        event, = find_events(uid=event_uid)
        events.append(event)

    return events
# `descriptor` and `run_start` are created earlier in this script
# (see the fuller snippet below).
func = np.cos
num = 1000
start = 0
stop = 10
sleep_time = .1
for idx, i in enumerate(np.linspace(start, stop, num)):
    data = {'linear_motor': i,
            'Tsam': i + 5,
            'scalar_detector': func(i) + np.random.randn() / 100}
    ts = {k: time.time() for k in data}
    e = insert_event(descriptor=descriptor, seq_num=idx, time=time.time(),
                     timestamps=ts, data=data, uid=str(uuid.uuid4()))

insert_run_stop(run_start, time=time.time(), uid=str(uuid.uuid4()))

last_run = next(find_last())
try:
    if str(last_run.uid) != str(run_start):
        print("find_last() is broken")
except AttributeError as ae:
    print(ae)

# materialize the results so the emptiness check below is meaningful
# even if find_events() returns a generator
res_2 = list(find_events(descriptor=descriptor))
if not res_2:
    print("find_events() is broken")
def run(run_start_uid=None, sleep=0):
    frame_generator = frame_generators.brownian(img_size, step_scale=.5,
                                                I_fluc_function=I_func_gaus,
                                                step_fluc_function=scale_fluc)
    # seed data to make deterministic
    np.random.seed(5)

    # set up the data keys entry
    data_keys1 = {'linear_motor': dict(source='PV:ES:sam_x', dtype='number'),
                  'img': dict(source='CCD', shape=(5, 5), dtype='array',
                              external='FILESTORE:'),
                  'total_img_sum': dict(source='CCD:sum', dtype='number'),
                  'img_x_max': dict(source='CCD:xmax', dtype='number'),
                  'img_y_max': dict(source='CCD:ymax', dtype='number'),
                  'img_sum_x': dict(source='CCD:xsum', dtype='array',
                                    shape=(5,), external='FILESTORE:'),
                  'img_sum_y': dict(source='CCD:ysum', dtype='array',
                                    shape=(5,), external='FILESTORE:')
                  }
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number')}

    # save the event descriptors
    descriptor1_uid = insert_descriptor(run_start=run_start_uid,
                                        data_keys=data_keys1, time=0.,
                                        uid=str(uuid.uuid4()))
    descriptor2_uid = insert_descriptor(run_start=run_start_uid,
                                        data_keys=data_keys2, time=0.,
                                        uid=str(uuid.uuid4()))

    events = []
    for idx1, i in enumerate(range(num1)):
        img = next(frame_generator)
        img_sum = float(img.sum())
        img_sum_x = img.sum(axis=0)
        img_sum_y = img.sum(axis=1)
        img_x_max = float(img_sum_x.argmax())
        img_y_max = float(img_sum_y.argmax())

        fsid_img = save_ndarray(img)
        fsid_x = save_ndarray(img_sum_x)
        fsid_y = save_ndarray(img_sum_y)

        # Put in actual ndarray data, as broker would do.
        data1 = {'linear_motor': i,
                 'total_img_sum': img_sum,
                 'img': fsid_img,
                 'img_sum_x': fsid_x,
                 'img_sum_y': fsid_y,
                 'img_x_max': img_x_max,
                 'img_y_max': img_y_max
                 }
        timestamps1 = {k: noisy(i) for k in data1}

        event_uid = insert_event(descriptor=descriptor1_uid, seq_num=idx1,
                                 time=noisy(i), data=data1,
                                 timestamps=timestamps1,
                                 uid=str(uuid.uuid4()))
        event, = find_events(uid=event_uid)
        events.append(event)

        # temperature events recorded at each motor position
        for idx2, i2 in enumerate(range(num2)):
            time = noisy(i / num2)
            data2 = {'Tsam': idx1 + np.random.randn()}
            timestamps2 = {'Tsam': time}
            event_uid = insert_event(descriptor=descriptor2_uid,
                                     seq_num=idx2 + idx1, time=time,
                                     data=data2, uid=str(uuid.uuid4()),
                                     timestamps=timestamps2)
            event, = find_events(uid=event_uid)
            events.append(event)
        ttime.sleep(sleep)

    return events
# Create an EventDescriptor that indicates the data
# keys and serves as header for set of Event(s)
descriptor = insert_descriptor(data_keys=data_keys, time=time.time(),
                               run_start=run_start, uid=str(uuid.uuid4()))

func = np.cos
num = 1000
start = 0
stop = 10
sleep_time = .1
for idx, i in enumerate(np.linspace(start, stop, num)):
    data = {'linear_motor': i,
            'Tsam': i + 5,
            'scalar_detector': func(i) + np.random.randn() / 100}
    ts = {k: time.time() for k in data}
    e = insert_event(descriptor=descriptor, seq_num=idx, time=time.time(),
                     timestamps=ts, data=data, uid=str(uuid.uuid4()))

insert_run_stop(run_start, time=time.time(), uid=str(uuid.uuid4()))

last_run = next(find_last())
try:
    if str(last_run.uid) != str(run_start):
        print("find_last() is broken")
except AttributeError as ae:
    print(ae)

# materialize the results so the emptiness check below is meaningful
# even if find_events() returns a generator
res_2 = list(find_events(descriptor=descriptor))
if not res_2:
    print("find_events() is broken")
scan_id = 1
custom = {"plotx": "linear_motor", "ploty": "scalar_detector"}

# Create a BeginRunEvent that serves as entry point for a run
rs = insert_run_start(scan_id=scan_id, beamline_id="csx", time=time.time(),
                      beamline_config=b_config, custom=custom)

# Create an EventDescriptor that indicates the data
# keys and serves as header for set of Event(s)
e_desc = insert_event_descriptor(data_keys=data_keys, time=time.time(),
                                 run_start=rs)

func = np.cos
num = 1000
start = 0
stop = 10
sleep_time = 0.1
for idx, i in enumerate(np.linspace(start, stop, num)):
    data = {
        "linear_motor": [i, time.time()],
        "Tsam": [i + 5, time.time()],
        "scalar_detector": [func(i) + np.random.randn() / 100, time.time()],
    }
    e = insert_event(event_descriptor=e_desc, seq_num=idx, time=time.time(),
                     data=data)

last_run = next(find_last())
try:
    if str(last_run.id) != str(rs.id):
        print("find_last() is broken")
except AttributeError as ae:
    print(ae)

res_2 = find_events(descriptor=e_desc)
if not res_2:
    print("find_events() is broken")