def test_basic_usage():
    for i in range(5):
        insert_run_start(time=float(i), scan_id=i + 1,
                         owner='nedbrainard', beamline_id='example',
                         uid=str(uuid.uuid4()))
    header_1 = db[-1]

    header_ned = db(owner='nedbrainard')
    header_ned = db.find_headers(owner='nedbrainard')  # deprecated API
    header_null = db(owner='this owner does not exist')

    # smoke test
    db.fetch_events(header_1)
    db.fetch_events(header_ned)
    db.fetch_events(header_null)
    list(get_events(header_1))
    list(get_events(header_null))
    get_table(header_1)
    get_table(header_ned)
    get_table(header_null)

    # get events for multiple headers
    list(get_events(db[-2:]))

    # test time shift issue GH9
    table = get_table(db[105])
    assert table.notnull().all().all()
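# For reference (a minimal sketch, assuming the same in-memory `db` as the
# test above): integer indexing returns a single Header, while a search
# returns a list-like of Headers.
header = db[-1]                    # the single most recent run
headers = db(owner='nedbrainard')  # all runs matching the query
for h in headers:
    print(h['start']['scan_id'], h['start']['uid'])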
def test_data_key():
    rs1_uid = insert_run_start(time=100.0, scan_id=1,
                               owner='nedbrainard', beamline_id='example',
                               uid=str(uuid.uuid4()))
    rs2_uid = insert_run_start(time=200.0, scan_id=2,
                               owner='nedbrainard', beamline_id='example',
                               uid=str(uuid.uuid4()))
    rs1, = find_run_starts(uid=rs1_uid)
    rs2, = find_run_starts(uid=rs2_uid)
    data_keys = {'fork': {'source': '_', 'dtype': 'number'},
                 'spoon': {'source': '_', 'dtype': 'number'}}
    insert_descriptor(run_start=rs1_uid, data_keys=data_keys, time=100.0,
                      uid=str(uuid.uuid4()))
    insert_descriptor(run_start=rs2_uid, data_keys=data_keys, time=200.0,
                      uid=str(uuid.uuid4()))
    result1 = db(data_key='fork')
    result2 = db(data_key='fork', start_time=150)
    assert len(result1) == 2
    assert len(result2) == 1
    actual = result2[0]['start']['uid']
    assert actual == str(rs2.uid)
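# The data_key search matches runs whose event descriptors declare that key,
# and start_time filters on the run start time. A minimal sketch using the
# runs inserted above:
for h in db(data_key='fork'):
    # both runs declare 'fork'; start_time=150 would keep only the t=200 run
    print(h['start']['scan_id'], h['start']['time'])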
def test_find_by_string_time():
    uid = insert_run_start(time=ttime.time(), scan_id=1,
                           owner='nedbrainard', beamline_id='example',
                           uid=str(uuid.uuid4()))
    today = datetime.today()
    tomorrow = date.today() + timedelta(days=1)
    today_str = today.strftime('%Y-%m-%d')
    tomorrow_str = tomorrow.strftime('%Y-%m-%d')
    result = db(start_time=today_str, stop_time=tomorrow_str)
    assert uid in [hdr['start']['uid'] for hdr in result]
def wrap_refresh(change):
    """Query the databroker with user-supplied text."""
    try:
        query = eval("dict({})".format(db_search_widget.value))
        headers = db(**query)
    except NameError:
        headers = []
        db_search_widget.value += " -- is an invalid search"
    scan_id_dict = get_scan_id_dict(headers)
    select_scan_id_widget.options = scan_id_dict
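# Note: eval() on user-supplied text executes arbitrary code. A safer sketch
# (hypothetical parse_query helper, assuming values are Python literals such
# as numbers or quoted strings; the simple comma split does not handle
# commas inside values):
import ast

def parse_query(text):
    """Parse 'key=value, key2=value2' search text into a query dict."""
    query = {}
    for part in text.split(','):
        key, _, value = part.partition('=')
        query[key.strip()] = ast.literal_eval(value.strip())
    return query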
ns = [2, 3, 4, 5]
ns.sort()
# For each sample plot the intra-sample temperature curve
for i in ns:
    print(i)
    save_folder = '../S{}'.format(i)
    # Get the folder where the data is
    folder = ('/mnt/bulk-data/research_data/USC_beamtime/APS_March_2016/S'
              + str(i) + '/temp_exp')
    # Get the run header associated with that folder
    hdr = db(run_folder=folder)[0]
    # Mux the data so that we have the correct Temp->data relationship
    dm = DataMuxer()
    dm.append_events(get_events(hdr))
    df = dm.to_sparse_dataframe()
    print(df.keys())
    binned = dm.bin_on('img', interpolation={'T': 'linear'})
    for plot_type in ['gr', 'chi']:
        if plot_type == 'gr':
            # Only do the G(r)
            key_list = [f for f in os.listdir(folder) if
                        [fig1, fig11, fig2, fig3, fig4, fig5]):
        fig.savefig(
            save_stem + '{}_{}.{}'.format(name, plot_name, end))
    cbfig, cbax = plt.subplots(figsize=(.5, 6))
    cb = mpl.colorbar.ColorbarBase(
        cbax,
        norm=mpl.colors.Normalize(vmin=np.min(img), vmax=np.max(img)),
        format='%.0e')
    cbfig.savefig(save_stem + '{}_cb.{}'.format(name, end),
                  bbox_inches='tight', transparent=True)
else:
    plt.show()

hdrs = db(run_folder='/mnt/bulk-data/research_data/USC_beamtime/'
                     'APS_March_2016/S1/temp_exp')
hdr = hdrs[0]
print(hdr['start']['run_folder'], hdr['start']['uid'])
# Get calibrations
if not hdr['start']['is_calibration']:
    cals = [db[u]['start']['poni'][0] for u in hdr['start']['calibration']]
else:
    cals = [p for p in hdr['start']['poni']]
geos = [retrieve(p) for p in cals]
cal_dists = np.asarray([g.dist for g in geos]) * 100  # convert to centimeters
def test_find_by_float_time():
    uid = insert_run_start(time=100., scan_id=1, owner='nedbrainard',
                           beamline_id='example', uid=str(uuid.uuid4()))
    result = db(start_time=99, stop_time=101)
    assert uid in [hdr['start']['uid'] for hdr in result]
# I've set up the Metadatastore config in ~/.config/metadatastore/connection.yml
# and the Filestore config in ~/.config/filestore/connection.yml
from databroker import db

# Find run number 154338
db(scan_id=154338)

# All runs started on Sunday
db(start_time='2016-12-18')

# All runs started/stopped on Sunday
db(start_time='2016-12-18', stop_time='2016-12-18')

# Get the 'bs_neutrons' data into a pandas DataFrame
h = db(scan_id=154338)
t = db.get_table(h, fields=['bs_neutrons'])
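# A sketch of iterating over the results (assuming the same db and the
# 'bs_neutrons' field above; get_table returns a pandas DataFrame):
for h in db(start_time='2016-12-18'):
    print(h['start']['scan_id'], h['start']['uid'])

t = db.get_table(db[-1], fields=['bs_neutrons'])
print(t.head())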