def read_scan(sid, fill=True):
    """Read event data for a scan.

    Parameters
    ----------
    sid : scan id
        Anything DataBroker accepts as a key (e.g. scan number or uid).
    fill : bool, optional
        Passed through to ``DataBroker.fetch_events`` (whether to fill
        external data references).

    Returns
    -------
    data : sparse DataFrame of the scan's events (from DataMuxer)
    start_time : str
        First event time, formatted 'YYYY-MM-DD HH:MM:SS'.
    end_time : str
        Last event time, formatted 'YYYY-MM-DD HH:MM:SS'.
    """
    # Local imports keep the heavy dataportal dependency out of module load.
    from dataportal import DataBroker as db, DataMuxer as dm
    import datetime

    hdr = db[sid]
    events = db.fetch_events(hdr, fill=fill)
    muxer = dm.from_events(events)
    data = muxer.to_sparse_dataframe()

    dt = data.time
    fmt = '%Y-%m-%d %H:%M:%S'
    # Positional indexing (iloc) works regardless of the frame's index labels,
    # unlike dt[0] / dt[len(dt) - 1], which rely on a default integer index.
    start_time = datetime.datetime.fromtimestamp(dt.iloc[0]).strftime(fmt)
    end_time = datetime.datetime.fromtimestamp(dt.iloc[-1]).strftime(fmt)
    print('the first scan time is: %s' % start_time)
    print('the last scan time is: %s' % end_time)
    return data, start_time, end_time
def test_scan_and_get_data():
    """Smoke test: run a step scan through the RunEngine and fetch its events."""
    # Both optional backends must be importable; otherwise skip the test.
    try:
        import metadatastore
        del metadatastore  # imported only to probe availability
    except ImportError:
        raise SkipTest
    try:
        from databroker import DataBroker as db
    except ImportError:
        raise SkipTest

    from bluesky.standard_config import gs
    uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing',
                config={})
    db.fetch_events(db[uid])
def test_timestamps_as_data():
    """Timestamp columns can be added to and removed from the muxer frame."""
    hdr = DataBroker[-1]
    events = DataBroker.fetch_events(hdr)
    dm = DataMuxer.from_events(events)
    for name in list(dm.sources.keys()):
        col = '{}_timestamp'.format(name)
        dm.include_timestamp_data(name)
        # Plain asserts replace the deprecated nose assert_true/assert_false.
        assert col in dm._dataframe
        dm.remove_timestamp_data(name)
        assert col not in dm._dataframe
def test_basic_usage():
    """Smoke-test header lookup, event fetching, and table construction."""
    for i in range(5):
        insert_run_start(time=float(i), scan_id=i + 1, owner='nedbrainard',
                         beamline_id='example', uid=str(uuid.uuid4()))
    header_1 = db[-1]
    header_ned = db(owner='nedbrainard')
    header_ned = db.find_headers(owner='nedbrainard')  # deprecated API
    header_null = db(owner='this owner does not exist')
    headers = (header_1, header_ned, header_null)
    # smoke test: none of these calls should raise
    for hdr in headers:
        db.fetch_events(hdr)
    list(get_events(header_1))
    list(get_events(header_null))
    for hdr in headers:
        get_table(hdr)
    # get events for multiple headers
    list(get_events(db[-2:]))
    # test time shift issue GH9
    table = get_table(db[105])
    assert table.notnull().all().all()
def test_attributes():
    """Basic attribute and item access on a DataMuxer must not raise."""
    hdr = DataBroker[-1]
    dm = DataMuxer.from_events(DataBroker.fetch_events(hdr))
    # merely testing that basic usage does not error
    for data_key in dm.sources.keys():
        getattr(dm, data_key)
        dm[data_key]
    for prop in ('ncols', '_dataframe', 'col_info_by_ndim', 'sources',
                 'col_info', '_data', '_time', '_timestamps',
                 '_timestamps_as_data', '_known_events',
                 '_known_descriptors', '_stale'):
        getattr(dm, prop)
def test_basic_usage():
    """Smoke-test the current and deprecated header/event/table APIs."""
    # NOTE(review): SOURCE contains an earlier function also named
    # test_basic_usage; if both live in the same module, this definition
    # shadows the first and only this one runs. Consider renaming one.
    for i in range(5):
        insert_run_start(time=float(i), scan_id=i + 1, owner='nedbrainard',
                         beamline_id='example', uid=str(uuid.uuid4()))
    header_1 = db[-1]
    header_ned = db(owner='nedbrainard')
    header_ned = db.find_headers(owner='nedbrainard')  # deprecated API
    header_null = db(owner='this owner does not exist')
    headers = (header_1, header_ned, header_null)
    # smoke test: none of these calls should raise
    for hdr in headers:
        db.fetch_events(hdr)
    for hdr in headers:
        get_events(hdr)
    for hdr in headers:
        get_table(hdr)
    # get events for multiple headers
    get_events([header_1, header_ned])
def __iter__(self):
    """Yield this object's data values, one per event; empty if no key is set."""
    # Guard clause: with no key configured the iterator produces nothing,
    # matching the original `if self.key:` gate.
    if not self.key:
        return
    for event in db.fetch_events(self.header, fill=False):
        yield event['data'][self.key]