Example No. 1
    def test_get_callback_func(self):
        dbroker = DataBroker()

        test_topic = "/uri/to/topic"
        client_id = device_id_gen()
        message = WIFIPayloadGenerator().get()

        callback = dbroker.get_callback_func(test_topic)

        res = callback(client_id, None, message)

        self.assertIsNone(res)
Example No. 2
def test_scan_and_get_data():
    # Skip when the optional metadatastore / databroker stack is not installed.
    try:
        import metadatastore
        del metadatastore
    except ImportError:
        raise SkipTest
    try:
        from databroker import DataBroker as db
    except ImportError:
        raise SkipTest
    from bluesky.standard_config import gs
    # det, motor, and stepscan are assumed to come from the test module's
    # bluesky fixtures (simulated detector, motor, and step-scan plan).
    uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing',
                config={})

    hdr = db[uid]
    db.fetch_events(hdr)
Example No. 3
def read_scan(sid, fill=True):
    '''Read the data for scan id `sid`; return it with its start and end times.'''

    from dataportal import DataBroker as db, DataMuxer as dm
    import datetime

    hdr = db[sid]
    ev = db.fetch_events(hdr, fill=fill)
    muxer = dm.from_events(ev)
    data = muxer.to_sparse_dataframe()
    dt = data.time

    t1 = datetime.datetime.fromtimestamp(dt[0]).strftime('%Y-%m-%d %H:%M:%S')
    t2 = datetime.datetime.fromtimestamp(
        dt[len(dt) - 1]).strftime('%Y-%m-%d %H:%M:%S')

    print('the first scan time is:   %s' % t1)
    print('the last scan time  is:   %s' % t2)
    start_time = t1
    end_time = t2
    return data, start_time, end_time
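
A minimal usage sketch for read_scan, assuming the dataportal stack above is configured and that the scan id used here (1234, purely illustrative) exists in the connected metadata store:

# Illustrative scan id; substitute one that exists in your metadata store.
scan_id = 1234

# read_scan returns the sparse DataMuxer frame plus the human-readable
# timestamps of the first and last events.
data, start_time, end_time = read_scan(scan_id, fill=True)
print(data.columns)
print(start_time, end_time)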
Example No. 4
    def iterate_over_stream(self, stream_name, fill=False, key=None, **kwargs):
        if key is None:
            key = self.key

        if key:
            for event in db.get_events(self.header, fill=fill,
                                       name=stream_name):
                yield event['data'][key]
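
A hedged usage sketch for the generator above, assuming exporter is an instance of the enclosing class with its header and key attributes already set, and that the run has the usual bluesky 'primary' event stream (all of these names are assumptions):

# Yields the value stored under exporter.key for each event in the stream.
for value in exporter.iterate_over_stream('primary'):
    print(value)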
Example No. 5
def test_basic_usage():
    for i in range(5):
        insert_run_start(time=float(i), scan_id=i + 1,
                         owner='nedbrainard', beamline_id='example',
                         uid=str(uuid.uuid4()))
    header_1 = db[-1]

    header_ned = db(owner='nedbrainard')
    header_ned = db.find_headers(owner='nedbrainard')  # deprecated API
    header_null = db(owner='this owner does not exist')
    # smoke test
    db.fetch_events(header_1)
    db.fetch_events(header_ned)
    db.fetch_events(header_null)
    list(get_events(header_1))
    list(get_events(header_null))
    get_table(header_1)
    get_table(header_ned)
    get_table(header_null)

    # get events for multiple headers
    list(get_events(db[-2:]))

    # test time shift issue GH9
    table = get_table(db[105])
    assert table.notnull().all().all()
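
For reference, a short sketch of what the table calls above hand back, assuming the legacy databroker API used in this example; get_table returns a pandas DataFrame with one row per event:

import pandas as pd

table = get_table(db[-1])              # most recent run
assert isinstance(table, pd.DataFrame)
print(table.head())                    # data keys as columns, events as rows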
Example No. 6
def test_get_resource_uid():
    from databroker import DataBroker as db
    fs = FileStoreMoving(db.fs.config)
    old_fs = db.fs
    db.fs = fs
    register_builtin_handlers(fs)
    events = list(image_and_scalar.run())
    fs_res = set()
    run_start = events[0]['descriptor']['run_start']
    hdr = db[run_start['uid']]
    for ev in db.get_events(hdr, fill=False):
        for k in ['img', 'img_sum_x', 'img_sum_y']:
            dd = ev['data']
            if k in dd:
                fs_res.add(fs.resource_given_eid(dd[k])['uid'])

    assert fs_res == db.get_resource_uids(hdr)
    db.fs = old_fs
Example No. 7
def test_scan_and_get_data():
    try:
        import metadatastore
        del metadatastore
    except ImportError:
        raise SkipTest
    try:
        from databroker import DataBroker as db
    except ImportError:
        raise SkipTest
    from bluesky.standard_config import gs
    uid = gs.RE(stepscan(det, motor),
                group='foo',
                beamline_id='testing',
                config={})

    hdr = db[uid]
    db.fetch_events(hdr)
Example No. 8
def test_timestamps_as_data():
    hdr = DataBroker[-1]
    events = DataBroker.fetch_events(hdr)
    dm = DataMuxer.from_events(events)
    data_name = list(dm.sources.keys())
    for name in data_name:
        dm.include_timestamp_data(name)
        assert_true('{}_timestamp'.format(name) in dm._dataframe)
        dm.remove_timestamp_data(name)
        assert_false('{}_timestamp'.format(name) in dm._dataframe)
Example No. 9
    def event(self, doc):
        if self.field not in doc['data']:
            raise KeyError('required field = {} is not in header'
                           .format(self.field))

        db.fill_event(doc)  # modifies in place
        image = np.asarray(doc['data'][self.field])
        if self.dark_uid is not None:
            dark_header = db[self.dark_uid]
            dark_img = db.get_images(dark_header, self.field)
        else:
            # No dark frame available; subtract an all-zero image instead.
            dark_img = np.zeros_like(image)
        image = np.clip(image - dark_img, 0, None)
        if image.ndim == 2:
            filename = self.template.format(start=self._start, event=doc)
            self._save_image(image, filename)
        if image.ndim == 3:
            for i, plane in enumerate(image):
                filename = self.template.format(i=i, start=self._start,
                                                event=doc)
                self._save_image(plane, filename)
        return filename
Example No. 10
    def event(self, doc):
        if self.field not in doc['data']:
            raise KeyError('required field = {} is not in header'.format(
                self.field))

        db.fill_event(doc)  # modifies in place
        image = np.asarray(doc['data'][self.field])
        if self.dark_uid is not None:
            dark_header = db[self.dark_uid]
            dark_img = db.get_images(dark_header, self.field)
        else:
            # No dark frame available; subtract an all-zero image instead.
            dark_img = np.zeros_like(image)
        image = np.clip(image - dark_img, 0, None)
        if image.ndim == 2:
            filename = self.template.format(start=self._start, event=doc)
            self._save_image(image, filename)
        if image.ndim == 3:
            for i, plane in enumerate(image):
                filename = self.template.format(i=i,
                                                start=self._start,
                                                event=doc)
                self._save_image(plane, filename)
        return filename
Example No. 11
def test_attributes():
    hdr = DataBroker[-1]
    events = DataBroker.fetch_events(hdr)
    dm = DataMuxer.from_events(events)
    # merely testing that basic usage does not error
    for data_key in dm.sources.keys():
        getattr(dm, data_key)
        dm[data_key]
    properties = ['ncols', '_dataframe', 'col_info_by_ndim', 'sources',
                  'col_info', '_data', '_time', '_timestamps',
                  '_timestamps_as_data', '_known_events', '_known_descriptors',
                  '_stale']
    for prop in properties:
        getattr(dm, prop)
Example No. 12
    def test_run_loop(self):

        dbroker = DataBroker()

        # Get callback to stop the loop
        disconnect_callback = dbroker.get_callback_func("disconnect")

        # On system messages, stop listening
        sub_topic = "$SYS/#"
        dbroker.set_callback_func(sub_topic, disconnect_callback)

        # Start looping. Must exit at any message with True
        dbroker.initialize()
        res = dbroker.run_loop()

        self.assertTrue(res)
Example No. 13
    def f(name, doc):
        if name != 'stop':
            return
        uid = ensure_uid(doc['run_start'])
        header = db[uid]
        callback('start', header['start'])
        for descriptor in header['descriptors']:
            callback('descriptor', descriptor)
        for event in db.get_events(header, fill=fill):
            callback('event', event)
        # Depending on the order that this callback and the
        # databroker-insertion callback were called in, the databroker might
        # not yet have the 'stop' document that we currently have, so we'll
        # use our copy instead of expecting the header to include one.
        callback('stop', doc)
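
A sketch of how a 'stop'-triggered replay function like f is typically wired up, assuming a bluesky RunEngine instance and that db, callback, and fill are available from the enclosing scope, as in the snippet above:

from bluesky import RunEngine

RE = RunEngine({})

# f ignores everything except 'stop' documents; once a run closes, it replays
# the entire run from the databroker into callback.
RE.subscribe(f)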
Example No. 14
def test_alias():
    # basic usage of alias
    uid1 = db[-1]
    db.alias('foo', uid=uid1)
    print(db.aliases)
    db.foo == db[-1]

    # can't set alias to existing attribute name
    with pytest.raises(ValueError):
        db.alias('get_events', uid=uid1)
    with pytest.raises(ValueError):
        db.dynamic_alias('get_events', lambda: {'uid': uid1})

    # basic usage of dynamic alias
    db.dynamic_alias('bar', lambda: uid1)
    db.bar == uid1

    # normal AttributeError still works
    with pytest.raises(AttributeError):
        db.this_is_not_a_thing
Example No. 15
def verify_files_saved(name, doc, db=None):
    "This is a brute-force approach. We retrieve all the data."
    if db is None:
        from databroker import DataBroker as db

    ttime.sleep(0.1)  # Wait for data to be saved.
    if name != 'stop':
        return
    print("  Verifying that all the run's Documents were saved...")
    try:
        header = db[ensure_uid(doc['run_start'])]
    except Exception as e:
        print("  Verification Failed! Error: {0}".format(e))
        return
    else:
        print('\x1b[1A\u2713')  # print a checkmark on the previous line
    print("  Verifying that all externally-stored files are accessible...")
    try:
        list(db.get_events(header, fill=True))
    except Exception as e:
        print("  Verification Failed! Error: {0}".format(e))
    else:
        print('\x1b[1A\u2713')  # print a checkmark on the previous line
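
The callback above can also be invoked directly on a finished run, assuming stop_doc is that run's 'stop' document and db is a configured databroker instance (both names are illustrative):

# Only the 'stop' document triggers the verification work.
verify_files_saved('stop', stop_doc, db=db)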
Example No. 16
def verify_files_saved(name, doc, db=None):
    "This is a brute-force approach. We retrieve all the data."
    if db is None:
        from databroker import DataBroker as db

    ttime.sleep(0.1)  # Wait for data to be saved.
    if name != 'stop':
        return
    print("  Verifying that all the run's Documents were saved...")
    try:
        header = db[doc['run_start']]
    except Exception as e:
        print("  Verification Failed! Error: {0}".format(e))
        return
    else:
        print('\x1b[1A\u2713')  # print a checkmark on the previous line
    print("  Verifying that all externally-stored files are accessible...")
    try:
        list(db.get_events(header, fill=True))
    except Exception as e:
        print("  Verification Failed! Error: {0}".format(e))
    else:
        print('\x1b[1A\u2713')  # print a checkmark on the previous line
Example No. 17
def test_basic_usage():
    for i in range(5):
        insert_run_start(time=float(i), scan_id=i + 1,
                         owner='nedbrainard', beamline_id='example',
                         uid=str(uuid.uuid4()))
    header_1 = db[-1]

    header_ned = db(owner='nedbrainard')
    header_ned = db.find_headers(owner='nedbrainard')  # deprecated API
    header_null = db(owner='this owner does not exist')
    # smoke test
    db.fetch_events(header_1)
    db.fetch_events(header_ned)
    db.fetch_events(header_null)
    get_events(header_1)
    get_events(header_ned)
    get_events(header_null)
    get_table(header_1)
    get_table(header_ned)
    get_table(header_null)

    # get events for multiple headers
    get_events([header_1, header_ned])
Example No. 18
    def f(name, stop_doc):
        if name != 'stop':
            return
        uid = stop_doc['run_start']
        return db.process(db[uid], callback)
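
db.process streams every document of a header through a callback; a minimal sketch, assuming a databroker instance db with at least one saved run:

# Replay the most recent run through a trivial printing callback.
def printer(name, doc):
    print(name, doc.get('uid', ''))

db.process(db[-1], printer)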
Example No. 19
    def __iter__(self):
        if self.key:
            for event in db.fetch_events(self.header, fill=False):
                yield event['data'][self.key]
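
A usage sketch for the iterator above, assuming source is an instance of the enclosing class with its header and key attributes already populated (names are illustrative):

# Iterating the object yields the value of key from every event of the run.
values = list(source)
print(len(values), values[:5])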