Example #1
def run(run_start=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)
    rs = np.random.RandomState(5)
    point_det_data = rs.randn(num_exposures)

    # Create Event Descriptors
    data_keys1 = {'point_det': dict(source='PV:ES:PointDet', dtype='number')}
    data_keys2 = {
        'Tsam': dict(source='PV:ES:Tsam', dtype='number'),
        'Troom': dict(source='PV:ES:Troom', dtype='number')
    }
    ev_desc1_uid = insert_descriptor(run_start=run_start,
                                     data_keys=data_keys1,
                                     time=common.get_time(),
                                     uid=str(uuid.uuid4()))
    ev_desc2_uid = insert_descriptor(run_start=run_start,
                                     data_keys=data_keys2,
                                     time=common.get_time(),
                                     uid=str(uuid.uuid4()))
    print('event descriptor 1 uid = {0!s}'.format(ev_desc1_uid))
    print('event descriptor 2 uid = {0!s}'.format(ev_desc2_uid))
    # Create Events.
    events = []

    # Point Detector Events
    base_time = common.get_time()
    for i in range(num_exposures):
        time = float(i + 0.5 * rs.randn()) + base_time
        data = {'point_det': point_det_data[i]}
        timestamps = {'point_det': time}
        event_uid = insert_event(descriptor=ev_desc1_uid,
                                 seq_num=i,
                                 time=time,
                                 data=data,
                                 uid=str(uuid.uuid4()),
                                 timestamps=timestamps)
        event, = find_events(uid=event_uid)
        events.append(event)

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time) + base_time
        data = {'Tsam': temp, 'Troom': temp + 10}
        timestamps = {'Tsam': time, 'Troom': time}
        event_uid = insert_event(descriptor=ev_desc2_uid,
                                 time=time,
                                 data=data,
                                 seq_num=i,
                                 uid=str(uuid.uuid4()),
                                 timestamps=timestamps)
        event, = find_events(uid=event_uid)
        events.append(event)
    return events
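A minimal driver for the example above might look like the following sketch. It is an assumption, not part of the original snippet: it presumes the module-level constants (start, stop, step, points_per_step, deadband_size, num_exposures) are defined and that insert_run_start is imported alongside the other metadatastore helpers used here; the owner and beamline_id values are placeholders.

# Hedged driver sketch: create a RunStart, then generate Events against it.
run_start_uid = insert_run_start(time=common.get_time(), scan_id=1,
                                 owner='demo', beamline_id='example',
                                 uid=str(uuid.uuid4()))
events = run(run_start=run_start_uid)
print('inserted {0} events'.format(len(events)))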
Example #2
def run(run_start_uid=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)
    rs = np.random.RandomState(5)
    point_det_data = rs.randn(num_exposures) + np.arange(num_exposures)

    # Create Event Descriptors
    data_keys1 = {'point_det': dict(source='PV:ES:PointDet', dtype='number')}
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number')}
    ev_desc1_uid = insert_descriptor(run_start=run_start_uid,
                                     data_keys=data_keys1,
                                     time=common.get_time(),
                                     uid=str(uuid.uuid4()),
                                     name='primary')
    ev_desc2_uid = insert_descriptor(run_start=run_start_uid,
                                     data_keys=data_keys2,
                                     time=common.get_time(),
                                     uid=str(uuid.uuid4()),
                                     name='baseline')

    # Create Events.
    events = []

    # Point Detector Events
    base_time = common.get_time()
    for i in range(num_exposures):
        time = float(2 * i + 0.5 * rs.randn()) + base_time
        data = {'point_det': point_det_data[i]}
        timestamps = {'point_det': time}
        event_dict = dict(descriptor=ev_desc1_uid, seq_num=i,
                          time=time, data=data, timestamps=timestamps,
                          uid=str(uuid.uuid4()))
        event_uid = insert_event(**event_dict)
        # grab the actual event from metadatastore
        event, = find_events(uid=event_uid)
        events.append(event)
        assert event['data'] == event_dict['data']

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time) + base_time
        data = {'Tsam': temp}
        timestamps = {'Tsam': time}
        event_dict = dict(descriptor=ev_desc2_uid, time=time,
                          data=data, timestamps=timestamps, seq_num=i,
                          uid=str(uuid.uuid4()))
        event_uid = insert_event(**event_dict)
        event, = find_events(uid=event_uid)
        events.append(event)
        assert event['data'] == event_dict['data']

    return events
Example #3
def run(run_start=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)
    rs = np.random.RandomState(5)
    point_det_data = rs.randn(num_exposures)

    # Create Event Descriptors
    data_keys1 = {'point_det': dict(source='PV:ES:PointDet',
                                    dtype='number')}
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number'),
                  'Troom': dict(source='PV:ES:Troom', dtype='number')}
    ev_desc1_uid = insert_descriptor(run_start=run_start,
                                     data_keys=data_keys1,
                                     time=common.get_time(),
                                     uid=str(uuid.uuid4()))
    ev_desc2_uid = insert_descriptor(run_start=run_start,
                                     data_keys=data_keys2,
                                     time=common.get_time(),
                                     uid=str(uuid.uuid4()))
    print('event descriptor 1 uid = {0!s}'.format(ev_desc1_uid))
    print('event descriptor 2 uid = {0!s}'.format(ev_desc2_uid))
    # Create Events.
    events = []

    # Point Detector Events
    base_time = common.get_time()
    for i in range(num_exposures):
        time = float(i + 0.5 * rs.randn()) + base_time
        data = {'point_det': point_det_data[i]}
        timestamps = {'point_det': time}
        event_uid = insert_event(descriptor=ev_desc1_uid, seq_num=i, time=time,
                                 data=data, uid=str(uuid.uuid4()),
                                 timestamps=timestamps)
        event, = find_events(uid=event_uid)
        events.append(event)

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time) + base_time
        data = {'Tsam': temp,
                'Troom': temp + 10}
        timestamps = {'Tsam': time,
                      'Troom': time}
        event_uid = insert_event(descriptor=ev_desc2_uid, time=time,
                                 data=data, seq_num=i, uid=str(uuid.uuid4()),
                                 timestamps=timestamps)
        event, = find_events(uid=event_uid)
        events.append(event)
    return events
Example #4
def test_data_key():
    rs1_uid = insert_run_start(time=100.0, scan_id=1, owner="nedbrainard", beamline_id="example", uid=str(uuid.uuid4()))
    rs2_uid = insert_run_start(time=200.0, scan_id=2, owner="nedbrainard", beamline_id="example", uid=str(uuid.uuid4()))
    rs1, = find_run_starts(uid=rs1_uid)
    rs2, = find_run_starts(uid=rs2_uid)
    data_keys = {"fork": {"source": "_", "dtype": "number"}, "spoon": {"source": "_", "dtype": "number"}}
    insert_descriptor(run_start=rs1_uid, data_keys=data_keys, time=100.0, uid=str(uuid.uuid4()))
    insert_descriptor(run_start=rs2_uid, data_keys=data_keys, time=200.0, uid=str(uuid.uuid4()))
    result1 = db(data_key="fork")
    result2 = db(data_key="fork", start_time=150)
    assert len(result1) == 2
    assert len(result2) == 1
    actual = result2[0]["start"]["uid"]
    assert actual == str(rs2.uid)
Example #5
def run(run_start=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)

    # Create Event Descriptors
    data_keys = {'Tsam': dict(source='PV:ES:Tsam', dtype='number'),
                 'point_det': dict(source='PV:ES:point_det', dtype='number')}
    ev_desc = insert_descriptor(run_start=run_start,
                                data_keys=data_keys, time=0.,
                                uid=str(uuid.uuid4()))

    # Create Events.
    events = []

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time)
        point_det = np.random.randn()
        data = {'Tsam': temp, 'point_det': point_det}
        timestamps = {'Tsam': time, 'point_det': time}
        event_uid = insert_event(descriptor=ev_desc, time=time, data=data,
                                 seq_num=i, timestamps=timestamps,
                                 uid=str(uuid.uuid4()))
        event, = find_events(uid=event_uid)
        events.append(event)

    return events
Example #6
def _insert_descriptor(name, doc):
    """Rearrange the dict for unpacking it into the MDS API."""
    # Move dynamic keys into 'custom' for MDS API.
    # We should change this in MDS to save the time of copying here:
    doc = copy.deepcopy(doc)
    for key in list(doc):
        if key not in known_descriptor_keys:
            doc.setdefault('custom', {})[key] = doc.pop(key)
    return mds.insert_descriptor(**doc)
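A hedged illustration of what this helper does, assuming known_descriptor_keys holds the standard descriptor fields ('run_start', 'data_keys', 'time', 'uid') and that run_start_uid refers to an existing RunStart; the extra 'plan_name' field is purely hypothetical.

# Any field not in known_descriptor_keys is tucked under 'custom'.
doc = {'run_start': run_start_uid,
       'data_keys': {'Tsam': {'source': 'PV:ES:Tsam', 'dtype': 'number'}},
       'time': 0.,
       'uid': str(uuid.uuid4()),
       'plan_name': 'scan'}  # hypothetical non-standard key
_insert_descriptor('primary', doc)
# Internally the dict is rearranged to roughly
# {'run_start': ..., 'data_keys': ..., 'time': ..., 'uid': ...,
#  'custom': {'plan_name': 'scan'}}
# before being unpacked into mds.insert_descriptor(**doc).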
Example #7
def test_data_key():
    rs1_uid = insert_run_start(time=100., scan_id=1,
                               owner='nedbrainard', beamline_id='example',
                               uid=str(uuid.uuid4()))
    rs2_uid = insert_run_start(time=200., scan_id=2,
                               owner='nedbrainard', beamline_id='example',
                               uid=str(uuid.uuid4()))
    rs1, = find_run_starts(uid=rs1_uid)
    rs2, = find_run_starts(uid=rs2_uid)
    data_keys = {'fork': {'source': '_', 'dtype': 'number'},
                 'spoon': {'source': '_', 'dtype': 'number'}}
    insert_descriptor(run_start=rs1_uid, data_keys=data_keys,
                      time=100.,
                      uid=str(uuid.uuid4()))
    insert_descriptor(run_start=rs2_uid, data_keys=data_keys, time=200.,
                      uid=str(uuid.uuid4()))
    result1 = db(data_key='fork')
    result2 = db(data_key='fork', start_time=150)
    assert len(result1) == 2
    assert len(result2) == 1
    actual = result2[0]['start']['uid']
    assert actual == str(rs2.uid)
Example #8
def hdf_data_io():
    """
    Save data to the database and check it when it is retrieved.
    """
    run_start_uid = insert_run_start(time=0., scan_id=1, beamline_id='csx',
                                     uid=str(uuid.uuid4()))

    # data keys entry
    data_keys = {'v_pos': dict(source='MCA:pos_y', dtype='number'),
                 'h_pos': dict(source='MCA:pos_x', dtype='number'),
                 'xrf_spectrum': dict(source='MCA:spectrum', dtype='array',
                                      # shape=(5,),
                                      external='FILESTORE:')}

    # save the event descriptor
    descriptor_uid = insert_descriptor(
        run_start=run_start_uid, data_keys=data_keys, time=0.,
        uid=str(uuid.uuid4()))

    # number of positions to record, basically along a horizontal line
    num = 5

    events = []
    for i in range(num):
        v_pos = 0
        h_pos = i

        spectrum_uid = get_data(v_pos, h_pos)

        # Put in actual ndarray data, as broker would do.
        data1 = {'xrf_spectrum': spectrum_uid,
                 'v_pos': v_pos,
                 'h_pos': h_pos}
        timestamps1 = {k: noisy(i) for k in data1}

        event_uid = insert_event(descriptor=descriptor_uid, seq_num=i,
                                 time=noisy(i), data=data1,
                                 uid=str(uuid.uuid4()),
                                 timestamps=timestamps1)
        event, = find_events(uid=event_uid)
        # check that each saved event can be retrieved
        events.append(event)
    return events
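For orientation, a hedged usage sketch (assuming get_data, noisy, and a running metadatastore, as in the rest of this file): the returned events carry FILESTORE datum ids under 'xrf_spectrum' rather than the spectra themselves.

events = hdf_data_io()
for ev in events:
    # 'xrf_spectrum' holds a filestore reference (datum id), not an array
    print(ev['data']['v_pos'], ev['data']['h_pos'], ev['data']['xrf_spectrum'])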
Example #9
def run(run_start=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this " "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)

    # Create Event Descriptors
    data_keys = {
        "Tsam": dict(source="PV:ES:Tsam", dtype="number"),
        "point_det": dict(source="PV:ES:point_det", dtype="number"),
    }
    conf = {
        "point_det": {
            "data_keys": {"exposure_time": {"source": "PS:ES:point_det_exp"}},
            "data": {"exposure_time": 5},
            "timestamps": {"exposure_time": 0.0},
        }
    }
    ev_desc = insert_descriptor(
        run_start=run_start, data_keys=data_keys, time=0.0, uid=str(uuid.uuid4()), configuration=conf
    )

    # Create Events.
    events = []

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time)
        point_det = np.random.randn()
        data = {"Tsam": temp, "point_det": point_det}
        timestamps = {"Tsam": time, "point_det": time}
        event_uid = insert_event(
            descriptor=ev_desc, time=time, data=data, seq_num=i, timestamps=timestamps, uid=str(uuid.uuid4())
        )
        event, = find_events(uid=event_uid)
        events.append(event)

    return events
Example #10
def hdf_data_io(rows, cols):
    """
    Save data to the database and check it when it is retrieved.
    """
    run_start_uid = insert_run_start(time=0., scan_id=1, beamline_id='csx',
                                     uid=str(uuid.uuid4()))

    # data keys entry
    data_keys = {'v_pos': dict(source='MCA:pos_y', dtype='number'),
                 'h_pos': dict(source='MCA:pos_x', dtype='number'),
                 'xrf_spectrum': dict(source='MCA:spectrum', dtype='array',
                                      shape=(20, 1, 10),
                                      external='FILESTORE:')}

    # save the event descriptor
    descriptor_uid = insert_descriptor(
        run_start=run_start_uid, data_keys=data_keys, time=0.,
        uid=str(uuid.uuid4()))

    events = []
    for i, (v_pos, h_pos) in enumerate(product(range(rows), range(cols))):

        spectrum_uid = get_data(v_pos, h_pos, rows, cols)

        # Put in actual ndarray data, as broker would do.
        data1 = {'xrf_spectrum': spectrum_uid,
                 'v_pos': v_pos,
                 'h_pos': h_pos}
        timestamps1 = {k: noisy(i) for k in data1}

        event_uid = insert_event(descriptor=descriptor_uid, seq_num=i,
                                 time=noisy(i), data=data1,
                                 uid=str(uuid.uuid4()),
                                 timestamps=timestamps1)
        events.append(event_uid)

    return run_start_uid, events
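Unlike most of the examples above, this function returns the raw Event uids instead of round-tripping them through find_events. A hedged usage sketch, assuming the same metadatastore helpers are in scope:

run_start_uid, event_uids = hdf_data_io(rows=4, cols=5)
events = []
for u in event_uids:
    # fetch each inserted Event back, as the other examples do inline
    event, = find_events(uid=u)
    events.append(event)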
Example #11
try:
    last_hdr = next(find_last())
    scan_id = int(last_hdr.scan_id) + 1
except (IndexError, TypeError):
    scan_id = 1

custom = {}
# Create a BeginRunEvent that serves as entry point for a run
run_start = insert_run_start(scan_id=scan_id,
                             beamline_id='csx',
                             time=time.time(),
                             custom=custom,
                             uid=str(uuid.uuid4()))

# Create an EventDescriptor that indicates the data
# keys and serves as header for set of Event(s)
descriptor = insert_descriptor(data_keys=data_keys,
                               time=time.time(),
                               run_start=run_start,
                               uid=str(uuid.uuid4()))
func = np.cos
num = 1000
start = 0
stop = 10
sleep_time = .1
for idx, i in enumerate(np.linspace(start, stop, num)):
    data = {
        'linear_motor': i,
        'Tsam': i + 5,
        'scalar_detector': func(i) + np.random.randn() / 100
    }

    ts = {k: time.time() for k in data}

    insert_event(descriptor=descriptor, seq_num=idx, time=time.time(),
                 data=data, timestamps=ts, uid=str(uuid.uuid4()))

def run(run_start_uid=None, sleep=0):
    frame_generator = frame_generators.brownian(img_size, step_scale=.5,
                                                I_fluc_function=I_func_gaus,
                                                step_fluc_function=scale_fluc)
    # seed the random number generator so the data are deterministic
    rs = np.random.RandomState(5)

    # set up the data keys entry
    data_keys1 = {'linear_motor': dict(source='PV:ES:sam_x', dtype='number'),
                  'img': dict(source='CCD', shape=(5, 5), dtype='array',
                              external='FILESTORE:'),
                  'total_img_sum': dict(source='CCD:sum', dtype='number'),
                  'img_x_max': dict(source='CCD:xmax', dtype='number'),
                  'img_y_max': dict(source='CCD:ymax', dtype='number'),
                  'img_sum_x': dict(source='CCD:xsum', dtype='array',
                                    shape=(5,), external='FILESTORE:'),
                  'img_sum_y': dict(source='CCD:ysum', dtype='array',
                                    shape=(5,), external='FILESTORE:')
                  }
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number')}

    # save the first event descriptor
    descriptor1_uid = insert_descriptor(
        run_start=run_start_uid, data_keys=data_keys1, time=0.,
        uid=str(uuid.uuid4()))

    descriptor2_uid = insert_descriptor(
        run_start=run_start_uid, data_keys=data_keys2, time=0.,
        uid=str(uuid.uuid4()))

    events = []
    for idx1, i in enumerate(range(num1)):
        img = next(frame_generator)
        img_sum = float(img.sum())
        img_sum_x = img.sum(axis=0)
        img_sum_y = img.sum(axis=1)
        img_x_max = float(img_sum_x.argmax())
        img_y_max = float(img_sum_y.argmax())

        fsid_img = save_ndarray(img)
        fsid_x = save_ndarray(img_sum_x)
        fsid_y = save_ndarray(img_sum_y)

        # Put in actual ndarray data, as broker would do.
        data1 = {'linear_motor': i,
                 'total_img_sum': img_sum,
                 'img': fsid_img,
                 'img_sum_x': fsid_x,
                 'img_sum_y': fsid_y,
                 'img_x_max': img_x_max,
                 'img_y_max': img_y_max
                 }
        timestamps1 = {k: noisy(i) for k in data1}

        event_uid = insert_event(descriptor=descriptor1_uid, seq_num=idx1,
                                 time=noisy(i), data=data1,
                                 timestamps=timestamps1,
                                 uid=str(uuid.uuid4()))
        event, = find_events(uid=event_uid)
        events.append(event)
        for idx2, i2 in enumerate(range(num2)):
            time = noisy(i/num2)
            data2 = {'Tsam': idx1 + rs.randn()}
            timestamps2 = {'Tsam': time}
            event_uid = insert_event(descriptor=descriptor2_uid,
                                     seq_num=idx2+idx1, time=time, data=data2,
                                     uid=str(uuid.uuid4()),
                                     timestamps=timestamps2)
            event, = find_events(uid=event_uid)
            events.append(event)
        ttime.sleep(sleep)

    return events

try:
    last_hdr = next(find_last())
    scan_id = int(last_hdr.scan_id)+1
except (IndexError, TypeError):
    scan_id = 1

# Create a BeginRunEvent that serves as entry point for a run
run_start = insert_run_start(scan_id=scan_id, beamline_id='csx',
                             time=time.time(),
                             uid=str(uuid.uuid4()), function='cos')

# Create an EventDescriptor that indicates the data
# keys and serves as header for set of Event(s)
descriptor = insert_descriptor(data_keys=data_keys, time=time.time(),
                               run_start=run_start, uid=str(uuid.uuid4()))
func = np.cos
num = 1000
start = 0
stop = 10
sleep_time = .1
for idx, i in enumerate(np.linspace(start, stop, num)):
    data = {'linear_motor': i,
            'Tsam': i + 5,
            'scalar_detector': func(i) + np.random.randn() / 100}

    ts = {k: time.time() for k in data}

    e = insert_event(descriptor=descriptor, seq_num=idx,
                     time=time.time(), data=data,
                     timestamps=ts, uid=str(uuid.uuid4()))