def run(run_start=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)
    rs = np.random.RandomState(5)
    point_det_data = rs.randn(num_exposures)

    # Create Event Descriptors
    data_keys1 = {'point_det': dict(source='PV:ES:PointDet',
                                    dtype='number')}
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number'),
                  'Troom': dict(source='PV:ES:Troom', dtype='number')}
    ev_desc1_uid = insert_event_descriptor(run_start=run_start,
                                           data_keys=data_keys1, time=common.get_time(),
                                           uid=str(uuid.uuid4()))
    ev_desc2_uid = insert_event_descriptor(run_start=run_start,
                                           data_keys=data_keys2, time=common.get_time(),
                                           uid=str(uuid.uuid4()))
    print('event descriptor 1 uid = %s' % ev_desc1_uid)
    print('event descriptor 2 uid = %s' % ev_desc2_uid)
    # Create Events.
    events = []

    # Point Detector Events
    base_time = common.get_time()
    for i in range(num_exposures):
        time = float(i + 0.5 * rs.randn()) + base_time
        data = {'point_det': point_det_data[i]}
        timestamps = {'point_det': time}
        event_uid = insert_event(descriptor=ev_desc1_uid, seq_num=i, time=time,
                                 data=data, uid=str(uuid.uuid4()),
                                 timestamps=timestamps)
        event, = find_events(uid=event_uid)
        events.append(event)

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time) + base_time
        data = {'Tsam': temp,
                'Troom': temp + 10}
        timestamps = {'Tsam': time,
                      'Troom': time}
        event_uid = insert_event(descriptor=ev_desc2_uid, time=time,
                                 data=data, seq_num=i, uid=str(uuid.uuid4()),
                                 timestamps=timestamps)
        event, = find_events(uid=event_uid)
        events.append(event)
    return events
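
For context, run() above expects the uid of an existing RunStart document. A minimal driver, sketched here on the assumption that the same metadatastore helpers used in the later examples (insert_beamline_config, insert_run_start) are importable alongside common and uuid; scan_id, owner, and beamline_id are illustrative values only:

# Hypothetical driver for the run() example above; all literal values are
# placeholders, not taken from the original snippet.
blc_uid = insert_beamline_config({}, time=common.get_time())
run_start_uid = insert_run_start(time=common.get_time(), scan_id=1,
                                 owner='demo', beamline_id='example',
                                 beamline_config=blc_uid,
                                 uid=str(uuid.uuid4()))
events = run(run_start=run_start_uid)
print('inserted %d events' % len(events))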
Example 2
def run(run_start=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)
    rs = np.random.RandomState(5)
    point_det_data = rs.randn(num_exposures)

    # Create Event Descriptors
    data_keys1 = {'point_det': dict(source='PV:ES:PointDet', dtype='number')}
    data_keys2 = {
        'Tsam': dict(source='PV:ES:Tsam', dtype='number'),
        'Troom': dict(source='PV:ES:Troom', dtype='number')
    }
    ev_desc1 = insert_event_descriptor(run_start=run_start,
                                       data_keys=data_keys1,
                                       time=0.,
                                       uid=str(uuid.uuid4()))
    ev_desc2 = insert_event_descriptor(run_start=run_start,
                                       data_keys=data_keys2,
                                       time=0.,
                                       uid=str(uuid.uuid4()))

    # Create Events.
    events = []

    # Point Detector Events
    for i in range(num_exposures):
        time = float(i + 0.01 * rs.randn())
        data = {'point_det': (point_det_data[i], time)}
        event = insert_event(event_descriptor=ev_desc1,
                             seq_num=i,
                             time=time,
                             data=data,
                             uid=str(uuid.uuid4()))
        events.append(event)

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time)
        data = {'Tsam': (temp, time), 'Troom': (temp + 10, time)}
        event = insert_event(event_descriptor=ev_desc2,
                             time=time,
                             data=data,
                             seq_num=i,
                             uid=str(uuid.uuid4()))
        events.append(event)

    return events
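
Note that this version passes each reading as a (value, timestamp) pair inside data, while Example 1 above splits the same information into separate data and timestamps dicts. A small helper for converting the paired form into the split form, shown purely as an illustration and not part of either example:

def split_value_timestamp_pairs(paired):
    """Split {'key': (value, timestamp)} into separate data/timestamps dicts."""
    data = {key: value for key, (value, _) in paired.items()}
    timestamps = {key: ts for key, (_, ts) in paired.items()}
    return data, timestamps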
Example 3
def run(run_start_uid=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)
    rs = np.random.RandomState(5)
    point_det_data = rs.randn(num_exposures) + np.arange(num_exposures)

    # Create Event Descriptors
    data_keys1 = {'point_det': dict(source='PV:ES:PointDet', dtype='number')}
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number')}
    ev_desc1_uid = insert_event_descriptor(run_start=run_start_uid,
                                           data_keys=data_keys1, time=common.get_time())
    ev_desc2_uid = insert_event_descriptor(run_start=run_start_uid,
                                           data_keys=data_keys2, time=common.get_time())

    # Create Events.
    events = []

    # Point Detector Events
    base_time = common.get_time()
    for i in range(num_exposures):
        time = float(2 * i + 0.5 * rs.randn()) + base_time
        data = {'point_det': point_det_data[i]}
        timestamps = {'point_det': time}
        event_dict = dict(descriptor=ev_desc1_uid, seq_num=i,
                          time=time, data=data, timestamps=timestamps)
        event_uid = insert_event(**event_dict)
        # grab the actual event from metadatastore
        event, = find_events(uid=event_uid)
        events.append(event)

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time) + base_time 
        data = {'Tsam': temp}
        timestamps = {'Tsam': time}
        event_dict = dict(descriptor=ev_desc2_uid, time=time,
                          data=data, timestamps=timestamps, seq_num=i)
        event_uid = insert_event(**event_dict)
        event, = find_events(uid=event_uid)
        events.append(event)

    # TODO: insert a run stop if run_start_uid is not None

    return events
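
The TODO above could be addressed with insert_run_stop, whose call signature appears in the migration snippets below (Examples 9 and 11). A hedged sketch of what could be added just before the final return events; treating the last event time as the stop time, the 'success' exit status, and attribute-style access to event.time are all assumptions:

    # Possible resolution of the TODO; values mirror the migration snippets.
    if run_start_uid is not None:
        stop_time = max(event.time for event in events)
        insert_run_stop(run_start=run_start_uid, time=float(stop_time),
                        exit_status='success', reason=None,
                        uid=str(uuid.uuid4()))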
Example 4
def run(run_start=None, sleep=0):
    if sleep != 0:
        raise NotImplementedError("A sleep time is not implemented for this "
                                  "example.")
    # Make the data
    ramp = common.stepped_ramp(start, stop, step, points_per_step)
    deadbanded_ramp = common.apply_deadband(ramp, deadband_size)

    # Create Event Descriptors
    data_keys = {'Tsam': dict(source='PV:ES:Tsam', dtype='number'),
                 'point_det': dict(source='PV:ES:point_det', dtype='number')}
    ev_desc = insert_event_descriptor(run_start=run_start,
                                      data_keys=data_keys, time=0.)

    # Create Events.
    events = []

    # Temperature Events
    for i, (time, temp) in enumerate(zip(*deadbanded_ramp)):
        time = float(time)
        point_det = np.random.randn()
        data = {'Tsam': temp, 'point_det': point_det}
        timestamps = {'Tsam': time, 'point_det': time}
        event_uid = insert_event(descriptor=ev_desc, time=time, data=data,
                                 seq_num=i, timestamps=timestamps)
        event, = find_events(uid=event_uid)
        events.append(event)

    return events
Example 5
def test_data_key():
    rs1_uid = insert_run_start(time=100., scan_id=1,
                               owner='nedbrainard', beamline_id='example',
                               beamline_config=insert_beamline_config(
                                   {}, time=0.))
    rs2_uid = insert_run_start(time=200., scan_id=2,
                               owner='nedbrainard', beamline_id='example',
                               beamline_config=insert_beamline_config(
                                   {}, time=0.))
    rs1, = find_run_starts(uid=rs1_uid)
    rs2, = find_run_starts(uid=rs2_uid)
    data_keys = {'fork': {'source': '_', 'dtype': 'number'},
                 'spoon': {'source': '_', 'dtype': 'number'}}
    evd1_uid = insert_event_descriptor(run_start=rs1_uid, data_keys=data_keys,
                                       time=100.)
    insert_event_descriptor(run_start=rs2_uid, data_keys=data_keys, time=200.)
    result1 = db.find_headers(data_key='fork')
    result2 = db.find_headers(data_key='fork', start_time=150)
    assert_equal(len(result1), 2)
    assert_equal(len(result2), 1)
    actual = result2[0].run_start_uid
    assert_equal(actual, str(rs2.uid))
Example 6
def hdf_data_io():
    """
    Save data to db and run test when data is retrieved.
    """
    blc = insert_beamline_config({'cfg1': 1}, 0.0)
    run_start_uid = insert_run_start(time=0., scan_id=1, beamline_id='csx',
                                     uid=str(uuid.uuid4()),
                                     beamline_config=blc)

    # data keys entry
    data_keys = {'x_pos': dict(source='MCA:pos_x', dtype='number'),
                 'y_pos': dict(source='MCA:pos_y', dtype='number'),
                 'xrf_spectrum': dict(source='MCA:spectrum', dtype='array',
                                      #shape=(5,),
                                      external='FILESTORE:')}

    # save the event descriptor
    descriptor_uid = insert_event_descriptor(
        run_start=run_start_uid, data_keys=data_keys, time=0.,
        uid=str(uuid.uuid4()))

    # number of positions to record, basically along a horizontal line
    num = 5

    events = []
    for i in range(num):
        v_pos = 0
        h_pos = i

        spectrum_uid = get_data(v_pos, h_pos)

        # Put in actual ndarray data, as broker would do.
        # Note: keys here must match the descriptor's data_keys above;
        # mapping h_pos -> x_pos and v_pos -> y_pos is assumed.
        data1 = {'xrf_spectrum': spectrum_uid,
                 'x_pos': h_pos,
                 'y_pos': v_pos}
        timestamps1 = {k: noisy(i) for k in data1}

        event_uid = insert_event(descriptor=descriptor_uid, seq_num=i,
                                 time=noisy(i), data=data1,
                                 uid=str(uuid.uuid4()),
                                 timestamps=timestamps1)
        event, = find_events(uid=event_uid)
        # test on retrieve data for all data sets
        events.append(event)
    return events
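
Because 'xrf_spectrum' is declared with external='FILESTORE:', the value stored in each event is a filestore reference rather than the spectrum itself. Example 8 below resolves such references with fill_event; a short usage sketch, assuming fill_event swaps the reference for the actual array in place:

# Hypothetical retrieval pass over the events returned above.
for ev in hdf_data_io():
    fill_event(ev)  # assumed to replace the 'xrf_spectrum' reference in place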
Example 7
    def setup(self, n):
        self.obj = range(n)
        self.bcfg = mdsc.insert_beamline_config(time=1315315135.5135,
                                                config_params={'param1': 1})
        self.data_keys = {'linear_motor': {'source': 'PV:pv1',
                                           'shape': None,
                                           'dtype': 'number'},
                          'scalar_detector': {'source': 'PV:pv2',
                                              'shape': None,
                                              'dtype': 'number'},
                          'Tsam': {'source': 'PV:pv3',
                                   'dtype': 'number',
                                   'shape': None}}
        self.custom = {'custom_key': 'value'}
        self.scan_id = 1903
        self.run_start = mdsc.insert_run_start(scan_id=int(self.scan_id),
                                               owner='benchmark_script',
                                               beamline_id='benchmark_b',
                                               time=1315315135.5135,
                                               beamline_config=self.bcfg,
                                               custom=self.custom)
        self.e_desc = mdsc.insert_event_descriptor(data_keys=self.data_keys,
                                                   time=1315315135.5135,
                                                   run_start=self.run_start)
        # Compose event data list for 1mil events in setup.
        # See params in event insert test to see how many of these are used
        func = np.cos
        num = EVENT_COUNT
        start = 0
        stop = 10
        sleep_time = .1
        self.data = list()
        for idx, i in enumerate(np.linspace(start, stop, num)):
            self.data.append({'linear_motor': [i, 1315315135.5135],
                              'Tsam': [i + 5, 1315315135.5135],
                              'scalar_detector': [func(i) +
                                                  np.random.randn() / 100,
                                                  1315315135.5135]})
Example 8
def run(run_start=None, sleep=0):
    # Make the data
    rs = np.random.RandomState(5)

    # set up the data keys entry
    data_keys1 = {'linear_motor': dict(source='PV:ES:sam_x', dtype='number'),
                  'img': dict(source='CCD', shape=(5, 5), dtype='array',
                              external='FILESTORE:'),
                  'total_img_sum': dict(source='CCD:sum', dtype='number'),
                  'img_x_max': dict(source='CCD:xmax', dtype='number'),
                  'img_y_max': dict(source='CCD:ymax', dtype='number'),
                  'img_sum_x': dict(source='CCD:xsum', dtype='array',
                                    shape=(5,), external='FILESTORE:'),
                  'img_sum_y': dict(source='CCD:ysum', dtype='array',
                                    shape=(5,), external='FILESTORE:')
                  }
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number')}

    # save the first event descriptor
    e_desc1 = insert_event_descriptor(
        run_start=run_start, data_keys=data_keys1, time=0.,
        uid=str(uuid.uuid4()))

    e_desc2 = insert_event_descriptor(
        run_start=run_start, data_keys=data_keys2, time=0.,
        uid=str(uuid.uuid4()))

    # number of motor positions to fake
    num1 = 20
    # number of temperatures to record per motor position
    num2 = 10

    events = []
    for idx1, i in enumerate(range(num1)):
        img = next(frame_generator)
        img_sum = float(img.sum())
        img_sum_x = img.sum(axis=0)
        img_sum_y = img.sum(axis=1)
        img_x_max = float(img_sum_x.argmax())
        img_y_max = float(img_sum_y.argmax())

        fsid_img = save_ndarray(img)
        fsid_x = save_ndarray(img_sum_x)
        fsid_y = save_ndarray(img_sum_y)

        # Put in actual ndarray data, as broker would do.
        data1 = {'linear_motor': (i, noisy(i)),
                 'total_img_sum': (img_sum, noisy(i)),
                 'img': (fsid_img, noisy(i)),
                 'img_sum_x': (fsid_x, noisy(i)),
                 'img_sum_y': (fsid_y, noisy(i)),
                 'img_x_max': (img_x_max, noisy(i)),
                 'img_y_max': (img_y_max, noisy(i))
                 }

        event = insert_event(event_descriptor=e_desc1, seq_num=idx1,
                             time=noisy(i), data=data1, uid=str(uuid.uuid4()))
        fill_event(event)
        events.append(event)
        for idx2, i2 in enumerate(range(num2)):
            time = noisy(i/num2)
            data2 = {'Tsam': (idx1 + np.random.randn()/100, time)}
            event = insert_event(event_descriptor=e_desc2, seq_num=idx2+idx1,
                                 time=time, data=data2, uid=str(uuid.uuid4()))
            events.append(event)
        ttime.sleep(sleep)

    return events
Example 9
begin_runs = db.begin_run_event.find()
for br in begin_runs:
    the_run_start = insert_run_start(
        time=br["time"],
        beamline_id=br["beamline_id"],
        beamline_config=the_bc,
        owner=br["owner"],
        scan_id=br["scan_id"],
        custom=br.get("custom", {}),
        uid=br["uid"],
    )
    event_descs = db.event_descriptor.find({"begin_run_id": br["_id"]})
    max_time = 0.0
    for e_desc in event_descs:
        the_e_desc = insert_event_descriptor(
            run_start=the_run_start, data_keys=e_desc["data_keys"], time=e_desc["time"], uid=e_desc["uid"]
        )
        events = db.event.find({"descriptor_id": e_desc["_id"]})
        for ev in events:
            if ev["time"] > max_time:
                max_time = ev["time"]
            insert_event(
                event_descriptor=the_e_desc, time=ev["time"], data=ev["data"], seq_num=ev["seq_num"], uid=ev["uid"]
            )
    insert_run_stop(run_start=the_run_start, time=float(max_time), exit_status="success", reason=None, uid=None)

run_start_mapping = dict()
run_starts = db.run_start.find()
for rs in run_starts:
    time = rs.pop("time")
    beamline_id = rs.pop("beamline_id")
Example 10
    def _start_scan(self, run_start=None, detectors=None,
                    data=None, positioners=None, **kwargs):

        dets = detectors
        triggers = [det for det in dets if isinstance(det, Detector)]

        # creation of the event descriptor should be delayed until the first
        # event comes in. Set it to None for now
        event_descriptor = None

        # provide header for formatted list of positioners and detectors in
        # INFO channel
        names = list()
        for pos in positioners + dets:
            names.extend(pos.describe().keys())

        self.logger.info(self._demunge_names(names))
        seq_num = 0
        while self._scan_state is True:
            self.logger.debug(
                'self._scan_state is True in self._start_scan')
            posvals = self._move_positioners(positioners=positioners, **kwargs)
            self.logger.debug('moved positioners')
            # if we're done iterating over positions, get outta Dodge
            if posvals is None:
                break

            # Trigger detector acquisition
            acq_status = [trig.acquire() for trig in triggers]

            while any([not stat.done for stat in acq_status]):
                time.sleep(0.05)

            time.sleep(0.05)
            # Read detector values
            tmp_detvals = {}
            for det in dets + positioners:
                tmp_detvals.update(det.read())

            detvals = mds.format_events(tmp_detvals)

            # pass data onto Demuxer for distribution
            self.logger.info(self._demunge_values(detvals, names))
            # grab the current time as a timestamp that describes when the
            # event data was bundled together
            bundle_time = time.time()
            # actually insert the event into metadataStore
            try:
                self.logger.debug(
                    'inserting event %d------------------', seq_num)
                event = mds.insert_event(event_descriptor=event_descriptor,
                                         time=bundle_time, data=detvals,
                                         seq_num=seq_num)
            except mds.EventDescriptorIsNoneError:
                # the time when the event descriptor was created
                self.logger.debug(
                    'event_descriptor has not been created. '
                    'creating it now...')
                evdesc_creation_time = time.time()
                data_key_info = _get_info(
                    positioners=positioners,
                    detectors=dets, data=detvals)

                event_descriptor = mds.insert_event_descriptor(
                    run_start=run_start, time=evdesc_creation_time,
                    data_keys=mds.format_data_keys(data_key_info))
                self.logger.debug(
                    'event_descriptor: %s', vars(event_descriptor))
                # insert the event again. this time it better damn well work
                self.logger.debug(
                    'inserting event %d------------------', seq_num)
                event = mds.insert_event(event_descriptor=event_descriptor,
                                         time=bundle_time, data=detvals,
                                         seq_num=seq_num)
            self.logger.debug('event %d--------', seq_num)
            self.logger.debug('%s', vars(event))

            seq_num += 1
            # update the 'data' object from detvals dict
            for k, v in detvals.items():
                data[k].append(v)

            if not positioners:
                break

        self._scan_state = False
        return
Example 11
    beamline_cfg_mapping[bc['_id']] = the_bc

begin_runs = db.begin_run_event.find()
for br in begin_runs:
    the_run_start = insert_run_start(time=br['time'],
                                     beamline_id=br['beamline_id'],
                                     beamline_config=the_bc,
                                     owner=br['owner'],
                                     scan_id=br['scan_id'],
                                     custom=br.get('custom', {}),
                                     uid=br['uid'])
    event_descs = db.event_descriptor.find({'begin_run_id': br['_id']})
    max_time = 0.0
    for e_desc in event_descs:
        the_e_desc = insert_event_descriptor(run_start=the_run_start,
                                             data_keys=e_desc['data_keys'],
                                             time=e_desc['time'],
                                             uid=e_desc['uid'])
        events = db.event.find({'descriptor_id': e_desc['_id']})
        for ev in events:
            if ev['time'] > max_time:
                max_time = ev['time']
            insert_event(event_descriptor=the_e_desc,
                         time=ev['time'],
                         data=ev['data'],
                         seq_num=ev['seq_num'],
                         uid=ev['uid'])
    insert_run_stop(run_start=the_run_start,
                    time=float(max_time),
                    exit_status='success',
                    reason=None,
                    uid=None)
Example 12
    def time_single_descriptor(self, n):
        for _ in self.obj:
            mdsc.insert_event_descriptor(data_keys=self.data_keys,
                                         time=1315315135.5135,
                                         run_start=self.run_start)
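
The setup() in Example 7 pre-builds self.data as a list of {'key': [value, timestamp]} dicts, the same layout that Example 13 below passes straight to insert_event. A companion timing method, sketched by analogy and not part of the original benchmark; mdsc.insert_event and its keyword arguments are assumed from Example 13's usage:

    def time_insert_events(self, n):
        # Insert the first n pre-built events; the fixed timestamp mirrors
        # the one used in setup() and is a placeholder.
        for seq_num, data in enumerate(self.data[:n]):
            mdsc.insert_event(event_descriptor=self.e_desc, seq_num=seq_num,
                              time=1315315135.5135, data=data)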
Example 13
    "Tsam": {"source": "PV:pv3", "dtype": "number", "shape": None},
}

try:
    last_hdr = next(find_last())
    scan_id = int(last_hdr.scan_id) + 1
except (IndexError, TypeError):
    scan_id = 1

custom = {"plotx": "linear_motor", "ploty": "scalar_detector"}
# Create a BeginRunEvent that serves as entry point for a run
rs = insert_run_start(scan_id=scan_id, beamline_id="csx", time=time.time(), beamline_config=b_config, custom=custom)

# Create an EventDescriptor that indicates the data
# keys and serves as header for set of Event(s)
e_desc = insert_event_descriptor(data_keys=data_keys, time=time.time(), run_start=rs)
func = np.cos
num = 1000
start = 0
stop = 10
sleep_time = 0.1
for idx, i in enumerate(np.linspace(start, stop, num)):
    data = {
        "linear_motor": [i, time.time()],
        "Tsam": [i + 5, time.time()],
        "scalar_detector": [func(i) + np.random.randn() / 100, time.time()],
    }
    e = insert_event(event_descriptor=e_desc, seq_num=idx, time=time.time(), data=data)
last_run = next(find_last())
try:
    if str(last_run.id) != str(rs.id):
Example 14
def run(run_start_uid=None, sleep=0):
    frame_generator = frame_generators.brownian(img_size, step_scale=.5,
                                                I_fluc_function=I_func_gaus,
                                                step_fluc_function=scale_fluc)
    # Make the data
    rs = np.random.RandomState(5)

    # set up the data keys entry
    data_keys1 = {'linear_motor': dict(source='PV:ES:sam_x', dtype='number'),
                  'img': dict(source='CCD', shape=(5, 5), dtype='array',
                              external='FILESTORE:'),
                  'total_img_sum': dict(source='CCD:sum', dtype='number'),
                  'img_x_max': dict(source='CCD:xmax', dtype='number'),
                  'img_y_max': dict(source='CCD:ymax', dtype='number'),
                  'img_sum_x': dict(source='CCD:xsum', dtype='array',
                                    shape=(5,), external='FILESTORE:'),
                  'img_sum_y': dict(source='CCD:ysum', dtype='array',
                                    shape=(5,), external='FILESTORE:')
                  }
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number')}

    # save the first event descriptor
    descriptor1_uid = insert_event_descriptor(
        run_start=run_start_uid, data_keys=data_keys1, time=0.,
        uid=str(uuid.uuid4()))

    descriptor2_uid = insert_event_descriptor(
        run_start=run_start_uid, data_keys=data_keys2, time=0.,
        uid=str(uuid.uuid4()))

    events = []
    for idx1, i in enumerate(range(num1)):
        img = next(frame_generator)
        img_sum = float(img.sum())
        img_sum_x = img.sum(axis=0)
        img_sum_y = img.sum(axis=1)
        img_x_max = float(img_sum_x.argmax())
        img_y_max = float(img_sum_y.argmax())

        fsid_img = save_ndarray(img)
        fsid_x = save_ndarray(img_sum_x)
        fsid_y = save_ndarray(img_sum_y)

        # Put in actual ndarray data, as broker would do.
        data1 = {'linear_motor': i,
                 'total_img_sum': img_sum,
                 'img': fsid_img,
                 'img_sum_x': fsid_x,
                 'img_sum_y': fsid_y,
                 'img_x_max': img_x_max,
                 'img_y_max': img_y_max
                 }
        timestamps1 = {k: noisy(i) for k in data1}

        event_uid = insert_event(descriptor=descriptor1_uid, seq_num=idx1,
                                 time=noisy(i), data=data1,
                                 timestamps=timestamps1,
                                 uid=str(uuid.uuid4()))
        event, = find_events(uid=event_uid)
        events.append(event)
        for idx2, i2 in enumerate(range(num2)):
            time = noisy(i/num2)
            data2 = {'Tsam': idx1 + np.random.randn()}
            timestamps2 = {'Tsam': time}
            event_uid = insert_event(descriptor=descriptor2_uid,
                                     seq_num=idx2+idx1, time=time, data=data2,
                                     uid=str(uuid.uuid4()),
                                     timestamps=timestamps2)
            event, = find_events(uid=event_uid)
            events.append(event)
        ttime.sleep(sleep)

    return events
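
Taken together, these examples follow a single document lifecycle: a beamline config and a RunStart are inserted, one or more EventDescriptors are attached to the RunStart, Events are attached to a descriptor, and a RunStop closes the run. A condensed sketch of that lifecycle, assembled only from calls shown above and assuming the insert_* helpers are importable into one namespace as in the examples; every literal value is a placeholder:

import time
import uuid

# End-to-end sketch; all values below are illustrative.
blc = insert_beamline_config({}, time=time.time())
rs_uid = insert_run_start(time=time.time(), scan_id=1, beamline_id='example',
                          beamline_config=blc, uid=str(uuid.uuid4()))
desc_uid = insert_event_descriptor(
    run_start=rs_uid,
    data_keys={'Tsam': dict(source='PV:ES:Tsam', dtype='number')},
    time=time.time(), uid=str(uuid.uuid4()))
for seq_num, temp in enumerate([25.0, 25.5, 26.0]):
    ts = time.time()
    insert_event(descriptor=desc_uid, seq_num=seq_num, time=ts,
                 data={'Tsam': temp}, timestamps={'Tsam': ts},
                 uid=str(uuid.uuid4()))
insert_run_stop(run_start=rs_uid, time=time.time(),
                exit_status='success', reason=None, uid=str(uuid.uuid4()))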