Example #1
def ui_imagearray(header, index=0):
    "Return the image array for the given header object and event index."
    # Fetch the requested event and fill in its externally stored data.
    e = blank_events(header)[index]
    fill_event(e)
    # Use the first data key that ends with 'image_lightfield' and return
    # the first element of its entry.
    nm = [k for k in e.data if k.endswith('image_lightfield')][0]
    rv = e.data[nm][0]
    return rv
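A minimal usage sketch, not from the original source: header is assumed to be a header object already retrieved from the Data Broker, and matplotlib is used only for display.

# Hypothetical usage: display the light-field image from the first event.
import matplotlib.pyplot as plt

img = ui_imagearray(header, index=0)  # `header` assumed to come from the Data Broker
plt.imshow(img)
plt.colorbar()
plt.show()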
Example #2
def export(headers, filename):
    """
    Parameters
    ----------
    headers : a Header or a list of Headers
        objects returned by the Data Broker
    filename : string
        path to a new or existing HDF5 file
    """
    with h5py.File(filename, 'a') as f:
        for header in headers:
            assert isinstance(header, MutableMapping)
            header = copy.deepcopy(header)
            try:
                descriptors = header.pop('event_descriptors')
            except KeyError:
                warnings.warn(
                    "Header with uid {header.uid} contains no "
                    "data.".format(header=header), UserWarning)
                continue
            top_group_name = repr(header).replace(' ', '_')[1:-1]
            group = f.create_group(top_group_name)
            _safe_attrs_assignment(group, header)
            for i, descriptor in enumerate(descriptors):
                desc_group = group.create_group('Event_Stream_{0}'.format(i))
                data_keys = descriptor.pop('data_keys')
                _safe_attrs_assignment(desc_group, descriptor)
                events = list(find_events(descriptor=descriptor))
                event_times = [e['time'] for e in events]
                desc_group.create_dataset('event_times', data=event_times)
                data_group = desc_group.create_group('data')
                ts_group = desc_group.create_group('timestamps')
                for e in events:
                    fill_event(e)
                for key, value in data_keys.items():
                    print('data key = %s' % key)
                    timestamps = [e['timestamps'][key] for e in events]
                    ts_group.create_dataset(key, data=timestamps)
                    data = [e['data'][key] for e in events]
                    data_group.create_dataset(key, data=data)
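For orientation, here is a sketch (not part of the original example) of reading back the layout that export() writes: one top-level group per header, Event_Stream_{i} subgroups, and within each stream an event_times dataset plus data and timestamps groups keyed by data key. The file name 'scan.h5' is a placeholder.

# Sketch of reading back the structure written by export(); names are placeholders.
import h5py

with h5py.File('scan.h5', 'r') as f:
    for run_name, run_group in f.items():              # one group per header
        for stream_name, stream in run_group.items():  # 'Event_Stream_0', ...
            event_times = stream['event_times'][:]
            for key, dset in stream['data'].items():
                values = dset[:]                        # per-event data for this key
                stamps = stream['timestamps'][key][:]   # matching timestamps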
Example #3
def run(run_start=None, sleep=0):
    # Make the data
    rs = np.random.RandomState(5)

    # set up the data keys entry
    data_keys1 = {'linear_motor': dict(source='PV:ES:sam_x', dtype='number'),
                  'img': dict(source='CCD', shape=(5, 5), dtype='array',
                              external='FILESTORE:'),
                  'total_img_sum': dict(source='CCD:sum', dtype='number'),
                  'img_x_max': dict(source='CCD:xmax', dtype='number'),
                  'img_y_max': dict(source='CCD:ymax', dtype='number'),
                  'img_sum_x': dict(source='CCD:xsum', dtype='array',
                                    shape=(5,), external='FILESTORE:'),
                  'img_sum_y': dict(source='CCD:ysum', dtype='array',
                                    shape=(5,), external='FILESTORE:')
                  }
    data_keys2 = {'Tsam': dict(source='PV:ES:Tsam', dtype='number')}

    # save the first event descriptor
    e_desc1 = insert_event_descriptor(
        run_start=run_start, data_keys=data_keys1, time=0.,
        uid=str(uuid.uuid4()))

    e_desc2 = insert_event_descriptor(
        run_start=run_start, data_keys=data_keys2, time=0.,
        uid=str(uuid.uuid4()))

    # number of motor positions to fake
    num1 = 20
    # number of temperatures to record per motor position
    num2 = 10

    events = []
    for idx1, i in enumerate(range(num1)):
        img = next(frame_generator)
        img_sum = float(img.sum())
        img_sum_x = img.sum(axis=0)
        img_sum_y = img.sum(axis=1)
        img_x_max = float(img_sum_x.argmax())
        img_y_max = float(img_sum_y.argmax())

        fsid_img = save_ndarray(img)
        fsid_x = save_ndarray(img_sum_x)
        fsid_y = save_ndarray(img_sum_y)

        # Put in actual ndarray data, as broker would do.
        data1 = {'linear_motor': (i, noisy(i)),
                 'total_img_sum': (img_sum, noisy(i)),
                 'img': (fsid_img, noisy(i)),
                 'img_sum_x': (fsid_x, noisy(i)),
                 'img_sum_y': (fsid_y, noisy(i)),
                 'img_x_max': (img_x_max, noisy(i)),
                 'img_y_max': (img_y_max, noisy(i))
                 }

        event = insert_event(event_descriptor=e_desc1, seq_num=idx1,
                             time=noisy(i), data=data1, uid=str(uuid.uuid4()))
        fill_event(event)
        events.append(event)
        for idx2, i2 in enumerate(range(num2)):
            time = noisy(i/num2)
            data2 = {'Tsam': (idx1 + np.random.randn()/100, time)}
            event = insert_event(event_descriptor=e_desc2, seq_num=idx2+idx1,
                                 time=time, data=data2, uid=str(uuid.uuid4()))
            events.append(event)
        ttime.sleep(sleep)

    return events
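A minimal sketch of driving this simulator; run_start is assumed to be a run-start document or uid created elsewhere and is only a placeholder here.

# Hypothetical call: run_start is assumed to exist already.
simulated_events = run(run_start=run_start, sleep=0)
print('%d events inserted' % len(simulated_events))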
Example #4
def events_generator(header):
    "Return a generator of Events. Large (nonscalar) data is lazy-loaded."
    for e in blank_events(header):
        fill_event(e)
        yield e
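A usage sketch (header and process are placeholders): iterating the generator keeps memory low, because each event's large external data is filled in only when that event is consumed.

# Hypothetical usage: handle each event as it is filled.
for event in events_generator(header):
    process(event.data)  # process is a placeholder for user code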
Example #5
def fill_all_events(*args, **kwargs):
    events = func(*args, **kwargs)
    for event in events:
        fill_event(event)
    return events
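This snippet is the inner wrapper of a decorator: func is the wrapped callable from the enclosing scope. A sketch of a complete enclosing decorator follows; the outer name fill_all is illustrative, not from the original source.

# Hypothetical enclosing decorator for the wrapper above.
import functools

def fill_all(func):
    @functools.wraps(func)
    def fill_all_events(*args, **kwargs):
        events = func(*args, **kwargs)
        for event in events:
            fill_event(event)
        return events
    return fill_all_events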