Example #1
def test_find_events_smoke():

    num = 50
    rs, e_desc, data_keys = setup_syn()
    all_data = syn_data(data_keys, num)

    mdsc.bulk_insert_events(e_desc, all_data, validate=False)
    mdsc.insert_run_stop(rs, ttime.time(), uid=str(uuid.uuid4()))
    mdsc.clear_process_cache()

    # make sure the uid works
    next(mdsc.find_events(descriptor=e_desc))

    mdsc.clear_process_cache()
    descriptor, = mdsc.find_descriptors(uid=e_desc)

    mdsc.clear_process_cache()
    # make sure that searching by descriptor document works
    next(mdsc.find_events(descriptor=descriptor))
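
The test above exercises metadatastore's event lookup in two ways: by the descriptor's uid string and by the descriptor document itself. A minimal retrieval sketch of that pattern, assuming the `mdsc` alias refers to metadatastore's commands module and that `desc_uid` (a hypothetical name) holds an existing descriptor uid:

import metadatastore.commands as mdsc  # assumed import behind the `mdsc` alias used above


def events_for_descriptor(desc_uid):
    # Resolve the uid to its descriptor document, then pull every event
    # recorded against that descriptor.
    descriptor, = mdsc.find_descriptors(uid=desc_uid)
    return list(mdsc.find_events(descriptor=descriptor))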
Example #2
File: suitcase.py Project: giltis/suitcase
def export(headers, filename):
    """
    Parameters
    ----------
    headers : a Header or a list of Headers
        objects returned by the Data Broker
    filename : string
        path to a new or existing HDF5 file
    """
    with h5py.File(filename) as f:
        for header in headers:
            assert isinstance(header, MutableMapping)
            header = copy.deepcopy(header)
            try:
                descriptors = header.pop('event_descriptors')
            except KeyError:
                warnings.warn("Header with uid {header.uid} contains no "
                              "data.".format(header), UserWarning)
                continue
            top_group_name = repr(header).replace(' ', '_')[1:-1]
            group = f.create_group(top_group_name)
            _safe_attrs_assignment(group, header)
            for i, descriptor in enumerate(descriptors):
                desc_group = group.create_group('Event Stream {0}'.format(i))
                data_keys = descriptor.pop('data_keys')
                _safe_attrs_assignment(desc_group, descriptor)
                events = list(find_events(descriptor=descriptor))
                event_times = [e['time'] for e in events]
                desc_group.create_dataset('event_times', data=event_times)
                data_group = desc_group.create_group('data')
                ts_group = desc_group.create_group('timestamps')
                for key, value in data_keys.items():
                    timestamps = [e['timestamps'][key] for e in events]
                    ts_group.create_dataset(key, data=timestamps)
                    for event in events:
                        fill_event(event)  # fill any external data, in place
                    data = [e['data'][key] for e in events]
                    dataset = data_group.create_dataset(key, data=data)
                    # Put contents of this data key (source, etc.)
                    # into an attribute on the associated data set.
                    value['data_broker_shape'] = value.pop('shape')
                    value['data_broker_dtype'] = value.pop('dtype')
                    print(value)
                    _safe_attrs_assignment(dataset, value)
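
This export function depends on module-level imports in suitcase.py (h5py, copy, warnings, MutableMapping, metadatastore's find_events and fill_event, and the internal _safe_attrs_assignment helper). A hedged sketch of how it might be called; the broker import and the output file name are assumptions, not part of the example:

from databroker import DataBroker as db  # assumption: the broker package paired with suitcase

headers = [db[-1]]                 # most recent run, wrapped in a list as the docstring allows
export(headers, 'run_export.h5')   # one HDF5 group per header, one subgroup per event stream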
Example #3
def export(headers, filename):
    """
    Parameters
    ----------
    headers : a Header or a list of Headers
        objects returned by the Data Broker
    filename : string
        path to a new or existing HDF5 file
    """
    with h5py.File(filename) as f:
        for header in headers:
            assert isinstance(header, MutableMapping)
            header = copy.deepcopy(header)
            try:
                descriptors = header.pop('event_descriptors')
            except KeyError:
                warnings.warn(
                    "Header with uid {header.uid} contains no "
                    "data.".format(header=header), UserWarning)
                continue
            top_group_name = repr(header).replace(' ', '_')[1:-1]
            group = f.create_group(top_group_name)
            _safe_attrs_assignment(group, header)
            for i, descriptor in enumerate(descriptors):
                desc_group = group.create_group('Event_Stream_{0}'.format(i))
                data_keys = descriptor.pop('data_keys')
                _safe_attrs_assignment(desc_group, descriptor)
                events = list(find_events(descriptor=descriptor))
                event_times = [e['time'] for e in events]
                desc_group.create_dataset('event_times', data=event_times)
                data_group = desc_group.create_group('data')
                ts_group = desc_group.create_group('timestamps')
                for e in events:
                    fill_event(e)  # fill any external data, in place
                for key, value in data_keys.items():
                    print('data key = %s' % key)
                    timestamps = [e['timestamps'][key] for e in events]
                    ts_group.create_dataset(key, data=timestamps)
                    data = [e['data'][key] for e in events]
                    dataset = data_group.create_dataset(key, data=data)
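
To make the resulting layout concrete (one top-level group per header, each containing Event_Stream_N subgroups with an event_times dataset plus data and timestamps groups), here is a small read-back sketch; the file name is hypothetical and assumes a prior call to export:

import h5py

with h5py.File('run_export.h5', 'r') as f:
    for run_name, run_group in f.items():              # one group per exported header
        print(run_name)
        for stream_name, stream in run_group.items():  # Event_Stream_0, Event_Stream_1, ...
            keys = list(stream['data'].keys())
            print('  {0}: {1} events, keys {2}'.format(
                stream_name, len(stream['event_times']), keys))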
Example #4
def test_find_events_ValueError():
    with pytest.raises(ValueError):
        list(mdsc.find_events(event_descriptor='cat'))
Example #5
def test_find_events_ValueError():
    with pytest.raises(ValueError):
        list(mdsc.find_events(event_descriptor='cat'))
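
Both error tests hinge on the same detail: find_events accepts the keyword descriptor (a uid string or descriptor document, as the smoke test shows) and rejects unrecognized keywords such as event_descriptor with a ValueError. A hedged side-by-side sketch, assuming the same mdsc alias and a hypothetical descriptor uid desc_uid:

import pytest
import metadatastore.commands as mdsc  # assumed import behind the `mdsc` alias


def check_find_events_keywords(desc_uid):
    # Supported: query by `descriptor` (uid string or descriptor document).
    events = list(mdsc.find_events(descriptor=desc_uid))

    # Unsupported keywords such as `event_descriptor` raise ValueError.
    with pytest.raises(ValueError):
        list(mdsc.find_events(event_descriptor=desc_uid))
    return events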