Example #1
 def _handle_epochs_group(self, block):
     # Note that an NWB Epoch corresponds to a Neo Segment, not to a Neo Epoch.
     epochs = self._file.get('epochs')
     # todo: handle epochs.attrs.get('tags')
     for name, epoch in epochs.items():
         # todo: handle epoch.attrs.get('links')
         timeseries = []
         for key, value in epoch.items():
             if key == 'start_time':
                 t_start = value * pq.second
             elif key == 'stop_time':
                 t_stop = value * pq.second
             else:
                 # todo: handle value['count']
                 # todo: handle value['idx_start']
                 timeseries.append(self._handle_timeseries(key, value.get('timeseries')))
         segment = Segment(name=name)
         for obj in timeseries:
             obj.segment = segment
             if isinstance(obj, AnalogSignal):
                 segment.analogsignals.append(obj)
             elif isinstance(obj, IrregularlySampledSignal):
                 segment.irregularlysampledsignals.append(obj)
             elif isinstance(obj, Event):
                 segment.events.append(obj)
             elif isinstance(obj, Epoch):
                 segment.epochs.append(obj)
         segment.block = block
         block.segments.append(segment)
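
A minimal sketch of the Segment/Block assembly this handler performs, using only public neo objects (array values and names are illustrative):

    import numpy as np
    import quantities as pq
    from neo.core import Block, Segment, AnalogSignal

    block = Block(name='nwb-file')
    segment = Segment(name='epoch-0')
    signal = AnalogSignal(np.random.randn(100, 1), units='mV',
                          sampling_rate=1 * pq.kHz)
    signal.segment = segment              # child -> parent link
    segment.analogsignals.append(signal)  # parent -> child link
    segment.block = block
    block.segments.append(segment)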
Example #2
    def test__children(self):
        params = {'test2': 'y1', 'test3': True}
        epc = Epoch([1.1, 1.5, 1.7]*pq.ms, durations=[20, 40, 60]*pq.ns,
                    labels=np.array(['test epoch 1',
                                     'test epoch 2',
                                     'test epoch 3'], dtype='S'),
                    name='test', description='tester',
                    file_origin='test.file',
                    test1=1, **params)
        epc.annotate(test1=1.1, test0=[1, 2])
        assert_neo_object_is_compliant(epc)

        segment = Segment(name='seg1')
        segment.epochs = [epc]
        segment.create_many_to_one_relationship()

        self.assertEqual(epc._single_parent_objects, ('Segment',))
        self.assertEqual(epc._multi_parent_objects, ())

        self.assertEqual(epc._single_parent_containers, ('segment',))
        self.assertEqual(epc._multi_parent_containers, ())

        self.assertEqual(epc._parent_objects, ('Segment',))
        self.assertEqual(epc._parent_containers, ('segment',))

        self.assertEqual(len(epc.parents), 1)
        self.assertEqual(epc.parents[0].name, 'seg1')

        assert_neo_object_is_compliant(epc)
Example #3
    def test_read_nse_data(self):
        t_start, t_stop = None, None  # in samples

        nio = NeuralynxIO(self.sn, use_cache='never')
        seg = Segment('testsegment')

        for el_id, el_dict in nio.parameters_nse.items():
            filepath = nio.parameters_nse[el_id]['recording_file_name']
            filename = filepath.split('/')[-1].split('\\')[-1].split('.')[0]
            nio.read_nse(filename, seg, t_start=t_start, t_stop=t_stop,
                         waveforms=True)
            spiketrain = seg.filter({'electrode_id': el_id},
                                    objects=SpikeTrain)[0]

            # target_data = np.zeros((500, 32))
            # timestamps = np.zeros(500)
            entries = []
            with open(self.pd + '/%s.txt' % filename) as datafile:
                for i, line in enumerate(datafile):
                    line = line.strip('\ufeff')  # text mode decodes the UTF-8 BOM to '\ufeff'
                    entries.append(line.split())
            entries = np.asarray(entries, dtype=float)
            target_data = entries[:-1, 11:]
            timestamps = entries[:-1, 0]

            timestamps = (timestamps * pq.microsecond -
                          nio.parameters_global['t_start'])

            np.testing.assert_array_equal(timestamps.magnitude,
                                          spiketrain.magnitude)
            np.testing.assert_array_equal(target_data,
                                          spiketrain.waveforms)
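
A hedged sketch of the same read against a recent neo release; the test above targets an older NeuralynxIO API (parameters_nse, read_nse), and 'path/to/session' is a placeholder for a directory of .ncs/.nse files:

    from neo.io import NeuralynxIO

    io = NeuralynxIO(dirname='path/to/session')  # placeholder directory
    block = io.read_block()
    for st in block.segments[0].spiketrains:
        print(st.t_start, st.t_stop, len(st))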
Example #4
    def test__children(self):
        params = {'test2': 'y1', 'test3': True}
        epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
                    labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='S'),
                    name='test', description='tester', file_origin='test.file', test1=1, **params)
        epc.annotate(test1=1.1, test0=[1, 2])
        assert_neo_object_is_compliant(epc)

        segment = Segment(name='seg1')
        segment.epochs = [epc]
        segment.create_many_to_one_relationship()

        self.assertEqual(epc._single_parent_objects, ('Segment',))
        self.assertEqual(epc._multi_parent_objects, ())

        self.assertEqual(epc._single_parent_containers, ('segment',))
        self.assertEqual(epc._multi_parent_containers, ())

        self.assertEqual(epc._parent_objects, ('Segment',))
        self.assertEqual(epc._parent_containers, ('segment',))

        self.assertEqual(len(epc.parents), 1)
        self.assertEqual(epc.parents[0].name, 'seg1')

        assert_neo_object_is_compliant(epc)
Example #5
    def test_read_nse_data(self):
        t_start, t_stop = None, None  # in samples

        nio = NeuralynxIO(self.sn, use_cache='never')
        seg = Segment('testsegment')

        for el_id, el_dict in nio.parameters_nse.items():  # .items(); .iteritems() is Python 2 only
            filepath = nio.parameters_nse[el_id]['recording_file_name']
            filename = filepath.split('/')[-1].split('\\')[-1].split('.')[0]
            nio.read_nse(filename, seg, t_start=t_start, t_stop=t_stop,
                         waveforms=True)
            spiketrain = seg.filter({'electrode_id': el_id},
                                    objects=SpikeTrain)[0]

            # target_data = np.zeros((500, 32))
            # timestamps = np.zeros(500)
            entries = []
            with open(self.pd + '/%s.txt' % filename) as datafile:
                for i, line in enumerate(datafile):
                    line = line.strip('\ufeff')  # text mode decodes the UTF-8 BOM to '\ufeff'
                    entries.append(line.split())
            entries = np.asarray(entries, dtype=float)
            target_data = entries[:-1, 11:]
            timestamps = entries[:-1, 0]

            timestamps = (timestamps * pq.microsecond -
                          nio.parameters_global['t_start'])

            np.testing.assert_array_equal(timestamps.magnitude,
                                          spiketrain.magnitude)
            np.testing.assert_array_equal(target_data,
                                          spiketrain.waveforms)
Example #6
    def test__add_epoch(self):
        proxy_event = EventProxy(rawio=self.reader,
                                 event_channel_index=0,
                                 block_index=0,
                                 seg_index=0)

        loaded_event = proxy_event.load()

        regular_event = Event(times=loaded_event.times -
                              1 * loaded_event.units)

        loaded_event.annotate(nix_name='neo.event.0')
        regular_event.annotate(nix_name='neo.event.1')

        seg = Segment()
        seg.events = [regular_event, proxy_event]

        # test cutting with two events one of which is a proxy
        epoch = add_epoch(seg, regular_event, proxy_event)

        assert_neo_object_is_compliant(epoch)
        exp_annos = {
            k: v
            for k, v in regular_event.annotations.items() if k != 'nix_name'
        }
        self.assertDictEqual(epoch.annotations, exp_annos)
        assert_arrays_almost_equal(epoch.times, regular_event.times, 1e-12)
        assert_arrays_almost_equal(
            epoch.durations,
            np.ones(regular_event.shape) * loaded_event.units, 1e-12)
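
The proxy pattern exercised above, in isolation: a proxy records where the data lives and defers I/O until .load() is called ('reader' stands in for the RawIO instance held by the test fixture):

    from neo.io.proxyobjects import EventProxy

    proxy = EventProxy(rawio=reader, event_channel_index=0,  # 'reader': a RawIO
                       block_index=0, seg_index=0)
    event = proxy.load()                  # now a fully materialized neo.Event
    event.annotate(nix_name='neo.event.0')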
Example #7
    def test_roundtrip_with_json_metadata(self):
        sample_data = np.random.uniform(size=(200, 3))
        filename = "test_roundtrip_with_json_metadata.txt"
        metadata_filename = "test_roundtrip_with_json_metadata_about.json"
        signal1 = AnalogSignal(sample_data,
                               units="pA",
                               sampling_rate=2 * pq.kHz)
        seg1 = Segment()
        block1 = Block()
        seg1.analogsignals.append(signal1)
        seg1.block = block1
        block1.segments.append(seg1)

        iow = AsciiSignalIO(filename, metadata_filename=metadata_filename)
        iow.write_block(block1)
        self.assertTrue(os.path.exists(metadata_filename))  # assert_ is a deprecated alias

        ior = AsciiSignalIO(filename)
        block2 = ior.read_block()
        assert len(block2.segments[0].analogsignals) == 3
        signal2 = block2.segments[0].analogsignals[1]

        assert_array_almost_equal(signal1.magnitude[:, 1],
                                  signal2.magnitude.reshape(-1),
                                  decimal=7)
        self.assertEqual(signal1.units, signal2.units)
        self.assertEqual(signal1.sampling_rate, signal2.sampling_rate)
        assert_array_equal(signal1.times, signal2.times)

        os.remove(filename)
        os.remove(metadata_filename)
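
A minimal sketch of the same roundtrip with placeholder filenames: writing through AsciiSignalIO with a metadata_filename emits a JSON sidecar, and a reader pointed at the data file alone recovers units and sampling rate, as the test asserts:

    from neo.io import AsciiSignalIO

    writer = AsciiSignalIO('signals.txt', metadata_filename='signals_about.json')
    writer.write_block(block1)                     # block1 built as in the test
    block2 = AsciiSignalIO('signals.txt').read_block()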
Example #8
    def test__children(self):
        params = {'test2': 'y1', 'test3': True}
        evt = Event(1.5 * pq.ms,
                    label='test epoch',
                    name='test',
                    description='tester',
                    file_origin='test.file',
                    test1=1,
                    **params)
        evt.annotate(test1=1.1, test0=[1, 2])
        assert_neo_object_is_compliant(evt)

        segment = Segment(name='seg1')
        segment.events = [evt]
        segment.create_many_to_one_relationship()

        self.assertEqual(evt._single_parent_objects, ('Segment', ))
        self.assertEqual(evt._multi_parent_objects, ())

        self.assertEqual(evt._single_parent_containers, ('segment', ))
        self.assertEqual(evt._multi_parent_containers, ())

        self.assertEqual(evt._parent_objects, ('Segment', ))
        self.assertEqual(evt._parent_containers, ('segment', ))

        self.assertEqual(len(evt.parents), 1)
        self.assertEqual(evt.parents[0].name, 'seg1')

        assert_neo_object_is_compliant(evt)
Example #9
 def read_segment(self, lazy=False, cascade=True, group=0, series=0):
     seg = Segment(name='test')
     if cascade:
         tree = getbyroute(self.pul.tree, [0, group, series])
         for sw, sweep in enumerate(tree['children']):
             if sw == 0:
                 starttime = pq.Quantity(float(sweep['contents'].swTimer), 's')
             for ch, channel in enumerate(sweep['children']):
                 sig = self.read_analogsignal(group=group,
                                              series=series,
                                              sweep=sw,
                                              channel=ch)
                 # list() so that .remove() also works on Python 3,
                 # where dict.keys() returns a view without remove()
                 annotations = list(sweep['contents'].__dict__.keys())
                 annotations.remove('readlist')
                 for a in annotations:
                     d = {a: str(sweep['contents'].__dict__[a])}
                     sig.annotate(**d)
                 sig.t_start = pq.Quantity(float(sig.annotations['swTimer']), 's') - starttime
                 seg.analogsignals.append(sig)
         annotations = list(tree['contents'].__dict__.keys())
         annotations.remove('readlist')
         for a in annotations:
             d = {a: str(tree['contents'].__dict__[a])}
             seg.annotate(**d)
     create_many_to_one_relationship(seg)
     return seg
Example #10
 def setUp(self):
     self.segment = Segment()
     self.df = pd.DataFrame()
     self.df2 = pd.DataFrame()
     self.segment2 = Segment()
     self.segment2.dataframes = {}
     self.segment2.dataframes.update({'test': pd.DataFrame()})
Example #11
    def read_block(self, lazy=False, cascade=True, channel_index=None):
        """
        Arguments:
            channel_index: int, iterable or None, to select one, many or all channels

        """

        blk = Block()
        if cascade:
            seg = Segment(file_origin=self._filename)
            blk.segments += [seg]

            if channel_index is not None:  # explicit check: channel 0 is falsy
                if isinstance(channel_index, int):
                    channel_index = [channel_index]
                if isinstance(channel_index, list):
                    channel_index = np.array(channel_index)
            else:
                channel_index = np.arange(0, self._attrs['shape'][1])

            chx = ChannelIndex(name='all channels', index=channel_index)
            blk.channel_indexes.append(chx)

            ana = self.read_analogsignal(channel_index=channel_index,
                                         lazy=lazy,
                                         cascade=cascade)
            ana.channel_index = chx
            seg.duration = (self._attrs['shape'][0] /
                            self._attrs['kwik']['sample_rate']) * pq.s

            # neo.tools.populate_RecordingChannel(blk)
        blk.create_many_to_one_relationship()
        return blk
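
The channel_index handling above, isolated as a hypothetical helper; note that a bare truthiness test would treat channel 0 as "no selection", which the explicit None check avoids:

    import numpy as np

    def normalize_channel_index(channel_index, n_channels):
        # None -> all channels; int -> one-element list; iterable -> array
        if channel_index is None:
            return np.arange(n_channels)
        if isinstance(channel_index, int):
            channel_index = [channel_index]
        return np.asarray(channel_index)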
Example #12
 def _handle_epochs_group(self, block):
     # Note that an NWB Epoch corresponds to a Neo Segment, not to a Neo Epoch.
     epochs = self._file.get('epochs')
     # todo: handle epochs.attrs.get('tags')
     for name, epoch in epochs.items():
         # todo: handle epoch.attrs.get('links')
         timeseries = []
         for key, value in epoch.items():
             if key == 'start_time':
                 t_start = value * pq.second
             elif key == 'stop_time':
                 t_stop = value * pq.second
             else:
                 # todo: handle value['count']
                 # todo: handle value['idx_start']
                 timeseries.append(self._handle_timeseries(key, value.get('timeseries')))
         segment = Segment(name=name)
         for obj in timeseries:
             obj.segment = segment
             if isinstance(obj, AnalogSignal):
                 segment.analogsignals.append(obj)
             elif isinstance(obj, IrregularlySampledSignal):
                 segment.irregularlysampledsignals.append(obj)
             elif isinstance(obj, Event):
                 segment.events.append(obj)
             elif isinstance(obj, Epoch):
                 segment.epochs.append(obj)
         segment.block = block
         block.segments.append(segment)
Example #13
    def test__children(self):
        signal = self.signals[0]

        segment = Segment(name='seg1')
        segment.analogsignals = [signal]
        segment.create_many_to_one_relationship()

        rchan = RecordingChannel(name='rchan1')
        rchan.analogsignals = [signal]
        rchan.create_many_to_one_relationship()

        self.assertEqual(signal._single_parent_objects,
                         ('Segment', 'RecordingChannel'))
        self.assertEqual(signal._multi_parent_objects, ())

        self.assertEqual(signal._single_parent_containers,
                         ('segment', 'recordingchannel'))
        self.assertEqual(signal._multi_parent_containers, ())

        self.assertEqual(signal._parent_objects,
                         ('Segment', 'RecordingChannel'))
        self.assertEqual(signal._parent_containers,
                         ('segment', 'recordingchannel'))

        self.assertEqual(len(signal.parents), 2)
        self.assertEqual(signal.parents[0].name, 'seg1')
        self.assertEqual(signal.parents[1].name, 'rchan1')

        assert_neo_object_is_compliant(signal)
Example #14
    def test__children(self):
        signal = self.signals[0]

        segment = Segment(name='seg1')
        segment.analogsignals = [signal]
        segment.create_many_to_one_relationship()

        chx = ChannelIndex(name='chx1', index=np.arange(signal.shape[1]))
        chx.analogsignals = [signal]
        chx.create_many_to_one_relationship()

        self.assertEqual(signal._single_parent_objects,
                         ('Segment', 'ChannelIndex'))
        self.assertEqual(signal._multi_parent_objects, ())

        self.assertEqual(signal._single_parent_containers,
                         ('segment', 'channel_index'))
        self.assertEqual(signal._multi_parent_containers, ())

        self.assertEqual(signal._parent_objects, ('Segment', 'ChannelIndex'))
        self.assertEqual(signal._parent_containers,
                         ('segment', 'channel_index'))

        self.assertEqual(len(signal.parents), 2)
        self.assertEqual(signal.parents[0].name, 'seg1')
        self.assertEqual(signal.parents[1].name, 'chx1')

        assert_neo_object_is_compliant(signal)
Example #15
    def read_block(self, lazy=False, cascade=True, channel_index=None):
        """
        Arguments:
            channel_index: int, iterable or None, to select one, many or all channels

        """

        blk = Block()
        if cascade:
            seg = Segment(file_origin=self._filename)
            blk.segments += [seg]

            if channel_index is not None:  # explicit check: channel 0 is falsy
                if isinstance(channel_index, int):
                    channel_index = [channel_index]
                if isinstance(channel_index, list):
                    channel_index = np.array(channel_index)
            else:
                channel_index = np.arange(0, self._attrs["shape"][1])

            chx = ChannelIndex(name="all channels", index=channel_index)
            blk.channel_indexes.append(chx)

            ana = self.read_analogsignal(channel_index=channel_index, lazy=lazy, cascade=cascade)
            ana.channel_index = chx
            seg.duration = (self._attrs["shape"][0] / self._attrs["kwik"]["sample_rate"]) * pq.s

            # neo.tools.populate_RecordingChannel(blk)
        blk.create_many_to_one_relationship()
        return blk
Example #16
 def setUp(self):
     self.signal = AnalogSignal(np.random.randn(1000, 1),
                                units='V',
                                sampling_rate=1 * pq.Hz)
     self.signal2 = AnalogSignal(np.random.randn(1000, 1),
                                 units='V',
                                 sampling_rate=1 * pq.Hz)
     self.signal_start = 10
     self.signal_end = 10
     self.evt = Event(np.arange(0, 100, 1) * pq.s,
                      labels=np.repeat(np.array(['t0', 't1'], dtype='S'),
                                       50))
     self.evt2 = Event(np.arange(0, 100, 1) * pq.s,
                       labels=np.repeat(np.array(['t2', 't3'], dtype='S'),
                                        50))
     self.evt_start = 15
     self.evt_pre_start = self.evt_start - 5
     self.evt_end = 85
     self.evt_post_end = self.evt_end + 5
     self.not_segment = [100]
     self.segment = Segment()
     self.segment.analogsignals.append(self.signal)
     self.segment.events.append(self.evt)
     self.segment2 = Segment()
     self.segment2.analogsignals.append(self.signal2)
     self.segment2.events.append(self.evt2)
     self.segments = [self.segment, self.segment2]
Example #17
    def test__get_events(self):
        starts_1 = Event(times=[0.5, 10.0, 25.2] * pq.s)
        starts_1.annotate(event_type='trial start', pick='me')
        starts_1.array_annotate(trial_id=[1, 2, 3])

        stops_1 = Event(times=[5.5, 14.9, 30.1] * pq.s)
        stops_1.annotate(event_type='trial stop')
        stops_1.array_annotate(trial_id=[1, 2, 3])

        proxy_event = EventProxy(rawio=self.reader,
                                 event_channel_index=0,
                                 block_index=0,
                                 seg_index=0)

        proxy_event.annotate(event_type='trial start')

        seg = Segment()
        seg.events = [starts_1, stops_1, proxy_event]

        # test getting multiple events including a proxy
        extracted_starts = get_events(seg, event_type='trial start')

        self.assertEqual(len(extracted_starts), 2)

        # make sure the event is loaded and a neo.Event object is returned
        self.assertTrue(isinstance(extracted_starts[0], Event))
        self.assertTrue(isinstance(extracted_starts[1], Event))
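
A hedged sketch of the neo.utils.get_events call pattern shown above: events are selected by matching annotations (or array annotations), and proxies among them are loaded into regular Event objects:

    from neo.utils import get_events

    starts = get_events(seg, event_type='trial start')  # list of neo.Event
    picked = get_events(seg, pick='me')                 # any annotation works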
Example #18
    def test__add_epoch(self):
        starts = Event(times=[0.5, 10.0, 25.2] * pq.s)
        starts.annotate(event_type='trial start')
        starts.array_annotate(trial_id=[1, 2, 3])

        stops = Event(times=[5.5, 14.9, 30.1] * pq.s)
        stops.annotate(event_type='trial stop')
        stops.array_annotate(trial_id=[1, 2, 3])

        seg = Segment()
        seg.events = [starts, stops]

        # test cutting with one event only
        ep_starts = add_epoch(seg, starts, pre=-300 * pq.ms, post=250 * pq.ms)

        assert_neo_object_is_compliant(ep_starts)
        assert_same_annotations(ep_starts, starts)
        assert_arrays_almost_equal(ep_starts.times, starts.times - 300 * pq.ms,
                                   1e-12)
        assert_arrays_almost_equal(
            ep_starts.durations,
            (550 * pq.ms).rescale(ep_starts.durations.units) * np.ones(
                (len(starts))), 1e-12)

        # test cutting with two events
        ep_trials = add_epoch(seg, starts, stops)

        assert_neo_object_is_compliant(ep_trials)
        assert_same_annotations(ep_trials, starts)
        assert_arrays_almost_equal(ep_trials.times, starts.times, 1e-12)
        assert_arrays_almost_equal(ep_trials.durations, stops - starts, 1e-12)
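
A hedged sketch of neo.utils.add_epoch semantics as exercised above: with a single event, each epoch starts pre before the event time and lasts post - pre; with two events, epoch i spans event1[i] to event2[i]:

    import quantities as pq
    from neo.core import Segment, Event
    from neo.utils import add_epoch

    seg = Segment()
    starts = Event(times=[1.0, 2.0] * pq.s)
    stops = Event(times=[1.5, 2.5] * pq.s)
    seg.events = [starts, stops]

    ep_window = add_epoch(seg, starts, pre=-100 * pq.ms, post=200 * pq.ms)
    ep_trials = add_epoch(seg, starts, stops)   # durations = stops - starts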
Example #19
    def test__children(self):
        segment = Segment(name='seg1')
        segment.spikes = [self.spike1]
        segment.create_many_to_one_relationship()

        unit = Unit(name='unit1')
        unit.spikes = [self.spike1]
        unit.create_many_to_one_relationship()

        self.assertEqual(self.spike1._single_parent_objects,
                         ('Segment', 'Unit'))
        self.assertEqual(self.spike1._multi_parent_objects, ())

        self.assertEqual(self.spike1._single_parent_containers,
                         ('segment', 'unit'))
        self.assertEqual(self.spike1._multi_parent_containers, ())

        self.assertEqual(self.spike1._parent_objects,
                         ('Segment', 'Unit'))
        self.assertEqual(self.spike1._parent_containers,
                         ('segment', 'unit'))

        self.assertEqual(len(self.spike1.parents), 2)
        self.assertEqual(self.spike1.parents[0].name, 'seg1')
        self.assertEqual(self.spike1.parents[1].name, 'unit1')

        assert_neo_object_is_compliant(self.spike1)
Example #20
 def read_segment(self, lazy=False, cascade=True):
     data, metadata = self._read_file_contents()
     annotations = dict(
         (k, metadata.get(k, 'unknown'))
         for k in ("label", "variable", "first_id", "last_id"))
     seg = Segment(**annotations)
     if cascade:
         if metadata['variable'] == 'spikes':
             for i in range(metadata['first_index'],
                            metadata['last_index']):
                 spiketrain = self._extract_spikes(data, metadata, i, lazy)
                 if spiketrain is not None:
                     seg.spiketrains.append(spiketrain)
             seg.annotate(
                 dt=metadata['dt']
             )  # store dt for SpikeTrains only, as can be retrieved from sampling_period for AnalogSignal
         else:
             for i in range(metadata['first_index'],
                            metadata['last_index']):
                 # probably slow. Replace with numpy-based version from 0.1
                 signal = self._extract_signal(data, metadata, i, lazy)
                 if signal is not None:
                     seg.analogsignals.append(signal)
         create_many_to_one_relationship(seg)
     return seg
Example #21
    def test__get_epochs(self):
        a = Epoch([0.5, 10.0, 25.2] * pq.s, durations=[5.1, 4.8, 5.0] * pq.s)
        a.annotate(epoch_type='a', pick='me')
        a.array_annotate(trial_id=[1, 2, 3])

        b = Epoch([5.5, 14.9, 30.1] * pq.s, durations=[4.7, 4.9, 5.2] * pq.s)
        b.annotate(epoch_type='b')
        b.array_annotate(trial_id=[1, 2, 3])

        proxy_epoch = EpochProxy(rawio=self.reader,
                                 event_channel_index=1,
                                 block_index=0,
                                 seg_index=0)

        proxy_epoch.annotate(epoch_type='a')

        seg = Segment()
        seg.epochs = [a, b, proxy_epoch]

        # test getting multiple epochs including a proxy
        extracted_epochs = get_epochs(seg, epoch_type='a')

        self.assertEqual(len(extracted_epochs), 2)

        # make sure the epoch is loaded and a neo.Epoch object is returned
        self.assertTrue(isinstance(extracted_epochs[0], Epoch))
        self.assertTrue(isinstance(extracted_epochs[1], Epoch))
Example #22
    def test__children(self):
        params = {"test2": "y1", "test3": True}
        evt = Event(
            1.5 * pq.ms,
            label="test epoch",
            name="test",
            description="tester",
            file_origin="test.file",
            test1=1,
            **params
        )
        evt.annotate(test1=1.1, test0=[1, 2])
        assert_neo_object_is_compliant(evt)

        segment = Segment(name="seg1")
        segment.events = [evt]
        segment.create_many_to_one_relationship()

        self.assertEqual(evt._single_parent_objects, ("Segment",))
        self.assertEqual(evt._multi_parent_objects, ())

        self.assertEqual(evt._single_parent_containers, ("segment",))
        self.assertEqual(evt._multi_parent_containers, ())

        self.assertEqual(evt._parent_objects, ("Segment",))
        self.assertEqual(evt._parent_containers, ("segment",))

        self.assertEqual(len(evt.parents), 1)
        self.assertEqual(evt.parents[0].name, "seg1")

        assert_neo_object_is_compliant(evt)
Example #23
    def test__children(self):
        signal = self.signals[0]

        segment = Segment(name='seg1')
        segment.analogsignals = [signal]
        segment.create_many_to_one_relationship()

        rchan = RecordingChannel(name='rchan1')
        rchan.analogsignals = [signal]
        rchan.create_many_to_one_relationship()

        self.assertEqual(signal._single_parent_objects,
                         ('Segment', 'RecordingChannel'))
        self.assertEqual(signal._multi_parent_objects, ())

        self.assertEqual(signal._single_parent_containers,
                         ('segment', 'recordingchannel'))
        self.assertEqual(signal._multi_parent_containers, ())

        self.assertEqual(signal._parent_objects,
                         ('Segment', 'RecordingChannel'))
        self.assertEqual(signal._parent_containers,
                         ('segment', 'recordingchannel'))

        self.assertEqual(len(signal.parents), 2)
        self.assertEqual(signal.parents[0].name, 'seg1')
        self.assertEqual(signal.parents[1].name, 'rchan1')

        assert_neo_object_is_compliant(signal)
Example #24
    def test__children(self):
        signal = self.signals[0]

        segment = Segment(name='seg1')
        segment.analogsignals = [signal]
        segment.create_many_to_one_relationship()

        chx = ChannelIndex(name='chx1', index=np.arange(signal.shape[1]))
        chx.analogsignals = [signal]
        chx.create_many_to_one_relationship()

        self.assertEqual(signal._single_parent_objects, ('Segment', 'ChannelIndex'))
        self.assertEqual(signal._multi_parent_objects, ())

        self.assertEqual(signal._single_parent_containers, ('segment', 'channel_index'))
        self.assertEqual(signal._multi_parent_containers, ())

        self.assertEqual(signal._parent_objects, ('Segment', 'ChannelIndex'))
        self.assertEqual(signal._parent_containers, ('segment', 'channel_index'))

        self.assertEqual(len(signal.parents), 2)
        self.assertEqual(signal.parents[0].name, 'seg1')
        self.assertEqual(signal.parents[1].name, 'chx1')

        assert_neo_object_is_compliant(signal)
Example #25
def proc_dam(filename):
    '''Load a dam file that has already been processed by the official matlab
    file converter.  That matlab data is saved to an m-file, which is then
    converted to a numpy '.npz' file.  This numpy file is the file actually
    loaded.  This function converts it to a neo block and returns the block.
    This block can be compared to the block produced by BrainwareDamIO to
    make sure BrainwareDamIO is working properly

    block = proc_dam(filename)

    filename: The file name of the numpy file to load.  It should end with
    '*_dam_py?.npz'. This will be converted to a neo 'file_origin' property
    with the value '*.dam', so the filename to compare should fit that pattern.
    'py?' should be 'py2' for the python 2 version of the numpy file or 'py3'
    for the python 3 version of the numpy file.

    example: filename = 'file1_dam_py2.npz'
             dam file name = 'file1.dam'
    '''
    with np.load(filename) as damobj:
        damfile = list(damobj.items())[0][1].flatten()  # list() for Python 3, where items() is not indexable

    filename = os.path.basename(filename[:-12]+'.dam')

    signals = [res.flatten() for res in damfile['signal']]
    stimIndexes = [int(res[0, 0].tolist()) for res in damfile['stimIndex']]
    timestamps = [res[0, 0] for res in damfile['timestamp']]

    block = Block(file_origin=filename)

    rcg = RecordingChannelGroup(file_origin=filename)
    chan = RecordingChannel(file_origin=filename, index=0, name='Chan1')
    rcg.channel_indexes = np.array([1])
    rcg.channel_names = np.array(['Chan1'], dtype='S')

    block.recordingchannelgroups.append(rcg)
    rcg.recordingchannels.append(chan)

    params = [res['params'][0, 0].flatten() for res in damfile['stim']]
    values = [res['values'][0, 0].flatten() for res in damfile['stim']]
    params = [[res1[0] for res1 in res] for res in params]
    values = [[res1 for res1 in res] for res in values]
    stims = [dict(zip(param, value)) for param, value in zip(params, values)]

    fulldam = zip(stimIndexes, timestamps, signals, stims)
    for stimIndex, timestamp, signal, stim in fulldam:
        sig = AnalogSignal(signal=signal*pq.mV,
                           t_start=timestamp*pq.d,
                           file_origin=filename,
                           sampling_period=1.*pq.s)
        segment = Segment(file_origin=filename,
                          index=stimIndex,
                          **stim)
        segment.analogsignals = [sig]
        block.segments.append(segment)

    create_many_to_one_relationship(block)

    return block
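
The filename convention described in the docstring, shown concretely: the twelve trailing characters '_dam_py?.npz' are dropped and '.dam' is appended:

    import os

    npz_name = 'file1_dam_py2.npz'
    dam_name = os.path.basename(npz_name[:-12] + '.dam')
    assert dam_name == 'file1.dam'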
Example #26
    def _read_segment(self, fobject):
        '''
        Read a single segment with a single analogsignal

        Returns the segment or None if there are no more segments
        '''

        try:
            # float64 -- start time of the AnalogSignal
            t_start = np.fromfile(fobject, dtype=np.float64, count=1)[0]
        except IndexError:
            # if there are no more Segments, return
            return False

        # int16 -- index of the stimulus parameters
        seg_index = np.fromfile(fobject, dtype=np.int16, count=1)[0].tolist()

        # int16 -- number of stimulus parameters
        numelements = np.fromfile(fobject, dtype=np.int16, count=1)[0]

        # read the name strings for the stimulus parameters
        paramnames = []
        for _ in range(numelements):
            # unit8 -- the number of characters in the string
            numchars = np.fromfile(fobject, dtype=np.uint8, count=1)[0]

            # char * numchars -- a single name string
            name = np.fromfile(fobject, dtype=np.uint8, count=numchars)

            # exclude invalid characters
            name = name[name >= 32].tobytes().decode('ascii')  # tobytes() replaces the deprecated tostring(); decode to str

            # add the name to the list of names
            paramnames.append(name)

        # float32 * numelements -- the values for the stimulus parameters
        paramvalues = np.fromfile(fobject, dtype=np.float32, count=numelements)

        # combine parameter names and the parameters as a dict
        params = dict(zip(paramnames, paramvalues))

        # int32 -- the number elements in the AnalogSignal
        numpts = np.fromfile(fobject, dtype=np.int32, count=1)[0]

        # int16 * numpts -- the AnalogSignal itself
        signal = np.fromfile(fobject, dtype=np.int16, count=numpts)

        sig = AnalogSignal(signal.astype(float) * pq.mV,  # np.float was removed from NumPy
                           t_start=t_start * pq.d,
                           file_origin=self._filename,
                           sampling_period=1. * pq.s,
                           copy=False)
        # Note: setting the sampling_period to 1 s is arbitrary

        # load the AnalogSignal and parameters into a new Segment
        seg = Segment(file_origin=self._filename, index=seg_index, **params)
        seg.analogsignals = [sig]

        return seg
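
A minimal sketch that writes one segment in the binary layout the reader expects, inferred from the inline comments above (a real file is used because np.fromfile does not accept in-memory buffers; untested against actual Brainware files):

    import numpy as np

    with open('one_segment.bin', 'wb') as f:
        f.write(np.float64(0.0).tobytes())     # start time (float64, days)
        f.write(np.int16(1).tobytes())         # stimulus-parameter index
        f.write(np.int16(1).tobytes())         # number of parameters
        f.write(np.uint8(4).tobytes())         # characters in the name string
        f.write(b'freq')                       # the name string itself
        f.write(np.float32(440.0).tobytes())   # parameter value (float32)
        sig = np.arange(8, dtype=np.int16)
        f.write(np.int32(sig.size).tobytes())  # number of samples (int32)
        f.write(sig.tobytes())                 # the int16 signal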
Example #27
 def _group_to_neo(self, nix_group):
     neo_attrs = self._nix_attr_to_neo(nix_group)
     neo_segment = Segment(**neo_attrs)
     neo_segment.rec_datetime = datetime.fromtimestamp(
         nix_group.created_at
     )
     self._neo_map[nix_group.name] = neo_segment
     return neo_segment
Example #28
    def test_segment_write(self):
        block = Block(name=self.rword())
        segment = Segment(name=self.rword(), description=self.rword())
        block.segments.append(segment)
        self.write_and_compare([block])

        segment.annotate(**self.rdict(2))
        self.write_and_compare([block])
Example #29
    def test_segment_write(self):
        block = Block(name=self.rword())
        segment = Segment(name=self.rword(), description=self.rword())
        block.segments.append(segment)
        self.write_and_compare([block])

        segment.annotate(**self.rdict(2))
        self.write_and_compare([block])
Example #30
def proc_dam(filename):
    '''Load a dam file that has already been processed by the official matlab
    file converter.  That matlab data is saved to an m-file, which is then
    converted to a numpy '.npz' file.  This numpy file is the file actually
    loaded.  This function converts it to a neo block and returns the block.
    This block can be compared to the block produced by BrainwareDamIO to
    make sure BrainwareDamIO is working properly

    block = proc_dam(filename)

    filename: The file name of the numpy file to load.  It should end with
    '*_dam_py?.npz'. This will be converted to a neo 'file_origin' property
    with the value '*.dam', so the filename to compare should fit that pattern.
    'py?' should be 'py2' for the python 2 version of the numpy file or 'py3'
    for the python 3 version of the numpy file.

    example: filename = 'file1_dam_py2.npz'
             dam file name = 'file1.dam'
    '''
    with np.load(filename) as damobj:
        damfile = list(damobj.items())[0][1].flatten()  # list() for Python 3, where items() is not indexable

    filename = os.path.basename(filename[:-12] + '.dam')

    signals = [res.flatten() for res in damfile['signal']]
    stimIndexes = [int(res[0, 0].tolist()) for res in damfile['stimIndex']]
    timestamps = [res[0, 0] for res in damfile['timestamp']]

    block = Block(file_origin=filename)

    chx = ChannelIndex(file_origin=filename,
                       index=np.array([0]),
                       channel_ids=np.array([1]),
                       channel_names=np.array(['Chan1'], dtype='S'))

    block.channel_indexes.append(chx)

    params = [res['params'][0, 0].flatten() for res in damfile['stim']]
    values = [res['values'][0, 0].flatten() for res in damfile['stim']]
    params = [[res1[0] for res1 in res] for res in params]
    values = [[res1 for res1 in res] for res in values]
    stims = [dict(zip(param, value)) for param, value in zip(params, values)]

    fulldam = zip(stimIndexes, timestamps, signals, stims)
    for stimIndex, timestamp, signal, stim in fulldam:
        sig = AnalogSignal(signal=signal * pq.mV,
                           t_start=timestamp * pq.d,
                           file_origin=filename,
                           sampling_period=1. * pq.s)
        segment = Segment(file_origin=filename,
                          index=stimIndex,
                          **stim)
        segment.analogsignals = [sig]
        block.segments.append(segment)

    block.create_many_to_one_relationship()

    return block
Example #31
    def read_segment(self,
                     lazy=False,
                     cascade=True,
                     gdf_id_list=None,
                     time_unit=pq.ms,
                     t_start=None,
                     t_stop=None,
                     id_column=0,
                     time_column=1,
                     **args):
        """
        Read a Segment which contains SpikeTrain(s) with specified neuron IDs
        from the GDF data.

        Parameters
        ----------
        lazy : bool, optional, default: False
        cascade : bool, optional, default: True
        gdf_id_list : list or tuple, default: None
            Either a list of GDF IDs for which to return SpikeTrain(s), or
            a tuple specifying a range of GDF IDs (boundaries [start, stop]
            included). Must be specified if the GDF file contains neuron
            IDs; the default None raises an error. Pass an empty list []
            to retrieve the spike trains of all neurons with at least one
            spike.
        time_unit : Quantity (time), optional, default: quantities.ms
            The time unit of recorded time stamps.
        t_start : Quantity (time), default: None
            Start time of SpikeTrain. t_start must be specified; the default
            None raises an error.
        t_stop : Quantity (time), default: None
            Stop time of SpikeTrain. t_stop must be specified; the default
            None raises an error.
        id_column : int, optional, default: 0
            Column index of neuron IDs.
        time_column : int, optional, default: 1
            Column index of time stamps.

        Returns
        -------
        seg : Segment
            The Segment contains one SpikeTrain for each ID in gdf_id_list.
        """

        if isinstance(gdf_id_list, tuple):
            gdf_id_list = range(gdf_id_list[0], gdf_id_list[1] + 1)

        # __read_spiketrains() needs a list of IDs
        if gdf_id_list is None:
            gdf_id_list = [None]

        # create an empty Segment and fill in the spike trains
        seg = Segment()
        seg.spiketrains = self.__read_spiketrains(gdf_id_list, time_unit,
                                                  t_start, t_stop, id_column,
                                                  time_column, **args)

        return seg
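
A hedged usage sketch for this reader; the class name GdfIO is assumed (it varies between neo versions) and 'spikes.gdf' is a placeholder:

    import quantities as pq

    io = GdfIO('spikes.gdf')                 # hypothetical instantiation
    seg = io.read_segment(gdf_id_list=[],    # [] = all neurons with spikes
                          t_start=0 * pq.ms,
                          t_stop=1000 * pq.ms)
    print(len(seg.spiketrains))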
Example #32
    def _read_segment(self, node, parent):
        attributes = self._get_standard_attributes(node)
        segment = Segment(**attributes)

        signals = []
        for name, child_node in node['analogsignals'].items():
            if "AnalogSignal" in name:
                signals.append(self._read_analogsignal(child_node, parent=segment))
        if signals and self.merge_singles:
            segment.unmerged_analogsignals = signals  # signals will be merged later
            signals = []
        for name, child_node in node['analogsignalarrays'].items():
            if "AnalogSignalArray" in name:
                signals.append(self._read_analogsignalarray(child_node, parent=segment))
        segment.analogsignals = signals

        irr_signals = []
        for name, child_node in node['irregularlysampledsignals'].items():
            if "IrregularlySampledSignal" in name:
                irr_signals.append(self._read_irregularlysampledsignal(child_node, parent=segment))
        if irr_signals and self.merge_singles:
            segment.unmerged_irregularlysampledsignals = irr_signals
            irr_signals = []
        segment.irregularlysampledsignals = irr_signals

        epochs = []
        for name, child_node in node['epochs'].items():
            if "Epoch" in name:
                epochs.append(self._read_epoch(child_node, parent=segment))
        if self.merge_singles:
            epochs = self._merge_data_objects(epochs)
        for name, child_node in node['epocharrays'].items():
            if "EpochArray" in name:
                epochs.append(self._read_epocharray(child_node, parent=segment))
        segment.epochs = epochs

        events = []
        for name, child_node in node['events'].items():
            if "Event" in name:
                events.append(self._read_event(child_node, parent=segment))
        if self.merge_singles:
            events = self._merge_data_objects(events)
        for name, child_node in node['eventarrays'].items():
            if "EventArray" in name:
                events.append(self._read_eventarray(child_node, parent=segment))
        segment.events = events

        spiketrains = []
        for name, child_node in node['spikes'].items():
            raise NotImplementedError('Spike objects not yet handled.')
        for name, child_node in node['spiketrains'].items():
            if "SpikeTrain" in name:
                spiketrains.append(self._read_spiketrain(child_node, parent=segment))
        segment.spiketrains = spiketrains

        segment.block = parent
        return segment
Example #33
    def read_segment(self, n_start, n_stop, chlist=None, lazy=False, cascade=True):
        """Reads a Segment from the file and stores in database.

        The Segment will contain one AnalogSignal for each channel
        and will go from n_start to n_stop (in samples).

        Arguments:
            n_start : time in samples that the Segment begins
            n_stop : time in samples that the Segment ends

        Python indexing is used, so n_stop is not inclusive.

        Returns a Segment object containing the data.
        """
        # If no channel numbers provided, get all of them
        if chlist is None:
            chlist = self.loader.get_neural_channel_numbers()

        # Conversion from bits to full_range units
        conversion = self.full_range / 2**(8*self.header.sample_width)

        # Create the Segment
        seg = Segment(file_origin=self.filename)
        t_start = float(n_start) / self.header.f_samp
        t_stop = float(n_stop) / self.header.f_samp
        seg.annotate(t_start=t_start)
        seg.annotate(t_stop=t_stop)

        # Load data from each channel and store
        for ch in chlist:
            if lazy:
                sig = np.array([]) * conversion
            else:
                # Get the data from the loader
                sig = np.array(
                    self.loader._get_channel(ch)[n_start:n_stop]) * conversion

            # Create an AnalogSignal with the data in it
            anasig = AnalogSignal(signal=sig,
                sampling_rate=self.header.f_samp*pq.Hz,
                t_start=t_start*pq.s, file_origin=self.filename,
                description='Channel %d from %f to %f' % (ch, t_start, t_stop),
                channel_index=int(ch))

            if lazy:
                anasig.lazy_shape = n_stop-n_start


            # Link the signal to the segment
            seg.analogsignals.append(anasig)

            # Link the signal to the recording channel from which it came
            #rc = self.channel_number_to_recording_channel[ch]
            #rc.analogsignals.append(anasig)

        return seg
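
The sample-index arithmetic used above, with an assumed sampling rate; n_stop follows Python slice convention and is exclusive:

    f_samp = 30000.0                 # Hz, assumed for illustration
    n_start, n_stop = 0, 30000
    t_start = n_start / f_samp       # 0.0 s
    t_stop = n_stop / f_samp         # 1.0 s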
Example #34
    def read_segment(self, lazy=False, cascade=True,
                     gdf_id_list=None, time_unit=pq.ms, t_start=None,
                     t_stop=None, id_column=0, time_column=1, **args):
        """
        Read a Segment which contains SpikeTrain(s) with specified neuron IDs
        from the GDF data.

        Parameters
        ----------
        lazy : bool, optional, default: False
        cascade : bool, optional, default: True
        gdf_id_list : list or tuple, default: None
            Either a list of GDF IDs for which to return SpikeTrain(s), or
            a tuple specifying a range of GDF IDs (boundaries [start, stop]
            included). Must be specified if the GDF file contains neuron
            IDs; the default None raises an error. Pass an empty list []
            to retrieve the spike trains of all neurons with at least one
            spike.
        time_unit : Quantity (time), optional, default: quantities.ms
            The time unit of recorded time stamps.
        t_start : Quantity (time), default: None
            Start time of SpikeTrain. t_start must be specified; the default
            None raises an error.
        t_stop : Quantity (time), default: None
            Stop time of SpikeTrain. t_stop must be specified; the default
            None raises an error.
        id_column : int, optional, default: 0
            Column index of neuron IDs.
        time_column : int, optional, default: 1
            Column index of time stamps.

        Returns
        -------
        seg : Segment
            The Segment contains one SpikeTrain for each ID in gdf_id_list.
        """

        if isinstance(gdf_id_list, tuple):
            gdf_id_list = range(gdf_id_list[0], gdf_id_list[1] + 1)

        # __read_spiketrains() needs a list of IDs
        if gdf_id_list is None:
            gdf_id_list = [None]

        # create an empty Segment and fill in the spike trains
        seg = Segment()
        seg.spiketrains = self.__read_spiketrains(gdf_id_list,
                                                  time_unit, t_start,
                                                  t_stop,
                                                  id_column, time_column,
                                                  **args)

        return seg
Example #35
    def _read_segment(self, node, parent):
        attributes = self._get_standard_attributes(node)
        segment = Segment(**attributes)

        signals = []
        for name, child_node in node['analogsignals'].items():
            if "AnalogSignal" in name:
                signals.append(self._read_analogsignal(child_node, parent=segment))
        if signals and self.merge_singles:
            segment.unmerged_analogsignals = signals  # signals will be merged later
            signals = []
        for name, child_node in node['analogsignalarrays'].items():
            if "AnalogSignalArray" in name:
                signals.append(self._read_analogsignalarray(child_node, parent=segment))
        segment.analogsignals = signals

        irr_signals = []
        for name, child_node in node['irregularlysampledsignals'].items():
            if "IrregularlySampledSignal" in name:
                irr_signals.append(self._read_irregularlysampledsignal(child_node, parent=segment))
        if irr_signals and self.merge_singles:
            segment.unmerged_irregularlysampledsignals = irr_signals
            irr_signals = []
        segment.irregularlysampledsignals = irr_signals

        epochs = []
        for name, child_node in node['epochs'].items():
            if "Epoch" in name:
                epochs.append(self._read_epoch(child_node, parent=segment))
        if self.merge_singles:
            epochs = self._merge_data_objects(epochs)
        for name, child_node in node['epocharrays'].items():
            if "EpochArray" in name:
                epochs.append(self._read_epocharray(child_node, parent=segment))
        segment.epochs = epochs

        events = []
        for name, child_node in node['events'].items():
            if "Event" in name:
                events.append(self._read_event(child_node, parent=segment))
        if self.merge_singles:
            events = self._merge_data_objects(events)
        for name, child_node in node['eventarrays'].items():
            if "EventArray" in name:
                events.append(self._read_eventarray(child_node, parent=segment))
        segment.events = events

        spiketrains = []
        for name, child_node in node['spikes'].items():
            raise NotImplementedError('Spike objects not yet handled.')
        for name, child_node in node['spiketrains'].items():
            if "SpikeTrain" in name:
                spiketrains.append(self._read_spiketrain(child_node, parent=segment))
        segment.spiketrains = spiketrains

        segment.block = parent
        return segment
Example #36
    def create_segment(self, parent=None, name='Segment'):
        segment = Segment()

        segment.block = parent

        self._assign_basic_attributes(segment, name=name)
        self._assign_datetime_attributes(segment)
        self._assign_index_attribute(segment)

        self._create_segment_children(segment)

        self._assign_annotations(segment)

        return segment
Example #37
    def create_segment(self, parent=None, name='Segment'):
        segment = Segment()

        segment.block = parent

        self._assign_basic_attributes(segment, name=name)
        self._assign_datetime_attributes(segment)
        self._assign_index_attribute(segment)

        self._create_segment_children(segment)

        self._assign_annotations(segment)

        return segment
Example #38
 def read_segment(self, lazy=False, cascade=True, group=0, series=0):
     seg = Segment(name='test')
     if cascade:
         tree = getbyroute(self.pul.tree, [0, group, series])
         for sw, sweep in enumerate(tree['children']):
             if sw == 0:
                 starttime = pq.Quantity(float(sweep['contents'].swTimer), 's')
             for ch, channel in enumerate(sweep['children']):
                 sig = self.read_analogsignal(group=group,
                                              series=series,
                                              sweep=sw,
                                              channel=ch)
                 # list() so that .remove() also works on Python 3
                 annotations = list(sweep['contents'].__dict__.keys())
                 annotations.remove('readlist')
                 for a in annotations:
                     d = {a: str(sweep['contents'].__dict__[a])}
                     sig.annotate(**d)
                 sig.t_start = pq.Quantity(float(sig.annotations['swTimer']), 's') - starttime
                 seg.analogsignals.append(sig)
         annotations = list(tree['contents'].__dict__.keys())
         annotations.remove('readlist')
         for a in annotations:
             d = {a: str(tree['contents'].__dict__[a])}
             seg.annotate(**d)
     create_many_to_one_relationship(seg)
     # add protocols to signals
     for sig_index, sig in enumerate(seg.analogsignals):
         pgf_index = sig.annotations['pgf_index']
         st_rec = self.pgf.tree['children'][pgf_index]['contents']
         chnls = [ch for ch in self.pgf.tree['children'][pgf_index]['children']]
         for ch_index, chnl in enumerate(chnls):
             ep_start = sig.t_start
             for se_epoch_index, se_epoch in enumerate(chnl['children']):
                 se_rec = se_epoch['contents']
                 se_duration = pq.Quantity(float(se_rec.seDuration), 's')
                 if not int(se_rec.seVoltageSource):
                     se_voltage = pq.Quantity(float(se_rec.seVoltage), 'V')
                 else:
                     se_voltage = pq.Quantity(float(chnl['contents'].chHolding), 'V')
                 epoch = neo.Epoch(ep_start, se_duration, 'protocol_epoch',
                                   value=se_voltage, channel_index=ch_index)
                 fully_annototate(chnl, epoch)
                 epoch.annotations['sig_index'] = sig_index
                 ep_start = ep_start + se_duration
                 seg.epochs.append(epoch)
     return seg
Example #39
 def setUp(self):
     self.segment = Segment()
     self.epoch = Epoch(name='my epoch')
     self.segment.epochs.append(self.epoch)
     self.signal = AnalogSignal(np.random.randn(1000, 1),
                                units='V',
                                sampling_rate=1 * pq.Hz,
                                name='my signal')
     self.segment.analogsignals.append(self.signal)
     self.trials = pd.DataFrame()
     self.trials.name = 'trials'
     self.segment2 = Segment()
     self.segment2.epochs.append(self.epoch)
     self.segment2.analogsignals.append(self.signal)
     self.segment2.dataframes = [self.trials]
Example #40
    def read_block(self, lazy=False, cascade=True):
        # TODO read block
        blk = Block()
        if cascade:
            seg = Segment(file_origin=self._absolute_folder_path)

            for name in self._processing:
                if (name == "Position"):
                    seg.irregularlysampledsignals += self.read_tracking(
                        path="")
                if (name == "LFP"):
                    seg.analogsignals += self.read_analogsignal(path="")
                if (name == "EventWaveform"):
                    seg.spiketrains += self.read_spiketrain(path="")

                for key in self._processing[name]:
                    if (key == "Position"):
                        seg.irregularlysampledsignals += self.read_tracking(
                            path=name)
                    if (key == "LFP"):
                        seg.analogsignals += self.read_analogsignal(path=name)
                    if (key == "EventWaveform"):
                        seg.spiketrains += self.read_spiketrain(path=name)

            #blk.channel_indexes = self._channel_indexes

            blk.segments += [seg]

            # TODO add duration
            #seg.duration = self._duration

            # TODO May need to "populate_RecordingChannel"

        #blk.create_many_to_one_relationship()
        return blk
Example #41
 def setUp(self):
     self.evt = Event(times=np.arange(0, 100, 1) * pq.s,
                      name='Ch1',
                      labels=np.repeat(np.array(['t0', 't1'], dtype='S'),
                                       50))
     self.evt2 = Event(times=np.arange(0, 100, 3) * pq.s,
                       name='Ch2',
                       labels=np.repeat(np.array(['t2', 't3'], dtype='S'),
                                        17))
     self.segment = Segment()
     self.segment.events.append(self.evt)
     self.segment.events.append(self.evt2)
     self.df = pd.DataFrame(data=[[1, 0], [1, 1]],
                            index=['start', 'stop'],
                            columns=['Ch1', 'Ch2'])
     self.startoftrial = ['start']
     self.epochs = ['results']
     self.name = 'MyEvents'
     self.typeframe = pd.DataFrame(data=['start', 'results'],
                                   columns=['type'],
                                   index=['start', 'stop'])
     ProcessEvents(seg=self.segment,
                   tolerance=1,
                   evtframe=self.df,
                   name=self.name)
     self.columns = ['time', 'event', 'trial_idx', 'results',
                     'with_previous_results', 'event_type']
Example #42
    def test_segment_with_proxy(self):
        seg = Segment()

        proxy_anasig = AnalogSignalProxy(
            rawio=self.reader,
            global_channel_indexes=None,
            block_index=0,
            seg_index=0,
        )
        seg.analogsignals.append(proxy_anasig)

        proxy_sptr = SpikeTrainProxy(rawio=self.reader,
                                     unit_index=0,
                                     block_index=0,
                                     seg_index=0)
        seg.spiketrains.append(proxy_sptr)

        proxy_event = EventProxy(rawio=self.reader,
                                 event_channel_index=0,
                                 block_index=0,
                                 seg_index=0)
        seg.events.append(proxy_event)

        proxy_epoch = EpochProxy(rawio=self.reader,
                                 event_channel_index=1,
                                 block_index=0,
                                 seg_index=0)
        seg.epochs.append(proxy_epoch)
Example #43
 def read_segment(self, lazy=False, cascade=True):
     segment = Segment(file_origin=self.filename)
     if cascade:
         segment.analogsignals.append(
             self.read_analogsignal(lazy=lazy, cascade=cascade))
         segment.analogsignals[-1].segment = segment
     return segment
Example #44
 def setUp(self):
     self.params = {'test0': 'y1', 'test1': ['deeptest'], 'test2': True}
     self.seg = Segment()
     self.epc = Epoch(times=[10, 20, 30, 40, 50] * pq.s,
                      durations=[10, 5, 7, 14, 9] * pq.ms,
                      labels=np.array(['btn0', 'btn1', 'btn2', 'btn0', 'btn3'],
                                      dtype='S'),
                      **self.params)
     self.epc.segment = self.seg
Example #45
    def __save_segment(self):
        '''
        Write the segment to the Block if it exists
        '''
        # if this is the beginning of the first condition, then we don't want
        # to save, so exit
        # but set __seg from None to False so we know next time to create a
        # segment even if there are no spike in the condition
        if self.__seg is None:
            self.__seg = False
            return

        if not self.__seg:
            # create dummy values if there are no SpikeTrains in this condition
            self.__seg = Segment(file_origin=self._filename, **self.__params)
            self.__spiketimes = []

        times = pq.Quantity(self.__spiketimes, dtype=np.float32, units=pq.ms)
        train = SpikeTrain(times,
                           t_start=0 * pq.ms,
                           t_stop=self.__t_stop * pq.ms,
                           file_origin=self._filename)

        self.__seg.spiketrains = [train]
        self.__unit.spiketrains.append(train)
        self._blk.segments.append(self.__seg)

        # set an empty segment
        # from now on, we need to set __seg to False rather than None so
        # that if there is a condition with no SpikeTrains we know
        # to create an empty Segment
        self.__seg = False
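The None/False handling above is a three-state sentinel: None means the first condition has not started, False means the previous condition was closed and a fresh Segment is needed, and a Segment instance means spikes are being accumulated. A stripped-down sketch of the same pattern:

# Sketch of the three-state sentinel used by __save_segment above:
#   None    -> before the first condition, nothing to save yet
#   False   -> previous condition closed, create a Segment on demand
#   Segment -> currently accumulating
from neo.core import Block, Segment

class ConditionWriter(object):
    def __init__(self):
        self._seg = None

    def save(self, block):
        if self._seg is None:      # beginning of the first condition
            self._seg = False
            return
        if not self._seg:          # condition without spikes: dummy segment
            self._seg = Segment()
        block.segments.append(self._seg)
        self._seg = False          # arm for the next condition

blk = Block()
writer = ConditionWriter()
writer.save(blk)                   # first call only flips the sentinel
writer.save(blk)                   # second call appends a (dummy) Segment
print(len(blk.segments))           # 1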
Example #46
0
 def read_segment(self, lazy=False, cascade=True):
     data, metadata = self._read_file_contents()
     annotations = dict((k, metadata.get(k, 'unknown')) for k in ("label", "variable", "first_id", "last_id"))
     seg = Segment(**annotations)
     if cascade:
         if metadata['variable'] == 'spikes':
             for i in range(metadata['first_index'], metadata['last_index'] + 1):
                 spiketrain = self._extract_spikes(data, metadata, i, lazy)
                 if spiketrain is not None:
                     seg.spiketrains.append(spiketrain)
             seg.annotate(dt=metadata['dt']) # store dt for SpikeTrains only, as can be retrieved from sampling_period for AnalogSignal
         else:
             signal = self._extract_signals(data, metadata, lazy)
             if signal is not None:
                 seg.analogsignals.append(signal)
         seg.create_many_to_one_relationship()
     return seg
Example #47
0
    def test__children(self):
        signal = self.signals[0]

        segment = Segment(name='seg1')
        segment.analogsignalarrays = [signal]
        segment.create_many_to_one_relationship()

        rcg = RecordingChannelGroup(name='rcg1')
        rcg.analogsignalarrays = [signal]
        rcg.create_many_to_one_relationship()

        self.assertEqual(signal._container_child_objects, ())
        self.assertEqual(signal._data_child_objects, ())
        self.assertEqual(signal._single_parent_objects,
                         ('Segment', 'RecordingChannelGroup'))
        self.assertEqual(signal._multi_child_objects, ())
        self.assertEqual(signal._multi_parent_objects, ())
        self.assertEqual(signal._child_properties, ())

        self.assertEqual(signal._single_child_objects, ())

        self.assertEqual(signal._container_child_containers, ())
        self.assertEqual(signal._data_child_containers, ())
        self.assertEqual(signal._single_child_containers, ())
        self.assertEqual(signal._single_parent_containers,
                         ('segment', 'recordingchannelgroup'))
        self.assertEqual(signal._multi_child_containers, ())
        self.assertEqual(signal._multi_parent_containers, ())

        self.assertEqual(signal._child_objects, ())
        self.assertEqual(signal._child_containers, ())
        self.assertEqual(signal._parent_objects,
                         ('Segment', 'RecordingChannelGroup'))
        self.assertEqual(signal._parent_containers,
                         ('segment', 'recordingchannelgroup'))

        self.assertEqual(signal.children, ())
        self.assertEqual(len(signal.parents), 2)
        self.assertEqual(signal.parents[0].name, 'seg1')
        self.assertEqual(signal.parents[1].name, 'rcg1')

        signal.create_many_to_one_relationship()
        signal.create_many_to_many_relationship()
        signal.create_relationship()
        assert_neo_object_is_compliant(signal)
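The parent lists checked above are wired up by create_many_to_one_relationship(), which walks each child list of the container and sets the child's back-reference. A minimal sketch:

# Sketch: the back-link set up by create_many_to_one_relationship().
import quantities as pq
from neo.core import Segment, Epoch

seg = Segment(name='seg1')
epc = Epoch(times=[1, 2] * pq.s, durations=[1, 1] * pq.s)
seg.epochs.append(epc)
seg.create_many_to_one_relationship()
assert epc.segment is seg          # the child now points back to its parent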
Example #48
0
    def test__children(self):
        params = {'testarg2': 'yes', 'testarg3': True}
        evta = EventArray([1.1, 1.5, 1.7]*pq.ms,
                          labels=np.array(['test event 1',
                                           'test event 2',
                                           'test event 3'], dtype='S'),
                          name='test', description='tester',
                          file_origin='test.file',
                          testarg1=1, **params)
        evta.annotate(testarg1=1.1, testarg0=[1, 2, 3])
        assert_neo_object_is_compliant(evta)

        segment = Segment(name='seg1')
        segment.eventarrays = [evta]
        segment.create_many_to_one_relationship()

        self.assertEqual(evta._container_child_objects, ())
        self.assertEqual(evta._data_child_objects, ())
        self.assertEqual(evta._single_parent_objects, ('Segment',))
        self.assertEqual(evta._multi_child_objects, ())
        self.assertEqual(evta._multi_parent_objects, ())
        self.assertEqual(evta._child_properties, ())

        self.assertEqual(evta._single_child_objects, ())

        self.assertEqual(evta._container_child_containers, ())
        self.assertEqual(evta._data_child_containers, ())
        self.assertEqual(evta._single_child_containers, ())
        self.assertEqual(evta._single_parent_containers, ('segment',))
        self.assertEqual(evta._multi_child_containers, ())
        self.assertEqual(evta._multi_parent_containers, ())

        self.assertEqual(evta._child_objects, ())
        self.assertEqual(evta._child_containers, ())
        self.assertEqual(evta._parent_objects, ('Segment',))
        self.assertEqual(evta._parent_containers, ('segment',))

        self.assertEqual(evta.children, ())
        self.assertEqual(len(evta.parents), 1)
        self.assertEqual(evta.parents[0].name, 'seg1')

        evta.create_many_to_one_relationship()
        evta.create_many_to_many_relationship()
        evta.create_relationship()
        assert_neo_object_is_compliant(evta)
Example #49
0
    def read_block(self,
                   lazy=False,
                   cascade=True,
                   channel_index=None
                   ):
        """
        Arguments:
            channel_index: can be int, iterable or None to select one, many or all channel(s)

        """

        blk = Block()
        if cascade:
            seg = Segment(file_origin=self._filename)
            blk.segments += [seg]

            if channel_index is not None:
                # `is not None` so that channel 0 is not mistaken for "all channels"
                if isinstance(channel_index, int):
                    channel_index = [channel_index]
                if isinstance(channel_index, list):
                    channel_index = np.array(channel_index)
            else:
                channel_index = np.arange(0, self._attrs['shape'][1])

            rcg = RecordingChannelGroup(name='all channels',
                                        channel_indexes=channel_index)
            blk.recordingchannelgroups.append(rcg)

            for idx in channel_index:
                # read nested analogsignal
                ana = self.read_analogsignal(channel_index=idx,
                                             lazy=lazy,
                                             cascade=cascade,
                                             )
                chan = RecordingChannel(index=int(idx))
                seg.analogsignals += [ana]
                chan.analogsignals += [ana]
                rcg.recordingchannels.append(chan)
            seg.duration = (self._attrs['shape'][0]
                            / self._attrs['kwik']['sample_rate']) * pq.s

            # neo.tools.populate_RecordingChannel(blk)
        blk.create_many_to_one_relationship()
        return blk
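A hedged usage sketch for the reader above (the class name KwikIO and the file path are assumptions; channel_index accepts an int, an iterable, or None for all channels, per the docstring):

# Hedged sketch; KwikIO and the path are assumptions.
from neo.io import KwikIO   # assumed import path

io = KwikIO(filename='recording.kwik')
blk_all = io.read_block()                        # every channel
blk_two = io.read_block(channel_index=[0, 2])    # channels 0 and 2 only
seg = blk_two.segments[0]
print(len(seg.analogsignals), seg.duration)      # 2 signals, duration in s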
Example #50
0
    def test_read_nev_data(self):
        t_start, t_stop = 0 * pq.s, 1000 * pq.s

        nio = NeuralynxIO(self.sn, use_cache='never')
        seg = Segment('testsegment')

        filename = 'Events'
        nio.read_nev(filename + '.nev', seg, t_start=t_start, t_stop=t_stop)

        timestamps = []
        nttls = []
        names = []
        event_ids = []

        with open(self.pd + '/%s.txt' % filename) as datafile:
            for i, line in enumerate(datafile):
                line = line.strip('\xef\xbb\xbf')
                entries = line.split('\t')
                nttls.append(int(entries[5]))
                timestamps.append(int(entries[3]))
                names.append(entries[10].rstrip('\r\n'))
                event_ids.append(int(entries[4]))

        timestamps = (np.array(timestamps) * pq.microsecond -
                      nio.parameters_global['t_start'])
        # mask, keeping only events in the requested time window
        mask = np.where(timestamps < t_stop)[0]

        # return if no event fits criteria
        if len(mask) == 0:
            return
        timestamps = timestamps[mask]
        nttls = np.asarray(nttls)[mask]
        names = np.asarray(names)[mask]
        event_ids = np.asarray(event_ids)[mask]

        for i in range(len(timestamps)):
            events = seg.filter({'nttl': nttls[i]}, objects=Event)
            events = [e for e in events
                      if (e.annotations['marker_id'] == event_ids[i] and
                          e.labels == names[i])]
            self.assertTrue(len(events) == 1)
            self.assertTrue(timestamps[i] in events[0].times)
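The lookup above relies on Segment.filter(), which matches children against a dict of annotations. A minimal sketch:

# Sketch: filter a Segment's events by annotation, as the test does.
import numpy as np
import quantities as pq
from neo.core import Segment, Event

seg = Segment()
evt = Event(times=[0.5] * pq.s, labels=np.array(['ttl'], dtype='S'))
evt.annotate(nttl=3, marker_id=1)
seg.events.append(evt)

hits = seg.filter({'nttl': 3}, objects=Event)    # annotation must match
assert len(hits) == 1 and hits[0] is evt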
Example #51
0
    def create_all_annotated(cls):
        times = cls.rquant(1, pq.s)
        signal = cls.rquant(1, pq.V)
        blk = Block()
        blk.annotate(**cls.rdict(3))

        seg = Segment()
        seg.annotate(**cls.rdict(4))
        blk.segments.append(seg)

        asig = AnalogSignal(signal=signal, sampling_rate=pq.Hz)
        asig.annotate(**cls.rdict(2))
        seg.analogsignals.append(asig)

        isig = IrregularlySampledSignal(times=times, signal=signal,
                                        time_units=pq.s)
        isig.annotate(**cls.rdict(2))
        seg.irregularlysampledsignals.append(isig)

        epoch = Epoch(times=times, durations=times)
        epoch.annotate(**cls.rdict(4))
        seg.epochs.append(epoch)

        event = Event(times=times)
        event.annotate(**cls.rdict(4))
        seg.events.append(event)

        spiketrain = SpikeTrain(times=times, t_stop=pq.s, units=pq.s)
        d = cls.rdict(6)
        d["quantity"] = pq.Quantity(10, "mV")
        d["qarray"] = pq.Quantity(range(10), "mA")
        spiketrain.annotate(**d)
        seg.spiketrains.append(spiketrain)

        chx = ChannelIndex(name="achx", index=[1, 2], channel_ids=[0, 10])
        chx.annotate(**cls.rdict(5))
        blk.channel_indexes.append(chx)

        unit = Unit()
        unit.annotate(**cls.rdict(2))
        chx.units.append(unit)

        return blk
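Every annotate() call above simply merges keyword arguments into the object's annotations dict, which is why the helper works identically for every container and data type. A short round-trip sketch:

# Sketch: annotations round-trip, the mechanism the helper relies on.
import quantities as pq
from neo.core import Segment

seg = Segment()
seg.annotate(subject='rat01', quality=5, stim=pq.Quantity(10, 'mV'))
print(seg.annotations['subject'], seg.annotations['stim'])   # rat01 10.0 mV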
Example #52
0
    def test__children(self):
        params = {'testarg2': 'yes', 'testarg3': True}
        epc = Epoch(1.5*pq.ms, duration=20*pq.ns,
                    label='test epoch', name='test', description='tester',
                    file_origin='test.file',
                    testarg1=1, **params)
        epc.annotate(testarg1=1.1, testarg0=[1, 2, 3])
        assert_neo_object_is_compliant(epc)

        segment = Segment(name='seg1')
        segment.epochs = [epc]
        segment.create_many_to_one_relationship()

        self.assertEqual(epc._container_child_objects, ())
        self.assertEqual(epc._data_child_objects, ())
        self.assertEqual(epc._single_parent_objects, ('Segment',))
        self.assertEqual(epc._multi_child_objects, ())
        self.assertEqual(epc._multi_parent_objects, ())
        self.assertEqual(epc._child_properties, ())

        self.assertEqual(epc._single_child_objects, ())

        self.assertEqual(epc._container_child_containers, ())
        self.assertEqual(epc._data_child_containers, ())
        self.assertEqual(epc._single_child_containers, ())
        self.assertEqual(epc._single_parent_containers, ('segment',))
        self.assertEqual(epc._multi_child_containers, ())
        self.assertEqual(epc._multi_parent_containers, ())

        self.assertEqual(epc._child_objects, ())
        self.assertEqual(epc._child_containers, ())
        self.assertEqual(epc._parent_objects, ('Segment',))
        self.assertEqual(epc._parent_containers, ('segment',))

        self.assertEqual(epc.children, ())
        self.assertEqual(len(epc.parents), 1)
        self.assertEqual(epc.parents[0].name, 'seg1')

        epc.create_many_to_one_relationship()
        epc.create_many_to_many_relationship()
        epc.create_relationship()
        assert_neo_object_is_compliant(epc)
Example #53
0
    def test__children(self):
        signal = self.signals[0]

        segment = Segment(name="seg1")
        segment.analogsignals = [signal]
        segment.create_many_to_one_relationship()

        rchan = RecordingChannel(name="rchan1")
        rchan.analogsignals = [signal]
        rchan.create_many_to_one_relationship()

        self.assertEqual(signal._container_child_objects, ())
        self.assertEqual(signal._data_child_objects, ())
        self.assertEqual(signal._single_parent_objects, ("Segment", "RecordingChannel"))
        self.assertEqual(signal._multi_child_objects, ())
        self.assertEqual(signal._multi_parent_objects, ())
        self.assertEqual(signal._child_properties, ())

        self.assertEqual(signal._single_child_objects, ())

        self.assertEqual(signal._container_child_containers, ())
        self.assertEqual(signal._data_child_containers, ())
        self.assertEqual(signal._single_child_containers, ())
        self.assertEqual(signal._single_parent_containers, ("segment", "recordingchannel"))
        self.assertEqual(signal._multi_child_containers, ())
        self.assertEqual(signal._multi_parent_containers, ())

        self.assertEqual(signal._child_objects, ())
        self.assertEqual(signal._child_containers, ())
        self.assertEqual(signal._parent_objects, ("Segment", "RecordingChannel"))
        self.assertEqual(signal._parent_containers, ("segment", "recordingchannel"))

        self.assertEqual(signal.children, ())
        self.assertEqual(len(signal.parents), 2)
        self.assertEqual(signal.parents[0].name, "seg1")
        self.assertEqual(signal.parents[1].name, "rchan1")

        signal.create_many_to_one_relationship()
        signal.create_many_to_many_relationship()
        signal.create_relationship()
        assert_neo_object_is_compliant(signal)
Example #54
0
    def read_segment(self,
                     lazy=False,
                     cascade=True,
                     delimiter='\t',
                     t_start=0. * pq.s,
                     unit=pq.s,
                     ):
        """
        Arguments:
            delimiter  :  columns delimiter in file  '\t' or one space or two space or ',' or ';'
            t_start : time start of all spiketrain 0 by default
            unit : unit of spike times, can be a str or directly a Quantities
        """
        unit = pq.Quantity(1, unit)

        seg = Segment(file_origin=os.path.basename(self.filename))
        if not cascade:
            return seg

        f = open(self.filename, 'r')
        for i, line in enumerate(f):
            alldata = line[:-1].split(delimiter)
            if alldata[-1] == '':
                alldata = alldata[:-1]
            if alldata[0] == '':
                alldata = alldata[1:]
            if lazy:
                spike_times = [ ]
                t_stop = t_start
            else:
                spike_times = np.array(alldata).astype('f')
                t_stop = spike_times.max()*unit

            sptr = SpikeTrain(spike_times * unit, t_start=t_start, t_stop=t_stop)
            if lazy:
                sptr.lazy_shape = len(alldata)

            sptr.annotate(channel_index=i)
            seg.spiketrains.append(sptr)
        f.close()

        seg.create_many_to_one_relationship()
        return seg
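A hedged round-trip sketch for the reader above (SpikeTimesTextIO is a hypothetical name for the containing class; the format is one row of delimited spike times per unit, as the parser implies):

# Hedged sketch; SpikeTimesTextIO is a hypothetical name for the class above.
import os
import tempfile

tmp = tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False)
tmp.write('0.1\t0.5\t0.9\n')   # spike times of unit 0
tmp.write('0.2\t0.4\n')        # spike times of unit 1
tmp.close()

io = SpikeTimesTextIO(filename=tmp.name)   # hypothetical instantiation
seg = io.read_segment(delimiter='\t')
print(len(seg.spiketrains))                              # 2
print(seg.spiketrains[0].annotations['channel_index'])   # 0
os.remove(tmp.name)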
Example #55
0
    def test__children(self):
        segment = Segment(name="seg1")
        segment.spikes = [self.spike1]
        segment.create_many_to_one_relationship()

        unit = Unit(name="unit1")
        unit.spikes = [self.spike1]
        unit.create_many_to_one_relationship()

        self.assertEqual(self.spike1._container_child_objects, ())
        self.assertEqual(self.spike1._data_child_objects, ())
        self.assertEqual(self.spike1._single_parent_objects, ("Segment", "Unit"))
        self.assertEqual(self.spike1._multi_child_objects, ())
        self.assertEqual(self.spike1._multi_parent_objects, ())
        self.assertEqual(self.spike1._child_properties, ())

        self.assertEqual(self.spike1._single_child_objects, ())

        self.assertEqual(self.spike1._container_child_containers, ())
        self.assertEqual(self.spike1._data_child_containers, ())
        self.assertEqual(self.spike1._single_child_containers, ())
        self.assertEqual(self.spike1._single_parent_containers, ("segment", "unit"))
        self.assertEqual(self.spike1._multi_child_containers, ())
        self.assertEqual(self.spike1._multi_parent_containers, ())

        self.assertEqual(self.spike1._child_objects, ())
        self.assertEqual(self.spike1._child_containers, ())
        self.assertEqual(self.spike1._parent_objects, ("Segment", "Unit"))
        self.assertEqual(self.spike1._parent_containers, ("segment", "unit"))

        self.assertEqual(self.spike1.children, ())
        self.assertEqual(len(self.spike1.parents), 2)
        self.assertEqual(self.spike1.parents[0].name, "seg1")
        self.assertEqual(self.spike1.parents[1].name, "unit1")

        self.spike1.create_many_to_one_relationship()
        self.spike1.create_many_to_many_relationship()
        self.spike1.create_relationship()
        assert_neo_object_is_compliant(self.spike1)
Example #56
0
def proc_src_comments(srcfile, filename):
    '''Get the comments in an src file that has been processed by the
    official matlab function.  See proc_src for details'''
    comm_seg = Segment(name='Comments', file_origin=filename)
    commentarray = srcfile['comments'].flatten()[0]
    senders = [res[0] for res in commentarray['sender'].flatten()]
    texts = [res[0] for res in commentarray['text'].flatten()]
    timeStamps = [res[0, 0] for res in commentarray['timeStamp'].flatten()]

    timeStamps = np.array(timeStamps, dtype=np.float32)
    t_start = timeStamps.min()
    timeStamps = pq.Quantity(timeStamps - t_start, units=pq.d).rescale(pq.s)
    texts = np.array(texts, dtype='S')
    senders = np.array(senders, dtype='S')
    t_start = brainwaresrcio.convert_brainwaresrc_timestamp(t_start.tolist())

    comments = Event(times=timeStamps, labels=texts, senders=senders)
    comm_seg.events = [comments]
    comm_seg.rec_datetime = t_start

    return comm_seg
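The timestamp handling above is a plain unit conversion: timestamps stored in days are zeroed on the earliest comment and rescaled to seconds. A worked sketch:

# Sketch: the day-to-second rescale used for the comment timestamps.
import numpy as np
import quantities as pq

stamps = np.array([10.0, 10.25, 10.5], dtype=np.float32)   # days
t0 = stamps.min()
rel = pq.Quantity(stamps - t0, units=pq.d).rescale(pq.s)
print(rel)   # [    0. 21600. 43200.] s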
Example #57
0
    def test_read_ncs_data(self):
        t_start, t_stop = 0, 500 * 512  # in samples

        nio = NeuralynxIO(self.sn, use_cache='never')
        seg = Segment('testsegment')

        for el_id, el_dict in nio.parameters_ncs.items():
            filepath = nio.parameters_ncs[el_id]['recording_file_name']
            filename = filepath.split('/')[-1].split('\\')[-1].split('.')[0]
            nio.read_ncs(filename, seg, t_start=t_start, t_stop=t_stop)
            anasig = seg.filter({'electrode_id': el_id},
                                objects=AnalogSignal)[0]

            target_data = np.zeros((16679, 512))
            with open(self.pd + '/%s.txt' % filename) as datafile:
                for i, line in enumerate(datafile):
                    line = line.strip('\xef\xbb\xbf')
                    entries = line.split()
                    target_data[i, :] = np.asarray(entries[4:])

            target_data = target_data.reshape((-1, 1)) * el_dict['ADBitVolts']

            np.testing.assert_array_equal(target_data[:len(anasig)],
                                          anasig.magnitude)
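The reshape above flattens the per-record layout (one row of 512 samples per record) into a single column before scaling by the ADC resolution. A small numpy sketch of that step:

# Sketch: flatten per-record samples into one column, then scale to volts.
import numpy as np

records = np.arange(6).reshape((3, 2))   # stand-in for (n_records, 512)
adbitvolts = 0.5                         # stand-in for el_dict['ADBitVolts']
column = records.reshape((-1, 1)) * adbitvolts
print(column.ravel())                    # [0.  0.5 1.  1.5 2.  2.5]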
Example #58
0
    def read_segment(self, import_neuroshare_segment=True,
                     lazy=False, cascade=True):
        """
        Arguments:
            import_neuroshare_segment: whether to import neuroshare 'segment' entities
                as SpikeTrains with their associated waveforms, or to skip them entirely.

        """
        seg = Segment(file_origin=os.path.basename(self.filename))
        
        if sys.platform.startswith('win'):
            neuroshare = ctypes.windll.LoadLibrary(self.dllname)
        elif sys.platform.startswith('linux'):
            neuroshare = ctypes.cdll.LoadLibrary(self.dllname)
        # note: darwin (macOS) is not handled
        neuroshare = DllWithError(neuroshare)

        # API version
        info = ns_LIBRARYINFO()
        neuroshare.ns_GetLibraryInfo(ctypes.byref(info), ctypes.sizeof(info))
        seg.annotate(neuroshare_version=str(info.dwAPIVersionMaj) + '.' + str(info.dwAPIVersionMin))

        if not cascade:
            return seg


        # open file
        hFile = ctypes.c_uint32(0)
        neuroshare.ns_OpenFile(ctypes.c_char_p(self.filename), ctypes.byref(hFile))
        fileinfo = ns_FILEINFO()
        neuroshare.ns_GetFileInfo(hFile, ctypes.byref(fileinfo), ctypes.sizeof(fileinfo))
        
        # read all entities
        for dwEntityID in range(fileinfo.dwEntityCount):
            entityInfo = ns_ENTITYINFO()
            neuroshare.ns_GetEntityInfo( hFile, dwEntityID, ctypes.byref(entityInfo), ctypes.sizeof(entityInfo))

            # EVENT
            if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_EVENT':
                pEventInfo = ns_EVENTINFO()
                neuroshare.ns_GetEventInfo ( hFile,  dwEntityID,  ctypes.byref(pEventInfo), ctypes.sizeof(pEventInfo))

                if pEventInfo.dwEventType == 0:  # TEXT
                    pData = ctypes.create_string_buffer(pEventInfo.dwMaxDataLength)
                elif pEventInfo.dwEventType == 1:  # CSV
                    pData = ctypes.create_string_buffer(pEventInfo.dwMaxDataLength)
                elif pEventInfo.dwEventType == 2:  # 8-bit
                    pData = ctypes.c_byte(0)
                elif pEventInfo.dwEventType == 3:  # 16-bit
                    pData = ctypes.c_int16(0)
                elif pEventInfo.dwEventType == 4:  # 32-bit
                    pData = ctypes.c_int32(0)
                pdTimeStamp = ctypes.c_double(0.)
                pdwDataRetSize = ctypes.c_uint32(0)

                ea = Event(name=str(entityInfo.szEntityLabel))
                if not lazy:
                    times = []
                    labels = []
                    for dwIndex in range(entityInfo.dwItemCount):
                        neuroshare.ns_GetEventData(hFile, dwEntityID, dwIndex,
                                                   ctypes.byref(pdTimeStamp), ctypes.byref(pData),
                                                   ctypes.sizeof(pData), ctypes.byref(pdwDataRetSize))
                        times.append(pdTimeStamp.value)
                        labels.append(str(pData.value))
                    ea.times = times * pq.s
                    ea.labels = np.array(labels, dtype='S')
                else:
                    ea.lazy_shape = entityInfo.dwItemCount
                seg.eventarrays.append(ea)

            # analog
            if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_ANALOG':
                pAnalogInfo = ns_ANALOGINFO()

                neuroshare.ns_GetAnalogInfo(hFile, dwEntityID, ctypes.byref(pAnalogInfo),
                                            ctypes.sizeof(pAnalogInfo))
                dwIndexCount = entityInfo.dwItemCount

                if lazy:
                    signal = [] * pq.Quantity(1, pAnalogInfo.szUnits)
                else:
                    pdwContCount = ctypes.c_uint32(0)
                    pData = np.zeros((entityInfo.dwItemCount,), dtype='float64')
                    total_read = 0
                    while total_read < entityInfo.dwItemCount:
                        dwStartIndex = ctypes.c_uint32(total_read)
                        dwStopIndex = ctypes.c_uint32(entityInfo.dwItemCount - total_read)

                        neuroshare.ns_GetAnalogData(hFile, dwEntityID, dwStartIndex,
                                                    dwStopIndex, ctypes.byref(pdwContCount),
                                                    pData[total_read:].ctypes.data_as(ctypes.POINTER(ctypes.c_double)))
                        total_read += pdwContCount.value

                    signal = pq.Quantity(pData, units=pAnalogInfo.szUnits, copy=False)

                #t_start
                dwIndex = 0
                pdTime = ctypes.c_double(0)
                neuroshare.ns_GetTimeByIndex( hFile,  dwEntityID,  dwIndex, ctypes.byref(pdTime))

                anaSig = AnalogSignal(signal,
                                      sampling_rate=pAnalogInfo.dSampleRate * pq.Hz,
                                      t_start=pdTime.value * pq.s,
                                      name=str(entityInfo.szEntityLabel),
                                      )
                anaSig.annotate(probe_info=str(pAnalogInfo.szProbeInfo))
                if lazy:
                    anaSig.lazy_shape = entityInfo.dwItemCount
                seg.analogsignals.append(anaSig)


            #segment
            if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_SEGMENT' and import_neuroshare_segment:

                pdwSegmentInfo = ns_SEGMENTINFO()
                if not str(entityInfo.szEntityLabel).startswith('spks'):
                    continue

                neuroshare.ns_GetSegmentInfo(hFile, dwEntityID,
                                             ctypes.byref(pdwSegmentInfo), ctypes.sizeof(pdwSegmentInfo))
                nsource = pdwSegmentInfo.dwSourceCount

                pszMsgBuffer = ctypes.create_string_buffer(" " * 256)
                neuroshare.ns_GetLastErrorMsg(ctypes.byref(pszMsgBuffer), 256)

                for dwSourceID in range(pdwSegmentInfo.dwSourceCount):
                    pSourceInfo = ns_SEGSOURCEINFO()
                    neuroshare.ns_GetSegmentSourceInfo(hFile, dwEntityID, dwSourceID,
                                                       ctypes.byref(pSourceInfo), ctypes.sizeof(pSourceInfo))

                if lazy:
                    sptr = SpikeTrain([] * pq.s, name=str(entityInfo.szEntityLabel),
                                      t_stop=0. * pq.s)
                    sptr.lazy_shape = entityInfo.dwItemCount
                else:
                    pdTimeStamp = ctypes.c_double(0.)
                    dwDataBufferSize = pdwSegmentInfo.dwMaxSampleCount * pdwSegmentInfo.dwSourceCount
                    pData = np.zeros((dwDataBufferSize), dtype='float64')
                    pdwSampleCount = ctypes.c_uint32(0)
                    pdwUnitID = ctypes.c_uint32(0)

                    nsample = int(dwDataBufferSize)
                    times = np.empty((entityInfo.dwItemCount), dtype='f')
                    waveforms = np.empty((entityInfo.dwItemCount, nsource, nsample), dtype='f')
                    for dwIndex in range(entityInfo.dwItemCount):
                        neuroshare.ns_GetSegmentData(
                            hFile, dwEntityID, dwIndex,
                            ctypes.byref(pdTimeStamp),
                            pData.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
                            dwDataBufferSize * 8, ctypes.byref(pdwSampleCount),
                            ctypes.byref(pdwUnitID))

                        times[dwIndex] = pdTimeStamp.value
                        waveforms[dwIndex, :, :] = pData[:nsample * nsource].reshape(nsample, nsource).transpose()

                    sptr = SpikeTrain(times=pq.Quantity(times, units='s', copy=False),
                                      t_stop=times.max(),
                                      waveforms=pq.Quantity(waveforms, units=str(pdwSegmentInfo.szUnits), copy=False),
                                      left_sweep=nsample / 2. / float(pdwSegmentInfo.dSampleRate) * pq.s,
                                      sampling_rate=float(pdwSegmentInfo.dSampleRate) * pq.Hz,
                                      name=str(entityInfo.szEntityLabel),
                                      )
                seg.spiketrains.append(sptr)


            # neuralevent
            if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_NEURALEVENT':

                pNeuralInfo = ns_NEURALINFO()
                neuroshare.ns_GetNeuralInfo(hFile, dwEntityID,
                                            ctypes.byref(pNeuralInfo), ctypes.sizeof(pNeuralInfo))

                if lazy:
                    times = [] * pq.s
                    t_stop = 0 * pq.s
                else:
                    pData = np.zeros((entityInfo.dwItemCount,), dtype='float64')
                    dwStartIndex = 0
                    dwIndexCount = entityInfo.dwItemCount
                    neuroshare.ns_GetNeuralData(hFile, dwEntityID, dwStartIndex,
                                                dwIndexCount, pData.ctypes.data_as(ctypes.POINTER(ctypes.c_double)))
                    times = pData * pq.s
                    t_stop = times.max()
                sptr = SpikeTrain(times, t_stop=t_stop,
                                  name=str(entityInfo.szEntityLabel))
                if lazy:
                    sptr.lazy_shape = entityInfo.dwItemCount
                seg.spiketrains.append(sptr)

        # close
        neuroshare.ns_CloseFile(hFile)

        seg.create_many_to_one_relationship()
        return seg
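A hedged usage sketch for the reader above (the class name NeuroshareIO and both paths are assumptions):

# Hedged sketch; NeuroshareIO and both paths are assumptions.
from neo.io import NeuroshareIO   # assumed import path

io = NeuroshareIO(filename='data.mcd', dllname='nsMCDLibrary.so')
seg = io.read_segment(import_neuroshare_segment=True)
print(seg.annotations['neuroshare_version'])
for st in seg.spiketrains:
    print(st.name, len(st))
for sig in seg.analogsignals:
    print(sig.name, sig.sampling_rate)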