Example #1
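Pickles a Block with PickleIO, reads it back, and checks that the references from SpikeTrain, Epoch, Event and IrregularlySampledSignal objects to their Unit, Segment and ChannelIndex containers survive the round trip.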
    def test__issue_285(self):
        # SpikeTrain
        train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
        unit = Unit()
        train.unit = unit
        unit.spiketrains.append(train)

        epoch = Epoch([0, 10, 20], [2, 2, 2], ["a", "b", "c"], units="ms")

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        seg.epochs.append(epoch)
        epoch.segment = seg
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].unit, Unit)
        self.assertIsInstance(r_seg.epochs[0], Epoch)
        os.remove('blk.pkl')
        # Epoch
        train = Epoch(times=np.arange(0, 30, 10) * pq.s,
                      durations=[10, 5, 7] * pq.ms,
                      labels=np.array(['btn0', 'btn1', 'btn2'], dtype='S'))
        train.segment = Segment()
        unit = Unit()
        unit.spiketrains.append(train)
        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].segment, Segment)
        os.remove('blk.pkl')
        # Event
        train = Event(np.arange(0, 30, 10) * pq.s,
                      labels=np.array(['trig0', 'trig1', 'trig2'], dtype='S'))
        train.segment = Segment()
        unit = Unit()
        unit.spiketrains.append(train)

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].segment, Segment)
        os.remove('blk.pkl')
        # IrregularlySampledSignal
        train = IrregularlySampledSignal([0.0, 1.23, 6.78], [1, 2, 3],
                                         units='mV', time_units='ms')
        train.segment = Segment()
        unit = Unit()
        train.channel_index = ChannelIndex(1)
        unit.spiketrains.append(train)

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        blk.segments.append(seg)
        blk.segments[0].block = blk

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].segment, Segment)
        self.assertIsInstance(r_seg.spiketrains[0].channel_index, ChannelIndex)
        os.remove('blk.pkl')
Example #2
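Reads spike data from one raw file per channel group: the header is parsed for timestamp/sample sizes and rates, timestamps and waveforms are read via a structured NumPy dtype, the waveforms are inverted and gain-scaled, and each channel group yields a SpikeTrain that is attached to its ChannelIndex and a newly created Unit.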
    def read_spiketrain(self):
        # TODO add parameter to allow user to read raw data or not?
        assert (SpikeTrain in self.readable_objects)

        spike_trains = []

        channel_group_files = glob.glob(
            os.path.join(self._path, self._base_filename) + ".[0-9]*")
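        # one raw data file per channel group; the numeric file extension
        # is the channel group index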
        for raw_filename in sorted(channel_group_files):
            with open(raw_filename, "rb") as f:
                params = parse_header_and_leave_cursor(f)

                channel_group_index = int(raw_filename.split(".")[-1])
                bytes_per_timestamp = params.get("bytes_per_timestamp", 4)
                bytes_per_sample = params.get("bytes_per_sample", 1)
                num_spikes = params.get("num_spikes", 0)
                num_chans = params.get("num_chans", 1)
                samples_per_spike = params.get("samples_per_spike", 50)
                timebase = int(
                    params.get("timebase", "96000 hz").split(" ")[0]) * pq.Hz
                sampling_rate = params.get("rawrate", 48000) * pq.Hz
                bytes_per_spike_without_timestamp = samples_per_spike * bytes_per_sample
                bytes_per_spike = bytes_per_spike_without_timestamp + bytes_per_timestamp

                timestamp_dtype = ">u" + str(bytes_per_timestamp)
                waveform_dtype = "<i" + str(bytes_per_sample)

                dtype = np.dtype([("times", (timestamp_dtype, 1), 1),
                                  ("waveforms", (waveform_dtype, 1),
                                   samples_per_spike)])

                data = np.fromfile(f,
                                   dtype=dtype,
                                   count=num_spikes * num_chans)
                assert_end_of_data(f)

            # times are saved for each channel
            times = data["times"][::num_chans] / timebase
            assert len(times) == num_spikes
            waveforms = data["waveforms"]
            waveforms = np.reshape(waveforms,
                                   (num_spikes, num_chans, samples_per_spike))
            # TODO HACK !!!! find out whether the recording is sig - ref or the
            # other way around; this determines the polarity of the peak, which
            # should be settable via a parameter, e.g. peak='negative'/'positive'
            waveforms = -waveforms.astype(float)

            channel_gain_matrix = np.ones(waveforms.shape)
            for i in range(num_chans):
                channel_gain_matrix[:, i, :] *= self._channel_gain(
                    channel_group_index, i)
            waveforms = scale_analog_signal(waveforms, channel_gain_matrix,
                                            self._adc_fullscale,
                                            bytes_per_sample)

            # TODO get left_sweep from the setfile?
            spike_train = SpikeTrain(times,
                                     t_stop=self._duration,
                                     waveforms=waveforms * pq.uV,
                                     sampling_rate=sampling_rate,
                                     left_sweep=0.2 * pq.ms,
                                     **params)
            spike_trains.append(spike_train)
            channel_index = self._channel_group_to_channel_index[
                channel_group_index]
            spike_train.channel_index = channel_index
            # TODO unit can have several spiketrains from different segments,
            # not necessarily relevant here though
            unit = Unit()
            unit.spiketrains.append(spike_train)
            channel_index.units.append(unit)

        return spike_trains
Example #3
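Test fixture that builds a Block with one Segment, two ChannelIndex objects and three Units, each Unit holding a SpikeTrain with random waveforms, plus an AnalogSignal and an Epoch; any existing .exdir test directory is removed first.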
    def setUp(self):
        self.fname = '/tmp/test.exdir'
        if os.path.exists(self.fname):
            shutil.rmtree(self.fname)
        self.n_channels = 5
        self.n_samples = 20
        self.n_spikes = 50
        blk = Block()
        seg = Segment()
        blk.segments.append(seg)
        chx1 = ChannelIndex(index=np.arange(self.n_channels),
                            channel_ids=np.arange(self.n_channels))
        chx2 = ChannelIndex(index=np.arange(self.n_channels),
                            channel_ids=np.arange(self.n_channels) * 2)
        blk.channel_indexes.extend([chx1, chx2])

        wf1 = np.random.random(
            (self.n_spikes, self.n_channels, self.n_samples))
        ts1 = np.sort(np.random.random(self.n_spikes))
        t_stop1 = np.ceil(ts1[-1])
        sptr1 = SpikeTrain(
            times=ts1,
            units='s',
            waveforms=wf1 * pq.V,
            name='spikes 1',
            description='sptr1',
            t_stop=t_stop1,
            **{'id': 1})
        sptr1.channel_index = chx1
        unit1 = Unit(name='unit 1')
        unit1.spiketrains.append(sptr1)
        chx1.units.append(unit1)
        seg.spiketrains.append(sptr1)

        ts2 = np.sort(np.random.random(self.n_spikes))
        t_stop2 = np.ceil(ts2[-1])
        sptr2 = SpikeTrain(
            times=ts2,
            units='s',
            waveforms=np.random.random(
                (self.n_spikes, self.n_channels, self.n_samples)) * pq.V,
            description='sptr2',
            name='spikes 2',
            t_stop=t_stop2,
            **{'id': 2})
        sptr2.channel_index = chx2
        unit2 = Unit(name='unit 2')
        unit2.spiketrains.append(sptr2)
        chx2.units.append(unit2)
        seg.spiketrains.append(sptr2)

        wf3 = np.random.random(
            (self.n_spikes, self.n_channels, self.n_samples))
        ts3 = np.sort(np.random.random(self.n_spikes))
        t_stop3 = np.ceil(ts3[-1])
        sptr3 = SpikeTrain(
            times=ts3,
            units='s',
            waveforms=wf3 * pq.V,
            description='sptr3',
            name='spikes 3',
            t_stop=t_stop3,
            **{'id': 3})
        sptr3.channel_index = chx2
        unit3 = Unit(name='unit 3')
        unit3.spiketrains.append(sptr3)
        chx2.units.append(unit3)
        seg.spiketrains.append(sptr3)

        t_stop = max([t_stop1, t_stop2, t_stop3]) * pq.s

        ana = AnalogSignal(np.random.random(self.n_samples),
                           sampling_rate=self.n_samples / t_stop,
                           units='V',
                           name='ana1',
                           description='LFP')
        assert t_stop == ana.t_stop
        seg.analogsignals.append(ana)
        epo = Epoch(np.random.random(self.n_samples),
                    durations=[1] * self.n_samples * pq.s,
                    units='s',
                    name='epo1')
        seg.epochs.append(epo)
        self.blk = blk