def default_data(block=None, n_chidx=1, n_units=1):

    # generate new block if none provided, otherwise attach to provided block
    if block is None:
        block = Block()

    for id in range(n_chidx):
        sorting_hash = elephant.spike_sorting.SpikeSorter.get_sorting_hash({
            'channel_index': id,
            'random annotation': np.random.randint(0, 10**10)
        })
        chidx = ChannelIndex([], sorting_hash=sorting_hash)
        chidx.block = block
        block.channel_indexes.append(chidx)

    for chidx in block.channel_indexes:
        for id in range(n_units):
            unit = Unit(unit_id=id)
            chidx.units.append(unit)
            unit.channel_index = chidx

            for st_id in range(id):
                st = SpikeTrain(np.random.uniform(0, st_id, 1) * pq.s,
                                t_start=0 * pq.s,
                                t_stop=st_id * pq.s,
                                spiketrain_id=st_id)
                unit.spiketrains.append(st)
                st.unit = unit

    block.create_relationship()
    return block
class SpikeSaveLoadTestCase(unittest.TestCase):
    def setUp(self):
        self.block = default_data(n_chidx=1, n_units=1)

        sorting_file = 'testdata'
        if os.path.exists(sorting_file + '_spikesorting.hdf5'):
            os.remove(sorting_file + '_spikesorting.hdf5')

        self.sorting_hash = self.block.channel_indexes[0].annotations[
            'sorting_hash']

        save_spikesorting(sorting_file,
                          self.block,
                          sorting_hash=self.sorting_hash)

        self.new_block = Block(type='loaded block')
        load_spikesorting(self.new_block,
                          sorting_file='testdata',
                          sorting_hash=self.sorting_hash)

        self.object_classes = [
            'ChannelIndex', 'Unit', 'SpikeTrain', 'Segment', 'AnalogSignal'
        ]

    def test_data_exist(self):
        for obj_class in self.object_classes:
            old_objs = self.block.list_children_by_class(obj_class)
            new_objs = self.new_block.list_children_by_class(obj_class)
            self.assertEqual(len(old_objs), len(new_objs))

    def test_for_annotations(self):
        for obj_class in self.object_classes:
            old_objs = self.block.list_children_by_class(obj_class)
            new_objs = self.new_block.list_children_by_class(obj_class)
            for id in range(len(old_objs)):
                d2 = old_objs[id].annotations
                d1 = new_objs[id].annotations
                self.assertTrue(set(d2.items()).issubset(set(d1.items())))

    def test_for_data(self):
        # This test will fail until neuralensemble issue #410 is solved
        for obj_class in self.object_classes:
            old_objs = self.block.list_children_by_class(obj_class)
            new_objs = self.new_block.list_children_by_class(obj_class)
            for id in range(len(old_objs)):
                if hasattr(old_objs[id], 'index'):
                    np.testing.assert_array_equal(old_objs[id].index,
                                                  new_objs[id].index)
                if hasattr(old_objs[id], 'times'):
                    np.testing.assert_array_equal(old_objs[id].times,
                                                  new_objs[id].times)
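# A hedged sketch of the same save/load round trip outside the test harness,
# using the helpers exactly as setUp above does ('testdata' is the file prefix
# used by the tests; save_spikesorting and load_spikesorting come from the
# spike-sorting module exercised here):
if __name__ == '__main__':
    block = default_data(n_chidx=1, n_units=1)
    sorting_hash = block.channel_indexes[0].annotations['sorting_hash']
    save_spikesorting('testdata', block, sorting_hash=sorting_hash)
    restored = Block(type='loaded block')
    load_spikesorting(restored, sorting_file='testdata',
                      sorting_hash=sorting_hash)
    print(len(restored.list_children_by_class('SpikeTrain')))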
Example #3
    def test_write_read_single_spike(self):
        block1 = Block()
        seg = Segment('segment1')
        spiketrain1 = SpikeTrain([1] * pq.s,
                                 t_stop=10 * pq.s,
                                 sampling_rate=1 * pq.Hz)
        spiketrain1.annotate(yep='yop')
        block1.segments.append(seg)
        seg.spiketrains.append(spiketrain1)

        # write block
        filename = self.get_local_path('matlabiotestfile.mat')
        io1 = self.ioclass(filename)
        io1.write_block(block1)

        # read block
        io2 = self.ioclass(filename)
        block2 = io2.read_block()

        self.assertEqual(block1.segments[0].spiketrains[0],
                         block2.segments[0].spiketrains[0])

        # test annotations
        spiketrain2 = block2.segments[0].spiketrains[0]
        assert 'yep' in spiketrain2.annotations
        assert spiketrain2.annotations['yep'] == 'yop'
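# The same round trip outside the test harness -- a sketch assuming the IO
# class under test is neo.io.NeoMatlabIO (the .mat filename suggests so) and
# that scipy is installed; the path below is made up.
def matlab_roundtrip_sketch(path='matlabiotestfile.mat'):
    from neo.io import NeoMatlabIO
    blk = Block()
    seg = Segment('segment1')
    st = SpikeTrain([1] * pq.s, t_stop=10 * pq.s, sampling_rate=1 * pq.Hz)
    st.annotate(yep='yop')
    blk.segments.append(seg)
    seg.spiketrains.append(st)
    NeoMatlabIO(path).write_block(blk)      # write the Block to a MATLAB file
    return NeoMatlabIO(path).read_block()   # read it back as a fresh Block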
    def saveCCData(self):

        currentAmpsSet = np.sort(
            list(set([float(x.magnitude)
                      for x in self.currentAmps]))).tolist()

        self.CCData = Block('Current Clamp Data')
        self.CCData.segments = [
            Segment(name='Current Of ' + unicode(iAmp) + 'nA')
            for iAmp in currentAmpsSet
        ]
        for iAmp, vTrace in zip(self.currentAmps, self.voltageTraces):
            presSegInd = currentAmpsSet.index(iAmp)
            self.CCData.segments[presSegInd].analogsignals.append(vTrace)
            self.CCData.segments[presSegInd].events.append(
                Event(time=vTrace.t_start, label=unicode(iAmp)))
            self.CCData.segments[presSegInd].epochs.append(
                Epoch(time=vTrace.t_start - 50 * qu.ms,
                      duration=50 * qu.ms,
                      label=unicode(
                          self.restingMembranePotentials[presSegInd])))

        writer = NeoHdf5IO(
            os.path.join(
                os.path.split(self.ephysFile)[0], self.expName + '_CC.hdf5'))
        writer.write_block(self.CCData)
        writer.close()
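    # NeoHdf5IO has been removed from recent neo releases; a hedged sketch of
    # an equivalent write using NixIO instead (different on-disk format, same
    # "one Block in, one Block out" idea; the output path below is made up):
    def saveCCDataNix(self, path='CC_data.nix'):
        from neo.io import NixIO
        io = NixIO(path, mode='ow')   # 'ow' creates or overwrites the file
        io.write_block(self.CCData)
        io.close()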
Example #6
    def uploadToGNode(self):

        blk = Block()
        blk.name = self.blockNameProc
        blk.file_origin = self.originalFile
        blk.file_datetime = asctime()
        blk.description = 'Regions of Interest of electrophysiological recordings of a vibration sensitive neuron'
        blk = self.GNodeSession.set(blk)

        expSec = self.mainSec.sections[self.expName + '_Experiment']
        freqProp = expSec.properties['FrequenciesUsed']
        writtenFreq = getValuesOfProperty(freqProp)
        durProp = expSec.properties['PulseInputDurations']
        writtenDur = getValuesOfProperty(durProp)
        intervalProp = expSec.properties['PulseInputIntervals']
        writtenIntervals = getValuesOfProperty(intervalProp)

        blk.section = expSec
        blk = self.GNodeSession.set(blk)

        count = 0
        for (freq, amp, resp, stim, dur, inter) in \
            zip(self.stimFreqs, self.stimAmps, self.responseVTraces, self.stimTraces, self.stimDur, self.stimInterval):

            count += 1

            print 'Uploading Segment' + str(count)
            seg = self.GNodeSession.set(Segment(name=blk.name + '_seg' + str(count), index=count))

            seg.block = blk
            seg = self.GNodeSession.set(seg)

            resp.name = 'Membrane Potential'
            resp.description = 'Response to the associated vibration stimulus applied to the antenna.'
            stim.name = 'Vibration Stimulus'
            stim.description = 'Vibration Stimulus applied to the antenna'

            resp = self.GNodeSession.set(resp)
            stim = self.GNodeSession.set(stim)

            resp.segment = seg
            stim.segment = seg

            resp = self.GNodeSession.set(resp)
            stim = self.GNodeSession.set(stim)

            metadata = []

            metadata.append(freqProp.values[find_nearest_Id(writtenFreq, freq)])
            if min(abs(writtenDur - dur)).magnitude < 5:
                metadata.append(durProp.values[find_nearest_Id(writtenDur, dur)])
                metadata.append(intervalProp.values[find_nearest_Id(writtenIntervals, inter)])

            seg.metadata = metadata
            seg = self.GNodeSession.set(seg)

            print 'Uploading Segment' + str(count) + ' Done'
            import ipdb
            ipdb.set_trace()
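# find_nearest_Id is not defined in this excerpt; a hedged guess at what it
# does, based on how it is used above (return the index of the stored value
# closest to the requested one):
def find_nearest_Id(written_values, requested):
    import numpy as np
    return int(np.argmin(np.abs(written_values - requested)))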
    def test_write_read_single_spike(self):
        block1 = Block()
        seg = Segment('segment1')
        spiketrain = SpikeTrain([1] * pq.s, t_stop=10 * pq.s, sampling_rate=1 * pq.Hz)
        block1.segments.append(seg)
        seg.spiketrains.append(spiketrain)

        # write block
        filename = BaseTestIO.get_filename_path(self, 'matlabiotestfile.mat')
        io1 = self.ioclass(filename)
        io1.write_block(block1)

        # read block
        io2 = self.ioclass(filename)
        block2 = io2.read_block()

        self.assertEqual(block1.segments[0].spiketrains[0],
                         block2.segments[0].spiketrains[0])
Example #8
from neo import (Block, Segment,
                 AnalogSignal, IrregularlySampledSignal,
                 Event, Epoch, SpikeTrain,
                 ChannelIndex, Unit)
from neo.io.nixio import NixIO

import numpy as np
import quantities as pq

block1 = Block(name="nix-raw-block1", description="The 1st block")
block2 = Block(name="nix-raw-block2", description="The 2nd block")

for block in (block1, block2):
    ch_count = 0
    asig_count = 0
    nsegments = 2
    x = np.linspace(0,1,30)
    y = np.linspace(0,1,50)
    z = np.linspace(0,1,100)
    data_a = np.transpose((x,))
    data_b = np.transpose((y,y,y))
    data_c = np.transpose((z,z,z,z,z))
    # total channel count across the three signal groups (1 + 3 + 5 = 9)
    nchannels = data_a.shape[1] + data_b.shape[1] + data_c.shape[1]

    sampling_rate = pq.Quantity(1, "Hz")

    indexes = np.arange(nchannels)
    for cidx, signal in enumerate([data_a, data_b, data_c]):
        indexes = np.arange(signal.shape[1]) + ch_count
        ch_count += signal.shape[1]
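        # A hedged sketch of how this truncated loop presumably continues: one
        # ChannelIndex per signal group, plus one AnalogSignal per segment that
        # carries the group's channels (names below are made up).
        chx = ChannelIndex(name="channel-group-%d" % cidx, index=indexes)
        chx.block = block
        block.channel_indexes.append(chx)
        while len(block.segments) < nsegments:
            block.segments.append(Segment(name="seg-%d" % len(block.segments)))
        for seg in block.segments:
            asig = AnalogSignal(signal, units=pq.mV,
                                sampling_rate=sampling_rate,
                                name="asig-%d" % asig_count)
            asig_count += 1
            seg.analogsignals.append(asig)
            chx.analogsignals.append(asig)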
# This excerpt assumes a loaded Segment named data_segment and an Event of
# trial-start triggers named start_event; add_epoch and cut_segment_by_epoch
# are assumed to come from neo.utils (available in recent neo releases).
from neo.utils import add_epoch, cut_segment_by_epoch

pre = -10 * pq.ms
post = 15 * pq.ms
epoch = add_epoch(
    data_segment,
    event1=start_event, event2=None,
    pre=pre, post=post,
    attach_result=False,
    name='analysis_epochs')

# Create new segments of data cut according to the analysis epochs of the
# 'analysis_epochs' Neo Epoch object. The time axes of all segments are aligned
# such that each segment starts at time 0 (parameter reset_time); annotations
# describing the analysis epoch are carried over to the segments. A new Neo
# Block named "data_cut_to_analysis_epochs" is created to capture all cut
# analysis epochs.
cut_trial_block = Block(name="data_cut_to_analysis_epochs")
cut_trial_block.segments = cut_segment_by_epoch(
    data_segment, epoch, reset_time=True)

# =============================================================================
# Plot data
# =============================================================================

# Determine the first existing trial ID i from the Event object containing all
# start events. Then, by calling the filter() function of the Neo Block
# "data_cut_to_analysis_epochs" containing the data cut into the analysis
# epochs, we ask to return all Segments annotated by the behavioral trial ID i.
# In this case this call should return one matching analysis epoch around TS-ON
# belonging to behavioral trial ID i. For monkey N, this is trial ID 1, for
# monkey L this is trial ID 2 since trial ID 1 is not a correct trial.
trial_id = int(np.min(start_event.annotations['trial_id']))
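# The comment above describes selecting the cut segments by their trial ID; the
# actual call is missing from this excerpt. A hedged sketch using neo's
# container filter API:
trial_segments = cut_trial_block.filter(
    targdict={'trial_id': trial_id}, objects=Segment)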
Example #10
    def uploadToGNode(self):

        blk = Block()
        blk.name = self.blockNameProc
        blk.file_origin = self.originalFile
        blk.file_datetime = asctime()
        blk.description = 'Regions of Interest of electrophysiological recordings of a vibration sensitive neuron'
        blk = self.GNodeSession.set(blk)

        expSec = self.mainSec.sections[self.expName + '_Experiment']
        freqProp = expSec.properties['FrequenciesUsed']
        writtenFreq = getValuesOfProperty(freqProp)
        durProp = expSec.properties['PulseInputDurations']
        writtenDur = getValuesOfProperty(durProp)
        intervalProp = expSec.properties['PulseInputIntervals']
        writtenIntervals = getValuesOfProperty(intervalProp)

        blk.section = expSec
        blk = self.GNodeSession.set(blk)

        count = 0
        for (freq, amp, resp, stim, dur, inter) in \
            zip(self.stimFreqs, self.stimAmps, self.responseVTraces, self.stimTraces, self.stimDur, self.stimInterval):

            count += 1

            print 'Uploading Segment' + str(count)
            seg = self.GNodeSession.set(
                Segment(name=blk.name + '_seg' + str(count), index=count))

            seg.block = blk
            seg = self.GNodeSession.set(seg)

            resp.name = 'Membrane Potential'
            resp.description = 'Response to the associated vibration stimulus applied to the antenna.'
            stim.name = 'Vibration Stimulus'
            stim.description = 'Vibration Stimulus applied to the antenna'

            resp = self.GNodeSession.set(resp)
            stim = self.GNodeSession.set(stim)

            resp.segment = seg
            stim.segment = seg

            resp = self.GNodeSession.set(resp)
            stim = self.GNodeSession.set(stim)

            metadata = []

            metadata.append(freqProp.values[find_nearest_Id(writtenFreq,
                                                            freq)])
            if min(abs(writtenDur - dur)).magnitude < 5:
                metadata.append(durProp.values[find_nearest_Id(
                    writtenDur, dur)])
                metadata.append(intervalProp.values[find_nearest_Id(
                    writtenIntervals, inter)])

            seg.metadata = metadata
            seg = self.GNodeSession.set(seg)

            print 'Uploading Segment' + str(count) + ' Done'
            import ipdb
            ipdb.set_trace()
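# getValuesOfProperty is likewise not defined in these excerpts; a hedged guess
# based on how it is used above (collect the numeric data of a legacy odml
# Property's values into a quantities array, so that e.g. writtenDur - dur
# works):
def getValuesOfProperty(prop):
    import numpy as np
    import quantities as pq
    values = np.array([v.data for v in prop.values])
    return values * pq.Quantity(1, prop.values[0].unit)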
Example #11
    def uploadToGNode(self):

        self.csvData = extractCSVMetaData(self.csvFile, self.expName)

        self.dataBlockToUpload = Block(name=self.blockName, file_origin=self.expName)

        raw_seg = Segment(name='rawData', index=0)

        self.vibrationSignal.name = 'Vibration Stimulus'
        self.vibrationSignal.description = 'Vibration Stimulus applied to the honey bee antenna'
        self.voltageSignal.name = 'Membrane Potential'
        self.voltageSignal.description = 'Vibration Sensitive inter-neuron membrane potential'
        self.vibrationSignal.segment = raw_seg
        self.voltageSignal.segment = raw_seg

        raw_seg.analogsignals.append(self.vibrationSignal)
        raw_seg.analogsignals.append(self.voltageSignal)

        if len(self.dataBlock.segments[0].analogsignals) > 2:

            self.currentSignal.name = 'Current Signal'
            self.currentSignal.description = 'Indicates whether a current is being injected or not. The magnitudes ' \
                                             'are given in an event array'

            self.currentSignal.segment = raw_seg
            raw_seg.analogsignals.append(self.currentSignal)

            if len(self.dataBlock.segments[0].eventarrays) == 2:
                raw_seg.eventarrays.append(self.dataBlock.segments[0].eventarrays[1])
                self.dataBlock.segments[0].eventarrays[1].segment = raw_seg

        raw_seg.block = self.dataBlockToUpload
        self.dataBlockToUpload.segments.append(raw_seg)

        self.doc = odml.Document(author="Ajayrama K.", version="1.0")

        self.mainSec = odml.Section(name=self.expName, type='experiment')
        self.doc.append(self.mainSec)

        expSummary = odml.Section(name='VibrationStimulus', type='experiment/electrophysiology')

        quantity_parser = lambda lst: [odml.Value(data=float(x), unit=x.dimensionality.string) for x in lst]

        frequencies = quantity_parser(self.csvData['freqs'])
        if frequencies:
            expSummary.append(odml.Property(name='FrequenciesUsed', value=frequencies))

        durations = quantity_parser(self.csvData['pulse'][0])
        if durations:
            expSummary.append(odml.Property(name='PulseInputDurations', value=durations))

        intervals = quantity_parser(self.csvData['pulse'][1])
        if intervals:
            expSummary.append(odml.Property(name='PulseInputIntervals', value=intervals))

        expSummary.append(odml.Property(name='SpontaneousActivityPresence', value=self.csvData['spont']))

        if self.csvData['resp'] != '':
            expSummary.append(odml.Property(name='NatureOfResponse', value=self.csvData['resp']))

        self.mainSec.append(expSummary)

        print asctime() + ' : Uploading metadata'
        doc = self.session.set_all(self.doc)
        print asctime() + ' : Uploading metadata Done'

        print asctime() + ' : Refreshing metadata'
        mainSec = self.session.get(doc.sections[0].location, refresh=True, recursive=True)
        print asctime() + ' : Refreshing metadata Done'

        self.dataBlockToUpload.section = mainSec

        print asctime() + ' : Uploading Data'
        blkLoc = self.session.set_all(self.dataBlockToUpload)
        print asctime() + ' : Uploading Data Done'
Example #12
"""
Example for usecases.rst
"""

from itertools import cycle
import numpy as np
from quantities import ms, mV, kHz
import matplotlib.pyplot as plt
from neo import Block, Segment, ChannelView, Group, SpikeTrain, AnalogSignal

store_signals = False

block = Block(name="probe data", tetrode_ids=["Tetrode #1", "Tetrode #2"])
block.segments = [
    Segment(name="trial #1", index=0),
    Segment(name="trial #2", index=1),
    Segment(name="trial #3", index=2)
]

n_units = {"Tetrode #1": 2, "Tetrode #2": 5}

# Create a group for each neuron, annotate each group with the tetrode from which it was recorded
groups = []
counter = 0
for tetrode_id, n in n_units.items():
    groups.extend([
        Group(name=f"neuron #{counter + i + 1}", tetrode_id=tetrode_id)
        for i in range(n)
    ])
    counter += n
block.groups.extend(groups)
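
# A brief usage sketch: the tetrode_id annotation added above lets the neurons
# recorded on a given tetrode be pulled out again later.
tetrode1_neurons = [g for g in block.groups
                    if g.annotations["tetrode_id"] == "Tetrode #1"]
assert len(tetrode1_neurons) == n_units["Tetrode #1"]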
    def fake_ephys(session=None):
        
        ephys = {"block": [], "segment": [], "eventarray": [], "event": [],
                 "epocharray": [], "epoch": [], "recordingchannelgroup": [], 
                 "recordingchannel": [], "unit": [], "spiketrain": [], 
                 "analogsignalarray": [], "analogsignal": [], 
                 "irregularlysampledsignal": [], "spike": []}

        # blocks
        for i in range(2):
            params = {
                'name': "Local Field Potential and Spike Data %d" % (i + 1),
            }
            obj = Block(**params)
            if session:
                obj = session.set(obj)
            ephys["block"].append(obj)

        # RCGs
        for i in range(2):
            params = {
                'name': "Electrode group %d" % (i + 1),
            }
            obj = RecordingChannelGroup(**params)
            obj.block = ephys['block'][0]
            if session:
                obj = session.set(obj)
            ephys["recordingchannelgroup"].append(obj)
            ephys['block'][0].recordingchannelgroups.append(obj)

        # recording channels
        for i in range(2):
            params = {
                'name': "Electrode %d" % (i + 1),
                'index': (i + 1),
            }
            obj = RecordingChannel(**params)
            obj.recordingchannelgroups.append(ephys["recordingchannelgroup"][0])
            if session:
                obj = session.set(obj)
            ephys["recordingchannel"].append(obj)
            ephys["recordingchannelgroup"][0].recordingchannels.append(obj)

        # units
        for i in range(2):
            params = {
                'name': "SUA-LFP-unit %d" % (i + 1),
            }
            obj = Unit(**params)
            obj.recordingchannelgroup = ephys["recordingchannelgroup"][0]
            if session:
                obj = session.set(obj)
            ephys["recordingchannelgroup"][0].units.append(obj)
            ephys["unit"].append(obj)

        # segments
        for i in range(4):
            params = {
                'name': "Segment %d" % (i + 1),
            }
            obj = Segment(**params)
            obj.block = ephys['block'][0]
            if session:
                obj = session.set(obj)
            ephys['block'][0].segments.append(obj)
            ephys["segment"].append(obj)

        # event arrays
        for i in range(2):
            parent = ephys['segment'][0] if i < 2 else ephys['segment'][1]
            params = {
                'name': "Event array %d" % (i + 1),
                'labels': np.array(['foo', 'bar'], dtype='S'),
                'times': np.array([1.46, 4.15]) * pq.ms,
            }
            obj = EventArray(**params)
            obj.segment = parent
            if session:
                obj = session.set(obj)
            parent.eventarrays.append(obj)
            ephys["eventarray"].append(obj)

        # events
        for i in range(2):
            parent = ephys['segment'][0] if i < 2 else ephys['segment'][1]
            params = {
                'name': "Event %d" % (i + 1),
                'label': "Event label %d" % (i + 1),
                'time': 1.56 * pq.ms,
            }
            obj = Event(**params)
            obj.segment = parent
            if session:
                obj = session.set(obj)
            parent.events.append(obj)
            ephys["event"].append(obj)

        # epoch arrays
        for i in range(2):
            parent = ephys['segment'][0] if i < 2 else ephys['segment'][1]
            params = {
                'name': "Epoch array %d" % (i + 1),
                'labels': np.array(['foo', 'bar'], dtype='S'),
                'times': np.array([1.46, 4.15]) * pq.ms,
                'durations': np.array([1.01, 1.03]) * pq.ms,
            }
            obj = EpochArray(**params)
            obj.segment = parent
            if session:
                obj = session.set(obj)
            parent.epocharrays.append(obj)
            ephys["epocharray"].append(obj)

        # epochs
        for i in range(2):
            parent = ephys['segment'][0] if i < 2 else ephys['segment'][1]
            params = {
                'name': "Epoch %d" % (i + 1),
                'label': "Epoch label %d" % (i + 1),
                'time': 1.56 * pq.ms,
                'duration': 5.23 * pq.ms,
            }
            obj = Epoch(**params)
            obj.segment = parent
            if session:
                obj = session.set(obj)
            parent.epochs.append(obj)
            ephys["epoch"].append(obj)

        # spike trains
        for i in range(2):
            segment = ephys['segment'][0] if i < 2 else ephys['segment'][1]
            unit = ephys['unit'][0] if i < 2 else ephys['unit'][1]
            params = {
                'name': "Spiketrain %d" % (i + 1),
                't_start': 0.56 * pq.ms,
                't_stop': 5.23 * pq.ms,
                'times': np.array([1.46, 4.15]) * pq.ms,
            }
            obj = SpikeTrain(**params)
            obj.segment = segment
            obj.unit = unit
            if session:
                obj = session.set(obj)
            segment.spiketrains.append(obj)
            unit.spiketrains.append(obj)
            ephys["spiketrain"].append(obj)

        # analog signal arrays
        for i in range(2):
            segment = ephys['segment'][0] if i < 2 else ephys['segment'][1]
            rcg = ephys['recordingchannelgroup'][0] if i < 3 else ephys['recordingchannelgroup'][1]
            params = {
                'name': "ASA %d" % (i + 1),
                't_start': 1.56 * pq.ms,
                'sampling_rate': 10000.0 * pq.Hz,
                'signal': np.array([[1.46, 4.15], [2.98, 3.12]]) * pq.mV,
            }
            obj = AnalogSignalArray(**params)
            obj.segment = segment
            obj.recordingchannelgroup = rcg
            if session:
                obj = session.set(obj)
            segment.analogsignalarrays.append(obj)
            rcg.analogsignalarrays.append(obj)
            ephys["analogsignalarray"].append(obj)

        # analog signals
        for i in range(2):
            segment = ephys['segment'][0] if i < 2 else ephys['segment'][1]
            rc = ephys['recordingchannel'][0] if i < 3 else ephys['recordingchannel'][1]
            params = {
                'name': "Analog signal %d" % (i + 1),
                't_start': 1.56 * pq.ms,
                'sampling_rate': 10000.0 * pq.Hz,
                'signal': np.array([1.46, 4.15]) * pq.mV,
            }
            obj = AnalogSignal(**params)
            obj.segment = segment
            obj.recordingchannel = rc
            if session:
                obj = session.set(obj)
            segment.analogsignals.append(obj)
            rc.analogsignals.append(obj)
            ephys["analogsignal"].append(obj)

        # irsa-s
        for i in range(2):
            segment = ephys['segment'][0] if i < 2 else ephys['segment'][1]
            rc = ephys['recordingchannel'][0] if i < 3 else ephys['recordingchannel'][1]
            params = {
                'name': "Irregular signal %d" % (i + 1),
                't_start': 1.56 * pq.ms,
                'signal': np.array([1.46, 4.15]) * pq.mV,
                'times': np.array([3.05, 4.05]) * pq.ms,
            }
            obj = IrregularlySampledSignal(**params)
            obj.segment = segment
            obj.recordingchannel = rc
            if session:
                obj = session.set(obj)
            segment.irregularlysampledsignals.append(obj)
            rc.irregularlysampledsignals.append(obj)
            ephys["irregularlysampledsignal"].append(obj)

        # spikes
        for i in range(2):
            segment = ephys['segment'][0] if i < 2 else ephys['segment'][1]
            unit = ephys['unit'][0] if i < 2 else ephys['unit'][1]
            params = {
                'name': "Spike waveform %d" % (i + 1),
                'time': 1.56 * pq.ms,
                'sampling_rate': 10000.0 * pq.Hz,
                'left_sweep': 1.56 * pq.ms,
                'waveform': np.array([1.46, 4.15]) * pq.mV,
            }
            obj = Spike(**params)
            obj.segment = segment
            obj.unit = unit
            if session:
                obj = session.set(obj)
            segment.spikes.append(obj)
            unit.spikes.append(obj)
            ephys["spike"].append(obj)

        return ephys
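# The snippet above targets the pre-0.4 neo object model. A rough mapping to
# the class names used by more recent neo releases, for readers adapting it:
LEGACY_TO_MODERN_NEO = {
    "RecordingChannelGroup": "ChannelIndex (later Group / ChannelView)",
    "RecordingChannel": "merged into ChannelIndex",
    "EventArray": "Event",
    "EpochArray": "Epoch",
    "AnalogSignalArray": "AnalogSignal (2-dimensional)",
    "Spike": "removed; use SpikeTrain with waveforms",
}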
Example #14
from neo import (Block, Segment, AnalogSignal, IrregularlySampledSignal, Event,
                 Epoch, SpikeTrain, ChannelIndex, Unit)
from neo.io.nixio import NixIO

import numpy as np
import quantities as pq

for b in range(3):
    # Create a Block called example
    block = Block("example" + str(b),
                  description="The root block for this example")

    # Create a Segment called seg-ex1 and attach it to the Block
    seg_a = Segment("seg-ex1", description="Segment one")
    block.segments.append(seg_a)

    # A second segment with an added comment
    # The comment is an "annotation"; any keyword argument can be used
    seg_b = Segment("seg-ex2",
                    description="Segment two",
                    comment="Second recording set")
    block.segments.append(seg_b)

    # Generate 3 fake data signals using numpy's random function
    # The shapes of the arrays are arbitrary
    data_a = np.random.random((300, 10))
    data_b = np.random.random((1200, 3))
    data_c = np.random.random((8000, 5))

    # random sampling times for data_b
    data_b_t = np.cumsum(np.random.random(1200))
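
    # A hedged sketch of a plausible continuation: wrap the regularly sampled
    # array in an AnalogSignal, the randomly sampled one in an
    # IrregularlySampledSignal, attach them to the segments created above, and
    # write each block with the NixIO imported at the top (file name made up).
    seg_a.analogsignals.append(
        AnalogSignal(data_a, units=pq.mV, sampling_rate=1 * pq.Hz))
    seg_b.irregularlysampledsignals.append(
        IrregularlySampledSignal(data_b_t * pq.s, data_b, units=pq.mV))

    io = NixIO("nix-example-" + str(b) + ".nix", mode="ow")
    io.write_block(block)
    io.close()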