def default_data(block=None, n_chidx=1, n_units=1):
    """Generate a test Block populated with ChannelIndex, Unit and SpikeTrain
    objects, for use in the spike-sorting tests below."""

    # generate a new block if none is provided, otherwise attach to the given one
    if block is None:
        block = Block()

    for id in range(n_chidx):
        sorting_hash = elephant.spike_sorting.SpikeSorter.get_sorting_hash({
            'channel_index': id,
            'random annotation': np.random.randint(0, 10**10)})
        chidx = ChannelIndex([], sorting_hash=sorting_hash)
        chidx.block = block
        block.channel_indexes.append(chidx)

    for chidx in block.channel_indexes:
        for id in range(n_units):
            unit = Unit(unit_id=id)
            chidx.units.append(unit)
            unit.channel_index = chidx

            # unit k carries k spike trains, each holding one random spike in [0, st_id] s
            for st_id in range(id):
                st = SpikeTrain(np.random.uniform(0, st_id, 1) * pq.s,
                                t_start=0 * pq.s,
                                t_stop=st_id * pq.s,
                                spiketrain_id=st_id)
                unit.spiketrains.append(st)
                st.unit = unit

    block.create_relationship()
    return block
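
# Usage sketch (not part of the original source): build a test block with two
# channel groups holding three units each; unit k carries k spike trains.
example_block = default_data(n_chidx=2, n_units=3)
assert len(example_block.channel_indexes) == 2
assert len(example_block.channel_indexes[0].units) == 3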
Example #2
    def test_write_read_single_spike(self):
        block1 = Block()
        seg = Segment('segment1')
        spiketrain1 = SpikeTrain([1] * pq.s,
                                 t_stop=10 * pq.s,
                                 sampling_rate=1 * pq.Hz)
        spiketrain1.annotate(yep='yop')
        block1.segments.append(seg)
        seg.spiketrains.append(spiketrain1)

        # write block
        filename = self.get_local_path('matlabiotestfile.mat')
        io1 = self.ioclass(filename)
        io1.write_block(block1)

        # read block
        io2 = self.ioclass(filename)
        block2 = io2.read_block()

        self.assertEqual(block1.segments[0].spiketrains[0],
                         block2.segments[0].spiketrains[0])

        # test annotations
        spiketrain2 = block2.segments[0].spiketrains[0]
        assert 'yep' in spiketrain2.annotations
        assert spiketrain2.annotations['yep'] == 'yop'
    def saveCCData(self):

        # unique current amplitudes used, sorted ascending
        currentAmpsSet = sorted({float(x.magnitude) for x in self.currentAmps})

        self.CCData = Block('Current Clamp Data')
        self.CCData.segments = [
            Segment(name='Current Of ' + str(iAmp) + 'nA')
            for iAmp in currentAmpsSet
        ]
        for iAmp, vTrace in zip(self.currentAmps, self.voltageTraces):
            presSegInd = currentAmpsSet.index(iAmp)
            self.CCData.segments[presSegInd].analogsignals.append(vTrace)
            self.CCData.segments[presSegInd].events.append(
                Event(time=vTrace.t_start, label=str(iAmp)))
            self.CCData.segments[presSegInd].epochs.append(
                Epoch(time=vTrace.t_start - 50 * qu.ms,
                      duration=50 * qu.ms,
                      label=str(self.restingMembranePotentials[presSegInd])))

        writer = NeoHdf5IO(
            os.path.join(
                os.path.split(self.ephysFile)[0], self.expName + '_CC.hdf5'))
        writer.write_block(self.CCData)
        writer.close()
    def test_write_read_single_spike(self):
        block1 = Block()
        seg = Segment('segment1')
        spiketrain = SpikeTrain([1] * pq.s,
                                t_stop=10 * pq.s,
                                sampling_rate=1 * pq.Hz)
        block1.segments.append(seg)
        seg.spiketrains.append(spiketrain)

        # write block
        filename = self.get_filename_path('matlabiotestfile.mat')
        io1 = self.ioclass(filename)
        io1.write_block(block1)

        # read block
        io2 = self.ioclass(filename)
        block2 = io2.read_block()

        self.assertEqual(block1.segments[0].spiketrains[0],
                         block2.segments[0].spiketrains[0])
    def setUp(self):
        self.block = default_data(n_chidx=1, n_units=1)

        sorting_file = 'testdata'
        if os.path.exists(sorting_file + '_spikesorting.hdf5'):
            os.remove(sorting_file + '_spikesorting.hdf5')

        self.sorting_hash = self.block.channel_indexes[0].annotations[
            'sorting_hash']

        save_spikesorting(sorting_file,
                          self.block,
                          sorting_hash=self.sorting_hash)

        self.new_block = Block(type='loaded block')
        load_spikesorting(self.new_block,
                          sorting_file=sorting_file,
                          sorting_hash=self.sorting_hash)

        self.object_classes = [
            'ChannelIndex', 'Unit', 'SpikeTrain', 'Segment', 'AnalogSignal'
        ]
Example #6
from neo import (Block, Segment,
                 AnalogSignal, IrregularlySampledSignal,
                 Event, Epoch, SpikeTrain,
                 ChannelIndex, Unit)
from neo.io.nixio import NixIO

import numpy as np
import quantities as pq

block1 = Block(name="nix-raw-block1", description="The 1st block")
block2 = Block(name="nix-raw-block2", description="The 2nd block")

for block in (block1, block2):
    ch_count = 0
    asig_count = 0
    nsegments = 2
    x = np.linspace(0, 1, 30)
    y = np.linspace(0, 1, 50)
    z = np.linspace(0, 1, 100)
    data_a = np.transpose((x,))
    data_b = np.transpose((y, y, y))
    data_c = np.transpose((z, z, z, z, z))
    # total channel count across the three signals: 1 + 3 + 5 = 9
    nchannels = data_a.shape[1] + data_b.shape[1] + data_c.shape[1]

    sampling_rate = pq.Quantity(1, "Hz")

    indexes = np.arange(nchannels)
    for cidx, signal in enumerate([data_a, data_b, data_c]):
        indexes = np.arange(signal.shape[1]) + ch_count
        ch_count += signal.shape[1]
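
        # Sketch of a plausible continuation (assumed, not in the original
        # snippet): wrap each array in an AnalogSignal and register its
        # channels through a ChannelIndex; names and units are illustrative.
        chx = ChannelIndex(index=indexes,
                           name="channel group %d" % cidx)
        chx.block = block
        block.channel_indexes.append(chx)
        asig = AnalogSignal(signal, units="mV", sampling_rate=sampling_rate,
                            name="signal %d" % asig_count)
        asig_count += 1
        chx.analogsignals.append(asig)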

# Cut the data into analysis epochs around each start event. add_epoch and
# cut_segment_by_epoch come from neo.utils; data_segment and start_event are
# assumed to have been created earlier in the original script.
from neo.utils import add_epoch, cut_segment_by_epoch

pre = -10 * pq.ms
post = 15 * pq.ms
epoch = add_epoch(
    data_segment,
    event1=start_event, event2=None,
    pre=pre, post=post,
    attach_result=False,
    name='analysis_epochs')

# Create new segments of data cut according to the analysis epochs of the
# 'analysis_epochs' Neo Epoch object. The time axes of all segments are aligned
# such that each segment starts at time 0 (parameter reset_time); annotations
# describing the analysis epoch are carried over to the segments. A new Neo
# Block named "data_cut_to_analysis_epochs" is created to capture all cut
# analysis epochs.
cut_trial_block = Block(name="data_cut_to_analysis_epochs")
cut_trial_block.segments = cut_segment_by_epoch(
    data_segment, epoch, reset_time=True)

# =============================================================================
# Plot data
# =============================================================================

# Determine the first existing trial ID i from the Event object containing all
# start events. Then, by calling the filter() function of the Neo Block
# "data_cut_to_analysis_epochs" containing the data cut into the analysis
# epochs, we ask to return all Segments annotated by the behavioral trial ID i.
# In this case this call should return one matching analysis epoch around TS-ON
# belonging to behavioral trial ID i. For monkey N, this is trial ID 1, for
# monkey L this is trial ID 2 since trial ID 1 is not a correct trial.
trial_id = int(np.min(start_event.annotations['trial_id']))
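
# The filter call described above, as a sketch (not in the original snippet);
# it assumes the cut segments carry the behavioral trial ID under the
# annotation key 'trial_id'.
trial_segments = cut_trial_block.filter(
    targdict={'trial_id': trial_id}, objects='Segment')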
Example #8
    def uploadToGNode(self):

        blk = Block()
        blk.name = self.blockNameProc
        blk.file_origin = self.originalFile
        blk.file_datetime = asctime()
        blk.description = 'Regions of Interest of electrophysiological recordings of a vibration sensitive neuron'
        blk = self.GNodeSession.set(blk)

        expSec = self.mainSec.sections[self.expName + '_Experiment']
        freqProp = expSec.properties['FrequenciesUsed']
        writtenFreq = getValuesOfProperty(freqProp)
        durProp = expSec.properties['PulseInputDurations']
        writtenDur = getValuesOfProperty(durProp)
        intervalProp = expSec.properties['PulseInputIntervals']
        writtenIntervals = getValuesOfProperty(intervalProp)

        blk.section = expSec
        blk = self.GNodeSession.set(blk)

        count = 0
        for (freq, amp, resp, stim, dur, inter) in zip(
                self.stimFreqs, self.stimAmps, self.responseVTraces,
                self.stimTraces, self.stimDur, self.stimInterval):

            count += 1

            print('Uploading Segment' + str(count))
            seg = self.GNodeSession.set(
                Segment(name=blk.name + '_seg' + str(count), index=count))

            seg.block = blk
            seg = self.GNodeSession.set(seg)

            resp.name = 'Membrane Potential'
            resp.description = 'Response to the associated vibration stimulus applied to the antenna.'
            stim.name = 'Vibration Stimulus'
            stim.description = 'Vibration Stimulus applied to the antenna'

            resp = self.GNodeSession.set(resp)
            stim = self.GNodeSession.set(stim)

            resp.segment = seg
            stim.segment = seg

            resp = self.GNodeSession.set(resp)
            stim = self.GNodeSession.set(stim)

            metadata = []

            metadata.append(freqProp.values[find_nearest_Id(writtenFreq,
                                                            freq)])
            if min(abs(writtenDur - dur)).magnitude < 5:
                metadata.append(durProp.values[find_nearest_Id(
                    writtenDur, dur)])
                metadata.append(intervalProp.values[find_nearest_Id(
                    writtenIntervals, inter)])

            seg.metadata = metadata
            seg = self.GNodeSession.set(seg)

            print('Uploading Segment' + str(count) + ' Done')
Example #9
    def uploadToGNode(self):

        self.csvData = extractCSVMetaData(self.csvFile, self.expName)

        self.dataBlockToUpload = Block(name=self.blockName, file_origin=self.expName)

        raw_seg = Segment(name='rawData', index=0)

        self.vibrationSignal.name = 'Vibration Stimulus'
        self.vibrationSignal.description = 'Vibration Stimulus applied to the honey bee antenna'
        self.voltageSignal.name = 'Membrane Potential'
        self.voltageSignal.description = 'Vibration Sensitive inter-neuron membrane potential'
        self.vibrationSignal.segment = raw_seg
        self.voltageSignal.segment = raw_seg

        raw_seg.analogsignals.append(self.vibrationSignal)
        raw_seg.analogsignals.append(self.voltageSignal)

        if len(self.dataBlock.segments[0].analogsignals) > 2:

            self.currentSignal.name = 'Current Signal'
            self.currentSignal.description = 'Indicates whether a current is being injected or not. The magnitudes ' \
                                             'are given in an event array'

            self.currentSignal.segment = raw_seg
            raw_seg.analogsignals.append(self.currentSignal)

            if len(self.dataBlock.segments[0].eventarrays) == 2:
                raw_seg.eventarrays.append(self.dataBlock.segments[0].eventarrays[1])
                self.dataBlock.segments[0].eventarrays[1].segment = raw_seg

        raw_seg.block = self.dataBlockToUpload
        self.dataBlockToUpload.segments.append(raw_seg)

        self.doc = odml.Document(author="Ajayrama K.", version="1.0")

        self.mainSec = odml.Section(name=self.expName, type='experiment')
        self.doc.append(self.mainSec)

        expSummary = odml.Section(name='VibrationStimulus', type='experiment/electrophysiology')

        def quantity_parser(lst):
            return [odml.Value(data=float(x), unit=x.dimensionality.string)
                    for x in lst]

        frequencies = quantity_parser(self.csvData['freqs'])
        if frequencies:
            expSummary.append(odml.Property(name='FrequenciesUsed', value=frequencies))

        durations = quantity_parser(self.csvData['pulse'][0])
        if durations:
            expSummary.append(odml.Property(name='PulseInputDurations', value=durations))

        intervals = quantity_parser(self.csvData['pulse'][1])
        if intervals:
            expSummary.append(odml.Property(name='PulseInputIntervals', value=intervals))

        expSummary.append(odml.Property(name='SpontaneousActivityPresence', value=self.csvData['spont']))

        if self.csvData['resp'] != '':
            expSummary.append(odml.Property(name='NatureOfResponse', value=self.csvData['resp']))

        self.mainSec.append(expSummary)

        print(asctime() + ' : Uploading metadata')
        doc = self.session.set_all(self.doc)
        print(asctime() + ' : Uploading metadata Done')

        print(asctime() + ' : Refreshing metadata')
        mainSec = self.session.get(doc.sections[0].location, refresh=True, recursive=True)
        print(asctime() + ' : Refreshing metadata Done')

        self.dataBlockToUpload.section = mainSec

        print(asctime() + ' : Uploading Data')
        blkLoc = self.session.set_all(self.dataBlockToUpload)
        print(asctime() + ' : Uploading Data Done')
Example #10
"""
Example for usecases.rst
"""

from itertools import cycle
import numpy as np
from quantities import ms, mV, kHz
import matplotlib.pyplot as plt
from neo import Block, Segment, ChannelView, Group, SpikeTrain, AnalogSignal

store_signals = False

block = Block(name="probe data", tetrode_ids=["Tetrode #1", "Tetrode #2"])
block.segments = [
    Segment(name="trial #1", index=0),
    Segment(name="trial #2", index=1),
    Segment(name="trial #3", index=2)
]

n_units = {"Tetrode #1": 2, "Tetrode #2": 5}

# Create a group for each neuron, annotate each group with the tetrode from which it was recorded
groups = []
counter = 0
for tetrode_id, n in n_units.items():
    groups.extend([
        Group(name=f"neuron #{counter + i + 1}", tetrode_id=tetrode_id)
        for i in range(n)
    ])
    counter += n
block.groups.extend(groups)
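
# Sketch of a plausible continuation (assumed, not from the original): give
# every neuron one fake spike train per trial, linking each train to both its
# trial's Segment and its neuron's Group.
for segment in block.segments:
    for group in groups:
        train = SpikeTrain(np.sort(np.random.uniform(0, 1000, 20)) * ms,
                           t_stop=1000 * ms)
        segment.spiketrains.append(train)
        group.spiketrains.append(train)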
Example #11
from neo import (Block, Segment, AnalogSignal, IrregularlySampledSignal, Event,
                 Epoch, SpikeTrain, ChannelIndex, Unit)
from neo.io.nixio import NixIO

import numpy as np
import quantities as pq

for b in range(3):
    # Create a Block called example
    block = Block("example" + str(b),
                  description="The root block for this example")

    # Create a Segment called seg-ex1 and attach it to the Block
    seg_a = Segment("seg-ex1", description="Segment one")
    block.segments.append(seg_a)

    # A second segment with an added comment
    # The comment is an "annotation"; any keyword argument can be used
    seg_b = Segment("seg-ex2",
                    description="Segment two",
                    comment="Second recording set")
    block.segments.append(seg_b)

    # Generate 3 fake data signals using numpy's random function
    # The shapes of the arrays are arbitrary
    data_a = np.random.random((300, 10))
    data_b = np.random.random((1200, 3))
    data_c = np.random.random((8000, 5))

    # random sampling times for data_b
    data_b_t = np.cumsum(np.random.random(1200))
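
    # Sketch of a plausible continuation (assumed, not from the original):
    # wrap two of the arrays in signal objects, attach them to the segments,
    # and write each block to a NIX file. File name, units and rates are
    # illustrative.
    asig = AnalogSignal(data_a, units="mV",
                        sampling_rate=pq.Quantity(10, "Hz"))
    seg_a.analogsignals.append(asig)
    isig = IrregularlySampledSignal(data_b_t, data_b,
                                    units="nA", time_units="ms")
    seg_b.irregularlysampledsignals.append(isig)

    # overwrite the file on the first pass, then append the remaining blocks
    io = NixIO("nixio-example.nix", mode="ow" if b == 0 else "rw")
    io.write_block(block)
    io.close()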