 def test_get_fp_start_ttl(self, mock_tdt):
     event = neo.Event(times=[1, 2, 3] * pq.s, name='A')
     segment = neo.Segment()
     segment.events.append(event)
     segment.events.append(event)
     block = neo.Block()
     block.segments.append(segment)
     mock_tdt.return_value = block
     self.assertEqual(1, GetBehavioralEvents().get_fp_start_ttl('/path/to/data/', event_name='A'))
     self.assertEqual(1, GetBehavioralEvents().get_fp_start_ttl('/path/to/data/', event_idx=0))
Example #2
def generate_block(n_segments=3,
                   n_channels=8,
                   n_units=3,
                   data_samples=1000,
                   feature_samples=100):
    """
    Generate a block with a single recording channel group and a number of
    segments, recording channels and units with associated analog signals
    and spike trains.
    """
    feature_len = feature_samples / data_samples

    # Create container and grouping objects
    segments = [neo.Segment(index=i) for i in range(n_segments)]

    rcg = neo.RecordingChannelGroup(name='T0')
    for i in range(n_channels):
        rc = neo.RecordingChannel(name='C%d' % i, index=i)
        rc.recordingchannelgroups = [rcg]
        rcg.recordingchannels.append(rc)

    units = [neo.Unit('U%d' % i) for i in range(n_units)]
    rcg.units = units

    block = neo.Block()
    block.segments = segments
    block.recordingchannelgroups = [rcg]

    # Create synthetic data
    for seg in segments:
        feature_pos = np.random.randint(0, data_samples - feature_samples)

        # Analog signals: Noise with a single sinewave feature
        wave = 3 * np.sin(np.linspace(0, 2 * np.pi, feature_samples))
        for rc in rcg.recordingchannels:
            sig = np.random.randn(data_samples)
            sig[feature_pos:feature_pos + feature_samples] += wave

            signal = neo.AnalogSignal(sig * pq.mV, sampling_rate=1 * pq.kHz)
            seg.analogsignals.append(signal)
            rc.analogsignals.append(signal)

        # Spike trains: Random spike times with elevated rate in short period
        feature_time = feature_pos / data_samples
        for u in units:
            random_spikes = np.random.rand(20)
            feature_spikes = np.random.rand(5) * feature_len + feature_time
            spikes = np.hstack([random_spikes, feature_spikes])

            train = neo.SpikeTrain(spikes * pq.s, 1 * pq.s)
            seg.spiketrains.append(train)
            u.spiketrains.append(train)

    block.create_many_to_one_relationship()
    return block
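A usage sketch (added here for illustration, not part of the original example), assuming the legacy neo API (0.3/0.4) that still provides RecordingChannelGroup:

# Hypothetical driver for generate_block(); the argument values are illustrative.
block = generate_block(n_segments=2, n_channels=4, n_units=3)
print(len(block.segments))                           # 2
print(len(block.recordingchannelgroups[0].units))    # 3
for seg in block.segments:
    print(len(seg.analogsignals), len(seg.spiketrains))  # 4 3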
Example #3
    def create_hierarchy(cls, many_to_many):
        b = neo.Block()

        for ns in range(cls.SEGMENTS):
            b.segments.append(neo.Segment())

        channels = []
        if many_to_many:
            channels = [
                neo.RecordingChannel(name='Shared %d' % i,
                                     index=i + cls.CHANNELS)
                for i in range(cls.CHANNELS // 2)
            ]

        for ng in range(cls.CHANNEL_GROUPS):
            rcg = neo.RecordingChannelGroup()
            for nu in range(cls.UNITS):
                unit = neo.Unit()
                for ns in range(cls.SEGMENTS):
                    spike = neo.Spike(0 * pq.s)
                    unit.spikes.append(spike)
                    b.segments[ns].spikes.append(spike)

                    st = neo.SpikeTrain([] * pq.s, 0 * pq.s)
                    unit.spiketrains.append(st)
                    b.segments[ns].spiketrains.append(st)

                rcg.units.append(unit)

            if not many_to_many:
                for nc in range(cls.CHANNELS):
                    rc = neo.RecordingChannel(name='Single %d' % nc, index=nc)
                    rc.recordingchannelgroups.append(rcg)
                    rcg.recordingchannels.append(rc)
            else:
                for nc in range(cls.CHANNELS):
                    if nc % 2 == 0:
                        rc = neo.RecordingChannel(name='Single %d' % (nc // 2),
                                                  index=nc // 2)
                    else:
                        rc = channels[nc // 2]
                    rc.recordingchannelgroups.append(rcg)
                    rcg.recordingchannels.append(rc)
            rcg.channel_indexes = sp.array(
                [c.index for c in rcg.recordingchannels])
            rcg.channel_names = sp.array(
                [c.name for c in rcg.recordingchannels])

            b.recordingchannelgroups.append(rcg)

        try:
            neo.io.tools.create_many_to_one_relationship(b)
        except AttributeError:
            b.create_many_to_one_relationship()
        return b
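A minimal sketch of how this classmethod might be hosted and called; the host class and its constant values are hypothetical, and a neo version old enough to provide neo.Spike and RecordingChannel is assumed:

# Hypothetical fixture; create_hierarchy() reads these class constants,
# and the function object is assumed to be available in this scope.
class HierarchyFixture(object):
    SEGMENTS = 2
    CHANNEL_GROUPS = 2
    CHANNELS = 4
    UNITS = 3
    create_hierarchy = classmethod(create_hierarchy)

b = HierarchyFixture.create_hierarchy(many_to_many=True)
print(len(b.segments), len(b.recordingchannelgroups))  # 2 2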
Example #4
def createFileFromSegmentList(list_segment, fileName):
    """
    Store the list of segments in elphy file
    """
    bl = neo.Block()
    for seg in list_segment:
        new_seg = swapAnalogSignals(seg)
        new_seg = swapSpikeTrains(new_seg)
        bl.segments.append(new_seg)
    # Neo -> Elphy
    r = neo.io.ElphyIO(filename=fileName)
    r.write_block(bl)
Example #5
def generate_block(n_segments=3,
                   n_channels=4,
                   n_units=3,
                   data_samples=1000,
                   feature_samples=100):
    """
    Generate a block with a single recording channel group and a number of
    segments, recording channels and units with associated analog signals
    and spike trains.
    """
    feature_len = feature_samples / data_samples

    # Create Block to contain all generated data
    block = neo.Block()

    # Create multiple Segments
    block.segments = [neo.Segment(index=i) for i in range(n_segments)]
    # Create multiple ChannelIndexes
    block.channel_indexes = [
        neo.ChannelIndex(name='C%d' % i, index=i) for i in range(n_channels)
    ]

    # Attach multiple Units to each ChannelIndex
    for channel_idx in block.channel_indexes:
        channel_idx.units = [neo.Unit('U%d' % i) for i in range(n_units)]

    # Create synthetic data
    for seg in block.segments:
        feature_pos = np.random.randint(0, data_samples - feature_samples)

        # Analog signals: Noise with a single sinewave feature
        wave = 3 * np.sin(np.linspace(0, 2 * np.pi, feature_samples))
        for channel_idx in block.channel_indexes:
            sig = np.random.randn(data_samples)
            sig[feature_pos:feature_pos + feature_samples] += wave

            signal = neo.AnalogSignal(sig * pq.mV, sampling_rate=1 * pq.kHz)
            seg.analogsignals.append(signal)
            channel_idx.analogsignals.append(signal)

            # Spike trains: Random spike times with elevated rate in short period
            feature_time = feature_pos / data_samples
            for u in channel_idx.units:
                random_spikes = np.random.rand(20)
                feature_spikes = np.random.rand(5) * feature_len + feature_time
                spikes = np.hstack([random_spikes, feature_spikes])

                train = neo.SpikeTrain(spikes * pq.s, 1 * pq.s)
                seg.spiketrains.append(train)
                u.spiketrains.append(train)

    block.create_many_to_one_relationship()
    return block
Example #6
 def test_GetImagingDataTTL(self, mock_tdt):
     event1 = neo.Event(times=[1, 2, 3] * pq.s, name='A')
     event2 = neo.Event(times=[4, 5, 6] * pq.s, name='B')
     event3 = neo.Event(times=[7, 8, 9] * pq.s, name='C')
     segment = neo.Segment()
     segment.events.append(event1)
     segment.events.append(event2)
     segment.events.append(event3)
     block = neo.Block()
     block.segments.append(segment)
     mock_tdt.return_value = block
     self.assertEqual(
         3, GetImagingDataTTL('/dpath/', time_idx=-1, event_name='A'))
     self.assertEqual(
         6, GetImagingDataTTL('/dpath/', time_idx=-1, event_idx=1))
Example #7
def spike_array_to_neo(spike_array, population, t_stop):
    """
    Convert the spike array produced by PyNN 0.7 to a Neo Block
    (the data format used by PyNN 0.8)
    """
    from datetime import datetime
    segment = neo.Segment(name="I-F curve data", rec_datetime=datetime.now())
    segment.spiketrains = []
    for index in range(len(population)):
        segment.spiketrains.append(
            neo.SpikeTrain(spike_array[:, 1][spike_array[:, 0] == index],
                           t_start=0.0,
                           t_stop=t_stop,
                           units='ms',
                           source_index=index))
    data = neo.Block(name="I-F curve data")
    data.segments.append(segment)
    return data
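A hypothetical round trip for this converter: PyNN 0.7 spike arrays are (N, 2) arrays of (cell index, spike time in ms), and only len(population) is used here, so a plain list can stand in:

import numpy as np

spike_array = np.array([[0, 1.0], [0, 2.5], [1, 0.7]])
population = [None, None]   # stand-in; only its length matters
block = spike_array_to_neo(spike_array, population, t_stop=10.0)
print(len(block.segments[0].spiketrains))   # 2, one train per cell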
Example #8
    def SaveRecord(self, FileName, OverWrite=True):
        if os.path.isfile(FileName):
            if OverWrite:
                os.remove(FileName)
            else:
                print('Warning: file exists, not overwriting')
                return

        if FileName.endswith('.h5'):
            out_f = neo.io.NixIO(filename=FileName)
        elif FileName.endswith('.mat'):
            out_f = neo.io.NeoMatlabIO(filename=FileName)
        else:
            return

        out_bl = neo.Block(name='NewBlock')
        out_bl.segments.append(self.Seg)
        out_f.write_block(out_bl)
        if FileName.endswith('.h5'):
            out_f.close()
Example #9
 def get(self, variables, gather=False, filter_ids=None, clear=False,
         annotations=None):
     """Return the recorded data as a Neo `Block`."""
     variables = normalize_variables_arg(variables)
     data = neo.Block()
     data.segments = [filter_by_variables(segment, variables)
                      for segment in self.cache]
     if self._simulator.state.running: # reset() has not been called, so current segment is not in cache
         data.segments.append(self._get_current_segment(filter_ids=filter_ids, variables=variables, clear=clear))
     data.name = self.population.label
     data.description = self.population.describe()
     data.rec_datetime = data.segments[0].rec_datetime
     data.annotate(**self.metadata)
     if annotations:
         data.annotate(**annotations)
     if gather and self._simulator.state.num_processes > 1:
         data = gather_blocks(data)
     if clear:
         self.clear()
     return data
Example #10
    def _run_simulations(self, model):
        """For each step in the protocol, run simulation and store recordings"""
        recordings = neo.Block()
        print("Total protocols: {}".format(len(self.protocol)))
        for idx, item in enumerate(self.protocol.items()):
            step_name = item[0]
            step = item[1]
            segment = neo.Segment(name=step_name)
            recordings.segments.append(segment)
            segment.block = recordings

            print("{}. Current protocol: {}".format(idx+1, step_name))
            model.inject_current(step["stimuli"])
            model.run(tstop=step["total_duration"])
            signal = model.get_membrane_potential()
            stimulus_on = neo.Epoch(times=step["stimuli"]["delay"]*ms,
                                    durations=step["stimuli"]["duration"]*ms,
                                    labels="stimulus")
            segment.analogsignals.append(signal)
            segment.epochs.append(stimulus_on)
        return recordings
Example #11
def spike_detector_to_neo(spike_detector, t_stop, label=""):
    """
    Convert the spikes recorded by NEST to a Neo Block
    """
    from datetime import datetime

    segment = neo.Segment(name=label, rec_datetime=datetime.now())
    segment.spiketrains = []
    events = nest.GetStatus(spike_detector, 'events')[0]
    ids = events['senders']
    values = events['times']
    for id in np.unique(ids):
        spike_times = values[ids == id]
        segment.spiketrains.append(
            neo.SpikeTrain(spike_times,
                           t_start=0.0,
                           t_stop=t_stop,
                           units='ms',
                           source_id=int(id)))
    data = neo.Block(name=label)
    data.segments.append(segment)
    return data
Example #12
def test_load_save():
    n_channels = 5
    n_samples = 20
    n_spikes = 50
    fname = '/tmp/test_phy.exdir'
    if os.path.exists(fname):
        shutil.rmtree(fname)
    wf = np.random.random((n_spikes, n_channels, n_samples))
    ts = np.sort(np.random.random(n_spikes))
    t_stop = np.ceil(ts[-1])
    sptr = neo.SpikeTrain(times=ts, units='s', waveforms=wf * pq.V,
                          t_stop=t_stop, **{'group_id': 0})
    blk = neo.Block()
    seg = neo.Segment()
    seg.duration = t_stop
    blk.segments.append(seg)
    chx = neo.ChannelIndex(index=range(n_channels), **{'group_id': 0})
    blk.channel_indexes.append(chx)
    sptr.channel_index = chx
    unit = neo.Unit()
    unit.spiketrains.append(sptr)
    chx.units.append(unit)
    seg.spiketrains.append(sptr)
    epo = neo.Epoch()
    if os.path.exists(fname):
        shutil.rmtree(fname)
    io = neo.ExdirIO(fname)
    io.write_block(blk)
    wfswap = wf.swapaxes(1, 2)
    m = NeoModel(fname, overwrite=True)
    assert np.array_equal(m.spike_times, ts)
    assert np.array_equal(m.waveforms, wfswap)
    m.save()
    m2 = NeoModel(fname, overwrite=True)
    assert np.array_equal(m2.spike_times, ts)
    assert np.array_equal(m2.waveforms, wfswap)
    assert np.array_equal(m2.features, m.features)
    assert np.array_equal(m2.amplitudes, m.amplitudes)
    assert np.array_equal(m2.spike_clusters, m.spike_clusters)
Example #13
    def ButSaveViewClick(self):  # TODO finish save

        if self.ChkSaveRec1.isChecked():
            RecordFile, _ = QFileDialog.getSaveFileName(
                self, "Rec1 file", "", "NixIO (*.h5)")
            if RecordFile:
                out_f = neo.io.NixIO(filename=RecordFile)
                out_f.write_block(self.rec.Block)
                out_f.close()

        if self.ChkSaveRec2.isChecked():
            RecordFile, _ = QFileDialog.getSaveFileName(
                self, "Rec2 file", "", "NixIO (*.h5)")
            if RecordFile:
                out_f = neo.io.NixIO(filename=RecordFile)
                out_f.write_block(self.rec2.Block)
                out_f.close()

        if self.ChkSaveView.isChecked():
            RecordFile, _ = QFileDialog.getSaveFileName(
                self, "Current View file", "", "NixIO (*.h5)")
            if RecordFile:
                Tstart = self.SLTStart.value() * pq.s
                Tstop = self.SLTStop.value() * pq.s

                out_seg = neo.Segment(name='NewSeg')

                for sl in self.pltrec.Slots:
                    SName = sl.DispName
                    print(SName)
                    sig = sl.rec.Signal(SName)
                    out_seg.analogsignals.append(sig.time_slice(Tstart, Tstop))

                out_bl = neo.Block(name='NewBlock')
                out_bl.segments.append(out_seg)
                out_f = neo.io.NixIO(filename=RecordFile)
                out_f.write_block(out_bl)
                out_f.close()
Example #14
def build_block(data_file):
    """(plaintextfile_path) -> neo.core.block.Block
    Thie function reads a plain text file (data_file) with the data exported (per waveform) from Plexon Offline Sorter after sorting and returns a neo.Block with spiketrains ordered in any number of 10 minute segments.

    For practicality and Plexon management of channels names, units and channels have been ignored in the block structure."""

    raw_data = pd.read_csv(data_file, sep=',', header=0, usecols=[0, 1, 2])
    ord_times = raw_data.groupby(['Channel Name', 'Unit'])['Timestamp']
    new_block = neo.Block()
    chx = neo.ChannelIndex(index=None, name='MEA_60')
    new_block.channel_indexes.append(chx)
    # Next line will not work properly if last spike happens
    # exactly at the end of the recording
    num_segments = range(int(raw_data['Timestamp'].max() // 600 + 1))
    for ind in num_segments:
        seg = neo.Segment(name='segment {}'.format(ind), index=ind)
        new_block.segments.append(seg)

    for name, group in ord_times:
        time_stamps = ord_times.get_group(name).values
        inter = 600  # Number of seconds in 10 minutes
        first_seg = neo.SpikeTrain(
            time_stamps[time_stamps < inter], units='sec', t_start=0,  t_stop=inter)
        new_block.segments[0].spiketrains.append(first_seg)
        new_unit = neo.Unit(name=name)
        sptrs = [first_seg]
        for seg in num_segments[1:-1]:
            seg_train = neo.SpikeTrain(time_stamps[(time_stamps > seg * inter) & (time_stamps < ((seg + 1) * inter))],
                                       units='sec', t_start=(seg * inter), t_stop=((seg + 1) * inter))
            new_block.segments[seg].spiketrains.append(seg_train)
            sptrs.append(seg_train)
        last_seg = neo.SpikeTrain(time_stamps[time_stamps > (num_segments[-1] * inter)], units='sec',
                                  t_start=(num_segments[-1]) * inter, t_stop=((num_segments[-1] + 1) * inter))
        new_block.segments[num_segments[-1]].spiketrains.append(last_seg)
        sptrs.append(last_seg)
        new_unit.spiketrains = sptrs
        chx.units.append(new_unit)
    return new_block
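A hypothetical round trip (file name and values are illustrative): write a tiny CSV with the three columns the function expects, then build the block:

import pandas as pd

demo = pd.DataFrame({'Channel Name': ['ch1', 'ch1', 'ch2'],
                     'Unit': [1, 1, 1],
                     'Timestamp': [12.5, 650.0, 30.2]})
demo.to_csv('demo_spikes.csv', index=False)
blk = build_block('demo_spikes.csv')
print(len(blk.segments))   # 2: two 10-minute segments cover 650 s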
Example #15
def runtest_neo(io, N):
    times = []
    blk = neo.Block()
    seg = neo.Segment()
    blk.segments.append(seg)
    step = 1
    if N >= 10:
        step = N // 10
    Ns = list()
    for n in range(0, N + step, step):
        seg.analogsignals = []
        for ni in range(n):
            seg.analogsignals.append(
                neo.AnalogSignal(signal=[0],
                                 units="V",
                                 sampling_rate=1 * pq.Hz))
        t0 = time()
        io.write_block(blk)
        times.append(time() - t0)
        Ns.append(n)
        print(f" :: {n}/{N} {int(n/N*100):3d}%", end="\r")

    print(f" :: Last write time: {times[-1]:7.05f} s")

    print("Verifying neo-nix file")
    assert len(io.nix_file.blocks) == 1
    blk = io.nix_file.blocks[0]
    assert blk.type == "neo.block"

    assert len(blk.groups) == 1
    grp = blk.groups[0]
    assert grp.type == "neo.segment"

    assert len(blk.data_arrays) == N
    assert len(grp.data_arrays) == N

    return Ns, times
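A plausible driver for this benchmark (assumed, not from the original script); neo's NixIO accepts mode='ow' to overwrite an existing file:

import neo

io = neo.NixIO('bench.nix', mode='ow')   # file name is illustrative
Ns, times = runtest_neo(io, N=50)
io.close()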
Example #16
 def get(self,
         variables,
         gather=False,
         filter_ids=None,
         clear=False,
         annotations=None):
     """Return the recorded data as a Neo `Block`."""
     variables = normalize_variables_arg(variables)
     data = neo.Block()
     data.segments = [
         filter_by_variables(segment, variables) for segment in self.cache
     ]
     if self._simulator.state.running:  # reset() has not been called, so current segment is not in cache
         data.segments.append(
             self._get_current_segment(filter_ids=filter_ids,
                                       variables=variables,
                                       clear=clear))
     # collect channel indexes
     for segment in data.segments:
         for signal in segment.analogsignals:
             data.channel_indexes.append(signal.channel_index)
     data.name = self.population.label
     data.description = self.population.describe()
     data.rec_datetime = data.segments[0].rec_datetime
     data.annotate(**self.metadata)
     if annotations:
         data.annotate(**annotations)
     if gather and self._simulator.state.num_processes > 1:
         data = gather_blocks(data)
         if hasattr(
                 self.population.celltype,
                 "always_local") and self.population.celltype.always_local:
             data = remove_duplicate_spiketrains(data)
     if clear:
         self.clear()
     return data
Example #17
File: sonata.py Project: wau/PyNN
    def read(self):
        """
        Read all data* from a SONATA dataset directory.

        Returns a list of Blocks.

        (*Currently only spike data supported)
        """
        file_path = join(self.base_dir, self.spike_file)
        block = neo.Block(file_origin=file_path)
        segment = neo.Segment(file_origin=file_path)
        spikes_file = h5py.File(file_path, 'r')
        for gid in np.unique(spikes_file['spikes']['gids']):
            index = spikes_file['spikes']['gids'].value == gid
            spike_times = spikes_file['spikes']['timestamps'][index]
            segment.spiketrains.append(
                neo.SpikeTrain(spike_times,
                               t_stop=spike_times.max() + 1.0,
                               t_start=0.0,
                               units='ms',
                               source_id=gid)
            )
        block.segments.append(segment)
        return [block]
Example #18
def load_dataset(metadata,
                 blk=None,
                 lazy=False,
                 signal_group_mode='split-all',
                 filter_events_from_epochs=False):
    """
    Load a dataset.

    ``metadata`` may be a :class:`MetadataSelector
    <neurotic.datasets.metadata.MetadataSelector>` or a simple dictionary
    containing the appropriate data.

    The ``data_file`` in ``metadata`` is read into a Neo :class:`Block
    <neo.core.Block>` using an automatically detected :mod:`neo.io` class
    if ``lazy=False`` or a :mod:`neo.rawio` class if ``lazy=True``. If
    ``data_file`` is unspecified, an empty Neo Block is created instead. If a
    Neo Block is passed as ``blk``, ``data_file`` is ignored.

    Epochs and events loaded from ``annotations_file`` and
    ``epoch_encoder_file`` and spike trains loaded from ``tridesclous_file``
    are added to the Neo Block.

    If ``lazy=False``, parameters given in ``metadata`` are used to apply
    filters to the signals, to detect spikes using amplitude discriminators, to
    calculate smoothed firing rates from spike trains, to detect bursts of
    spikes, and to calculate the rectified area under the curve (RAUC) for each
    signal.
    """

    if blk is None:
        if metadata.get('data_file', None) is not None:
            # read in the electrophysiology data
            blk = _read_data_file(metadata, lazy, signal_group_mode)
        else:
            # create an empty Block
            blk = neo.Block()
            seg = neo.Segment()
            blk.segments.append(seg)
    else:
        # a Block was provided
        if not isinstance(blk, neo.Block):
            raise TypeError('blk must be a neo.Block')

    # update the real-world start time of the data if provided
    if metadata.get('rec_datetime', None) is not None:
        if isinstance(metadata['rec_datetime'], datetime.datetime):
            blk.rec_datetime = metadata['rec_datetime']
        else:
            logger.warning(
                'Ignoring rec_datetime because it is not a properly formatted datetime: {}'
                .format(metadata['rec_datetime']))

    # apply filters to signals if not using lazy loading of signals
    if not lazy:
        blk = _apply_filters(metadata, blk)

    # copy events into epochs and vice versa
    epochs_from_events = [
        neo.Epoch(name=ev.name,
                  times=ev.times,
                  labels=ev.labels,
                  durations=np.zeros_like(ev.times))
        for ev in blk.segments[0].events
    ]
    events_from_epochs = [
        neo.Event(name=ep.name, times=ep.times, labels=ep.labels)
        for ep in blk.segments[0].epochs
    ]
    if not filter_events_from_epochs:
        blk.segments[0].epochs += epochs_from_events
    blk.segments[0].events += events_from_epochs

    # read in annotations
    annotations_dataframe = _read_annotations_file(metadata)
    blk.segments[0].epochs += _create_neo_epochs_from_dataframe(
        annotations_dataframe, metadata,
        _abs_path(metadata, 'annotations_file'), filter_events_from_epochs)
    blk.segments[0].events += _create_neo_events_from_dataframe(
        annotations_dataframe, metadata, _abs_path(metadata,
                                                   'annotations_file'))

    # read in epoch encoder file
    epoch_encoder_dataframe = _read_epoch_encoder_file(metadata)
    blk.segments[0].epochs += _create_neo_epochs_from_dataframe(
        epoch_encoder_dataframe, metadata,
        _abs_path(metadata, 'epoch_encoder_file'), filter_events_from_epochs)
    blk.segments[0].events += _create_neo_events_from_dataframe(
        epoch_encoder_dataframe, metadata,
        _abs_path(metadata, 'epoch_encoder_file'))

    # classify spikes by amplitude if not using lazy loading of signals
    if not lazy:
        blk.segments[0].spiketrains += _run_amplitude_discriminators(
            metadata, blk)

    # read in spikes identified by spike sorting using tridesclous
    spikes_dataframe = _read_spikes_file(metadata, blk)
    if spikes_dataframe is not None:
        if blk.segments[0].analogsignals:
            t_start = blk.segments[0].analogsignals[
                0].t_start  # assuming all AnalogSignals start at the same time
            t_stop = blk.segments[0].analogsignals[
                0].t_stop  # assuming all AnalogSignals end at the same time
            sampling_period = blk.segments[0].analogsignals[
                0].sampling_period  # assuming all AnalogSignals have the same sampling rate
            blk.segments[
                0].spiketrains += _create_neo_spike_trains_from_dataframe(
                    spikes_dataframe, metadata, t_start, t_stop,
                    sampling_period)
        else:
            logger.warning(
                'Ignoring tridesclous_file because the sampling rate and start time could not be inferred from analog signals'
            )

    # calculate smoothed firing rates from spike trains if not using lazy
    # loading of signals
    if not lazy:
        blk = _compute_firing_rates(metadata, blk)

    # identify bursts from spike trains if not using lazy loading of signals
    if not lazy:
        blk.segments[0].epochs += _run_burst_detectors(metadata, blk)

    # alphabetize epoch and event channels by name
    blk.segments[0].epochs.sort(key=lambda ep: ep.name or '')
    blk.segments[0].events.sort(key=lambda ev: ev.name or '')

    # compute rectified area under the curve (RAUC) for each signal if not
    # using lazy loading of signals
    if not lazy and metadata.get('rauc_bin_duration', None) is not None:
        for sig in blk.segments[0].analogsignals:
            rauc_sig = _elephant_tools.rauc(
                signal=sig,
                baseline=metadata.get('rauc_baseline', None),
                bin_duration=metadata['rauc_bin_duration'] * pq.s,
            )
            rauc_sig.name = sig.name + ' RAUC'
            sig.annotate(
                rauc_sig=rauc_sig,
                rauc_baseline=metadata.get('rauc_baseline', None),
                rauc_bin_duration=metadata['rauc_bin_duration'] * pq.s,
            )

    return blk
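A hypothetical call with a plain dictionary standing in for a MetadataSelector; the keys shown are the ones the function reads above, the path is illustrative, and the _read_* helpers are assumed to tolerate None entries:

metadata = {
    'data_file': 'session1.axgx',   # any format neo.io can auto-detect
    'annotations_file': None,
    'epoch_encoder_file': None,
    'tridesclous_file': None,
    'rauc_bin_duration': None,
}
blk = load_dataset(metadata, lazy=False)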
Example #19
import neo
import quantities as pq
import numpy as np
import nixio as nix
from neo.io import NixIO

block = neo.Block()
chn_index = neo.ChannelIndex([0, 1, 2],
                             channel_names=["a", "b", "c"],
                             channel_ids=[1, 2, 3])
block.channel_indexes.append(chn_index)
unit = neo.Unit(name="x", description="contain1st")
chn_index.units.append(unit)

seg = neo.Segment()
asig = neo.AnalogSignal(name="signal",
                        signal=[1.1, 1.2, 1.5],
                        units="mV",
                        sampling_rate=1 * pq.Hz)
seg.analogsignals.append(asig)
asig2 = neo.AnalogSignal(name="signal2",
                         signal=[1.1, 1.2, 2.5],
                         units="mV",
                         sampling_rate=1 * pq.Hz)
seg.analogsignals.append(asig2)
irasig = neo.IrregularlySampledSignal(name="irsignal",
                                      signal=np.random.random((100, 2)),
                                      units="mV",
                                      times=np.cumsum(
                                          np.random.random(100) * pq.s))
seg.irregularlysampledsignals.append(irasig)
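The snippet stops before wiring the objects together; a plausible continuation (assumed, not from the original source) attaches the segment and writes the block with the NixIO imported above:

# Assumed continuation: attach the segment and persist the block.
block.segments.append(seg)
seg.block = block
with NixIO('example.nix', mode='ow') as io:   # file name is illustrative
    io.write_block(block)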
Example #20
def cut_block_by_epochs(block, properties=None, reset_time=False):
    """
    This function cuts Segments in a Block according to multiple Neo
    Epoch objects.

    The function returns a new Block containing one Segment per Epoch entry
    fulfilling a set of conditions on the Epoch attributes and annotations.
    The original Block is left unchanged.

    A dictionary contains restrictions on which Epochs are considered for
    the cutting procedure. To this end, it is possible to
    specify accepted (valid) values of specific annotations on the source
    Epochs.

    The resulting cut segments may either retain their original time stamps, or
    be shifted to a common starting time.

    Parameters
    ----------
    block: Block
        Contains the Segments to cut according to the Epoch criteria provided
    properties: dictionary
        A dictionary that contains the Epoch keys and values to filter for.
        Each key of the dictionary is matched to an attribute or an
        annotation or an array_annotation of the Epoch.
        The value of each dictionary entry corresponds to a valid entry or a
        list of valid entries of the attribute or (array) annotation.

        If the value belonging to the key is a list of entries of the same
        length as the number of epochs in the Epoch object, the list entries
        are matched to the epochs in the Epoch object. The resulting Epoch
        object contains only those epochs where the values match up.

        Otherwise, the value is compared to the attributes or annotation of the
        Epoch object as such, and depending on the comparison, either the
        complete Epoch object is returned or not.

        If None or an empty dictionary is passed, all Epoch objects will
        be considered.

    reset_time: bool
        If True, the time stamps of all sliced objects are shifted to fall
        in the range from 0 to the duration of the epoch.
        If False, the original time stamps are retained.
        Default is False.

    Returns
    -------
    new_block: Block
        A new Block containing the cut Segments.
    """
    if not isinstance(block, neo.Block):
        raise TypeError(
            'block needs to be a Block, not %s' % type(block))

    new_block = neo.Block()

    for seg in block.segments:
        epochs = _get_from_list(seg.epochs, prop=properties)
        if len(epochs) > 1:
            warnings.warn(
                'Segment %s contains multiple epochs with '
                'requested properties (%s). Sub-segments can '
                'have overlapping times' % (seg.name, properties))

        elif len(epochs) == 0:
            warnings.warn(
                'No epoch is matching the requested epoch properties %s. '
                'No cutting of segment %s performed.' % (properties, seg.name))

        for epoch in epochs:
            new_segments = cut_segment_by_epoch(
                seg, epoch=epoch, reset_time=reset_time)
            new_block.segments.extend(new_segments)

    new_block.create_many_to_one_relationship(force=True)

    return new_block
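A usage sketch (added for illustration), assuming the elephant helpers _get_from_list and cut_segment_by_epoch referenced above are in scope:

import neo
import quantities as pq

blk = neo.Block()
seg = neo.Segment()
seg.epochs.append(neo.Epoch(times=[0.5, 2.0] * pq.s,
                            durations=[1.0, 1.0] * pq.s,
                            labels=['trial1', 'trial2']))
blk.segments.append(seg)

cut_blk = cut_block_by_epochs(blk, reset_time=True)
print(len(cut_blk.segments))   # one new Segment per epoch entry: 2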
Example #21
    def test_correct_transfer_of_spiketrain_attributes(self):

        # for delete=True the spiketrains in the block are changed,
        # test if their attributes remain correct

        sampling_rate = 1 / pq.s

        spiketrain = neo.SpikeTrain([1, 1, 5, 0] * pq.s, t_stop=10 * pq.s)

        block = neo.Block()

        group = neo.Group(name='Test Group')
        block.groups.append(group)
        group.spiketrains.append(spiketrain)

        segment = neo.Segment()
        block.segments.append(segment)
        segment.block = block
        segment.spiketrains.append(spiketrain)
        spiketrain.segment = segment

        spiketrain.annotate(cool_spike_train=True)
        spiketrain.array_annotate(
            spike_number=np.arange(len(spiketrain.times.magnitude)))
        spiketrain.waveforms = np.sin(
            np.arange(len(spiketrain.times.magnitude))[:, np.newaxis] +
            np.arange(len(spiketrain.times.magnitude))[np.newaxis, :])

        correct_mask = np.array([False, False, True, True])

        # store the correct attributes
        correct_annotations = spiketrain.annotations.copy()
        correct_waveforms = spiketrain.waveforms[correct_mask].copy()
        correct_array_annotations = {
            key: value[correct_mask]
            for key, value in spiketrain.array_annotations.items()
        }

        # perform a synchrofact search with delete=True
        synchrofact_obj = Synchrotool([spiketrain],
                                      spread=0,
                                      sampling_rate=sampling_rate,
                                      binary=False)
        synchrofact_obj.delete_synchrofacts(mode='delete',
                                            in_place=True,
                                            threshold=2)

        # Ensure that the spiketrain was not duplicated
        self.assertEqual(len(block.filter(objects=neo.SpikeTrain)), 1)

        cleaned_spiketrain = segment.spiketrains[0]

        # Ensure that the spiketrain is also in the group
        self.assertEqual(len(block.groups[0].spiketrains), 1)
        self.assertIs(block.groups[0].spiketrains[0], cleaned_spiketrain)

        cleaned_annotations = cleaned_spiketrain.annotations
        cleaned_waveforms = cleaned_spiketrain.waveforms
        cleaned_array_annotations = cleaned_spiketrain.array_annotations
        cleaned_array_annotations.pop('complexity')

        self.assertDictEqual(correct_annotations, cleaned_annotations)
        assert_array_almost_equal(cleaned_waveforms, correct_waveforms)
        self.assertTrue(
            len(cleaned_array_annotations) == len(correct_array_annotations))
        for key, value in correct_array_annotations.items():
            self.assertTrue(key in cleaned_array_annotations.keys())
            assert_array_almost_equal(value, cleaned_array_annotations[key])
Example #22
    CLI = argparse.ArgumentParser(description=__doc__,
                                  formatter_class=argparse.RawDescriptionHelpFormatter)
    CLI.add_argument("--data",    nargs='?', type=str, required=True,
                     help="path to input data in neo format")
    CLI.add_argument("--output",  nargs='?', type=str, required=True,
                     help="path of output file")
    CLI.add_argument("--output_img",  nargs='?', type=none_or_str,
                     help="path of output image", default=None)
    CLI.add_argument("--macro_pixel_dim",  nargs='?', type=int,
                     help="smoothing factor", default=2)

    args = CLI.parse_args()
    block = load_neo(args.data)
    block = analogsignals_to_imagesequences(block)
    imgseq = block.segments[0].imagesequences[0]

    imgseq_reduced = spatial_smoothing(imgseq, args.macro_pixel_dim)

    if args.output_img is not None:
        plot_downsampled_image(imgseq_reduced.as_array()[0], args.output_img)

    new_block = neo.Block()
    new_segment = neo.Segment()
    new_block.segments.append(new_segment)
    new_block.segments[0].imagesequences.append(imgseq_reduced)
    new_block = imagesequences_to_analogsignals(new_block)

    block.segments[0].analogsignals[0] = new_block.segments[0].analogsignals[0]

    write_neo(args.output, block)
Example #23
    # calculate mask
    contour = calculate_contour(img=avg_img,
                                contour_limit=args.intensity_threshold)
    contour = close_contour(contour, num=100)
    mask = contour2mask(contour=contour,
                        dim_x=dim_x,
                        dim_y=dim_y)

    # apply mask
    imgseq_array[:, np.bitwise_not(mask)] = np.nan
    if args.crop_to_selection:
        imgseq_array = crop_to_selection(imgseq_array)

    # replace the analog signal
    tmp_blk = neo.Block()
    tmp_seg = neo.Segment()
    tmp_imgseq = imgseq.duplicate_with_new_data(imgseq_array)
    tmp_blk.segments.append(tmp_seg)
    tmp_blk.segments[0].imagesequences.append(tmp_imgseq)
    tmp_blk = ImageSequence2AnalogSignal(tmp_blk)
    new_asig = tmp_blk.segments[0].analogsignals[0]

    asig = block.segments[0].analogsignals[0].duplicate_with_new_data(new_asig.as_array())
    if not args.crop_to_selection:
        asig.array_annotate(**block.segments[0].analogsignals[0].array_annotations)
    else:
        asig.array_annotate(**new_asig.array_annotations)

    # save data and figure
    asig.name += ""
Example #24
    def test1(self):
        """Create clu and fet files based on spiketrains in a block.

        Checks that
            Files are created
            Converted to samples correctly
            Missing sampling rate are taken from IO reader default
            Spiketrains without cluster info are assigned to cluster 0
            Spiketrains across segments are concatenated
        """
        block = neo.Block()
        segment = neo.Segment()
        segment2 = neo.Segment()
        block.segments.append(segment)
        block.segments.append(segment2)

        # Fake spiketrain 1, will be sorted
        st1 = neo.SpikeTrain(times=[.002, .004, .006], units='s', t_stop=1.)
        st1.annotations['cluster'] = 0
        st1.annotations['group'] = 0
        segment.spiketrains.append(st1)

        # Fake spiketrain 1B, on another segment. No group specified,
        # default is 0.
        st1B = neo.SpikeTrain(times=[.106], units='s', t_stop=1.)
        st1B.annotations['cluster'] = 0
        segment2.spiketrains.append(st1B)

        # Fake spiketrain 2 on same group, no sampling rate specified
        st2 = neo.SpikeTrain(times=[.001, .003, .011], units='s', t_stop=1.)
        st2.annotations['cluster'] = 1
        st2.annotations['group'] = 0
        segment.spiketrains.append(st2)

        # Fake spiketrain 3 on new group, with different sampling rate
        st3 = neo.SpikeTrain(times=[.05, .09, .10], units='s', t_stop=1.)
        st3.annotations['cluster'] = -1
        st3.annotations['group'] = 1
        segment.spiketrains.append(st3)

        # Fake spiketrain 4 on new group, without cluster info
        st4 = neo.SpikeTrain(times=[.005, .009], units='s', t_stop=1.)
        st4.annotations['group'] = 2
        segment.spiketrains.append(st4)

        # Create empty directory for writing
        delete_test_session()

        # Create writer with default sampling rate
        kio = KlustaKwikIO(filename=os.path.join(self.dirname, 'base1'),
                           sampling_rate=1000.)
        kio.write_block(block)

        # Check files were created
        for fn in ['.fet.0', '.fet.1', '.clu.0', '.clu.1']:
            self.assertTrue(
                os.path.exists(os.path.join(self.dirname, 'base1' + fn)))

        # Check files contain correct content
        # Spike times on group 0
        data = open(os.path.join(self.dirname, 'base1.fet.0')).readlines()
        data = [int(d) for d in data]
        self.assertEqual(data, [0, 2, 4, 6, 1, 3, 11, 106])

        # Clusters on group 0
        data = open(os.path.join(self.dirname, 'base1.clu.0')).readlines()
        data = [int(d) for d in data]
        self.assertEqual(data, [2, 0, 0, 0, 1, 1, 1, 0])

        # Spike times on group 1
        data = open(os.path.join(self.dirname, 'base1.fet.1')).readlines()
        data = [int(d) for d in data]
        self.assertEqual(data, [0, 50, 90, 100])

        # Clusters on group 1
        data = open(os.path.join(self.dirname, 'base1.clu.1')).readlines()
        data = [int(d) for d in data]
        self.assertEqual(data, [1, -1, -1, -1])

        # Spike times on group 2
        data = open(os.path.join(self.dirname, 'base1.fet.2')).readlines()
        data = [int(d) for d in data]
        self.assertEqual(data, [0, 5, 9])

        # Clusters on group 2
        data = open(os.path.join(self.dirname, 'base1.clu.2')).readlines()
        data = [int(d) for d in data]
        self.assertEqual(data, [1, 0, 0])

        # Empty out test session again
        delete_test_session()
Example #25
    def save(self, spike_clusters=None, groups=None, *labels):
        if spike_clusters is None:
            spike_clusters = self.spike_clusters
        assert spike_clusters.shape == self.spike_clusters.shape
        # assert spike_clusters.dtype == self.spike_clusters.dtype # TODO check if this is necessary
        self.spike_clusters = spike_clusters
        blk = neo.Block()
        seg = neo.Segment(name='Segment_{}'.format(self.segment_num),
                          index=self.segment_num)
        # seg.duration = self.duration
        blk.segments.append(seg)
        metadata = self.chx.annotations
        if labels:
            metadata.update({name: values for name, values in labels})
        chx = neo.ChannelIndex(index=self.chx.index,
                               name=self.chx.name,
                               **metadata)
        blk.channel_indexes.append(chx)
        try:
            wf_units = self.sptrs[0].waveforms.units
        except AttributeError:
            wf_units = pq.dimensionless
        clusters = np.unique(spike_clusters)
        self.cluster_groups = groups or self.cluster_groups
        for sc in clusters:
            mask = self.spike_clusters == sc
            waveforms = np.swapaxes(self.waveforms[mask], 1, 2) * wf_units
            sptr = neo.SpikeTrain(times=self.spike_times[mask] * pq.s,
                                  waveforms=waveforms,
                                  sampling_rate=self.sample_rate * pq.Hz,
                                  name='cluster #%i' % sc,
                                  t_stop=self.duration,
                                  t_start=self.start_time,
                                  **{'cluster_id': sc,
                                     'cluster_group': self.cluster_groups[sc].lower(),
                                     'kk2_metadata': self.kk2_metadata})
            sptr.channel_index = chx
            unt = neo.Unit(name='Unit #{}'.format(sc),
                           **{'cluster_id': sc,
                              'cluster_group': self.cluster_groups[sc].lower()})
            unt.spiketrains.append(sptr)
            chx.units.append(unt)
            seg.spiketrains.append(sptr)

        # save block to file
        try:
            io = neo.get_io(self.save_path, mode=self.mode)
        except Exception:
            io = neo.get_io(self.save_path)
        io.write_block(blk)
        if hasattr(io, 'close'):
            io.close()
        if self.output_ext == '.exdir':
            # save features and masks
            group = exdir.File(directory=self.save_path)
            self._exdir_save_group = self._find_exdir_channel_group(
                group["processing"]['electrophysiology']) # TODO not use elphys name
            if self._exdir_save_group is None:
                raise IOError('Cannot find a directory corresponding to ' +
                              'channel_group {}'.format(self.channel_group))
            self.save_features_masks(spike_clusters)
Example #26
import neo
import odml
from gnodeclient import *
import numpy as np
import quantities as qu
import time
# from GJMorphSim.ephys.GNodeUpoadHelpers import *
import ipdb

ss = session.create(location="http://predata.g-node.org", username="******", password="******")
blk = neo.Block(name='testBlock1')
for ind1 in range(3):

    seg = neo.Segment(name='testSegment' + str(ind1), index=ind1)
    seg.block = blk

    for ind2 in range(3):
        sp = neo.SpikeTrain(name='SpikeTrain' + str(ind2), times=np.random.rand(10) * qu.s, t_stop=1.2)
        sp.segment = seg
        seg.spiketrains.append(sp)

    for ind2 in range(3):
        sp = neo.Spike(name='Spike' + str(ind2), time=np.random.rand() * qu.s)
        sp.segment = seg
        seg.spikes.append(sp)

    for ind2 in range(3):
        irr = neo.IrregularlySampledSignal(name='IrregularlySampled' + str(ind2), times=np.random.rand(10) * qu.s,
                                           signal=np.random.rand(10) * qu.mV)
        irr.segment = seg
        seg.irregularlysampledsignals.append(irr)
Example #27
        images = substract_background(images, np.load(args.background))

    if args.normalize_by is not None and args.normalize_by != 'None':
        images = normalize(images, args.normalize_by)
    else:
        args.normalize_by = None

    if args.mask is not None:
        images = apply_mask(images, np.load(args.mask))

    if args.macro_pixel_dim is not None and args.macro_pixel_dim != 'None':
        images = spatial_smoothing(images, args.macro_pixel_dim)
    else:
        args.macro_pixel_dim = None

    # Save as NIX file
    description = '{} {} {} {}'.format(
        '' if args.background is None else 'background subtracted;',
        '' if args.normalize_by is None else 'normalized by ' +
        args.normalize_by, '' if args.mask is None else 'mask applied;',
        '' if args.macro_pixel_dim is None else
        'resolution reduced by factor ' + str(args.macro_pixel_dim))

    image_block = neo.Block(name='Results of {}'\
                                 .format(os.path.basename(__file__)))
    seg = neo.Segment(name='Segment 1', description=description)
    image_block.segments.append(seg)
    image_block.segments[0].analogsignals.append(images)
    with neo.NixIO(args.output) as io:
        io.write(image_block)
Example #28
import neo
import numpy as np
import elephant as el
#import matplotlib.pyplot as plt
#import os as os
import useful_tools as ut

# Relative path to the data (change to where you saved it)
path = '../data/'
resultpath = '../data_resliced/'

# Select the data

#%% Reslice trials to waiting time
for i in range(6):
    block = np.load(path + 'data{}.npy'.format(i), encoding='latin1').item()

    block_sliced = neo.Block()

    for idx, trial in enumerate(block.segments):  # for each trial

        # Find the index of our event in the event substructure
        on_num = trial.events[0].annotations['trial_event_labels'].index(
            b'CUE-OFF')
        off_num = trial.events[0].annotations['trial_event_labels'].index(
            b'GO-ON')

        # Find the associated times
        on_time = trial.events[0].annotations['signal'][on_num]
        off_time = trial.events[0].annotations['signal'][off_num]

        # Reslice the existing spiketrains, put them into a new neo.Segment
        seg_sliced = neo.Segment()
Example #29
    def test1(self):
        """Create clu and fet files based on spiketrains in a block.

        Checks that
            Files are created
            Converted to samples correctly
            Missing sampling rate are taken from IO reader default
            Spiketrains without cluster info are assigned to cluster 0
            Spiketrains across segments are concatenated
        """
        block = neo.Block()
        segment = neo.Segment()
        segment2 = neo.Segment()
        block.segments.append(segment)
        block.segments.append(segment2)

        # Fake spiketrain 1
        st1 = neo.SpikeTrain(times=[.002, .004, .006], units='s', t_stop=1.)
        st1.annotations['cluster'] = 0
        st1.annotations['group'] = 0
        wff = np.array([[11.3, 0.2], [-0.3, 12.3], [3.0, -2.5]])
        st1.annotations['waveform_features'] = wff
        segment.spiketrains.append(st1)

        # Create empty directory for writing
        if not os.path.exists(self.dirname):
            os.mkdir(self.dirname)
        delete_test_session(self.dirname)

        # Create writer
        kio = KlustaKwikIO(filename=os.path.join(self.dirname, 'base2'),
                           sampling_rate=1000.)
        kio.write_block(block)

        # Check files were created
        for fn in ['.fet.0', '.clu.0']:
            self.assertTrue(
                os.path.exists(os.path.join(self.dirname, 'base2' + fn)))

        # Check files contain correct content
        fi = open(os.path.join(self.dirname, 'base2.fet.0'))

        # first line is nbFeatures
        self.assertEqual(fi.readline(), '2\n')

        # Now check waveforms and times are same
        data = fi.readlines()
        new_wff = []
        new_times = []
        for line in data:
            line_split = line.split()
            new_wff.append([float(val) for val in line_split[:-1]])
            new_times.append(int(line_split[-1]))
        self.assertEqual(new_times, [2, 4, 6])
        assert_arrays_almost_equal(wff, np.array(new_wff), .00001)

        # Clusters on group 0
        data = open(os.path.join(self.dirname, 'base2.clu.0')).readlines()
        data = [int(d) for d in data]
        self.assertEqual(data, [1, 0, 0, 0])

        # Now read the features and test same
        block = kio.read_block()
        train = block.segments[0].spiketrains[0]
        assert_arrays_almost_equal(wff, train.annotations['waveform_features'],
                                   .00001)

        # Empty out test session again
        delete_test_session(self.dirname)
Example #30
    """
    asig.name += ""
    asig.description += "Deconvoluted activity using the given {} kernel"\
                        .format(args.kernel)
    block.segments[0].analogsignals[0] = asig
    """
    # New method, creating a new block
    # create an ImageSequence with the deconvoluted matrix
    imgseq_deconv = neo.ImageSequence(
        activities,
        units=block.segments[0].analogsignals[0].units,
        sampling_rate=block.segments[0].analogsignals[0].sampling_rate,
        spatial_scale=block.segments[0].imagesequences[0].spatial_scale,
        name=block.segments[0].analogsignals[0].name,
        description=block.segments[0].analogsignals[0].description)

    # create a new Block & Segment and append the ImageSequence
    segm_deconv = neo.Segment()
    segm_deconv.annotations = block.segments[0].annotations
    segm_deconv.annotate(kernel=args.kernel)  # TODO: parameter annotations
    segm_deconv.imagesequences.append(imgseq_deconv)
    block_deconv = neo.Block()
    block_deconv.segments.append(segm_deconv)
    block_deconv.name = block.name
    block_deconv.description = block.description
    block_deconv.annotations = block.annotations

    # converting into analogsignal
    block_deconv = ImageSequence2AnalogSignal(block_deconv)

    write_neo(args.output, block_deconv)