Code example #1
def _event_epoch_slice_by_valid_ids(obj, valid_ids):
    """
    Internal function
    """
    # modify annotations
    sparse_annotations = _get_valid_annotations(obj, valid_ids)

    # modify array annotations
    sparse_array_annotations = {key: value[valid_ids]
                                for key, value in obj.array_annotations.items() if len(value)}

    if type(obj) is neo.Event:
        sparse_obj = neo.Event(
            times=copy.deepcopy(obj.times[valid_ids]),
            units=copy.deepcopy(obj.units),
            name=copy.deepcopy(obj.name),
            description=copy.deepcopy(obj.description),
            file_origin=copy.deepcopy(obj.file_origin),
            array_annotations=sparse_array_annotations,
            **sparse_annotations)
    elif type(obj) is neo.Epoch:
        sparse_obj = neo.Epoch(
            times=copy.deepcopy(obj.times[valid_ids]),
            durations=copy.deepcopy(obj.durations[valid_ids]),
            units=copy.deepcopy(obj.units),
            name=copy.deepcopy(obj.name),
            description=copy.deepcopy(obj.description),
            file_origin=copy.deepcopy(obj.file_origin),
            array_annotations=sparse_array_annotations,
            **sparse_annotations)
    else:
        raise TypeError('Can only slice Event and Epoch objects by valid IDs.')

    return sparse_obj
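A minimal sketch (not from the scraped source) of the boolean-mask slicing this helper performs, using only public neo and numpy APIs:

import numpy as np
import quantities as pq
import neo

ev = neo.Event(times=np.array([0.1, 0.5, 0.9]) * pq.s,
               labels=np.array(['a', 'b', 'c']))
valid_ids = np.array([True, False, True])
# keep only the selected entries, as the helper above does field by field
sliced = neo.Event(times=ev.times[valid_ids], labels=ev.labels[valid_ids])
print(sliced.times)  # [0.1 0.9] s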
Code example #2
def generate_salt_trials(spike_train, epoch):
    """
    Generate test and baseline trials from spike train and epoch for salt.
    Test trials are the periods within the epoch's times and durations;
    baseline trials are the periods between each time + duration and the
    next time.
    Note
    ----
    Spikes before the first trial are disregarded in baseline trials.
    Parameters
    ----------
    spike_train : neo.SpikeTrain
    epoch : neo.Epoch
    Returns
    -------
    out : tuple
        (baseline_trials, test_trials)
    """
    from exana.stimulus import make_spiketrain_trials
    e = epoch
    test_trials = make_spiketrain_trials(spike_train=spike_train,
                                         epoch=e)
    durations = np.array(
        [t2 - t1 - d for t1, t2, d in zip(e.times,
                                          e.times[1:],
                                          e.durations)]) * e.times.units
    times = np.array(
        [t1 + d for t1, d in zip(e.times[:-1], e.durations[:-1])]) * e.times.units
    baseline_epoch = neo.Epoch(times=times, durations=durations)
    baseline_trials = make_spiketrain_trials(spike_train=spike_train,
                                             epoch=baseline_epoch)
    return baseline_trials, test_trials
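The gap arithmetic above can also be written with vectorized quantities; a hedged sketch with made-up epoch values:

import numpy as np
import quantities as pq
import neo

e = neo.Epoch(times=np.array([1.0, 3.0, 5.0]) * pq.s,
              durations=np.array([0.5, 0.5, 0.5]) * pq.s)
# each baseline window runs from one offset (time + duration) to the next onset
baseline_times = e.times[:-1] + e.durations[:-1]
baseline_durations = e.times[1:] - baseline_times
baseline = neo.Epoch(times=baseline_times, durations=baseline_durations)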
Code example #3
def _event_epoch_slice_by_valid_ids(obj, valid_ids):
    """
    Internal function
    """
    # modify annotations
    sparse_annotations = _get_valid_annotations(obj, valid_ids)

    # modify labels
    sparse_labels = _get_valid_labels(obj, valid_ids)

    if type(obj) is neo.Event:
        sparse_obj = neo.Event(
            times=copy.deepcopy(obj.times[valid_ids]),
            labels=sparse_labels,
            units=copy.deepcopy(obj.units),
            name=copy.deepcopy(obj.name),
            description=copy.deepcopy(obj.description),
            file_origin=copy.deepcopy(obj.file_origin),
            **sparse_annotations)
    elif type(obj) is neo.Epoch:
        sparse_obj = neo.Epoch(
            times=copy.deepcopy(obj.times[valid_ids]),
            durations=copy.deepcopy(obj.durations[valid_ids]),
            labels=sparse_labels,
            units=copy.deepcopy(obj.units),
            name=copy.deepcopy(obj.name),
            description=copy.deepcopy(obj.description),
            file_origin=copy.deepcopy(obj.file_origin),
            **sparse_annotations)
    else:
        raise TypeError('Can only slice Event and Epoch objects by valid IDs.')

    return sparse_obj
Code example #4
def generate_spike_train_and_epoch():
    from exana.stimulus import salt, generate_salt_trials
    from exana.misc import concatenate_spiketrains
    from elephant.spike_train_generation import homogeneous_poisson_process as hpp
    np.random.seed(12345)
    N_trials = 100
    stim_duration = 100 * pq.ms
    stim_start = 1000 * pq.ms
    stim_latency = 50 * pq.ms
    trial_duration = 1500 * pq.ms
    trains = []
    stim_onsets = []
    for n in range(N_trials):
        offset = trial_duration * n
        stim_onsets.append(stim_start + offset)
        trains.extend([hpp(rate=2 * pq.Hz,
                           t_start=offset,
                           t_stop=stim_start + stim_latency + offset),
                       hpp(rate=8 * pq.Hz,
                           t_start=stim_start + stim_latency + offset,
                           t_stop=stim_start + stim_duration + offset),
                       hpp(rate=2 * pq.Hz,
                           t_start=stim_start + stim_duration + offset,
                           t_stop=trial_duration + offset)])
    spike_train = concatenate_spiketrains(trains)

    epoch = neo.Epoch(
        times=np.array(stim_onsets) * pq.ms,
        durations=np.array([stim_duration] * len(stim_onsets)) * pq.ms)
    return spike_train, epoch
Code example #5
def _create_neo_epochs_from_dataframe(dataframe,
                                      metadata,
                                      file_origin,
                                      filter_events_from_epochs=False):
    """
    Convert the contents of a dataframe into Neo :class:`Epochs
    <neo.core.Epoch>`.
    """

    epochs_list = []

    if dataframe is not None:

        if filter_events_from_epochs:
            # keep only rows with a positive duration
            dataframe = dataframe[dataframe['Duration (s)'] > 0]

        # group epochs by type
        for type_name, df in dataframe.groupby('Type'):

            # create a Neo Epoch for each type
            epoch = neo.Epoch(
                name=type_name,
                file_origin=file_origin,
                times=df['Start (s)'].values * pq.s,
                durations=df['Duration (s)'].values * pq.s,
                labels=df['Label'].values,
            )

            epochs_list.append(epoch)

    # return the list of Neo Epochs
    return epochs_list
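A hedged sketch of the dataframe layout the function expects; the column names come from the code above, the values are invented:

import pandas as pd

dataframe = pd.DataFrame({
    'Type': ['stim', 'stim', 'marker'],
    'Start (s)': [1.0, 3.0, 2.0],
    'Duration (s)': [0.5, 0.5, 0.0],
    'Label': ['trial 1', 'trial 2', 'flag'],
})
# with filter_events_from_epochs=True the zero-duration 'marker' row is
# dropped, and grouping by 'Type' yields one neo.Epoch per remaining type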
Code example #6
def test_baysian():
    from exana.stimulus import baysian_latency, generate_salt_trials
    from exana.misc import concatenate_spiketrains
    from elephant.spike_train_generation import homogeneous_poisson_process as hpp
    np.random.seed(12345)
    N_trials = 100
    stim_duration = 100 * pq.ms
    stim_start = 1000 * pq.ms
    stim_latency = 50 * pq.ms
    trial_duration = 1150 * pq.ms
    trains = []
    stim_onsets = []
    for n in range(N_trials):
        offset = trial_duration * n
        stim_onsets.append(offset)
        trains.extend([
            hpp(rate=2 * pq.Hz,
                t_start=offset,
                t_stop=stim_start + stim_latency + offset),
            hpp(rate=8 * pq.Hz,
                t_start=stim_start + stim_latency + offset,
                t_stop=stim_start + stim_duration + offset)
        ])
    spike_train = concatenate_spiketrains(trains)

    epoch = neo.Epoch(times=np.array(stim_onsets) * pq.ms,
                      durations=np.array([trial_duration] * len(stim_onsets)) *
                      pq.ms)

    from exana.stimulus import make_spiketrain_trials
    trials = make_spiketrain_trials(spike_train=spike_train, epoch=epoch)
    from elephant.statistics import time_histogram
    t_start = trials[0].t_start.rescale('s')
    t_stop = trials[0].t_stop.rescale('s')

    binsize = (abs(t_start) + abs(t_stop)) / float(100)
    time_hist = time_histogram(trials,
                               binsize,
                               t_start=t_start,
                               t_stop=t_stop,
                               output='counts',
                               binary=False)
    bins = np.arange(t_start.magnitude, t_stop.magnitude, binsize.magnitude)

    count_data = time_hist.magnitude

    trace = baysian_latency(count_data)
    return count_data, trace
Code example #7
def yhat2trains(mdl, cbool):
    """
    Map the predicted spikes to a list of spike trains and extract a contact epoch
    :param mdl: dict of model output; mdl['ysim'] holds the predicted spike
        matrix, one column per simulation (inferred from the code below)
    :param cbool: boolean contact vector, converted to contact onsets/offsets
        by neoUtils.Cbool_to_cc
    :return: tuple of (list of neo.SpikeTrain, neo.Epoch of contact periods)
    """
    ysim = mdl['ysim']
    trains = []
    for pred in ysim.T:
        # np.where returns a tuple of index arrays; take the first element
        # before attaching units, so the resulting SpikeTrain is 1-D
        train = neo.SpikeTrain(times=np.where(pred)[0] * pq.ms,
                               t_stop=len(pred) * pq.ms)
        trains.append(train)

    starts, stops = neoUtils.Cbool_to_cc(cbool)
    dur = stops - starts
    epoch = neo.Epoch(starts * pq.ms, dur * pq.ms)
    return (trains, epoch)
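A short sketch (synthetic data) of the index-to-times conversion used in the loop above:

import numpy as np
import quantities as pq
import neo

pred = np.array([0, 1, 0, 0, 1, 1])  # binary spike indicator, one bin per ms
times = np.where(pred)[0] * pq.ms    # -> [1, 4, 5] ms
train = neo.SpikeTrain(times=times, t_stop=len(pred) * pq.ms)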
Code example #8
def test_salt_exc():
    from exana.stimulus import salt, generate_salt_trials
    from exana.misc import concatenate_spiketrains
    from elephant.spike_train_generation import homogeneous_poisson_process as hpp
    np.random.seed(12345)
    N_trials = 100
    stim_duration = 100 * pq.ms
    stim_start = 1000 * pq.ms
    stim_latency = 50 * pq.ms
    trial_duration = 1500 * pq.ms
    trains = []
    stim_onsets = []
    for n in range(N_trials):
        offset = trial_duration * n
        stim_onsets.append(stim_start + offset)
        trains.extend([hpp(rate=2 * pq.Hz,
                           t_start=offset,
                           t_stop=stim_start + stim_latency + offset),
                       hpp(rate=8 * pq.Hz,
                           t_start=stim_start + stim_latency + offset,
                           t_stop=stim_start + stim_duration + offset),
                       hpp(rate=2 * pq.Hz,
                           t_start=stim_start + stim_duration + offset,
                           t_stop=trial_duration + offset)])
    spike_train = concatenate_spiketrains(trains)

    epoch = neo.Epoch(
        times=np.array(stim_onsets) * pq.ms,
        durations=np.array([stim_duration] * len(stim_onsets)) * pq.ms)
    baseline_trials, test_trials = generate_salt_trials(spike_train, epoch)

    latencies, p_values, I_values = salt(baseline_trials=baseline_trials,
                                         test_trials=test_trials,
                                         winsize=0.01*pq.s,
                                         latency_step=0.01*pq.s)
    idxs, = np.where(np.array(p_values) < 0.01)
    print(latencies)
    print(p_values)
    assert latencies[min(idxs)] == stim_latency
    return baseline_trials, test_trials, spike_train, epoch
Code example #9
    def _run_simulations(self, model):
        """For each step in the protocol, run simulation and store recordings"""
        recordings = neo.Block()
        print("Total protocols: {}".format(len(self.protocol)))
        for idx, (step_name, step) in enumerate(self.protocol.items()):
            segment = neo.Segment(name=step_name)
            recordings.segments.append(segment)
            segment.block = recordings

            print("{}. Current protocol: {}".format(idx+1, step_name))
            model.inject_current(step["stimuli"])
            model.run(tstop=step["total_duration"])
            signal = model.get_membrane_potential()
            stimulus_on = neo.Epoch(times=step["stimuli"]["delay"]*ms,
                                    durations=step["stimuli"]["duration"]*ms,
                                    labels="stimulus")
            segment.analogsignals.append(signal)
            segment.epochs.append(stimulus_on)
        return recordings
Code example #10
def _read_epoch(exdir_file, path, lazy=False):
    group = exdir_file[path]
    if lazy:
        times = []
    else:
        times = pq.Quantity(group['timestamps'].data,
                            group['timestamps'].attrs['unit'])

    if "durations" in group and not lazy:
        durations = pq.Quantity(group['durations'].data,
                                group['durations'].attrs['unit'])
    elif "durations" in group and lazy:
        durations = []
    else:
        durations = None

    if 'data' in group and not lazy:
        if 'unit' not in group['data'].attrs:
            labels = group['data'].data
        else:
            labels = pq.Quantity(group['data'].data,
                                 group['data'].attrs['unit'])
    elif 'data' in group and lazy:
        labels = []
    else:
        labels = None
    annotations = {'exdir_path': path}
    annotations.update(group.attrs.to_dict())

    if lazy:
        lazy_shape = (group.attrs['num_samples'], )
    else:
        lazy_shape = None
    epo = neo.Epoch(times=times,
                    durations=durations,
                    labels=labels,
                    lazy_shape=lazy_shape,
                    **annotations)

    return epo
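The reader above repeatedly builds quantities from raw data plus a unit string stored in the file attributes; a minimal sketch of that pattern:

import numpy as np
import quantities as pq

raw = np.array([0.1, 0.2, 0.3])
unit = 's'                      # e.g. group['timestamps'].attrs['unit']
times = pq.Quantity(raw, unit)  # -> array([0.1, 0.2, 0.3]) * s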
Code example #11
def test_load_save():
    n_channels = 5
    n_samples = 20
    n_spikes = 50
    fname = '/tmp/test_phy.exdir'
    if os.path.exists(fname):
        shutil.rmtree(fname)
    wf = np.random.random((n_spikes, n_channels, n_samples))
    ts = np.sort(np.random.random(n_spikes))
    t_stop = np.ceil(ts[-1])
    sptr = neo.SpikeTrain(times=ts, units='s', waveforms=wf * pq.V,
                          t_stop=t_stop, **{'group_id': 0})
    blk = neo.Block()
    seg = neo.Segment()
    seg.duration = t_stop
    blk.segments.append(seg)
    chx = neo.ChannelIndex(index=range(n_channels), **{'group_id': 0})
    blk.channel_indexes.append(chx)
    sptr.channel_index = chx
    unit = neo.Unit()
    unit.spiketrains.append(sptr)
    chx.units.append(unit)
    seg.spiketrains.append(sptr)
    epo = neo.Epoch()
    if os.path.exists(fname):
        shutil.rmtree(fname)
    io = neo.ExdirIO(fname)
    io.write_block(blk)
    wfswap = wf.swapaxes(1, 2)
    m = NeoModel(fname, overwrite=True)
    assert np.array_equal(m.spike_times, ts)
    assert np.array_equal(m.waveforms, wfswap)
    m.save()
    m2 = NeoModel(fname, overwrite=True)
    assert np.array_equal(m2.spike_times, ts)
    assert np.array_equal(m2.waveforms, wfswap)
    assert np.array_equal(m2.features, m.features)
    assert np.array_equal(m2.amplitudes, m.amplitudes)
    assert np.array_equal(m2.spike_clusters, m.spike_clusters)
Code example #12
File: conversions.py Project: soltesz-lab/spykeutils
def epoch_array_to_epochs(epoch_array):
    """ Return a list of epochs for an epoch array.

    Note that while the created epochs may have references to a segment,
    the relationships in the other direction are not automatically created
    (the epochs are not attached to the segment). Other properties like
    annotations are not copied or referenced in the created epochs.

    :param epoch_array: An epoch array from which the Epoch objects are
        constructed.
    :type epoch_array: :class:`neo.core.EpochArray`
    :return: A list of epochs, one for each of the entries in ``epoch_array``.
    :rtype: list
    """
    periods = []
    for i, t in enumerate(epoch_array.times):
        p = neo.Epoch(
            t, epoch_array.durations[i],
            epoch_array.labels[i] if i < len(epoch_array.labels) else '')
        p.segment = epoch_array.segment
        periods.append(p)
    return periods
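With the modern neo API (plain neo.Epoch instead of the legacy EpochArray) the same per-entry split can be sketched as follows, with illustrative values:

import numpy as np
import quantities as pq
import neo

epa = neo.Epoch(times=np.array([0.0, 1.0]) * pq.s,
                durations=np.array([0.2, 0.3]) * pq.s,
                labels=np.array(['a', 'b']))
# one single-entry Epoch per entry of the original
singles = [neo.Epoch(times=epa.times[i:i + 1],
                     durations=epa.durations[i:i + 1],
                     labels=epa.labels[i:i + 1])
           for i in range(len(epa))]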
Code example #13
    def regime_epochs(self):
        """
        Retrieves the periods spent in each regime during the simulation
        as a neo.core.Epoch
        """
        try:
            rec = self._regime_recording()
        except KeyError:
            raise Pype9RegimeTransitionsNotRecordedError(
                "Regime transitions not recorded, call 'record_regime' before"
                " simulation")
        cc = self.build_component_class
        index_map = dict((cc.index_of(r), r.name) for r in cc.regimes)
        # sample indices at which the recorded regime index changes
        trans_inds = np.nonzero(
            np.asarray(rec[1:]) != np.asarray(rec[:-1]))[0] + 1
        # Insert initial regime
        trans_inds = np.insert(trans_inds, 0, 0)
        labels = [index_map[int(rec[int(i)])] for i in trans_inds]
        times = rec.times[trans_inds]
        # period boundaries: each transition time plus the end of recording
        boundaries = np.append(times, rec.t_stop) * times.units
        durations = boundaries[1:] - boundaries[:-1]
        return neo.Epoch(
            times=times, durations=durations, labels=labels,
            name='{}_regimes'.format(self.name))
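The transition-detection idiom used above, shown on a plain array of synthetic regime indices (not Pype9 data):

import numpy as np

rec = np.array([0, 0, 1, 1, 1, 2, 2])  # regime index recorded per sample
trans_inds = np.nonzero(rec[1:] != rec[:-1])[0] + 1
trans_inds = np.insert(trans_inds, 0, 0)  # prepend the initial regime
print(trans_inds)  # [0 2 5] -- the sample at which each regime period starts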
Code example #14
File: developmentio.py Project: JuliaSprenger/utils
    def read_segment(self,
                     lazy=False,
                     cascade=True,
                     t_start=None,
                     t_stop=None,
                     electrode_list=None,
                     unit_list=None,
                     analogsignals=True,
                     events=False,
                     waveforms=False):
        """Reads one Segment.

        The Segment will contain one AnalogSignalArray for each channel
        and will go from t_start to t_stop.

        Arguments:


            lazy : Postpone actual reading of the data files. Default 'False'.
            cascade : Do not postpone reading subsequent neo types (SpikeTrains,
                            AnalogSignalArrays, Events).
                            Default 'True'.
            t_start : time (quantity) that the Segment begins. Default None.
            t_stop : time (quantity) that the Segment ends. Default None.
            electrode_list : list of integers containing the IDs of the requested
                            electrodes (channels) to load. If [] or None all
                            available channels will be loaded. If False, no
                            channel will be loaded.
                            Default: None.
            unit_list : list of integers containing the IDs of the requested
                            units to load. If [] all available units will be
                            loaded.
                            Default: None.
            analogsignals : boolean, indication whether analogsignals should be
                            read. Default: True.
            events : Loading events. If True all available events in the given
                            time window will be read. Default: False.
            waveforms : Load waveform for spikes in the requested time
                            window. Default: False.


        Returns:
            Segment object containing neo objects, which contain the data.
        """

        # Load neo segment
        seg = neo.io.NeuralynxIO.read_segment(self,
                                              lazy=lazy,
                                              cascade=cascade,
                                              t_start=t_start,
                                              t_stop=t_stop,
                                              electrode_list=electrode_list,
                                              unit_list=unit_list,
                                              analogsignals=analogsignals,
                                              events=events,
                                              waveforms=waveforms)

        # # Generate t_start and t_stop annotations of segments
        # seg.annotations['t_start'] = min([a.t_start for a in seg.analogsignalarrays + seg.spiketrains])
        # seg.annotations['t_stop'] = max([a.t_stop for a in seg.analogsignalarrays + seg.spiketrains])

        # Generate analogsignal classifications
        for sig in seg.analogsignals:
            # reset per signal so classifications do not accumulate across
            # signals (the original list was shared by all signals)
            signaltype = []
            for channel_idx in sig.annotations['channel_index']:
                if channel_idx < 32:
                    signaltype.append('neural')
                elif channel_idx in [32, 35]:  # stimulation channels
                    signaltype.append('stimulation')
                else:
                    raise TypeError('Signal has unknown channel type (id %s)' %
                                    channel_idx)
            sig.annotations['signal_type'] = signaltype

        for sig in seg.spiketrains:
            if 'electrode_id' in sig.annotations and sig.annotations[
                    'electrode_id'] < 32:
                sig.annotations['signaltype'] = 'neural'
            elif 'electrode_id' in sig.annotations and sig.annotations[
                    'electrode_id'] in [32, 35]:
                sig.annotations['signaltype'] = 'stimulation'
            elif 'electrode_id' in sig.annotations:
                raise TypeError(
                    'Signal has unknown electrode_id annotation (%s)' %
                    sig.annotations['electrode_id'])
            else:
                raise TypeError('Signal has no electrode_id annotation')

        if self.odML_avail:

            ################ STIMULATIONS ######################
            # Get stimulation periods from odml
            stimulations, stimarea = self.get_stimulations()

            if stimulations is not None:
                # Add stimulations as epocharray
                s_start = seg.t_start if seg.t_start else 0 * pq.s
                s_stop = seg.t_stop if seg.t_stop else self.parameters_global[
                    't_stop'] - self.parameters_global['t_start']

                # Add only stimulations which are completely in the current segment
                stim = {
                    k: v
                    for k, v in stimulations.items()
                    if v['StimulationPeriodEnd'] -
                    self.parameters_global['t_start'] > s_start
                    and v['StimulationPeriodStart'] -
                    self.parameters_global['t_start'] < s_stop
                }

                start_times = [
                    t['StimulationPeriodStart'] -
                    self.parameters_global['t_start'] for t in stim.values()
                ]
                durations = [
                    t['StimulationPeriodEnd'] - t['StimulationPeriodStart']
                    for t in stim.values()
                ]

                if len(durations) > 0 and len(start_times) > 0:
                    start_times = pq.Quantity(
                        [t.rescale(start_times[0].units) for t in start_times],
                        start_times[0].units)
                    durations = pq.Quantity(
                        [d.rescale(durations[0].units) for d in durations],
                        durations[0].units)

                    labels = [
                        'Stimulationperiod ' + str(i) for i in list(stim)
                    ]
                    stimtype = [t['StimulationType'] for t in stim.values()]
                    stimfreq = [
                        t['StimulusFrequency']
                        if 'StimulusFrequency' in t else None
                        for t in stim.values()
                    ]
                    stimpulseduration = [
                        t['PulseDuration'] if 'PulseDuration' in t else None
                        for t in stim.values()
                    ]
                    stim_count = [t['N_Stimuli'] for t in stim.values()]
                    stimoutput = [t['LaserOutput'] for t in stim.values()]
                    stimpower = [t['LaserPower'] for t in stim.values()]
                    stimquality = [
                        t['StimulationQuality'] for t in stim.values()
                    ]
                    # stimtimeunits = [t['StimulationTimes'][0].units if type(t['StimulationTimes'])==list else t['StimulationTimes'].units for t in stim.values()]
                    stimtimeunit = list(
                        stim.values())[0]['StimulationTimes'][0].units

                    stimtimes = [
                        np.asarray(t['StimulationTimes']) * stimtimeunit -
                        self.parameters_global['t_start']
                        for t in stim.values()
                    ]

                    ep = neo.Epoch(
                        times=start_times,
                        durations=durations,
                        labels=labels,
                        name='Stimulation epoch',
                        file_origin=self.odML_filename + '.odml',
                        type="stimulation",
                        stimtype=stimtype,
                        definition='Times of optogenetic stimulation',
                        stimarea=stimarea,
                        stimfreq=stimfreq,
                        stimpulseduration=stimpulseduration,
                        stim_count=stim_count,
                        stimpower=stimpower,
                        stimoutput=stimoutput,
                        stimquality=stimquality,
                        stimtimes=stimtimes)

                    seg.epochs.append(ep)
                    seg.create_relationship()

            ################ SPINDLES ######################
            # Get spindle periods from odml
            spindledet = self.get_spindles()

            # Add spindles as epocharray
            s_start = seg.t_start
            s_stop = seg.t_stop
            # Add only spindles which are contained in the current segment # if (spindle['SpindleEnd']-self.parameters_global['t_start']>s_start) and (spindle['SpindleStart']-self.parameters_global['t_start']<s_stop)
            # stim = {p:{c:{s:spindle for s,spindle in channel.items() if (spindle['SpindleEnd']-self.parameters_global['t_start']>s_start) and (spindle['SpindleStart']-self.parameters_global['t_start']<s_stop)} for c,channel in period.items()} for p,period in spindledet.items()}

            # convert spindle to
            for p, period in spindledet.items():
                for c, channel in period.items():
                    valid_spindles = {}
                    for s, spindle in channel.items():
                        if type(s) == int and (
                                spindle['SpindleEnd'] -
                                self.parameters_global['t_start'] > s_start
                        ) and (spindle['SpindleStart'] -
                               self.parameters_global['t_start'] < s_stop):
                            valid_spindles.update({s: spindle})

                    if len(valid_spindles.keys()) > 0:

                        start_times = [
                            t['SpindleStart'] -
                            self.parameters_global['t_start']
                            for t in valid_spindles.values()
                        ]
                        start_times = pq.Quantity([
                            t.rescale(start_times[0].units)
                            for t in start_times
                        ], start_times[0].units)
                        durations = [
                            t['SpindleEnd'] - t['SpindleStart']
                            for t in valid_spindles.values()
                        ]
                        durations = pq.Quantity(
                            [d.rescale(durations[0].units) for d in durations],
                            durations[0].units)

                        labels = ['Spindleperiod %i in channel %i in time ' \
                                  'period %s'%(i,c,p) for i in list(valid_spindles)]
                        spindamplitude = [
                            t['SpindleAmplitude']
                            for t in valid_spindles.values()
                        ]
                        spindmaxamplitude = [
                            t['SpindleMaxAmplitude']
                            for t in valid_spindles.values()
                        ]

                        ep = neo.Epoch(
                            times=start_times,
                            durations=durations,
                            labels=labels,
                            name='Spindle epoch',
                            file_origin=self.odML_filename + '.odml',
                            type="spindle oscillation",
                            channel_id=c,
                            spindperiod=p,
                            definition='Times of LFP spindle oscillations',
                            spindamplitude=spindamplitude,
                            spindmaxamplitude=spindmaxamplitude)

                        seg.epochs.append(ep)
                        seg.create_relationship()
        return seg
Code example #15
File: utils.py Project: iZXTYbWTCdrXlu81/python-neo
def add_epoch(
        segment, event1, event2=None, pre=0 * pq.s, post=0 * pq.s,
        attach_result=True, **kwargs):
    """
    Create Epochs around a single Event, or between pairs of events. The start
    and end times of the Epoch can be modified using pre and post as offsets
    before and after the event(s). Additional keywords will be directly
    forwarded to the Epoch initialization.

    Parameters:
    -----------
    segment : Segment
        The segment in which the final Epoch object is added.
    event1 : Event
        The Event object containing the start times of the epochs. If no
        event2 is specified, event1 also specifies the stop times, i.e.,
        the Epoch is cut around event1 times.
    event2: Event
        The Event object containing the stop times of the epochs. If no
        event2 is specified, event1 specifies the stop times, i.e., the Epoch
        is cut around event1 times. The number of events in event2 must match
        that of event1.
    pre, post: Quantity (time)
        Time offsets to modify the start (pre) and end (post) of the resulting
        Epoch. Example: pre=-10*ms and post=+25*ms will cut from 10 ms before
        event1 times to 25 ms after event2 times
    attach_result: bool
        If True, the resulting Epoch object is added to segment.

    Keyword Arguments:
    ------------------
    Passed to the Epoch object.

    Returns:
    --------
    epoch: Epoch
        An Epoch object with the calculated epochs (one per entry in event1).

    See also:
    ---------
    Event.to_epoch()
    """
    if event2 is None:
        event2 = event1

    if not isinstance(segment, neo.Segment):
        raise TypeError(
            'Segment has to be of type Segment, not %s' % type(segment))

    # load the full event if a proxy object has been given as an argument
    if isinstance(event1, neo.io.proxyobjects.EventProxy):
        event1 = event1.load()
    if isinstance(event2, neo.io.proxyobjects.EventProxy):
        event2 = event2.load()

    for event in [event1, event2]:
        if not isinstance(event, neo.Event):
            raise TypeError(
                'Events have to be of type Event, not %s' % type(event))

    if len(event1) != len(event2):
        raise ValueError(
            'event1 and event2 have to have the same number of entries in '
            'order to create epochs between pairs of entries. Match your '
            'events before generating epochs. Current event lengths '
            'are %i and %i' % (len(event1), len(event2)))

    times = event1.times + pre
    durations = event2.times + post - times

    if any(durations < 0):
        raise ValueError(
            'Can not create epoch with negative duration. '
            'Requested durations %s.' % durations)
    elif any(durations == 0):
        raise ValueError('Can not create epoch with zero duration.')

    if 'name' not in kwargs:
        kwargs['name'] = 'epoch'
    if 'labels' not in kwargs:
        kwargs['labels'] = [u'{}_{}'.format(kwargs['name'], i)
                            for i in range(len(times))]

    ep = neo.Epoch(times=times, durations=durations, **kwargs)

    ep.annotate(**event1.annotations)
    ep.array_annotate(**event1.array_annotations)

    if attach_result:
        segment.epochs.append(ep)
        segment.create_relationship()

    return ep
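A usage sketch for add_epoch with synthetic start/stop events (assumes the function above is in scope):

import numpy as np
import quantities as pq
import neo

seg = neo.Segment()
starts = neo.Event(times=np.array([1.0, 3.0]) * pq.s)
stops = neo.Event(times=np.array([2.0, 4.0]) * pq.s)
ep = add_epoch(seg, starts, stops,
               pre=-10 * pq.ms, post=25 * pq.ms, name='trial')
# ep.times -> [0.99, 2.99] s; ep.durations -> [1.035, 1.035] s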
Code example #16
File: Neomapping.py Project: hkchekc/NixRawIO
# the opening of this snippet is truncated in the source; the first line is
# reconstructed from the append two lines below
asig2 = neo.AnalogSignal(signal=[1.1, 1.2, 2.5],
                         units="mV",
                         sampling_rate=1 * pq.Hz)
seg.analogsignals.append(asig2)
irasig = neo.IrregularlySampledSignal(name="irsignal",
                                      signal=np.random.random((100, 2)),
                                      units="mV",
                                      times=np.cumsum(
                                          np.random.random(100) * pq.s))
seg.irregularlysampledsignals.append(irasig)
event = neo.Event(name="event",
                  times=np.cumsum(np.random.random(10)) * pq.ms,
                  labels=["event-" + str(idx) for idx in range(10)])
seg.events.append(event)
epoch = neo.Epoch(name="epoch",
                  times=np.cumsum(np.random.random(10)) * pq.ms,
                  durations=np.random.random(10) * pq.ms,
                  labels=["epoch-" + str(idx) for idx in range(10)])
seg.epochs.append(epoch)
st = neo.SpikeTrain(name="train1",
                    times=[0.21, 0.37, 0.53, 0.56],
                    t_start=0 * pq.s,
                    t_stop=2.4 * pq.s,
                    units=pq.s,
                    sampling_rate=0.01)
seg.spiketrains.append(st)

block.segments.append(seg)
chn_index.analogsignals.append(asig)
chn_index.irregularlysampledsignals.append(irasig)
unit.spiketrains.append(st)
Code example #17
def _read_data_file(metadata, lazy=False, signal_group_mode='split-all'):
    """
    Read in the ``data_file`` given in ``metadata`` using an automatically
    detected :mod:`neo.io` class if ``lazy=False`` or a :mod:`neo.rawio` class
    if ``lazy=True``. If ``lazy=True``, manually load epochs, events, and spike
    trains, but not signals. Return a Neo :class:`Block <neo.core.Block>`.
    """

    # read in the electrophysiology data
    # - signal_group_mode='split-all' ensures every channel gets its own
    #   AnalogSignal, which is important for indexing in EphyviewerConfigurator
    io = neo.io.get_io(_abs_path(metadata, 'data_file'))
    blk = io.read_block(lazy=lazy, signal_group_mode=signal_group_mode)

    # load all objects except analog signals
    if lazy:

        if version.parse(neo.__version__) >= version.parse(
                '0.8.0'):  # Neo >= 0.8.0 has proxy objects with load method

            for i in range(len(blk.segments[0].epochs)):
                epoch = blk.segments[0].epochs[i]
                if hasattr(epoch, 'load'):
                    blk.segments[0].epochs[i] = epoch.load()

            for i in range(len(blk.segments[0].events)):
                event = blk.segments[0].events[i]
                if hasattr(event, 'load'):
                    blk.segments[0].events[i] = event.load()

            for i in range(len(blk.segments[0].spiketrains)):
                spiketrain = blk.segments[0].spiketrains[i]
                if hasattr(spiketrain, 'load'):
                    blk.segments[0].spiketrains[i] = spiketrain.load()

        else:  # Neo < 0.8.0 does not have proxy objects

            neorawioclass = neo.rawio.get_rawio_class(
                _abs_path(metadata, 'data_file'))
            if neorawioclass is not None:
                neorawio = neorawioclass(_abs_path(metadata, 'data_file'))
                neorawio.parse_header()

                for i in range(len(blk.segments[0].epochs)):
                    epoch = blk.segments[0].epochs[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['event_channels'])
                                          if chan['name'] == epoch.name
                                          and chan['type'] == b'epoch'), None)
                    if channel_index is not None:
                        ep_raw_times, ep_raw_durations, ep_labels = neorawio.get_event_timestamps(
                            event_channel_index=channel_index)
                        ep_times = neorawio.rescale_event_timestamp(
                            ep_raw_times, dtype='float64')
                        ep_durations = neorawio.rescale_epoch_duration(
                            ep_raw_durations, dtype='float64')
                        ep = neo.Epoch(times=ep_times * pq.s,
                                       durations=ep_durations * pq.s,
                                       labels=ep_labels,
                                       name=epoch.name)
                        blk.segments[0].epochs[i] = ep

                for i in range(len(blk.segments[0].events)):
                    event = blk.segments[0].events[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['event_channels'])
                                          if chan['name'] == event.name
                                          and chan['type'] == b'event'), None)
                    if channel_index is not None:
                        ev_raw_times, _, ev_labels = neorawio.get_event_timestamps(
                            event_channel_index=channel_index)
                        ev_times = neorawio.rescale_event_timestamp(
                            ev_raw_times, dtype='float64')
                        ev = neo.Event(times=ev_times * pq.s,
                                       labels=ev_labels,
                                       name=event.name)
                        blk.segments[0].events[i] = ev

                for i in range(len(blk.segments[0].spiketrains)):
                    spiketrain = blk.segments[0].spiketrains[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['unit_channels'])
                                          if chan['name'] == spiketrain.name),
                                         None)
                    if channel_index is not None:
                        st_raw_times = neorawio.get_spike_timestamps(
                            unit_index=channel_index)
                        st_times = neorawio.rescale_spike_timestamp(
                            st_raw_times, dtype='float64')
                        st = neo.SpikeTrain(times=st_times * pq.s,
                                            # t_stop is required by SpikeTrain;
                                            # the last spike time is an assumption
                                            t_stop=st_times.max() * pq.s,
                                            name=spiketrain.name)
                        blk.segments[0].spiketrains[i] = st

    # convert byte labels to Unicode strings
    for epoch in blk.segments[0].epochs:
        epoch.labels = epoch.labels.astype('U')

    for event in blk.segments[0].events:
        event.labels = event.labels.astype('U')

    return blk
Code example #18
                                   sampling_rate=10 * qu.Hz)
        sp.segment = seg
        seg.analogsignalarrays.append(an)

    for ind2 in range(3):
        ev = neo.Event(name='Event' + str(ind2), time=np.random.rand() * qu.s, label='h')
        ev.segment = seg
        seg.events.append(ev)

    for ind2 in range(3):
        eva = neo.EventArray(name='EventArray' + str(ind2), times=np.random.rand(10) * qu.s, label=['h'] * 10)
        eva.segment = seg
        seg.eventarrays.append(eva)

    for ind2 in range(3):
        ep = neo.Epoch(name='Epoch' + str(ind2), time=np.random.rand() * qu.s, duration=np.random.rand() * qu.s,
                       label='cc')
        ep.segment = seg
        seg.epochs.append(ep)

    for ind2 in range(3):
        epa = neo.EpochArray(name='EpochArray' + str(ind2), times=np.random.rand(10) * qu.s,
                             durations=np.random.rand(10) * qu.s, labels=['cc'] * 10)
        epa.segment = seg
        seg.epocharrays.append(epa)

    blk.segments.append(seg)


mainSec = Native.SECTION(name='testSection', type='experiment')
subSec = Native.SECTION(name='testSubSection', type='experiment/electrophysiology')
for ind in range(3):
Code example #19
def _find_bursts(st, start_freq, stop_freq):
    """
    Find every period of time during which the instantaneous firing frequency
    (IFF) of the Neo :class:`SpikeTrain <neo.core.SpikeTrain>` ``st`` meets the
    criteria for bursting. Return the set of bursts as a Neo :class:`Epoch
    <neo.core.Epoch>`, with ``array_annotations['spikes']`` listing the number
    of spikes contained in each burst.

    A burst is defined as a period beginning when the IFF exceeds
    ``start_freq`` and ending when the IFF subsequently drops below the
    ``stop_freq``. Note that in general ``stop_freq`` should not exceed
    ``start_freq``, since otherwise bursts may not be detected.
    """

    isi = _elephant_tools.isi(st).rescale('s')
    iff = 1 / isi

    start_mask = iff > start_freq
    stop_mask = iff < stop_freq

    times = []
    durations = []
    n_spikes = []
    scan_index = -1
    while scan_index < iff.size:
        start_index = None
        stop_index = None

        start_mask_indexes = np.where(start_mask)[0]
        start_mask_indexes = start_mask_indexes[
            start_mask_indexes > scan_index]
        if start_mask_indexes.size == 0:
            break

        start_index = start_mask_indexes[
            0]  # first time that iff rises above start threshold

        stop_mask_indexes = np.where(stop_mask)[0]
        stop_mask_indexes = stop_mask_indexes[stop_mask_indexes > start_index]
        if stop_mask_indexes.size > 0:
            stop_index = stop_mask_indexes[
                0]  # first time after start that iff drops below stop threshold
        else:
            stop_index = -1  # end of spike train (include all spikes after start)

        times.append(st[start_index].rescale('s').magnitude)
        durations.append(
            (st[stop_index] - st[start_index]).rescale('s').magnitude)
        n_spikes.append(stop_index - start_index +
                        1 if stop_index > 0 else st.size - start_index)

        if stop_index == -1:
            break
        else:
            scan_index = stop_index

    bursts = neo.Epoch(
        times=times * pq.s,
        durations=durations * pq.s,
        labels=[''] * len(times),
        array_annotations={'spikes': n_spikes},
    )

    return bursts
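The instantaneous-firing-frequency computation at the core of the detector, sketched on synthetic spike times:

import numpy as np
import quantities as pq

st = np.array([0.0, 0.50, 0.51, 0.52, 0.53, 1.5]) * pq.s
isi = np.diff(st)              # inter-spike intervals
iff = (1 / isi).rescale('Hz')  # instantaneous firing frequency
in_burst = iff > 50 * pq.Hz    # True only within the dense cluster at ~0.5 s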
Code example #20
def load_dataset(metadata,
                 blk=None,
                 lazy=False,
                 signal_group_mode='split-all',
                 filter_events_from_epochs=False):
    """
    Load a dataset.

    ``metadata`` may be a :class:`MetadataSelector
    <neurotic.datasets.metadata.MetadataSelector>` or a simple dictionary
    containing the appropriate data.

    The ``data_file`` in ``metadata`` is read into a Neo :class:`Block
    <neo.core.Block>` using an automatically detected :mod:`neo.io` class
    if ``lazy=False`` or a :mod:`neo.rawio` class if ``lazy=True``. If
    ``data_file`` is unspecified, an empty Neo Block is created instead. If a
    Neo Block is passed as ``blk``, ``data_file`` is ignored.

    Epochs and events loaded from ``annotations_file`` and
    ``epoch_encoder_file`` and spike trains loaded from ``tridesclous_file``
    are added to the Neo Block.

    If ``lazy=False``, parameters given in ``metadata`` are used to apply
    filters to the signals, to detect spikes using amplitude discriminators, to
    calculate smoothed firing rates from spike trains, to detect bursts of
    spikes, and to calculate the rectified area under the curve (RAUC) for each
    signal.
    """

    if blk is None:
        if metadata.get('data_file', None) is not None:
            # read in the electrophysiology data
            blk = _read_data_file(metadata, lazy, signal_group_mode)
        else:
            # create an empty Block
            blk = neo.Block()
            seg = neo.Segment()
            blk.segments.append(seg)
    else:
        # a Block was provided
        if not isinstance(blk, neo.Block):
            raise TypeError('blk must be a neo.Block')

    # update the real-world start time of the data if provided
    if metadata.get('rec_datetime', None) is not None:
        if isinstance(metadata['rec_datetime'], datetime.datetime):
            blk.rec_datetime = metadata['rec_datetime']
        else:
            logger.warning(
                'Ignoring rec_datetime because it is not a properly formatted datetime: {}'
                .format(metadata['rec_datetime']))

    # apply filters to signals if not using lazy loading of signals
    if not lazy:
        blk = _apply_filters(metadata, blk)

    # copy events into epochs and vice versa
    epochs_from_events = [
        neo.Epoch(name=ev.name,
                  times=ev.times,
                  labels=ev.labels,
                  durations=np.zeros_like(ev.times))
        for ev in blk.segments[0].events
    ]
    events_from_epochs = [
        neo.Event(name=ep.name, times=ep.times, labels=ep.labels)
        for ep in blk.segments[0].epochs
    ]
    if not filter_events_from_epochs:
        blk.segments[0].epochs += epochs_from_events
    blk.segments[0].events += events_from_epochs

    # read in annotations
    annotations_dataframe = _read_annotations_file(metadata)
    blk.segments[0].epochs += _create_neo_epochs_from_dataframe(
        annotations_dataframe, metadata,
        _abs_path(metadata, 'annotations_file'), filter_events_from_epochs)
    blk.segments[0].events += _create_neo_events_from_dataframe(
        annotations_dataframe, metadata, _abs_path(metadata,
                                                   'annotations_file'))

    # read in epoch encoder file
    epoch_encoder_dataframe = _read_epoch_encoder_file(metadata)
    blk.segments[0].epochs += _create_neo_epochs_from_dataframe(
        epoch_encoder_dataframe, metadata,
        _abs_path(metadata, 'epoch_encoder_file'), filter_events_from_epochs)
    blk.segments[0].events += _create_neo_events_from_dataframe(
        epoch_encoder_dataframe, metadata,
        _abs_path(metadata, 'epoch_encoder_file'))

    # classify spikes by amplitude if not using lazy loading of signals
    if not lazy:
        blk.segments[0].spiketrains += _run_amplitude_discriminators(
            metadata, blk)

    # read in spikes identified by spike sorting using tridesclous
    spikes_dataframe = _read_spikes_file(metadata, blk)
    if spikes_dataframe is not None:
        if blk.segments[0].analogsignals:
            t_start = blk.segments[0].analogsignals[
                0].t_start  # assuming all AnalogSignals start at the same time
            t_stop = blk.segments[0].analogsignals[
                0].t_stop  # assuming all AnalogSignals stop at the same time
            sampling_period = blk.segments[0].analogsignals[
                0].sampling_period  # assuming all AnalogSignals have the same sampling rate
            blk.segments[
                0].spiketrains += _create_neo_spike_trains_from_dataframe(
                    spikes_dataframe, metadata, t_start, t_stop,
                    sampling_period)
        else:
            logger.warning(
                'Ignoring tridesclous_file because the sampling rate and start time could not be inferred from analog signals'
            )

    # calculate smoothed firing rates from spike trains if not using lazy
    # loading of signals
    if not lazy:
        blk = _compute_firing_rates(metadata, blk)

    # identify bursts from spike trains if not using lazy loading of signals
    if not lazy:
        blk.segments[0].epochs += _run_burst_detectors(metadata, blk)

    # alphabetize epoch and event channels by name
    blk.segments[0].epochs.sort(key=lambda ep: ep.name or '')
    blk.segments[0].events.sort(key=lambda ev: ev.name or '')

    # compute rectified area under the curve (RAUC) for each signal if not
    # using lazy loading of signals
    if not lazy and metadata.get('rauc_bin_duration', None) is not None:
        for sig in blk.segments[0].analogsignals:
            rauc_sig = _elephant_tools.rauc(
                signal=sig,
                baseline=metadata.get('rauc_baseline', None),
                bin_duration=metadata['rauc_bin_duration'] * pq.s,
            )
            rauc_sig.name = sig.name + ' RAUC'
            sig.annotate(
                rauc_sig=rauc_sig,
                rauc_baseline=metadata.get('rauc_baseline', None),
                rauc_bin_duration=metadata['rauc_bin_duration'] * pq.s,
            )

    return blk
Code example #21
def _read_data_file(metadata, lazy=False, signal_group_mode='split-all'):
    """
    Read in the ``data_file`` given in ``metadata`` using a :mod:`neo.io`
    class. Lazy-loading is used for signals if both ``lazy=True`` and the data
    file type is supported by a :mod:`neo.rawio` class; otherwise, signals are
    fully loaded. Lazy-loading is never used for epochs, events, and spike
    trains contained in the data file; these are always fully loaded. Returns a
    Neo :class:`Block <neo.core.Block>`.
    """

    # get a Neo IO object appropriate for the data file type
    io = _get_io(metadata)

    # force lazy=False if lazy is not supported by the reader class
    if lazy and not io.support_lazy:
        lazy = False
        logger.info(
            f'NOTE: Not reading signals in lazy mode because Neo\'s {io.__class__.__name__} reader does not support it.'
        )

    if 'signal_group_mode' in inspect.signature(
            io.read_block).parameters.keys():
        # - signal_group_mode='split-all' is the default because this ensures
        #   every channel gets its own AnalogSignal, which is important for
        #   indexing in EphyviewerConfigurator
        blk = io.read_block(lazy=lazy, signal_group_mode=signal_group_mode)
    else:
        # some IOs do not have signal_group_mode
        blk = io.read_block(lazy=lazy)

    if lazy and isinstance(io, neo.rawio.baserawio.BaseRawIO):
        # store the rawio for use with AnalogSignalFromNeoRawIOSource
        blk.rawio = io

    # load all objects except analog signals
    if lazy:

        if version.parse(neo.__version__) >= version.parse(
                '0.8.0'):  # Neo >= 0.8.0 has proxy objects with load method

            for i in range(len(blk.segments[0].epochs)):
                epoch = blk.segments[0].epochs[i]
                if hasattr(epoch, 'load'):
                    blk.segments[0].epochs[i] = epoch.load()

            for i in range(len(blk.segments[0].events)):
                event = blk.segments[0].events[i]
                if hasattr(event, 'load'):
                    blk.segments[0].events[i] = event.load()

            for i in range(len(blk.segments[0].spiketrains)):
                spiketrain = blk.segments[0].spiketrains[i]
                if hasattr(spiketrain, 'load'):
                    blk.segments[0].spiketrains[i] = spiketrain.load()

        else:  # Neo < 0.8.0 does not have proxy objects

            neorawioclass = neo.rawio.get_rawio_class(
                _abs_path(metadata, 'data_file'))
            if neorawioclass is not None:
                neorawio = neorawioclass(_abs_path(metadata, 'data_file'))
                neorawio.parse_header()

                for i in range(len(blk.segments[0].epochs)):
                    epoch = blk.segments[0].epochs[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['event_channels'])
                                          if chan['name'] == epoch.name
                                          and chan['type'] == b'epoch'), None)
                    if channel_index is not None:
                        ep_raw_times, ep_raw_durations, ep_labels = neorawio.get_event_timestamps(
                            event_channel_index=channel_index)
                        ep_times = neorawio.rescale_event_timestamp(
                            ep_raw_times, dtype='float64')
                        ep_durations = neorawio.rescale_epoch_duration(
                            ep_raw_durations, dtype='float64')
                        ep = neo.Epoch(times=ep_times * pq.s,
                                       durations=ep_durations * pq.s,
                                       labels=ep_labels,
                                       name=epoch.name)
                        blk.segments[0].epochs[i] = ep

                for i in range(len(blk.segments[0].events)):
                    event = blk.segments[0].events[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['event_channels'])
                                          if chan['name'] == event.name
                                          and chan['type'] == b'event'), None)
                    if channel_index is not None:
                        ev_raw_times, _, ev_labels = neorawio.get_event_timestamps(
                            event_channel_index=channel_index)
                        ev_times = neorawio.rescale_event_timestamp(
                            ev_raw_times, dtype='float64')
                        ev = neo.Event(times=ev_times * pq.s,
                                       labels=ev_labels,
                                       name=event.name)
                        blk.segments[0].events[i] = ev

                for i in range(len(blk.segments[0].spiketrains)):
                    spiketrain = blk.segments[0].spiketrains[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['unit_channels'])
                                          if chan['name'] == spiketrain.name),
                                         None)
                    if channel_index is not None:
                        st_raw_times = neorawio.get_spike_timestamps(
                            unit_index=channel_index)
                        st_times = neorawio.rescale_spike_timestamp(
                            st_raw_times, dtype='float64')
                        st = neo.SpikeTrain(times=st_times * pq.s,
                                            # t_stop is required by SpikeTrain;
                                            # the last spike time is an assumption
                                            t_stop=st_times.max() * pq.s,
                                            name=spiketrain.name)
                        blk.segments[0].spiketrains[i] = st

    # convert byte labels to Unicode strings
    for epoch in blk.segments[0].epochs:
        epoch.labels = epoch.labels.astype('U')

    for event in blk.segments[0].events:
        event.labels = event.labels.astype('U')

    return blk
Code example #22
def load_dataset(metadata,
                 lazy=False,
                 signal_group_mode='split-all',
                 filter_events_from_epochs=False):
    """
    Load a dataset.

    ``metadata`` may be a :class:`MetadataSelector
    <neurotic.datasets.metadata.MetadataSelector>` or a simple dictionary
    containing the appropriate data.

    The ``data_file`` in ``metadata`` is read into a Neo :class:`Block
    <neo.core.Block>` using an automatically detected :mod:`neo.io` class
    if ``lazy=False`` or a :mod:`neo.rawio` class if ``lazy=True``.

    Epochs and events loaded from ``annotations_file`` and
    ``epoch_encoder_file`` and spike trains loaded from ``tridesclous_file``
    are added to the Neo Block.

    If ``lazy=False``, filters given in ``metadata`` are applied to the
    signals and amplitude discriminators are run to detect spikes.
    """

    # read in the electrophysiology data
    blk = _read_data_file(metadata, lazy, signal_group_mode)

    # apply filters to signals if not using lazy loading of signals
    if not lazy:
        blk = _apply_filters(metadata, blk)

    # copy events into epochs and vice versa
    epochs_from_events = [
        neo.Epoch(name=ev.name,
                  times=ev.times,
                  labels=ev.labels,
                  durations=np.zeros_like(ev.times))
        for ev in blk.segments[0].events
    ]
    events_from_epochs = [
        neo.Event(name=ep.name, times=ep.times, labels=ep.labels)
        for ep in blk.segments[0].epochs
    ]
    if not filter_events_from_epochs:
        blk.segments[0].epochs += epochs_from_events
    blk.segments[0].events += events_from_epochs

    # read in annotations
    annotations_dataframe = _read_annotations_file(metadata)
    blk.segments[0].epochs += _create_neo_epochs_from_dataframe(
        annotations_dataframe, metadata,
        _abs_path(metadata, 'annotations_file'), filter_events_from_epochs)
    blk.segments[0].events += _create_neo_events_from_dataframe(
        annotations_dataframe, metadata, _abs_path(metadata,
                                                   'annotations_file'))

    # read in epoch encoder file
    epoch_encoder_dataframe = _read_epoch_encoder_file(metadata)
    blk.segments[0].epochs += _create_neo_epochs_from_dataframe(
        epoch_encoder_dataframe, metadata,
        _abs_path(metadata, 'epoch_encoder_file'), filter_events_from_epochs)
    blk.segments[0].events += _create_neo_events_from_dataframe(
        epoch_encoder_dataframe, metadata,
        _abs_path(metadata, 'epoch_encoder_file'))

    # classify spikes by amplitude if not using lazy loading of signals
    if not lazy:
        blk.segments[0].spiketrains += _run_amplitude_discriminators(
            metadata, blk)

    # read in spikes identified by spike sorting using tridesclous
    t_start = blk.segments[0].analogsignals[0].t_start
    t_stop = blk.segments[0].analogsignals[0].t_stop
    sampling_period = blk.segments[0].analogsignals[0].sampling_period
    spikes_dataframe = _read_spikes_file(metadata, blk)
    blk.segments[0].spiketrains += _create_neo_spike_trains_from_dataframe(
        spikes_dataframe, metadata, t_start, t_stop, sampling_period)

    # alphabetize epoch and event channels by name
    blk.segments[0].epochs.sort(key=lambda ep: ep.name)
    blk.segments[0].events.sort(key=lambda ev: ev.name)

    return blk