Example #1
def GroupTrialsByEpoch(seg=None, trials=None, startoftrial=None, 
        endoftrial=None, endeventmissing='next'):
    """Given a segment object and a trials dataframe, will go through 
    each epoch type and collect when the trial started and stopped.
    Started is by the start events in startoftrial, stopped is by
    event in endoftrial. If endoftrial is missing, stopped can be determined
    by two modes:
    endeventmissing = 'next': end of trial is the start of the next one
    (last trial ends on its last event)
    endeventmissing = 'last': end of trial is the last event of that trial"""
    # Makes sure seg is a segment object
    if not isinstance(seg, neo.core.Segment):
        raise TypeError('%s must be a segment object' % seg)
    # Assign trials for segment object if trials is not given
    if trials is None:
        trials = seg.dataframes['trials']
    # Calculate epochs for results column
    epoch_list = CalculateStartsAndDurations(trials=trials,
        epoch_column='results', startoftrial=startoftrial,
        endoftrial=endoftrial, endeventmissing=endeventmissing)
    # Adds durations and start times to create an Epoch
    for epoch in epoch_list:
        seg.epochs.append(Epoch(times=np.array(epoch[0]) * pq.s,
            durations=np.array(epoch[1]) * pq.s, name=epoch[2]))
    # Now calculate epochs labelled with the previous trial's results
    prev_epoch_list = CalculateStartsAndDurations(trials=trials, 
        epoch_column='with_previous_results', startoftrial=startoftrial, 
        endoftrial=endoftrial, endeventmissing=endeventmissing)
    # Adds durations and start times to create an Epoch
    for epoch in prev_epoch_list:
        seg.epochs.append(Epoch(times=np.array(epoch[0]) * pq.s,
            durations=np.array(epoch[1]) * pq.s, name=epoch[2]))
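CalculateStartsAndDurations is a helper defined elsewhere in this module; judging by the epoch[0], epoch[1], epoch[2] indexing above, it returns one (start_times, durations, name) tuple per epoch type. A minimal sketch of the same Epoch construction in isolation, with a hypothetical tuple standing in for the helper's output:

import numpy as np
import quantities as pq
from neo.core import Epoch

# hypothetical output of CalculateStartsAndDurations: one tuple per result type
epoch_list = [([0.0, 12.5], [10.0, 9.8], 'correct'),
              ([25.0], [11.2], 'incorrect')]
for starts, durations, name in epoch_list:
    ep = Epoch(times=np.array(starts) * pq.s,
               durations=np.array(durations) * pq.s,
               name=name)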
Example #2
    def setup_epochs(self):
        epochname11 = 'epoch 1 1'
        epochname12 = 'epoch 1 2'
        epochname21 = 'epoch 2 1'
        epochname22 = 'epoch 2 2'

        epochtime11 = 10 * pq.ms
        epochtime12 = 20 * pq.ms
        epochtime21 = 30 * pq.s
        epochtime22 = 40 * pq.s

        epochdur11 = 11 * pq.s
        epochdur12 = 21 * pq.s
        epochdur21 = 31 * pq.ms
        epochdur22 = 41 * pq.ms

        self.epochnames1 = [epochname11, epochname12]
        self.epochnames2 = [epochname21, epochname22]
        self.epochnames = [epochname11, epochname12, epochname21, epochname22]

        epoch11 = Epoch(epochtime11, epochdur11,
                        label=epochname11, name=epochname11, channel_index=1,
                        testattr=True)
        epoch12 = Epoch(epochtime12, epochdur12,
                        label=epochname12, name=epochname12, channel_index=2,
                        testattr=False)
        epoch21 = Epoch(epochtime21, epochdur21,
                        label=epochname21, name=epochname21, channel_index=1)
        epoch22 = Epoch(epochtime22, epochdur22,
                        label=epochname22, name=epochname22, channel_index=2)

        self.epoch1 = [epoch11, epoch12]
        self.epoch2 = [epoch21, epoch22]
        self.epoch = [epoch11, epoch12, epoch21, epoch22]
Example #3
    def create_epoch(self, parent=None, name='Epoch'):
        epoch = Epoch(times=[1.0, 2.5, 10.0] * pq.s, durations=[1.0, 0.5, 1.0] * pq.ms,
                      labels=np.array([chr(0) + 'btn1', chr(0) + 'btn2', chr(0) + 'btn3']))

        epoch.segment = parent
        self._assign_basic_attributes(epoch, name=name)

        return epoch
Example #4
 def _read_epocharray(self, node, parent):
     attributes = self._get_standard_attributes(node)
     times = self._get_quantity(node["times"])
     durations = self._get_quantity(node["durations"])
     labels = node["labels"].value
     epoch = Epoch(times=times, durations=durations, labels=labels, **attributes)
     epoch.segment = parent
     return epoch
Example #6
    def read_epocharray(self,
                        lazy=False,
                        cascade=True,
                        channel_index=0,
                        t_start=0.,
                        segment_duration=0.):
        """function to read digital timestamps. this function reads the event
        onset and offset and outputs onset and duration. to get only onsets use
        the event array function"""
        if lazy:
            epa = Epoch(file_origin=self.filename,
                        times=None,
                        durations=None,
                        labels=None)
        else:
            #create temporary empty lists to store data
            tempNames = list()
            tempTimeStamp = list()
            durations = list()
            #get entity from file
            digEntity = self.fd.get_entity(channel_index)
            #transform t_start into index (reading will start from this index)
            startat = digEntity.get_index_by_time(
                t_start, 0)  #zero means closest index to value
            #get the last index to read, using segment duration and t_start
            endat = digEntity.get_index_by_time(
                float(segment_duration + t_start),
                -1)  #-1 means last index before time

            #run through entity using only odd "i"s
            for i in range(startat, endat + 1, 1):
                if i % 2 == 1:
                    #get in which digital bit was the trigger detected
                    tempNames.append(digEntity.label[-8:])
                    #get the time stamps of even events
                    tempData, onOrOff = digEntity.get_data(i - 1)
                    #if this was an onset event, save it to the list
                    #on triggered recordings it seems that only onset events are
                    #recorded. On continuous recordings both onset(==1)
                    #and offset(==255) seem to be recorded
                    #if onOrOff == 1:
                    # append the time stamp to the empty list
                    tempTimeStamp.append(tempData)

                    #get time stamps of odd events
                    tempData1, onOrOff = digEntity.get_data(i)
                    #if onOrOff == 255:
                    #pass
                    durations.append(tempData1 - tempData)
            epa = Epoch(file_origin=self.filename,
                        times=np.array(tempTimeStamp) * pq.s,
                        durations=np.array(durations) * pq.s,
                        labels=np.array(tempNames, dtype="S"),
                        description="digital events with duration")
        return epa
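The loop above treats even entity indices as onsets and odd indices as offsets, deriving each duration by subtraction. A standalone sketch of that pairing, assuming a plain array of alternating onset/offset timestamps:

import numpy as np
import quantities as pq
from neo.core import Epoch

# assumed alternating onset/offset timestamps, in seconds
stamps = np.array([0.1, 0.4, 1.0, 1.6, 2.2, 2.5])
onsets, offsets = stamps[0::2], stamps[1::2]
epa = Epoch(times=onsets * pq.s,
            durations=(offsets - onsets) * pq.s,
            labels=np.array(['bit0'] * len(onsets), dtype='S'),
            description='digital events with duration')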
Example #7
 def _read_epocharray(self, node, parent):
     attributes = self._get_standard_attributes(node)
     times = self._get_quantity(node["times"])
     durations = self._get_quantity(node["durations"])
     if self._lazy:
         labels = np.array((), dtype=node["labels"].dtype)
     else:
         labels = node["labels"].value
     epoch = Epoch(times=times, durations=durations, labels=labels, **attributes)
     epoch.segment = parent
     if self._lazy:
         epoch.lazy_shape = node["times"].shape
     return epoch
Example #8
 def _get_stim(self, channel):
     epo = Epoch()
     ttls = self._kwe['event_types']['TTL']['events']['time_samples'].value
     event_channels = self._kwe['event_types']['TTL']['events'][
         'user_data']['event_channels'].value
     event_id = self._kwe['event_types']['TTL']['events']['user_data'][
         'eventID'].value
     epo.times = (ttls[(event_channels == channel) & (event_id == 1)] /
                  self._attrs['kwe']['sample_rate']) * pq.s
     off_times = (ttls[(event_channels == channel) & (event_id == 0)] /
                  self._attrs['kwe']['sample_rate']) * pq.s
     epo.durations = off_times - epo.times  # TODO check length match
     epo.name = 'StimulusTTL'
     return epo
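The TODO above notes that on- and off-event counts can disagree (for example when a recording stops mid-pulse). A defensive sketch, truncating both arrays to their common length before subtracting:

import numpy as np
import quantities as pq

on_times = np.array([0.0, 1.0, 2.0]) * pq.s
off_times = np.array([0.5, 1.5]) * pq.s  # final offset missing
n = min(len(on_times), len(off_times))
durations = off_times[:n] - on_times[:n]  # lengths now match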
Example #9
def fake_epoch(seed=None, n=1):
    """
    Create a fake Epoch.

    We use this separate function because the attributes of
    Epoch are not independent (must all have the same size)
    """
    kwargs = get_annotations()
    if seed is not None:
        np.random.seed(seed)
    size = np.random.randint(5, 15)
    for i, attr in enumerate(Epoch._necessary_attrs +
                             Epoch._recommended_attrs):
        if seed is not None:
            iseed = seed + i
        else:
            iseed = None
        if attr[0] in ('times', 'durations', 'labels'):
            kwargs[attr[0]] = get_fake_value(*attr,
                                             seed=iseed,
                                             obj=Epoch,
                                             shape=size)
        else:
            kwargs[attr[0]] = get_fake_value(*attr, seed=iseed, obj=Epoch, n=n)
    kwargs['seed'] = seed
    obj = Epoch(**kwargs)
    return obj
Example #10
def make_stimulus_off_epoch(epo, include_boundary=False):
    '''
    Creates a neo.Epoch of off periods.
    Parameters
    ----------
    epo : neo.Epoch
        stimulus epoch
    include_boundary : bool
        if True, prepend 0 as the start of the first off period
    Returns
    -------
    out : neo.Epoch
    '''

    import numpy as np
    import quantities as pq
    from neo.core import Epoch
    times = epo.times[:-1] + epo.durations[:-1]
    durations = epo.times[1:] - times
    if include_boundary:
        times = np.append([0], times) * pq.s
        durations = np.append(epo.times[0], durations) * pq.s

    off_epoch = Epoch(labels=[None] * len(times),
                      durations=durations,
                      times=times)

    return off_epoch
Example #11
def epoch_overview(epo, period, expected_num_epochs=None):
    '''
    Makes a new Epoch whose start and stop times are the first and last
    events in a burst of epochs; bursts are separated by gaps greater
    than period + 2 * the median stimulus duration.
    Parameters
    ----------
    epo : neo.Epoch
    period : quantities scalar
        minimum gap separating bursts
    expected_num_epochs : int, optional
        if given, assert that exactly this many bursts are found
    Returns
    -------
    out : neo.Epoch
    '''
    is_quantities(period, dtype='scalar')
    if len(epo.times) == 1:
        return epo
    from neo import Epoch
    pause = np.diff(epo.times)
    pause = pause > period + np.median(epo.durations) * 2
    start_ind = np.concatenate((np.array([1]), pause))
    stop_ind = np.concatenate((pause, np.array([1])))
    stop_times = epo.times[stop_ind == 1]
    start_times = epo.times[start_ind == 1]
    if expected_num_epochs is not None:
        assert len(start_times) == expected_num_epochs
    return Epoch(times=start_times,
                 durations=stop_times - start_times,
                 description=epo.description)
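A usage sketch for epoch_overview, assuming exana's is_quantities helper is importable: two bursts of three short pulses collapse into two overview epochs.

import numpy as np
import quantities as pq
from neo.core import Epoch

# two bursts of three 0.1 s pulses, separated by a ~9 s gap
times = np.array([0.0, 0.5, 1.0, 10.0, 10.5, 11.0]) * pq.s
stim = Epoch(times=times, durations=np.ones(6) * 0.1 * pq.s)
overview = epoch_overview(stim, period=1.0 * pq.s, expected_num_epochs=2)
# overview.times -> [0.0, 10.0] s, overview.durations -> [1.0, 1.0] s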
Example #13
    def create_all_annotated(cls):
        times = cls.rquant(1, pq.s)
        signal = cls.rquant(1, pq.V)
        blk = Block()
        blk.annotate(**cls.rdict(3))
        cls.populate_dates(blk)

        seg = Segment()
        seg.annotate(**cls.rdict(4))
        cls.populate_dates(seg)
        blk.segments.append(seg)

        asig = AnalogSignal(signal=signal, sampling_rate=pq.Hz)
        asig.annotate(**cls.rdict(2))
        seg.analogsignals.append(asig)

        isig = IrregularlySampledSignal(times=times,
                                        signal=signal,
                                        time_units=pq.s)
        isig.annotate(**cls.rdict(2))
        seg.irregularlysampledsignals.append(isig)

        epoch = Epoch(times=times, durations=times)
        epoch.annotate(**cls.rdict(4))
        seg.epochs.append(epoch)

        event = Event(times=times)
        event.annotate(**cls.rdict(4))
        seg.events.append(event)

        spiketrain = SpikeTrain(times=times, t_stop=pq.s, units=pq.s)
        d = cls.rdict(6)
        d["quantity"] = pq.Quantity(10, "mV")
        d["qarray"] = pq.Quantity(range(10), "mA")
        spiketrain.annotate(**d)
        seg.spiketrains.append(spiketrain)

        chx = ChannelIndex(name="achx", index=[1, 2], channel_ids=[0, 10])
        chx.annotate(**cls.rdict(5))
        blk.channel_indexes.append(chx)

        unit = Unit()
        unit.annotate(**cls.rdict(2))
        chx.units.append(unit)

        return blk
Example #14
    def create_all_annotated(cls):
        times = cls.rquant(1, pq.s)
        signal = cls.rquant(1, pq.V)
        blk = Block()
        blk.annotate(**cls.rdict(3))

        seg = Segment()
        seg.annotate(**cls.rdict(4))
        blk.segments.append(seg)

        asig = AnalogSignal(signal=signal, sampling_rate=pq.Hz)
        asig.annotate(**cls.rdict(2))
        seg.analogsignals.append(asig)

        isig = IrregularlySampledSignal(times=times, signal=signal,
                                        time_units=pq.s)
        isig.annotate(**cls.rdict(2))
        seg.irregularlysampledsignals.append(isig)

        epoch = Epoch(times=times, durations=times)
        epoch.annotate(**cls.rdict(4))
        seg.epochs.append(epoch)

        event = Event(times=times)
        event.annotate(**cls.rdict(4))
        seg.events.append(event)

        spiketrain = SpikeTrain(times=times, t_stop=pq.s, units=pq.s)
        d = cls.rdict(6)
        d["quantity"] = pq.Quantity(10, "mV")
        d["qarray"] = pq.Quantity(range(10), "mA")
        spiketrain.annotate(**d)
        seg.spiketrains.append(spiketrain)

        chx = ChannelIndex(name="achx", index=[1, 2], channel_ids=[0, 10])
        chx.annotate(**cls.rdict(5))
        blk.channel_indexes.append(chx)

        unit = Unit()
        unit.annotate(**cls.rdict(2))
        chx.units.append(unit)

        return blk
Example #15
    def load(self, time_slice=None, strict_slicing=True):
        '''
        *Args*:
            :time_slice: None or tuple of the time slice expressed with quantities.
                            None is the entire signal.
            :strict_slicing: True by default.
                 Controls whether an error is raised when one of the time_slice members
                 (t_start or t_stop) is outside the real time range of the segment.
        '''

        t_start, t_stop = consolidate_time_slice(time_slice, self.t_start,
                                                 self.t_stop, strict_slicing)
        _t_start, _t_stop = prepare_time_slice(time_slice)

        timestamp, durations, labels = self._rawio.get_event_timestamps(
            block_index=self._block_index,
            seg_index=self._seg_index,
            event_channel_index=self._event_channel_index,
            t_start=_t_start,
            t_stop=_t_stop)

        dtype = 'float64'
        times = self._rawio.rescale_event_timestamp(timestamp, dtype=dtype)
        units = 's'

        if durations is not None:
            durations = self._rawio.rescale_epoch_duration(durations,
                                                           dtype=dtype) * pq.s

        h = self._rawio.header['event_channels'][self._event_channel_index]
        if h['type'] == b'event':
            ret = Event(times=times,
                        labels=labels,
                        units='s',
                        name=self.name,
                        file_origin=self.file_origin,
                        description=self.description,
                        **self.annotations)
        elif h['type'] == b'epoch':
            ret = Epoch(times=times,
                        durations=durations,
                        labels=labels,
                        units='s',
                        name=self.name,
                        file_origin=self.file_origin,
                        description=self.description,
                        **self.annotations)

        if time_slice is None:
            ret.array_annotate(**self.array_annotations)
        else:
            # TODO handle array_annotations with time_slice
            pass

        return ret
Example #16
def _create_extra_stimuli_channel(from_channel: Event, time_threshold: Quantity) -> Epoch:
    stimuli: List[Tuple[int, int]] = _group_stimuli(from_channel, time_threshold)
    #data_points: List[Quantity] = [from_channel.times[start:stop] for start, stop in stimuli]
    # the label of each timespan is the label of the first event
    labels = np.array([from_channel.labels[start] for start, _ in stimuli])
    # the timestamp of each timespan is the timestamp of the first event
    times: Quantity = np.array([from_channel.times[start] for start, _ in stimuli]) * from_channel.units
    # the duration is the time difference between the first and last event
    durations: Quantity = np.array([from_channel.times[stop-1] - from_channel.times[start] for start, stop in stimuli]) * from_channel.units
    frequencies: Quantity = None
    # with this deactivate the divide by 0 warning from NP
    # it just returns infinity
    with np.errstate(divide="ignore"):
        # the frequency is the number of pulses by the duration.
        # FIXME: shouldn't it be the number -1 as the last one marks exactly the end, so we only count the ones in the middle?
        frequencies = np.array([(stop - start) / durations[i] for i, (start, stop) in enumerate(stimuli)]) / from_channel.units
    frequencies.units = Hz
    epoch = Epoch(times=times, durations=durations, labels=labels, units=from_channel.units)
    epoch.array_annotate(frequencies=frequencies)
    return epoch
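_group_stimuli is a module-local helper not shown here; from the indexing above it evidently returns half-open (start, stop) index pairs into from_channel.times. A hypothetical stand-in illustrating that assumed contract, splitting groups wherever the inter-event gap exceeds the threshold:

import numpy as np

def _group_stimuli_stub(from_channel, time_threshold):
    # hypothetical replacement for _group_stimuli, under the assumed contract:
    # a new group starts at index 0 and after every gap larger than the threshold
    gaps = np.diff(from_channel.times) > time_threshold
    starts = np.flatnonzero(np.concatenate(([True], gaps)))
    stops = np.append(starts[1:], len(from_channel.times))
    return list(zip(starts, stops))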
Example #17
    def test_epoch_write(self):
        block = Block()
        seg = Segment()
        block.segments.append(seg)

        epoch = Epoch(times=[1, 1, 10, 3]*pq.ms, durations=[3, 3, 3, 1]*pq.ms,
                      labels=np.array(["one", "two", "three", "four"]),
                      name="test epoch", description="an epoch for testing")

        seg.epochs.append(epoch)
        self.write_and_compare([block])
Example #18
def process_epoch(seg, trials, event, duration, epochname, mask=None):
    if not isinstance(trials, np.ndarray):
        warnings.warn("not a valid trials!")
        return
    if mask is None:
        mask = [True] * len(trials)
    subtrial = trials[mask]
    subtrial = subtrial[subtrial['eventname'] == event]
    seg.epochs.append(
        Epoch(times=subtrial['time'] * duration[0].units + duration[0],
              durations=[duration[1] - duration[0]] * len(subtrial),
              name=epochname))
Example #19
    def read_epoch(fh, block_id, array_id):
        nix_block = fh.handle.blocks[block_id]
        nix_da = nix_block.data_arrays[array_id]

        params = {
            'times': nix_da[0],  # TODO think about lazy data loading
            'durations': nix_da[1],  # TODO think about lazy data loading
            'labels': [x.encode('UTF-8') for x in nix_da.dimensions[0].labels]
        }

        name = Reader.Help.get_obj_neo_name(nix_da)
        if name:
            params['name'] = name

        epoch = Epoch(**params)

        for key, value in Reader.Help.read_attributes(nix_da.metadata, 'epoch').items():
            setattr(epoch, key, value)

        epoch.annotations = Reader.Help.read_annotations(nix_da.metadata, 'epoch')

        return epoch
Example #20
    def test__issue_285(self):
        train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
        unit = Unit()
        train.unit = unit
        unit.spiketrains.append(train)

        epoch = Epoch([0, 10, 20], [2, 2, 2], ["a", "b", "c"], units="ms")

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        seg.epochs.append(epoch)
        epoch.segment = seg
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].unit, Unit)
        self.assertIsInstance(r_seg.epochs[0], Epoch)
Example #21
def test_make_stimulus_off_epoch():
    from neo.core import Epoch
    from exana.stimulus.tools import (make_stimulus_off_epoch)

    times = np.linspace(0, 10, 11) * pq.s
    durations = np.ones(len(times)) * pq.s
    labels = np.ones(len(times))

    stim_epoch = Epoch(labels=labels, durations=durations, times=times)
    stim_off_epoch = make_stimulus_off_epoch(stim_epoch)

    assert (stim_off_epoch.times == np.linspace(1, 10, 10)).all()
    assert (stim_off_epoch.durations == np.zeros(10)).all()
    assert (stim_off_epoch.labels == [None] * 10).all()

    stim_off_epoch = make_stimulus_off_epoch(stim_epoch, include_boundary=True)
    assert (stim_off_epoch.times == np.linspace(0, 10, 11)).all()
    assert (stim_off_epoch.durations == np.zeros(11)).all()
    assert (stim_off_epoch.labels == [None] * 11).all()

    times = np.arange(0.5, 11, 0.5)[::2] * pq.s
    durations = np.ones(len(times)) * 0.5 * pq.s
    labels = np.ones(len(times))

    stim_epoch = Epoch(labels=labels, durations=durations, times=times)
    stim_off_epoch = make_stimulus_off_epoch(stim_epoch)

    assert (stim_off_epoch.times == np.arange(1, 11, 1)).all()
    assert (stim_off_epoch.durations == np.ones(10) * 0.5).all()
    assert (stim_off_epoch.labels == [None] * 10).all()

    stim_off_epoch = make_stimulus_off_epoch(stim_epoch, include_boundary=True)

    assert (stim_off_epoch.times == np.arange(0, 11, 1)).all()
    assert (stim_off_epoch.durations == np.ones(11) * 0.5).all()
    assert (stim_off_epoch.labels == [None] * 11).all()
Example #22
def random_epoch():
    size = random.randint(1, 7)
    times = np.cumsum(np.random.uniform(5, 10, size=size))
    durations = np.random.uniform(1, 3, size=size)
    labels = [random_string() for i in range(size)]
    obj = Epoch(
        times=times,
        durations=durations,
        labels=labels,
        units="ms",
        name=random_string(),
        array_annotations=None,   # todo
        **random_annotations(3)
    )
    return obj
Example #23
 def setUp(self):
     self.segment = Segment()
     self.epoch = Epoch(name='my epoch')
     self.segment.epochs.append(self.epoch)
     self.signal = AnalogSignal(np.random.randn(1000, 1),
                                units='V',
                                sampling_rate=1 * pq.Hz,
                                name='my signal')
     self.segment.analogsignals.append(self.signal)
     self.trials = pd.DataFrame()
     self.trials.name = 'trials'
     self.segment2 = Segment()
     self.segment2.epochs.append(self.epoch)
     self.segment2.analogsignals.append(self.signal)
     self.segment2.dataframes = [self.trials]
Example #24
    def test_anonymous_objects_write(self):
        nblocks = 2
        nsegs = 2
        nanasig = 4
        nirrseg = 2
        nepochs = 3
        nevents = 4
        nspiketrains = 3
        nchx = 5
        nunits = 10

        times = self.rquant(1, pq.s)
        signal = self.rquant(1, pq.V)
        blocks = []
        for blkidx in range(nblocks):
            blk = Block()
            blocks.append(blk)
            for segidx in range(nsegs):
                seg = Segment()
                blk.segments.append(seg)
                for anaidx in range(nanasig):
                    seg.analogsignals.append(AnalogSignal(signal=signal,
                                                          sampling_rate=pq.Hz))
                for irridx in range(nirrseg):
                    seg.irregularlysampledsignals.append(
                        IrregularlySampledSignal(times=times,
                                                 signal=signal,
                                                 time_units=pq.s)
                    )
                for epidx in range(nepochs):
                    seg.epochs.append(Epoch(times=times, durations=times))
                for evidx in range(nevents):
                    seg.events.append(Event(times=times))
                for stidx in range(nspiketrains):
                    seg.spiketrains.append(SpikeTrain(times=times,
                                                      t_stop=times[-1]+pq.s,
                                                      units=pq.s))
            for chidx in range(nchx):
                chx = ChannelIndex(name="chx{}".format(chidx),
                                   index=[1, 2],
                                   channel_ids=[11, 22])
                blk.channel_indexes.append(chx)
                for unidx in range(nunits):
                    unit = Unit()
                    chx.units.append(unit)
        self.writer.write_all_blocks(blocks)
        self.compare_blocks(blocks, self.reader.blocks)
Example #25
    def test_multiref_write(self):
        blk = Block("blk1")
        signal = AnalogSignal(name="sig1",
                              signal=[0, 1, 2],
                              units="mV",
                              sampling_period=pq.Quantity(1, "ms"))
        othersignal = IrregularlySampledSignal(name="i1",
                                               signal=[0, 0, 0],
                                               units="mV",
                                               times=[1, 2, 3],
                                               time_units="ms")
        event = Event(name="Evee", times=[0.3, 0.42], units="year")
        epoch = Epoch(name="epoche",
                      times=[0.1, 0.2] * pq.min,
                      durations=[0.5, 0.5] * pq.min)
        st = SpikeTrain(name="the train of spikes",
                        times=[0.1, 0.2, 10.3],
                        t_stop=11,
                        units="us")

        for idx in range(3):
            segname = "seg" + str(idx)
            seg = Segment(segname)
            blk.segments.append(seg)
            seg.analogsignals.append(signal)
            seg.irregularlysampledsignals.append(othersignal)
            seg.events.append(event)
            seg.epochs.append(epoch)
            seg.spiketrains.append(st)

        chidx = ChannelIndex([10, 20, 29])
        seg = blk.segments[0]
        st = SpikeTrain(name="choochoo",
                        times=[10, 11, 80],
                        t_stop=1000,
                        units="s")
        seg.spiketrains.append(st)
        blk.channel_indexes.append(chidx)
        for idx in range(6):
            unit = Unit("unit" + str(idx))
            chidx.units.append(unit)
            unit.spiketrains.append(st)

        self.writer.write_block(blk)
        self.compare_blocks([blk], self.reader.blocks)
Example #26
    def to_epoch(self, durations=None):
        from neo.core import Epoch, Event, Segment
        if durations is None:
            # times = self.times[:]
            # durations = np.diff(self.times)[:]
            # labels = self.labels[:]

            times = self.times[:-1]
            durations = np.diff(self.times)
            # labels = self.labels[:-1]
            labels = np.array([
                "{}-{}".format(a, b)
                for a, b in zip(self.labels[:-1], self.labels[1:])
            ])
        else:
            times = self.times
            labels = self.labels
        return Epoch(times=times, durations=durations, labels=labels)
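This method appears to be a version of neo's Event.to_epoch. A short sketch of the durations=None branch, assuming a neo version where that method is available: each epoch spans consecutive event times and is labelled by the pair of event labels.

import numpy as np
import quantities as pq
from neo.core import Event

evt = Event(times=np.array([0.0, 1.0, 3.0]) * pq.s,
            labels=np.array(['a', 'b', 'c']))
epc = evt.to_epoch()
# epc.times -> [0.0, 1.0] s, epc.durations -> [1.0, 2.0] s,
# epc.labels -> ['a-b', 'b-c']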
Example #27
    def load(self, time_slice=None, strict_slicing=True):
        """
        Load EpochProxy args:
            :param time_slice: None or tuple of the time slice expressed with quantities.
                            None is all of the intervals.
            :param strict_slicing: True by default.
                Control if an error is raised or not when one of the time_slice members
                (t_start or t_stop) is outside the real time range of the segment.
        """
        if time_slice:
            raise NotImplementedError("todo")
        else:
            start_times = self._time_intervals.start_time[self._index]
            stop_times = self._time_intervals.stop_time[self._index]
            durations = stop_times - start_times
            labels = self._time_intervals.tags[self._index]

        return Epoch(times=start_times * pq.s,
                     durations=durations * pq.s,
                     labels=labels,
                     name=self.name)
Example #28
    def read_epoch(self, path, cascade=True, lazy=False):
        group = self._exdir_directory[path]
        if lazy:
            times = []
        else:
            times = pq.Quantity(group['timestamps'].data,
                                group['timestamps'].attrs['unit'])

        if "durations" in group and not lazy:
            durations = pq.Quantity(group['durations'].data,
                                    group['durations'].attrs['unit'])
        elif "durations" in group and lazy:
            durations = []
        else:
            durations = None

        if 'data' in group and not lazy:
            if 'unit' not in group['data'].attrs:
                labels = group['data'].data
            else:
                labels = pq.Quantity(group['data'].data,
                                     group['data'].attrs['unit'])
        elif 'data' in group and lazy:
            labels = []
        else:
            labels = None
        annotations = {'exdir_path': path}
        annotations.update(group.attrs.to_dict())
        if lazy:
            lazy_shape = (group.attrs['num_samples'], )
        else:
            lazy_shape = None
        epo = Epoch(times=times,
                    durations=durations,
                    labels=labels,
                    lazy_shape=lazy_shape,
                    **annotations)

        return epo
Example #29
    def test__issue_285(self):
        # Spiketrain
        train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
        unit = Unit()
        train.unit = unit
        unit.spiketrains.append(train)

        epoch = Epoch(np.array([0, 10, 20]),
                      np.array([2, 2, 2]),
                      np.array(["a", "b", "c"]),
                      units="ms")

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        seg.epochs.append(epoch)
        epoch.segment = seg
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].unit, Unit)
        self.assertIsInstance(r_seg.epochs[0], Epoch)
        os.remove('blk.pkl')

        # Epoch
        epoch = Epoch(times=np.arange(0, 30, 10) * pq.s,
                      durations=[10, 5, 7] * pq.ms,
                      labels=np.array(['btn0', 'btn1', 'btn2'], dtype='U'))
        epoch.segment = Segment()
        blk = Block()
        seg = Segment()
        seg.epochs.append(epoch)
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.epochs[0].segment, Segment)
        os.remove('blk.pkl')

        # Event
        event = Event(np.arange(0, 30, 10) * pq.s,
                      labels=np.array(['trig0', 'trig1', 'trig2'], dtype='U'))
        event.segment = Segment()

        blk = Block()
        seg = Segment()
        seg.events.append(event)
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.events[0].segment, Segment)
        os.remove('blk.pkl')

        # IrregularlySampledSignal
        signal = IrregularlySampledSignal([0.0, 1.23, 6.78], [1, 2, 3],
                                          units='mV',
                                          time_units='ms')
        signal.segment = Segment()

        blk = Block()
        seg = Segment()
        seg.irregularlysampledsignals.append(signal)
        blk.segments.append(seg)
        blk.segments[0].block = blk

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.irregularlysampledsignals[0].segment,
                              Segment)
        os.remove('blk.pkl')
Example #30
def generate_one_simple_segment(seg_name='segment 0', supported_objects=[], nb_analogsignal=4,
                                t_start=0. * pq.s, sampling_rate=10 * pq.kHz, duration=6. * pq.s,
                                nb_spiketrain=6, spikerate_range=[.5 * pq.Hz, 12 * pq.Hz],
                                event_types={'stim': ['a', 'b', 'c', 'd'],
                                             'enter_zone': ['one', 'two'],
                                             'color': ['black', 'yellow', 'green'], },
                                event_size_range=[5, 20],
                                epoch_types={'animal state': ['Sleep', 'Freeze', 'Escape'],
                                             'light': ['dark', 'lighted']},
                                epoch_duration_range=[.5, 3.],
                                # this should be multiplied by pq.s, no?
                                array_annotations={'valid': np.array([True, False]),
                                                   'number': np.array(range(5))}
                                ):
    if supported_objects and Segment not in supported_objects:
        raise ValueError('Segment must be in supported_objects')
    seg = Segment(name=seg_name)
    if AnalogSignal in supported_objects:
        for a in range(nb_analogsignal):
            anasig = AnalogSignal(rand(int((sampling_rate * duration).simplified)),
                                  sampling_rate=sampling_rate,
                                  t_start=t_start, units=pq.mV, channel_index=a,
                                  name='sig %d for segment %s' % (a, seg.name))
            seg.analogsignals.append(anasig)

    if SpikeTrain in supported_objects:
        for s in range(nb_spiketrain):
            spikerate = rand() * np.diff(spikerate_range)
            spikerate += spikerate_range[0].magnitude
            # spikedata = rand(int((spikerate*duration).simplified))*duration
            # sptr = SpikeTrain(spikedata,
            #                  t_start=t_start, t_stop=t_start+duration)
            #                  #, name = 'spiketrain %d'%s)
            spikes = rand(int((spikerate * duration).simplified))
            spikes.sort()  # spikes are supposed to be an ascending sequence
            sptr = SpikeTrain(spikes * duration, t_start=t_start, t_stop=t_start + duration)
            sptr.annotations['channel_index'] = s
            # Randomly generate array_annotations from given options
            arr_ann = {key: value[(rand(len(spikes)) * len(value)).astype('i')] for (key, value) in
                       array_annotations.items()}
            sptr.array_annotate(**arr_ann)
            seg.spiketrains.append(sptr)

    if Event in supported_objects:
        for name, labels in event_types.items():
            evt_size = rand() * np.diff(event_size_range)
            evt_size += event_size_range[0]
            evt_size = int(evt_size)
            labels = np.array(labels, dtype='S')
            labels = labels[(rand(evt_size) * len(labels)).astype('i')]
            evt = Event(times=rand(evt_size) * duration, labels=labels)
            # Randomly generate array_annotations from given options
            arr_ann = {key: value[(rand(evt_size) * len(value)).astype('i')] for (key, value) in
                       array_annotations.items()}
            evt.array_annotate(**arr_ann)
            seg.events.append(evt)

    if Epoch in supported_objects:
        for name, labels in epoch_types.items():
            t = 0
            times = []
            durations = []
            while t < duration:
                times.append(t)
                dur = rand() * (epoch_duration_range[1] - epoch_duration_range[0])
                dur += epoch_duration_range[0]
                durations.append(dur)
                t = t + dur
            labels = np.array(labels, dtype='S')
            labels = labels[(rand(len(times)) * len(labels)).astype('i')]
            assert len(times) == len(durations)
            assert len(times) == len(labels)
            epc = Epoch(times=pq.Quantity(times, units=pq.s),
                        durations=pq.Quantity(durations, units=pq.s),
                        labels=labels,)
            assert epc.times.dtype == 'float'
            # Randomly generate array_annotations from given options
            arr_ann = {key: value[(rand(len(times)) * len(value)).astype('i')] for (key, value) in
                       array_annotations.items()}
            epc.array_annotate(**arr_ann)
            seg.epochs.append(epc)

    # TODO : Spike, Event

    seg.create_many_to_one_relationship()
    return seg
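A usage sketch for the generator above: supported_objects gates which containers get populated, so passing the classes used in this file yields a fully populated segment.

import quantities as pq
from neo.core import Segment, AnalogSignal, SpikeTrain, Event, Epoch

seg = generate_one_simple_segment(
    supported_objects=[Segment, AnalogSignal, SpikeTrain, Event, Epoch],
    duration=3. * pq.s)
print(len(seg.analogsignals), len(seg.spiketrains),
      len(seg.events), len(seg.epochs))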
Example #31
    def test__issue_285(self):
        # Spiketrain
        train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
        unit = Unit()
        train.unit = unit
        unit.spiketrains.append(train)

        epoch = Epoch([0, 10, 20], [2, 2, 2], ["a", "b", "c"], units="ms")

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        seg.epochs.append(epoch)
        epoch.segment = seg
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].unit, Unit)
        self.assertIsInstance(r_seg.epochs[0], Epoch)
        os.remove('blk.pkl')
        # Epoch
        train = Epoch(times=np.arange(0, 30, 10) * pq.s,
                      durations=[10, 5, 7] * pq.ms,
                      labels=np.array(['btn0', 'btn1', 'btn2'], dtype='S'))
        train.segment = Segment()
        unit = Unit()
        unit.spiketrains.append(train)
        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].segment, Segment)
        os.remove('blk.pkl')
        # Event
        train = Event(np.arange(0, 30, 10) * pq.s,
                      labels=np.array(['trig0', 'trig1', 'trig2'], dtype='S'))
        train.segment = Segment()
        unit = Unit()
        unit.spiketrains.append(train)

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].segment, Segment)
        os.remove('blk.pkl')
        # IrregularlySampledSignal
        train = IrregularlySampledSignal([0.0, 1.23, 6.78], [1, 2, 3],
                                         units='mV', time_units='ms')
        train.segment = Segment()
        unit = Unit()
        train.channel_index = ChannelIndex(1)
        unit.spiketrains.append(train)

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        blk.segments.append(seg)
        blk.segments[0].block = blk

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].segment, Segment)
        self.assertIsInstance(r_seg.spiketrains[0].channel_index, ChannelIndex)
        os.remove('blk.pkl')
Example #32
    def test_roundtrip(self):

        annotations = {"session_start_time": datetime.now()}
        # Define Neo blocks
        bl0 = Block(name='First block', **annotations)
        bl1 = Block(name='Second block', **annotations)
        bl2 = Block(name='Third block', **annotations)
        original_blocks = [bl0, bl1, bl2]

        num_seg = 4  # number of segments
        num_chan = 3  # number of channels

        for blk in original_blocks:

            for ind in range(num_seg):  # number of Segments
                seg = Segment(index=ind)
                seg.block = blk
                blk.segments.append(seg)

            for seg in blk.segments:  # AnalogSignal objects

                # 3 Neo AnalogSignals
                a = AnalogSignal(np.random.randn(44, num_chan) * pq.nA,
                                 sampling_rate=10 * pq.kHz,
                                 t_start=50 * pq.ms)
                b = AnalogSignal(np.random.randn(64, num_chan) * pq.mV,
                                 sampling_rate=8 * pq.kHz,
                                 t_start=40 * pq.ms)
                c = AnalogSignal(np.random.randn(33, num_chan) * pq.uA,
                                 sampling_rate=10 * pq.kHz,
                                 t_start=120 * pq.ms)

                # 2 Neo IrregularlySampledSignals
                d = IrregularlySampledSignal(
                    np.arange(7.0) * pq.ms,
                    np.random.randn(7, num_chan) * pq.mV)

                # 2 Neo SpikeTrains
                train = SpikeTrain(times=[1, 2, 3] * pq.s,
                                   t_start=1.0,
                                   t_stop=10.0)
                train2 = SpikeTrain(times=[4, 5, 6] * pq.s, t_stop=10.0)
                # todo: add waveforms

                # 1 Neo Event
                evt = Event(times=np.arange(0, 30, 10) * pq.ms,
                            labels=np.array(['ev0', 'ev1', 'ev2']))

                # 2 Neo Epochs
                epc = Epoch(times=np.arange(0, 30, 10) * pq.s,
                            durations=[10, 5, 7] * pq.ms,
                            labels=np.array(['btn0', 'btn1', 'btn2']))

                epc2 = Epoch(times=np.arange(10, 40, 10) * pq.s,
                             durations=[9, 3, 8] * pq.ms,
                             labels=np.array(['btn3', 'btn4', 'btn5']))

                seg.spiketrains.append(train)
                seg.spiketrains.append(train2)

                seg.epochs.append(epc)
                seg.epochs.append(epc2)

                seg.analogsignals.append(a)
                seg.analogsignals.append(b)
                seg.analogsignals.append(c)
                seg.irregularlysampledsignals.append(d)
                seg.events.append(evt)
                a.segment = seg
                b.segment = seg
                c.segment = seg
                d.segment = seg
                evt.segment = seg
                train.segment = seg
                train2.segment = seg
                epc.segment = seg
                epc2.segment = seg

        # write to file
        test_file_name = "test_round_trip.nwb"
        iow = NWBIO(filename=test_file_name, mode='w')
        iow.write_all_blocks(original_blocks)

        ior = NWBIO(filename=test_file_name, mode='r')
        retrieved_blocks = ior.read_all_blocks()

        self.assertEqual(len(retrieved_blocks), 3)
        self.assertEqual(len(retrieved_blocks[2].segments), num_seg)

        original_signal_22b = original_blocks[2].segments[2].analogsignals[1]
        retrieved_signal_22b = retrieved_blocks[2].segments[2].analogsignals[1]
        for attr_name in ("name", "units", "sampling_rate", "t_start"):
            retrieved_attribute = getattr(retrieved_signal_22b, attr_name)
            original_attribute = getattr(original_signal_22b, attr_name)
            self.assertEqual(retrieved_attribute, original_attribute)
        assert_array_equal(retrieved_signal_22b.magnitude,
                           original_signal_22b.magnitude)

        original_issignal_22d = original_blocks[2].segments[
            2].irregularlysampledsignals[0]
        retrieved_issignal_22d = retrieved_blocks[2].segments[
            2].irregularlysampledsignals[0]
        for attr_name in ("name", "units", "t_start"):
            retrieved_attribute = getattr(retrieved_issignal_22d, attr_name)
            original_attribute = getattr(original_issignal_22d, attr_name)
            self.assertEqual(retrieved_attribute, original_attribute)
        assert_array_equal(
            retrieved_issignal_22d.times.rescale('ms').magnitude,
            original_issignal_22d.times.rescale('ms').magnitude)
        assert_array_equal(retrieved_issignal_22d.magnitude,
                           original_issignal_22d.magnitude)

        original_event_11 = original_blocks[1].segments[1].events[0]
        retrieved_event_11 = retrieved_blocks[1].segments[1].events[0]
        for attr_name in ("name", ):
            retrieved_attribute = getattr(retrieved_event_11, attr_name)
            original_attribute = getattr(original_event_11, attr_name)
            self.assertEqual(retrieved_attribute, original_attribute)
        assert_array_equal(
            retrieved_event_11.rescale('ms').magnitude,
            original_event_11.rescale('ms').magnitude)
        assert_array_equal(retrieved_event_11.labels, original_event_11.labels)

        original_spiketrain_131 = original_blocks[1].segments[1].spiketrains[1]
        retrieved_spiketrain_131 = retrieved_blocks[1].segments[1].spiketrains[
            1]
        for attr_name in ("name", "t_start", "t_stop"):
            retrieved_attribute = getattr(retrieved_spiketrain_131, attr_name)
            original_attribute = getattr(original_spiketrain_131, attr_name)
            self.assertEqual(retrieved_attribute, original_attribute)
        assert_array_equal(
            retrieved_spiketrain_131.times.rescale('ms').magnitude,
            original_spiketrain_131.times.rescale('ms').magnitude)

        original_epoch_11 = original_blocks[1].segments[1].epochs[0]
        retrieved_epoch_11 = retrieved_blocks[1].segments[1].epochs[0]
        for attr_name in ("name", ):
            retrieved_attribute = getattr(retrieved_epoch_11, attr_name)
            original_attribute = getattr(original_epoch_11, attr_name)
            self.assertEqual(retrieved_attribute, original_attribute)
        assert_array_equal(
            retrieved_epoch_11.rescale('ms').magnitude,
            original_epoch_11.rescale('ms').magnitude)
        assert_allclose(
            retrieved_epoch_11.durations.rescale('ms').magnitude,
            original_epoch_11.durations.rescale('ms').magnitude)
        assert_array_equal(retrieved_epoch_11.labels, original_epoch_11.labels)
        os.remove(test_file_name)
Example #33
    def _mtag_eest_to_neo(self, nix_mtag, lazy):
        neo_attrs = self._nix_attr_to_neo(nix_mtag)
        neo_type = nix_mtag.type

        time_unit = nix_mtag.positions.unit
        if lazy:
            times = pq.Quantity(np.empty(0), time_unit)
            lazy_shape = np.shape(nix_mtag.positions)
        else:
            times = pq.Quantity(nix_mtag.positions, time_unit)
            lazy_shape = None
        if neo_type == "neo.epoch":
            if lazy:
                durations = pq.Quantity(np.empty(0), nix_mtag.extents.unit)
                labels = np.empty(0, dtype='S')
            else:
                durations = pq.Quantity(nix_mtag.extents,
                                        nix_mtag.extents.unit)
                labels = np.array(nix_mtag.positions.dimensions[0].labels,
                                  dtype="S")
            eest = Epoch(times=times, durations=durations, labels=labels,
                         **neo_attrs)
        elif neo_type == "neo.event":
            if lazy:
                labels = np.empty(0, dtype='S')
            else:
                labels = np.array(nix_mtag.positions.dimensions[0].labels,
                                  dtype="S")
            eest = Event(times=times, labels=labels, **neo_attrs)
        elif neo_type == "neo.spiketrain":
            if "t_start" in neo_attrs:
                if "t_start.units" in neo_attrs:
                    t_start_units = neo_attrs["t_start.units"]
                    del neo_attrs["t_start.units"]
                else:
                    t_start_units = time_unit
                t_start = pq.Quantity(neo_attrs["t_start"], t_start_units)
                del neo_attrs["t_start"]
            else:
                t_start = None
            if "t_stop" in neo_attrs:
                if "t_stop.units" in neo_attrs:
                    t_stop_units = neo_attrs["t_stop.units"]
                    del neo_attrs["t_stop.units"]
                else:
                    t_stop_units = time_unit
                t_stop = pq.Quantity(neo_attrs["t_stop"], t_stop_units)
                del neo_attrs["t_stop"]
            else:
                t_stop = None
            if "sampling_interval.units" in neo_attrs:
                interval_units = neo_attrs["sampling_interval.units"]
                del neo_attrs["sampling_interval.units"]
            else:
                interval_units = None
            if "left_sweep.units" in neo_attrs:
                left_sweep_units = neo_attrs["left_sweep.units"]
                del neo_attrs["left_sweep.units"]
            else:
                left_sweep_units = None
            eest = SpikeTrain(times=times, t_start=t_start,
                              t_stop=t_stop, **neo_attrs)
            if len(nix_mtag.features):
                wfda = nix_mtag.features[0].data
                wftime = self._get_time_dimension(wfda)
                if lazy:
                    eest.waveforms = pq.Quantity(np.empty((0, 0, 0)),
                                                 wfda.unit)
                    eest.sampling_period = pq.Quantity(1, wftime.unit)
                    eest.left_sweep = pq.Quantity(0, wftime.unit)
                else:
                    eest.waveforms = pq.Quantity(wfda, wfda.unit)
                    if interval_units is None:
                        interval_units = wftime.unit
                    eest.sampling_period = pq.Quantity(
                        wftime.sampling_interval, interval_units
                    )
                    if left_sweep_units is None:
                        left_sweep_units = wftime.unit
                    if "left_sweep" in wfda.metadata:
                        eest.left_sweep = pq.Quantity(
                            wfda.metadata["left_sweep"], left_sweep_units
                        )
        else:
            return None
        self._neo_map[nix_mtag.name] = eest
        if lazy_shape:
            eest.lazy_shape = lazy_shape
        return eest
Example #34
def plot_segment(seg,
                 analist=(),
                 spklist=(),
                 epchlist=(),
                 showevent=None,
                 embedpeaks=True,
                 recenter=None):
    anas = [x for x in seg.analogsignals if x.name in analist]
    spks = [x for x in seg.spiketrains if x.name in spklist]
    evts = next((e for e in seg.events if e.name == 'Events'), None)
    if not evts:
        warnings.warn("No Events presented in data. Turning showevent off.")
        showevent = False
    if (not anas) and (not spks):
        print("Empty data lists. Nothing plotted")
        return
    if not epchlist:
        if spks:
            tunit = spks[0].t_start.units
            tstart = np.min([s.t_start for s in spks])
            tstop = np.max([s.t_stop for s in spks])
        elif anas:
            tunit = anas[0].t_start.units
            tstart = np.min([a.t_start for a in anas])
            tstop = np.max([np.max(a.times) for a in anas])
        else:
            tunit = pq.s
            tstart = 0
            tstop = 0
        epchlist = [
            Epoch(name='all',
                  times=[tstart] * tunit,
                  durations=[tstop - tstart] * tunit)
        ]
    else:
        epchlist = [ep for ep in seg.epochs if ep.name in epchlist]
    nrow = len(analist) + len(spklist)
    ncol = len(epchlist)
    fig = plt.figure(figsize=(ncol * 18, nrow * 6))
    fig.suptitle(seg.name)
    gs = grdspc.GridSpec(nrow, ncol)
    for epid, epch in enumerate(epchlist):
        row = 0
        cur_anas = [slice_signals(a, epch, recenter) for a in anas]
        cur_spks = [slice_signals(s, epch, recenter) for s in spks]
        if showevent:
            cur_evts = slice_signals(evts, epch, recenter)
        for ana in cur_anas:
            ax = fig.add_subplot(gs[row, epid])
            for aid, a in enumerate(ana):
                ax.plot(a.times, a, linewidth=0.3, alpha=0.5, color='darkgrey')
                if embedpeaks:
                    arrlen = (np.max(a) - np.min(a)) / 10
                    peaks = next(
                        (spk[aid]
                         for spk in cur_spks if spk[aid].name == a.name), [])
                    for p in peaks:
                        pidx = np.argmin(np.abs(a.times - p))
                        pidx = int(pidx)
                        ax.annotate('',
                                    xy=(p, a[pidx]),
                                    xytext=(p, a[pidx] + arrlen),
                                    arrowprops={'arrowstyle': "->"})
                if showevent:
                    for et in cur_evts[aid].times:
                        ax.add_patch(
                            pch.Rectangle((et - showevent, np.min(a)),
                                          showevent * 2,
                                          np.max(a) - np.min(a),
                                          alpha=0.2,
                                          edgecolor='none'))
            ana_mean = np.mean(trim_signals(ana), axis=0)
            ax.plot(ana[0].times, ana_mean)
            ax.set_xlabel("time (s)")
            ax.set_ylabel("signal")
            # ax.set_ylim(0, 1)
            ax.set_title("Signal: " + ana[0].name + "\n" + epch.name)
            row += 1
        for spk in cur_spks:
            ax = fig.add_subplot(gs[row, epid])
            for sid, s in enumerate(spk):
                if s.times.size:
                    ax.vlines(s.times, sid, sid + 1)
                    if showevent:
                        for et in cur_evts[sid].times:
                            ax.add_patch(
                                pch.Rectangle((et - showevent, sid),
                                              showevent * 2,
                                              1,
                                              alpha=0.2,
                                              edgecolor='none'))
            ax.set_title("Signal: " + spk[0].name + "\n" + epch.name)
            ax.set_xlabel("time (s)")
            ax.set_ylabel("spike ID")
            row += 1
    return fig
Example #35
    def read_segment(self, lazy=False, cascade=True):
        fid = open(self.filename, 'rb')
        global_header = HeaderReader(fid, GlobalHeader).read_f(offset=0)
        # ~ print globalHeader
        #~ print 'version' , globalHeader['version']
        seg = Segment()
        seg.file_origin = os.path.basename(self.filename)
        seg.annotate(neuroexplorer_version=global_header['version'])
        seg.annotate(comment=global_header['comment'])

        if not cascade:
            return seg

        offset = 544
        for i in range(global_header['nvar']):
            entity_header = HeaderReader(fid, EntityHeader).read_f(
                offset=offset + i * 208)
            entity_header['name'] = entity_header['name'].replace('\x00', '')

            #print 'i',i, entityHeader['type']

            if entity_header['type'] == 0:
                # neuron
                if lazy:
                    spike_times = [] * pq.s
                else:
                    spike_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                            shape=(entity_header['n']),
                                            offset=entity_header['offset'])
                    spike_times = spike_times.astype('f8') / global_header[
                        'freq'] * pq.s
                sptr = SpikeTrain(
                    times=spike_times,
                    t_start=global_header['tbeg'] /
                    global_header['freq'] * pq.s,
                    t_stop=global_header['tend'] /
                    global_header['freq'] * pq.s,
                    name=entity_header['name'])
                if lazy:
                    sptr.lazy_shape = entity_header['n']
                sptr.annotate(channel_index=entity_header['WireNumber'])
                seg.spiketrains.append(sptr)

            if entity_header['type'] == 1:
                # event
                if lazy:
                    event_times = [] * pq.s
                else:
                    event_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                            shape=(entity_header['n']),
                                            offset=entity_header['offset'])
                    event_times = event_times.astype('f8') / global_header[
                        'freq'] * pq.s
                labels = np.array([''] * event_times.size, dtype='S')
                evar = Event(times=event_times, labels=labels,
                             channel_name=entity_header['name'])
                if lazy:
                    evar.lazy_shape = entity_header['n']
                seg.events.append(evar)

            if entity_header['type'] == 2:
                # interval
                if lazy:
                    start_times = [] * pq.s
                    stop_times = [] * pq.s
                else:
                    start_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                            shape=(entity_header['n']),
                                            offset=entity_header['offset'])
                    start_times = start_times.astype('f8') / global_header[
                        'freq'] * pq.s
                    stop_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                           shape=(entity_header['n']),
                                           offset=entity_header['offset'] +
                                           entity_header['n'] * 4)
                    stop_times = stop_times.astype('f') / global_header[
                        'freq'] * pq.s
                epar = Epoch(times=start_times,
                             durations=stop_times - start_times,
                             labels=np.array([''] * start_times.size,
                                             dtype='S'),
                             channel_name=entity_header['name'])
                if lazy:
                    epar.lazy_shape = entity_header['n']
                seg.epochs.append(epar)

            if entity_header['type'] == 3:
                # spiketrain and waveforms
                if lazy:
                    spike_times = [] * pq.s
                    waveforms = None
                else:

                    spike_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                            shape=(entity_header['n']),
                                            offset=entity_header['offset'])
                    spike_times = spike_times.astype('f8') / global_header[
                        'freq'] * pq.s

                    waveforms = np.memmap(self.filename, np.dtype('i2'), 'r',
                                          shape=(entity_header['n'], 1,
                                                 entity_header['NPointsWave']),
                                          offset=entity_header['offset'] +
                                          entity_header['n'] * 4)
                    waveforms = (waveforms.astype('f') *
                                 entity_header['ADtoMV'] +
                                 entity_header['MVOffset']) * pq.mV
                t_stop = global_header['tend'] / global_header['freq'] * pq.s
                if spike_times.size > 0:
                    t_stop = max(t_stop, max(spike_times))
                sptr = SpikeTrain(
                    times=spike_times,
                    t_start=global_header['tbeg'] /
                    global_header['freq'] * pq.s,
                    t_stop=t_stop, name=entity_header['name'],
                    waveforms=waveforms,
                    sampling_rate=entity_header['WFrequency'] * pq.Hz,
                    left_sweep=0 * pq.ms)
                if lazy:
                    sptr.lazy_shape = entity_header['n']
                sptr.annotate(channel_index=entity_header['WireNumber'])
                seg.spiketrains.append(sptr)

            if entity_header['type'] == 4:
                # popvectors
                pass

            if entity_header['type'] == 5:
                # analog
                timestamps = np.memmap(self.filename, np.dtype('i4'), 'r',
                                       shape=(entity_header['n']),
                                       offset=entity_header['offset'])
                timestamps = timestamps.astype('f8') / global_header['freq']
                # fragment-start indices follow the timestamps block
                fragment_starts = np.memmap(self.filename, np.dtype('i4'), 'r',
                                            shape=(entity_header['n']),
                                            offset=entity_header['offset'] +
                                            entity_header['n'] * 4)
                fragment_starts = fragment_starts.astype('f8') / global_header[
                    'freq']
                t_start = timestamps[0] - fragment_starts[0] / float(
                    entity_header['WFrequency'])
                del timestamps, fragment_starts

                if lazy:
                    signal = [] * pq.mV
                else:
                    # samples follow the timestamp and fragment-start blocks
                    signal = np.memmap(self.filename, np.dtype('i2'), 'r',
                                       shape=(entity_header['NPointsWave']),
                                       offset=entity_header['offset'] +
                                       entity_header['n'] * 8)
                    signal = signal.astype('f')
                    signal *= entity_header['ADtoMV']
                    signal += entity_header['MVOffset']
                    signal = signal * pq.mV

                ana_sig = AnalogSignal(
                    signal=signal, t_start=t_start * pq.s,
                    sampling_rate=entity_header['WFrequency'] * pq.Hz,
                    name=entity_header['name'],
                    channel_index=entity_header['WireNumber'])
                if lazy:
                    ana_sig.lazy_shape = entity_header['NPointsWave']
                seg.analogsignals.append(ana_sig)

            if entity_header['type'] == 6:
                # markers  : TO TEST
                if lazy:
                    times = [] * pq.s
                    labels = np.array([], dtype='S')
                    markertype = None
                else:
                    times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                      shape=(entity_header['n']),
                                      offset=entity_header['offset'])
                    times = times.astype('f8') / global_header['freq'] * pq.s
                    fid.seek(entity_header['offset'] + entity_header['n'] * 4)
                    markertype = fid.read(64).replace(b'\x00', b'').decode('ascii')
                    labels = np.memmap(
                        self.filename, np.dtype(
                            'S' + str(entity_header['MarkerLength'])),
                        'r', shape=(entity_header['n']),
                        offset=entity_header['offset'] +
                        entity_header['n'] * 4 + 64)
                ea = Event(times=times,
                           labels=labels.view(np.ndarray),
                           name=entity_header['name'],
                           channel_index=entity_header['WireNumber'],
                           marker_type=markertype)
                if lazy:
                    ea.lazy_shape = entity_header['n']
                seg.events.append(ea)

        seg.create_many_to_one_relationship()
        return seg
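Assuming this method belongs to neo's legacy NeuroExplorerIO (an assumption; the header layout and entity types above match the .nex format), usage would look roughly like this, with a placeholder filename:

from neo.io import NeuroExplorerIO  # assumed host class for this method

reader = NeuroExplorerIO(filename='recording.nex')  # placeholder path
seg = reader.read_segment(lazy=False, cascade=True)

# interval variables (type 2) come back as Epoch objects
for ep in seg.epochs:
    print(ep.annotations.get('channel_name'), ep.times[:3], ep.durations[:3])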
Example #36
0
    def read_segment(self, cascade=True, lazy=False):
        """
        Read the whole file as a single Segment.

        Arguments:
            cascade: if False, return only the annotated, empty Segment.
            lazy: if True, leave data arrays empty; objects then carry a
                lazy_shape attribute instead of data.
        """
        f = StructFile(open(self.filename, 'rb'))

        # Name
        f.seek(64, 0)
        surname = f.read(22).decode('ascii').rstrip(' ')
        firstname = f.read(20).decode('ascii').rstrip(' ')

        # Date
        f.seek(128, 0)
        day, month, year, hour, minute, sec = f.read_f('bbbbbb')
        rec_datetime = datetime.datetime(year + 1900, month, day, hour, minute,
                                         sec)

        f.seek(138, 0)
        Data_Start_Offset, Num_Chan, Multiplexer, Rate_Min, Bytes = f.read_f(
            'IHHHH')

        # header version
        f.seek(175, 0)
        header_version, = f.read_f('b')
        assert header_version == 4

        seg = Segment(name=str(firstname + ' ' + surname),
                      file_origin=os.path.basename(self.filename))
        seg.annotate(surname=surname)
        seg.annotate(firstname=firstname)
        seg.annotate(rec_datetime=rec_datetime)

        if not cascade:
            f.close()
            return seg

        # area
        f.seek(176, 0)
        zone_names = ['ORDER', 'LABCOD', 'NOTE', 'FLAGS', 'TRONCA', 'IMPED_B',
                      'IMPED_E', 'MONTAGE',
                      'COMPRESS', 'AVERAGE', 'HISTORY', 'DVIDEO', 'EVENT A',
                      'EVENT B', 'TRIGGER']
        zones = {}
        for zname in zone_names:
            zname2, pos, length = f.read_f('8sII')
            zones[zname] = zname2, pos, length

        # reading raw data
        if not lazy:
            f.seek(Data_Start_Offset, 0)
            rawdata = np.frombuffer(f.read(), dtype='u' + str(Bytes))
            rawdata = rawdata.reshape((-1, Num_Chan))

        # Reading Code Info
        zname2, pos, length = zones['ORDER']
        f.seek(pos, 0)
        code = np.frombuffer(f.read(Num_Chan * 2), dtype='u2', count=Num_Chan)

        units = {-1: pq.nano * pq.V, 0: pq.uV, 1: pq.mV, 2: 1, 100: pq.percent,
                 101: pq.dimensionless, 102: pq.dimensionless}

        for c in range(Num_Chan):
            zname2, pos, length = zones['LABCOD']
            f.seek(pos + code[c] * 128 + 2, 0)

            label = f.read(6).strip(b"\x00").decode('ascii')
            ground = f.read(6).strip(b"\x00").decode('ascii')
            (logical_min, logical_max, logical_ground, physical_min,
             physical_max) = f.read_f('iiiii')
            k, = f.read_f('h')
            if k in units.keys():
                unit = units[k]
            else:
                unit = pq.uV

            f.seek(8, 1)
            k_rate, = f.read_f('H')
            sampling_rate = k_rate * Rate_Min * pq.Hz

            if lazy:
                signal = [] * unit
            else:
                factor = float(physical_max - physical_min) / float(
                    logical_max - logical_min + 1)
                signal = (rawdata[:, c].astype(
                    'f') - logical_ground) * factor * unit

            ana_sig = AnalogSignal(signal, sampling_rate=sampling_rate,
                                   name=str(label), channel_index=c)
            if lazy:
                ana_sig.lazy_shape = None
            ana_sig.annotate(ground=ground)

            seg.analogsignals.append(ana_sig)

        sampling_rate = np.mean(
            [ana_sig.sampling_rate for ana_sig in seg.analogsignals]) * pq.Hz

        # Read trigger and notes
        for zname, label_dtype in [('TRIGGER', 'u2'), ('NOTE', 'S40')]:
            zname2, pos, length = zones[zname]
            f.seek(pos, 0)
            triggers = np.frombuffer(f.read(length), dtype=[('pos', 'u4'), (
                'label', label_dtype)])
            if not lazy:
                keep = (triggers['pos'] >= triggers['pos'][0]) & (
                    triggers['pos'] < rawdata.shape[0]) & (
                    triggers['pos'] != 0)
                triggers = triggers[keep]
                ea = Event(name=zname[0] + zname[1:].lower(),
                           labels=triggers['label'].astype('S'),
                           times=(triggers['pos'] / sampling_rate).rescale('s'))
            else:
                ea = Event(name=zname[0] + zname[1:].lower())
                ea.lazy_shape = triggers.size
            seg.events.append(ea)

        # Read Event A and B
        # Not so well tested
        for zname in ['EVENT A', 'EVENT B']:
            zname2, pos, length = zones[zname]
            f.seek(pos, 0)
            epochs = np.frombuffer(f.read(length),
                                   dtype=[('label', 'u4'), ('start', 'u4'),
                                          ('stop', 'u4'), ])
            if not lazy:
                keep = (epochs['start'] > 0) & (
                    epochs['start'] < rawdata.shape[0]) & (
                    epochs['stop'] < rawdata.shape[0])
                epochs = epochs[keep]
                ep = Epoch(name=zname[0] + zname[1:].lower(),
                           labels=epochs['label'].astype('S'),
                           times=(epochs['start'] / sampling_rate).rescale('s'),
                           durations=((epochs['stop'] - epochs['start'])
                                      / sampling_rate).rescale('s'))
            else:
                ep = Epoch(name=zname[0] + zname[1:].lower())
                ep.lazy_shape = epochs.size
            seg.epochs.append(ep)

        seg.create_many_to_one_relationship()
        f.close()
        return seg
Example #37
0
    def read_segment(self, lazy=False, cascade=True):
        fid = open(self.filename, 'rb')
        global_header = HeaderReader(fid, GlobalHeader).read_f(offset=0)
        seg = Segment()
        seg.file_origin = os.path.basename(self.filename)
        seg.annotate(neuroexplorer_version=global_header['version'])
        seg.annotate(comment=global_header['comment'])

        if not cascade:
            return seg

        offset = 544
        for i in range(global_header['nvar']):
            entity_header = HeaderReader(
                fid, EntityHeader).read_f(offset=offset + i * 208)
            entity_header['name'] = entity_header['name'].replace('\x00', '')

            if entity_header['type'] == 0:
                # neuron
                if lazy:
                    spike_times = [] * pq.s
                else:
                    spike_times = np.memmap(self.filename,
                                            np.dtype('i4'),
                                            'r',
                                            shape=(entity_header['n']),
                                            offset=entity_header['offset'])
                    spike_times = spike_times.astype(
                        'f8') / global_header['freq'] * pq.s
                sptr = SpikeTrain(times=spike_times,
                                  t_start=global_header['tbeg'] /
                                  global_header['freq'] * pq.s,
                                  t_stop=global_header['tend'] /
                                  global_header['freq'] * pq.s,
                                  name=entity_header['name'])
                if lazy:
                    sptr.lazy_shape = entity_header['n']
                sptr.annotate(channel_index=entity_header['WireNumber'])
                seg.spiketrains.append(sptr)

            if entity_header['type'] == 1:
                # event
                if lazy:
                    event_times = [] * pq.s
                else:
                    event_times = np.memmap(self.filename,
                                            np.dtype('i4'),
                                            'r',
                                            shape=(entity_header['n']),
                                            offset=entity_header['offset'])
                    event_times = event_times.astype(
                        'f8') / global_header['freq'] * pq.s
                labels = np.array([''] * event_times.size, dtype='S')
                evar = Event(times=event_times,
                             labels=labels,
                             channel_name=entity_header['name'])
                if lazy:
                    evar.lazy_shape = entity_header['n']
                seg.events.append(evar)

            if entity_header['type'] == 2:
                # interval
                if lazy:
                    start_times = [] * pq.s
                    stop_times = [] * pq.s
                else:
                    start_times = np.memmap(self.filename,
                                            np.dtype('i4'),
                                            'r',
                                            shape=(entity_header['n']),
                                            offset=entity_header['offset'])
                    start_times = start_times.astype(
                        'f8') / global_header['freq'] * pq.s
                    stop_times = np.memmap(self.filename,
                                           np.dtype('i4'),
                                           'r',
                                           shape=(entity_header['n']),
                                           offset=entity_header['offset'] +
                                           entity_header['n'] * 4)
                    stop_times = stop_times.astype(
                        'f') / global_header['freq'] * pq.s
                epar = Epoch(times=start_times,
                             durations=stop_times - start_times,
                             labels=np.array([''] * start_times.size,
                                             dtype='S'),
                             channel_name=entity_header['name'])
                if lazy:
                    epar.lazy_shape = entity_header['n']
                seg.epochs.append(epar)

            if entity_header['type'] == 3:
                # spiketrain and waveforms
                if lazy:
                    spike_times = [] * pq.s
                    waveforms = None
                else:

                    spike_times = np.memmap(self.filename,
                                            np.dtype('i4'),
                                            'r',
                                            shape=(entity_header['n']),
                                            offset=entity_header['offset'])
                    spike_times = spike_times.astype(
                        'f8') / global_header['freq'] * pq.s

                    waveforms = np.memmap(self.filename,
                                          np.dtype('i2'),
                                          'r',
                                          shape=(entity_header['n'], 1,
                                                 entity_header['NPointsWave']),
                                          offset=entity_header['offset'] +
                                          entity_header['n'] * 4)
                    waveforms = (
                        waveforms.astype('f') * entity_header['ADtoMV'] +
                        entity_header['MVOffset']) * pq.mV
                t_stop = global_header['tend'] / global_header['freq'] * pq.s
                if spike_times.size > 0:
                    t_stop = max(t_stop, max(spike_times))
                sptr = SpikeTrain(
                    times=spike_times,
                    t_start=global_header['tbeg'] / global_header['freq'] *
                    pq.s,
                    t_stop=t_stop,
                    name=entity_header['name'],
                    waveforms=waveforms,
                    sampling_rate=entity_header['WFrequency'] * pq.Hz,
                    left_sweep=0 * pq.ms)
                if lazy:
                    sptr.lazy_shape = entity_header['n']
                sptr.annotate(channel_index=entity_header['WireNumber'])
                seg.spiketrains.append(sptr)

            if entity_header['type'] == 4:
                # popvectors
                pass

            if entity_header['type'] == 5:
                # analog
                timestamps = np.memmap(self.filename,
                                       np.dtype('i4'),
                                       'r',
                                       shape=(entity_header['n']),
                                       offset=entity_header['offset'])
                timestamps = timestamps.astype('f8') / global_header['freq']

                fragment_starts_offset = entity_header[
                    'offset'] + entity_header['n'] * 4
                fragment_starts = np.memmap(self.filename,
                                            np.dtype('i4'),
                                            'r',
                                            shape=(entity_header['n']),
                                            offset=fragment_starts_offset)
                fragment_starts = fragment_starts.astype(
                    'f8') / global_header['freq']
                t_start = timestamps[0] - fragment_starts[0] / float(
                    entity_header['WFrequency'])
                del timestamps, fragment_starts

                if lazy:
                    signal = [] * pq.mV
                else:
                    signal_offset = fragment_starts_offset + entity_header[
                        'n'] * 4
                    signal = np.memmap(self.filename,
                                       np.dtype('i2'),
                                       'r',
                                       shape=(entity_header['NPointsWave']),
                                       offset=signal_offset)
                    signal = signal.astype('f')
                    signal *= entity_header['ADtoMV']
                    signal += entity_header['MVOffset']
                    signal = signal * pq.mV

                ana_sig = AnalogSignal(
                    signal=signal,
                    t_start=t_start * pq.s,
                    sampling_rate=entity_header['WFrequency'] * pq.Hz,
                    name=entity_header['name'],
                    channel_index=entity_header['WireNumber'])
                if lazy:
                    ana_sig.lazy_shape = entity_header['NPointsWave']
                seg.analogsignals.append(ana_sig)

            if entity_header['type'] == 6:
                # markers  : TO TEST
                if lazy:
                    times = [] * pq.s
                    labels = np.array([], dtype='S')
                    markertype = None
                else:
                    times = np.memmap(self.filename,
                                      np.dtype('i4'),
                                      'r',
                                      shape=(entity_header['n']),
                                      offset=entity_header['offset'])
                    times = times.astype('f8') / global_header['freq'] * pq.s
                    fid.seek(entity_header['offset'] + entity_header['n'] * 4)
                    markertype = fid.read(64).replace(b'\x00', b'').decode('ascii')
                    labels = np.memmap(
                        self.filename,
                        np.dtype('S' + str(entity_header['MarkerLength'])),
                        'r',
                        shape=(entity_header['n']),
                        offset=entity_header['offset'] +
                        entity_header['n'] * 4 + 64)
                ea = Event(times=times,
                           labels=labels.view(np.ndarray),
                           name=entity_header['name'],
                           channel_index=entity_header['WireNumber'],
                           marker_type=markertype)
                if lazy:
                    ea.lazy_shape = entity_header['n']
                seg.events.append(ea)

        seg.create_many_to_one_relationship()
        return seg
Example #38
0
def generate_one_simple_segment(seg_name='segment 0', supported_objects=[], nb_analogsignal=4,
                                t_start=0. * pq.s, sampling_rate=10 * pq.kHz, duration=6. * pq.s,
                                nb_spiketrain=6, spikerate_range=[.5 * pq.Hz, 12 * pq.Hz],
                                event_types={'stim': ['a', 'b', 'c', 'd'],
                                             'enter_zone': ['one', 'two'],
                                             'color': ['black', 'yellow', 'green'], },
                                event_size_range=[5, 20],
                                epoch_types={'animal state': ['Sleep', 'Freeze', 'Escape'],
                                             'light': ['dark', 'lighted']},
                                epoch_duration_range=[.5, 3.],
                                # this should be multiplied by pq.s, no?
                                array_annotations={'valid': np.array([True, False]),
                                                   'number': np.array(range(5))}
                                ):
    if supported_objects and Segment not in supported_objects:
        raise ValueError('Segment must be in supported_objects')
    seg = Segment(name=seg_name)
    if AnalogSignal in supported_objects:
        for a in range(nb_analogsignal):
            anasig = AnalogSignal(rand(int(sampling_rate * duration)), sampling_rate=sampling_rate,
                                  t_start=t_start, units=pq.mV, channel_index=a,
                                  name='sig %d for segment %s' % (a, seg.name))
            seg.analogsignals.append(anasig)

    if SpikeTrain in supported_objects:
        for s in range(nb_spiketrain):
            spikerate = rand() * np.diff(spikerate_range)
            spikerate += spikerate_range[0].magnitude
            # spikedata = rand(int((spikerate*duration).simplified))*duration
            # sptr = SpikeTrain(spikedata,
            #                  t_start=t_start, t_stop=t_start+duration)
            #                  #, name = 'spiketrain %d'%s)
            spikes = rand(int((spikerate * duration).simplified))
            spikes.sort()  # spikes are supposed to be an ascending sequence
            sptr = SpikeTrain(spikes * duration, t_start=t_start, t_stop=t_start + duration)
            sptr.annotations['channel_index'] = s
            # Randomly generate array_annotations from given options
            arr_ann = {key: value[(rand(len(spikes)) * len(value)).astype('i')] for (key, value) in
                       array_annotations.items()}
            sptr.array_annotate(**arr_ann)
            seg.spiketrains.append(sptr)

    if Event in supported_objects:
        for name, labels in event_types.items():
            evt_size = rand() * np.diff(event_size_range)
            evt_size += event_size_range[0]
            evt_size = int(evt_size)
            labels = np.array(labels, dtype='S')
            labels = labels[(rand(evt_size) * len(labels)).astype('i')]
            evt = Event(times=rand(evt_size) * duration, labels=labels)
            # Randomly generate array_annotations from given options
            arr_ann = {key: value[(rand(evt_size) * len(value)).astype('i')] for (key, value) in
                       array_annotations.items()}
            evt.array_annotate(**arr_ann)
            seg.events.append(evt)

    if Epoch in supported_objects:
        for name, labels in epoch_types.items():
            t = 0
            times = []
            durations = []
            while t < duration:
                times.append(t)
                dur = rand() * (epoch_duration_range[1] - epoch_duration_range[0])
                dur += epoch_duration_range[0]
                durations.append(dur)
                t = t + dur
            labels = np.array(labels, dtype='S')
            labels = labels[(rand(len(times)) * len(labels)).astype('i')]
            assert len(times) == len(durations)
            assert len(times) == len(labels)
            epc = Epoch(times=pq.Quantity(times, units=pq.s),
                        durations=pq.Quantity(durations, units=pq.s),
                        labels=labels,)
            assert epc.times.dtype == 'float'
            # Randomly generate array_annotations from given options
            arr_ann = {key: value[(rand(len(times)) * len(value)).astype('i')] for (key, value) in
                       array_annotations.items()}
            epc.array_annotate(**arr_ann)
            seg.epochs.append(epc)

    # TODO : Spike, Event

    seg.create_many_to_one_relationship()
    return seg
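A quick usage sketch, assuming the snippet's own context (numpy.random's rand, quantities as pq, and the neo classes imported); the class list mirrors the membership checks inside the function:

from neo.core import Segment, AnalogSignal, SpikeTrain, Event, Epoch

seg = generate_one_simple_segment(
    seg_name='demo segment',
    supported_objects=[Segment, AnalogSignal, SpikeTrain, Event, Epoch],
    nb_analogsignal=2, nb_spiketrain=3)
# one Event per event type and one Epoch per epoch type were generated
print(len(seg.analogsignals), len(seg.spiketrains),
      len(seg.events), len(seg.epochs))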
Example #39
0
    def _handle_timeseries(self, name, timeseries):
        # todo: check timeseries.attrs.get('schema_id')
        # todo: handle timeseries.attrs.get('source')
        subtype = timeseries.attrs['ancestry'][-1]
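        # Dispatch: string-typed data becomes an Event, or an Epoch when a
        # 'durations' dataset is present; numeric data becomes an
        # AnalogSignal when 'starting_time' defines a regular sampling grid,
        # or an IrregularlySampledSignal when only 'timestamps' exist.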

        data_group = timeseries.get('data')
        dtype = data_group.dtype
        if self._lazy:
            data = np.array((), dtype=dtype)
            lazy_shape = data_group.value.shape  # inefficient to load the data to get the shape
        else:
            data = data_group.value

        if dtype.type is np.string_:
            if self._lazy:
                times = np.array(())
            else:
                times = timeseries.get('timestamps')
            durations = timeseries.get('durations')
            if durations:
                # Epoch
                if self._lazy:
                    durations = np.array(())
                obj = Epoch(times=times,
                            durations=durations,
                            labels=data,
                            units='second')
            else:
                # Event
                obj = Event(times=times,
                            labels=data,
                            units='second')
        else:
            units = get_units(data_group)
            if 'starting_time' in timeseries:
                # AnalogSignal
                sampling_metadata = timeseries.get('starting_time')
                t_start = sampling_metadata.value * pq.s
                sampling_rate = sampling_metadata.attrs.get('rate') * pq.Hz
                assert sampling_metadata.attrs.get('unit') == 'Seconds'
                # todo: handle data.attrs['resolution']
                obj = AnalogSignal(data,
                                   units=units,
                                   sampling_rate=sampling_rate,
                                   t_start=t_start,
                                   name=name)
            elif 'timestamps' in timeseries:
                # IrregularlySampledSignal
                if self._lazy:
                    time_data = np.array(())
                else:
                    time_data = timeseries.get('timestamps')
                    assert time_data.attrs.get('unit') == 'Seconds'
                obj = IrregularlySampledSignal(time_data.value,
                                               data,
                                               units=units,
                                               time_units=pq.second)
            else:
                raise Exception("Timeseries group does not contain sufficient time information")
        if self._lazy:
            obj.lazy_shape = lazy_shape
        return obj
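The string-data branch above chooses between Epoch and Event purely on the presence of a 'durations' dataset. A hedged sketch of that decision, with made-up arrays standing in for the NWB datasets:

import numpy as np
from neo.core import Epoch, Event

times = np.array([0.5, 2.0, 4.5])          # stand-in for 'timestamps'
durations = np.array([1.0, 0.5, 1.5])      # stand-in for 'durations'; None would yield an Event
labels = np.array([b'go', b'stop', b'go']) # stand-in for string-typed 'data'

if durations is not None:
    obj = Epoch(times=times, durations=durations, labels=labels, units='second')
else:
    obj = Event(times=times, labels=labels, units='second')
print(obj)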