Example #1
    def __read_in_event(self, segment, event_array, variable,
                        recording_start_time):
        """ Reads in a data item that is an event (i.e. rewiring form/elim)\
            and saves this data to the segment.

        :param ~neo.core.Segment segment: Segment to add data to
        :param ~numpy.ndarray event_array: the raw "event" data
        :param str variable: the variable name
        :param recording_start_time: when recording started
        :type recording_start_time: float or int
        """
        # pylint: disable=too-many-arguments, no-member
        t_start = recording_start_time * quantities.ms

        formation_times = []
        formation_labels = []
        formation_annotations = dict()
        elimination_times = []
        elimination_labels = []
        elimination_annotations = dict()

        for i in range(len(event_array)):
            event_time = t_start + event_array[i][0] * quantities.ms
            pre_id = int(event_array[i][1])
            post_id = int(event_array[i][2])
            if event_array[i][3] == 1:
                formation_times.append(event_time)
                formation_labels.append(
                    str(pre_id) + "_" + str(post_id) + "_formation")
            else:
                elimination_times.append(event_time)
                elimination_labels.append(
                    str(pre_id) + "_" + str(post_id) + "_elimination")

        formation_event_array = neo.Event(
            times=formation_times,
            labels=formation_labels,
            units="ms",
            name=variable + "_form",
            description="Synapse formation events",
            array_annotations=formation_annotations)

        elimination_event_array = neo.Event(
            times=elimination_times,
            labels=elimination_labels,
            units="ms",
            name=variable + "_elim",
            description="Synapse elimination events",
            array_annotations=elimination_annotations)

        segment.events.append(formation_event_array)

        segment.events.append(elimination_event_array)
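A minimal sketch (not from the original codebase) of the row layout __read_in_event assumes for event_array: one row per rewiring event, [time_ms, pre_id, post_id, flag], where flag == 1 marks a formation and any other value an elimination.

import numpy as np
import quantities

# Hypothetical raw event data: neuron 0 -> 3 forms a synapse at 0.5 ms,
# and the same synapse is eliminated at 2.0 ms.
event_array = np.array([[0.5, 0, 3, 1],
                        [2.0, 0, 3, 0]])

t_start = 0.0 * quantities.ms
for row in event_array:
    event_time = t_start + row[0] * quantities.ms
    kind = "formation" if row[3] == 1 else "elimination"
    print(f"{int(row[1])}_{int(row[2])}_{kind} at {event_time}")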
Example #2
 def test_GetImagingDataTTL(self, mock_tdt):
     event1 = neo.Event(times=[1, 2, 3] * pq.s, name='A')
     event2 = neo.Event(times=[4, 5, 6] * pq.s, name='B')
     event3 = neo.Event(times=[7, 8, 9] * pq.s, name='C')
     segment = neo.Segment()
     segment.events.append(event1)
     segment.events.append(event2)
     segment.events.append(event3)
     block = neo.Block()
     block.segments.append(segment)
     mock_tdt.return_value = block
     self.assertEqual(
         3, GetImagingDataTTL('/dpath/', time_idx=-1, event_name='A'))
     self.assertEqual(
         6, GetImagingDataTTL('/dpath/', time_idx=-1, event_idx=1))
Example #3
def threshold(asig, threshold_array):
    dim_t, channel_num = asig.shape
    th_signal = asig.as_array()\
              - np.repeat(threshold_array[np.newaxis, :], dim_t, axis=0)
    state_array = th_signal > 0
    rolled_state_array = np.roll(state_array, 1, axis=0)

    all_times = np.array([])
    all_channels = np.array([])
    all_labels = np.array([])
    for label, func in zip(['UP',        'DOWN'],
                           [lambda x: x, lambda x: np.bitwise_not(x)]):
        trans = np.where(func(np.bitwise_not(rolled_state_array))\
                       * func(state_array))
        channels = trans[1]
        times = asig.times[trans[0]]

        if not len(times):
            raise ValueError("The choosen threshold lies not within the range "\
                           + "of the signal values!")

        all_channels = np.append(all_channels, channels)
        all_times = np.append(all_times, times)
        all_labels = np.append(all_labels, np.array([label for _ in times]))

    sort_idx = np.argsort(all_times)

    return neo.Event(times=all_times[sort_idx]*asig.times.units,
                     labels=all_labels[sort_idx],
                     name='Transitions',
                     array_annotations={'channels':all_channels[sort_idx]},
                     threshold=threshold_array)
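A usage sketch for the threshold function above, with a fabricated two-channel AnalogSignal and zero thresholds (numpy/quantities/neo imports as assumed by the snippet):

import numpy as np
import quantities as pq
import neo

asig = neo.AnalogSignal(np.array([[-1.0, -2.0],
                                  [1.0, -1.0],
                                  [2.0, 3.0]]),
                        units='mV', sampling_rate=1 * pq.Hz)
transitions = threshold(asig, threshold_array=np.zeros(2))
print(transitions.labels)
print(transitions.array_annotations['channels'])

Note that np.roll wraps the last sample to the front, so a spurious transition can be reported at t = 0.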
Example #4
def ReadPixelData(events, DIM_X, DIM_Y, spatial_scale):

    import numpy as np
    from utils import remove_annotations
    import neo

    PixelLabel = events.array_annotations[
        'y_coords'] * DIM_Y + events.array_annotations['x_coords']
    UpTrans = events.times
    Sorted_Idx = np.argsort(UpTrans)
    UpTrans = UpTrans[Sorted_Idx]
    PixelLabel = PixelLabel[Sorted_Idx]

    UpTrans_Evt = neo.Event(times=UpTrans,
                            name='UpTrans',
                            array_annotations={'channels': PixelLabel},
                            description='Transitions from down to up states. '
                                        + 'Annotated with the channel id ("channels")',
                            Dim_x=DIM_X,
                            Dim_y=DIM_Y,
                            spatial_scale=spatial_scale)
    remove_annotations(UpTrans_Evt, del_keys=['nix_name', 'neo_name'])
    UpTrans_Evt.annotations.update(events.annotations)

    return UpTrans_Evt
Example #5
def _event_epoch_slice_by_valid_ids(obj, valid_ids):
    """
    Internal function
    """
    # modify annotations
    sparse_annotations = _get_valid_annotations(obj, valid_ids)

    # modify array annotations
    sparse_array_annotations = {key: value[valid_ids]
                                for key, value in obj.array_annotations.items() if len(value)}

    if type(obj) is neo.Event:
        sparse_obj = neo.Event(
            times=copy.deepcopy(obj.times[valid_ids]),
            units=copy.deepcopy(obj.units),
            name=copy.deepcopy(obj.name),
            description=copy.deepcopy(obj.description),
            file_origin=copy.deepcopy(obj.file_origin),
            array_annotations=sparse_array_annotations,
            **sparse_annotations)
    elif type(obj) is neo.Epoch:
        sparse_obj = neo.Epoch(
            times=copy.deepcopy(obj.times[valid_ids]),
            durations=copy.deepcopy(obj.durations[valid_ids]),
            units=copy.deepcopy(obj.units),
            name=copy.deepcopy(obj.name),
            description=copy.deepcopy(obj.description),
            file_origin=copy.deepcopy(obj.file_origin),
            array_annotations=sparse_array_annotations,
            **sparse_annotations)
    else:
        raise TypeError('Can only slice Event and Epoch objects by valid IDs.')

    return sparse_obj
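A sketch of how this helper might be exercised; _get_valid_annotations is internal to the original module, so a trivial stand-in is used here:

import copy
import numpy as np
import quantities as pq
import neo

def _get_valid_annotations(obj, valid_ids):
    # stand-in: keep all scalar annotations unchanged
    return copy.deepcopy(obj.annotations)

ev = neo.Event(times=[0.1, 0.2, 0.3] * pq.s, name='triggers',
               array_annotations={'channels': np.array([4, 5, 6])})
sliced = _event_epoch_slice_by_valid_ids(ev, valid_ids=[0, 2])
print(sliced.times)                          # [0.1 0.3] s
print(sliced.array_annotations['channels'])  # [4 6]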
Example #6
def _event_epoch_slice_by_valid_ids(obj, valid_ids):
    """
    Internal function
    """
    # modify annotations
    sparse_annotations = _get_valid_annotations(obj, valid_ids)

    # modify labels
    sparse_labels = _get_valid_labels(obj, valid_ids)

    if type(obj) is neo.Event:
        sparse_obj = neo.Event(
            times=copy.deepcopy(obj.times[valid_ids]),
            labels=sparse_labels,
            units=copy.deepcopy(obj.units),
            name=copy.deepcopy(obj.name),
            description=copy.deepcopy(obj.description),
            file_origin=copy.deepcopy(obj.file_origin),
            **sparse_annotations)
    elif type(obj) is neo.Epoch:
        sparse_obj = neo.Epoch(
            times=copy.deepcopy(obj.times[valid_ids]),
            durations=copy.deepcopy(obj.durations[valid_ids]),
            labels=sparse_labels,
            units=copy.deepcopy(obj.units),
            name=copy.deepcopy(obj.name),
            description=copy.deepcopy(obj.description),
            file_origin=copy.deepcopy(obj.file_origin),
            **sparse_annotations)
    else:
        raise TypeError('Can only slice Event and Epoch objects by valid IDs.')

    return sparse_obj
Example #7
def _create_neo_events_from_dataframe(dataframe, metadata, file_origin):
    """
    Convert the contents of a dataframe into Neo :class:`Events
    <neo.core.Event>`.
    """

    events_list = []

    if dataframe is not None:

        # group events by type
        for type_name, df in dataframe.groupby('Type'):

            # create a Neo Event for each type
            event = neo.Event(
                name=type_name,
                file_origin=file_origin,
                times=df['Start (s)'].values * pq.s,
                labels=df['Label'].values,
            )

            events_list.append(event)

    # return the list of Neo Events
    return events_list
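A usage sketch with a made-up dataframe; the column names ('Type', 'Start (s)', 'Label') are the ones the function reads, and metadata is not used by this function:

import pandas as pd

df = pd.DataFrame({'Type': ['stim', 'stim', 'response'],
                   'Start (s)': [0.1, 0.5, 0.6],
                   'Label': ['on', 'off', 'lick']})
for ev in _create_neo_events_from_dataframe(df, metadata={}, file_origin='demo.csv'):
    print(ev.name, ev.times, ev.labels)
# response [0.6] s ['lick']
# stim [0.1 0.5] s ['on' 'off']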
Example #8
 def test_get_fp_start_ttl(self, mock_tdt):
     event = neo.Event(times=[1,2,3]*pq.s, name='A')
     segment = neo.Segment()
     segment.events.append(event)
     segment.events.append(event)
     block = neo.Block()
     block.segments.append(segment)
     mock_tdt.return_value = block
     self.assertEqual(1, GetBehavioralEvents().get_fp_start_ttl('/path/to/data/', event_name='A'))
     self.assertEqual(1, GetBehavioralEvents().get_fp_start_ttl('/path/to/data/', event_idx=0))
Example #9
def detect_minima(asig, order):
    signal = asig.as_array()

    t_idx, channel_idx = argrelmin(signal, order=order, axis=0)

    sort_idx = np.argsort(t_idx)

    return neo.Event(times=asig.times[t_idx[sort_idx]],
                     labels=['UP'] * len(t_idx),
                     name='Transitions',
                     array_annotations={'channels': channel_idx[sort_idx]})
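A quick check of detect_minima above on a fabricated one-channel signal with a single local minimum at t = 2 s (assumes the numpy/scipy/neo imports used by the snippet):

import numpy as np
import quantities as pq
import neo
from scipy.signal import argrelmin

asig = neo.AnalogSignal(np.array([[1.0], [0.5], [-1.0], [0.5], [1.0]]),
                        units='mV', sampling_rate=1 * pq.Hz)
evt = detect_minima(asig, order=1)
print(evt.times)                          # [2.] s
print(evt.array_annotations['channels'])  # [0]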
Example #10
def detect_critical_points(imgseq, times):
    frames = imgseq.as_array()
    if frames.dtype != np.complex128:
        raise ValueError("Vector field values must be complex numbers!")

    dim_t, dim_x, dim_y = frames.shape

    frame_ids = np.array([], dtype=int)
    labels = np.array([], dtype=str)
    x = np.array([], dtype=float)
    y = np.array([], dtype=float)
    trace = np.array([], dtype=float)
    det = np.array([], dtype=float)
    extend = np.array([], dtype=int)
    winding_number = np.array([], dtype=int)
    # winding_number = np.array([], dtype=float) # ToDo

    for i in range(dim_t):
        # ToDo: parallelize
        X, Y = np.meshgrid(np.arange(dim_y), np.arange(dim_x), indexing='xy')
        ZR = np.real(frames[i])
        ZI = np.imag(frames[i])
        contourR = plt.contour(X, Y, ZR, levels=[0])
        contourI = plt.contour(X, Y, ZI, levels=[0])

        for xy in get_line_intersections(contourR, contourI):
            x = np.append(x, xy[0])
            y = np.append(y, xy[1])
            frame_ids = np.append(frame_ids, i)

            J = jacobian(xy, ZR, ZI)
            trace_i = np.trace(J)
            det_i = np.linalg.det(J)
            labels = np.append(
                labels, classify_critical_point(det=det_i, trace=trace_i))
            trace = np.append(trace, trace_i)
            det = np.append(det, det_i)

            extend_i, winding_number_i = calc_winding_number(xy, frames[i])
            extend = np.append(extend, extend_i)
            winding_number = np.append(winding_number, winding_number_i)

    evt = neo.Event(name='Critical Points',
                    times=times[frame_ids],
                    labels=labels)
    evt.array_annotations.update({
        'x': x,
        'y': y,
        'trace': trace,
        'det': det,
        'extend': extend,
        'winding_number': winding_number
    })
    return evt
Example #11
def cluster_triggers(event, metric, neighbour_distance, min_samples, time_dim):
    up_idx = np.where(event.labels == 'UP')[0]

    # build 3D array of trigger times
    triggers = np.zeros((len(up_idx), 3))
    triggers[:,0] = event.array_annotations['x_coords'][up_idx]
    triggers[:,1] = event.array_annotations['y_coords'][up_idx]
    triggers[:,2] = event.times[up_idx] * time_dim
    #
    # for i, channel in enumerate(evts.array_annotations['channels'][up_idx]):
    #     triggers[i][0] = asig.array_annotations['x_coords'][int(channel)]
    #     triggers[i][1] = asig.array_annotations['y_coords'][int(channel)]

    clustering = DBSCAN(eps=neighbour_distance,
                        min_samples=min_samples,
                        metric=metric)
    clustering.fit(triggers)

    if len(np.unique(clustering.labels_)) < 1:
        raise ValueError("No Clusters found, please adapt the parameters!")

    # remove unclassified trigger points (label == -1)
    cluster_idx = np.where(clustering.labels_ != -1)[0]
    if not len(cluster_idx):
        raise ValueError("Clusters couldn't be classified, please adapt the parameters!")
        
    wave_idx = up_idx[cluster_idx]

    evt = neo.Event(times=event.times[wave_idx],
                    labels=clustering.labels_[cluster_idx],
                    name='Wavefronts',
                    array_annotations={'channels':event.array_annotations['channels'][wave_idx],
                                       'x_coords':triggers[:,0][cluster_idx],
                                       'y_coords':triggers[:,1][cluster_idx]},
                    description='Transitions from down to up states. '\
                               +'Labels are ids of wavefronts. '
                               +'Annotated with the channel id ("channels") and '\
                               +'its position ("x_coords", "y_coords").',
                    cluster_algorithm='sklearn.cluster.DBSCAN',
                    cluster_eps=neighbour_distance,
                    cluster_metric=metric,
                    cluster_min_samples=min_samples)

    remove_annotations(event, del_keys=['nix_name', 'neo_name'])
    evt.annotations.update(event.annotations)
    return evt
Example #12
def remove_short_states(evt,
                        min_duration,
                        start_label='UP',
                        stop_label='DOWN'):
    # assumes event times to be sorted
    del_idx = np.array([], dtype=int)

    for channel in np.unique(evt.array_annotations['channels']):
        # select channel
        c_idx = np.where(channel == evt.array_annotations['channels'])[0]
        c_times = evt.times[c_idx]
        c_labels = evt.labels[c_idx]

        # separate start and stop times
        start_idx = np.where(start_label == c_labels)[0]
        stop_idx = np.where(stop_label == c_labels)[0]
        start_times = c_times[start_idx]
        stop_times = c_times[stop_idx]

        # clean borders
        leading_stops = np.argmax(stop_times > start_times[0])
        stop_idx = stop_idx[leading_stops:]
        stop_times = stop_times[leading_stops:]
        start_times = start_times[:len(stop_times)]

        # find short states
        short_state_idx = np.where(
            (stop_times -
             start_times).rescale('s') < min_duration.rescale('s'))[0]

        # remove end points of short states
        del_idx = np.append(del_idx, c_idx[stop_idx[short_state_idx]])
        if not start_label == stop_label:
            # remove start points of short states
            del_idx = np.append(del_idx, c_idx[start_idx[short_state_idx]])

    cleaned_evt = neo.Event(times=np.delete(evt.times.rescale('s'), del_idx) *
                            pq.s,
                            labels=np.delete(evt.labels, del_idx),
                            name=evt.name,
                            description=evt.description)
    cleaned_evt.annotations = evt.annotations
    for key in evt.array_annotations:
        cleaned_evt.array_annotations[key] = np.delete(
            evt.array_annotations[key], del_idx)
    return cleaned_evt
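A usage sketch with fabricated UP/DOWN pairs on a single channel; the 0.0-0.2 s state is shorter than min_duration and is dropped:

import numpy as np
import quantities as pq
import neo

evt = neo.Event(times=[0.0, 0.2, 1.0, 3.0] * pq.s,
                labels=np.array(['UP', 'DOWN', 'UP', 'DOWN']),
                array_annotations={'channels': np.zeros(4, dtype=int)})
cleaned = remove_short_states(evt, min_duration=0.5 * pq.s)
print(cleaned.times)   # [1. 3.] s
print(cleaned.labels)  # ['UP' 'DOWN']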
Example #13
def threshold(asig, threshold_array):
    dim_t, channel_num = asig.shape
    th_signal = asig.as_array()\
              - np.repeat(threshold_array[np.newaxis, :], dim_t, axis=0)
    state_array = th_signal > 0
    rolled_state_array = np.roll(state_array, 1, axis=0)

    all_times = np.array([])
    all_channels = np.array([], dtype=int)
    all_labels = np.array([])
    for label, func in zip(['UP',        'DOWN'],
                           [lambda x: x, lambda x: np.bitwise_not(x)]):
        trans = np.where(func(np.bitwise_not(rolled_state_array))\
                       * func(state_array))
        channels = trans[1]
        times = asig.times[trans[0]]

        if not len(times):
            raise ValueError("The choosen threshold lies not within the range "\
                           + "of the signal values!")

        all_channels = np.append(all_channels, channels)
        all_times = np.append(all_times, times)
        all_labels = np.append(all_labels, np.array([label for _ in times]))

    sort_idx = np.argsort(all_times)

    evt = neo.Event(times=all_times[sort_idx]*asig.times.units,
                    labels=all_labels[sort_idx],
                    name='transitions',
                    trigger_detection='threshold',
                    array_annotations={'channels':all_channels[sort_idx]},
                    threshold=threshold_array,
                    description='Transitions between down and up states with '\
                               +'labels "UP" and "DOWN". '\
                               +'Annotated with the channel id ("channels").')

    for key in asig.array_annotations.keys():
        evt_ann = {key : asig.array_annotations[key][all_channels[sort_idx]]}
        evt.array_annotations.update(evt_ann)

    remove_annotations(asig, del_keys=['nix_name', 'neo_name'])
    evt.annotations.update(asig.annotations)
    return evt
Example #14
def detect_minima(asig, order):
    signal = asig.as_array()
    t_idx, channel_idx = argrelmin(signal, order=order, axis=0)

    sort_idx = np.argsort(t_idx)

    evt = neo.Event(times=asig.times[t_idx[sort_idx]],
                    labels=['UP'] * len(t_idx),
                    name='Transitions',
                    minima_order=order,
                    array_annotations={'channels': channel_idx[sort_idx]})

    for key in asig.array_annotations.keys():
        evt_ann = {key: asig.array_annotations[key][channel_idx[sort_idx]]}
        evt.array_annotations.update(evt_ann)

    remove_annotations(asig, del_keys=['nix_name', 'neo_name'])
    evt.annotations.update(asig.annotations)
    return evt
Example #15
def event_array_to_events(event_array):
    """ Return a list of events for an event array.

    Note that while the created events may have references to a segment,
    the relationships in the other direction are not automatically created
    (the events are not attached to the segment). Other properties like
    annotations are not copied or referenced in the created events.

    :param event_array: An event array from which the Event objects are
        constructed.
    :type event_array: :class:`neo.core.EventArray`
    :return: A list of events, one for each of the events in ``event_array``.
    :rtype: list
    """
    events = []
    for i, t in enumerate(event_array.times):
        e = neo.Event(
            t, event_array.labels[i] if i < len(event_array.labels) else '')
        e.segment = event_array.segment
        events.append(e)
    return events
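EventArray only exists in old Neo releases (pre-0.4), where Event held a single time; the multi-time Event of modern Neo replaced it. A sketch of the intended use under that old API:

import numpy as np
import quantities as pq
import neo  # assumes an old neo release that still ships EventArray

ea = neo.EventArray(times=np.array([1.0, 2.0]) * pq.s,
                    labels=np.array(['a', 'b']))
for ev in event_array_to_events(ea):
    print(ev.time, ev.label)  # one single-time Event per entry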
Example #16
            mode_trigger[mode_id*n_sites + site_id] \
                                    = interpolated_mode_grids[mode_id, ix, iy]

    # modes, xs, ys = np.where(np.isfinite(mode_grids))
    # channels = [np.where((asig.array_annotations['x_coords'] == x) \
    #                    & (asig.array_annotations['y_coords'] == y))[0][0] \
    #             for x,y in zip(xs,ys)]

    # remove_annotations(block.segments[0].events[evt_id])
    remove_annotations(waves)
    evt = neo.Event(
        mode_trigger * waves.units,
        labels=np.repeat(mode_labels, n_sites).astype(str),
        name='wavemodes',
        mode_labels=mode_labels,
        mode_counts=mode_counts[mode_labels],
        mode_distortions=mode_dists,
        interpolation_step_size=args.interpolation_step_size,
        n_modes=n_modes,
        pca_dims='None' if args.pca_dims is None else args.pca_dims,
        **waves.annotations)
    evt.annotations['spatial_scale'] *= args.interpolation_step_size
    evt.array_annotations['x_coords'] = np.tile(ixs, n_modes).astype(int)
    evt.array_annotations['y_coords'] = np.tile(iys, n_modes).astype(int)
    evt.array_annotations['channels'] = np.tile(np.arange(n_sites), n_modes)

    block.segments[0].events.append(evt)
    # save output neo object

    write_neo(args.output, block)
Example #17
                        units="mV",
                        sampling_rate=1 * pq.Hz)
seg.analogsignals.append(asig)
asig2 = neo.AnalogSignal(name="signal2",
                         signal=[1.1, 1.2, 2.5],
                         units="mV",
                         sampling_rate=1 * pq.Hz)
seg.analogsignals.append(asig2)
irasig = neo.IrregularlySampledSignal(name="irsignal",
                                      signal=np.random.random((100, 2)),
                                      units="mV",
                                      times=np.cumsum(
                                          np.random.random(100) * pq.s))
seg.irregularlysampledsignals.append(irasig)
event = neo.Event(name="event",
                  times=np.cumsum(np.random.random(10)) * pq.ms,
                  labels=["event-" + str(idx) for idx in range(10)])
seg.events.append(event)
epoch = neo.Epoch(name="epoch",
                  times=np.cumsum(np.random.random(10)) * pq.ms,
                  durations=np.random.random(10) * pq.ms,
                  labels=["epoch-" + str(idx) for idx in range(10)])
seg.epochs.append(epoch)
st = neo.SpikeTrain(name="train1",
                    times=[0.21, 0.37, 0.53, 0.56],
                    t_start=0 * pq.s,
                    t_stop=2.4 * pq.s,
                    units=pq.s,
                    sampling_rate=0.01)
seg.spiketrains.append(st)
Example #18
def detect_minima(asig, order, interpolation_points, interpolation):
    signal = asig.as_array()
    sampling_time = asig.times[1] - asig.times[0]

    t_idx, channel_idx = argrelmin(signal, order=order, axis=0)
    t_idx_max, channel_idx_max = argrelmax(signal, order=order, axis=0)

    if interpolation:

        # minimum
        fitted_idx_times = np.zeros([len(t_idx)])
        start_arr = t_idx - int(interpolation_points / 2)
        start_arr = np.where(start_arr > 0, start_arr, 0)
        stop_arr = start_arr + int(interpolation_points)
        start_arr = np.where(stop_arr < len(signal), start_arr,
                             len(signal) - interpolation_points - 1)
        stop_arr = np.where(stop_arr < len(signal), stop_arr, len(signal) - 1)

        signal_arr = np.empty((interpolation_points, len(start_arr)))
        signal_arr.fill(np.nan)

        for i, (start, stop,
                channel_i) in enumerate(zip(start_arr, stop_arr, channel_idx)):
            signal_arr[:, i] = signal[start:stop, channel_i]

        X_temp = range(0, interpolation_points)
        params = np.polyfit(X_temp, signal_arr, 2)

        min_pos = -params[1, :] / (2 * params[0, :]) + start_arr
        min_pos = np.where(min_pos > 0, min_pos, 0)
        minimum_times = min_pos * sampling_time

        minimum_value = params[0, :] * (
            -params[1, :] / (2 * params[0, :]))**2 + params[1, :] * (
                -params[1, :] / (2 * params[0, :])) + params[2, :]

        # maximum
        fitted_idx_times = np.zeros([len(t_idx_max)])
        start_arr = t_idx_max - int(interpolation_points / 2)
        start_arr = np.where(start_arr > 0, start_arr, 0)
        stop_arr = start_arr + int(interpolation_points)
        start_arr = np.where(stop_arr < len(signal), start_arr,
                             len(signal) - interpolation_points - 1)
        stop_arr = np.where(stop_arr < len(signal), stop_arr, len(signal) - 1)

        signal_arr = np.empty((interpolation_points, len(start_arr)))
        signal_arr.fill(np.nan)

        for i, (start, stop, channel_i) in enumerate(
                zip(start_arr, stop_arr, channel_idx_max)):
            signal_arr[:, i] = signal[start:stop, channel_i]

        X_temp = range(0, interpolation_points)
        params = np.polyfit(X_temp, signal_arr, 2)

        max_pos = -params[1, :] / (2 * params[0, :]) + start_arr
        max_pos = np.where(max_pos > 0, max_pos, 0)

        maximum_times = max_pos * sampling_time
        maximum_value = params[0, :] * (
            -params[1, :] / (2 * params[0, :]))**2 + params[1, :] * (
                -params[1, :] / (2 * params[0, :])) + params[2, :]

        amplitude = []
        ch_arr = []
        min_arr = []

        for i in range(len(min_pos)):  # for each transition
            ch = channel_idx[i]
            min_time = min_pos[i]
            #print('min time', min_time)
            min_value = minimum_value[i]
            #print('min value', min_value)
            #print('signal', signal[t_idx[i]][ch])

            ch_idx = np.where(channel_idx_max == ch)[0]
            max_time = max_pos[ch_idx]
            max_value = maximum_value[ch_idx]
            time_idx = np.where(max_time > min_time)[0]
            times = max_time[time_idx]

            max_value = max_value[time_idx]
            #print('MAX VALUES', max_value)

            try:
                idx_min_ampl = np.argmin(times)
                amplitude.append(max_value[idx_min_ampl] - min_value)
                #print('time', times[idx_min_ampl])
                #print('max signal', signal[max_value[idx_min_ampl]][ch])

            except (IndexError, ValueError) as e:
                amplitude.append(max_value - min_value)
                #print('time', times)

            ch_arr.append(ch)
        arr_dict = {
            'Amplitude': amplitude,
            'Ch': ch_arr,
            'times': minimum_times
        }

    else:
        minimum_times = asig.times[t_idx]
        maximum_times = asig.times[t_idx_max]
        amplitude = maximum_times - minimum_times
        ch_arr = channel_idx
        arr_dict = {
            'Amplitude': amplitude,
            'Ch': ch_arr,
            'times': minimum_times
        }

    sort_idx = np.argsort(minimum_times)

    evt = neo.Event(times=minimum_times[sort_idx],
                    labels=['UP'] * len(minimum_times),
                    name='Transitions',
                    minima_order=order,
                    use_quadratic_interpolation=interpolation,
                    num_interpolation_points=interpolation_points,
                    array_annotations={'channels': channel_idx[sort_idx]})

    for key in asig.array_annotations.keys():
        evt_ann = {key: asig.array_annotations[key][channel_idx[sort_idx]]}
        evt.array_annotations.update(evt_ann)

    remove_annotations(asig, del_keys=['nix_name', 'neo_name'])
    evt.annotations.update(asig.annotations)
    return evt, arr_dict
Example #19
def _read_data_file(metadata, lazy=False, signal_group_mode='split-all'):
    """
    Read in the ``data_file`` given in ``metadata`` using an automatically
    detected :mod:`neo.io` class if ``lazy=False`` or a :mod:`neo.rawio` class
    if ``lazy=True``. If ``lazy=True``, manually load epochs, events, and spike
    trains, but not signals. Return a Neo :class:`Block <neo.core.Block>`.
    """

    # read in the electrophysiology data
    # - signal_group_mode='split-all' ensures every channel gets its own
    #   AnalogSignal, which is important for indexing in EphyviewerConfigurator
    io = neo.io.get_io(_abs_path(metadata, 'data_file'))
    blk = io.read_block(lazy=lazy, signal_group_mode=signal_group_mode)

    # load all objects except analog signals
    if lazy:

        if version.parse(neo.__version__) >= version.parse(
                '0.8.0'):  # Neo >= 0.8.0 has proxy objects with load method

            for i in range(len(blk.segments[0].epochs)):
                epoch = blk.segments[0].epochs[i]
                if hasattr(epoch, 'load'):
                    blk.segments[0].epochs[i] = epoch.load()

            for i in range(len(blk.segments[0].events)):
                event = blk.segments[0].events[i]
                if hasattr(event, 'load'):
                    blk.segments[0].events[i] = event.load()

            for i in range(len(blk.segments[0].spiketrains)):
                spiketrain = blk.segments[0].spiketrains[i]
                if hasattr(spiketrain, 'load'):
                    blk.segments[0].spiketrains[i] = spiketrain.load()

        else:  # Neo < 0.8.0 does not have proxy objects

            neorawioclass = neo.rawio.get_rawio_class(
                _abs_path(metadata, 'data_file'))
            if neorawioclass is not None:
                neorawio = neorawioclass(_abs_path(metadata, 'data_file'))
                neorawio.parse_header()

                for i in range(len(blk.segments[0].epochs)):
                    epoch = blk.segments[0].epochs[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['event_channels'])
                                          if chan['name'] == epoch.name
                                          and chan['type'] == b'epoch'), None)
                    if channel_index is not None:
                        ep_raw_times, ep_raw_durations, ep_labels = neorawio.get_event_timestamps(
                            event_channel_index=channel_index)
                        ep_times = neorawio.rescale_event_timestamp(
                            ep_raw_times, dtype='float64')
                        ep_durations = neorawio.rescale_epoch_duration(
                            ep_raw_durations, dtype='float64')
                        ep = neo.Epoch(times=ep_times * pq.s,
                                       durations=ep_durations * pq.s,
                                       labels=ep_labels,
                                       name=epoch.name)
                        blk.segments[0].epochs[i] = ep

                for i in range(len(blk.segments[0].events)):
                    event = blk.segments[0].events[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['event_channels'])
                                          if chan['name'] == event.name
                                          and chan['type'] == b'event'), None)
                    if channel_index is not None:
                        ev_raw_times, _, ev_labels = neorawio.get_event_timestamps(
                            event_channel_index=channel_index)
                        ev_times = neorawio.rescale_event_timestamp(
                            ev_raw_times, dtype='float64')
                        ev = neo.Event(times=ev_times * pq.s,
                                       labels=ev_labels,
                                       name=event.name)
                        blk.segments[0].events[i] = ev

                for i in range(len(blk.segments[0].spiketrains)):
                    spiketrain = blk.segments[0].spiketrains[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['unit_channels'])
                                          if chan['name'] == spiketrain.name),
                                         None)
                    if channel_index is not None:
                        st_raw_times = neorawio.get_spike_timestamps(
                            unit_index=channel_index)
                        st_times = neorawio.rescale_spike_timestamp(
                            st_raw_times, dtype='float64')
                        st = neo.SpikeTrain(times=st_times * pq.s,
                                            name=spiketrain.name)
                        blk.segments[0].spiketrains[i] = st

    # convert byte labels to Unicode strings
    for epoch in blk.segments[0].epochs:
        epoch.labels = epoch.labels.astype('U')

    for event in blk.segments[0].events:
        event.labels = event.labels.astype('U')

    return blk
Example #20
import quantities as pq
import numpy as np
import neo
times = np.arange(0, 3) * pq.s
ev = neo.Event(times=times)
new_time = ev.rescale('us')
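rescale returns a new Event with converted times rather than modifying ev in place; a quick check:

print(ev.times)        # [0. 1. 2.] s
print(new_time.times)  # [0. 1000000. 2000000.] us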
Example #21
def detect_minima(asig, order, interpolation_points, interpolation,
                  threshold_fraction, Min_Peak_Distance):

    signal = asig.as_array()
    times = asig.times
    sampling_time = asig.times[1] - asig.times[0]
    min_idx, channel_idx_minima = argrelmin(signal, order=order, axis=0)

    amplitude_span = np.max(signal, axis=0) - np.min(signal, axis=0)
    threshold = np.min(signal, axis=0) + threshold_fraction * amplitude_span

    min_time_idx = []
    channel_idx = []
    for ch in range(len(signal[0])):
        peaks, _ = find_peaks(signal.T[ch], height=threshold[ch],
                              distance=np.int32(Min_Peak_Distance / sampling_time))
        mins = min_idx[np.where(channel_idx_minima == ch)[0]]

        clean_mins = np.array([], dtype=int)
        for i, peak in enumerate(peaks):
            distance_to_peak = times[peak] - times[mins]
            distance_to_peak = distance_to_peak[distance_to_peak > 0]
            if distance_to_peak.size:
                trans_idx = np.argmin(distance_to_peak)
                clean_mins = np.append(clean_mins, mins[trans_idx])

        min_time_idx.extend(clean_mins)
        channel_idx.extend(list(np.ones(len(clean_mins))*ch))
        
    
    # compute local minima times.
    if interpolation:
        # parabolic fit around the local minima
        fitted_idx_times = np.zeros([len(min_time_idx)])
        start_arr = np.array(min_time_idx) - 1  # int(interpolation_points/2)
        start_arr = np.where(start_arr > 0, start_arr, 0)
        stop_arr = start_arr + int(interpolation_points)

        start_arr = np.where(stop_arr < len(signal), start_arr, len(signal)-interpolation_points-1)
        stop_arr = np.where(stop_arr < len(signal), stop_arr, len(signal)-1)

        signal_arr = np.empty((interpolation_points, len(start_arr)))
        signal_arr.fill(np.nan)

        for i, (start, stop, channel_i) in enumerate(zip(start_arr, stop_arr, channel_idx)):
            signal_arr[:,i] = signal[start:stop, channel_i]

        X_temp = range(0, interpolation_points)
        params = np.polyfit(X_temp, signal_arr, 2)

        min_pos = -params[1,:] / (2*params[0,:]) + start_arr
        min_pos = np.where(min_pos > 0, min_pos, 0)
        minimum_times = min_pos * sampling_time
        minimum_value = params[0,:]*( -params[1,:] / (2*params[0,:]) )**2 + params[1,:]*( -params[1,:] / (2*params[0,:]) ) + params[2,:]

        minimum_times[np.where(minimum_times > asig.t_stop)[0]] = asig.t_stop
    else:
        minimum_times = asig.times[min_time_idx]
    
    ###################################
    sort_idx = np.argsort(minimum_times)
    channel_idx = np.int32(channel_idx)
    
    evt = neo.Event(times=minimum_times[sort_idx],
                    labels=['UP'] * len(minimum_times),
                    name='Transitions',
                    minima_order=order,
                    num_interpolation_points=interpolation_points,
                    array_annotations={'channels':channel_idx[sort_idx]})

    for key in asig.array_annotations.keys():
        evt_ann = {key : asig.array_annotations[key][channel_idx[sort_idx]]}
        evt.array_annotations.update(evt_ann)

    remove_annotations(asig, del_keys=['nix_name', 'neo_name'])
    evt.annotations.update(asig.annotations)

    return evt
Example #22
def detect_transitions(asig, transition_phase):
    # ToDo: replace with elephant function
    signal = asig.as_array()
    dim_t, channel_num = signal.shape

    hilbert_signal = hilbert(signal, axis=0)
    hilbert_phase = np.angle(hilbert_signal)

    def _detect_phase_crossings(phase):
        # detect phase crossings from below phase to above phase
        is_larger = hilbert_phase > phase
        positive_crossings = ~is_larger & np.roll(is_larger, -1, axis=0)
        positive_crossings = positive_crossings[:-1]

        # select phases within [-pi, pi]
        real_crossings = np.real(hilbert_signal[:-1]) > np.imag(
            hilbert_signal[:-1])
        crossings = real_crossings & positive_crossings

        # arrange transitions times per channel
        times = asig.times[:-1]
        crossings_list = [
            times[crossings[:, channel]].magnitude
            for channel in range(channel_num)
        ]
        return crossings_list

    # UP transitions: A change of the hilbert phase from < transition_phase
    #                 to > transition_phase, followed by a peak (phase = 0).

    peaks = _detect_phase_crossings(0)
    transitions = _detect_phase_crossings(transition_phase)

    up_transitions = np.array([])
    channels = np.array([], dtype=int)

    for channel_id, (channel_peaks,
                     channel_transitions) in enumerate(zip(peaks,
                                                           transitions)):
        channel_up_transitions = np.array([])
        if channel_peaks is not None:
            for peak in channel_peaks:
                distance_to_peak = peak - np.array(channel_transitions)
                distance_to_peak = distance_to_peak[distance_to_peak > 0]
                if distance_to_peak.size:
                    trans_idx = np.argmin(distance_to_peak)
                    channel_up_transitions = np.append(
                        channel_up_transitions, channel_transitions[trans_idx])
        channel_up_transitions = np.unique(channel_up_transitions)
        up_transitions = np.append(up_transitions, channel_up_transitions)
        channels = np.append(
            channels,
            np.ones_like(channel_up_transitions, dtype=int) * channel_id)

    # save transitions as Event labels:'UP', array_annotations: channels
    sort_idx = np.argsort(up_transitions)

    evt = neo.Event(times=up_transitions[sort_idx]*asig.times.units,
                    labels=['UP'] * len(up_transitions),
                    name='Transitions',
                    array_annotations={'channels':channels[sort_idx]},
                    hilbert_transition_phase=transition_phase,
                    description='Transitions from down to up states. '\
                               +'annotated with the channel id ("channels").')

    for key in asig.array_annotations.keys():
        evt_ann = {key: asig.array_annotations[key][channels[sort_idx]]}
        evt.array_annotations.update(evt_ann)

    remove_annotations(asig, del_keys=['nix_name', 'neo_name'])
    evt.annotations.update(asig.annotations)
    return evt
Example #23
    Times = []
    Label = []
    Pixels = []

    for i in range(0, len(Wave)):
        Times.extend(Wave[i]['times'].magnitude)
        Label.extend(np.ones([len(Wave[i]['ndx'])]) * i)
        Pixels.extend(Wave[i]['ch'])

    Label = [str(i) for i in Label]
    Times = Times * (Wave[0]['times'].units)
    waves = neo.Event(times=Times.rescale(pq.s),
                      labels=Label,
                      name='Wavefronts',
                      array_annotations={'channels': Pixels,
                                         'x_coords': [p % DIM_Y for p in Pixels],
                                         'y_coords': [np.floor(p / DIM_Y) for p in Pixels]},
                      description='Transitions from down to up states. '
                                  + 'Labels are ids of wavefronts. '
                                  + 'Annotated with the channel id ("channels") and '
                                  + 'its position ("x_coords", "y_coords").',
                      spatial_scale=UpTrans_Evt.annotations['spatial_scale'])

    waves.annotations.update(UpTrans_Evt.annotations)
    remove_annotations(waves, del_keys=['nix_name', 'neo_name'])

    block.segments[0].events.append(waves)

    write_neo(args.output, block)
Example #24
def load_dataset(metadata,
                 lazy=False,
                 signal_group_mode='split-all',
                 filter_events_from_epochs=False):
    """
    Load a dataset.

    ``metadata`` may be a :class:`MetadataSelector
    <neurotic.datasets.metadata.MetadataSelector>` or a simple dictionary
    containing the appropriate data.

    The ``data_file`` in ``metadata`` is read into a Neo :class:`Block
    <neo.core.Block>` using an automatically detected :mod:`neo.io` class
    if ``lazy=False`` or a :mod:`neo.rawio` class if ``lazy=True``.

    Epochs and events loaded from ``annotations_file`` and
    ``epoch_encoder_file`` and spike trains loaded from ``tridesclous_file``
    are added to the Neo Block.

    If ``lazy=False``, filters given in ``metadata`` are applied to the
    signals and amplitude discriminators are run to detect spikes.
    """

    # read in the electrophysiology data
    blk = _read_data_file(metadata, lazy, signal_group_mode)

    # apply filters to signals if not using lazy loading of signals
    if not lazy:
        blk = _apply_filters(metadata, blk)

    # copy events into epochs and vice versa
    epochs_from_events = [
        neo.Epoch(name=ev.name,
                  times=ev.times,
                  labels=ev.labels,
                  durations=np.zeros_like(ev.times))
        for ev in blk.segments[0].events
    ]
    events_from_epochs = [
        neo.Event(name=ep.name, times=ep.times, labels=ep.labels)
        for ep in blk.segments[0].epochs
    ]
    if not filter_events_from_epochs:
        blk.segments[0].epochs += epochs_from_events
    blk.segments[0].events += events_from_epochs

    # read in annotations
    annotations_dataframe = _read_annotations_file(metadata)
    blk.segments[0].epochs += _create_neo_epochs_from_dataframe(
        annotations_dataframe, metadata,
        _abs_path(metadata, 'annotations_file'), filter_events_from_epochs)
    blk.segments[0].events += _create_neo_events_from_dataframe(
        annotations_dataframe, metadata, _abs_path(metadata,
                                                   'annotations_file'))

    # read in epoch encoder file
    epoch_encoder_dataframe = _read_epoch_encoder_file(metadata)
    blk.segments[0].epochs += _create_neo_epochs_from_dataframe(
        epoch_encoder_dataframe, metadata,
        _abs_path(metadata, 'epoch_encoder_file'), filter_events_from_epochs)
    blk.segments[0].events += _create_neo_events_from_dataframe(
        epoch_encoder_dataframe, metadata,
        _abs_path(metadata, 'epoch_encoder_file'))

    # classify spikes by amplitude if not using lazy loading of signals
    if not lazy:
        blk.segments[0].spiketrains += _run_amplitude_discriminators(
            metadata, blk)

    # read in spikes identified by spike sorting using tridesclous
    t_start = blk.segments[0].analogsignals[0].t_start
    t_stop = blk.segments[0].analogsignals[0].t_stop
    sampling_period = blk.segments[0].analogsignals[0].sampling_period
    spikes_dataframe = _read_spikes_file(metadata, blk)
    blk.segments[0].spiketrains += _create_neo_spike_trains_from_dataframe(
        spikes_dataframe, metadata, t_start, t_stop, sampling_period)

    # alphabetize epoch and event channels by name
    blk.segments[0].epochs.sort(key=lambda ep: ep.name)
    blk.segments[0].events.sort(key=lambda ev: ev.name)

    return blk
Example #25
        seg.irregularlysampledsignals.append(irr)

    for ind2 in range(3):
        an = neo.AnalogSignal(name='AnalogSignal' + str(ind2), signal=np.random.rand(10) * qu.mV,
                              sampling_rate=10 * qu.Hz)
        an.segment = seg
        seg.analogsignals.append(an)

    for ind2 in range(3):
        an = neo.AnalogSignalArray(name='AnalogSignalArray' + str(ind2), signal=np.random.rand(10, 10) * qu.mV,
                                   sampling_rate=10 * qu.Hz)
        an.segment = seg
        seg.analogsignalarrays.append(an)

    for ind2 in range(3):
        ev = neo.Event(name='Event' + str(ind2), time=np.random.rand() * qu.s, label='h')
        ev.segment = seg
        seg.events.append(ev)

    for ind2 in range(3):
        eva = neo.EventArray(name='EventArray' + str(ind2), times=np.random.rand(10) * qu.s, labels=['h'] * 10)
        eva.segment = seg
        seg.eventarrays.append(eva)

    for ind2 in range(3):
        ep = neo.Epoch(name='Epoch' + str(ind2), time=np.random.rand() * qu.s, duration=np.random.rand() * qu.s,
                       label='cc')
        ep.segment = seg
        seg.epochs.append(ep)

    for ind2 in range(3):
Example #26
def _read_data_file(metadata, lazy=False, signal_group_mode='split-all'):
    """
    Read in the ``data_file`` given in ``metadata`` using a :mod:`neo.io`
    class. Lazy-loading is used for signals if both ``lazy=True`` and the data
    file type is supported by a :mod:`neo.rawio` class; otherwise, signals are
    fully loaded. Lazy-loading is never used for epochs, events, and spike
    trains contained in the data file; these are always fully loaded. Returns a
    Neo :class:`Block <neo.core.Block>`.
    """

    # get a Neo IO object appropriate for the data file type
    io = _get_io(metadata)

    # force lazy=False if lazy is not supported by the reader class
    if lazy and not io.support_lazy:
        lazy = False
        logger.info(
            f'NOTE: Not reading signals in lazy mode because Neo\'s {io.__class__.__name__} reader does not support it.'
        )

    if 'signal_group_mode' in inspect.signature(
            io.read_block).parameters.keys():
        # - signal_group_mode='split-all' is the default because this ensures
        #   every channel gets its own AnalogSignal, which is important for
        #   indexing in EphyviewerConfigurator
        blk = io.read_block(lazy=lazy, signal_group_mode=signal_group_mode)
    else:
        # some IOs do not have signal_group_mode
        blk = io.read_block(lazy=lazy)

    if lazy and isinstance(io, neo.rawio.baserawio.BaseRawIO):
        # store the rawio for use with AnalogSignalFromNeoRawIOSource
        blk.rawio = io

    # load all objects except analog signals
    if lazy:

        if version.parse(neo.__version__) >= version.parse(
                '0.8.0'):  # Neo >= 0.8.0 has proxy objects with load method

            for i in range(len(blk.segments[0].epochs)):
                epoch = blk.segments[0].epochs[i]
                if hasattr(epoch, 'load'):
                    blk.segments[0].epochs[i] = epoch.load()

            for i in range(len(blk.segments[0].events)):
                event = blk.segments[0].events[i]
                if hasattr(event, 'load'):
                    blk.segments[0].events[i] = event.load()

            for i in range(len(blk.segments[0].spiketrains)):
                spiketrain = blk.segments[0].spiketrains[i]
                if hasattr(spiketrain, 'load'):
                    blk.segments[0].spiketrains[i] = spiketrain.load()

        else:  # Neo < 0.8.0 does not have proxy objects

            neorawioclass = neo.rawio.get_rawio_class(
                _abs_path(metadata, 'data_file'))
            if neorawioclass is not None:
                neorawio = neorawioclass(_abs_path(metadata, 'data_file'))
                neorawio.parse_header()

                for i in range(len(blk.segments[0].epochs)):
                    epoch = blk.segments[0].epochs[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['event_channels'])
                                          if chan['name'] == epoch.name
                                          and chan['type'] == b'epoch'), None)
                    if channel_index is not None:
                        ep_raw_times, ep_raw_durations, ep_labels = neorawio.get_event_timestamps(
                            event_channel_index=channel_index)
                        ep_times = neorawio.rescale_event_timestamp(
                            ep_raw_times, dtype='float64')
                        ep_durations = neorawio.rescale_epoch_duration(
                            ep_raw_durations, dtype='float64')
                        ep = neo.Epoch(times=ep_times * pq.s,
                                       durations=ep_durations * pq.s,
                                       labels=ep_labels,
                                       name=epoch.name)
                        blk.segments[0].epochs[i] = ep

                for i in range(len(blk.segments[0].events)):
                    event = blk.segments[0].events[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['event_channels'])
                                          if chan['name'] == event.name
                                          and chan['type'] == b'event'), None)
                    if channel_index is not None:
                        ev_raw_times, _, ev_labels = neorawio.get_event_timestamps(
                            event_channel_index=channel_index)
                        ev_times = neorawio.rescale_event_timestamp(
                            ev_raw_times, dtype='float64')
                        ev = neo.Event(times=ev_times * pq.s,
                                       labels=ev_labels,
                                       name=event.name)
                        blk.segments[0].events[i] = ev

                for i in range(len(blk.segments[0].spiketrains)):
                    spiketrain = blk.segments[0].spiketrains[i]
                    channel_index = next((i for i, chan in enumerate(
                        neorawio.header['unit_channels'])
                                          if chan['name'] == spiketrain.name),
                                         None)
                    if channel_index is not None:
                        st_raw_times = neorawio.get_spike_timestamps(
                            unit_index=channel_index)
                        st_times = neorawio.rescale_spike_timestamp(
                            st_raw_times, dtype='float64')
                        st = neo.SpikeTrain(times=st_times * pq.s,
                                            name=spiketrain.name)
                        blk.segments[0].spiketrains[i] = st

    # convert byte labels to Unicode strings
    for epoch in blk.segments[0].epochs:
        epoch.labels = epoch.labels.astype('U')

    for event in blk.segments[0].events:
        event.labels = event.labels.astype('U')

    return blk
Example #27
    def AddEvent(self, Times, Name):
        eve = neo.Event(times=Times, units=pq.s, name=Name)

        self.Seg.events.append(eve)
        self.UpdateEventDict()
Example #28
def load_dataset(metadata,
                 blk=None,
                 lazy=False,
                 signal_group_mode='split-all',
                 filter_events_from_epochs=False):
    """
    Load a dataset.

    ``metadata`` may be a :class:`MetadataSelector
    <neurotic.datasets.metadata.MetadataSelector>` or a simple dictionary
    containing the appropriate data.

    The ``data_file`` in ``metadata`` is read into a Neo :class:`Block
    <neo.core.Block>` using an automatically detected :mod:`neo.io` class
    if ``lazy=False`` or a :mod:`neo.rawio` class if ``lazy=True``. If
    ``data_file`` is unspecified, an empty Neo Block is created instead. If a
    Neo Block is passed as ``blk``, ``data_file`` is ignored.

    Epochs and events loaded from ``annotations_file`` and
    ``epoch_encoder_file`` and spike trains loaded from ``tridesclous_file``
    are added to the Neo Block.

    If ``lazy=False``, parameters given in ``metadata`` are used to apply
    filters to the signals, to detect spikes using amplitude discriminators, to
    calculate smoothed firing rates from spike trains, to detect bursts of
    spikes, and to calculate the rectified area under the curve (RAUC) for each
    signal.
    """

    if blk is None:
        if metadata.get('data_file', None) is not None:
            # read in the electrophysiology data
            blk = _read_data_file(metadata, lazy, signal_group_mode)
        else:
            # create an empty Block
            blk = neo.Block()
            seg = neo.Segment()
            blk.segments.append(seg)
    else:
        # a Block was provided
        if not isinstance(blk, neo.Block):
            raise TypeError('blk must be a neo.Block')

    # update the real-world start time of the data if provided
    if metadata.get('rec_datetime', None) is not None:
        if isinstance(metadata['rec_datetime'], datetime.datetime):
            blk.rec_datetime = metadata['rec_datetime']
        else:
            logger.warning(
                'Ignoring rec_datetime because it is not a properly formatted datetime: {}'
                .format(metadata['rec_datetime']))

    # apply filters to signals if not using lazy loading of signals
    if not lazy:
        blk = _apply_filters(metadata, blk)

    # copy events into epochs and vice versa
    epochs_from_events = [
        neo.Epoch(name=ev.name,
                  times=ev.times,
                  labels=ev.labels,
                  durations=np.zeros_like(ev.times))
        for ev in blk.segments[0].events
    ]
    events_from_epochs = [
        neo.Event(name=ep.name, times=ep.times, labels=ep.labels)
        for ep in blk.segments[0].epochs
    ]
    if not filter_events_from_epochs:
        blk.segments[0].epochs += epochs_from_events
    blk.segments[0].events += events_from_epochs

    # read in annotations
    annotations_dataframe = _read_annotations_file(metadata)
    blk.segments[0].epochs += _create_neo_epochs_from_dataframe(
        annotations_dataframe, metadata,
        _abs_path(metadata, 'annotations_file'), filter_events_from_epochs)
    blk.segments[0].events += _create_neo_events_from_dataframe(
        annotations_dataframe, metadata, _abs_path(metadata,
                                                   'annotations_file'))

    # read in epoch encoder file
    epoch_encoder_dataframe = _read_epoch_encoder_file(metadata)
    blk.segments[0].epochs += _create_neo_epochs_from_dataframe(
        epoch_encoder_dataframe, metadata,
        _abs_path(metadata, 'epoch_encoder_file'), filter_events_from_epochs)
    blk.segments[0].events += _create_neo_events_from_dataframe(
        epoch_encoder_dataframe, metadata,
        _abs_path(metadata, 'epoch_encoder_file'))

    # classify spikes by amplitude if not using lazy loading of signals
    if not lazy:
        blk.segments[0].spiketrains += _run_amplitude_discriminators(
            metadata, blk)

    # read in spikes identified by spike sorting using tridesclous
    spikes_dataframe = _read_spikes_file(metadata, blk)
    if spikes_dataframe is not None:
        if blk.segments[0].analogsignals:
            t_start = blk.segments[0].analogsignals[
                0].t_start  # assuming all AnalogSignals start at the same time
            t_stop = blk.segments[0].analogsignals[
                0].t_stop  # assuming all AnalogSignals stop at the same time
            sampling_period = blk.segments[0].analogsignals[
                0].sampling_period  # assuming all AnalogSignals have the same sampling rate
            blk.segments[
                0].spiketrains += _create_neo_spike_trains_from_dataframe(
                    spikes_dataframe, metadata, t_start, t_stop,
                    sampling_period)
        else:
            logger.warning(
                'Ignoring tridesclous_file because the sampling rate and start time could not be inferred from analog signals'
            )

    # calculate smoothed firing rates from spike trains if not using lazy
    # loading of signals
    if not lazy:
        blk = _compute_firing_rates(metadata, blk)

    # identify bursts from spike trains if not using lazy loading of signals
    if not lazy:
        blk.segments[0].epochs += _run_burst_detectors(metadata, blk)

    # alphabetize epoch and event channels by name
    blk.segments[0].epochs.sort(key=lambda ep: ep.name or '')
    blk.segments[0].events.sort(key=lambda ev: ev.name or '')

    # compute rectified area under the curve (RAUC) for each signal if not
    # using lazy loading of signals
    if not lazy and metadata.get('rauc_bin_duration', None) is not None:
        for sig in blk.segments[0].analogsignals:
            rauc_sig = _elephant_tools.rauc(
                signal=sig,
                baseline=metadata.get('rauc_baseline', None),
                bin_duration=metadata['rauc_bin_duration'] * pq.s,
            )
            rauc_sig.name = sig.name + ' RAUC'
            sig.annotate(
                rauc_sig=rauc_sig,
                rauc_baseline=metadata.get('rauc_baseline', None),
                rauc_bin_duration=metadata['rauc_bin_duration'] * pq.s,
            )

    return blk
Example #29
def detect_transitions(asig, transition_phase):
    # ToDo: replace with elephant function
    signal = asig.as_array()
    dim_t, channel_num = signal.shape

    hilbert_signal = hilbert(signal, axis=0)
    hilbert_phase = np.angle(hilbert_signal)

    def _detect_phase_crossings(phase):
        t_idx, channel_idx = np.where(
            np.diff(np.signbit(hilbert_phase - phase), axis=0))
        crossings = [None] * channel_num
        for ti, channel in zip(t_idx, channel_idx):
            # select only crossings from negative to positive
            if (hilbert_phase-phase)[ti][channel] <= 0 \
            and np.real(hilbert_signal[ti][channel]) \
              > np.imag(hilbert_signal[ti][channel]):
                if crossings[channel] is None:
                    crossings[channel] = np.array([])
                if asig.times[ti].magnitude not in crossings[channel]:
                    crossings[channel] = np.append(crossings[channel],
                                                   asig.times[ti].magnitude)
        return crossings

    # UP transitions: A change of the hilbert phase from < transition_phase
    #                 to > transition_phase, followed by a peak (phase = 0).

    peaks = _detect_phase_crossings(0)
    transitions = _detect_phase_crossings(transition_phase)
    up_transitions = np.array([])
    channels = np.array([], dtype=int)

    for channel_id, (channel_peaks,
                     channel_transitions) in enumerate(zip(peaks,
                                                           transitions)):
        channel_up_transitions = np.array([])
        if channel_peaks is not None:
            for peak in channel_peaks:
                distance_to_peak = peak - np.array(channel_transitions)
                distance_to_peak = distance_to_peak[distance_to_peak > 0]
                if distance_to_peak.size:
                    trans_idx = np.argmin(distance_to_peak)
                    channel_up_transitions = np.append(
                        channel_up_transitions, channel_transitions[trans_idx])
        channel_up_transitions = np.unique(channel_up_transitions)
        up_transitions = np.append(up_transitions, channel_up_transitions)
        channels = np.append(
            channels,
            np.ones_like(channel_up_transitions, dtype=int) * channel_id)

    # save transitions as Event labels:'UP', array_annotations: channels
    sort_idx = np.argsort(up_transitions)

    evt = neo.Event(times=up_transitions[sort_idx]*asig.times.units,
                    labels=['UP'] * len(up_transitions),
                    name='Transitions',
                    array_annotations={'channels':channels[sort_idx]},
                    hilbert_transition_phase=transition_phase,
                    description='Transitions from down to up states. '\
                               +'annotated with the channel id ("channels").')

    for key in asig.array_annotations.keys():
        evt_ann = {key: asig.array_annotations[key][channels[sort_idx]]}
        evt.array_annotations.update(evt_ann)

    remove_annotations(asig, del_keys=['nix_name', 'neo_name'])
    evt.annotations.update(asig.annotations)
    return evt
Example #30
def detect_transitions(asig, transition_phase):
    # ToDo: replace with elephant function when signal can be neo object
    signal = asig.as_array()
    dim_t, channel_num = signal.shape

    hilbert_signal = hilbert(signal, axis=0)
    hilbert_phase = np.angle(hilbert_signal)

    # plt.plot(np.real(hilbert_signal[:250,5050]), color='r')
    # plt.plot(np.imag(hilbert_signal[:250,5050]), color='b')
    # plt.plot(hilbert_phase[:250,5050], color='g')
    # plt.show()

    def _detect_phase_crossings(phase):
        t_idx, channel_idx = np.where(
            np.diff(np.signbit(hilbert_phase - phase), axis=0))
        crossings = [None] * channel_num
        for ti, channel in zip(t_idx, channel_idx):
            # select only crossings from negative to positive
            if (hilbert_phase-phase)[ti][channel] <= 0 \
            and np.real(hilbert_signal[ti][channel]) \
              > np.imag(hilbert_signal[ti][channel]):
                if crossings[channel] is None:
                    crossings[channel] = np.array([])
                if asig.times[ti].magnitude not in crossings[channel]:
                    crossings[channel] = np.append(crossings[channel],
                                                   asig.times[ti].magnitude)
        return crossings

    # UP transitions: A change of the hilbert phase from < transition_phase
    #                 to > transition_phase, followed by a peak (phase = 0).

    peaks = _detect_phase_crossings(0)
    transitions = _detect_phase_crossings(transition_phase)
    up_transitions = np.array([])
    channels = np.array([])

    for channel_id, (channel_peaks,
                     channel_transitions) in enumerate(zip(peaks,
                                                           transitions)):
        channel_up_transitions = np.array([])
        if channel_peaks is not None:
            for peak in channel_peaks:
                distance_to_peak = peak - np.array(channel_transitions)
                distance_to_peak = distance_to_peak[distance_to_peak > 0]
                if distance_to_peak.size:
                    trans_idx = np.argmin(distance_to_peak)
                    channel_up_transitions = np.append(
                        channel_up_transitions, channel_transitions[trans_idx])
        channel_up_transitions = np.unique(channel_up_transitions)
        up_transitions = np.append(up_transitions, channel_up_transitions)
        channels = np.append(channels,
                             np.ones_like(channel_up_transitions) * channel_id)

    # save transitions as Event labels:'UP', array_annotations: channels
    sort_idx = np.argsort(up_transitions)

    return neo.Event(times=up_transitions[sort_idx] * asig.times.units,
                     labels=['UP'] * len(up_transitions),
                     name='Transitions',
                     array_annotations={'channels': channels[sort_idx]},
                     hilbert_transition_phase=transition_phase)
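A usage sketch for this variant on a fabricated sinusoidal signal; with transition_phase = -pi/2 the detected 'UP' times sit on the rising flank before each peak (assumes the numpy/scipy/neo imports used by the snippet):

import numpy as np
import quantities as pq
import neo
from scipy.signal import hilbert

t = np.arange(0, 2, 0.01)
asig = neo.AnalogSignal(np.sin(2 * np.pi * t)[:, np.newaxis],
                        units='mV', sampling_rate=100 * pq.Hz)
evt = detect_transitions(asig, transition_phase=-np.pi / 2)
print(evt.times)   # roughly one 'UP' per oscillation cycle
print(evt.labels)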