Example no. 1
def detect_epoch_ripples(epoch_key,
                         animals,
                         sampling_frequency,
                         brain_areas=_BRAIN_AREAS):
    '''Returns a list of tuples containing the start and end times of
    ripples. Candidate ripples are computed via the ripple detection
    function and then filtered to exclude ripples that occur while the
    animal is moving.
    '''
    logger.info('Detecting ripples')

    tetrode_info = make_tetrode_dataframe(animals).xs(epoch_key,
                                                      drop_level=False)
    brain_areas = [brain_areas] if isinstance(brain_areas,
                                              str) else brain_areas
    is_brain_areas = tetrode_info.area.isin(brain_areas)
    if 'CA1' in brain_areas:
        is_brain_areas = is_brain_areas & (tetrode_info.descrip.isin(
            ['riptet']) | tetrode_info.validripple)
    logger.debug(
        tetrode_info[is_brain_areas].loc[:, ['area', 'depth', 'descrip']])
    tetrode_keys = tetrode_info[is_brain_areas].index.tolist()
    LFPs = get_LFPs(tetrode_keys, animals)

    speed = get_interpolated_position_dataframe(epoch_key,
                                                animals,
                                                max_distance_from_well=5).speed
    not_null = np.any(pd.notnull(LFPs), axis=1) & pd.notnull(speed)

    return Kay_ripple_detector(LFPs.index[not_null],
                               LFPs.values[not_null],
                               speed.values[not_null],
                               sampling_frequency,
                               minimum_duration=pd.Timedelta(milliseconds=15),
                               zscore_threshold=3)
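

# A minimal sketch (not part of the original pipeline) of the movement filter
# described in the docstring above: keep only candidate ripples whose mean
# running speed stays below a threshold. The 4 cm/s cutoff and the
# start_time/end_time columns are assumptions about the detector's output.
def exclude_movement_ripples(candidate_ripple_times, speed, max_speed=4.0):
    is_immobile = [
        speed.loc[start:end].mean() < max_speed
        for start, end in zip(candidate_ripple_times.start_time,
                              candidate_ripple_times.end_time)
    ]
    return candidate_ripple_times.loc[is_immobile]
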
def decode_replay_during_hippocampus_ripple(epoch_key):
    tetrode_info = make_tetrode_dataframe(ANIMALS).xs(epoch_key,
                                                      drop_level=False)
    ripple_times = detect_epoch_ripples(epoch_key,
                                        ANIMALS,
                                        sampling_frequency=SAMPLING_FREQUENCY,
                                        brain_areas='CA1')

    for brain_area in tetrode_info.area.dropna().unique().tolist():
        if brain_area not in ['???', 'Reference', 'CA1']:
            print(brain_area)
            try:
                # Compare different types of ripples
                replay_info, state_probability, posterior_density = (
                    decode_ripple_clusterless(epoch_key,
                                              ANIMALS,
                                              ripple_times,
                                              mark_names=None,
                                              brain_areas=brain_area))

                results = dict()
                name = 'hippocampal_ripple/' + brain_area
                results[name + '/replay_info'] = replay_info.to_xarray()
                results[name + '/state_probability'] = state_probability
                results[name + '/posterior_density'] = posterior_density

                for group_name, data in results.items():
                    save_xarray(PROCESSED_DATA_DIR, epoch_key, data,
                                group_name)
            except (ValueError, KeyError, FileNotFoundError, RuntimeError):
                continue
Example no. 3
def load_data(epoch_key):
    logger.info('Loading position information and linearizing...')
    position_info = (get_interpolated_position_info(
        epoch_key, ANIMALS).dropna(subset=["linear_position"]))
    track_graph = get_track_graph()

    logger.info('Loading multiunits...')
    tetrode_info = make_tetrode_dataframe(ANIMALS, epoch_key=epoch_key)
    is_brain_areas = tetrode_info.area.str.upper().isin(
        ["CA1", "DCA1", "ICA1"])
    tetrode_keys = tetrode_info.loc[is_brain_areas].index

    def _time_function(*args, **kwargs):
        return position_info.index

    multiunits = get_all_multiunit_indicators(tetrode_keys, ANIMALS,
                                              _time_function)

    multiunit_spikes = (np.any(~np.isnan(multiunits.values),
                               axis=1)).astype(float)
    multiunit_firing_rate = pd.DataFrame(get_multiunit_population_firing_rate(
        multiunit_spikes, SAMPLING_FREQUENCY),
                                         index=position_info.index,
                                         columns=['firing_rate'])

    return {
        'position_info': position_info,
        'tetrode_info': tetrode_info,
        'multiunits': multiunits,
        'multiunit_firing_rate': multiunit_firing_rate,
        'track_graph': track_graph,
    }
Example no. 4
def get_theta_times(epoch_key, sampling_frequency=1500):
    THETA_BAND = (6, 12)
    TIME_WINDOW_STEP = TIME_WINDOW_DURATION = 0.300
    TIME_HALFBANDWIDTH_PRODUCT = 1

    position_info = (get_interpolated_position_dataframe(
        epoch_key, ANIMALS).dropna(subset=['linear_position', 'speed']))
    time = position_info.index
    tetrode_info = make_tetrode_dataframe(ANIMALS, epoch_key=epoch_key)
    tetrode_keys = tetrode_info.loc[tetrode_info.area == 'Reference'].index

    lfps = get_LFPs(tetrode_keys, ANIMALS).reindex(time)

    multitaper_params = dict(
        time_halfbandwidth_product=TIME_HALFBANDWIDTH_PRODUCT,
        time_window_duration=TIME_WINDOW_DURATION,
        time_window_step=TIME_WINDOW_STEP,
        start_time=(time.values / np.timedelta64(1, 's')).min(),
    )

    df, model = detect_spectral_rhythm(time=time.values /
                                       np.timedelta64(1, 's'),
                                       lfps=lfps.values,
                                       sampling_frequency=sampling_frequency,
                                       multitaper_params=multitaper_params,
                                       frequency_band=THETA_BAND)

    return df.is_spectral_rhythm
Example no. 5
def load_data(epoch_key, brain_areas=None):

    if brain_areas is None:
        brain_areas = BRAIN_AREAS

    time = get_trial_time(epoch_key, ANIMALS)
    time = (pd.Series(np.ones_like(time, dtype=float),
                      index=time).resample('2ms').mean().index)

    def _time_function(*args, **kwargs):
        return time

    position_info = (get_interpolated_position_dataframe(
        epoch_key, ANIMALS,
        _time_function).dropna(subset=['linear_position', 'speed']))

    time = position_info.index

    tetrode_info = make_tetrode_dataframe(ANIMALS, epoch_key=epoch_key)
    is_brain_areas = (
        tetrode_info.area.astype(str).str.upper().isin(brain_areas))
    tetrode_keys = tetrode_info.loc[is_brain_areas].index
    lfps = get_LFPs(tetrode_keys, ANIMALS)
    lfps = lfps.resample('2ms').mean().ffill().reindex(time)

    try:
        neuron_info = make_neuron_dataframe(ANIMALS).xs(epoch_key,
                                                        drop_level=False)
        neuron_info = neuron_info.loc[(neuron_info.numspikes > 100)
                                      & neuron_info.area.isin(brain_areas) &
                                      (neuron_info.type == 'principal')]
        spikes = get_all_spike_indicators(neuron_info.index, ANIMALS,
                                          _time_function).reindex(time)
    except KeyError:
        spikes = None

    tetrode_info = tetrode_info.loc[is_brain_areas]
    multiunit = (get_all_multiunit_indicators(
        tetrode_info.index, ANIMALS,
        _time_function).sel(features=_MARKS).reindex({'time': time}))
    multiunit_spikes = (np.any(~np.isnan(multiunit.values),
                               axis=1)).astype(float)
    multiunit_firing_rate = pd.DataFrame(get_multiunit_population_firing_rate(
        multiunit_spikes, SAMPLING_FREQUENCY, smoothing_sigma=0.020),
                                         index=time,
                                         columns=['firing_rate'])

    return {
        'position_info': position_info,
        'spikes': spikes,
        'multiunit': multiunit,
        'lfps': lfps,
        'tetrode_info': tetrode_info,
        'multiunit_firing_rate': multiunit_firing_rate,
        'sampling_frequency': SAMPLING_FREQUENCY,
    }
Example no. 6
def load_from_filterframework(animal,
                              datatype,
                              filterframework_dir,
                              index_keys=None):
    # Accept a single key or a list of keys; avoid a mutable default argument.
    if index_keys is None:
        index_keys = []
    elif not isinstance(index_keys, list):
        index_keys = [index_keys]
    animal_dict = {}
    animal_dict[animal] = lfdp.Animal(directory=filterframework_dir,
                                      short_name=animal)

    if datatype == 'ntrodeInfo':
        out = lfdp.make_tetrode_dataframe(animal_dict)
        out['subarea'] = out['subarea'].astype(str)

    elif datatype == 'taskInfo':
        out = lfdp.make_epochs_dataframe(animal_dict)

    elif datatype == 'linearcoord' or datatype == 'task_segments':
        out = lfdp.make_epochs_dataframe(animal_dict)
        out = _get_linearcoord_tasksegments(out, animal_dict)

    elif datatype == 'position':
        if len(index_keys[0]) != 3:
            print('index_keys required as a list of (animal, day, epoch) '
                  'tuples')
            return
        position_dict_df = {}
        for epoch_index in index_keys:
            position_dict_df[epoch_index] = lfdp.get_position_dataframe(
                epoch_index, animal_dict)
        out = pd.concat(position_dict_df).reset_index().rename(
            {
                'level_0': 'animal',
                'level_1': 'day',
                'level_2': 'epoch'
            },
            axis=1)
        out['timedelta'] = pd.TimedeltaIndex(out['time'], unit='ns')
        out.set_index(['animal', 'day', 'epoch', 'timedelta'], inplace=True)
        out['is_correct'] = out.is_correct.astype('float')

    elif datatype == 'lfp':
        if len(index_keys[0]) != 4:
            print('index_keys required as a list of '
                  '(animal, day, epoch, ntrode) tuples')
            return
        out = _load_lfp_from_filterframework(index_keys, animal_dict)

    else:
        raise ValueError('Unknown datatype: {0}'.format(datatype))

    if 'tetrode_number' in out.index.names:
        print('renaming "tetrode_number" to "ntrode" in multiindex')
        out.index.rename('ntrode', level='tetrode_number', inplace=True)

    return out
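

# Hedged usage sketch: load position data for one epoch through the wrapper
# above. The filterframework directory path and the ('HPa', 6, 2) epoch key
# are placeholders, not values taken from the original project.
position = load_from_filterframework(
    animal='HPa',
    datatype='position',
    filterframework_dir='/path/to/filterframework',
    index_keys=[('HPa', 6, 2)])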
Example no. 7
def load_data(epoch_key):
    logger.info('Loading data...')
    time = get_trial_time(epoch_key, ANIMALS)
    time = (pd.Series(np.ones_like(time, dtype=float),
                      index=time).resample('2ms').mean().index)

    def _time_function(*args, **kwargs):
        return time

    position_info = (get_interpolated_position_dataframe(
        epoch_key, ANIMALS,
        _time_function).dropna(subset=['linear_position', 'speed']))

    time = position_info.index
    speed = position_info['speed']
    position_boundaries = get_position_boundaries(position_info)

    neuron_info = make_neuron_dataframe(ANIMALS).xs(epoch_key,
                                                    drop_level=False)
    spikes = get_spikes(neuron_info, _time_function)

    tetrode_info = make_tetrode_dataframe(ANIMALS, epoch_key=epoch_key)

    track_graph, _ = make_track_graph(epoch_key, ANIMALS)

    logger.info('Finding multiunit high synchrony events...')
    adhoc_multiunit = get_adhoc_multiunit(speed, tetrode_info, _time_function)

    logger.info('Finding ripple times...')
    adhoc_ripple = get_adhoc_ripple(epoch_key, tetrode_info, time)

    logger.info('Estimating gamma power...')
    gamma_power = estimate_gamma_power(time, tetrode_info)

    logger.info('Estimating theta power...')
    theta_power = estimate_theta_power(time, tetrode_info)

    return {
        'position_info': position_info,
        'tetrode_info': tetrode_info,
        'neuron_info': neuron_info,
        'spikes': spikes,
        'track_graph': track_graph,
        'sampling_frequency': SAMPLING_FREQUENCY,
        **position_boundaries,
        **adhoc_ripple,
        **adhoc_multiunit,
        **gamma_power,
        **theta_power,
    }
Example no. 8
def get_ripple_times(epoch_key, sampling_frequency=1500,
                     brain_areas=['CA1', 'CA2', 'CA3']):
    position_info = (
        get_interpolated_position_dataframe(epoch_key, ANIMALS)
        .dropna(subset=['linear_position', 'speed']))
    speed = position_info['speed']
    time = position_info.index
    tetrode_info = make_tetrode_dataframe(ANIMALS).xs(
        epoch_key, drop_level=False)
    if not np.all(np.isnan(tetrode_info.validripple.astype(float))):
        tetrode_keys = tetrode_info.loc[
            (tetrode_info.validripple == 1)].index
    else:
        is_brain_areas = (
            tetrode_info.area.astype(str).str.upper().isin(brain_areas))
        tetrode_keys = tetrode_info.loc[is_brain_areas].index

    ripple_lfps = get_LFPs(tetrode_keys, ANIMALS).reindex(time)
    ripple_filtered_lfps = pd.DataFrame(
        filter_ripple_band(np.asarray(ripple_lfps)),
        index=ripple_lfps.index)

    ripple_times = Kay_ripple_detector(
        time, ripple_lfps.values, speed.values, sampling_frequency,
        zscore_threshold=2.0, close_ripple_threshold=np.timedelta64(0, 'ms'),
        minimum_duration=np.timedelta64(15, 'ms'))

    ripple_consensus_trace = pd.DataFrame(
        get_ripple_consensus_trace(
            ripple_filtered_lfps, sampling_frequency),
        index=ripple_filtered_lfps.index,
        columns=['ripple_consensus_trace'])
    ripple_consensus_trace_zscore = pd.DataFrame(
        zscore(ripple_consensus_trace, nan_policy='omit'),
        index=ripple_filtered_lfps.index,
        columns=['ripple_consensus_trace_zscore'])

    return (ripple_times, ripple_filtered_lfps, ripple_lfps,
            ripple_consensus_trace_zscore)


def decode_replay_by_brain_area(epoch_key):
    tetrode_info = make_tetrode_dataframe(ANIMALS).xs(epoch_key,
                                                      drop_level=False)

    results = dict()

    # position_occupancy = get_position_occupancy(
    #     epoch_key, ANIMALS, EXTENT, GRIDSIZE)
    # results['position_occupancy'] = position_occupancy.to_xarray()

    for brain_area in tetrode_info.area.dropna().unique().tolist():
        if brain_area not in ['???', 'Reference']:
            print(brain_area)
            try:
                ripple_times = detect_epoch_ripples(
                    epoch_key,
                    ANIMALS,
                    sampling_frequency=SAMPLING_FREQUENCY,
                    brain_areas=brain_area)
                # Compare different types of ripples
                replay_info, state_probability, posterior_density = (
                    decode_ripple_clusterless(epoch_key,
                                              ANIMALS,
                                              ripple_times,
                                              mark_names=None,
                                              brain_areas=brain_area))

                results[brain_area + '/replay_info'] = replay_info.to_xarray()
                results[brain_area + '/state_probability'] = state_probability
                results[brain_area + '/posterior_density'] = posterior_density
            except (ValueError, FileNotFoundError):
                continue

    for group_name, data in results.items():
        try:
            save_xarray(PROCESSED_DATA_DIR, epoch_key, data, group_name)
        except KeyError:
            continue
Example no. 10
def get_ripple_times(epoch_key,
                     sampling_frequency=1500,
                     brain_areas=BRAIN_AREAS):
    position_info = (get_interpolated_position_dataframe(
        epoch_key, ANIMALS).dropna(subset=['linear_position', 'speed']))
    speed = position_info['speed']
    time = position_info.index
    tetrode_info = make_tetrode_dataframe(ANIMALS, epoch_key=epoch_key)
    if not np.all(np.isnan(tetrode_info.validripple.astype(float))):
        tetrode_keys = tetrode_info.loc[(tetrode_info.validripple == 1)].index
    else:
        is_brain_areas = (
            tetrode_info.area.astype(str).str.upper().isin(brain_areas))
        tetrode_keys = tetrode_info.loc[is_brain_areas].index

    lfps = get_LFPs(tetrode_keys, ANIMALS).reindex(time)
    return Kay_ripple_detector(time,
                               lfps.values,
                               speed.values,
                               sampling_frequency,
                               zscore_threshold=2.0,
                               close_ripple_threshold=np.timedelta64(0, 'ms'),
                               minimum_duration=np.timedelta64(15, 'ms'))
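
# Hedged usage sketch: summarize the detected ripples. The timedelta-valued
# start_time/end_time columns match how ripple durations are computed in the
# later examples; the epoch key is a placeholder.
ripple_times = get_ripple_times(('HPa', 6, 2))
ripple_durations = (ripple_times.end_time
                    - ripple_times.start_time).dt.total_seconds()
print(f'{len(ripple_times)} ripples, '
      f'mean duration {ripple_durations.mean():.3f} s')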
Example no. 11
def get_ripple_times2(epoch_key,
                      sampling_frequency=1500,
                      brain_areas=BRAIN_AREAS):
    RIPPLE_BAND = (150, 250)
    TIME_WINDOW_STEP = TIME_WINDOW_DURATION = 0.020
    TIME_HALFBANDWIDTH_PRODUCT = 1

    position_info = (get_interpolated_position_dataframe(
        epoch_key, ANIMALS).dropna(subset=['linear_distance', 'linear_speed']))
    time = position_info.index
    tetrode_info = make_tetrode_dataframe(ANIMALS).xs(epoch_key,
                                                      drop_level=False)
    if not np.all(np.isnan(tetrode_info.validripple.astype(float))):
        tetrode_keys = tetrode_info.loc[(tetrode_info.validripple == 1)].index
    else:
        is_brain_areas = (
            tetrode_info.area.astype(str).str.upper().isin(brain_areas))
        tetrode_keys = tetrode_info.loc[is_brain_areas].index

    lfps = get_LFPs(tetrode_keys, ANIMALS).reindex(time)

    multitaper_params = dict(
        time_halfbandwidth_product=TIME_HALFBANDWIDTH_PRODUCT,
        time_window_duration=TIME_WINDOW_DURATION,
        time_window_step=TIME_WINDOW_STEP,
        start_time=(time.values / np.timedelta64(1, 's')).min(),
    )

    df, model = detect_spectral_rhythm(time=time.values /
                                       np.timedelta64(1, 's'),
                                       lfps=lfps.values,
                                       sampling_frequency=sampling_frequency,
                                       multitaper_params=multitaper_params,
                                       frequency_band=RIPPLE_BAND)

    return df.is_spectral_rhythm
Example no. 12
def load_data(
    epoch_key,
    position_to_linearize=['nose_x', 'nose_y'],
    max_distance_from_well=30,
    min_distance_traveled=50,
):
    logger.info('Loading position info...')
    environment = np.asarray(
        make_epochs_dataframe(ANIMALS).loc[epoch_key].environment)[0]
    if environment == "lineartrack":
        edge_order, edge_spacing = LINEAR_EDGE_ORDER, LINEAR_EDGE_SPACING
    elif environment == "wtrack":
        edge_order, edge_spacing = WTRACK_EDGE_ORDER, WTRACK_EDGE_SPACING
    else:
        edge_order, edge_spacing = None, None
    position_info = get_interpolated_position_info(
        epoch_key,
        position_to_linearize=position_to_linearize,
        max_distance_from_well=max_distance_from_well,
        min_distance_traveled=min_distance_traveled,
        edge_order=edge_order,
        edge_spacing=edge_spacing,
    ).dropna(subset=["linear_position"])
    tetrode_info = make_tetrode_dataframe(ANIMALS, epoch_key=epoch_key)
    tetrode_keys = tetrode_info.loc[tetrode_info.area.isin(['ca1R',
                                                            'ca1L'])].index

    logger.info('Loading multiunit...')

    def _time_function(*args, **kwargs):
        return position_info.index

    adhoc_multiunit = get_adhoc_multiunit(position_info, tetrode_keys,
                                          _time_function,
                                          position_to_linearize)

    logger.info('Loading spikes...')
    time = position_info.index
    try:
        neuron_info = make_neuron_dataframe(
            ANIMALS, exclude_animals=['Monty', 'Peanut']).xs(epoch_key,
                                                             drop_level=False)
        neuron_info = neuron_info.loc[neuron_info.accepted.astype(bool)]
        spikes = get_all_spike_indicators(neuron_info.index, ANIMALS,
                                          _time_function).reindex(time)
    except (ValueError, KeyError):
        neuron_info = None
        spikes = None

    logger.info('Finding ripple times...')
    adhoc_ripple = get_adhoc_ripple(epoch_key, tetrode_info, time,
                                    position_to_linearize)

    track_graph = make_track_graph(epoch_key, ANIMALS)

    dio = get_DIO(epoch_key, ANIMALS)
    dio_indicator = get_DIO_indicator(epoch_key,
                                      ANIMALS,
                                      time_function=_time_function)

    return {
        'position_info': position_info,
        'tetrode_info': tetrode_info,
        'neuron_info': neuron_info,
        'spikes': spikes,
        'dio': dio,
        'dio_indicator': dio_indicator,
        'track_graph': track_graph,
        'edge_order': edge_order,
        'edge_spacing': edge_spacing,
        **adhoc_ripple,
        **adhoc_multiunit,
    }
Example no. 13
def load_data(epoch_key, brain_areas=['CA1', 'CA2', 'CA3']):

    time = get_trial_time(epoch_key, ANIMALS)
    time = (pd.Series(np.ones_like(time, dtype=float), index=time)
            .resample('2ms').mean()
            .index)

    def _time_function(*args, **kwargs):
        return time

    logger.info('Loading position info...')
    position_info = (
        get_interpolated_position_dataframe(
            epoch_key, ANIMALS, _time_function)
        .dropna(subset=['linear_position', 'speed']))

    time = position_info.index

    tetrode_info = make_tetrode_dataframe(ANIMALS, epoch_key=epoch_key)
    is_brain_areas = (
        tetrode_info.area.astype(str).str.upper().isin(brain_areas))
    tetrode_keys = tetrode_info.loc[is_brain_areas].index
    lfps = get_LFPs(tetrode_keys, ANIMALS)
    lfps = lfps.resample('2ms').mean().ffill().reindex(time)

    logger.info('Loading spikes...')
    try:
        neuron_info = make_neuron_dataframe(ANIMALS).xs(
            epoch_key, drop_level=False)
        neuron_info = neuron_info.loc[
            (neuron_info.numspikes > 100) &
            neuron_info.area.isin(brain_areas) &
            (neuron_info.type == 'principal')]
        spikes = get_all_spike_indicators(
            neuron_info.index, ANIMALS, _time_function).reindex(time)
    except KeyError:
        spikes = None

    logger.info('Loading multiunit...')
    tetrode_info = tetrode_info.loc[is_brain_areas]
    multiunit = (get_all_multiunit_indicators(
        tetrode_info.index, ANIMALS, _time_function)
        .reindex({'time': time}))

    multiunit = multiunit.sel(features=MARKS)
    multiunit_spikes = (np.any(~np.isnan(multiunit.values), axis=1)
                        ).astype(float)
    multiunit_firing_rate = pd.DataFrame(
        get_multiunit_population_firing_rate(
            multiunit_spikes, SAMPLING_FREQUENCY), index=time,
        columns=['firing_rate'])

    logger.info('Finding ripple times...')
    (ripple_times, ripple_filtered_lfps, ripple_lfps,
     ripple_consensus_trace_zscore) = get_ripple_times(epoch_key)

    ripple_times = ripple_times.assign(
        duration=lambda df: (df.end_time - df.start_time).dt.total_seconds())

    return {
        'position_info': position_info,
        'ripple_times': ripple_times,
        'spikes': spikes,
        'multiunit': multiunit,
        'lfps': lfps,
        'tetrode_info': tetrode_info,
        'ripple_filtered_lfps': ripple_filtered_lfps,
        'ripple_lfps': ripple_lfps,
        'ripple_consensus_trace_zscore': ripple_consensus_trace_zscore,
        'multiunit_firing_rate': multiunit_firing_rate,
        'sampling_frequency': SAMPLING_FREQUENCY,
    }
Example no. 14
def decode_ripple_clusterless(epoch_key,
                              animals,
                              ripple_times,
                              sampling_frequency=1500,
                              n_place_bins=61,
                              place_std_deviation=None,
                              mark_std_deviation=20,
                              mark_names=_MARKS,
                              brain_areas=_BRAIN_AREAS):
    logger.info('Decoding ripples')
    tetrode_info = make_tetrode_dataframe(animals).xs(epoch_key,
                                                      drop_level=False)
    brain_areas = [brain_areas] if isinstance(brain_areas,
                                              str) else brain_areas
    is_brain_areas = tetrode_info.area.isin(brain_areas)
    brain_areas_tetrodes = tetrode_info[
        is_brain_areas
        & ~tetrode_info.descrip.str.endswith('Ref').fillna(False)
        & ~tetrode_info.descrip.str.startswith('Ref').fillna(False)]
    logger.debug(brain_areas_tetrodes.loc[:, ['area', 'depth', 'descrip']])

    position_info = get_interpolated_position_dataframe(
        epoch_key, animals, max_distance_from_well=5)

    if mark_names is None:
        # Use all available mark dimensions
        mark_names = get_multiunit_indicator_dataframe(
            brain_areas_tetrodes.index[0], animals).columns.tolist()
        mark_names = [
            mark_name for mark_name in mark_names
            if mark_name not in ['x_position', 'y_position']
        ]

    is_training = (position_info.speed > 4) & position_info.is_correct
    marks = [(get_multiunit_indicator_dataframe(tetrode_key,
                                                animals).loc[:, mark_names])
             for tetrode_key in brain_areas_tetrodes.index]
    marks = [
        tetrode_marks for tetrode_marks in marks
        if (tetrode_marks.loc[is_training].dropna().shape[0]) != 0
    ]

    train_position_info = position_info.loc[is_training]

    training_marks = np.stack([
        tetrode_marks.loc[train_position_info.index, mark_names]
        for tetrode_marks in marks
    ],
                              axis=0)

    decoder = ClusterlessDecoder(
        train_position_info.linear_distance.values,
        train_position_info.task.values,
        training_marks,
        replay_speedup_factor=16,
    ).fit()

    test_marks = _get_ripple_marks(marks, ripple_times, sampling_frequency)
    logger.info('Predicting replay types')
    results = [
        decoder.predict(ripple_marks, time.total_seconds())
        for ripple_marks, time in test_marks
    ]

    return summarize_replay_results(results, ripple_times, position_info,
                                    epoch_key)
import numpy as np
import pandas as pd
import statsmodels.api as sm
from patsy import dmatrix

from loren_frank_data_processing import (get_interpolated_position_dataframe,
                                         get_spike_indicator_dataframe,
                                         make_epochs_dataframe,
                                         make_neuron_dataframe,
                                         make_tetrode_dataframe)
from src.parameters import ANIMALS
from time_rescale import TimeRescaling

########################## Loading Data #######################################
epoch_info = make_epochs_dataframe(ANIMALS)
tetrode_info = make_tetrode_dataframe(ANIMALS)

epoch_key = ('HPa', 6, 2)
tetrode_key = ('HPa', 6, 2, 5)
neuron_info = make_neuron_dataframe(ANIMALS)
neuron_key = ('HPa', 6, 2, 5, 2)

spike = get_spike_indicator_dataframe(neuron_key, ANIMALS)
position_info = get_interpolated_position_dataframe(epoch_key, ANIMALS)
linear_position = position_info['linear_position']
x_pos = position_info['x_position']
y_pos = position_info['y_position']
speed = position_info['speed']
head_direction = position_info['head_direction']
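
# Hedged sketch of the step the imports above point toward: a Poisson GLM of
# spiking against a spline basis of linear position, fit with statsmodels.
# The spline degrees of freedom (df=5) and the column names are assumptions,
# not the original analysis.
fit_data = pd.concat([spike, linear_position], axis=1).dropna()
fit_data.columns = ['is_spike', 'linear_position']
design_matrix = dmatrix('bs(linear_position, df=5)', fit_data,
                        return_type='dataframe')
fit = sm.GLM(fit_data.is_spike, design_matrix,
             family=sm.families.Poisson()).fit()
print(fit.summary())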
Example no. 16
def load_data(epoch_key):
    logger.info('Loading position information and linearizing...')
    position_info = (get_interpolated_position_info(epoch_key, ANIMALS)
                     .dropna(subset=["linear_position"]))
    track_graph = get_track_graph()

    logger.info('Loading multiunits...')
    tetrode_info = make_tetrode_dataframe(
        ANIMALS, epoch_key=epoch_key)

    def n_dead_chans(x):
        if isinstance(x, float):
            return 1
        elif isinstance(x, (list, tuple, np.ndarray)):
            return len(x)

    # Temporarily remove these tetrodes that have bad marks in peanut, 14, 4
    bad_trode = [9, 16, 21]

    tetrode_keys = tetrode_info.loc[
        (tetrode_info.area.str.upper() == "CA1")
        & (tetrode_info.deadchans.apply(lambda x: n_dead_chans(x)) < 4)
        & ~tetrode_info.nTrode.isin(tetrode_info.ref.dropna().unique())
        & ~tetrode_info.nTrode.isin(bad_trode)
    ].index

    def _time_function(*args, **kwargs):
        return position_info.index

    multiunits = get_all_multiunit_indicators(
        tetrode_keys, ANIMALS, _time_function)

    multiunit_spikes = (np.any(~np.isnan(multiunits.values), axis=1)
                        ).astype(float)
    multiunit_firing_rate = pd.DataFrame(
        get_multiunit_population_firing_rate(
            multiunit_spikes, SAMPLING_FREQUENCY), index=position_info.index,
        columns=['firing_rate'])

    logger.info('Loading sorted spikes...')
    neuron_info = make_neuron_dataframe(ANIMALS).xs(
        epoch_key, drop_level=False)
    neuron_info = pd.merge(
        neuron_info,
        tetrode_info.loc[:, ["area", "suparea"]],
        left_index=True,
        right_index=True,
    )
    neuron_info = (neuron_info
                   .loc[neuron_info.accepted.astype(bool)]  # only accepted
                   .dropna(subset=["area"]))  # drop cells that don't have area

    spikes = get_all_spike_indicators(
        neuron_info.index, ANIMALS, _time_function)

    return {
        'position_info': position_info,
        'tetrode_info': tetrode_info,
        'multiunits': multiunits,
        'multiunit_firing_rate': multiunit_firing_rate,
        'neuron_info': neuron_info,
        'spikes': spikes,
        'track_graph': track_graph,
    }
import numpy as np
import pandas as pd
import scipy.signal as scis

from loren_frank_data_processing import (get_interpolated_position_dataframe,
                                         get_LFP_dataframe,
                                         get_spike_indicator_dataframe,
                                         make_epochs_dataframe,
                                         make_neuron_dataframe,
                                         make_tetrode_dataframe)
from src.parameters import ANIMALS

epoch_info = make_epochs_dataframe(ANIMALS)

epoch_key = ('HPa', 3, 2)
tetrode_info = make_tetrode_dataframe(ANIMALS).xs(epoch_key, drop_level=False)
tetrode_key = ('HPa', 3, 2, 4)

neuron_info = make_neuron_dataframe(ANIMALS).xs(epoch_key, drop_level=False)
neuron_key = ('HPa', 3, 2, 4, 3)

spike = get_spike_indicator_dataframe(neuron_key, ANIMALS)
position_info = get_interpolated_position_dataframe(epoch_key, ANIMALS)
linear_distance = position_info['linear_distance']
x_pos = position_info['x_position']
y_pos = position_info['y_position']
speed = position_info['speed']
head_direction = position_info['head_direction']
eeg = get_LFP_dataframe(tetrode_key, ANIMALS)

spike = spike.to_frame('is_spike')
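
# Hedged continuation sketch: the scipy.signal import and the LFP loaded above
# suggest a spectrogram of the LFP comes next. The 1500 Hz sampling rate
# matches the value used elsewhere in these examples; the one-second window
# (nperseg=1500) is an assumption.
frequencies, spectrogram_time, spectrogram = scis.spectrogram(
    np.asarray(eeg).squeeze(), fs=1500, nperseg=1500)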
Example no. 18
from collections import namedtuple

import loren_frank_data_processing as lfdp

# Use a sequence (not a set) so the namedtuple field order is well defined.
Animal = namedtuple('Animal',
                    ['short_name', 'directory', 'preprocessing_directory'])
animals = {
    'JZ1':
    Animal(short_name='JZ1',
           directory='../Raw-Data/JZ1',
           preprocessing_directory='../Raw-Data/JZ1')
}
date = 20161114
epoch_index = ('JZ1', 1, 2)
tets = [21]
areas = ['ca1']

full_tetrode_info = lfdp.make_tetrode_dataframe(animals)

multiunit_data = [
    lfdp.get_multiunit_indicator_dataframe(tetindex, animals).values
    for tetindex in full_tetrode_info.xs(epoch_index, drop_level=False).query(
        'area.isin(@areas) & tetrode_number.isin(@tets)').index
]

position_variables = ['linear_distance', 'trajectory_direction', 'speed']

position_info = lfdp.get_interpolated_position_dataframe(epoch_index, animals)

train_position_info = position_info.query('speed > 4')

# marks = train_position_info.join(multiunit_data[0])