Example #1
# %%
ex0 = [
    'exclude', 'and_GPe', 'and_Str', 'Left', 'Right', 'Other XLS', 'Exclude',
    'zone_1_d1r', '_gpe_muscimol', '_gpe_pbs', 'mW', 'mw'
]

inc = [['AG', 'GPe', 'CAG', 'Arch', 'zone_1'],
       ['AG', 'Str', 'A2A', 'Ai32', 'zone_1'],
       ['AG', 'Str', 'A2A', 'ChR2', 'zone_1']]
exc = [ex0, ex0, ex0]
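
# Note: the inc/exc lists appear to act as keyword filters over experiment
# folder paths (keep paths containing every inc keyword, skip any containing
# an exc keyword). A generic sketch of that matching rule, for illustration
# only -- not the dataloc.raw_csv implementation:
def _path_matches(path_str, inc_kw, exc_kw):
    return all(k in path_str for k in inc_kw) and \
           not any(k in path_str for k in exc_kw)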

basepath = '/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/'
# ethovision_tools.add_dlc_to_csv(basepath,inc,exc,save=True)
pns = dataloc.raw_csv(basepath, inc[1], ex0)
raw, meta = ethovision_tools.csv_load(pns[1], method='preproc')
r, _ = ethovision_tools.csv_load(pns[1])
# raw,meta=ethovision_tools.csv_load(pns[0])
# %% ID Crossings:

ac_on, ac_off = signals.thresh(raw['iz1'].astype(int), 0.5, 'Pos')
min_dur_samps = 0  # or meta['fs'][0] * 4 for a 4-second minimum
all_cross = []
cross_t = []
fs = meta['fs'][0]
for on, off in zip(ac_on, ac_off):
    if (off - on) > min_dur_samps:
        all_cross.append([on, off])
        cross_t.append(on / fs)
durs = np.diff(np.array(all_cross), axis=1) / fs
print('%d crossings detected. Median dur: %1.2fs' %
      (len(all_cross), np.median(durs)))
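
# Minimal numpy-only sketch of the crossing logic above on synthetic data
# (illustration only; not the signals.thresh implementation):
iz_demo = np.array([0, 0, 1, 1, 1, 0, 0, 1, 0], dtype=int)
d_demo = np.diff(iz_demo)
demo_on = np.where(d_demo == 1)[0] + 1    # rising edges: zone entries
demo_off = np.where(d_demo == -1)[0] + 1  # falling edges: zone exits
demo_durs = (demo_off - demo_on) / fs     # crossing durations in seconds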
# inc = [x + ['AG3233_5'] for x in inc]
ethovision_tools.raw_csv_to_preprocessed_csv(newbase,
                                             inc,
                                             exc,
                                             force_replace=True,
                                             win=10)

# %%

pns = dataloc.raw_csv(newbase, inc[0], exc[0])
if not isinstance(pns, list):
    pns = [pns]
saveit = True
closeit = True
for pn in pns:
    df, meta = ethovision_tools.csv_load(pn, columns='All', method='preproc')
    plots.plot_openloop_day(df,
                            meta,
                            save=saveit,
                            close=closeit,
                            save_dir=pn.parent)

# %%

analysis = 'Str_A2a_ChR2_1mw'
behavior_str = '10x'

fn = '%s_openloop_data' % analysis
qans = [
    'AG3233_5',
    'AG3233_4',
]

import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
import matplotlib.gridspec as gridspec
from scipy.stats import sem, t
import pdb
import math
import time

# %% Plot one 10x30 experiment day
inc = [['AG', 'Str', 'A2A', 'Ai32', 'hm4di_cno', '10x10']]
exc = [['exclude', '_and_Str', 'Left', 'Right', 'Other XLS', 'Exclude']]
basepath = '/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/'
pns = dataloc.raw_csv(basepath, inc[0], exc[0])
a = time.time()
df, meta = ethovision_tools.csv_load(pns[2], columns='All', method='raw')
df, meta = behavior.preproc_raw(df, meta)
b = time.time()
print('%2.2f seconds to load' % (b - a))
# raw=ethovision_tools.add_amb_to_raw(raw,meta)

#The magic:
plots.plot_openloop_day(df, meta, save=True)

# %% Debug new metric: meander
#DLC Measure:
dir_smooth = behavior.smooth_direction(raw, meta, use_dlc=True)
dir_smooth_etho = behavior.smooth_direction(raw, meta)

diff_angle = signals.angle_vector_delta(dir_smooth[0:-1],
                                        dir_smooth[1:])
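
# Conceptually, meander builds on the angular change between consecutive
# heading samples. A generic wrapped angle-difference sketch, for illustration
# only (not the signals.angle_vector_delta implementation):
def _angle_delta_deg(a, b):
    d = np.abs(a - b) % 360.0
    return np.where(d > 180.0, 360.0 - d, d)  # smallest angle between headings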
from pathlib import Path
from gittislab import dataloc, ethovision_tools, signals, plots
import matplotlib.pyplot as plt

basepath = '/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/Str/Naive/A2A/Ai32/Bilateral/10x10/'
inc = [['AG5477_4']]
# basepath = '/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/GPe/Naive/CAG/Arch/Right/5x30/'
# inc=[['AG4486_1']]
exc = [['exclude']]
ethovision_tools.unify_raw_to_csv(basepath, inc, exc)
ethovision_tools.raw_csv_to_preprocessed_csv(basepath,
                                             inc,
                                             exc,
                                             force_replace=True)
pns = dataloc.raw_csv(basepath, inc[0], exc[0])
raw, meta = ethovision_tools.csv_load(pns, method='preproc')
ethovision_tools.boris_prep(basepath,
                            inc,
                            exc,
                            plot_cols=['time', 'mouse_height', 'vel'],
                            event_col='rear',
                            event_thresh=0.5,
                            method='preproc')

dlc = ethovision_tools.add_dlc_helper(raw,
                                      meta,
                                      pns.parent,
                                      force_replace=True)
dlc = dlc[0]
mouse_height = (dlc['dlc_rear_centroid_y'] - dlc['dlc_front_centroid_y'])
head_xy = np.array(dlc.loc[:,
)
mean_power = np.mean(dat, axis=0)  # average power across repeated measurements
f = np.polyfit(x, mean_power, deg=2)  # quadratic fit of laser power vs. PWM level
laser_cal_fit = np.poly1d(f)
plt.sca(ax)
plt.plot(x, laser_cal_fit(x), 'r')  # overlay the fitted calibration curve

ax.set_ylabel('Blue laser output (mW)')
ax.set_xlabel('Arduino PWM level')
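
# Self-contained sketch of the same calibration idea with made-up numbers
# (PWM levels and measured powers below are illustrative placeholders):
pwm_demo = np.array([0, 32, 64, 96, 128])
mw_demo = np.array([0.0, 0.4, 1.1, 2.2, 3.6])  # hypothetical measured output (mW)
cal_demo = np.poly1d(np.polyfit(pwm_demo, mw_demo, deg=2))
print('Predicted output at PWM 80: %1.2f mW' % cal_demo(80))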

# %% Load in 10x10 day for comparison:
# pnfn=Path('/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/Str/Naive/A2A/Ai32/Bilateral/10x10_hm4di_saline/AG6846_5_BI040121/Raw_AG6846_5_BI040121.csv')
pnfn = Path(
    '/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/Str/Naive/A2A/Ai32/Bilateral/10x10_hm4di_cno/AG6846_5_BI033021/Raw_AG6846_5_BI033021.csv'
)
raw, meta = ethovision_tools.csv_load(pnfn, columns='All', method='raw')
# Override the stim timing metadata so each stim is treated as a 2 s window:
meta['stim_dur'] = 2
meta['stim_off'] = meta['stim_on'] + 2
meta['stim_mean_dur'] = 2

percentage = lambda x: (np.nansum(x) / len(x)) * 100
m_clip = behavior.stim_clip_grab(raw,
                                 meta,
                                 y_col='im',
                                 stim_dur=2,
                                 baseline=2,
                                 summarization_fun=percentage)
print(np.mean(m_clip['disc'][:, 1]))
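
# The percentage summarizer just reports the fraction of True samples in each
# clip, e.g.:
demo_im = np.array([True, True, False, False])
print(percentage(demo_im))  # -> 50.0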

# %%
Example #6
summary = ethovision_tools.meta_sum_csv(basepath, inc, exc)

# %% Plot comparing im & im2 (improved)

inc = [
    'AG',
    'Str',
    'A2A',
    'Ai32',
    '10x10',
]
exc = ['exclude', 'gpe_', 'mw']
basepath = '/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/'
pns = dataloc.raw_csv(basepath, inc, exc)
for pn in pns:
    raw, meta = ethovision_tools.csv_load(pn, method='preproc')

    plt.figure()
    plt.plot(raw['time'], raw['im'])
    plt.plot(raw['time'], raw['im2'], '--')
    percent_match = sum(raw['im'] & raw['im2']) / sum(raw['im']) * 100
    plt.title('%s r= %1.3f, %2.1f %% Hit' %
              (meta['anid'][0], meta['im_im2_pearson'][0], percent_match))

    basepath = pn.parent
    # boris,raw,meta=ethovision_tools.boris_prep(basepath,[inc],[exc],plot_cols=['time','im','im2'],
    #                             event_col=['im','im2'],event_thresh=0.5, method='preproc')
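
# The hit rate above is the fraction of 'im' samples that 'im2' also flags.
# A toy illustration with made-up boolean vectors:
im_demo = np.array([True, True, True, False])
im2_demo = np.array([True, False, True, True])
print(sum(im_demo & im2_demo) / sum(im_demo) * 100)  # -> ~66.7% of im hits matched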

# %%  Compare ethovision immobility measurement to using DLC side camera stats
# and/or velocity threshold.
use_cols = [
    # (earlier DLC column entries omitted; same column set as use_cols in
    # rear_nn_auroc_perf, Example #11)
    'human_scored_rear',
    'side_length_px',
    'head_hind_5hz_pw',
    'snout_hind_px_height',
    'snout_hind_px_height_detrend',
    'front_hind_px_height_detrend',
    'side_length_px_detrend',
]  #'dlc_front_over_rear_length',

valid_video_pn = test_video_pn

valid_video_boris_obs = test_video_boris_obs

# %% IF needed:
pn = dataloc.raw_csv(test_video_pn[0])
raw, meta = ethovision_tools.csv_load(pn, method='preproc')
raw, meta = ethovision_tools.add_dlc_helper(raw, meta, pn.parent)

# %% Create training and test data from rear_scored videos:
train = model.combine_raw_csv_for_modeling(train_video_pn,
                                           train_video_boris_obs,
                                           use_cols,
                                           rescale=True,
                                           avg_model_detrend=True,
                                           z_score_x_y=True,
                                           flip_y=True)

test = model.combine_raw_csv_for_modeling(test_video_pn,
                                          test_video_boris_obs,
                                          use_cols,
                                          rescale=True,
                                          avg_model_detrend=True,
                                          z_score_x_y=True,
                                          flip_y=True)

Example #8
inc = [
    [
        'AG',
        '10x10_gpe_pbs',
    ],
]
exc = [ex0]
basepath = '/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/'

for ii, ee in zip(inc, exc):
    pns = dataloc.raw_csv(basepath, ii, ee)

    for pn in pns:
        temp = {}
        raw, meta = ethovision_tools.csv_load(pn, method='preproc')
        plots.plot_openloop_day(raw, meta, save=True, close=False)

# %% Combined openloop day summary:
inc = [
    [
        'AG',
        '10x10_gpe_pbs',
    ],
]
exc = [[
    'exclude', 'and_GPe', 'and_Str', 'Left', 'Right', 'Other XLS', 'Exclude',
    'mW', 'mw', 'AG6343'
]]
data = behavior.open_loop_summary_collect(basepath, [inc[0]], [exc[0]])
# %%
Example #9
data = pd.DataFrame([],
                    columns=[
                        'anid', 'proto', 'cell_area_opsin', 'amb_vel',
                        'amb_meander', 'amb_bouts', 'amb_directed'
                    ])
temp = data
min_bout = 1
use_dlc = False
use_cols = ['time', 'vel', 'im', 'dir', 'ambulation', 'meander']
for ii, ee in zip(inc, exc):
    pns = dataloc.raw_csv(basepath, ii, ee)
    for pn in pns:
        temp = {}
        raw, meta = ethovision_tools.csv_load(pn,
                                              columns=use_cols,
                                              method='preproc')
        temp['anid'] = meta['anid'][0]
        temp['cell_area_opsin'] = '%s_%s_%s' % (
            meta['cell_type'][0], meta['stim_area'][0], meta['opsin_type'][0])
        temp['proto'] = meta['protocol'][0]
        stim_dur = round(np.mean(meta['stim_dur']))
        vel_clip = behavior.stim_clip_grab(raw,
                                           meta,
                                           y_col='vel',
                                           stim_dur=stim_dur)
        clip_ave = behavior.stim_clip_average(vel_clip)

        #### Calculate stim-triggered %time mobile:
        percentage = lambda x: (np.nansum(x) / len(x)) * 100
        raw['m'] = ~raw['im']
# %% Print summary of metadata retrieved by query:
# inc = [['AG','A2a','ChR2','Str']] # 'zone_1_30mW'
# exc = [['exclude','_and_SNr','_and_Str','20min_10Hz',
#         'grooming','20min_4Hz','Exclude']]
basepath = '/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/'

summary = ethovision_tools.meta_sum_csv(basepath, inc, exc)
print(summary)
plt.hist(summary.stim_n)
# %% Load example data
inc = [['AG', 'GPe', 'CAG', 'Arch', '10x30']]
exc = [['exclude', '_and_Str', 'Left', 'Right', 'Other XLS', 'Exclude']]
basepath = '/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/'
pns = dataloc.raw_csv(basepath, inc[0], exc[0])
raw, meta = ethovision_tools.csv_load(pns[1])

# %% Attempt to chunk velocity by stim start times:
useday = 'stim'
if useday == 'nostim':
    #Conditioning days with no stim:
    inc = [['AG', 'GPe', 'CAG', 'Arch', 'pp30_cond_dish']]  #
    exc = [['exclude', '_and_Str', 'Left', 'Right', 'pp30_cond_dish_fc_stim']]
else:
    #Days with stim:
    inc = [['AG', 'GPe', 'CAG', 'Arch', 'pp30_cond_dish_fc_stim']]  #
    exc = [['exclude', '_and_Str', 'Left', 'Right']]

basepath = '/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/'
pns = dataloc.raw_csv(basepath, inc[0], exc[0])
summary = ethovision_tools.meta_sum_csv(basepath, inc, exc)
Example #11
def rear_nn_auroc_perf(
        ffn,
        boris_obs,
        prob_thresh=0.5,
        low_pass_freq=None,
        weights_fn='/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/DLC Examples/train_rear_model/bi_rearing_nn_weightsv2',
        tab_fn='/home/brian/Dropbox/Gittis Lab Data/OptoBehavior/DLC Examples/train_rear_model/to_nnv2.pkl',
        rf_model_fn=None):
    '''
    Specify an experiment folder (ffn) and the observation name to use from the
    Rearing Observations.boris file with human-scored rearing data in that
    folder. Assumes the other expected files (raw .csv, metadata, etc.) are in
    that folder and that the videos have already been both 1) scored by a human
    observer and 2) pre-processed with DeepLabCut.
    '''

    use_cols = [
        'vel',
        'area',
        'delta_area',  # 'dlc_front_over_rear_length'
        'dlc_side_head_x',
        'dlc_side_head_y',
        'dlc_front_centroid_x',
        'dlc_front_centroid_y',
        'dlc_rear_centroid_x',
        'dlc_rear_centroid_y',
        'dlc_snout_x',
        'dlc_snout_y',
        'dlc_side_left_fore_x',
        'dlc_side_left_fore_y',
        'dlc_side_right_fore_x',
        'dlc_side_right_fore_y',
        'dlc_side_left_hind_x',
        'dlc_side_left_hind_y',
        'dlc_side_right_hind_x',
        'dlc_side_right_hind_y',
        'dlc_top_head_x',
        'dlc_top_head_y',
        'dlc_top_body_center_x',
        'dlc_top_body_center_y',
        'dlc_top_tail_base_x',
        'dlc_top_tail_base_y',
        'video_resolution',
        'human_scored_rear',
        'side_length_px',
        'head_hind_5hz_pw',
        'snout_hind_px_height',
        'snout_hind_px_height_detrend',
        'front_hind_px_height_detrend',
        'side_length_px_detrend',
    ]

    dep_var = 'human_scored_rear'
    raw, meta = ethovision_tools.csv_load(dataloc.raw_csv(ffn),
                                          columns=['time'],
                                          method='raw')
    boris_fn = Path(ffn).joinpath('Rearing Observations.boris')
    with open(boris_fn, "r") as f:
        boris = json.loads(f.read())
    dat = combine_raw_csv_for_modeling([ffn], [boris_obs],
                                       use_cols,
                                       rescale=True,
                                       avg_model_detrend=True,
                                       z_score_x_y=True,
                                       flip_y=True)
    if 'Unnamed: 0' in dat.columns:
        dat.drop('Unnamed: 0', axis=1, inplace=True)
    dat.fillna(method='ffill', inplace=True)
    dat.fillna(method='bfill', inplace=True)
    pred = tabular_predict_from_nn(tab_fn, weights_fn, xs=dat)

    if isinstance(rf_model_fn, str):
        ensembling = True
        xs = dat.drop(dep_var, axis=1)
        rf_pred = tabular_predict_from_rf(rf_model_fn, xs)
        raw['rf_pred'] = rf_pred
    else:
        ensembling = False
    # pdb.set_trace()

    #
    human_rear_score = behavior.boris_to_logical_vector(
        raw, boris, boris_obs, 'a', 'd')
    raw['human_scored'] = human_rear_score
    raw['nn_pred'] = pred[:, 1]

    if ensembling == True:
        raw['final_pred'] = (raw['nn_pred'] + raw['rf_pred']) / 2
    else:
        raw['final_pred'] = raw['nn_pred']

    #Lowpass filter prediction:
    if low_pass_freq is not None:
        raw['final_pred'] = signals.pad_lowpass_unpad(raw['final_pred'],
                                                      low_pass_freq,
                                                      meta['fs'][0])

    b, r, m = ethovision_tools.boris_prep_from_df(
        raw,
        meta,
        plot_cols=['time', 'final_pred', 'human_scored'],
        event_col=['final_pred'],
        event_thresh=prob_thresh,
    )
    auroc = metrics.roc_auc_score(raw['human_scored'].values.astype(np.int16),
                                  raw['final_pred'].values)
    print('%1.5f AUROC' % auroc)
    return auroc, raw['final_pred'], human_rear_score
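
# A minimal usage sketch for rear_nn_auroc_perf (folder path and observation
# name below are hypothetical placeholders, not files from this repository):
# auroc, final_pred, human_scored = rear_nn_auroc_perf(
#     '/path/to/experiment_folder',
#     'observer_1',
#     prob_thresh=0.5,
#     low_pass_freq=2.0)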
Example #12
def combine_raw_csv_for_modeling(raw_pns,
                                 boris_obs_names,
                                 use_cols,
                                 uniform_boris_fn='Rearing Observations.boris',
                                 rescale=False,
                                 avg_model_detrend=False,
                                 z_score_x_y=False,
                                 flip_y=False,
                                 meta_to_raw=[]):

    combined = pd.DataFrame([], columns=use_cols)

    for pn, obs in zip(raw_pns, boris_obs_names):
        p = Path(pn)
        inc = [['AG']]
        exc = [['exclude']]
        ethovision_tools.unify_raw_to_csv(p, inc, exc)
        ethovision_tools.raw_csv_to_preprocessed_csv(p,
                                                     inc,
                                                     exc,
                                                     force_replace=False)
        pns = dataloc.raw_csv(p, inc[0], exc[0])
        raw, meta = ethovision_tools.csv_load(pns, method='raw')

        fn_ka = p.joinpath(uniform_boris_fn)
        with open(fn_ka, 'r') as f:
            boris = json.loads(f.read())
        # Get human scored rearing events and add as vector!
        human_scored_rearing = behavior.boris_to_logical_vector(
            raw, boris, obs, 'a', 'd')

        dlc = ethovision_tools.add_dlc_helper(raw, meta, p, force_replace=True)
        dlc = dlc[0]
        # pdb.set_trace()
        if meta['exp_room_number'][0] == 228:
            x_scale = 512 / 480
            y_scale = 1.455  # Scale pixels; video takes up only half of the screen in these recordings
            vid_res = np.ones((dlc.shape[0], 1)) * 704  # Video is 704 x 480
            # pdb.set_trace()
        else:
            x_scale = 1
            y_scale = 1
            vid_res = np.ones(
                (dlc.shape[0], 1)) * 1280  # Video width 1280, height 512

        if rescale == True:
            for col in dlc.columns:
                if ('dlc' in col) and ('x' in col):
                    dlc[col] = dlc[col].values * x_scale

                if ('dlc' in col) and ('y' in col):
                    dlc[col] = dlc[col].values * y_scale

        dlc['video_resolution'] = vid_res
        dlc['human_scored_rear'] = human_scored_rearing
        # dlc['head_hind_px_height']= dlc ['dlc_rear_centroid_y'] - dlc['dlc_side_head_y']

        dlc['front_hind_px_height'] = dlc['dlc_rear_centroid_y'] - dlc[
            'dlc_front_centroid_y']
        dlc['head_hind_5hz_pw'] = signals.get_spectral_band_power(
            dlc['front_hind_px_height'], meta['fs'][0], 4.5, 6.5)
        dlc['snout_hind_px_height'] = dlc['dlc_rear_centroid_y'] - dlc[
            'dlc_snout_y']
        dlc['side_length_px'] = signals.calculateDistance(
            dlc['dlc_front_centroid_x'].values,
            dlc['dlc_front_centroid_y'].values,
            dlc['dlc_rear_centroid_x'].values,
            dlc['dlc_rear_centroid_y'].values)
        # dlc['top_length_px']=signals.calculateDistance(dlc['dlc_top_head_x'].values,
        #                                                dlc['dlc_top_head_y'].values,
        #                                                dlc ['dlc_top_tail_base_x'].values,
        #                                                dlc ['dlc_top_tail_base_y'].values)
        #Detrend effect of mouse distance from camera using an average pre-fitted
        #z-score approach:
        if avg_model_detrend == True:
            detrend_cols = [
                'snout_hind_px_height', 'front_hind_px_height',
                'side_length_px'
            ]
            for col in detrend_cols:
                y = dlc[col]
                x = dlc['x']
                dlc[col + '_detrend'] = average_z_score(x, y)

        if z_score_x_y == True:
            for col in dlc.columns:
                if ('dlc' in col) and (('x' in col) or ('y' in col)):
                    temp = dlc[col].values
                    temp = temp - np.nanmean(temp)
                    temp = temp / np.nanstd(temp)
                    dlc[col] = temp

        if flip_y == True:
            for col in dlc.columns:
                if ('dlc' in col) and ('y' in col):
                    dlc[col] = -1 * dlc[col]
                    # dlc[col] = dlc[col] - np.nanmin(dlc[col])

        for col in meta_to_raw:
            dlc[col] = meta[col][0]

        temp = dlc[use_cols]
        combined = pd.concat([combined, temp])

    #Add time lags (?)
    combined.reset_index(drop=True, inplace=True)
    return combined
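
# The avg_model_detrend step above normalizes each height/length measure against
# the mouse's x position (distance from the side camera). A generic per-position
# z-score sketch of that idea, for illustration only (the library's
# average_z_score uses a pre-fitted average model that is not reproduced here):
def _zscore_by_position(x, y, nbins=10):
    edges = np.linspace(np.nanmin(x), np.nanmax(x), nbins)
    bins = np.digitize(x, edges)
    out = np.full(len(y), np.nan)
    for b in np.unique(bins):
        sel = bins == b
        out[sel] = (y[sel] - np.nanmean(y[sel])) / np.nanstd(y[sel])
    return out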
Example #13
def batch_analyze(basepath, inc, exc):
    '''
    Load each preprocessed session under basepath matched by the inc/exc
    keyword filters and compute per-session velocity, %time-mobile,
    ambulation/flight-bout, and immobility-bout metrics, collected into the
    data DataFrame (one row per session).
    '''
    data = pd.DataFrame([],
                        columns=[
                            'anid', 'proto', 'cell_area_opsin', 'amb_vel',
                            'amb_meander', 'amb_bouts', 'amb_directed'
                        ])
    temp = data
    min_bout = 1
    use_dlc = False
    use_cols = ['time', 'vel', 'im', 'dir', 'ambulation', 'meander']
    for ii, ee in zip(inc, exc):
        pns = dataloc.raw_csv(basepath, ii, ee)
        for pn in pns:
            temp = {}
            raw, meta = ethovision_tools.csv_load(pn,
                                                  columns=use_cols,
                                                  method='preproc')
            temp['anid'] = meta['anid'][0]
            temp['cell_area_opsin'] = '%s_%s_%s' % (meta['cell_type'][0],
                                                    meta['stim_area'][0],
                                                    meta['opsin_type'][0])
            temp['proto'] = meta['protocol'][0]
            stim_dur = round(np.mean(meta['stim_dur']))
            vel_clip = behavior.stim_clip_grab(raw,
                                               meta,
                                               y_col='vel',
                                               stim_dur=stim_dur)
            clip_ave = behavior.stim_clip_average(vel_clip)

            #### Calculate stim-triggered %time mobile:
            percentage = lambda x: (np.nansum(x) / len(x)) * 100
            raw['m'] = ~raw['im']
            m_clip = behavior.stim_clip_grab(raw,
                                             meta,
                                             y_col='m',
                                             stim_dur=stim_dur,
                                             summarization_fun=percentage)

            #### Calculate ambulation bout properties:
            raw['run'] = (raw['ambulation'] == True) & (raw['vel'] > 5)
            # raw['flight']=(raw['vel'] > (4* np.mean(raw['vel']))) #Flight, Yilmaz & Meister 2013
            raw['flight'] = (raw['vel'] > (3 * np.mean(raw['vel'])))
            if any(raw['run']):
                amb_bouts = behavior.bout_analyze(raw,
                                                  meta,
                                                  'flight',
                                                  stim_dur=stim_dur,
                                                  min_bout_dur_s=min_bout,
                                                  use_dlc=use_dlc)
                temp['amb_meander'] = np.nanmean(amb_bouts['meander'], axis=0)
                temp['amb_directed'] = np.nanmean(amb_bouts['directed'],
                                                  axis=0)
                temp['amb_bouts'] = np.nanmean(amb_bouts['rate'], axis=0)
            else:
                temp['amb_meander'] = [np.nan, np.nan, np.nan]
                temp['amb_directed'] = [np.nan, np.nan, np.nan]
                temp['amb_bouts'] = [0, 0, 0]
            #### Calculate immobile bout properties:
            im_bouts = behavior.bout_analyze(raw,
                                             meta,
                                             'im',
                                             stim_dur=stim_dur,
                                             min_bout_dur_s=min_bout,
                                             use_dlc=use_dlc)

            data = data.append(temp, ignore_index=True)