Example 1
    def sig_bands(self, stats='ks', weeks=['C01', 'C24']):
        self.ks_stats = nestdict()
        self.week_distr = nestdict()

        for pt in self.pts:
            for ff in self.bands:
                print('Computing ' + ff)
                _, self.ks_stats[pt][ff], self.week_distr[pt][
                    ff] = self.feat_frame.scatter_state(weeks=weeks,
                                                        pt=pt,
                                                        feat=ff,
                                                        circ=self.circ,
                                                        plot=False,
                                                        plot_type='scatter',
                                                        stat=stats)

        self.K_weeks = weeks
        self.K_stat_type = stats
        # Rows: patients; columns (order='F'): the left-side bands, then the right-side bands
        self.K_stats = np.array(
            [[[self.ks_stats[pt][band][side][0] for side in ['Left', 'Right']]
              for band in self.bands] for pt in self.pts]).reshape(6, -1, order='F')
        self.P_val = np.array(
            [[[self.ks_stats[pt][band][side][1] for side in ['Left', 'Right']]
              for band in self.bands] for pt in self.pts]).reshape(6, -1, order='F')

        #Do the change values here too
        self.pre_feat_vals = np.array([[[
            self.week_distr[pt][band][side][self.K_weeks[0]]
            for side in ['Left', 'Right']
        ] for band in self.bands] for pt in self.pts]).reshape(6,
                                                               -1,
                                                               order='F')
        self.post_feat_vals = np.array([[[
            self.week_distr[pt][band][side][self.K_weeks[1]]
            for side in ['Left', 'Right']
        ] for band in self.bands] for pt in self.pts]).reshape(6,
                                                               -1,
                                                               order='F')
Example 2
    def find_pt_extremes(self):
        hdrs_info = nestdict()
        week_labels = ClinFrame.week_labels()

        for pt in self.pts:
            # HDRS17 trajectory with the first 8 timepoints dropped
            pt_hdrs_traj = list(ClinFrame.DSS_dict['DBS' + pt]['HDRS17raw'])[8:]

            # Locate the extrema once and reuse the indices
            max_idx = np.argmax(pt_hdrs_traj)
            min_idx = np.argmin(pt_hdrs_traj)

            hdrs_info[pt]['max']['index'] = max_idx
            hdrs_info[pt]['min']['index'] = min_idx
            hdrs_info[pt]['max']['week'] = week_labels[max_idx + 8]
            hdrs_info[pt]['min']['week'] = week_labels[min_idx + 8]
            hdrs_info[pt]['max']['HDRSr'] = pt_hdrs_traj[max_idx]
            hdrs_info[pt]['min']['HDRSr'] = pt_hdrs_traj[min_idx]
            hdrs_info[pt]['traj']['HDRSr'] = pt_hdrs_traj
Example 3
#import sys
#sys.path.append('/home/virati/Dropbox/projects/Research/MDD-DBS/Ephys/DBSpace/')
import DBSpace as dbo
from DBSpace import nestdict
from DBSpace.control.stream_dEEG import streamEEG

import itertools
from sklearn.metrics import confusion_matrix

import matplotlib.pyplot as plt
import numpy as np

import pickle
import seaborn as sns

perf_dict = nestdict()

pts = ['906']
condits = ['Volt']
class_type = 'l1'
#%%
pt_test = [None] * len(pts)
pt_test_labels = [None] * len(pts)

for pp, pt in enumerate(pts):
    pt_test[pp] = []
    pt_test_labels[pp] = []

    for condit in condits:
        print('Doing ' + pt + ' ' + condit)
Example 4
None does not apply any detrending.
All applies a single linear detrend across all concatenated observations; this is not meaningful across separate recordings and will be removed.
'''
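
# Sketch of the two detrend modes described above (illustrative only; the synthetic
# `_demo_segments` data is not part of this pipeline):
import numpy as np
import scipy.signal as sig

_demo_segments = [np.random.randn(200) + 0.01 * np.arange(200) for _ in range(3)]
_per_observation = [sig.detrend(s, type='linear') for s in _demo_segments]  # detrend each recording on its own
_across_all = sig.detrend(np.concatenate(_demo_segments), type='linear')  # the 'All' mode flagged for removal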

# Initial
# Now we set up our DBSpace environment
#ClinFrame = ClinVect.CFrame(norm_scales=True)
ClinFrame = ClinVect.CStruct()
#BRFrame = BRDF.BR_Data_Tree(preFrame='Chronic_Frame.pickle')
BRFrame = pickle.load(
    open(
        '/home/virati/Dropbox/projects/Research/MDD-DBS/Data/Chronic_FrameMay2020.pickle',
        "rb"))
do_shuffled_null = False
#%%
pt_coeff = nestdict()
for do_pt in do_pts:
    main_readout = decoder.var_decoder(BRFrame=BRFrame,
                                       ClinFrame=ClinFrame,
                                       pts=do_pt,
                                       clin_measure=test_scale,
                                       shuffle_null=False,
                                       FeatureSet='main')
    main_readout.filter_recs(rec_class='main_study')
    main_readout.split_train_set(0.6)

    # The shuffled-null slopes are needed for the p-value printed below
    null_slopes, null_r2 = main_readout.model_analysis(do_null=True, n_iter=100)
    main_slope, main_r2 = main_readout.model_analysis()

    print(do_pt + ' Slope: ' + str(main_slope))
    print('p<' + str(np.sum(null_slopes > main_slope[0]) / 100))
Example 5
                    #filter the two
                    plt.plot(tvec[sel_tvec], Lchann)
                    plt.plot(tvec[sel_tvec], Rchann)

                    plt.subplot(313)
                    #plt.scatter(Lchann,Rchann,c=tvec[sel_tvec],marker='.',cmap=cm,alpha=0.1)
                    plt.xlim((-5, 5))
                    plt.ylim((-5, 5))
                    plt.title('Phase Portrait')

                    chirp['Raw'] = [Lchann, Rchann]
    return chirp


#%%
SGs = nestdict()
for mm, modal in enumerate(['LFP']):
    for pp, pt in enumerate(['905']):
        SGs[modal][pt] = defaultdict(dict)
        for cc, condit in enumerate(['OnTarget', 'OffTarget']):
            Data = ts.import_BR(Ephys[pt][condit]['Filename'], snip=(0, 0))
            #Data = dbo.load_BR_dict(Ephys[modal][pt][condit]['Filename'],sec_end=0)
            #Compute the TF representation of the above imported data
            F, T, SG, BANDS = Data.compute_tf()
            SG_Dict = dbo.gen_SG(Data.extract_dict(), overlap=False)
            #Fvect = dbo.calc_feats()
            #for iv, interval in enumerate():

            [datatv, dataraw] = Data.raw_ts()
Example 6
# 3d plotting fun
from mayavi import mlab

import numpy as np
import scipy.ndimage as ndimage
import json

from DBSpace import nestdict

#%%

with open("../../assets/experiments/metadata/Targeting_Conditions.json", "r") as file:
    ephys_meta = json.load(file)


#%%
Ephys = nestdict()
Phase = "TurnOn"
if Phase == "TurnOn":
    Ephys["901"]["OnTarget"][
        "Filename"
    ] = "/home/virati/MDD_Data/BR/901/Session_2014_05_16_Friday/DBS901_2014_05_16_17_10_31__MR_0.txt"
    Ephys["901"]["OffTarget"][
        "Filename"
    ] = "/home/virati/MDD_Data/BR/901/Session_2014_05_16_Friday/DBS901_2014_05_16_16_25_07__MR_0.txt"
    Ephys["901"]["OnTarget"]["segments"]["Bilat"] = (600, 630)
    Ephys["901"]["OnTarget"]["segments"]["PreBilat"] = (500, 530)
    Ephys["901"]["OffTarget"]["segments"]["Bilat"] = (600, 630)
    Ephys["901"]["OffTarget"]["segments"]["PreBilat"] = (480, 510)

    Ephys["901"]["OffTarget"]["segments"]["C1"] = (
        Ephys["901"]["OffTarget"]["segments"]["Bilat"][0],
Example 7
import json

import DBSpace as dbo
import matplotlib.pyplot as plt
import numpy as np
import scipy.ndimage as ndimage
import scipy.signal as sig
from DBSpace import nestdict
from matplotlib import cm

#%%
# Bring in experiment meta file

with open("DO_landmarks.json", "r") as file:
    Ephys = json.load(file)

SGs = nestdict()
pt_list = ["907"]
for pp, pt in enumerate(pt_list):
    for cc, condit in enumerate(["OnTarget", "OffTarget"]):
        Data_In = dbo.load_BR_dict(Ephys[pt][condit]["Filename"], sec_offset=0)

        SGs[pt][condit] = dbo.gen_SG(Data_In)
#%%
# Below is obviously broken for non-906 since the segment 'C's aren't defined
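# A possible guard for the issue noted above (sketch only; assumes DO_landmarks.json
# mirrors the Ephys[pt][condit]["segments"] layout used in the other scripts):
#   do_segs = [s for s in ["C1", "C2", "C3", "C4"]
#              if s in Ephys[pt][condit].get("segments", {})]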
for pp, pt in enumerate(pt_list):
    plt.figure()
    for cc, condit in enumerate(["OffTarget"]):
        do_segs = ["C1", "C2", "C3", "C4"]
        for seg in do_segs:
            # find indices for times
            start_idx = min(
Example 8
all_pts = ['901', '903', '905', '906', '907', '908']
all_condits = ['OnT', 'OffT']
all_sides = ['L', 'R', 'L+R']
voltage = '3'

DO_all = itertools.product(all_pts, all_condits, all_sides)
DO_positive = [
    ('901', 'OnT', 'L'), ('901', 'OnT', 'L+R'), ('901', 'OffT', 'L'),
    ('901', 'OffT', 'L+R'), ('903', 'OffT', 'L'), ('903', 'OffT', 'L+R'),
    ('905', 'OnT', 'R'), ('905', 'OnT', 'L+R'), ('905', 'OffT', 'R'),
    ('905', 'OffT', 'L+R'), ('906', 'OffT', 'R'), ('906', 'OffT', 'L+R')
]  #This reflects the STIM conditions that evoked DOs

DO_negative = [x for x in DO_all if x not in DO_positive]
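# 6 patients x 2 conditions x 3 sides = 36 combinations, so 24 tuples remain in
# DO_negative after removing the 12 DO-positive ones.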

dti_file = nestdict()
data = nestdict()
tractos = nestdict()

data_arr = np.zeros((6, 2, 2, 182, 218, 182))
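# data_arr: patients x conditions x sides x a 182 x 218 x 182 voxel volume
# (dimensions consistent with 1 mm MNI152 space)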
combined = nestdict()

#I think I'm trying to incorporate 3d brain model?
#fsaverage = datasets.fetch_surf_fsaverage5()

for pp, pt in enumerate(all_pts):
    for cc, condit in enumerate(['OnT', 'OffT']):
        for ss, side in enumerate(['L', 'R']):
            cntct = Etrode_map[condit][pt][ss] + 1
            dti_file[pp][condit][
                side] = '/home/virati/Dropbox/projects/Research/MDD-DBS/Data/Anatomy/DTI/MDT_DBS_2_7V_Tractography/DBS' + str(
Example 9
do_calc = 'mean'

do_side = 'Left'

#%%

def normalize(x):
    return x / (np.max(x) + 1)


chann_label = ['Left', 'Right']
side_idx = {'Left': 0, 'Right': 1}

# the time periods being studied as the 'voltage' times
stim_vs = {0: (10, 30), 2: (70, 90), 4: (130, 140), 6: (190, 210), 8: (260, 280)}
do_stimvs = [0, 2, 4, 6, 8]

exp_results = nestdict()

preproc_flows = [['Pxx', 'Osc'], ['Pxx_corr', 'Osc_corr']]
do_feats = {
    'Standard': ['Delta', 'Theta', 'Alpha', 'Beta', 'Gamma'],
    'Adjusted': ['Delta', 'Theta', 'Alpha', 'Beta*', 'Gamma1']
}

for gel, fname in v_files.items():
    _ = spot_check(fname, tlims=(0, -1), plot_sg=True)

    # Go to each voltage in the sweep
    for stim_v, iv in stim_vs.items():
        exp_results[stim_v] = spot_check(fname, tlims=iv, plot_sg=False)
        plt.suptitle(gel)

        # try some time-domain corrections
        #precorr_td = {chann:normalize(exp_results[stim_v]['TS'][:,cc]) for cc,chann in enumerate(chann_label)}
Example 10
    def day_vs_nite(self):

        fdnmeta = self.BRFrame.file_meta
        Circ = {'day': [], 'night': []}

        bands = dbo.feat_order
        #feats['Left'] = [((rr['FeatVect'][feat]['Left']),rr['Circadian']) for rr in fdnmeta]
        #feats['Right'] = [((rr['FeatVect'][feat]['Right']),rr['Circadian']) for rr in fdnmeta]

        sleeps = ['day', 'night']

        for light in sleeps:
            Circ[light] = [[[(rr['FeatVect'][feat]['Left'],
                              rr['FeatVect'][feat]['Right'])
                             for feat in dbo.feat_order] for rr in fdnmeta
                            if rr['Patient'] == pt and rr['Circadian'] == light
                            ] for pt in self.do_pts]
        #night_recs = [[[(rr['FeatVect'][feat]['Left'],rr['FeatVect'][feat]['Right']) for feat in dbo.feat_order] for rr in fdnmeta if rr['Patient'] == pt and rr['Circadian'] == 'night'] for pt in self.do_pts]

        #feats['Left'] = [[((rr['FeatVect'][feat]['Left']),rr['Circadian']) for rr in fdnmeta if rr['Patient'] == pt] for pt in dbo.all_pts]
        #feats['Right'] = [[((rr['FeatVect'][feat]['Right']),rr['Circadian']) for rr in fdnmeta if rr['Patient'] == pt] for pt in dbo.all_pts]

        #Get a days only list
        #pdb.set_trace()
        pt_day_nite = nestdict()
        for pp, pt in enumerate(self.do_pts):
            day_matr = np.array(Circ['day'][pp]).reshape(-1, 10, order='F')
            night_matr = np.array(Circ['night'][pp]).reshape(-1, 10, order='F')
            for feat in range(10):
                outstat = stats.ranksums(day_matr[:, feat],
                                         night_matr[:, feat])[1]
                outvar = (np.var(day_matr[:, feat]), np.var(night_matr[:,
                                                                       feat]))

                pt_day_nite[pt][feat]['Pval'] = outstat
                pt_day_nite[pt][feat]['Var'] = outvar

        print(pt_day_nite)

        #main_stats = np.array([[[pt_day_nite[pt][feat][side][0] for side in ['Left','Right']] for feat in range(10)] for pt in self.do_pts]).reshape(6,-1,order='F')
        P_val = np.array(
            [[pt_day_nite[pt][feat]['Pval'] for feat in range(10)]
             for pt in self.do_pts])
        dn_var = np.array(
            [[pt_day_nite[pt][feat]['Var'] for feat in range(10)]
             for pt in self.do_pts])

        P_val = P_val.reshape(6, -1, order='F')
        #var = P_val.reshape(6,-1,order='F')

        #plt.subplot(2,1,1)
        #plt.pcolormesh(main_stats);plt.colorbar()
        #plt.xticks(np.arange(10)+0.5,bands + bands,rotation=90)
        #plt.yticks(np.arange(6)+0.5,pts)
        #plt.subplot(2,1,2)
        #plt.pcolormesh((P_val < (0.05)).astype(np.int));plt.colorbar()
        #P_val[P_val > (0.05/10)] = 1

        plt.figure()
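        # Mark the features whose day/night difference survives the 0.05/10 threshold
        # (a Bonferroni-style correction across the 10 features)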
        plt.pcolormesh((P_val < (0.05 / 10)).astype(np.float32),
                       cmap=plt.get_cmap('Set1_r'))
        plt.colorbar()
        plt.yticks(np.arange(6) + 0.5, self.do_pts)
        plt.xticks(np.arange(10) + 0.5, bands + bands, rotation=90)
        plt.title('P-value of Day-Nite Difference')

        plt.figure()
        plt.subplot(211)
        plt.pcolormesh(dn_var[:, :, 0])
        plt.colorbar()

        plt.yticks(np.arange(6) + 0.5, self.do_pts)
        plt.xticks(np.arange(10) + 0.5, bands + bands, rotation=90)
        plt.title('Day')

        plt.subplot(212)
        plt.pcolormesh(dn_var[:, :, 1])
        plt.colorbar()
        plt.yticks(np.arange(6) + 0.5, self.do_pts)
        plt.xticks(np.arange(10) + 0.5, bands + bands, rotation=90)
        plt.title('Night')
Example 11
from DBSpace.readout import ClinVect

import seaborn as sns
#sns.set_context("paper")

sns.set(font_scale=4)
sns.set_style("white")
#Bring in our data first

import numpy as np
import pickle
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle, Circle
from numpy import ndenumerate
from DBSpace import nestdict

## Parameters for the analysis
ks_stats = nestdict()
pts = ['901','903','905','906','907','908']
bands = ['Delta','Theta','Alpha','Beta*','Gamma1']
all_feats = ['L-' + band for band in bands] + ['R-' + band for band in bands]


# for each patient, let's find the highest and lowest HDRS17 value and the week we find it
ClinFrame = ClinVect.CFrame(norm_scales=True)
hdrs_info = ClinFrame.min_max_weeks()
#
#hdrs_info = nestdict()
#week_labels = ClinFrame.week_labels()
#
#for pt in pts:
#    pt_hdrs_traj = [a for a in ClinFrame.DSS_dict['DBS'+pt]['HDRS17raw']][8:]
#    

#%%
# Here, we're going to focus on Alpha only
pt_list = ['906', '907', '908']
condit_list = ['OnT', 'OffT']
clabel = {'OnT': 'BONT', 'OffT': 'BOFT'}
#epochs = ['Off_3',clabel[condit]]
with open('/home/virati/big_coher_matrix.pickle', 'rb') as handle:
    import_dict = pickle.load(handle)
csd_dict = import_dict['CSD']
plv_dict = import_dict['PLV']

#%% Reshape the inputs into matrices for each patient x condition
band_idx = 2
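# band_idx = 2 presumably selects Alpha from the bands list above
# ('Delta', 'Theta', 'Alpha', 'Beta*', 'Gamma1'), matching the Alpha focus noted earlier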
msCoh = nestdict()

coh_stack = nestdict()
for pt in pt_list:
    hist_plots = plt.figure()
    conn_plots = plt.figure()

    for cc, condit in enumerate(condit_list):
        csd_matrix = {'Off_3': [], clabel[condit]: []}
        for epoch in ['Off_3', clabel[condit]]:
            csd_matrix[epoch] = np.swapaxes(
                np.array([[
                    csd_dict[pt][condit][epoch][ii][jj] for jj in range(257)
                ] for ii in range(257)]), 2, 3)

        # Actually, let's return the baseline (Off_3) median subtracted versions of all stim-conditions
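        # Sketch of that baseline correction (an assumption about the intended step and
        # about which axis indexes segments after the swapaxes above, not code from the
        # original file):
        _off3_median = np.median(csd_matrix['Off_3'], axis=-1, keepdims=True)
        _delta_csd = csd_matrix[clabel[condit]] - _off3_median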
Example 13
##PLOTS

#focus_feats =['Delta','Theta','Alpha','Beta']
#Now we're ready for the plots
#%%
#First thing is the per-patient weekly averages plotted for left and right
#_ = analysis.mean_psds(weeks=["C01","C24"],patients='all')

do_weeks = ["C01","C24"]

#%%
# Do comparison of two timepoints here
#analysis.scatter_state(week='all',pt=['908'],feat='SHarm')
#analysis.scatter_state(week='all',pt=['908'],feat='Stim')
ks_stats = nestdict()
pts = ['901','903','905','906','907','908']
bands = ['Delta','Theta','Alpha','Beta*','Gamma1']
all_feats = ['L-' + band for band in bands] + ['R-' + band for band in bands]

circ = 'day'
week_distr = nestdict()
for pt in pts:
    for ff in bands:
        print('Computing ' + ff)
        _, ks_stats[pt][ff], week_distr[pt][ff] = analysis.scatter_state(
            weeks=do_weeks, pt=pt, feat=ff, circ=circ, plot=False,
            plot_type='scatter', stat='ks')
        #plt.title('Plotting feature ' + ff)
    #analysis.scatter_state(week=['C01','C23'],pt='all',feat='Alpha',circ='night',plot_type='boxplot')

# Make our big stats 2d grid for all features across all patients.
# Rows are the 6 patients; with order='F' the 10 columns follow the all_feats ordering
# (the five left-side bands, then the five right-side bands).
K_stats = np.array([[[ks_stats[pt][band][side][0] for side in ['Left', 'Right']]
                     for band in bands] for pt in pts]).reshape(6, -1, order='F')
import matplotlib.pyplot as plt
import pickle
import numpy as np
import cmocean

from sklearn.decomposition import PCA, SparsePCA
import string

import umap
import pdb

from DBSpace import nestdict

pts = ['906', '907']
on_label = {'OnT': 'BONT', 'OffT': 'BOFT'}

csd_dict = nestdict()
plv_dict = nestdict()

for pt in pts:
    with open(
            '/home/virati/Dropbox/projects/Research/MDD-DBS/Data/DBS' + pt +
            '_coh_dict.pickle', 'rb') as handle:
        import_dict = pickle.load(handle)

        csd_dict[pt] = import_dict['CSD'][pt]
        plv_dict[pt] = import_dict['PLV'][pt]

#%%
for pt in pts:
    for condit in ['OnT', 'OffT']:
        for epoch in ['Off_3', on_label[condit]]:
Example 15
plt.vlines(np.median(all_nulls),
           0,
           line_height + 20,
           color='green',
           label='Null',
           linewidth=5)
algo_std = hdrs_sem = np.std(all_nulls)  # spread of the null distribution, used for the bar below
plt.hlines(line_height + 20,
           np.median(all_nulls) - algo_std,
           np.median(all_nulls) + algo_std,
           color='green',
           linewidth=5)

#%%
# Pairwise hypothesis testing: two-sample KS between each pair of algorithm AUC distributions
ks_results = nestdict()
for aa, algo in enumerate(algo_list[0:end_algo]):
    for bb, algo2 in enumerate(algo_list[0:end_algo]):
        ks_results[algo][algo2] = stats.ks_2samp(algo_aucs[:, aa],
                                                 algo_aucs[:, bb])

#%%
#plt.hist(algo_aucs[:,0],stacked=False,color='red',bins=bins,label='HDRS')
#plt.hist(algo_aucs[:,1],stacked=False,color='blue',bins=bins,label='CB')
#plt.hist(algo_aucs[:,2],stacked=False,color='green',bins=bins,label='Uniform')
#
#
#plt.vlines(np.median(algo_aucs[:,0]),0,line_height,color='red',label='HDRS',linewidth=5)
#plt.vlines(np.median(algo_aucs[:,1]),0,line_height,color='blue',label='CB',linewidth=5)
#plt.vlines(np.median(algo_aucs[:,2]),0,line_height,color='green',label='CB',linewidth=5)
#