Example #1
def setup_analysis(self, path, roi_list, directory, conditions, subjects):
    """Load connectivity results and store the resulting dataset on self.ds."""
    self.directory = directory
    self.conditions = conditions
    self.subjects = subjects

    # Load per-condition results and build the dataset.
    conn = ConnectivityLoader(path, self.subjects, self.directory, roi_list)
    conn.get_results(self.conditions)
    self.ds = conn.get_dataset()

    return self
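A minimal usage sketch: the host class name below is hypothetical (the method only needs an object to hang its attributes on), and `subjects`/`roi_list` are assumed to be loaded as in Example #2.

# Illustrative only -- ConnectivityAnalysis is a hypothetical host class.
analysis = ConnectivityAnalysis()
analysis.setup_analysis(path='/media/robbis/DATA/fmri/monks/0_results/',
                        roi_list=roi_list,
                        directory='20140513_163451_connectivity_fmri',
                        conditions=['Samatha', 'Vipassana'],
                        subjects=subjects)
ds = analysis.ds  # dataset built by ConnectivityLoader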
Example #2
# Required imports; ConnectivityLoader, copy_matrix, get_atlas_info,
# aggregate_networks and plot_connectomics are assumed to come from the
# surrounding project modules (not shown here).
import os

import numpy as np


def write_correlation_matrices(directory, condition):
    # Subject attributes and ROI definitions for the FindLab atlas.
    subjects = np.loadtxt(
        '/media/robbis/DATA/fmri/monks/attributes_struct.txt', dtype=str)

    roi_list = np.loadtxt(
        '/media/robbis/DATA/fmri/templates_fcmri/findlab_rois.txt',
        delimiter=',',
        dtype=str)

    path = '/media/robbis/DATA/fmri/monks/0_results/'
    conn = ConnectivityLoader(path, subjects, directory, roi_list)
    nan_mask = conn.get_results(['Samatha', 'Vipassana'])
    #nan_mask = conn.get_results(['Rest'])
    ds = conn.get_dataset()
    # Mask of usable (non-NaN) connections; keep only the upper triangle.
    mask_ = np.logical_not(np.asarray(nan_mask, dtype=bool)).astype(np.float64)
    mask_ = np.triu(mask_, k=1)
    mask_indices = np.nonzero(mask_)

    # Mean connectivity of the expert group ('E') for the requested condition,
    # written back into the upper-triangular mask and expanded to a full matrix.
    ds_ = ds[np.logical_and(ds.targets == condition, ds.sa.groups == 'E')]

    array_ = ds_.samples.mean(0)

    mask_[mask_indices] = array_
    matrix = np.nan_to_num(copy_matrix(mask_, diagonal_filler=0))

    names_lr, colors_lr, index_, coords, networks = get_atlas_info('findlab')

    plot_connectomics(
        matrix,
        20 + 8 * np.abs(matrix.sum(axis=1))**2,
        save_path=os.path.join(path, directory),
        prename=condition + '_correlation',
        save=True,
        colormap='bwr',
        vmin=np.abs(matrix).max() * -1,
        vmax=np.abs(matrix).max(),
        node_names=names_lr,
        node_colors=colors_lr,
        node_coords=coords,
        node_order=index_,
        networks=networks,
        threshold=0.5,
        title=condition + ' Correlation',
        zscore=False,
    )

    # Aggregate edge weights at the network level (one node per network).
    w_aggregate = aggregate_networks(matrix, roi_list.T[-2])
    _, idx = np.unique(networks, return_index=True)

    plot_connectomics(w_aggregate,
                      5 * np.abs(w_aggregate.sum(axis=1))**2,
                      save_path=os.path.join(path, directory),
                      prename=condition + '_aggregate_correlation',
                      save=True,
                      colormap='bwr',
                      vmin=-1 * w_aggregate.max(),
                      vmax=w_aggregate.max(),
                      node_names=np.unique(networks),
                      node_colors=colors_lr[idx],
                      node_coords=coords[idx],
                      node_order=np.arange(0, len(idx)),
                      networks=np.unique(networks),
                      threshold=4,
                      zscore=False)
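A short call sketch for the function above; the directory name is the one used in the decoding example further down, and the two condition labels match the `get_results` call inside the function.

# Illustrative call, reusing names that appear elsewhere in this listing.
for condition in ['Samatha', 'Vipassana']:
    write_correlation_matrices('20140513_163451_connectivity_fmri', condition)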
Example #3
        # Fragment: tail of an earlier loop; `r`, `g`, `cv` and `ds` are defined in code not shown.
        print(r + " ----- " + g)
        err = cv(ds[ds.sa.meditation == g])
        print(cv.ca.stats)

##########################################
file_ = open(os.path.join("/media/robbis/DATA/fmri/monks/",
                          "0_results",
                          "results_decoding_new.txt"), "w")
line_ = ""
results_dir = ["20140513_163451_connectivity_fmri"]
# results_dir = ['20151030_141350_connectivity_filtered_first_no_gsr_findlab_fmri']

for r in results_dir:
    print "··········· " + r + " ·············"
    conn = ConnectivityLoader(path, subjects, r, roi_list)
    nan_mask = conn.get_results(["Samatha", "Vipassana"])
    # nan_mask = conn.get_results(['Rest'])
    ds = conn.get_dataset()
    """
    fx = mean_group_sample(['subjects', 'meditation'])
    ds = ds.get_mapped(fx)  
    """
    clf = LinearCSVMC(C=1)
    # clf = RbfCSVMC()
    ds.targets = ds.sa.groups

    # ds.samples = decomposition.KernelPCA(kernel="poly", n_components=30).fit_transform(ds.samples)

    # ANOVA-based feature selection: keep the upper 1% most discriminative features.
    fsel = SensitivityBasedFeatureSelection(
        OneWayAnova(),
        FractionTailSelector(0.01, mode="select", tail="upper"))

    fclf = FeatureSelectionClassifier(clf, fsel)

    ds.samples = sscore(ds.samples, axis=1)  # sscore: presumably a project-level, sample-wise standardisation helper
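The snippet stops before the classifier is evaluated, although its first lines already call a `cv` object and print `cv.ca.stats`, which matches PyMVPA's CrossValidation. A minimal sketch of how the loop body could continue, assuming a leave-one-subject-out split; the partitioning attribute and the write to `file_` are assumptions, not the author's code.

    # Sketch (assumption): leave-one-subject-out evaluation of fclf.
    from mvpa2.suite import CrossValidation, NFoldPartitioner
    cv = CrossValidation(fclf,
                         NFoldPartitioner(attr='subjects'),
                         enable_ca=['stats'])
    err = cv(ds)
    print(cv.ca.stats)
    file_.write(str(cv.ca.stats))  # presumably collected in results_decoding_new.txt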
Example #4
# `subjects`, `path`, `roi_list`, `r`, `conditions` and `group_` are assumed to be
# defined earlier in the original script (see the previous examples).
style_ = 'Samatha'

cv_repetitions = 250
cv_fraction = 0.5

# Cross-validation over subjects (pre-0.18 scikit-learn ShuffleSplit signature).
num_exp_subjects = subjects[subjects.T[1] == group_].shape[0]
cv = ShuffleSplit(num_exp_subjects,
                  n_iter=cv_repetitions,
                  test_size=cv_fraction)
algorithm = SVR(kernel='linear', C=1)

# Load data
conn = ConnectivityLoader(path, subjects, r, roi_list)
conn.get_results(conditions)
ds = conn.get_dataset()
ds = ds[np.logical_and(ds.sa.meditation == style_, ds.sa.groups == group_)]

# Select data
X = ds.samples
y = np.asarray(ds.sa.expertise, dtype=float) * 0.01  # regression target, rescaled

# preprocess
X_ = zscore(X, axis=1)  # Sample-wise
y_ = zscore(y)

# Correlation of each feature with the target (project-level helper; API inferred from usage).
c = Correlation(X_)
corr = c.transform(X_, y_)[0]

arg_ = np.argsort(np.abs(corr))[::-1]
arg_ = arg_[:80]  # keep the 80 features most correlated (in absolute value) with the target
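The snippet ends here, so the evaluation step is missing. A hedged sketch under the assumption that the selected features feed the linear SVR defined above, scored fold by fold with a Pearson correlation over the ShuffleSplit partitions (the pre-0.18 ShuffleSplit object is directly iterable).

# Sketch (assumption): fit and score the SVR on the top-correlated features.
X_sel = X_[:, arg_]  # keep only the selected columns
fold_scores = []
for train, test in cv:
    algorithm.fit(X_sel[train], y_[train])
    y_pred = algorithm.predict(X_sel[test])
    fold_scores.append(np.corrcoef(y_pred, y_[test])[0, 1])

print(np.mean(fold_scores))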