def get_data(self, balanced=1, batch_size=20, tra_val_split=0.8, use_validation=True):
    """Prepare train/validation/test splits and batched tf.data pipelines."""
    self.use_validation = use_validation
    Data = DATA()
    Data.Fetch_OASIS(balanced=balanced)
    Data.Train_Test(TRAIN_TEST_SPLIT, random=RANDOM_SEED)
    selectors = self.ROI_nums
    Data.Add_MRI(selectors)
    Data.Split_Data()

    # GET TRAINING AND TEST SETS
    X = Data.features_train
    y = Data.labels_train
    y[y > 0] = 1   # binarize labels: positive -> 1, non-positive -> 0
    y[y <= 0] = 0
    y.shape = (len(y), 1)
    X_test = Data.features_test
    y_test = Data.labels_test
    y_test[y_test > 0] = 1
    y_test[y_test <= 0] = 0
    y_test.shape = (len(y_test), 1)
    self.y_test = y_test
    self.X_test = X_test

    # SPLIT TRAINING INTO TRAINING/VALIDATION
    len_yt = y.shape[0]
    if use_validation:
        training_size = floor(tra_val_split * len_yt)
    else:
        # keep a single sample aside so the validation dataset is never empty
        training_size = len_yt - 1
    y_tra = y[:training_size]
    X_tra = X[:training_size, ...]
    y_val = y[training_size:]
    X_val = X[training_size:, ...]

    # CREATE TENSORFLOW DATASETS
    self.train_ds = tf.data.Dataset.from_tensor_slices(
        (X_tra, y_tra)).shuffle(3000).batch(batch_size)
    if use_validation:
        self.val_ds = tf.data.Dataset.from_tensor_slices(
            (X_val, y_val)).batch(batch_size)
    else:
        self.val_ds = tf.data.Dataset.from_tensor_slices(
            (X_val, y_val)).batch(1)
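# Usage sketch (not from the original source): once get_data() has built
# self.train_ds / self.val_ds, they can be passed to Keras directly. The
# `trainer` object, layer sizes, and `n_features` are illustrative
# assumptions; only the dataset attributes come from get_data() above.
import tensorflow as tf

def fit_on_datasets(trainer, n_features, epochs=10):
    # trainer is assumed to be an instance of the class defining get_data()
    trainer.get_data(balanced=1, batch_size=20, use_validation=True)
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(32, activation='relu',
                              input_shape=(n_features,)),
        tf.keras.layers.Dense(1, activation='sigmoid'),  # binary output
    ])
    model.compile(optimizer='adam', loss='binary_crossentropy',
                  metrics=['accuracy'])
    # batched tf.data pipelines plug into fit() without extra batching
    return model.fit(trainer.train_ds, validation_data=trainer.val_ds,
                     epochs=epochs)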
import os

import numpy as np
from sklearn import svm
from sklearn.model_selection import cross_validate

from DataPrep import DATA
from ATLAS import ATLAS

Atlas = ATLAS()
FileDir = os.getcwd() + '/SVM_Outputs/'
file_object = open(FileDir + 'SVM_Log.txt', "a")

# Train one linear SVM per atlas ROI (indices 3..45), sweeping C and keeping
# the value with the best 4-fold cross-validation score.
for i in range(3, 46):
    ROI = Atlas.dataset_files.labels[i]
    FigName = "SVM_" + str(i) + "_" + ROI + ".png"
    Data = DATA()
    Data.Fetch_OASIS(balanced=1)  # load the OASIS cohort before splitting
    Data.Train_Test(0.8)
    selectors = [i]  # single-ROI mask; e.g. [34, 35] for hippocampus, or "brain" for whole brain
    Data.Add_MRI(selectors)
    Data.Split_Data()

    C_space = np.logspace(-4, 1, 50)
    train_score = []
    test_score = []
    print("Feature Size = " + str(Data.features_train.shape[1]))
    print("Started Training for " + FigName + "....")
    for C in C_space:
        SVM = svm.SVC(kernel='linear', C=C)
        cvs = cross_validate(SVM, Data.features_train, Data.labels_train,
                             cv=4, return_train_score=True)
        train_score.append(np.mean(cvs["train_score"]))
        test_score.append(np.mean(cvs["test_score"]))
        # print("C=" + str(C) + " Train=" + str(np.mean(cvs["train_score"]))
        #       + " Valid=" + str(np.mean(cvs["test_score"])))
    maxC = C_space[test_score.index(max(test_score))]
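    # Sketch of the step that plausibly follows inside the per-ROI loop: refit
    # at the selected C, score the held-out test set, append to the log, and
    # save the curve implied by FigName. The matplotlib plotting and the exact
    # log format are assumptions, not taken from the original script.
    import matplotlib.pyplot as plt  # local import kept for this sketch

    best_SVM = svm.SVC(kernel='linear', C=maxC)
    best_SVM.fit(Data.features_train, Data.labels_train)
    test_acc = best_SVM.score(Data.features_test, Data.labels_test)
    file_object.write(ROI + ": C=" + str(maxC) + " test_acc=" + str(test_acc) + "\n")

    plt.figure()
    plt.semilogx(C_space, train_score, label="train (4-fold CV)")
    plt.semilogx(C_space, test_score, label="validation (4-fold CV)")
    plt.xlabel("C")
    plt.ylabel("accuracy")
    plt.title(ROI)
    plt.legend()
    plt.savefig(FileDir + FigName)
    plt.close()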
def get_data(self, balanced=1, tra_val_split=0.8, use_validation=True):
    """Prepare whole-volume gm/wm image inputs plus tabular features as multi-input lists."""
    self.use_validation = use_validation
    Data = DATA()
    Data.Fetch_OASIS(balanced=balanced)
    Data.Train_Test(TRAIN_TEST_SPLIT, random=RANDOM_SEED)
    Data.Split_Data()

    # GET TRAINING AND TEST SETS
    features_train = Data.features_train
    y = Data.labels_train
    y[y > 0] = 1   # binarize labels: positive -> 1, non-positive -> 0
    y[y <= 0] = 0
    y.shape = (len(y), 1)
    features_test = Data.features_test
    y_test = Data.labels_test
    y_test[y_test > 0] = 1
    y_test[y_test <= 0] = 0
    y_test.shape = (len(y_test), 1)

    # LOAD 3D VOLUMES AND ADD A CHANNEL AXIS
    Data.load_images()
    gm_imgs_3D = Data.gm_imgs_3D[..., np.newaxis]
    wm_imgs_3D = Data.wm_imgs_3D[..., np.newaxis]
    idx_train = Data.idx_train
    idx_test = Data.idx_test
    gm_imgs_3D_train = gm_imgs_3D[idx_train, ...]
    wm_imgs_3D_train = wm_imgs_3D[idx_train, ...]
    gm_imgs_3D_test = gm_imgs_3D[idx_test, ...]
    wm_imgs_3D_test = wm_imgs_3D[idx_test, ...]

    # SPLIT TRAINING INTO TRAINING/VALIDATION
    len_yt = y.shape[0]
    if use_validation:
        train_size = floor(tra_val_split * len_yt)
    else:
        train_size = len_yt - 1
    y_tra = y[:train_size]
    features_tra = features_train[:train_size, ...]
    y_val = y[train_size:]
    features_val = features_train[train_size:, ...]
    gm_imgs_3D_val = gm_imgs_3D_train[train_size:, ...]
    wm_imgs_3D_val = wm_imgs_3D_train[train_size:, ...]
    gm_imgs_3D_tra = gm_imgs_3D_train[:train_size, ...]
    wm_imgs_3D_tra = wm_imgs_3D_train[:train_size, ...]

    # ASSEMBLE MULTI-INPUT LISTS: [gm volume, wm volume, features]
    X_tra = [gm_imgs_3D_tra, wm_imgs_3D_tra, features_tra]
    X_val = [gm_imgs_3D_val, wm_imgs_3D_val, features_val]
    X_test = [gm_imgs_3D_test, wm_imgs_3D_test, features_test]

    self.y_test = y_test
    self.X_test = X_test
    self.y_tra = y_tra
    self.X_tra = X_tra
    self.y_val = y_val
    self.X_val = X_val
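# Sketch of a consumer for the three-branch input above (gm volume, wm volume,
# flat features). Layer sizes and the function name are illustrative
# assumptions; only the input ordering [gm, wm, features] is taken from
# get_data().
import tensorflow as tf

def build_two_stream_model(vol_shape, n_features):
    # vol_shape: (D, H, W, 1) after the np.newaxis channel added in get_data()
    gm_in = tf.keras.Input(shape=vol_shape, name="gm")
    wm_in = tf.keras.Input(shape=vol_shape, name="wm")
    feat_in = tf.keras.Input(shape=(n_features,), name="features")

    def stream(x):
        # small shared-architecture 3D conv branch per tissue map
        x = tf.keras.layers.Conv3D(8, 3, activation='relu')(x)
        x = tf.keras.layers.MaxPooling3D(2)(x)
        x = tf.keras.layers.Conv3D(16, 3, activation='relu')(x)
        return tf.keras.layers.GlobalAveragePooling3D()(x)

    merged = tf.keras.layers.Concatenate()(
        [stream(gm_in), stream(wm_in), feat_in])
    out = tf.keras.layers.Dense(1, activation='sigmoid')(merged)
    model = tf.keras.Model([gm_in, wm_in, feat_in], out)
    model.compile(optimizer='adam', loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model

# model.fit(self.X_tra, self.y_tra, validation_data=(self.X_val, self.y_val))
# then works directly, because Keras accepts a list with one array per input.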
def get_data(self, balanced=1, tra_val_split=0.8, use_validation=True):
    """Prepare per-ROI 3D crops (gm and wm) plus tabular features as multi-input lists."""
    self.use_validation = use_validation
    Data = DATA()
    Data.Fetch_OASIS(balanced=balanced)
    Data.Train_Test(TRAIN_TEST_SPLIT, random=RANDOM_SEED)
    Data.Split_Data()

    # GET TRAINING AND TEST SETS
    features_train = Data.features_train
    y = Data.labels_train
    y[y > 0] = 1   # binarize labels: positive -> 1, non-positive -> 0
    y[y <= 0] = 0
    y.shape = (len(y), 1)
    features_test = Data.features_test
    y_test = Data.labels_test
    y_test[y_test > 0] = 1
    y_test[y_test <= 0] = 0
    y_test.shape = (len(y_test), 1)

    # LOAD 3D VOLUMES AND EXTRACT ONE CROP PER SELECTED ROI
    Data.load_images()
    selectors = self.ROI_nums
    Data.get_3D_ROI(selectors)
    ROIs_3D_gm = Data.ROIs_3D_gm
    ROIs_3D_wm = Data.ROIs_3D_wm
    idx_train = Data.idx_train
    idx_test = Data.idx_test

    # split each ROI stack into train/test subjects
    ROIs_3D_gm_train = []
    ROIs_3D_gm_test = []
    ROIs_3D_wm_train = []
    ROIs_3D_wm_test = []
    for k in range(self.len_ROI):
        ROIs_3D_gm_train.append(ROIs_3D_gm[k][idx_train, ...])
        ROIs_3D_gm_test.append(ROIs_3D_gm[k][idx_test, ...])
        ROIs_3D_wm_train.append(ROIs_3D_wm[k][idx_train, ...])
        ROIs_3D_wm_test.append(ROIs_3D_wm[k][idx_test, ...])

    # SPLIT TRAINING INTO TRAINING/VALIDATION
    len_yt = y.shape[0]
    if use_validation:
        train_size = floor(tra_val_split * len_yt)
    else:
        train_size = len_yt - 1
    y_tra = y[:train_size]
    features_tra = features_train[:train_size, ...]
    y_val = y[train_size:]
    features_val = features_train[train_size:, ...]

    train_ROIs_3D_gm = []
    val_ROIs_3D_gm = []
    train_ROIs_3D_wm = []
    val_ROIs_3D_wm = []
    for k in range(self.len_ROI):
        train_ROIs_3D_gm.append(ROIs_3D_gm_train[k][:train_size, ...])
        val_ROIs_3D_gm.append(ROIs_3D_gm_train[k][train_size:, ...])
        train_ROIs_3D_wm.append(ROIs_3D_wm_train[k][:train_size, ...])
        val_ROIs_3D_wm.append(ROIs_3D_wm_train[k][train_size:, ...])

    # ASSEMBLE MULTI-INPUT LISTS: gm ROIs, then wm ROIs, then features
    X_tra = train_ROIs_3D_gm + train_ROIs_3D_wm + [features_tra]
    X_val = val_ROIs_3D_gm + val_ROIs_3D_wm + [features_val]
    X_test = ROIs_3D_gm_test + ROIs_3D_wm_test + [features_test]

    self.y_test = y_test
    self.X_test = X_test
    self.y_tra = y_tra
    self.X_tra = X_tra
    self.y_val = y_val
    self.X_val = X_val
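# Usage sketch for the multi-ROI variant: X_tra is a flat list of
# 2*len_ROI + 1 arrays (gm ROI crops, wm ROI crops, then the feature matrix),
# so the matching Keras model needs one Input per list entry. `model`,
# `trainer`, and the function name are hypothetical; only the list layout
# comes from get_data() above.
def fit_multi_roi(trainer, model, epochs=10, batch_size=20):
    # one array per model input: gm ROIs + wm ROIs + [features]
    assert len(trainer.X_tra) == 2 * trainer.len_ROI + 1
    model.fit(trainer.X_tra, trainer.y_tra,
              validation_data=(trainer.X_val, trainer.y_val),
              epochs=epochs, batch_size=batch_size)
    return model.evaluate(trainer.X_test, trainer.y_test)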