def main():
    """Entry point: per-file PVC training/prediction over a whole dataset folder.

    For every ``pet/*.nii.gz`` volume under ``./data/dataset/<dir_folder>/``,
    the matching ``mri/<name>_water.nii.gz`` is loaded, synthetic blurred
    training pairs are generated from the MRI, a U-Net is trained and the PET
    volume is corrected.  If ``--model_name`` is given, training is skipped and
    the existing model is used for prediction only.
    """

    def _str2bool(text):
        # argparse's `type=bool` treats ANY non-empty string as True
        # ("--enhance_blur False" would silently enable it); parse the text.
        return str(text).lower() in ('true', '1', 'yes', 'y')

    parser = argparse.ArgumentParser(
        description='''This is a beta script for Partial Volume Correction in PET/MRI system. ''',
        epilog="""All's well that ends well.""")
    parser.add_argument('--dir_folder', metavar='', type=str, default="crohns",
                        help='Name of dataset.(crohns)<str>')
    parser.add_argument(
        '--blur_method', metavar='', type=str, default="nib_smooth",
        help='The blurring method of syn PET(nib_smooth)<str> [kernel_conv/skimage_gaus/nib_smooth]')
    parser.add_argument('--blur_para', metavar='', type=str, default="4",
                        help='Parameters of blurring data(4)<str>')
    parser.add_argument('--slice_x', metavar='', type=int, default=1,
                        help='Slices of input(1)<int>[1/3]')
    parser.add_argument('--enhance_blur', metavar='', type=_str2bool, default=False,
                        help='Whether stack different blurring methods to train the model')
    parser.add_argument('--id', metavar='', type=str, default="eeVee",
                        help='ID of the current model.(eeVee)<str>')
    parser.add_argument('--epoch', metavar='', type=int, default=500,
                        help='Number of epoches of training(2000)<int>')
    parser.add_argument('--n_filter', metavar='', type=int, default=64,
                        help='The initial filter number(64)<int>')
    parser.add_argument('--depth', metavar='', type=int, default=4,
                        help='The depth of U-Net(4)<int>')
    parser.add_argument('--batch_size', metavar='', type=int, default=10,
                        help='The batch_size of training(10)<int>')
    parser.add_argument('--model_name', metavar='', type=str, default='',
                        help='The name of model to be predicted. ()<str>')
    args = parser.parse_args()

    # common setting (shared across every file in the folder)
    model_name = args.model_name
    enhance_blur = args.enhance_blur
    gbl_set_value("depth", args.depth)
    gbl_set_value("n_epoch", args.epoch + 1)  # +1 so the last epoch is actually run
    gbl_set_value("n_filter", args.n_filter)
    gbl_set_value("batch_size", args.batch_size)
    gbl_set_value("slice_x", args.slice_x)

    # file-specific
    dir_folder = './data/dataset/' + args.dir_folder + '/'
    list_pet = glob.glob(dir_folder + 'pet/*.nii.gz')

    for dir_pet in list_pet:
        filename_start = dir_pet.rfind('/')
        # BUGFIX: search for '_' only within the basename; a plain find()
        # could match an underscore earlier in the directory path and
        # produce an empty/garbled subject name.
        filename_end = dir_pet.find('_', filename_start + 1)
        filename = dir_pet[filename_start + 1:filename_end]

        time_stamp = datetime.datetime.now().strftime("-%Y-%m-%d-%H-%M")
        model_id = filename + time_stamp

        dir_mri = dir_folder + 'mri/' + filename + '_water.nii.gz'
        print(dir_pet)
        print(dir_mri)

        dir_syn = './folder_results/' + args.dir_folder + '/synthesized/'
        if not os.path.exists(dir_syn):
            os.makedirs(dir_syn)
        dir_model = './folder_results/' + args.dir_folder + '/models/'
        if not os.path.exists(dir_model):
            os.makedirs(dir_model)

        gbl_set_value("dir_mri", dir_mri)
        gbl_set_value("dir_pet", dir_pet)
        gbl_set_value('dir_syn', dir_syn)
        gbl_set_value('dir_model', dir_model)
        gbl_set_value("model_id", model_id)

        # Load data
        file_pet = nib.load(dir_pet)
        file_mri = nib.load(dir_mri)
        data_pet = file_pet.get_fdata()
        data_mri = file_mri.get_fdata()
        gbl_set_value("img_shape", data_pet.shape)
        print("Loading Completed!")

        if model_name == '':
            # Train a fresh model from synthetically blurred MRI data.
            if not enhance_blur:
                X, Y = data_generator(data_mri, args.blur_method, args.blur_para)
            else:
                X, Y = enhance_data_generator(data_mri)
            print(X.shape)
            print("Blurring Completed!")

            model = train_a_unet(X, Y)
            print("Training Completed!")

            predict(model, data_pet)
            print("Predicting Completed!")

            send_emails(model_id)
            print("Notification completed!")
        else:
            # Reuse an existing model; the id is the file name minus the
            # leading "model" prefix (first 5 characters).
            gbl_set_value("model_id", model_name[5:])
            model = load_existing_model(model_name)
            predict(model, data_pet)
            print("Predicting Completed!")
def main():
    """Entry point: train an MR-to-CT U-Net on one case and test on another.

    Loads the aligned MR (``*Align*.nii``) and CT (``*CT*.nii``) volumes of
    ``--train_case``, min-max normalizes both, trains a U-Net and runs
    prediction on ``--test_case``.
    """
    parser = argparse.ArgumentParser(
        description='''This is a beta script for Partial Volume Correction in PET/MRI system. ''',
        epilog="""All's well that ends well.""")
    parser.add_argument('--train_case', metavar='', type=int, default=1,
                        help='The training dataset case(1)<int>[1,2,3,4]')
    parser.add_argument('--test_case', metavar='', type=int, default=2,
                        help='The testing dataset case(2)<int>[1,2,3,4]')
    # BUGFIX: default was the string "1" on a type=int option; use an int.
    parser.add_argument('--slice_x', metavar='', type=int, default=1,
                        help='Slices of input(1)<int>[1/3]')
    parser.add_argument('--id', metavar='', type=str, default="chansey",
                        help='ID of the current model.(eeVee)<str>')
    parser.add_argument('--epoch', metavar='', type=int, default=240,
                        help='Number of epoches of training(300)<int>')
    parser.add_argument('--n_filter', metavar='', type=int, default=64,
                        help='The initial filter number(64)<int>')
    parser.add_argument('--depth', metavar='', type=int, default=3,
                        help='The depth of U-Net(4)<int>')
    parser.add_argument('--batch_size', metavar='', type=int, default=5,
                        help='The batch_size of training(10)<int>')
    args = parser.parse_args()

    train_case = args.train_case
    test_case = args.test_case

    # Results and model checkpoints share one working directory.
    dir_syn = './walmart/'
    dir_model = './walmart/'
    if not os.path.exists(dir_syn):
        os.makedirs(dir_syn)
    if not os.path.exists(dir_model):
        os.makedirs(dir_model)

    time_stamp = datetime.datetime.now().strftime("-%Y-%m-%d-%H-%M")
    model_id = args.id + time_stamp

    gbl_set_value("depth", args.depth)
    gbl_set_value("dir_syn", dir_syn)
    gbl_set_value("dir_model", dir_model)
    gbl_set_value("model_id", model_id)
    gbl_set_value("n_epoch", args.epoch + 1)  # +1 so the last epoch is actually run
    gbl_set_value("n_filter", args.n_filter)
    gbl_set_value("batch_size", args.batch_size)
    gbl_set_value("slice_x", args.slice_x)

    # Load data: take the last matching file of each modality for the case.
    train_path = './data/MRCT/Case' + str(train_case) + '/'
    test_path = './data/MRCT/Case' + str(test_case) + '/'
    path_X = glob.glob(train_path + '*Align*.nii')[-1]
    path_Y = glob.glob(train_path + '*CT*.nii')[-1]
    file_X = nib.load(path_X)
    file_Y = nib.load(path_Y)
    data_X = file_X.get_fdata()
    data_Y = file_Y.get_fdata()

    # MaxMin-norm: scale both volumes to a common intensity range.
    data_X_norm = MaxMinNorm(data_X)
    data_Y_norm = MaxMinNorm(data_Y)
    gbl_set_value("img_shape", data_X.shape)

    X, Y = write_XY(data_X_norm, data_Y_norm)
    print(X.shape, Y.shape)
    print("Loading Completed!")

    model = train_a_unet(X, Y)
    print("Training Completed!")

    predict_MRCT(model, test_path)
    print("Predicting Completed!")
def main():
    """Entry point: self-supervised U-Net training from pre-saved ``.npy`` arrays.

    Loads ``./data/<X>.npy``, builds (X, Y) training pairs with
    ``sa_data_generator`` and trains a U-Net.  Prediction is currently
    disabled (commented out in the original script).
    """
    parser = argparse.ArgumentParser(
        description='''This is a beta script for Partial Volume Correction in PET/MRI system. ''',
        epilog="""All's well that ends well.""")
    parser.add_argument('--X', metavar='', type=str, default="X_mnist",
                        help='X file name.(X_mnist)<str>')
    parser.add_argument('--Y', metavar='', type=str, default="Y_mnist",
                        help='Y file name.(Y_mnist)<str>')
    # BUGFIX: the body read args.slice_x but no such option was declared,
    # which raised AttributeError at runtime.  Declare it like the sibling
    # scripts do.
    parser.add_argument('--slice_x', metavar='', type=int, default=1,
                        help='Slices of input(1)<int>[1/3]')
    parser.add_argument('--id', metavar='', type=str, default="eeVee",
                        help='ID of the current model.(eeVee)<str>')
    parser.add_argument('--epoch', metavar='', type=int, default=500,
                        help='Number of epoches of training(2000)<int>')
    parser.add_argument('--n_filter', metavar='', type=int, default=64,
                        help='The initial filter number(64)<int>')
    parser.add_argument('--depth', metavar='', type=int, default=4,
                        help='The depth of U-Net(4)<int>')
    parser.add_argument('--batch_size', metavar='', type=int, default=10,
                        help='The batch_size of training(10)<int>')
    args = parser.parse_args()

    # BUGFIX: the parser defines --X/--Y, but the original body read
    # args.dir_X/args.dir_Y (AttributeError).  It also read args.model_name,
    # which this parser never defines; that dead assignment is removed.
    dir_X = './data/' + args.X + '.npy'
    dir_Y = './data/' + args.Y + '.npy'

    time_stamp = datetime.datetime.now().strftime("-%Y-%m-%d-%H-%M")
    model_id = args.id + time_stamp

    gbl_set_value("depth", args.depth)
    gbl_set_value("dir_X", dir_X)
    gbl_set_value("dir_Y", dir_Y)
    gbl_set_value("model_id", model_id)
    gbl_set_value("n_epoch", args.epoch + 1)  # +1 so the last epoch is actually run
    gbl_set_value("n_filter", args.n_filter)
    gbl_set_value("batch_size", args.batch_size)
    gbl_set_value("slice_x", args.slice_x)
    gbl_set_value("flag_bypass", False)

    # Load data
    file_X = np.load(dir_X)
    file_Y = np.load(dir_Y)
    gbl_set_value("img_shape", file_X.shape)
    print("Loading Completed!")

    X, Y = sa_data_generator(file_X)
    print("Data Preparation Completed!")

    model = train_a_unet(X, Y)
    print("Training Completed!")

    # predict(model, X)
    # print("Predicting Completed!")

    send_emails(model_id)
    print("Notification completed!")
def main():
    """Entry point: PVC U-Net training/prediction for one named PET/MRI pair.

    Loads ``./data/<dir_mri>.nii`` and ``./data/<dir_pet>.nii``, generates
    blurred training pairs from the MRI, trains a U-Net and corrects the PET
    volume.  If ``--model_name`` is given, training is skipped and the
    existing model is used for prediction only.
    """

    def _str2bool(text):
        # argparse's `type=bool` treats ANY non-empty string as True
        # ("--enhance_blur False" would silently enable it); parse the text.
        return str(text).lower() in ('true', '1', 'yes', 'y')

    parser = argparse.ArgumentParser(
        description='''This is a beta script for Partial Volume Correction in PET/MRI system. ''',
        epilog="""All's well that ends well.""")
    parser.add_argument('--dir_pet', metavar='', type=str, default="breast1_pet",
                        help='Name of PET subject.(breast1_pet)<str>')
    parser.add_argument('--dir_mri', metavar='', type=str, default="breast1_water",
                        help='Name of MRI subject.(breast1_water)<str>')
    parser.add_argument(
        '--blur_method', metavar='', type=str, default="nib_smooth",
        help='The blurring method of synthesizing PET(nib_smooth)<str> [kernel_conv/skimage_gaus/nib_smooth]')
    parser.add_argument('--blur_para', metavar='', type=str, default="4",
                        help='Parameters of blurring data(4)<str>')
    parser.add_argument('--slice_x', metavar='', type=int, default=1,
                        help='Slices of input(1)<int>[1/3]')
    parser.add_argument('--enhance_blur', metavar='', type=_str2bool, default=False,
                        help='Whether stack different blurring methods to train the model')
    parser.add_argument('--id', metavar='', type=str, default="eeVee",
                        help='ID of the current model.(eeVee)<str>')
    parser.add_argument('--epoch', metavar='', type=int, default=500,
                        help='Number of epoches of training(2000)<int>')
    parser.add_argument('--n_filter', metavar='', type=int, default=64,
                        help='The initial filter number(64)<int>')
    parser.add_argument('--depth', metavar='', type=int, default=4,
                        help='The depth of U-Net(4)<int>')
    parser.add_argument('--batch_size', metavar='', type=int, default=10,
                        help='The batch_size of training(10)<int>')
    parser.add_argument('--model_name', metavar='', type=str, default='',
                        help='The name of model to be predicted. ()<str>')
    parser.add_argument('--run_aim', metavar='', type=str, default='train',
                        help='Why do you run this program? (train)<str>')
    args = parser.parse_args()

    model_name = args.model_name
    dir_mri = './data/' + args.dir_mri + '.nii'
    dir_pet = './data/' + args.dir_pet + '.nii'

    time_stamp = datetime.datetime.now().strftime("-%Y-%m-%d-%H-%M")
    model_id = args.id + time_stamp
    enhance_blur = args.enhance_blur

    gbl_set_value("depth", args.depth)
    gbl_set_value("dir_mri", dir_mri)
    gbl_set_value("dir_pet", dir_pet)
    gbl_set_value("model_id", model_id)
    gbl_set_value("n_epoch", args.epoch + 1)  # +1 so the last epoch is actually run
    gbl_set_value("n_filter", args.n_filter)
    gbl_set_value("batch_size", args.batch_size)
    gbl_set_value("slice_x", args.slice_x)
    gbl_set_value("run_aim", args.run_aim)

    # Load data
    file_pet = nib.load(dir_pet)
    file_mri = nib.load(dir_mri)
    data_pet = file_pet.get_fdata()
    data_mri = file_mri.get_fdata()
    gbl_set_value("img_shape", data_pet.shape)
    print("Loading Completed!")

    if model_name == '':
        # Train a fresh model from synthetically blurred MRI data.
        if not enhance_blur:
            X, Y = data_generator(data_mri, args.blur_method, args.blur_para)
        else:
            X, Y = enhance_data_generator(data_mri)
        print(X.shape)
        print("Blurring Completed!")

        model = train_a_unet(X, Y)
        print("Training Completed!")

        predict(model, data_pet)
        print("Predicting Completed!")

        # send_emails(model_id)
        # print("Notification completed!")
    else:
        # Reuse an existing model; the id is the file name minus the
        # leading "model" prefix (first 5 characters).
        gbl_set_value("model_id", model_name[5:])
        model = load_existing_model(model_name)
        predict(model, data_pet)
        print("Predicting Completed!")