# Example #1
# 0
    def data_generate(self):
        """Collect the UI settings and generate blurred training data.

        Reads the input folder from the text box, gathers every checked
        FWHM kernel width, publishes the adjacent-slice setting, and runs
        the blurring generator over all .nii volumes in the folder.
        """

        # input folder typed into the text box
        input_dir = self.data_x.toPlainText()

        # blur: every checked FWHM width, in ascending order
        fwhm_kernel = [
            width
            for width, box in (
                (4, self.fwhm4),
                (5, self.fwhm5),
                (6, self.fwhm6),
                (7, self.fwhm7),
                (8, self.fwhm8),
            )
            if box.isChecked()
        ]

        # adjacent slices: 3 when the option is ticked, otherwise 1
        slice_x = 3 if self.slice3.isChecked() else 1
        gbl_set_value("slice_x", slice_x)

        # generate blurred pairs from every NIfTI volume in the folder
        list_X = glob.glob(input_dir + '/*.nii')
        X, Y = blurring_data_generator(list_X, fwhm_kernel)

        QtWidgets.QMessageBox.information(self.generate, "test", list_X[0])
def main():
    """Run Partial Volume Correction over every PET subject in a dataset.

    For each PET file in ``./data/dataset/<dir_folder>/pet/`` the matching
    MRI water image is located, a U-Net is trained on synthetically blurred
    MRI data (unless an existing model name is supplied), and the corrected
    PET volume is predicted and written to the results folders.
    """
    parser = argparse.ArgumentParser(
        description=
        '''This is a beta script for Partial Volume Correction in PET/MRI system. ''',
        epilog="""All's well that ends well.""")
    parser.add_argument('--dir_folder',
                        metavar='',
                        type=str,
                        default="crohns",
                        help='Name of dataset.(crohns)<str>')
    parser.add_argument(
        '--blur_method',
        metavar='',
        type=str,
        default="nib_smooth",
        help=
        'The blurring method of syn PET(nib_smooth)<str> [kernel_conv/skimage_gaus/nib_smooth]'
    )
    parser.add_argument('--blur_para',
                        metavar='',
                        type=str,
                        default="4",
                        help='Parameters of blurring data(4)<str>')
    parser.add_argument('--slice_x',
                        metavar='',
                        type=int,
                        default=1,
                        help='Slices of input(1)<int>[1/3]')
    parser.add_argument(
        '--enhance_blur',
        metavar='',
        # NOTE: argparse's type=bool treats ANY non-empty string (even
        # "False") as True, so parse the text explicitly.
        type=lambda s: s.lower() in ('true', '1', 'yes'),
        default=False,
        help='Whether stack different blurring methods to train the model')
    parser.add_argument('--id',
                        metavar='',
                        type=str,
                        default="eeVee",
                        help='ID of the current model.(eeVee)<str>')

    parser.add_argument('--epoch',
                        metavar='',
                        type=int,
                        default=500,
                        help='Number of epoches of training(500)<int>')
    parser.add_argument('--n_filter',
                        metavar='',
                        type=int,
                        default=64,
                        help='The initial filter number(64)<int>')
    parser.add_argument('--depth',
                        metavar='',
                        type=int,
                        default=4,
                        help='The depth of U-Net(4)<int>')
    parser.add_argument('--batch_size',
                        metavar='',
                        type=int,
                        default=10,
                        help='The batch_size of training(10)<int>')

    parser.add_argument('--model_name',
                        metavar='',
                        type=str,
                        default='',
                        help='The name of model to be predicted. ()<str>')

    args = parser.parse_args()

    # common settings, published through the module-level global store
    model_name = args.model_name
    enhance_blur = args.enhance_blur
    gbl_set_value("depth", args.depth)
    gbl_set_value("n_epoch", args.epoch + 1)
    gbl_set_value("n_filter", args.n_filter)
    gbl_set_value("batch_size", args.batch_size)
    gbl_set_value("slice_x", args.slice_x)

    # file-specific: every PET volume in the dataset folder
    dir_folder = './data/dataset/' + args.dir_folder + '/'
    list_pet = glob.glob(dir_folder + 'pet/*.nii.gz')

    n_files = len(list_pet)
    for idx in range(n_files):
        # Subject id = basename up to the first underscore
        # (e.g. 'sub1_pet.nii.gz' -> 'sub1'). os.path.basename is portable
        # and, unlike rfind('/') + find('_') on the whole path, is immune
        # to underscores appearing in the directory part of the path.
        filename = os.path.basename(list_pet[idx]).split('_')[0]

        time_stamp = datetime.datetime.now().strftime("-%Y-%m-%d-%H-%M")
        model_id = filename + time_stamp

        dir_pet = list_pet[idx]
        dir_mri = dir_folder + 'mri/' + filename + '_water.nii.gz'
        print(dir_pet)
        print(dir_mri)
        dir_syn = './folder_results/' + args.dir_folder + '/synthesized/'
        os.makedirs(dir_syn, exist_ok=True)  # race-free create-if-missing
        dir_model = './folder_results/' + args.dir_folder + '/models/'
        os.makedirs(dir_model, exist_ok=True)

        gbl_set_value("dir_mri", dir_mri)
        gbl_set_value("dir_pet", dir_pet)
        gbl_set_value('dir_syn', dir_syn)
        gbl_set_value('dir_model', dir_model)
        gbl_set_value("model_id", model_id)

        # Load both volumes as float arrays
        file_pet = nib.load(dir_pet)
        file_mri = nib.load(dir_mri)

        data_pet = file_pet.get_fdata()
        data_mri = file_mri.get_fdata()

        gbl_set_value("img_shape", data_pet.shape)

        print("Loading Completed!")

        if model_name == '':
            # Train a fresh model on blurred MRI pairs, then correct the PET.
            if not enhance_blur:
                X, Y = data_generator(data_mri, args.blur_method,
                                      args.blur_para)
            else:
                X, Y = enhance_data_generator(data_mri)
                print(X.shape)

            print("Blurring Completed!")
            model = train_a_unet(X, Y)
            print("Training Completed!")

            predict(model, data_pet)
            print("Predicting Completed!")

            send_emails(model_id)
            print("Notification completed!")

        else:
            # Reuse an existing model; the first 5 characters of the name are
            # assumed to be a fixed prefix before the id — TODO confirm.
            gbl_set_value("model_id", model_name[5:])
            model = load_existing_model(model_name)

            predict(model, data_pet)
            print("Predicting Completed!")
# Example #3
# 0
def main():
    """Train a U-Net on pre-saved .npy arrays selected by --X/--Y.

    Loads the X array, derives training pairs from it, trains the model,
    and sends a completion notification.
    """
    parser = argparse.ArgumentParser(
        description=
        '''This is a beta script for Partial Volume Correction in PET/MRI system. ''',
        epilog="""All's well that ends well.""")
    parser.add_argument('--X',
                        metavar='',
                        type=str,
                        default="X_mnist",
                        help='X file name.(X_mnist)<str>')
    parser.add_argument('--Y',
                        metavar='',
                        type=str,
                        default="Y_mnist",
                        help='Y file name.(Y_mnist)<str>')
    parser.add_argument('--id',
                        metavar='',
                        type=str,
                        default="eeVee",
                        help='ID of the current model.(eeVee)<str>')
    # BUG FIX: --slice_x was read below (gbl_set_value) but never defined,
    # which raised AttributeError at runtime.
    parser.add_argument('--slice_x',
                        metavar='',
                        type=int,
                        default=1,
                        help='Slices of input(1)<int>[1/3]')
    parser.add_argument('--epoch',
                        metavar='',
                        type=int,
                        default=500,
                        help='Number of epoches of training(500)<int>')
    parser.add_argument('--n_filter',
                        metavar='',
                        type=int,
                        default=64,
                        help='The initial filter number(64)<int>')
    parser.add_argument('--depth',
                        metavar='',
                        type=int,
                        default=4,
                        help='The depth of U-Net(4)<int>')
    parser.add_argument('--batch_size',
                        metavar='',
                        type=int,
                        default=10,
                        help='The batch_size of training(10)<int>')

    args = parser.parse_args()

    # BUG FIX: the parser defines --X/--Y, but the original read the
    # non-existent args.dir_X/args.dir_Y (and an undefined, unused
    # args.model_name), all of which raised AttributeError.
    dir_X = './data/' + args.X + '.npy'
    dir_Y = './data/' + args.Y + '.npy'

    time_stamp = datetime.datetime.now().strftime("-%Y-%m-%d-%H-%M")
    model_id = args.id + time_stamp

    # publish run configuration through the module-level global store
    gbl_set_value("depth", args.depth)
    gbl_set_value("dir_X", dir_X)
    gbl_set_value("dir_Y", dir_Y)
    gbl_set_value("model_id", model_id)
    gbl_set_value("n_epoch", args.epoch + 1)
    gbl_set_value("n_filter", args.n_filter)
    gbl_set_value("batch_size", args.batch_size)
    gbl_set_value("slice_x", args.slice_x)
    gbl_set_value("flag_bypass", False)

    # Load data
    file_X = np.load(dir_X)
    file_Y = np.load(dir_Y)

    gbl_set_value("img_shape", file_X.shape)

    print("Loading Completed!")

    X, Y = sa_data_generator(file_X)
    print("Data Preparation Completed!")

    model = train_a_unet(X, Y)
    print("Training Completed!")

    # predict(model, X)
    # print("Predicting Completed!")

    send_emails(model_id)
    print("Notification completed!")
# Example #4
# 0
def main():
    """Train a U-Net for sCT generation from low-field MRI images.

    Builds the per-group/per-case data and output paths, publishes every
    hyper-parameter through the global store, and launches training.
    """
    parser = argparse.ArgumentParser(
        description=
        '''This is a beta script for sCT generation from low filed MRI images. ''',
        epilog="""All's well that ends well.""")

    parser.add_argument('--slice_x',
                        metavar='',
                        type=int,
                        default=1,
                        help='channel of input(1)<int>[1/3]')
    parser.add_argument('--input_size',
                        metavar='',
                        type=int,
                        default=512,
                        help='The size of the input image, eg 224 and 512')
    parser.add_argument('--id',
                        metavar='',
                        type=str,
                        default="chansey",
                        help='ID of the current model.(chansey)<str>')
    parser.add_argument('--epoch',
                        metavar='',
                        type=int,
                        default=1000,
                        help='Number of epoches of training(1000)<int>')
    parser.add_argument('--n_filter',
                        metavar='',
                        type=int,
                        default=64,
                        help='The initial filter number of Unet(64)<int>')
    parser.add_argument('--depth',
                        metavar='',
                        type=int,
                        default=4,
                        help='The depth of Unet(4)<int>')
    parser.add_argument('--batch_size',
                        metavar='',
                        type=int,
                        default=4,
                        help='The batch_size of training(4)<int>')
    parser.add_argument('--tv_weight',
                        metavar='',
                        type=float,
                        default=1e-4,
                        help='record the tv_weight')
    parser.add_argument('--content_weight',
                        metavar='',
                        type=float,
                        default=2500,
                        help='record the content_weight')
    parser.add_argument('--style_weight',
                        metavar='',
                        type=float,
                        default=11000,
                        help='record the style_weight')
    parser.add_argument('--binary_weight',
                        metavar='',
                        type=float,
                        default=100,
                        help='record the binary_weight')
    parser.add_argument('--group_id',
                        metavar='',
                        type=int,
                        default=1,
                        help='The id of groups(1)<int>[1/2/3/4/5/6/7/8/9/10]')
    parser.add_argument('--round_id',
                        metavar='',
                        type=str,
                        default='1st_round',
                        help='The id of rounds(1)<str>[1st_round/2nd_round]')
    parser.add_argument('--case_id',
                        metavar='',
                        type=int,
                        default=10,
                        help='The number pf cases used for training(10)<int>')
    parser.add_argument(
        '--pretrained_flag',
        metavar='',
        type=int,
        default=0,
        help='The flag of pretrained model or new model(0)<int>')
    parser.add_argument('--pretrained_path',
                        metavar='',
                        type=str,
                        default="---the pretrained path---",
                        help='The path of pretrained model<str>')
    parser.add_argument(
        '--discriminator_path',
        metavar='',
        type=str,
        default=
        '/code/data/per/gp_1/10-cases/1st_round/dis/model/loss_model_chansey-2020-07-15-23-10.hdf5',
        help='the discriminator path')

    args = parser.parse_args()

    data_path = '/code/data/per/gp_' + str(args.group_id) + '/' + str(
        args.case_id) + '-cases/data_for_gen/'

    # slice counts for the generators, taken from the CT folders on disk
    train_num = len(os.listdir(data_path + '/train/train_y/CT/'))
    val_num = len(os.listdir(data_path + '/val/val_y/CT/'))

    # NOTE(review): dir_syn and dir_model point at the SAME '/gen/model/'
    # folder — possibly intentional, but dir_syn looks like it was meant
    # to be a separate synthesized-output folder. Confirm before changing.
    dir_syn = '/code/data/per/gp_' + str(args.group_id) + '/' + str(
        args.case_id) + '-cases/' + args.round_id + '/gen/model/'
    dir_model = '/code/data/per/gp_' + str(args.group_id) + '/' + str(
        args.case_id) + '-cases/' + args.round_id + '/gen/model/'

    os.makedirs(dir_syn, exist_ok=True)  # race-free create-if-missing
    os.makedirs(dir_model, exist_ok=True)

    time_stamp = datetime.datetime.now().strftime("-%Y-%m-%d-%H-%M")
    model_id = args.id + time_stamp

    # publish run configuration through the module-level global store
    gbl_set_value("depth", args.depth)
    gbl_set_value("dir_syn", dir_syn)
    gbl_set_value("dir_model", dir_model)
    gbl_set_value("model_id", model_id)
    gbl_set_value("n_epoch", args.epoch + 1)
    gbl_set_value("n_filter", args.n_filter)
    gbl_set_value("batch_size", args.batch_size)
    gbl_set_value("slice_x", args.slice_x)
    gbl_set_value("n_slice_train", train_num)
    gbl_set_value("n_slice_val", val_num)
    gbl_set_value('tv_weight', args.tv_weight)
    gbl_set_value('content_weight', args.content_weight)
    gbl_set_value('style_weight', args.style_weight)
    gbl_set_value('binary_weight', args.binary_weight)
    gbl_set_value('discriminator_path', args.discriminator_path)
    gbl_set_value('input_size', args.input_size)
    gbl_set_value("pretrained_flag", args.pretrained_flag)
    gbl_set_value("pretrained_path", args.pretrained_path)

    model = train_a_unet(data_path)
    print("Training Completed!")
# Example #5
# 0
def main():
    """Train an MR-to-CT U-Net on one case and predict on another.

    Loads the aligned MR and CT volumes for the training case, min-max
    normalizes them, trains the model, then runs prediction on the test case.
    """
    parser = argparse.ArgumentParser(
        description=
        '''This is a beta script for Partial Volume Correction in PET/MRI system. ''',
        epilog="""All's well that ends well.""")
    parser.add_argument('--train_case',
                        metavar='',
                        type=int,
                        default=1,
                        help='The training dataset case(1)<int>[1,2,3,4]')
    parser.add_argument('--test_case',
                        metavar='',
                        type=int,
                        default=2,
                        help='The testing dataset case(2)<int>[1,2,3,4]')

    parser.add_argument('--slice_x',
                        metavar='',
                        type=int,
                        default=1,
                        help='Slices of input(1)<int>[1/3]')
    parser.add_argument('--id',
                        metavar='',
                        type=str,
                        default="chansey",
                        help='ID of the current model.(chansey)<str>')

    parser.add_argument('--epoch',
                        metavar='',
                        type=int,
                        default=240,
                        help='Number of epoches of training(240)<int>')
    parser.add_argument('--n_filter',
                        metavar='',
                        type=int,
                        default=64,
                        help='The initial filter number(64)<int>')
    parser.add_argument('--depth',
                        metavar='',
                        type=int,
                        default=3,
                        help='The depth of U-Net(3)<int>')
    parser.add_argument('--batch_size',
                        metavar='',
                        type=int,
                        default=5,
                        help='The batch_size of training(5)<int>')

    args = parser.parse_args()

    train_case = args.train_case
    test_case = args.test_case

    # both model and synthesized output land in the same scratch folder
    dir_syn = './walmart/'
    dir_model = './walmart/'

    os.makedirs(dir_syn, exist_ok=True)  # race-free create-if-missing
    os.makedirs(dir_model, exist_ok=True)

    time_stamp = datetime.datetime.now().strftime("-%Y-%m-%d-%H-%M")
    model_id = args.id + time_stamp

    # publish run configuration through the module-level global store
    gbl_set_value("depth", args.depth)
    gbl_set_value("dir_syn", dir_syn)
    gbl_set_value("dir_model", dir_model)
    gbl_set_value("model_id", model_id)
    gbl_set_value("n_epoch", args.epoch + 1)
    gbl_set_value("n_filter", args.n_filter)
    gbl_set_value("batch_size", args.batch_size)
    gbl_set_value("slice_x", args.slice_x)

    # Load data: the newest aligned MR ([-1]) and CT volume of the train case
    train_path = './data/MRCT/Case' + str(train_case) + '/'
    test_path = './data/MRCT/Case' + str(test_case) + '/'

    path_X = glob.glob(train_path + '*Align*.nii')[-1]
    path_Y = glob.glob(train_path + '*CT*.nii')[-1]

    file_X = nib.load(path_X)
    file_Y = nib.load(path_Y)

    data_X = file_X.get_fdata()
    data_Y = file_Y.get_fdata()

    # MaxMin-norm both volumes before pairing
    data_X_norm = MaxMinNorm(data_X)
    data_Y_norm = MaxMinNorm(data_Y)

    gbl_set_value("img_shape", data_X.shape)
    X, Y = write_XY(data_X_norm, data_Y_norm)

    print(X.shape, Y.shape)
    print("Loading Completed!")

    model = train_a_unet(X, Y)
    print("Training Completed!")

    predict_MRCT(model, test_path)
    print("Predicting Completed!")
# Example #6
# 0
def main():
    """Partial Volume Correction for a single PET/MRI subject pair.

    Trains a U-Net on synthetically blurred MRI data (unless an existing
    model name is supplied) and predicts the corrected PET volume.
    """
    parser = argparse.ArgumentParser(
        description=
        '''This is a beta script for Partial Volume Correction in PET/MRI system. ''',
        epilog="""All's well that ends well.""")
    parser.add_argument('--dir_pet',
                        metavar='',
                        type=str,
                        default="breast1_pet",
                        help='Name of PET subject.(breast1_pet)<str>')
    parser.add_argument('--dir_mri',
                        metavar='',
                        type=str,
                        default="breast1_water",
                        help='Name of MRI subject.(breast1_water)<str>')
    parser.add_argument(
        '--blur_method',
        metavar='',
        type=str,
        default="nib_smooth",
        help=
        'The blurring method of synthesizing PET(nib_smooth)<str> [kernel_conv/skimage_gaus/nib_smooth]'
    )
    parser.add_argument('--blur_para',
                        metavar='',
                        type=str,
                        default="4",
                        help='Parameters of blurring data(4)<str>')
    parser.add_argument('--slice_x',
                        metavar='',
                        type=int,
                        default=1,
                        help='Slices of input(1)<int>[1/3]')
    parser.add_argument(
        '--enhance_blur',
        metavar='',
        # NOTE: argparse's type=bool treats ANY non-empty string (even
        # "False") as True, so parse the text explicitly.
        type=lambda s: s.lower() in ('true', '1', 'yes'),
        default=False,
        help='Whether stack different blurring methods to train the model')
    parser.add_argument('--id',
                        metavar='',
                        type=str,
                        default="eeVee",
                        help='ID of the current model.(eeVee)<str>')

    parser.add_argument('--epoch',
                        metavar='',
                        type=int,
                        default=500,
                        help='Number of epoches of training(500)<int>')
    parser.add_argument('--n_filter',
                        metavar='',
                        type=int,
                        default=64,
                        help='The initial filter number(64)<int>')
    parser.add_argument('--depth',
                        metavar='',
                        type=int,
                        default=4,
                        help='The depth of U-Net(4)<int>')
    parser.add_argument('--batch_size',
                        metavar='',
                        type=int,
                        default=10,
                        help='The batch_size of training(10)<int>')

    parser.add_argument('--model_name',
                        metavar='',
                        type=str,
                        default='',
                        help='The name of model to be predicted. ()<str>')
    parser.add_argument('--run_aim',
                        metavar='',
                        type=str,
                        default='train',
                        help='Why do you run this program? (train)<str>')

    args = parser.parse_args()

    model_name = args.model_name

    dir_mri = './data/' + args.dir_mri + '.nii'
    dir_pet = './data/' + args.dir_pet + '.nii'

    time_stamp = datetime.datetime.now().strftime("-%Y-%m-%d-%H-%M")
    model_id = args.id + time_stamp
    enhance_blur = args.enhance_blur

    # publish run configuration through the module-level global store
    gbl_set_value("depth", args.depth)
    gbl_set_value("dir_mri", dir_mri)
    gbl_set_value("dir_pet", dir_pet)
    gbl_set_value("model_id", model_id)
    gbl_set_value("n_epoch", args.epoch + 1)
    gbl_set_value("n_filter", args.n_filter)
    gbl_set_value("batch_size", args.batch_size)
    gbl_set_value("slice_x", args.slice_x)
    gbl_set_value("run_aim", args.run_aim)

    # Load both volumes as float arrays
    file_pet = nib.load(dir_pet)
    file_mri = nib.load(dir_mri)

    data_pet = file_pet.get_fdata()
    data_mri = file_mri.get_fdata()

    gbl_set_value("img_shape", data_pet.shape)

    print("Loading Completed!")

    if model_name == '':
        # Train a fresh model on blurred MRI pairs, then correct the PET.
        if not enhance_blur:
            X, Y = data_generator(data_mri, args.blur_method, args.blur_para)
        else:
            X, Y = enhance_data_generator(data_mri)
            print(X.shape)

        print("Blurring Completed!")
        model = train_a_unet(X, Y)
        print("Training Completed!")

        predict(model, data_pet)
        print("Predicting Completed!")

        # send_emails(model_id)
        # print("Notification completed!")

    else:
        # Reuse an existing model; the first 5 characters of the name are
        # assumed to be a fixed prefix before the id — TODO confirm.
        gbl_set_value("model_id", model_name[5:])
        model = load_existing_model(model_name)

        predict(model, data_pet)
        print("Predicting Completed!")