Example #1
def prepare_feature_for_regression(setting,
                                   exp_dict=None,
                                   feature_set=None,
                                   cn_list=None):
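    """Collect per-landmark features and nonrigid TRE values for regression.

    Only the 'DIR-Lab_COPD' dataset is supported. The requested features are
    concatenated column-wise into feature_array, tre_array holds the matching
    'TRE_nonrigid' values, and exp_dict_name encodes data, experiment,
    neighborhood radius, iterations and feature set for naming outputs.
    """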
    feature_array = None
    tre_array = None
    if exp_dict['data'] == 'DIR-Lab_COPD':
        setting_exp = su.initialize_setting(exp_dict['experiment'],
                                            data=exp_dict['data'])
        if cn_list is None:
            cn_list = setting_exp['cn_range']
        setting_exp['neighborhood_radius'] = int(
            exp_dict['neighborhood_radius'])
        landmarks, landmarks_merged, _ = read_landmarks_pkl(
            setting_exp, cn_list=cn_list, exp_list=[exp_dict['experiment']])
        feature_list = su.get_feature_set(feature_set)
        for i_feature, feature in enumerate(feature_list):
            if i_feature == 0:
                feature_array = landmarks_merged[
                    exp_dict['experiment']][feature]
                tre_array = landmarks_merged[
                    exp_dict['experiment']]['TRE_nonrigid']
            else:
                feature_array = np.concatenate(
                    (feature_array,
                     landmarks_merged[exp_dict['experiment']][feature]),
                    axis=1)

    else:
        raise ValueError('data: ' + exp_dict['data'] + ' is not valid')

    exp_dict_name = exp_dict['data'] + '_' + exp_dict[
        'experiment'] + '_' + exp_dict['neighborhood_radius']
    if exp_dict['iteration'] is not None:
        exp_dict_name = exp_dict_name + '_itr'
        for itr in exp_dict['iteration']:
            exp_dict_name = exp_dict_name + '_' + str(itr)
    exp_dict_name = exp_dict_name + '_' + feature_set

    return feature_array, tre_array, exp_dict_name
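A minimal usage sketch for the helper above. The exp_dict keys mirror the ones read inside the function; the concrete values ('elastix1', the radius, the feature-set name) and the scikit-learn regressor are illustrative assumptions, not part of the original code:

import numpy as np
from sklearn.ensemble import RandomForestRegressor  # assumed regressor choice

exp_dict = {'data': 'DIR-Lab_COPD',        # only 'DIR-Lab_COPD' is handled above
            'experiment': 'elastix1',      # assumed experiment name
            'neighborhood_radius': '5',    # kept as a string, cast to int inside
            'iteration': None}
feature_array, tre_array, exp_name = prepare_feature_for_regression(
    None,                                  # 'setting' is not used inside the helper
    exp_dict=exp_dict,
    feature_set='stdT')                    # hypothetical feature-set name
model = RandomForestRegressor(n_estimators=100)
model.fit(feature_array, np.ravel(tre_array))  # regress nonrigid TRE on the features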
Example #2
def do_elastix_registration():
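    # Register the DIR-Lab COPD cases with elastix. In 'local'/'shark' mode the
    # (mostly commented-out) steps run directly; in 'sharkCluster' mode the
    # function either submits per-case jobs or, when called with --phase/--cn
    # arguments, executes a single registration phase.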
    cn_range = np.arange(1, 11)
    where_to_run = 'sharkCluster'  # 'local' , 'sharkCluster' , 'shark'
    database = 'DIR-Lab_COPD'
    current_experiment = 'elastix1'
    if not su.load_setting(current_experiment, data=database, where_to_run=where_to_run):
        registration_method = 'elastix'
        setting = su.initialize_setting(current_experiment, data=database, where_to_run=where_to_run, registration_method=registration_method)
        setting['affine_experiment'] = 'elastix1'   # Use the affine result of the current experiment or of another experiment.
        # This is useful when tuning the nonrigid registration. If it is None, the affine folder of the current experiment is used.
        setting['affine_experiment_step'] = 0
        setting['BSplineGridExperiment'] = 'elastix1'
        setting['AffineParameter'] = ['par0049.affine.txt']
        setting['MaskName_Affine'] = ['Torso']
        setting['MaskName_BSpline'] = 'Lung_Atlas'
        setting['MaskName_BSpline_Final'] = 'Lung_Atlas'
        setting['BSplineGridParameter'] = 'par0049.bspline.grid.txt'
        setting['BSplineParameter'] = 'par0049_stdT-advanced.txt'
        setting['BSplineParameter_final'] = 'par0049_stdTL-advanced.txt'
        setting['numberOfThreads'] = 7
        su.write_setting(setting)
    else:
        setting = su.load_setting(current_experiment, data=database, where_to_run=where_to_run)

    setting['cluster_phase'] = 1  # 0: affine, 1: initial perturb + BSpline, 2: final perturb + BSpline
    setting = check_input_arguments(setting)  # If the script receives command-line arguments, it switches to 'sharkCluster' mode, so the code can be modified after the jobs have been submitted.

    if setting['whereToRun'] == 'local' or setting['whereToRun'] == 'shark':
        backup_script_address = backup_script(setting, os.path.realpath(__file__))
        for cn in cn_range:
            # reg_elx.affine(setting, cn=cn)
            # reg_elx.affine_transform(setting, cn=cn)
            pass  # placeholder; uncomment the calls above to run them
            for out in range(0, 1):
                # reg_elx.perturbation(setting, cn=cn, out=out)
                # reg_elx.bspline(setting, cn=cn, out=out)
                # reg_elx.correct_initial_transform(setting, cn=cn, out=out)
                # reg_elx.bspline_transform(setting, cn=cn, out=out, dvf=True)
                pass  # placeholder; uncomment the calls above to run them
            for outfinal in range(1, 21):
                # reg_elx.perturbation(setting, cn=cn, outfinal=outfinal)
                # reg_elx.bspline_final(setting, cn=cn, outfinal=outfinal)
                # reg_elx.correct_initial_transform(setting, cn=cn, outfinal=outfinal)
                # reg_elx.bspline_final_transform(setting, cn=cn, outfinal=outfinal)
                pass  # placeholder; uncomment the calls above to run them

    elif setting['whereToRun'] == 'sharkCluster':
        parser = argparse.ArgumentParser(description='Run one registration phase of a case on the cluster.')
        parser.add_argument('--cn', metavar='N', type=int, nargs='+',
                            help='case number')
        parser.add_argument('--out', metavar='N', type=int, nargs='+',
                            help='index of the initial perturbation')
        parser.add_argument('--outfinal', metavar='N', type=int, nargs='+',
                            help='index of the final perturbation')
        parser.add_argument('--phase', metavar='N', type=int, nargs='+',
                            help='0: affine, 1: initial perturb + BSpline, 2: final perturb + BSpline')
        args = parser.parse_args()
        if args.phase is not None:
            # clusterMode is in the run mode
            phase = args.phase[0]
            if args.cn is not None:
                cn = args.cn[0]
            if args.out is not None:
                out = args.out[0]
            if args.outfinal is not None:
                outfinal = args.outfinal[0]

            if phase == 0:
                logging.debug('phase={}, cn={} '.format(phase, cn))
                reg_elx.affine(setting, cn=cn)
            if phase == 1:
                logging.debug('phase={}, cn={}, out={} '.format(phase, cn, out))
                reg_elx.perturbation(setting, cn=cn, out=out)
                reg_elx.bspline(setting, cn=cn, out=out)
                reg_elx.bspline_transform(setting, cn=cn, out=out, dvf=True)

            if phase == 2:
                logging.debug('phase={}, cn={}, outfinal={} '.format(phase, cn, outfinal))
                reg_elx.perturbation(setting, cn=cn, outfinal=outfinal)
                reg_elx.bspline_final(setting, cn=cn, outfinal=outfinal)

        else:
            # clusterMode is in the preparing_jobs mode
            backup_script_address = backup_script(setting, os.path.realpath(__file__))
            phase = setting['cluster_phase']
            if phase == 0:
                for cn in cn_range:
                    if not os.path.isfile(su.address_generator(setting, 'affineTransformParameter', cn=cn)):
                        job_name = setting['current_experiment'] + '_' + 'affine_cn_' + str(cn)
                        reg_elx.write_and_submit_job(setting, job_name=job_name, phase=phase, cn=cn, script_address=backup_script_address)
            if phase == 1:
                for cn in cn_range:
                    for out in range(0, 1):
                        job_name = setting['current_experiment'] + '_' + 'nonRigid_cn_' + str(cn) + '_out_' + str(out)
                        reg_elx.write_and_submit_job(setting, job_name=job_name, phase=phase, cn=cn, out=out, script_address=backup_script_address)
            if phase == 2:
                for cn in cn_range:
                    for outfinal in range(1, 21):
                        # if not os.path.isfile(su.address_generator(setting, 'DVF_nonRigid_composed_final', IN=cn, outfinal=outfinal)):
                        job_name = setting['current_experiment'] + '_' + 'nonRigid_cn_' + str(cn) + '_outfinal_' + str(outfinal)
                        reg_elx.write_and_submit_job(setting, job_name=job_name, phase=phase, cn=cn, outfinal=outfinal, script_address=backup_script_address)
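Both this example and the next one rely on check_input_arguments(setting), whose implementation is not shown. A guessed minimal sketch, based only on the inline comment above (any command-line argument switches the run to cluster mode); the real helper may do more:

import sys

def check_input_arguments(setting):
    # Hypothetical reconstruction: the submitted cluster jobs start this script
    # with --phase/--cn/--out/--outfinal, so the presence of any command-line
    # argument is taken as 'sharkCluster' run mode.
    if len(sys.argv) > 1:
        setting['whereToRun'] = 'sharkCluster'
    return setting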
Example #3
def do_ANTs_registration():
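    # Same structure as do_elastix_registration, but using ANTs: phase 0 is the
    # affine step, phases 1 and 2 run the initial and final perturbation +
    # BSpline SyN registrations, either directly or as submitted cluster jobs.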
    cn_range = np.arange(1, 11)
    out_range = np.arange(0, 21)
    outfinal_range = np.arange(1, 21)
    where_to_run = 'sharkCluster'  # 'local' , 'sharkCluster' , 'shark'
    database = 'DIR-Lab_COPD'
    current_experiment = 'ANTs1'
    if not su.load_setting(
            current_experiment, data=database, where_to_run=where_to_run):
        registration_method = 'ANTs'
        setting = su.initialize_setting(
            current_experiment,
            data=database,
            where_to_run=where_to_run,
            registration_method=registration_method)
        # setting['initial_nonrigid_iterations'] = [150, 120, 100, 30]
        setting['NonRigidParameter'] = 'nonrigid.txt'
        setting['NonRigidParameter_final'] = 'nonrigid_final.txt'
        setting['ImageType_Registration'] = 'Im_Masked_Normalized'
        setting['MaskName_Affine'] = ['Torso']
        setting['useMask'] = False
        setting['MaskName_BSpline'] = 'Lung_Filled'  # Lung_Filled
        setting['MaskName_BSpline_Final'] = 'Lung_Filled'
        setting[
            'affine_experiment'] = 'elastix1'  # Use the affine result of the current experiment or of another experiment.
        # This is useful when tuning the nonrigid registration. If it is None, the affine folder of the current experiment is used.
        setting['affine_experiment_step'] = 0
        su.write_setting(setting)
    else:
        setting = su.load_setting(current_experiment,
                                  data=database,
                                  where_to_run=where_to_run)

    setting[
        'cluster_phase'] = 1  # 0: affine, 1: initial perturb + BSpline_SyN, 2: final perturb + BSpline_SyN
    setting['cluster_task_despendency'] = True
    setting = check_input_arguments(
        setting
    )  # If the script receives command-line arguments, it switches to 'sharkCluster' mode, so the code can be modified after the jobs have been submitted.

    if setting['whereToRun'] == 'local' or setting['whereToRun'] == 'shark':
        backup_script_address = backup_script(setting,
                                              os.path.realpath(__file__))
        for cn in [2]:
            # regANTs.affine_ANTs(setting, cn=cn)
            # regANTs.affine_ANTs_transform(setting, cn=cn)
            # regANTs.affine_ANTs_transform_image(setting, cn=cn)
            pass  # placeholder; uncomment the calls above to run them
            for out in range(0, 1):
                # regANTs.perturbation(setting, cn=cn, out=out)
                # regANTs.BSpline_SyN_ANTs(setting, cn=cn, out=out)
                # regANTs.BSpline_SyN_ANTs_transform(setting, cn=cn, out=out)
                # regANTs.BSpline_SyN_ANTs_cleanup(setting, IN=cn, out=out)
                # regANTs.BSpline_SyN_ANTs_transform_image(setting, IN=cn, out=out)
                # regANTs.convert_nii2mha(setting, cn=cn, out=out)
                pass  # placeholder; uncomment the calls above to run them

            for outfinal in range(1, 21):
                # regANTs.perturbation(setting, cn=cn, outfinal=outfinal)
                # regANTs.bspline_syn_ants_final(setting, cn=cn, outfinal=outfinal)
                # regANTs.BSpline_SyN_ANTs_final_transform(setting, cn=cn, outfinal=outfinal)
                # regANTs.BSpline_SyN_ANTs_cleanup_final(setting, cn=cn, outfinal=outfinal)
                pass  # placeholder; uncomment the calls above to run them

    elif setting['whereToRun'] == 'sharkCluster':
        parser = argparse.ArgumentParser(
            description='Run one registration phase of a case on the cluster.')
        parser.add_argument('--cn',
                            metavar='N',
                            type=int,
                            nargs='+',
                            help='case number')
        parser.add_argument('--out',
                            metavar='N',
                            type=int,
                            nargs='+',
                            help='index of the initial perturbation')
        parser.add_argument('--outfinal',
                            metavar='N',
                            type=int,
                            nargs='+',
                            help='index of the final perturbation')
        parser.add_argument('--phase',
                            metavar='N',
                            type=int,
                            nargs='+',
                            help='0: affine, 1: initial perturb + BSpline_SyN, '
                                 '2: final perturb + BSpline_SyN')
        args = parser.parse_args()
        if args.phase is not None:
            # clusterMode is in the run mode
            phase = args.phase[0]
            if args.cn is not None:
                cn = args.cn[0]
            if args.out is not None:
                out = args.out[0]
            if args.outfinal is not None:
                outfinal = args.outfinal[0]

            if phase == 0:
                logging.debug('phase={}, cn={} '.format(phase, cn))
                regANTs.affine_ANTs(setting, cn=cn)
                regANTs.affine_ANTs_transform(setting, cn=cn)
                regANTs.affine_ANTs_transform_image(setting, cn=cn)
            if phase == 1:
                logging.debug('phase={}, cn={}, out={} '.format(
                    phase, cn, out))
                regANTs.perturbation(setting, cn=cn, out=out)
                regANTs.BSpline_SyN_ANTs(setting, cn=cn, out=out)
                regANTs.BSpline_SyN_ANTs_transform(setting, cn=cn, out=out)
                regANTs.BSpline_SyN_ANTs_cleanup(setting, IN=cn, out=out)
                regANTs.BSpline_SyN_ANTs_transform_image(setting,
                                                         IN=cn,
                                                         out=out)

            if phase == 2:
                logging.debug('phase={}, cn={}, outfinal={} '.format(
                    phase, cn, outfinal))
                regANTs.perturbation(setting, cn=cn, outfinal=outfinal)
                regANTs.bspline_syn_ants_final(setting,
                                               cn=cn,
                                               outfinal=outfinal)
                regANTs.BSpline_SyN_ANTs_final_transform(setting,
                                                         cn=cn,
                                                         outfinal=outfinal)
                regANTs.BSpline_SyN_ANTs_cleanup_final(setting,
                                                       cn=cn,
                                                       outfinal=outfinal)
        else:
            # clusterMode is in the preparing_jobs mode
            backup_script_address = backup_script(setting,
                                                  os.path.realpath(__file__))
            phase = setting['cluster_phase']
            if phase == 0:
                for cn in cn_range:
                    if not os.path.isfile(
                            su.address_generator(setting, 'affineDVF', cn=cn)):
                        job_name = setting[
                            'current_experiment'] + '_' + 'affine_cn_' + str(
                                cn)
                        regANTs.write_and_submit_job(
                            setting,
                            job_name=job_name,
                            phase=phase,
                            cn=cn,
                            script_address=backup_script_address)
            if phase == 1:
                for cn in range(9, 11):
                    for out in range(1, 21):
                        job_name = setting[
                            'current_experiment'] + '_' + 'nonRigid_cn_' + str(
                                cn) + '_out_' + str(out)
                        regANTs.write_and_submit_job(
                            setting,
                            job_name=job_name,
                            phase=phase,
                            cn=cn,
                            out=out,
                            script_address=backup_script_address)
            if phase == 2:
                for cn in range(9, 11):
                    for outfinal in range(1, 21):
                        if not os.path.isfile(
                                su.address_generator(
                                    setting,
                                    'DVF_nonRigid_composed_final',
                                    cn=cn,
                                    outfinal=outfinal)):
                            job_name = setting[
                                'current_experiment'] + '_' + 'nonRigid_cn_' + str(
                                    cn) + '_outfinal_' + str(outfinal)
                            regANTs.write_and_submit_job(
                                setting,
                                job_name=job_name,
                                phase=phase,
                                cn=cn,
                                outfinal=outfinal,
                                script_address=backup_script_address)
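In run mode each submitted job simply re-invokes this script with the flags defined above. A local sketch of those invocations (the script name 'registration_copd.py' is a placeholder; only the --phase/--cn/--out/--outfinal flags come from the argparse definitions):

import subprocess
import sys

script = 'registration_copd.py'  # placeholder name for this script
# phase 0: affine registration of case 1
subprocess.run([sys.executable, script, '--phase', '0', '--cn', '1'], check=True)
# phase 1: initial perturbation + BSpline/SyN of case 1, perturbation index 0
subprocess.run([sys.executable, script, '--phase', '1', '--cn', '1', '--out', '0'], check=True)
# phase 2: final perturbation + BSpline/SyN of case 1, final perturbation index 1
subprocess.run([sys.executable, script, '--phase', '2', '--cn', '1', '--outfinal', '1'], check=True)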
Example #4
def main(current_experiment=None):
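    # End-to-end pipeline for DIR-Lab COPD: per-case preprocessing and
    # segmentation, (external) registration, feature extraction, and finally
    # TRE box plots/tables and ANTs loss curves.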
    where_to_run = 'local'  # 'local' , 'sharkCluster' , 'shark'
    database = 'DIR-Lab_COPD'
    if current_experiment is None:
        current_experiment = 'elastix1'  # 'elastix1', 'ANTs1'
    if not su.load_setting(
            current_experiment, data=database, where_to_run=where_to_run):
        registration_method = 'ANTs'  # elastix , ANTs
        setting = su.initialize_setting(
            current_experiment,
            data=database,
            where_to_run=where_to_run,
            registration_method=registration_method)
    else:
        setting = su.load_setting(current_experiment,
                                  data=database,
                                  where_to_run=where_to_run)

    cn_list = setting['cn_range']
    feature_list = [
        'stdT', 'E_T', 'stdT_final', 'E_T_final', 'Jac', 'MIND', 'CV', 'NC',
        'MI'
    ]
    feature_pool_list = [
        'stdT', 'E_T', 'stdT_final', 'E_T_final', 'Jac', 'MIND', 'CV'
    ]

    cn_remove = []
    for cn in cn_list:
        # ------------ pre-processing -------------
        pre.dirlab.dir_lab_copd(cn)
        pre.cylinder_mask(setting, cn=cn)
        pre.chest_segmentation.ptpulmo_segmentation_dirlab(
            data='DIR-Lab_COPD', cn=cn, segment_organ='Torso')
        pre.chest_segmentation.ptpulmo_segmentation_dirlab(
            data='DIR-Lab_COPD', cn=cn, segment_organ='Lung')
        pre.chest_segmentation.lung_fill_hole_dirlab(data='DIR-Lab_COPD',
                                                     cn=cn)
        # pre.chest_segmentation.lung_fill_hole_erode(setting, cn=cn)
        # pre.cylinder_mask(setting, cn=cn, overwrite=False)
        pre.ants_preprocessing.image_normalization(setting, cn=cn)

        #  ------------ registration --------------
        # Preferably run the registration separately via do_elastix_registration() or do_ANTs_registration().

        # ------------ post-processing -------------
        reg_ants.convert_nii2mha(setting, cn=cn)

        # ---------- feature extraction -----------
        ut.std_t(setting, cn=cn, mode='initial')
        ut.std_t(setting, cn=cn, mode='final')
        ut.jac(setting, cn=cn)
        output_error = ut.compute_error(setting, cn=cn, write_error_image=True)
        if output_error == 2:
            cn_remove.append(cn)
        ut.ncmi(setting, cn=cn)
        ut.feature_pooling(setting, cn=cn, feature_list=feature_pool_list)

    cn_list = [cn for cn in cn_list if cn not in cn_remove]

    landmarks = lu.load_features(setting,
                                 feature_list=feature_list,
                                 cn_list=cn_list)
    exp_tre_list = ['elastix1-TRE_nonrigid', 'ANTs1-TRE_nonrigid']
    exp_loss = ['ANTs1']
    cn_list = setting['cn_range']
    lu.plot.boxplot_tre(setting, cn_list=cn_list, exp_tre_list=exp_tre_list)
    lu.plot.table_tre(setting, cn_list=cn_list, exp_tre_list=exp_tre_list)
    lu.plot.table_tre_cn(setting, cn_list=cn_list, exp='ANTs1-TRE_nonrigid')
    ants_loss.plot_loss(setting,
                        cn_list=np.arange(1, 11),
                        exp_list=exp_loss,
                        out=0)
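A minimal entry point for the pipeline above (assumed; not part of the original listing). Registration itself is preferably launched separately via do_elastix_registration() or do_ANTs_registration():

if __name__ == '__main__':
    main(current_experiment='elastix1')  # or 'ANTs1'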