Example #1
import os
import os.path as osp
import numpy as np
import pandas as pd
import yaml
import matplotlib.pyplot as plt
from src.tools.utils import get_logger, read_file_contents_list, save_file_contents_list
from tools.utils import mkdir_p
from src.tools.plot import ClipPlotSeriesWithBack
from src.tools.data_io import ScanWrapper
from tools.plot import mean_diff_plot, scatter_plot

logger = get_logger('Analyze Round FOV')

in_native_folder = '/nfs/masi/xuk9/SPORE/CAC_class/data/s14_ori_final_resample'
file_list_txt = '/nfs/masi/xuk9/SPORE/CAC_class/file_lists/result_temporal'

out_folder_axial_clip = '/nfs/masi/xuk9/SPORE/CAC_class/axial_clip_native'
mkdir_p(out_folder_axial_clip)


def axial_clip_plot_native():
    # Generate axial clip plots for each native-space image in the file list.
    file_name_list = read_file_contents_list(file_list_txt)
    for file_name in file_name_list:
        in_img_path = os.path.join(in_native_folder, file_name)
        # Positional arguments follow ClipPlotSeriesWithBack's signature; the None
        # values leave the optional inputs unset, and only the image itself is plotted.
        cliper_obj = ClipPlotSeriesWithBack(in_img_path, None, None, 10, 35,
                                            15, 1, -3000, 1000, None, None,
                                            None)
        cliper_obj.clip_plot_img_only(out_folder_axial_clip)
	trainloader = DataLoader(dataset=trainset, batch_size=args.batch_size, shuffle=True, **train_kws)
	valiloader = DataLoader(dataset=valiset, batch_size=args.batch_size, shuffle=False, **test_kws)
	testloader = DataLoader(dataset=testset, batch_size=args.batch_size, shuffle=False, **test_kws)

	model = make_model(H=args.I_size, W=args.I_size, input_num=5, target_num=10, input_channel=1, d_channel=5, d_channel_ff=10, dropout=args.dropout) \
						.to(device=args.device, dtype=args.value_dtype)

	optimizer = torch.optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.weight_decay)
	lr_scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.9)

	loss_df = pd.DataFrame([],index=pd.Index(range(args.max_epochs), name='Epoch'), columns=['Train_loss', 'Vali_loss', 'Vali_infer_loss'])
	
	log_file = os.path.join(args.result_folder, 'log.txt')
	loss_file = os.path.join(args.result_folder, 'loss.csv')
	logger = get_logger(log_file)

	for epoch in range(args.max_epochs):
		lr = optimizer.param_groups[0]['lr']
		logger.debug('[{:s}] Epoch {:03d}, Learning rate: {}'.format(args.model, epoch+1, lr))
 
		loss_df.iloc[epoch,0] = train_epoch(model, trainloader, optimizer, args, logger)
		loss_df.iloc[epoch,1] = eval_epoch(model, valiloader, args, logger)
		loss_df.iloc[epoch,2] = infer_epoch(model, valiloader, args, logger)

		# After the first 10 epochs, slow the exponential LR decay (gamma 0.9 -> 0.95).
		if (epoch+1) > 10:
			lr_scheduler.gamma = 0.95
		lr_scheduler.step()

		# Save a model checkpoint every 10 epochs.
		if (epoch+1) % 10 == 0:
			save_model(epoch, optimizer, model, args)
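	# Hypothetical post-training step (not part of the original fragment): write the
	# per-epoch losses to the CSV path prepared above.
	loss_df.to_csv(loss_file)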
Example #3
import argparse
import os
import os.path as osp
import numpy as np
import pandas as pd
import yaml
from src.tools.paral_average import AverageValidRegion
from src.tools.data_io import ScanWrapper, DataFolder
from src.tools.utils import get_logger, read_file_contents_list, mkdir_p

logger = get_logger('grad_cam_analysis')

affine_img_dir = '/nfs/masi/xuk9/SPORE/CAC_class/data/affine/s2_no_nan'
file_list_txt = '/nfs/masi/xuk9/SPORE/CAC_class/file_lists/complete_list'

out_folder = '/nfs/masi/xuk9/SPORE/CAC_class/data/affine/s2_no_nan_average'
mkdir_p(out_folder)


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--yaml-config',
                        type=str,
                        default='simg_bmi_regression_0_cam.yaml')
    args = parser.parse_args()

    # Average the valid region across the affine-registered images listed in file_list_txt.
    file_list = read_file_contents_list(file_list_txt)
    # folder_obj = DataFolder(ori_img_dir, file_list)
    folder_obj = DataFolder(affine_img_dir, file_list)

    ave_obj = AverageValidRegion(folder_obj, 10)
import argparse
import os
import os.path as osp
from math import sqrt
import numpy as np
import pandas as pd
import yaml
import matplotlib.pyplot as plt
import statsmodels.api as sm
from sklearn.metrics import mean_squared_error, r2_score
from src.tools.utils import get_logger, read_file_contents_list
from tools.utils import mkdir_p

logger = get_logger('Regression analysis')

lower_bound = 12
upper_bound = 50


def mean_diff_plot(pred_list, gt_list, rmse_list, out_png):
    """Save a Bland-Altman (mean-difference) plot of predictions vs. ground truth,
    with the mean RMSE and the R^2 score in the title."""
    fig, ax = plt.subplots(figsize=(8, 6))
    sm.graphics.mean_diff_plot(pred_list, gt_list, ax=ax)
    ax.set_title(
        f'RMSE: {np.mean(rmse_list):.4f}, R2: {r2_score(gt_list, pred_list):.4f}'
    )

    logger.info(f'Save png to {out_png}')
    plt.savefig(out_png, bbox_inches='tight', pad_inches=0.1)
    plt.close()
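
# Hypothetical usage sketch with synthetic values (not SPORE data); mean_diff_plot only
# needs paired prediction / ground-truth arrays, a list of RMSE values, and an output
# path. The '/tmp' destination below is an arbitrary placeholder.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    gt_demo = rng.uniform(lower_bound, upper_bound, size=100)
    pred_demo = gt_demo + rng.normal(0, 2, size=100)
    rmse_demo = [sqrt(mean_squared_error(gt_demo, pred_demo))]
    mean_diff_plot(pred_demo, gt_demo, rmse_demo, osp.join('/tmp', 'bland_altman_demo.png'))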
Example #5
import os
import os.path as osp
import numpy as np
import pandas as pd
import yaml
import matplotlib.pyplot as plt
from src.tools.utils import get_logger, read_file_contents_list, save_file_contents_list
from tools.utils import mkdir_p
from src.tools.plot import ClipPlotSeriesWithBack
from src.tools.data_io import ScanWrapper
from tools.plot import mean_diff_plot, scatter_plot

logger = get_logger('Analyze missing ROI')

in_body_mask = '/nfs/masi/xuk9/SPORE/CAC_class/data/atlas/atlas_body_mask/body_seg_resampled.nii.gz'
body_mask_overlap_roi_folder = '/nfs/masi/xuk9/SPORE/CAC_class/data/atlas/valid_region/s6_body_mask_intersect'
file_list_txt = '/nfs/masi/xuk9/SPORE/CAC_class/file_lists/result_temporal'

out_analyze_missing_roi_folder = '/nfs/masi/xuk9/SPORE/CAC_class/clinical/missing_roi'
mkdir_p(out_analyze_missing_roi_folder)

out_missing_ratio_csv = os.path.join(out_analyze_missing_roi_folder,
                                     'missing_ratio.csv')


def _get_sess_missing_ratio(file_name):

    body_mask_data = ScanWrapper(in_body_mask).get_data()
    in_mask_data = ScanWrapper(
import argparse
import os
import os.path as osp
from math import sqrt
import numpy as np
import yaml
import matplotlib.pyplot as plt
import statsmodels.api as sm
from sklearn.metrics import mean_squared_error, r2_score
from src.tools.utils import get_logger
from src.tools.clinical import ClinicalDataReaderSPORE
from tools.utils import mkdir_p, read_file_contents_list, save_file_contents_list

logger = get_logger('Run temporary consistency test')

in_file_list_txt = '/nfs/masi/xuk9/SPORE/CAC_class/file_lists/complete_list'
in_csv_file = '/nfs/masi/xuk9/SPORE/CAC_class/clinical/label_full_combined.csv'
in_raw_label_file_xlsx = '/nfs/masi/xuk9/SPORE/clustering/registration/20200512_corrField/male/clinical/label.xlsx'

out_height_weight_added_csv = '/nfs/masi/xuk9/SPORE/CAC_class/clinical/label_add_H_W.csv'
valid_bmi_file_list = '/nfs/masi/xuk9/SPORE/CAC_class/file_lists/bmi_valid'

out_diff_hist_png = '/nfs/masi/xuk9/SPORE/CAC_class/clinical/diff_hist.png'

out_long_exclude_list = '/nfs/masi/xuk9/SPORE/CAC_class/clinical/bmi_exclude_long_sess_list.txt'
out_include_bmi_list = '/nfs/masi/xuk9/SPORE/CAC_class/clinical/bmi_include_list.txt'
out_exclude_bmi_list = '/nfs/masi/xuk9/SPORE/CAC_class/clinical/bmi_exclude_list.txt'
out_inconsistency_list = '/nfs/masi/xuk9/SPORE/CAC_class/clinical/bmi_inconsistent_list.txt'
out_inconsist_subj_data = '/nfs/masi/xuk9/SPORE/CAC_class/clinical/bmi_inconsistent_subj_data.txt'
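

# Hypothetical helper for the height/weight -> BMI step implied by the outputs above;
# assumes weight in kilograms and height in meters (the conventional BMI definition,
# kg / m^2). The original script's column names and unit handling are not shown here.
def _compute_bmi(weight_kg, height_m):
    return weight_kg / (height_m ** 2)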