def test_build_graph():
    """Test the method to build a graph from the heatmap."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Load the GT data
    path_gt = [os.path.join(currdir, 'data', 'gt_folders', 'prostate')]
    label_gt = ['prostate']
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, path_gt)

    # Build a heatmap from the dce data
    # Reduce the number of bins to enforce low memory consumption
    nb_bins = [100] * dce_mod.n_serie_
    heatmap, bins_heatmap = dce_mod.build_heatmap(gt_mod.extract_gt_data(
        label_gt[0]), nb_bins=nb_bins)

    # Build the graph by taking the inverse exponential of the heatmap
    graph = StandardTimeNormalization._build_graph(heatmap, .5)
    graph_dense = graph.toarray()

    data = np.load(os.path.join(currdir, 'data', 'graph.npy'))
    assert_array_equal(graph_dense, data)

def test_fit():
    """Test the routine to fit the parameters of the dce normalization."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Load the GT data
    path_gt = [os.path.join(currdir, 'data', 'full_gt', 'prostate')]
    label_gt = ['prostate']
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, path_gt)

    # Create the object to make the normalization
    stn = StandardTimeNormalization(dce_mod)

    # Create a synthetic model to fit on
    stn.model_ = np.array([30., 30., 32., 31., 31., 30., 35., 55., 70., 80.])
    stn.is_model_fitted_ = True

    # Fit the parameters on the model
    stn.fit(dce_mod, gt_mod, label_gt[0])

    assert_almost_equal(stn.fit_params_['scale-int'], 1.2296657327848537,
                        decimal=PRECISION_DECIMAL)
    assert_equal(stn.fit_params_['shift-time'], 0.0)
    data = np.array([191.29, 193.28, 195.28, 195.28, 195.28, 197.28, 213.25,
                     249.18, 283.12, 298.10])
    assert_array_almost_equal(stn.fit_params_['shift-int'], data,
                              decimal=PRECISION_DECIMAL)

def test_partial_fit_model_dict_wrong_type():
    """Test that an error is raised when a parameter in the dictionary has
    the wrong type."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Load the GT data
    path_gt = [os.path.join(currdir, 'data', 'gt_folders', 'prostate')]
    label_gt = ['prostate']
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, path_gt)

    # Create the object to make the normalization
    stn = StandardTimeNormalization(dce_mod)

    params = {'std': 50., 'exp': 25., 'alpha': .9, 'max_iter': 5.}
    assert_raises(ValueError, stn.partial_fit_model, dce_mod,
                  ground_truth=gt_mod, cat=label_gt[0], params=params)

    params = {'std': 50., 'exp': 25, 'alpha': .9, 'max_iter': 5}
    assert_raises(ValueError, stn.partial_fit_model, dce_mod,
                  ground_truth=gt_mod, cat=label_gt[0], params=params)

def test_shift_heatmap_wrong_shift():
    """Test if an error is raised when the shift provided is not
    consistent."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Load the GT data
    path_gt = [os.path.join(currdir, 'data', 'gt_folders', 'prostate')]
    label_gt = ['prostate']
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, path_gt)

    # Build a heatmap from the dce data
    # Reduce the number of bins to enforce low memory consumption
    nb_bins = [100] * dce_mod.n_serie_
    heatmap, bins_heatmap = dce_mod.build_heatmap(gt_mod.extract_gt_data(
        label_gt[0]), nb_bins=nb_bins)

    # Create a list of shifts which does not have the same number of entries
    # as the heatmap - there are 4 series, so create only 2
    shift_arr = np.array([10] * 2)

    assert_raises(ValueError, StandardTimeNormalization._shift_heatmap,
                  heatmap, shift_arr)

def test_save_model_wrong_ext():
    """Test that an error is raised if the filename has a wrong extension
    while storing the model."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Load the GT data
    path_gt = [os.path.join(currdir, 'data', 'full_gt', 'prostate')]
    label_gt = ['prostate']
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, path_gt)

    # Create the object to make the normalization
    stn = StandardTimeNormalization(dce_mod)
    stn.partial_fit_model(dce_mod, gt_mod, label_gt[0])

    # Try to store the model without using an npy file
    assert_raises(ValueError, stn.save_model,
                  os.path.join(currdir, 'data', 'model.rnd'))

def test_ese_transform_gt_cat():
    """Test the transform routine with a given ground-truth."""
    # Create the normalization object with the right modality
    dce_ese = EnhancementSignalExtraction(DCEModality())

    # Load the DCE data
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Load the GT data
    path_gt = [os.path.join(currdir, 'data', 'gt_folders', 'prostate')]
    label_gt = ['prostate']
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, path_gt)

    # Apply the transform with the given ground-truth
    data = dce_ese.transform(dce_mod, gt_mod, label_gt[0])

    # Check the size of the data
    assert_equal(data.shape, (12899, 4))
    # Check the hash of the data
    data.flags.writeable = False
    assert_equal(hash(data.data), -3808597525488161265)

def test_partial_fit_model_2():
    """Test the routine to fit two models."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Load the GT data
    path_gt = [os.path.join(currdir, 'data', 'full_gt', 'prostate')]
    label_gt = ['prostate']
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, path_gt)

    # Create the object to make the normalization
    stn = StandardTimeNormalization(dce_mod)
    stn.partial_fit_model(dce_mod, gt_mod, label_gt[0])
    stn.partial_fit_model(dce_mod, gt_mod, label_gt[0])

    # Check the model computed
    model_gt = np.array([22.26479174, 22.51070962, 24.66027277, 23.43488237,
                         23.75601817, 22.56173871, 26.86244505, 45.06227804,
                         62.34273874, 71.35327656])
    assert_array_almost_equal(stn.model_, model_gt,
                              decimal=PRECISION_DECIMAL)
    assert_true(stn.is_model_fitted_)

def test_tqe_compute_fit_aif():
    """Test the fit function when the AIF is fitted from the data."""
    # Try to fit an object with another modality
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir,
                             '../../preprocessing/tests/data/full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Create the object for Tofts quantification extraction
    tqe = ToftsQuantificationExtraction(DCEModality(), 1.6, 3.5,
                                        random_state=RND_SEED)

    # Perform the fitting
    tqe.fit(dce_mod, fit_aif=True)

    # Check the value fitted
    assert_almost_equal(tqe.TR_, 0.00324, decimal=DECIMAL_PRECISION)
    assert_almost_equal(tqe.flip_angle_, 10., decimal=DECIMAL_PRECISION)
    assert_equal(tqe.start_enh_, 3)
    cp_r_gt = np.array([3.71038e-02, 2.35853e-02, 4.21997e-13, 1.22529e-02,
                        2.46203e-02, 1.35724e-01, 3.06310e-01, 3.25429e-01,
                        2.94957e-01, 2.58964e-01])
    assert_array_almost_equal(tqe.cp_t_, cp_r_gt, decimal=DECIMAL_PRECISION)

def test_tqe_compute_fit_no_aif():
    """Test the fit function when the AIF is not fitted from the data."""
    # Try to fit an object with another modality
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir,
                             '../../preprocessing/tests/data/full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Create the object for Tofts quantification extraction
    tqe = ToftsQuantificationExtraction(DCEModality(), 1.6, 3.5,
                                        random_state=RND_SEED)

    # Perform the fitting
    tqe.fit(dce_mod, fit_aif=False)

    # Check the value fitted
    assert_almost_equal(tqe.TR_, 0.00324, decimal=DECIMAL_PRECISION)
    assert_almost_equal(tqe.flip_angle_, 10., decimal=DECIMAL_PRECISION)
    assert_equal(tqe.start_enh_, 3)
    cp_r_gt = np.array([0., 0., 0., 0.13859428, 6.23675492, 6.90344512,
                        1.80619315, 2.22619032, 3.69060743, 3.32021637])
    assert_array_almost_equal(tqe.cp_t_, cp_r_gt, decimal=DECIMAL_PRECISION)

def test_tqe_conv_signal_conc():
    """Test the conversion from signal to concentration."""
    # Try to fit an object with another modality
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir,
                             '../../preprocessing/tests/data/full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Create the object for the Tofts extraction
    tqe = ToftsQuantificationExtraction(DCEModality(), 1.6, 3.5)
    tqe.fit(dce_mod, fit_aif=False)

    # Try to perform a conversion
    signal = np.array([379., 366., 343., 355., 367., 470., 613., 628., 604.,
                       575.])
    conc = tqe.signal_to_conc(signal, 343.)
    conc_gt = np.array([2.15201846e-02, 1.36794587e-02, 2.44758162e-13,
                        7.10669089e-03, 1.42797742e-02, 7.87199894e-02,
                        1.77659845e-01, 1.88748637e-01, 1.71075044e-01,
                        1.50198853e-01])
    assert_almost_equal(conc, conc_gt)

    # Apply the back conversion
    signal_back = tqe.conc_to_signal(conc, 343.)
    assert_almost_equal(signal_back, signal, decimal=DECIMAL_PRECISION)

def test_qte_transform_regular():
    """Test the transform function for the regular model."""
    # Try to fit an object with another modality
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir,
                             '../../preprocessing/tests/data/full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Create the gt data
    gt_mod = GTModality()
    gt_cat = ['cap']
    path_data = [os.path.join(
        currdir, '../../preprocessing/tests/data/full_gt/cap')]
    gt_mod.read_data_from_path(gt_cat, path_data)

    # Create the object for the Tofts extraction
    tqe = ToftsQuantificationExtraction(DCEModality(), 1.6, 3.5,
                                        random_state=RND_SEED)
    tqe.fit(dce_mod)

    data = tqe.transform(dce_mod, gt_mod, gt_cat[0], kind='regular')
    data_gt = np.load(os.path.join(currdir, 'data/tofts_reg_data.npy'))
    assert_array_almost_equal(data, data_gt, decimal=DECIMAL_PRECISION)

def test_shift_heatmap():
    """Test the routine which shifts the heatmap."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Load the GT data
    path_gt = [os.path.join(currdir, 'data', 'gt_folders', 'prostate')]
    label_gt = ['prostate']
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, path_gt)

    # Build a heatmap from the dce data
    # Reduce the number of bins to enforce low memory consumption
    nb_bins = [100] * dce_mod.n_serie_
    heatmap, bins_heatmap = dce_mod.build_heatmap(gt_mod.extract_gt_data(
        label_gt[0]), nb_bins=nb_bins)

    # Create a list of shifts with one entry per serie in the heatmap
    # There are 4 series
    shift_arr = np.array([10] * 4)

    heatmap_shifted = StandardTimeNormalization._shift_heatmap(heatmap,
                                                               shift_arr)

    data = np.load(os.path.join(currdir, 'data', 'heatmap_shifted.npy'))
    assert_array_equal(heatmap_shifted, data)

def test_dce_get_pdf_roi():
    """Test the function to get a pdf from a ROI."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce_folders')
    # Create the list of path
    path_data_list = [os.path.join(path_data, 's_2'),
                      os.path.join(path_data, 's_1')]
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data_list)

    # Create ground truth array
    pos = np.ones((368, 448), dtype=bool)
    neg = np.zeros((368, 448), dtype=bool)
    gt_index = np.rollaxis(np.array([neg, pos, pos, pos, neg]), 0, 3)

    # Compute the histogram for the required data
    pdf, bins = dce_mod.get_pdf_list(roi_data=gt_index)

    pdf_roi = np.load(os.path.join(currdir, 'data', 'pdf_roi.npy'))
    bins_roi = np.load(os.path.join(currdir, 'data', 'bins_roi.npy'))

    for pdf_s, bins_s, pdf_gt, bins_gt in zip(pdf, bins, pdf_roi, bins_roi):
        assert_array_equal(pdf_s, pdf_gt)
        assert_array_equal(bins_s, bins_gt)

def test_ese_fit():
    """Test that an error is raised since the function is not
    implemented."""
    # Create the normalization object with the right modality
    dce_ese = EnhancementSignalExtraction(DCEModality())

    # Open the DCE data
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Fit and raise the error
    assert_raises(NotImplementedError, dce_ese.fit, dce_mod)

def test_get_pdf_nb_bins_wrong_type():
    """Test that an error is raised when an unknown parameter type is
    passed."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Pass a single integer which is an unknown type
    assert_raises(ValueError, dce_mod.get_pdf_list, roi_data=None,
                  nb_bins=10)

def test_tqe_bad_mod_transform():
    """Test that an error is raised when the modality to transform does not
    correspond to the template modality given at construction."""
    # Create the normalization object with the right modality
    dce_tqe = ToftsQuantificationExtraction(DCEModality(), T10, CA)

    # Try to fit an object with another modality
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Transform and raise the error
    assert_raises(RuntimeError, dce_tqe.transform, dce_mod)

def test_tqe_compute_aif_bad_estimator():
    """Test that an error is raised when a wrong estimator is given to
    compute the AIF."""
    # Try to fit an object with another modality
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Define an unknown estimator
    estimator = 'rnd'
    assert_raises(ValueError, ToftsQuantificationExtraction.compute_aif,
                  dce_mod, estimator=estimator)

def test_gn_fit_wrong_modality():
    """Test that an error is raised when a wrong modality is provided for
    fitting."""
    # Create a DCEModality object
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data_dce = os.path.join(currdir, 'data', 'dce')
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data=path_data_dce)

    # Create the Gaussian normalization object
    gaussian_norm = GaussianNormalization(T2WModality())

    # Try to make the fitting with another base modality
    assert_raises(ValueError, gaussian_norm.fit, dce_mod)

def test_ese_wrong_gt_mod():
    """Test that an error is raised when a wrong modality is given as
    ground-truth."""
    # Create the normalization object with the right modality
    dce_ese = EnhancementSignalExtraction(DCEModality())

    # Try to fit an object with another modality
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Transform and raise the error
    assert_raises(ValueError, dce_ese.transform, dce_mod, dce_mod,
                  'prostate')

def test_get_pdf_nb_bins_str_unknown():
    """Test that an error is raised when the string for `nb_bins` is
    unknown."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Pass an unknown string for `nb_bins`
    assert_raises(ValueError, dce_mod.get_pdf_list, roi_data=None,
                  nb_bins='rnd')

def test_update_histogram_wrong_instance():
    """Test that an error is raised with a wrong type for the `nb_bins`
    argument."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Update the histogram with a single integer instead of a list
    # for `nb_bins`
    assert_raises(ValueError, dce_mod.update_histogram, nb_bins=10)

def test_partial_fit_model_wt_gt_and_cat():
    """Test that a warning is raised when a ground-truth is not provided but
    a category is."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Create the object to make the normalization
    stn = StandardTimeNormalization(dce_mod)
    assert_warns(UserWarning, stn.partial_fit_model, dce_mod, cat='prostate')

def test_get_pdf_wrong_bins():
    """Test that an error is raised with an inconsistent number of bins."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Get the pdf with the wrong number of series in nb_bins
    # There are only two series
    nb_bins = [100, 100, 100]
    assert_raises(ValueError, dce_mod.get_pdf_list, nb_bins=nb_bins)

def test_ese_transform_wt_gt_and_cat():
    """Test that a warning is raised when a ground-truth is not provided but
    a category is."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Create the object to make the normalization
    dce_ese = EnhancementSignalExtraction(dce_mod)
    assert_warns(UserWarning, dce_ese.transform, dce_mod, cat='prostate')

def test_normalize_denormalize_3():
    """Test the data normalization and denormalization with shift > 0."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Load the GT data
    path_gt = [os.path.join(currdir, 'data', 'full_gt', 'prostate')]
    label_gt = ['prostate']
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, path_gt)

    # Create the object to make the normalization
    stn = StandardTimeNormalization(dce_mod)

    # Simulate that we fitted the data
    stn.model_ = np.array([30., 30., 32., 31., 31., 30., 35., 55., 70., 80.])
    stn.is_model_fitted_ = True
    stn.fit_params_ = {'scale-int': 1.2296657327848537,
                       'shift-time': 3.0,
                       'shift-int': np.array([191.29, 193.28, 195.28, 195.28,
                                              195.28, 197.28, 213.25, 249.18,
                                              283.12, 298.10])}
    stn.is_fitted_ = True

    # Store the data somewhere
    data_gt_cp = dce_mod.data_.copy()

    # Normalize the data
    dce_mod_norm = stn.normalize(dce_mod)

    # Check if the data are properly normalized
    dce_mod_norm.data_.flags.writeable = False
    data = np.load(os.path.join(currdir, 'data', 'data_normalized_dce_3.npy'))
    assert_equal(hash(dce_mod_norm.data_.data), data)

    dce_mod_norm.data_.flags.writeable = True
    dce_mod_2 = stn.denormalize(dce_mod_norm)
    dce_mod_2.data_.flags.writeable = False
    assert_equal(hash(dce_mod_2.data_.data), -3781160829709175881)

def test_tqe_compute_aif_max():
    """Test the AIF computation when the max estimator is used."""
    # Try to fit an object with another modality
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir,
                             '../../preprocessing/tests/data/full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Compute the AIF
    signal_aif = ToftsQuantificationExtraction.compute_aif(
        dce_mod, estimator='max', random_state=RND_SEED)

    aif_gt = np.array([503., 482., 493., 467., 504., 648., 816., 850., 827.,
                       787.])
    assert_array_equal(signal_aif, aif_gt)

def test_tqe_compute_aif_mean():
    """Test the AIF computation when the mean estimator is used."""
    # Try to fit an object with another modality
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir,
                             '../../preprocessing/tests/data/full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Compute the AIF
    signal_aif = ToftsQuantificationExtraction.compute_aif(
        dce_mod, estimator='mean', random_state=RND_SEED)

    aif_gt = np.array([347.29533, 332.32211, 317.53709, 322.32994, 336.03532,
                       441.30315, 586.89144, 598.05404, 585.32235, 562.42261])
    assert_array_almost_equal(signal_aif, aif_gt, decimal=DECIMAL_PRECISION)

def test_tqe_compute_aif_default():
    """Test the AIF computation when the default parameters are used."""
    # Try to fit an object with another modality
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir,
                             '../../preprocessing/tests/data/full_dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_data)

    # Compute the AIF
    signal_aif = ToftsQuantificationExtraction.compute_aif(
        dce_mod, random_state=RND_SEED)

    aif_gt = np.array([379., 366., 343., 355., 367., 470., 613., 628., 604.,
                       575.])
    assert_array_equal(signal_aif, aif_gt)

def find_normalization_params(pat_dce, pat_gt, label, pat_model):
    # Create the normalization object and load the model
    dce_norm = StandardTimeNormalization(DCEModality())
    dce_norm.load_model(pat_model)

    # Read the DCE
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(pat_dce)

    # Read the GT
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label, pat_gt)

    # Find the normalization parameters
    dce_norm.fit(dce_mod, ground_truth=gt_mod, cat=label[0])

    return dce_norm

def test_update_histogram_wrong_bins_type_2():
    """Test that an error is raised with an inconsistent data type inside
    the `nb_bins` list."""
    # Load the data with only a single serie
    currdir = os.path.dirname(os.path.abspath(__file__))
    path_data = os.path.join(currdir, 'data', 'dce')
    # Create an object to handle the data
    dce_mod = DCEModality()
    # Read the data
    dce_mod.read_data_from_path(path_data)

    # Update the histogram with a list containing a non-integer entry
    nb_bins = [100, 'a']
    assert_raises(ValueError, dce_mod.update_histogram, nb_bins=nb_bins)

# Generate the different path to be later treated
path_patients_list_dce = []
path_patients_list_gt = []

# Create the generator
id_patient_list = (name for name in os.listdir(path_patients)
                   if os.path.isdir(os.path.join(path_patients, name)))

for id_patient in id_patient_list:
    # Append for the DCE data
    path_patients_list_dce.append(os.path.join(path_patients, id_patient,
                                               path_dce))
    # Append for the GT data - Note that we need a list of gt path
    path_patients_list_gt.append([os.path.join(path_patients, id_patient,
                                                path_gt)])

# Create the model iteratively
dce_norm = StandardTimeNormalization(DCEModality())
for pat_dce, pat_gt in zip(path_patients_list_dce, path_patients_list_gt):
    # Read the DCE
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(pat_dce)

    # Read the GT
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, pat_gt)

    # Fit the model
    dce_norm.partial_fit_model(dce_mod, ground_truth=gt_mod,
                               cat=label_gt[0])

# Define the path where to store the model
path_store_model = '/data/prostate/pre-processing/lemaitre-2016-nov/model'

                   if os.path.isdir(os.path.join(path_patients, name))]

for id_patient in id_patient_list:
    # Append for the DCE data
    path_patients_list_dce.append(os.path.join(path_patients, id_patient,
                                               path_dce))
    # Append for the GT data - Note that we need a list of gt path
    path_patients_list_gt.append([os.path.join(path_patients, id_patient,
                                                path_gt)])

for p_dce, p_gt, pat in zip(path_patients_list_dce,
                            path_patients_list_gt,
                            id_patient_list):

    print 'Processing #{}'.format(pat)

    # Create the Brix Extractor
    brix_ext = BrixQuantificationExtraction(DCEModality())

    # Read the DCE
    print 'Read DCE images'
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(p_dce)

    # Read the GT
    print 'Read GT images'
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, p_gt)

    # Load the appropriate normalization object
    filename_norm = (pat.lower().replace(' ', '_') + '_norm.p')
    dce_norm = StandardTimeNormalization.load_from_pickles(

                     n_jobs=-1)
    label_region = km2.fit_predict(region_feat_vec)

    for i, gt in enumerate(label_gt):
        # get the gt
        all_label_img[gt[0]][np.nonzero(
            all_label_img[gt[0]] == gt[1] + 1)] = 1

    for sl in range(len(all_label_img)):
        plt.figure()
        plt.imshow(all_label_img[sl])
        plt.savefig('{}_image_{}.png'.format(idx_patient, sl))
        plt.figure()
        plt.imshow(mod.data_[10, 50:size_image[1]/2, :, sl])
        plt.savefig('{}_original_{}.png'.format(idx_patient, sl))

    return label_region, label_gt, region_feat_vec


# Loop where we read every patient
for idx_lopo_cv in range(len(id_patient_list)):

    # Read the DCE data
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(os.path.join(path_data,
                                             id_patient_list[idx_lopo_cv],
                                             path_dce))

    # Segment the aorta
    lr, lgt, rfv = segmentation_aorta(dce_mod, idx_lopo_cv)

]

for id_patient in id_patient_list:
    # Append for the DCE data
    path_patients_list_dce.append(
        os.path.join(path_patients, id_patient, path_dce))
    # Append for the GT data - Note that we need a list of gt path
    path_patients_list_gt.append(
        [os.path.join(path_patients, id_patient, path_gt)])

for p_dce, p_gt, pat in zip(path_patients_list_dce,
                            path_patients_list_gt,
                            id_patient_list):

    print 'Processing #{}'.format(pat)

    # Create the PUN Extractor
    pun_ext = PUNQuantificationExtraction(DCEModality())

    # Read the DCE
    print 'Read DCE images'
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(p_dce)

    # Read the GT
    print 'Read GT images'
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, p_gt)

    # Fit the parameters for PUN
    print 'Extract Weibull'
    pun_ext.fit(dce_mod, ground_truth=gt_mod, cat=label_gt[0])

for id_patient in id_patient_list:
    # Append for the DCE data
    path_patients_list_dce.append(os.path.join(path_patients, id_patient,
                                               path_dce))
    # Append for the GT data - Note that we need a list of gt path
    path_patients_list_gt.append([os.path.join(path_patients, id_patient, gt)
                                  for gt in path_gt])

# Load all the data once. Splitting into training and testing will be done at
# the cross-validation time
for idx_pat in range(len(id_patient_list)):
    print 'Read patient {}'.format(id_patient_list[idx_pat])

    # Load the testing data that correspond to the index of the LOPO
    # Create the object for the DCE
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(path_patients_list_dce[idx_pat])
    print 'Read the DCE data for the current patient ...'

    # Create the corresponding ground-truth
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt,
                               path_patients_list_gt[idx_pat])
    print 'Read the GT data for the current patient ...'

    # Load the appropriate normalization object
    filename_norm = (id_patient_list[idx_pat].lower().replace(' ', '_') +
                     '_norm.p')
    dce_norm = StandardTimeNormalization.load_from_pickles(
        os.path.join(path_norm, filename_norm))

                   if os.path.isdir(os.path.join(path_patients, name))]

for id_patient in id_patient_list:
    # Append for the DCE data
    path_patients_list_dce.append(os.path.join(path_patients, id_patient,
                                               path_dce))
    # Append for the GT data - Note that we need a list of gt path
    path_patients_list_gt.append([os.path.join(path_patients, id_patient,
                                                path_gt)])

for p_dce, p_gt, pat in zip(path_patients_list_dce,
                            path_patients_list_gt,
                            id_patient_list):

    print 'Processing #{}'.format(pat)

    # Create the Tofts Extractor
    tofts_ext = ToftsQuantificationExtraction(DCEModality(), T10, CA)

    # Read the DCE
    print 'Read DCE images'
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(p_dce)

    # Read the GT
    print 'Read GT images'
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt, p_gt)

    # Fit the parameters for Tofts
    print 'Extract Tofts parameters'
    tofts_ext.fit(dce_mod, ground_truth=gt_mod, cat=label_gt[0],
                  fit_aif=True)

    # Append for the DCE data
    path_patients_list_dce.append(
        os.path.join(path_patients, id_patient, path_dce))
    # Append for the GT data - Note that we need a list of gt path
    path_patients_list_gt.append(
        [os.path.join(path_patients, id_patient, path_gt)])

# Compute the different AIF
aif_patient = []
aif_time = []
for pat_dce, pat_gt in zip(path_patients_list_dce, path_patients_list_gt):
    print 'Processing {}'.format(pat_dce)

    # Read the DCE
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(pat_dce)

    # Store the time
    aif_time.append(dce_mod.time_info_)
    aif_patient.append(
        ToftsQuantificationExtraction.compute_aif(dce_mod,
                                                  estimator='median'))

# Get the median time to resample later
aif_time = np.array(aif_time)
aif_time_median = np.median(aif_time, axis=0)

# Resample each aif
for idx_aif in range(len(aif_patient)):
    aif_patient[idx_aif] = np.interp(aif_time_median,
                                     aif_time[idx_aif],
                                     aif_patient[idx_aif])

from protoclass.data_management import DCEModality
from protoclass.data_management import GTModality

from protoclass.preprocessing import StandardTimeNormalization

# Define the path for the DCE
path_dce = '/data/prostate/experiments/Patient 383/DCE'
# Define the list of path for the GT
path_gt = ['/data/prostate/experiments/Patient 383/GT_inv/prostate']
# Define the associated list of label for the GT
label_gt = ['prostate']

# Read the DCE
dce_mod = DCEModality()
dce_mod.read_data_from_path(path_dce)

# Read the GT
gt_mod = GTModality()
gt_mod.read_data_from_path(label_gt, path_gt)

# Create the object to normalize the DCE data
dce_norm = StandardTimeNormalization(dce_mod)

# Fit the data to get the normalization parameters
dce_norm.partial_fit_model(dce_mod, ground_truth=gt_mod, cat='prostate')
print dce_norm.model_

# Define the path for the DCE

    # Append for the DCE data
    path_patients_list_dce.append(
        os.path.join(path_patients, id_patient, path_dce))
    # Append for the GT data - Note that we need a list of gt path
    path_patients_list_gt.append(
        [os.path.join(path_patients, id_patient, path_gt)])

# Compute the different AIF
aif_patient = []
aif_time = []
for pat_dce, pat_gt in zip(path_patients_list_dce, path_patients_list_gt):
    print 'Processing {}'.format(pat_dce)

    # Read the DCE
    dce_mod = DCEModality()
    dce_mod.read_data_from_path(pat_dce)

    for idx in range(dce_mod.data_.shape[0]):
        dce_mod.data_[idx, :] += shift[idx]
    dce_mod.update_histogram()

    # Store the time
    aif_time.append(dce_mod.time_info_)
    aif_patient.append(
        ToftsQuantificationExtraction.compute_aif(dce_mod,
                                                  estimator='median'))

# Get the median time to resample later
aif_time = np.array(aif_time)
aif_time_median = np.median(aif_time, axis=0)