Example #1
    def test_validity_check_roi_extraction(self):
        for atlas in AtlasLibrary().ATLAS_DICTIONARY.keys():
            print("Checking atlas {}".format(atlas))
            rois = AtlasLibrary().get_atlas(atlas).roi_list[1:3]
            rois = [roi.label for roi in rois]
            brain_atlas = BrainAtlas(atlas_name=atlas)
            brain_atlas.rois = rois
            X_t = brain_atlas.transform(self.X[0:2])

            name = os.path.join(self.test_folder, atlas + "_" + "-".join(rois))
            brain_atlas._validity_check_roi_extraction(X_t[0], filename=name)
Example #2
    def test_all_atlases(self):
        for atlas in AtlasLibrary().ATLAS_DICTIONARY.keys():
            print("Running tests for atlas {}".format(atlas))
            brain_atlas = PipelineElement(
                "BrainAtlas", atlas_name=atlas, extract_mode="vec"
            )
            brain_atlas.transform(self.X)
Example #3
    def test_brain_atlas_load(self):

        brain_atlas = AtlasLibrary().get_atlas(self.atlas_name)

        # manually load brain atlas
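        # note: newer nibabel releases deprecate get_data() in favor of get_fdata()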
        man_map = image.load_img(os.path.join(self.atlas_folder, 'AAL_SPM12/AAL.nii.gz')).get_data()
        self.assertTrue(np.array_equal(man_map, brain_atlas.map))
Example #4
    def setUp(self):
        super(NeuroTest, self).setUp()
        self.test_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../test_data/')
        self.atlas_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../neuro/atlases/')
        self.atlas_name = "AAL"
        self.roi_list = ["Hippocampus_R", "Hippocampus_L", "Amygdala_L", "Amygdala_R"]
        self.X = AtlasLibrary().get_nii_files_from_folder(self.test_folder, extension=".nii")
        self.y = np.random.randn(len(self.X))
Example #5
    def test_brain_masker(self):

        affine, shape = BrainMask.get_format_info_from_first_image(self.X)
        atlas_obj = AtlasLibrary().get_atlas(self.atlas_name, affine, shape)
        roi_objects = BrainAtlas._get_rois(atlas_obj, which_rois=self.roi_list, background_id=0)

        for roi in roi_objects:
            masker = BrainMask(mask_image=roi, affine=affine, shape=shape, extract_mode="vec")
            own_calculation = masker.transform(self.X[0])
            nilearn_func = NiftiMasker(mask_img=roi.mask, target_affine=affine, target_shape=shape, dtype='float32')
            nilearn_calculation = nilearn_func.fit_transform(self.X[0])

            self.assertTrue(np.array_equal(own_calculation, nilearn_calculation))
Example #6
    def test_single_subject_caching(self):

        nb = NeuroBranch("subject_caching_test")
        # increase complexity by adding batching
        nb += PipelineElement("ResampleImages", batch_size=4)

        test_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                   "../test_data/")
        X = AtlasLibrary().get_nii_files_from_folder(test_folder,
                                                     extension=".nii")
        y = np.random.randn(len(X))

        cache_folder = self.cache_folder_path
        cache_folder = os.path.join(cache_folder, "subject_caching_test")
        nb.base_element.cache_folder = cache_folder

        nr_of_expected_pickles_per_config = len(X)

        def transform_and_check_folder(config, expected_nr_of_files):
            nb.set_params(**config)
            nb.transform(X, y)
            nr_of_generated_cache_files = len(
                glob.glob(os.path.join(cache_folder, "*.p")))
            self.assertTrue(
                nr_of_generated_cache_files == expected_nr_of_files)

        # fit with first config
        # expect one cache file per input file
        transform_and_check_folder({"ResampleImages__voxel_size": 5},
                                   nr_of_expected_pickles_per_config)

        # after fitting with the second config, we expect twice the number of input files in the cache
        transform_and_check_folder({"ResampleImages__voxel_size": 10},
                                   2 * nr_of_expected_pickles_per_config)

        # fit with the first config again; no new cache files should be generated because they already exist
        transform_and_check_folder({"ResampleImages__voxel_size": 5},
                                   2 * nr_of_expected_pickles_per_config)

        # clean up afterwards
        CacheManager.clear_cache_files(cache_folder)
Example #7
    def test_neuro_hyperpipe_parallelized_batched_caching(self):

        test_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                   "../test_data/")
        X = AtlasLibrary().get_nii_files_from_folder(test_folder,
                                                     extension=".nii")
        y = np.random.randn(len(X))

        cache_path = self.cache_folder_path

        self.hyperpipe = Hyperpipe(
            "complex_case",
            inner_cv=KFold(n_splits=5),
            outer_cv=KFold(n_splits=3),
            optimizer="grid_search",
            cache_folder=cache_path,
            metrics=["mean_squared_error"],
            best_config_metric="mean_squared_error",
            output_settings=OutputSettings(project_folder="./tmp"),
        )

        nb = NeuroBranch("SubjectCaching", nr_of_processes=1)
        # increase complexity by adding batching
        nb += PipelineElement("ResampleImages", {"voxel_size": [3, 5, 10]},
                              batch_size=4)
        nb += PipelineElement("BrainMask", batch_size=4)

        self.hyperpipe += nb

        self.hyperpipe += PipelineElement("StandardScaler", {})
        self.hyperpipe += PipelineElement("PCA", {"n_components": [3, 4]})
        self.hyperpipe += PipelineElement("SVR", {"kernel": ["rbf", "linear"]})

        self.hyperpipe.fit(X, y)

        # assert cache is empty again
        nr_of_p_files = len(
            glob.glob(os.path.join(self.hyperpipe.cache_folder, "*.p")))
        print(nr_of_p_files)
        self.assertTrue(nr_of_p_files == 0)
Example #8
def _find_rois(element):
    roi_list = element.base_element.rois
    atlas_obj = AtlasLibrary().get_atlas(element.base_element.atlas_name)
    roi_objects = BrainAtlas._get_rois(atlas_obj, roi_list)
    return [roi.label for roi in roi_objects], atlas_obj
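
# a minimal usage sketch (not from the source), assuming _find_rois is in scope and
# that the element is a PipelineElement-wrapped BrainAtlas as in the other examples;
# the ROI and atlas names are taken from the examples above
atlas_element = PipelineElement(
    "BrainAtlas", rois=["Hippocampus_L", "Amygdala_L"], atlas_name="AAL", extract_mode="vec"
)
roi_labels, atlas_obj = _find_rois(atlas_element)
print(roi_labels)  # the resolved ROI labels of the selected atlas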
Example #9

"""
AVAILABLE ATLASES
    'AAL'
    'HarvardOxford_Cortical_Threshold_25'
    'HarvardOxford_Subcortical_Threshold_25'
    'HarvardOxford_Cortical_Threshold_50'
    'HarvardOxford_Subcortical_Threshold_50'
    'Yeo_7'
    'Yeo_7_Liberal'
    'Yeo_17'
    'Yeo_17_Liberal'
"""
# to list all roi names of a specific atlas, you can do the following
AtlasLibrary().list_rois("AAL")
AtlasLibrary().list_rois("HarvardOxford_Cortical_Threshold_25")
AtlasLibrary().list_rois("HarvardOxford_Subcortical_Threshold_25")

# PICK AN ATLAS
atlas = PipelineElement(
    "BrainAtlas",
    rois=["Hippocampus_L", "Amygdala_L"],
    atlas_name="AAL",
    extract_mode="vec",
    batch_size=20,
)

# EITHER ADD A NEURO BRANCH OR THE ATLAS ITSELF
neuro_branch = NeuroBranch("NeuroBranch")
neuro_branch += atlas
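
# a hedged sketch of the alternative named above: adding the branch (or the atlas
# element itself) to a Hyperpipe; the pipe name and settings are illustrative,
# modelled on the Hyperpipe constructor in Example #7
pipe = Hyperpipe(
    "atlas_pipe",
    optimizer="grid_search",
    metrics=["mean_squared_error"],
    best_config_metric="mean_squared_error",
    inner_cv=KFold(n_splits=3),
    output_settings=OutputSettings(project_folder="./tmp"),
)
pipe += neuro_branch  # option 1: add the whole neuro branch
# pipe += atlas       # option 2: add the BrainAtlas element directly
pipe += PipelineElement("SVR")  # an estimator to complete the pipeline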
Example #10
    def test_combi_from_single_and_group_caching(self):

        # 1. load data
        test_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                   "../test_data/")
        X = AtlasLibrary().get_nii_files_from_folder(test_folder,
                                                     extension=".nii")
        nr_of_expected_pickles_per_config = len(X)
        y = np.random.randn(len(X))

        # 2. specify cache directories
        cache_folder_base = self.cache_folder_path
        cache_folder_neuro = os.path.join(cache_folder_base,
                                          "subject_caching_test")

        CacheManager.clear_cache_files(cache_folder_base)
        CacheManager.clear_cache_files(cache_folder_neuro)

        # 3. set up Neuro Branch
        nb = NeuroBranch("SubjectCaching", nr_of_processes=3)
        # increase complexity by adding batching
        nb += PipelineElement("ResampleImages", batch_size=4)
        nb += PipelineElement("BrainMask", batch_size=4)
        nb.base_element.cache_folder = cache_folder_neuro

        # 4. setup usual pipeline
        ss = PipelineElement("StandardScaler", {})
        pca = PipelineElement("PCA", {"n_components": [3, 10, 50]})
        svm = PipelineElement("SVR", {"kernel": ["rbf", "linear"]})

        pipe = PhotonPipeline([("NeuroBranch", nb), ("StandardScaler", ss),
                               ("PCA", pca), ("SVR", svm)])

        pipe.caching = True
        pipe.fold_id = "12345643463434"
        pipe.cache_folder = cache_folder_base

        def transform_and_check_folder(config, expected_nr_of_files_group,
                                       expected_nr_subject):
            pipe.set_params(**config)
            pipe.fit(X, y)
            nr_of_generated_cache_files = len(
                glob.glob(os.path.join(cache_folder_base, "*.p")))
            self.assertTrue(
                nr_of_generated_cache_files == expected_nr_of_files_group)

            nr_of_generated_cache_files_subject = len(
                glob.glob(os.path.join(cache_folder_neuro, "*.p")))
            self.assertTrue(
                nr_of_generated_cache_files_subject == expected_nr_subject)

        config1 = {
            "NeuroBranch__ResampleImages__voxel_size": 5,
            "PCA__n_components": 7,
            "SVR__C": 2,
        }
        config2 = {
            "NeuroBranch__ResampleImages__voxel_size": 3,
            "PCA__n_components": 4,
            "SVR__C": 5,
        }

        # for the first config, we expect one cache file each for the StandardScaler and the PCA,
        # plus two files (one resampler, one brain mask) per input image
        transform_and_check_folder(config1, 2,
                                   2 * nr_of_expected_pickles_per_config)

        # for the second config, we expect two cache files for the StandardScaler (one for the
        # 5-voxel and one for the 3-voxel input) and two for the PCAs of the first and second config;
        # on the subject level, we expect two files (resampler + brain mask) per input image and per config
        transform_and_check_folder(config2, 4,
                                   4 * nr_of_expected_pickles_per_config)

        # transforming with the first config again should not create any new cache files
        transform_and_check_folder(config1, 4,
                                   4 * nr_of_expected_pickles_per_config)

        # transforming with an empty config is expected to raise a ValueError; otherwise it would
        # create new entries for the PCA and StandardScaler, plus a new cache item per input image
        # for each element in the neuro branch
        with self.assertRaises(ValueError):
            transform_and_check_folder({}, 6,
                                       6 * nr_of_expected_pickles_per_config)

        CacheManager.clear_cache_files(cache_folder_base)
        CacheManager.clear_cache_files(cache_folder_neuro)
Example #11
"""
AVAILABLE ATLASES
    'AAL'
    'HarvardOxford_Cortical_Threshold_25'
    'HarvardOxford_Subcortical_Threshold_25'
    'HarvardOxford_Cortical_Threshold_50'
    'HarvardOxford_Subcortical_Threshold_50'
    'Yeo_7'
    'Yeo_7_Liberal'
    'Yeo_17'
    'Yeo_17_Liberal'
"""
# to list all roi names of a specific atlas, you can do the following
AtlasLibrary().list_rois('AAL')
AtlasLibrary().list_rois('HarvardOxford_Cortical_Threshold_25')
AtlasLibrary().list_rois('HarvardOxford_Subcortical_Threshold_25')

# PICK AN ATLAS
atlas = PipelineElement('BrainAtlas',
                        rois=['Hippocampus_L', 'Amygdala_L'],
                        atlas_name="AAL",
                        extract_mode='vec',
                        batch_size=20)

# EITHER ADD A NEURO BRANCH OR THE ATLAS ITSELF
neuro_branch = NeuroBranch('NeuroBranch')
neuro_branch += atlas

# ADD NEURO ELEMENTS TO HYPERPIPE
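
# the original snippet is truncated here; as a hedged sketch, and assuming `pipe`
# is a Hyperpipe configured as in Example #7, the announced step would read:
pipe += neuro_branch            # or: pipe += atlas, to add the element directly
pipe += PipelineElement('SVR')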
Example #12
    def test_all_masks(self):
        for mask in AtlasLibrary().MASK_DICTIONARY.keys():
            brain_mask = PipelineElement(
                "BrainMask", mask_image=mask, extract_mode="vec"
            )
            brain_mask.transform(self.X)
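
# a hedged companion sketch: the available mask names can be inspected via the same
# dictionary the test iterates over, and a single mask configured from any key
mask_names = list(AtlasLibrary().MASK_DICTIONARY.keys())
print(mask_names)
single_mask = PipelineElement("BrainMask", mask_image=mask_names[0], extract_mode="vec")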
Example #13
"""
AVAILABLE ATLASES
    'AAL'
    'HarvardOxford_Cortical_Threshold_25'
    'HarvardOxford_Subcortical_Threshold_25'
    'HarvardOxford_Cortical_Threshold_50'
    'HarvardOxford_Subcortical_Threshold_50'
    'Yeo_7'
    'Yeo_7_Liberal'
    'Yeo_17'
    'Yeo_17_Liberal'
    'Schaefer2018_*Parcels_*Networks' (replace first asterisk with 100, 200, ..., 1000 and second with 7 or 17)
"""
# to list all roi names of a specific atlas, you can do the following
AtlasLibrary().list_rois("AAL")
AtlasLibrary().list_rois("HarvardOxford_Cortical_Threshold_25")
AtlasLibrary().list_rois("HarvardOxford_Subcortical_Threshold_25")
AtlasLibrary().list_rois("Schaefer2018_100Parcels_7Networks")

# PICK AN ATLAS
# V1.1 ----------------------------------------------------------------
atlas = PipelineElement(
    "BrainAtlas",
    rois=["Hippocampus_L", "Hippocampus_R", "Amygdala_L", "Amygdala_R"],
    atlas_name="AAL",
    extract_mode="vec",
    batch_size=20,
)
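
# a sketch of the matching branch setup, mirroring the V1 branch in Example #14
# (nr_of_processes=3 is illustrative)
neuro_branch_v1 = NeuroBranch("NeuroBranch", nr_of_processes=3)
neuro_branch_v1 += atlas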

Example #14
"""
AVAILABLE ATLASES
    'AAL'
    'HarvardOxford_Cortical_Threshold_25'
    'HarvardOxford_Subcortical_Threshold_25'
    'HarvardOxford_Cortical_Threshold_50'
    'HarvardOxford_Subcortical_Threshold_50'
    'Yeo_7'
    'Yeo_7_Liberal'
    'Yeo_17'
    'Yeo_17_Liberal'
    'Schaefer2018_*Parcels_*Networks' (replace first asterisk with 100, 200, ..., 1000 and second with 7 or 17)
"""
# to list all roi names of a specific atlas, you can do the following
AtlasLibrary().list_rois('AAL')
AtlasLibrary().list_rois('HarvardOxford_Cortical_Threshold_25')
AtlasLibrary().list_rois('HarvardOxford_Subcortical_Threshold_25')
AtlasLibrary().list_rois('Schaefer2018_100Parcels_7Networks')

# PICK AN ATLAS
# V1.1 ----------------------------------------------------------------
atlas = PipelineElement('BrainAtlas',
                        rois=['Hippocampus_L', 'Hippocampus_R', 'Amygdala_L', 'Amygdala_R'],
                        atlas_name="AAL", extract_mode='vec', batch_size=20)


neuro_branch_v1 = NeuroBranch('NeuroBranch', nr_of_processes=3)
neuro_branch_v1 += atlas

# V1.2 ----------------------------------------------------------------