Example #1
    def test_load_from_file(self):
        X, y = load_breast_cancer(return_X_y=True)
        my_pipe = Hyperpipe(
            'load_results_file_test',
            metrics=['accuracy'],
            best_config_metric='accuracy',
            output_settings=OutputSettings(project_folder='./tmp'))
        my_pipe += PipelineElement("StandardScaler")
        my_pipe += PipelineElement("SVC")
        my_pipe.fit(X, y)

        results_file = os.path.join(my_pipe.output_settings.results_folder,
                                    "photon_result_file.p")
        my_result_handler = ResultsHandler()
        my_result_handler.load_from_file(results_file)
        self.assertIsInstance(my_result_handler.results, MDBHyperpipe)
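
A quick standalone sketch of the same load step outside the test class; the import path and the literal results folder are assumptions (in the test above the folder comes from my_pipe.output_settings.results_folder):

import os

from photonai.processing import ResultsHandler  # import path is an assumption

# placeholder path; normally taken from pipe.output_settings.results_folder as above
results_folder = "./tmp/load_results_file_test_results"

handler = ResultsHandler()
handler.load_from_file(os.path.join(results_folder, "photon_result_file.p"))

# accessor also used in the AtlasMapper examples below
print(handler.get_performance_outer_folds())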
Example #2
    def test_inverse_transform(self):
        settings = OutputSettings(
            project_folder=self.tmp_folder_path, overwrite_results=True
        )

        # DESIGN YOUR PIPELINE
        pipe = Hyperpipe(
            "Limbic_System",
            optimizer="grid_search",
            metrics=["mean_absolute_error"],
            best_config_metric="mean_absolute_error",
            outer_cv=ShuffleSplit(n_splits=1, test_size=0.2),
            inner_cv=ShuffleSplit(n_splits=1, test_size=0.2),
            verbosity=2,
            cache_folder=self.cache_folder_path,
            eval_final_performance=True,
            output_settings=settings,
        )

        # PICK AN ATLAS
        atlas = PipelineElement(
            "BrainAtlas",
            rois=["Hippocampus_L", "Amygdala_L"],
            atlas_name="AAL",
            extract_mode="vec",
            batch_size=20,
        )

        # EITHER ADD A NEURO BRANCH OR THE ATLAS ITSELF
        neuro_branch = NeuroBranch("NeuroBranch")
        neuro_branch += atlas
        pipe += neuro_branch

        pipe += PipelineElement("LinearSVR")

        pipe.fit(self.X, self.y)

        # GET IMPORTANCE SCORES
        handler = ResultsHandler(pipe.results)
        importance_scores_optimum_pipe = handler.results.best_config_feature_importances

        manual_img, _, _ = pipe.optimum_pipe.inverse_transform(
            importance_scores_optimum_pipe, None
        )
        img = image.load_img(
            os.path.join(
                self.tmp_folder_path,
                "Limbic_System_results/optimum_pipe_feature_importances_backmapped.nii.gz",
            )
        )
        self.assertTrue(np.array_equal(manual_img.get_fdata(), img.get_fdata()))
Example #3
    def fit(self, X, y=None, **kwargs):
        if len(self.hyperpipes_to_fit) == 0:
            raise Exception(
                "No hyperpipes to fit. Did you call 'generate_mappings'?")

        # Get data from BrainAtlas first and save to .npz
        # ToDo: currently not supported for hyperparameters inside neurobranch
        self.neuro_element.fit(X)

        # extract regions
        X_extracted, _, _ = self.neuro_element.transform(X)
        X_extracted = self._reshape_roi_data(X_extracted)

        # save neuro branch to file
        joblib.dump(
            self.neuro_element,
            os.path.join(self.folder, "neuro_element.pkl"),
            compress=1,
        )

        hyperpipe_infos = dict()
        hyperpipe_results = dict()

        for roi_name, hyperpipe in self.hyperpipes_to_fit.items():
            hyperpipe.verbosity = self.verbosity
            hyperpipe.fit(X_extracted[self.roi_indices[roi_name]], y, **kwargs)
            hyperpipe_infos[roi_name] = {
                "hyperpipe_name": hyperpipe.name,
                "model_filename": os.path.join(
                    os.path.basename(hyperpipe.output_settings.results_folder),
                    "photon_best_model.photon",
                ),
                "roi_index": self.roi_indices[roi_name],
            }
            hyperpipe_results[roi_name] = ResultsHandler(
                hyperpipe.results).get_performance_outer_folds()

        self.hyperpipe_infos = hyperpipe_infos

        # write results
        with open(
                os.path.join(
                    self.folder,
                    self.original_hyperpipe_name + "_atlas_mapper_meta.json"),
                "w",
        ) as fp:
            json.dump(self.hyperpipe_infos, fp)
        df = pd.DataFrame(hyperpipe_results)
        df.to_csv(
            os.path.join(
                self.folder,
                self.original_hyperpipe_name + "_atlas_mapper_results.csv"))

        # write performance to atlas niftis
        performances = list()

        for roi_name, roi_res in hyperpipe_results.items():
            n_voxels = len(X_extracted[self.roi_indices[roi_name]][0])
            performances.append(
                np.repeat(roi_res[self.best_config_metric], n_voxels))

        backmapped_img, _, _ = self.neuro_element.inverse_transform(
            performances)
        backmapped_img.to_filename(
            os.path.join(self.folder, "atlas_mapper_performances.nii.gz"))

        if self.create_surface_plots:
            self.surface_plots(backmapped_img)
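
A rough sketch of how the artifacts written by this fit method could be read back later; the folder and hyperpipe name are placeholders, while the file names are exactly the ones constructed above:

import json
import os

import joblib
import pandas as pd
from nilearn import image

folder = "./atlas_mapper_output"  # placeholder for self.folder
name = "my_hyperpipe"             # placeholder for self.original_hyperpipe_name

# per-ROI hyperpipe names, model file locations and ROI indices
with open(os.path.join(folder, name + "_atlas_mapper_meta.json")) as fp:
    meta = json.load(fp)

# per-ROI outer-fold performances
results = pd.read_csv(
    os.path.join(folder, name + "_atlas_mapper_results.csv"), index_col=0)

# fitted neuro element and the back-mapped performance image
neuro_element = joblib.load(os.path.join(folder, "neuro_element.pkl"))
performance_img = image.load_img(
    os.path.join(folder, "atlas_mapper_performances.nii.gz"))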
Example #4
# PICK AN ATLAS
atlas = PipelineElement(
    "BrainAtlas",
    rois=["Hippocampus_L", "Amygdala_L"],
    atlas_name="AAL",
    extract_mode="vec",
    batch_size=20,
)

# EITHER ADD A NEURO BRANCH OR THE ATLAS ITSELF
neuro_branch = NeuroBranch("NeuroBranch")
neuro_branch += atlas


# ADD NEURO ELEMENTS TO HYPERPIPE

pipe += neuro_branch

pipe += PipelineElement("LinearSVR")

pipe.fit(X, y)

# GET IMPORTANCE SCORES
handler = ResultsHandler(pipe.results)

# get feature importances (training set) for your best configuration (for all outer folds)
# this function returns the importance scores for the best configuration of each outer fold in a list
importance_scores_outer_folds = handler.get_importance_scores()
importance_scores_optimum_pipe = handler.results.best_config_feature_importances

img, _, _ = pipe.optimum_pipe.inverse_transform(importance_scores_optimum_pipe, None)
img.to_filename("./tmp/best_config_feature_importances.nii.gz")
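
A possible follow-up step, assuming nilearn is installed (Example #2 already imports image from it): plotting the back-mapped importance map that the snippet writes to disk.

from nilearn import plotting

# visualize the back-mapped feature importances written above
display = plotting.plot_stat_map(
    "./tmp/best_config_feature_importances.nii.gz",
    title="Best-config feature importances (back-mapped)",
)
display.savefig("./tmp/best_config_feature_importances.png")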
Example #5
    def fit(self, X, y=None, **kwargs):
        """
        Transform data on NeuroElement and fit hyperpipes.
        :param X: input data
        :param y: targets
        :param kwargs:
        :return:
        """

        # mappings are only generated when fitting from scratch; fitting is
        # disabled when the AtlasMapper was loaded from file/folder
        if not self.hyperpipes_to_fit and self.hyperpipe:
            self._generate_mappings()
        else:
            msg = ("Cannot fit AtlasMapper: it was either loaded from "
                   "file/folder or constructed with hyperpipe as NoneType.")
            logger.error(msg)
            raise ValueError(msg)

        # Get data from BrainAtlas first and save to .npz
        # ToDo: currently not supported for hyperparameters inside neurobranch
        self.neuro_element.fit(X)

        # extract regions
        X_extracted, _, _ = self.neuro_element.transform(X)
        X_extracted = AtlasMapper._reshape_roi_data(X_extracted)

        # save neuro element to file
        joblib.dump(self.neuro_element,
                    os.path.join(self.folder, 'neuro_element.pkl'),
                    compress=1)

        hyperpipe_infos = dict()
        hyperpipe_results = dict()

        # ToDo: parallel fitting
        for roi_name, hyperpipe in self.hyperpipes_to_fit.items():
            hyperpipe.verbosity = self.hyperpipe.verbosity
            hyperpipe.fit(X_extracted[self.roi_indices[roi_name]], y, **kwargs)
            hyperpipe_infos[roi_name] = {
                'hyperpipe_name': hyperpipe.name,
                'model_filename': os.path.join(
                    os.path.basename(hyperpipe.output_settings.results_folder),
                    'photon_best_model.photon'),
                'roi_index': self.roi_indices[roi_name]
            }
            hyperpipe_results[roi_name] = ResultsHandler(
                hyperpipe.results).get_performance_outer_folds()

        self.hyperpipe_infos = hyperpipe_infos

        # write results
        with open(
                os.path.join(self.folder,
                             self.hyperpipe.name + '_atlas_mapper_meta.json'),
                'w') as fp:
            json.dump(self.hyperpipe_infos, fp)
        df = pd.DataFrame(hyperpipe_results)
        df.to_csv(
            os.path.join(self.folder,
                         self.hyperpipe.name + '_atlas_mapper_results.csv'))

        # write performance to atlas niftis
        performances = list()

        for roi_name, roi_res in hyperpipe_results.items():
            n_voxels = len(X_extracted[self.roi_indices[roi_name]][0])
            performances.append(
                np.repeat(
                    roi_res[self.hyperpipe.optimization.best_config_metric],
                    n_voxels))

        backmapped_img, _, _ = self.neuro_element.inverse_transform(
            performances)
        backmapped_img.to_filename(
            os.path.join(self.folder, 'atlas_mapper_performances.nii.gz'))

        if self.create_surface_plots:
            self.surface_plots(backmapped_img)
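
A hedged sketch of loading the per-ROI best models referenced in the meta file written above; the folder and hyperpipe name are placeholders, the ROI results folders are assumed to live inside the mapper folder, and Hyperpipe.load_optimum_pipe is assumed to be the available loader for .photon files:

import json
import os

from photonai.base import Hyperpipe  # import path is an assumption

folder = "./atlas_mapper_output"  # placeholder for self.folder
name = "Limbic_System"            # placeholder for self.hyperpipe.name

with open(os.path.join(folder, name + "_atlas_mapper_meta.json")) as fp:
    meta = json.load(fp)

# 'model_filename' is stored relative to the parent of each ROI results folder (see fit above)
for roi_name, info in meta.items():
    model_path = os.path.join(folder, info["model_filename"])
    best_model = Hyperpipe.load_optimum_pipe(model_path)  # assumed loader for .photon files
    print(roi_name, info["roi_index"], best_model)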