def _fit(self, dataset):
    """Run a Stouffer's z-based meta-analysis on the Dataset's z maps.

    Requires a NiftiMasker, because ROI-averaging maskers would invalidate
    the voxel-wise combination. Returns a dict with "z" and "p" arrays,
    unmasked back to the aggressive mask.
    """
    self.dataset = dataset
    self.masker = self.masker or dataset.masker
    if not isinstance(self.masker, NiftiMasker):
        raise ValueError(
            f"A {type(self.masker)} mask has been detected. "
            "Only NiftiMaskers are allowed for this Estimator. "
            "This is because aggregation, such as averaging values across ROIs, "
            "will produce invalid results."
        )

    z_maps = self.inputs_["z_maps"]
    if self.use_sample_size:
        # Weight each study by sqrt(mean sample size), tiled across voxels.
        mean_ns = np.array([np.mean(n) for n in self.inputs_["sample_sizes"]])
        study_weights = np.sqrt(mean_ns)
        weight_maps = np.tile(study_weights, (z_maps.shape[1], 1)).T
        data = pymare.Dataset(y=z_maps, v=weight_maps)
    else:
        data = pymare.Dataset(y=z_maps)

    estimator = pymare.estimators.StoufferCombinationTest()
    estimator.fit_dataset(data)
    summary = estimator.summary()

    mask = self.inputs_["aggressive_mask"]
    return {
        "z": _boolean_unmask(summary.z.squeeze(), mask),
        "p": _boolean_unmask(summary.p.squeeze(), mask),
    }
def _fit(self, dataset):
    """Fit a variance-based likelihood meta-regression to beta/varcope maps.

    Warns (but proceeds) when the masker is not a NiftiMasker, since
    voxel-averaging masks will likely bias the results. Returns a dict with
    "tau2", "z", "p", and "est" arrays, unmasked to the aggressive mask.
    """
    self.dataset = dataset
    self.masker = self.masker or dataset.masker
    if not isinstance(self.masker, NiftiMasker):
        LGR.warning(
            f"A {type(self.masker)} mask has been detected. "
            "Masks which average across voxels will likely produce biased results when used "
            "with this Estimator."
        )

    estimator = pymare.estimators.VarianceBasedLikelihoodEstimator(method=self.method)
    data = pymare.Dataset(
        y=self.inputs_["beta_maps"],
        v=self.inputs_["varcope_maps"],
    )
    estimator.fit_dataset(data)
    summary = estimator.summary()

    fe_stats = summary.get_fe_stats()
    mask = self.inputs_["aggressive_mask"]
    return {
        "tau2": _boolean_unmask(summary.tau2.squeeze(), mask),
        "z": _boolean_unmask(fe_stats["z"].squeeze(), mask),
        "p": _boolean_unmask(fe_stats["p"].squeeze(), mask),
        "est": _boolean_unmask(fe_stats["est"].squeeze(), mask),
    }
def _fit(self, dataset):
    """Fit a sample-size-based likelihood meta-regression to beta maps.

    Study-level n maps are built by tiling each study's mean sample size
    across voxels. Returns a dict with "tau2", "z", "p", and "est" arrays,
    unmasked to the aggressive mask.
    """
    self.dataset = dataset
    self.masker = self.masker or dataset.masker
    # Consistency fix: the sibling likelihood-based Estimators in this file
    # warn when given a non-NiftiMasker masker, because voxel-averaging masks
    # will likely bias results. This method previously accepted them silently.
    if not isinstance(self.masker, NiftiMasker):
        LGR.warning(
            f"A {type(self.masker)} mask has been detected. "
            "Masks which average across voxels will likely produce biased results when used "
            "with this Estimator."
        )

    # One mean sample size per study, replicated across all voxels.
    sample_sizes = np.array([np.mean(n) for n in self.inputs_["sample_sizes"]])
    n_maps = np.tile(sample_sizes, (self.inputs_["beta_maps"].shape[1], 1)).T
    pymare_dset = pymare.Dataset(y=self.inputs_["beta_maps"], n=n_maps)
    est = pymare.estimators.SampleSizeBasedLikelihoodEstimator(method=self.method)
    est.fit_dataset(pymare_dset)
    est_summary = est.summary()
    fe_stats = est_summary.get_fe_stats()
    results = {
        "tau2": _boolean_unmask(est_summary.tau2.squeeze(), self.inputs_["aggressive_mask"]),
        "z": _boolean_unmask(fe_stats["z"].squeeze(), self.inputs_["aggressive_mask"]),
        "p": _boolean_unmask(fe_stats["p"].squeeze(), self.inputs_["aggressive_mask"]),
        "est": _boolean_unmask(fe_stats["est"].squeeze(), self.inputs_["aggressive_mask"]),
    }
    return results
def _fit(self, dataset):
    """Fit a weighted-least-squares meta-regression with fixed tau^2.

    Warns (but proceeds) when the masker is not a NiftiMasker. Returns a
    dict with "z", "p", and "est" arrays, unmasked to the aggressive mask.
    """
    self.dataset = dataset
    self.masker = self.masker or dataset.masker
    if not isinstance(self.masker, NiftiMasker):
        LGR.warning(
            f"A {type(self.masker)} mask has been detected. "
            "Masks which average across voxels will likely produce biased results when used "
            "with this Estimator."
        )

    data = pymare.Dataset(
        y=self.inputs_["beta_maps"],
        v=self.inputs_["varcope_maps"],
    )
    estimator = pymare.estimators.WeightedLeastSquares(tau2=self.tau2)
    estimator.fit_dataset(data)
    summary = estimator.summary()

    # tau2 is a float, not a map, so it can't go in the results dictionary
    fe_stats = summary.get_fe_stats()
    mask = self.inputs_["aggressive_mask"]
    return {
        "z": _boolean_unmask(fe_stats["z"].squeeze(), mask),
        "p": _boolean_unmask(fe_stats["p"].squeeze(), mask),
        "est": _boolean_unmask(fe_stats["est"].squeeze(), mask),
    }
def _fit(self, dataset):
    """Run a Stouffer's z-based meta-analysis on the z maps.

    When ``self.use_sample_size`` is True, each study is weighted by the
    square root of its mean sample size. Returns a dict with "z" and "p"
    arrays from the estimator's summary.
    """
    if self.use_sample_size:
        # One weight per study (sqrt of mean n), tiled across all voxels.
        sample_sizes = np.array([np.mean(n) for n in self.inputs_["sample_sizes"]])
        weights = np.sqrt(sample_sizes)
        weight_maps = np.tile(weights, (self.inputs_["z_maps"].shape[1], 1)).T
        pymare_dset = pymare.Dataset(y=self.inputs_["z_maps"], v=weight_maps)
    else:
        pymare_dset = pymare.Dataset(y=self.inputs_["z_maps"])
    est = pymare.estimators.Stouffers(input="z")
    # Bug fix: PyMARE estimators' ``fit`` takes raw arrays; a ``pymare.Dataset``
    # must be passed to ``fit_dataset`` (as the other estimators here do).
    est.fit_dataset(pymare_dset)
    est_summary = est.summary()
    results = {
        "z": est_summary.z,
        "p": est_summary.p,
    }
    return results
def _fit(self, dataset):
    """Combine the z maps with Fisher's method and return "z"/"p" arrays."""
    data = pymare.Dataset(y=self.inputs_["z_maps"])
    estimator = pymare.estimators.FisherCombinationTest()
    estimator.fit_dataset(data)
    summary = estimator.summary()
    return {"z": summary.z, "p": summary.p}
def _fit(self, dataset):
    """Fit a Hedges meta-regression to the beta and varcope maps.

    Returns a dict with "tau2" plus the fixed-effect "z", "p", and "est"
    statistics from the estimator's summary.
    """
    est = pymare.estimators.Hedges()
    pymare_dset = pymare.Dataset(
        y=self.inputs_["beta_maps"],
        v=self.inputs_["varcope_maps"],
    )
    # Bug fix: PyMARE estimators' ``fit`` takes raw arrays; a ``pymare.Dataset``
    # must be passed to ``fit_dataset``.
    est.fit_dataset(pymare_dset)
    est_summary = est.summary()
    fe_stats = est_summary.get_fe_stats()
    results = {
        "tau2": est_summary.tau2,
        "z": fe_stats["z"],
        "p": fe_stats["p"],
        "est": fe_stats["est"],
    }
    return results
def _fit(self, dataset):
    """Fit a variance-based likelihood meta-regression to beta/varcope maps.

    Returns a dict with "tau2" plus the fixed-effect "z", "p", and "est"
    statistics from the estimator's summary.
    """
    est = pymare.estimators.VarianceBasedLikelihoodEstimator(method=self.method)
    pymare_dset = pymare.Dataset(
        y=self.inputs_["beta_maps"],
        v=self.inputs_["varcope_maps"],
    )
    # Bug fix: PyMARE estimators' ``fit`` takes raw arrays; a ``pymare.Dataset``
    # must be passed to ``fit_dataset``.
    est.fit_dataset(pymare_dset)
    est_summary = est.summary()
    fe_stats = est_summary.get_fe_stats()
    results = {
        "tau2": est_summary.tau2,
        "z": fe_stats["z"],
        "p": fe_stats["p"],
        "est": fe_stats["est"],
    }
    return results
def _fit(self, dataset):
    """Fit a sample-size-based likelihood meta-regression to the beta maps.

    Study-level n maps are built by tiling each study's mean sample size
    across voxels. Returns a dict with "tau2" plus the fixed-effect "z",
    "p", and "est" statistics from the estimator's summary.
    """
    # One mean sample size per study, replicated across all voxels.
    sample_sizes = np.array([np.mean(n) for n in self.inputs_["sample_sizes"]])
    n_maps = np.tile(sample_sizes, (self.inputs_["beta_maps"].shape[1], 1)).T
    pymare_dset = pymare.Dataset(y=self.inputs_["beta_maps"], n=n_maps)
    est = pymare.estimators.SampleSizeBasedLikelihoodEstimator(method=self.method)
    # Bug fix: PyMARE estimators' ``fit`` takes raw arrays; a ``pymare.Dataset``
    # must be passed to ``fit_dataset``.
    est.fit_dataset(pymare_dset)
    est_summary = est.summary()
    fe_stats = est_summary.get_fe_stats()
    results = {
        "tau2": est_summary.tau2,
        "z": fe_stats["z"],
        "p": fe_stats["p"],
        "est": fe_stats["est"],
    }
    return results
def _fit(self, dataset):
    """Combine the z maps with Fisher's method.

    Requires a NiftiMasker, because ROI-averaging maskers would invalidate
    the voxel-wise combination. Returns a dict with "z" and "p" arrays,
    unmasked back to the aggressive mask.
    """
    masker = self.masker or dataset.masker
    if not isinstance(masker, NiftiMasker):
        raise ValueError(
            f"A {type(masker)} mask has been detected. "
            "Only NiftiMaskers are allowed for this Estimator. "
            "This is because aggregation, such as averaging values across ROIs, "
            "will produce invalid results."
        )

    data = pymare.Dataset(y=self.inputs_["z_maps"])
    estimator = pymare.estimators.FisherCombinationTest()
    estimator.fit_dataset(data)
    summary = estimator.summary()

    mask = self.inputs_["aggressive_mask"]
    return {
        "z": boolean_unmask(summary.z.squeeze(), mask),
        "p": boolean_unmask(summary.p.squeeze(), mask),
    }
(lower_bound, upper_bound), (N_STUDIES + 1, N_STUDIES + 1), color="green", linewidth=3, label="Between-Study 95% CI", ) ax.set_ylim((0, N_STUDIES + 2)) ax.set_xlabel("Mean (95% CI)") ax.set_ylabel("Study") ax.legend() fig.tight_layout() ############################################################################### # Create a Dataset object containing the data # -------------------------------------------- dset = pymare.Dataset(y=y, X=None, v=v, n=n, add_intercept=True) # Here is a dictionary to house results across models results = {} ############################################################################### # Fit models # ----------------------------------------------------------------------------- # When you have ``z`` or ``p``: # # - :class:`pymare.estimators.Stouffers` # - :class:`pymare.estimators.Fishers` # # When you have ``y`` and ``v`` and don't want to estimate between-study variance: # # - :class:`pymare.estimators.WeightedLeastSquares`