Esempio n. 1
0
    def _run_interface(self, runtime):
        """Combine per-run GLM fits into a sandwich-style variance estimate.

        Averages the run-wise beta images into ``sandwiched_beta.nii.gz``
        and accumulates one sandwich covariance term per design matrix
        into ``sandwiched_variance.nii.gz`` (both written to the cwd).
        """

        beta_images = [nb.load(beta) for beta in self.inputs.betas]
        residuals = [nb.load(r).get_data() for r in self.inputs.residuals]
        design_matrices = [h5load(dm) for dm in self.inputs.design_matrices]

        # Simple voxel-wise average of the run-wise beta maps.
        mean_beta = np.mean([beta.get_data() for beta in beta_images], 0)
        nb.save(
            nb.Nifti1Image(mean_beta, beta_images[0].get_affine(), beta_images[0].get_header()),
            "sandwiched_beta.nii.gz",
        )

        # V accumulates a square slab per voxel (trailing axis duplicated);
        # W is the per-voxel residual sum of squares along the last axis,
        # summed over runs with an unbiased (n - 1) denominator.
        V = np.zeros(mean_beta.shape + (mean_beta.shape[-1],))
        W = np.sum([bottleneck.ss(r, -1) for r in residuals], 0) / (len(residuals) - 1)

        for X, resid in zip(design_matrices, residuals):
            # W = resid.T.dot(resid)
            X_T_inv = np.linalg.pinv(np.dot(X.T, X))
            # The outer product spreads (X'X)^+ X' across every voxel's W;
            # the reshape + dot pair then closes the sandwich with X (X'X)^+.
            # NOTE(review): assumes the flattened outer product reshapes
            # consistently with V's layout — confirm against the producers
            # of these inputs.
            top_sandwich = np.outer(np.dot(X_T_inv, X.T), W).T.reshape((np.prod(W.shape), X_T_inv.shape[1], X.shape[0]))
            sandwich = np.dot(top_sandwich, np.dot(X, X_T_inv))
            V = V + sandwich.reshape(V.shape)

        V = V / len(design_matrices)

        nb.save(
            nb.Nifti1Image(V, beta_images[0].get_affine(), beta_images[0].get_header()), "sandwiched_variance.nii.gz"
        )

        return runtime
Esempio n. 2
0
    def _run_interface(self, runtime):
        """Fit a voxel-wise OLS GLM to a single 4D image.

        Loads the data and the design matrix, solves the least-squares
        problem for every voxel, and writes ``betas.nii.gz``,
        ``variances.nii.gz`` and ``residuals.nii.gz`` to the working
        directory.
        """

        image = nb.load(self.inputs.data)
        data = image.get_data()
        design_matrix = h5load(self.inputs.design_matrix)

        X = design_matrix

        # Debug output. print() as a function call: the original
        # Python-2-only `print X.shape, ...` statement is a syntax error
        # under Python 3.
        print(X.shape, data.shape)

        # beta = (X'X)^+ X' y; pinv guards against a singular design.
        X_T_inv = np.linalg.pinv(np.dot(X.T, X))
        calc_beta = np.dot(X_T_inv, X.T)

        # Collapse all spatial axes so time is the trailing axis of data,
        # then solve all voxels in one matrix product.
        beta = np.dot(calc_beta, data.reshape(np.prod(data.shape[:-1]), data.shape[-1]).T)

        predicted = np.dot(X, beta)
        predicted = predicted.T.reshape((data.shape[:-1] + (X.shape[0],)))

        beta = beta.T.reshape(data.shape[:-1] + (X.shape[1],))
        resid = data - predicted
        ss = bottleneck.ss(resid, -1)  # per-voxel residual sum of squares

        # ss = resid.T.dot(resid)

        # Spread the flattened (X'X)^+ over each voxel's ss to get the
        # per-voxel OLS variance terms.
        ols_var = np.outer(X_T_inv, ss)

        ols_var = ols_var.T.reshape(data.shape[:-1] + (ols_var.shape[0],))

        nb.save(nb.Nifti1Image(beta, image.get_affine(), image.get_header()), os.path.abspath("betas.nii.gz"))
        nb.save(nb.Nifti1Image(ols_var, image.get_affine(), image.get_header()), os.path.abspath("variances.nii.gz"))
        nb.save(nb.Nifti1Image(resid, image.get_affine(), image.get_header()), os.path.abspath("residuals.nii.gz"))

        return runtime
Esempio n. 3
0
 def fitness(self, aim, a):
     """Row-wise fitness of candidate matrix ``a`` against target ``aim``.

     The metric is selected by ``self.fittness_function``:
     ``'ssd'`` sum of squared differences, ``'ssd_bn'`` the same via
     bottleneck, ``'max'`` the largest absolute deviation per row,
     ``'abs'`` the sum of absolute deviations. Any other value falls
     through and returns None (original behaviour preserved).
     """
     metric = self.fittness_function
     diff = a - aim[np.newaxis, :]
     if metric == 'ssd':
         return np.sum(diff * diff, axis=1)
     if metric == 'ssd_bn':
         return bn.ss(diff)
     if metric == 'max':
         # equivalent to np.max(np.abs(diff), axis=1), kept in the
         # original's faster two-pass form
         high = np.max(diff, axis=1)
         low = np.abs(np.min(diff, axis=1))
         return np.maximum(high, low)
     if metric == 'abs':
         return np.sum(np.abs(diff), axis=1)
Esempio n. 4
0
 def brute_radius_search(self, v, radius2=None, limit=None):
     """Exhaustive nearest-neighbour search against ``self.data``.

     Computes squared Euclidean distances from ``v`` to every stored row,
     optionally keeps only rows with distance < ``radius2`` and/or the
     ``limit`` closest, and returns a list of ``(distance2, id)`` pairs
     sorted by increasing distance.
     """
     v = v.flatten().astype(self._data_dtype)
     # ||x - v||^2 = ||x||^2 + ||v||^2 - 2 x.v, with row norms precomputed
     # in the 'norm2' dataset.
     v_norm2 = np.dot(v, v)  # same as sum(v * v)
     d_norm2 = self.get_dataset('norm2', mmap_mode='r')
     dists = d_norm2 + v_norm2 - 2 * np.dot(self.data, v)
     #assert dists.ndim == 1 and not bottleneck.anynan(dists)
     ids = self.ids
     if radius2:
         mask = (dists < radius2)
         dists = dists[mask]
         ids = ids[mask]
     if limit:
         if limit == 1:
             # NOTE(review): raises if the radius filter left nothing;
             # confirm callers guarantee a non-empty candidate set.
             imin = np.argmin(dists)
             return [(dists[imin], ids[imin])]
         else:
             # bottleneck.argpartsort was removed in bottleneck 1.0;
             # np.argpartition gives the same "limit smallest, unordered"
             # prefix (kth = limit - 1 pivots the last kept element).
             smallest_indices = np.argpartition(dists, limit - 1)[:limit]
             dists = dists[smallest_indices]
             ids = ids[smallest_indices]
     order = np.argsort(dists)
     return [(dists[i], ids[i]) for i in order]
Esempio n. 5
0
 def time_ss(self, dtype, shape):
     """Benchmark ``bn.ss`` over the whole pre-built array.

     ``dtype`` and ``shape`` only parametrize the benchmark fixture;
     ``self.arr`` is created elsewhere by the harness setup.
     """
     bn.ss(self.arr)
Esempio n. 6
0
 def time_ss(self, dtype, shape, order, axis):
     """Benchmark ``bn.ss`` along one axis of the pre-built array.

     ``dtype``, ``shape`` and ``order`` only parametrize the benchmark
     fixture (``self.arr`` is created elsewhere by the harness setup);
     ``axis`` is forwarded to the reduction.
     """
     bn.ss(self.arr, axis=axis)
Esempio n. 7
0
 def __call__(self, *arg):
     """Sum of squared, error-weighted residuals (chi-square objective).

     Evaluates ``self.f(x, *arg)`` at every sample point in ``self.x``,
     subtracts the observations ``self.y``, scales by ``self.y_err`` and
     returns the sum of squares of the result.
     """
     model = np.array([self.f(pt, *arg) for pt in self.x])
     scaled_resid = (model - self.y) / self.y_err
     return bn.ss(scaled_resid)
Esempio n. 8
0
    def _run_interface(self, runtime):
        """Fit a group GLM over several runs and compare OLS vs. sandwich
        variance estimates.

        Builds a one-condition event-related design from the shared onsets,
        fits OLS on the mean of all runs, accumulates per-run residual sums
        of squares for a sandwich-style estimator, and writes beta, both
        variance images, a z map and the residuals to the cwd.
        """

        data_list = [nb.load(fn).get_data() for fn in self.inputs.data_files]
        onsets = self.inputs.onsets

        # Single-condition design sampled at the scan times (TR spacing).
        paradigm = EventRelatedParadigm(["a"] * len(onsets), onsets)
        frametimes = np.arange(0, data_list[0].shape[-1] * self.inputs.TR, self.inputs.TR)
        X, names = dm.dmtx_light(frametimes, paradigm, drift_model="polynomial", hfcut=128, hrf_model="canonical")

        self.X = X

        # beta = (X'X)^+ X' y; pinv guards against a singular design.
        X_T_inv = np.linalg.pinv(np.dot(X.T, X))
        calc_beta = np.dot(X_T_inv, X.T)

        # Do OLS
        mean_data = np.mean(data_list, 0)
        self.ols_beta = np.dot(calc_beta, mean_data.reshape(np.prod(mean_data.shape[:-1]), mean_data.shape[-1]).T)
        predicted = np.dot(X, self.ols_beta).T.reshape(mean_data.shape)
        resid = mean_data - predicted
        ss = bottleneck.ss(resid, -1)  # per-voxel residual sum of squares
        self.ols_var = np.outer(X_T_inv, ss)

        # Create individual residuals for sandwhich:
        self.sss = np.zeros(ss.shape)
        self.residuals = []
        for data in data_list:
            # NOTE(review): `beta` is computed here but never used below;
            # the prediction is rebuilt from self.ols_beta and the residual
            # is taken against mean_data rather than this run's data, so
            # every iteration appends the same residual. Possibly intended:
            # predict from `beta` and use `resid = data - predicted` —
            # confirm before relying on the sandwich output.
            beta = np.dot(calc_beta, data.reshape(np.prod(data.shape[:-1]), mean_data.shape[-1]).T)
            self.predicted = np.dot(X, self.ols_beta).T.reshape(mean_data.shape)
            resid = mean_data - self.predicted
            self.residuals.append(resid)
            self.sss += bottleneck.ss(resid, -1)

        # Unbiased (n - 1) denominator only when there are enough runs.
        if len(data_list) > 5:
            self.sss = self.sss / (len(data_list) - 1)
        else:
            self.sss = self.sss / len(data_list)

        self.sandwich_var = np.outer(np.dot(calc_beta, calc_beta.T), self.sss) / len(data_list)

        self.contrasts = np.array(self.inputs.contrasts)

        self.residuals = np.array(self.residuals).swapaxes(0, -1)

        self.sandwich_var = self.sandwich_var.T.reshape(mean_data.shape[:-1] + (-1,))

        self.ols_beta = self.ols_beta.T.reshape(mean_data.shape[:-1] + (-1,))
        self.ols_var = self.ols_var.T.reshape(mean_data.shape[:-1] + (-1,))

        # z = beta / (sqrt(var) / sqrt(n)) for the first regressor, using
        # whichever variance estimate was requested.
        if self.inputs.variance_to_use == "ols":
            self.z = (self.ols_beta[:, :, 0] / (np.sqrt(self.ols_var[:, :, 0]) / np.sqrt(len(data_list)))).squeeze()
        else:
            self.z = (
                self.ols_beta[:, :, 0] / (np.sqrt(self.sandwich_var[:, :, 0]) / np.sqrt(len(data_list)))
            ).squeeze()

        self.z = self.z.T.reshape(mean_data.shape[:-1] + (-1,))

        # Identity affines: outputs are written in voxel space.
        nb.save(nb.Nifti1Image(self.ols_beta, np.identity(4)), "ols_beta.nii.gz")
        nb.save(nb.Nifti1Image(self.ols_var, np.identity(4)), "ols_var.nii.gz")
        nb.save(nb.Nifti1Image(self.sandwich_var, np.identity(4)), "sandwich_var.nii.gz")
        nb.save(nb.Nifti1Image(self.z, np.identity(4)), "z_%s.nii.gz" % self.inputs.variance_to_use)
        nb.save(nb.Nifti1Image(self.residuals, np.identity(4)), "residuals.nii.gz")

        save(self.X, "design_matrix.hdf5")

        return runtime