def dummy_test(infile, expfile):
    # load input test data
    ifile = open(infile, "br")
    idic = pickle.load(ifile)
    ifile.close()

    surf = {}
    if "tri" in idic.keys():
        surf["tri"] = idic["tri"]
    if "lat" in idic.keys():
        surf["lat"] = idic["lat"]

    # run mesh_edges
    out_edge = mesh_edges(surf)

    # load expected output data
    efile = open(expfile, "br")
    expdic = pickle.load(efile)
    efile.close()
    exp_edge = expdic["edg"]

    testout = []
    comp = np.allclose(out_edge, exp_edge, rtol=1e-05, equal_nan=True)
    testout.append(comp)

    assert all(testout)
def test_nifti_input():
    nifti = nib.load(
        tflow.get("MNI152Lin", resolution="02", desc="brain", suffix="mask")
    )
    edg = mesh_edges(nifti)
    assert edg.shape[1] == 2
    # edge indices must stay within the number of nonzero mask voxels
    assert np.amax(edg) <= nifti.get_fdata().sum() - 1
def get_meshedge_output(surf, foutname):
    """Runs mesh_edges and saves all relevant output to disk."""
    # run mesh_edges
    surf_out = {}
    surf_out["edg"] = mesh_edges(surf)

    # save the output dictionary as a pickle file
    with open(foutname, "wb") as handle:
        pickle.dump(surf_out, handle, protocol=4)
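# A minimal sketch of how an input/expected-output pickle pair consumed by
# dummy_test above could be produced with get_meshedge_output. The toy surface
# and the file names below are hypothetical, not part of the test suite.
def _generate_dummy_pair_sketch():
    np.random.seed(0)
    surf = {"tri": np.random.randint(1, 10, size=(20, 3))}  # toy triangle mesh

    # input file: the surface dictionary dummy_test reads back
    with open("xstatedg_01_IN.pkl", "wb") as handle:  # hypothetical name
        pickle.dump(surf, handle, protocol=4)

    # expected-output file: {"edg": mesh_edges(surf)}
    get_meshedge_output(surf, "xstatedg_01_OUT.pkl")  # hypothetical name

    # the pair should now pass the comparison in dummy_test
    dummy_test("xstatedg_01_IN.pkl", "xstatedg_01_OUT.pkl")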
def _unmask(self) -> None:
    """Changes all masked parameters to their input dimensions."""
    simple_unmask_parameters = ["t", "coef", "SSE", "r", "ef", "sd", "dfs"]
    for key in simple_unmask_parameters:
        attr = getattr(self, key)
        if attr is not None:
            setattr(self, key, undo_mask(attr, self.mask, axis=1))

    # slm.resl unmask
    if self.resl is not None:
        edges = mesh_edges(self.surf)
        _, idx = _mask_edges(edges, self.mask)
        self.resl = undo_mask(self.resl, idx, axis=0)
def generate_test_data():
    np.random.seed(0)

    # generate the parameters
    tri = np.random.randint(1, 50, size=(100, 3))
    coord = np.random.rand(3, 50)
    edg = mesh_edges({"tri": tri})
    n_edges = edg.shape[0]
    n_vertices = int(tri.shape[0])
    cluster_threshold = np.random.rand()

    mygrid = [
        {
            "num_t": [1, 2, 3],
            "k": [1, 2, 3],
            "df": [1, [1, 1]],
            "mask": [False, True],
            "reselspvert": [None, True],
        },
    ]
    myparamgrid = ParameterGrid(mygrid)

    # Generate data.
    test_num = 0
    for params in myparamgrid:
        I = {
            "tri": tri,
            "edg": edg,
            "thresh": cluster_threshold,
            "t": np.random.random_sample((params["num_t"], n_vertices)),
            "resl": np.random.random_sample((n_edges, 1)),
            "k": params["k"],
            "df": params["df"],
            "coord": coord,
        }
        if params["mask"] is True:
            I["mask"] = np.random.choice(a=[False, True], size=(n_vertices))
        else:
            I["mask"] = np.ones((n_vertices), dtype=bool)
        if params["reselspvert"] is True:
            I["reselspvert"] = np.random.rand(n_vertices)
        else:
            I["reselspvert"] = None

        # Here we go: generate slm & run peak_clus & save in-out
        slm = generate_random_slm(I)
        D = generate_peak_clus_out(slm, I)
        test_num += 1
        params2files(I, D, test_num)
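# ParameterGrid (assumed here to be sklearn.model_selection.ParameterGrid)
# expands the list of option dictionaries into the Cartesian product of all
# settings, so generate_test_data loops over 3 * 3 * 2 * 2 * 2 = 72 parameter
# combinations. A reduced sketch with a smaller, hypothetical grid:
def _parameter_grid_sketch():
    from sklearn.model_selection import ParameterGrid

    small_grid = ParameterGrid([{"num_t": [1, 2], "mask": [False, True]}])
    # Yields the four dictionaries combining every num_t with every mask,
    # e.g. {"num_t": 1, "mask": False}, ..., {"num_t": 2, "mask": True}.
    return list(small_grid)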
def _compute_resls(self, Y):
    """Computes the sum over observations of squares of differences of
    normalized residuals along each edge.

    Parameters
    ----------
    Y : numpy.array
        Response variable residual matrix.

    Returns
    -------
    numpy.array
        Sum over observations of squares of differences of normalized
        residuals along each edge.
    dict
        Dictionary containing the mesh connections in either triangle or
        lattice format. The dictionary's sole key is 'tri' for triangle
        connections or 'lat' for lattice connections.
    """
    if isinstance(self.surf, BSPolyData):
        mesh_connections = {"tri": np.array(get_cells(self.surf)) + 1}
    else:
        key = "tri" if "tri" in self.surf else "lat"
        mesh_connections = {key: self.surf[key]}

    edges = mesh_edges(self.surf, self.mask)
    n_edges = edges.shape[0]

    Y = np.atleast_3d(Y)
    resl = np.zeros((n_edges, Y.shape[2]))

    for j in range(Y.shape[2]):
        normr = np.sqrt(self.SSE[((j + 1) * (j + 2) // 2) - 1])
        for i in range(Y.shape[0]):
            u = Y[i, :, j] / normr
            resl[:, j] += np.diff(u[edges], axis=1).ravel() ** 2

    return resl, mesh_connections
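# A small sketch of the per-edge quantity computed above: for each edge
# (v1, v2) and each variate j, resl[edge, j] is the sum over observations i
# of (u_i[v1] - u_i[v2])**2, where u_i = Y[i, :, j] / normr is the i-th row
# of normalized residuals. The toy edges and residuals below are hypothetical.
def _resls_formula_sketch():
    edges = np.array([[0, 1], [1, 2]])  # two edges on three vertices
    U = np.array([[1.0, 2.0, 4.0],      # normalized residuals,
                  [0.0, 1.0, 3.0]])     # one row per observation

    resl = np.zeros(edges.shape[0])
    for u in U:  # accumulate squared edge differences over observations
        resl += np.diff(u[edges], axis=1).ravel() ** 2

    # resl == [(2-1)**2 + (1-0)**2, (4-2)**2 + (3-1)**2] == [2.0, 8.0]
    return resl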