def vb_index_internal_loop(i0, iN, surf_faces, data, norm, print_progress=False):
    """Computes the Vogt-Bailey index of vertices in a given range

    Parameters
    ----------
    i0: integer
        Index of the first vertex to be analysed
    iN: integer
        Index of the last vertex to be analysed
    surf_faces: (M, 3) numpy array
        Faces of the mesh. Used to find the neighborhood of a given vertex
    data: (M, N) numpy array
        Data used to calculate the VB index. M must match the number of
        vertices in the mesh
    norm: string
        Method of reordering. Possibilities are 'geig', 'unnorm', 'rw' and 'sym'
    print_progress: boolean
        Print the current progress of the computation

    Returns
    -------
    loc_result: (iN - i0) numpy array
        Resulting VB index of the vertices in the range. Will have length iN - i0
    """

    # Calculate how many vertices we will compute
    diff = iN - i0
    loc_result = np.zeros(diff)
    for idx in range(diff):
        # Calculate the real index
        i = idx + i0

        # Get neighborhood and its data
        # TODO: Make this elegant
        neighbour_idx = np.array(np.sum(surf_faces == i, 1), bool)
        I = np.unique(surf_faces[neighbour_idx, :])
        neighborhood = data[I]
        if len(neighborhood) == 0:
            print("Warning: no neighborhood")
            loc_result[idx] = np.nan
            continue

        # Calculate the eigenvalues
        affinity = m.create_affinity_matrix(neighborhood)
        _, _, _, eigenvalues, _ = m.spectral_reorder(affinity, norm)
        normalisation_factor = np.average(eigenvalues[1:])

        # Store the result of this run
        loc_result[idx] = eigenvalues[1] / normalisation_factor

        if print_progress:
            global counter
            global n
            with counter.get_lock():
                counter.value += 1
            print("{}/{}".format(counter.value, n))

    return loc_result
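
# Hypothetical usage sketch (not part of the toolbox API): this worker is meant
# to be dispatched over vertex ranges, but it can be called directly. The toy
# mesh and data below are made up for illustration; the module is assumed to
# import numpy as np and the affinity/spectral helpers as m.
#
#     surf_faces = np.array([[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]])
#     data = np.random.rand(4, 50)   # 4 vertices, 50 maps/time points
#     result = vb_index_internal_loop(0, 4, surf_faces, data, norm='geig')
#     # result has shape (4,): one VB index per vertex in the range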


def vb_cluster_internal_loop(idx_cluster_0, idx_cluster_N, surf_faces, data, cluster_index, norm, print_progress=False):
    """Computes the Vogt-Bailey index of vertices of given clusters

    Parameters
    ----------
    idx_cluster_0: integer
        Index of the first cluster to be analysed
    idx_cluster_N: integer
        Index of the last cluster to be analysed
    surf_faces: (M, 3) numpy array
        Faces of the mesh. Used to find the neighborhood of a given vertex
    data: (M, N) numpy array
        Data used to calculate the VB index. M must match the number of
        vertices in the mesh
    cluster_index: (M) numpy array
        Array containing the cluster to which each vertex belongs
    norm: string
        Method of reordering. Possibilities are 'geig', 'unnorm', 'rw' and 'sym'
    print_progress: boolean
        Print the current progress of the computation

    Returns
    -------
    loc_result: list of pairs of (float, (N) numpy array)
        Resulting VB index and eigenvectors of the clusters in the range
    """

    # Calculate how many clusters we will compute
    diff = idx_cluster_N - idx_cluster_0
    loc_result = []
    cluster_labels = np.unique(cluster_index)
    for idx in range(diff):
        # Calculate the real index
        i = idx + idx_cluster_0
        # Cluster label 0 is not analysed
        if cluster_labels[i] == 0:
            loc_result.append(([], []))
            continue

        # Get neighborhood and its data
        neighborhood = data[cluster_index == cluster_labels[i]]

        # Calculate the eigenvalues
        affinity = m.create_affinity_matrix(neighborhood)
        _, _, _, eigenvalues, eigenvectors = m.spectral_reorder(affinity, norm)
        normalisation_factor = sum(eigenvalues) / (len(eigenvalues) - 1)

        # Store the result of this run.
        # Warning: the eigenvectors will not all be the same size,
        # as the clusters might be of different sizes
        val = eigenvalues[1] / normalisation_factor
        vel = eigenvectors[:, 1]
        loc_result.append((val, vel))

        if print_progress:
            global counter
            global n
            with counter.get_lock():
                counter.value += 1
            if counter.value % 1000 == 0:
                print("{}/{}".format(counter.value, n))

    return loc_result
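
# Hypothetical usage sketch: each entry of cluster_index assigns a vertex to a
# cluster label, and one (VB index, second eigenvector) pair is returned per
# label in the range. The inputs below are made up for illustration.
#
#     data = np.random.rand(6, 50)                   # 6 vertices, 50 maps
#     cluster_index = np.array([1, 1, 1, 2, 2, 2])   # two clusters
#     surf_faces = np.array([[0, 1, 2], [3, 4, 5]])
#     pairs = vb_cluster_internal_loop(0, 2, surf_faces, data,
#                                      cluster_index, norm='geig')
#     # pairs[k] is (VB index of cluster k, second eigenvector of cluster k)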


def vb_hybrid_internal_loop(i0, iN, surf_vertices, brain_mask, data, norm, print_progress=False):
    """Computes the Vogt-Bailey index of vertices in a given range

    Parameters
    ----------
    i0: integer
        Index of the first vertex to be analysed
    iN: integer
        iN - 1 is the index of the last vertex to be analysed
    surf_vertices: (M, 3) numpy array
        Coordinates of the vertices of the mesh in voxel space
    brain_mask: (nRows, nCols, nSlices) numpy array
        Whole-brain mask. Used to mask the volumetric data
    data: (nRows, nCols, nSlices, N) numpy array
        Volumetric data used to calculate the VB index. N is the number of maps
    norm: string
        Method of reordering. Possibilities are 'geig', 'unnorm', 'rw' and 'sym'
    print_progress: boolean
        Print the current progress of the computation

    Returns
    -------
    loc_result: (iN - i0) numpy array
        Resulting VB index of the vertices in the range. Will have length iN - i0
    """

    # Calculate how many vertices we will compute
    diff = iN - i0
    loc_result = np.zeros(diff)
    for idx in range(diff):
        # Calculate the real index
        i = idx + i0

        try:
            # Get the voxel neighborhood of the vertex and its data
            neighborhood = get_neighborhood(data, surf_vertices[i, :], brain_mask)
            if len(neighborhood) == 0:
                print("Warning: no neighborhood")
                loc_result[idx] = np.nan
                continue

            affinity = m.create_affinity_matrix(neighborhood)
            if affinity.shape[0] > 3:
                # Calculate the second smallest eigenvalue
                _, _, eigenvalue, _ = m.spectral_reorder(affinity, norm)
                # Store the result of this run
                loc_result[idx] = eigenvalue
            else:
                # Neighborhood too small for a meaningful estimate
                loc_result[idx] = np.nan
        except m.TimeSeriesTooShortError as error:
            raise error
        except Exception:
            traceback.print_exc()
            loc_result[idx] = np.nan

        if print_progress:
            global counter
            global n
            with counter.get_lock():
                counter.value += 1
            if counter.value % 1000 == 0:
                print("{}/{}".format(counter.value, n))

    return loc_result
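
# Hypothetical usage sketch: here the neighborhood comes from the volume rather
# than the mesh, so vertex coordinates must already be in voxel space and a
# whole-brain mask is required. The shapes below are made up; the helper
# get_neighborhood is assumed to be defined elsewhere in this module.
#
#     surf_vertices = np.array([[10.2, 22.7, 31.5]])     # one vertex
#     brain_mask = np.ones((64, 64, 36), dtype=bool)
#     data = np.random.rand(64, 64, 36, 100)             # 100 volumes
#     result = vb_hybrid_internal_loop(0, 1, surf_vertices, brain_mask,
#                                      data, norm='geig')
#     # result has shape (1,); entries are np.nan where the neighborhood
#     # was empty or had fewer than 4 voxels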