def run_main(n_sub, input_type, n_samples=200, window=15, peak_height=2):
    group_data, hdr, zero_mask = load_data_and_stack(n_sub, input_type, False)
    group_data_gs, _, _ = load_data_and_stack(n_sub, input_type, True)
    global_signal = zscore(compute_global_signal(group_data))
    gs_peaks = find_comp_peaks(global_signal, peak_height)
    gs_selected_peaks = select_peaks(gs_peaks, window, window,
                                     group_data.shape[0], n_samples)
    # Average window around each selected global-signal peak
    peak_avg = average_peak_window(gs_selected_peaks, group_data,
                                   window, window)
    peak_avg_gs = average_peak_window(gs_selected_peaks, group_data_gs,
                                      window, window)
    gs_map = compute_gs_map(global_signal, group_data)
    write_results(peak_avg, peak_avg_gs, global_signal,
                  gs_map[np.newaxis, :], hdr, input_type, zero_mask)
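
# A minimal, self-contained sketch of the peak-window averaging idea above.
# It assumes `find_comp_peaks` wraps scipy.signal.find_peaks and that
# `average_peak_window` stacks and averages fixed windows around each peak;
# both are assumptions, not the project helpers themselves.
import numpy as np
from scipy.signal import find_peaks
from scipy.stats import zscore

def sketch_average_peak_window(ts, data, l_window, r_window, height=2):
    # Find peaks in the z-scored time series above `height`
    peaks, _ = find_peaks(zscore(ts), height=height)
    # Keep only peaks with a full window on both sides
    peaks = peaks[(peaks >= l_window) & (peaks < data.shape[0] - r_window)]
    # Stack the [peak - l_window, peak + r_window] windows and average them
    windows = np.stack([data[p - l_window:p + r_window + 1] for p in peaks])
    return windows.mean(axis=0)
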
def run_main(n_comps, n_sub, global_signal, rotate, input_type, pca_type,
             center, shuffle_ts, simulate_var):
    group_data, hdr, zero_mask = load_data_and_stack(n_sub, input_type,
                                                     global_signal)
    # Normalize data
    group_data = zscore(group_data)
    # If specified, shuffle time series (for null testing)
    if shuffle_ts:
        group_data = shuffle_time(group_data)
    # If specified, simulate a null VAR-model time series
    if simulate_var:
        group_data = var_simulate(group_data, group_data.shape[0])
    # If specified, center along rows
    if center == 'r':
        group_data -= group_data.mean(axis=1, keepdims=True)
    if pca_type == 'complex':
        group_data = hilbert_transform(group_data)
    pca_output = pca(group_data, n_comps)
    if rotate is not None:
        pca_output = rotation(pca_output, group_data, rotate)
    write_results(input_type, pca_output, rotate, pca_output['loadings'],
                  n_comps, hdr, pca_type, global_signal, zero_mask)
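
# Sketch of the PCA step via SVD of the (time x vertex) data matrix. The
# project's `pca` helper is assumed to return scores, loadings, and explained
# variance; its exact scaling conventions may differ from this illustration.
import numpy as np

def sketch_pca(data, n_comps):
    # Column-center, then decompose
    data = data - data.mean(axis=0)
    U, s, Vt = np.linalg.svd(data, full_matrices=False)
    pc_scores = U[:, :n_comps] * s[:n_comps]   # temporal PC scores
    loadings = Vt[:n_comps]                    # spatial loadings (eigenvectors)
    exp_var = s[:n_comps] ** 2 / np.sum(s ** 2)
    return {'pc_scores': pc_scores, 'loadings': loadings, 'exp_var': exp_var}
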
def run_main(input_ts, n_sub, global_signal, input_type, l_window, r_window,
             peak_thres, return_peak_ts, n_samples=20):
    # Load seed time series and find peaks
    seed_ts = np.loadtxt(input_ts)
    ts_peaks = find_comp_peaks(seed_ts, peak_thres)
    selected_peaks = select_peaks(ts_peaks, l_window, r_window,
                                  len(seed_ts), n_samples)
    # Average window around each selected peak
    group_data, hdr, zero_mask = load_data_and_stack(n_sub, input_type,
                                                     global_signal)
    if return_peak_ts:
        peak_avg, peak_ts = average_peak_window(selected_peaks, group_data,
                                                l_window, r_window,
                                                return_peak_ts)
        write_results(peak_avg, hdr, input_type, global_signal, zero_mask,
                      peak_ts)
    else:
        peak_avg = average_peak_window(selected_peaks, group_data,
                                       l_window, r_window)
        write_results(peak_avg, hdr, input_type, global_signal, zero_mask)
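
# Hypothetical sketch of `select_peaks`: keep peaks that have a full window
# on both sides and, if more than n_samples remain, draw a random subset.
# This is an assumption about the helper's behavior, not its actual code.
import numpy as np

def sketch_select_peaks(peaks, l_window, r_window, n_ts, n_samples, seed=0):
    peaks = np.asarray(peaks)
    # Discard peaks whose window would run past either end of the scan
    valid = peaks[(peaks >= l_window) & (peaks < n_ts - r_window)]
    if len(valid) > n_samples:
        rng = np.random.default_rng(seed)
        valid = rng.choice(valid, size=n_samples, replace=False)
    return np.sort(valid)
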
def run_main(n_comps, n_sub, global_signal, ica_type, real_complex,
             input_type):
    group_data, hdr, zero_mask = load_data_and_stack(n_sub, input_type,
                                                     global_signal)
    # Normalize data (transpose for spatial ICA so vertices are rows)
    if ica_type == 'spatial':
        group_data = zscore(group_data.T)
        if real_complex == 'complex':
            group_data = hilbert_transform(group_data.T).T
    elif ica_type == 'temporal':
        group_data = zscore(group_data)
        if real_complex == 'complex':
            group_data = hilbert_transform(group_data)
    # Run ICA
    if real_complex == 'real':
        unmixing_matrix, ica_comps = ica(group_data, n_comps)
    elif real_complex == 'complex':
        unmixing_matrix, _, ica_comps, _ = complex_FastICA(group_data.T,
                                                           n_components=n_comps)
    if ica_type == 'spatial':
        spatial_map = ica_comps.T
        ts = unmixing_matrix
    elif ica_type == 'temporal':
        spatial_map = unmixing_matrix
        ts = ica_comps
    write_results(input_type, spatial_map, ts, hdr, global_signal, zero_mask,
                  real_complex)
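
# Sketch of the spatial vs. temporal ICA distinction using scikit-learn's
# FastICA for the real-valued case. The project's `ica` and `complex_FastICA`
# helpers are assumed to behave analogously; this is an illustration only.
import numpy as np
from sklearn.decomposition import FastICA

def sketch_ica(data, n_comps, ica_type='temporal'):
    # data: (time x vertex). Temporal ICA unmixes time courses; spatial ICA
    # unmixes spatial maps, so the matrix is transposed first.
    X = data if ica_type == 'temporal' else data.T
    model = FastICA(n_components=n_comps, whiten='unit-variance',
                    max_iter=500)
    comps = model.fit_transform(X)   # independent components (columns)
    return model.components_, comps  # (unmixing matrix, components)
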
def run_main(n_sub, n_comps, gradient_algorithm, global_signal, input_type,
             perc_thresh):
    group_data, hdr, zero_mask = load_data_and_stack(n_sub, input_type,
                                                     global_signal)
    group_data = zscore(group_data)
    affinity_mat = compute_affinity_matrix(group_data, perc_thresh)
    if gradient_algorithm == 'laplacian':
        embed = spectral_embed(affinity_mat, n_comps)
    else:
        embed = kernel_pca(affinity_mat, n_comps)
    write_results(embed, hdr, input_type, gradient_algorithm, global_signal,
                  zero_mask)
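
# Sketch of a typical affinity-matrix/gradient pipeline: percentile-threshold
# each row of the FC matrix, convert to a cosine-similarity affinity, and
# spectrally embed. This is an assumption about `compute_affinity_matrix` and
# `spectral_embed`, not the project's exact implementation.
import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.manifold import SpectralEmbedding

def sketch_gradient_embed(data, n_comps, perc_thresh=90):
    fc = np.corrcoef(data.T)                  # vertex x vertex correlation
    row_thr = np.percentile(fc, perc_thresh, axis=1, keepdims=True)
    fc[fc < row_thr] = 0                      # keep top rows' strongest edges
    affinity = cosine_similarity(fc)          # non-negative affinity matrix
    embed = SpectralEmbedding(n_components=n_comps, affinity='precomputed')
    return embed.fit_transform(affinity)
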
def run_main(n_sub, global_signal, input_type, parcellation, dynamic_fc):
    if parcellation and (input_type == 'gifti'):
        raise Exception('Parcellation time series are saved as .ptseries.nii '
                        'cifti files - change to cifti input_type')
    group_data, hdr, zero_mask = load_data_and_stack(n_sub, input_type,
                                                     global_signal,
                                                     parcellation)
    group_data = zscore(group_data)
    if dynamic_fc:
        # Compute the outer product of each time-point vector with itself
        fc_mat = np.apply_along_axis(outer_ltriangle, 1, group_data)
    else:
        fc_mat = compute_fc_matrix(group_data)
    write_results(fc_mat, global_signal, 'fc_matrix')
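
# Hypothetical sketch of `outer_ltriangle`, consistent with the comment
# above: the outer product of one time-point vector with itself, flattened
# to its strict lower triangle (an assumption about the helper).
import numpy as np

def sketch_outer_ltriangle(tp_vec):
    outer = np.outer(tp_vec, tp_vec)
    # k=-1 excludes the diagonal (self-products)
    return outer[np.tril_indices_from(outer, k=-1)]
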
def run_main(n_comps, n_sub, global_signal, input_type, cov_type,
             n_pca_comps=100):
    group_data, hdr, zero_mask = load_data_and_stack(n_sub, input_type,
                                                     global_signal)
    # Normalize data
    group_data = zscore(group_data)
    # Dimension reduction before HMM estimation
    pca_output = pca(group_data, n_pca_comps)
    # Estimate HMM
    hmm_model, state_ts, mean_maps = gmm_hmm(pca_output, n_comps, cov_type)
    write_results(input_type, [hmm_model, state_ts], mean_maps, n_comps, hdr,
                  global_signal, zero_mask)
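
# Sketch of the HMM step using hmmlearn's GaussianHMM on the PC scores.
# It assumes `gmm_hmm` fits a Gaussian-emission HMM and decodes a state
# sequence; hmmlearn is one way to do that, not necessarily the project's.
from hmmlearn.hmm import GaussianHMM

def sketch_gmm_hmm(pc_scores, n_states, cov_type='full'):
    model = GaussianHMM(n_components=n_states, covariance_type=cov_type)
    model.fit(pc_scores)
    state_ts = model.predict(pc_scores)   # Viterbi state sequence
    return model, state_ts, model.means_  # state mean maps in PC space
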
def run_main(lh_vertices, rh_vertices, n_sub, global_signal, input_type):
    if lh_vertices is None and rh_vertices is None:
        raise Exception('At least one vertex index should be supplied')
    group_data, hdr, zero_mask = load_data_and_stack(n_sub, input_type,
                                                     global_signal)
    # Normalize data
    group_data = zscore(group_data)
    # Combined LH/RH data is concatenated LH then RH - RH indices are offset
    # by the number of LH vertices
    _, _, n_vert_L, _ = pull_gifti_data(hdr)
    # Ensure chosen vertices are not 'zeroed out' vertices - i.e. no signal.
    # A hemisphere with no supplied vertices passes trivially.
    cond_1 = all(v in zero_mask for v in (lh_vertices or []))
    cond_2 = all((v + n_vert_L) in zero_mask for v in (rh_vertices or []))
    if not cond_1 or not cond_2:
        raise Exception('A supplied vertex contains all zeros')
    seed_ts = compute_seed_ts(lh_vertices, rh_vertices, group_data,
                              zero_mask, n_vert_L)
    fc_map = compute_fc_map(seed_ts, group_data)
    write_results(seed_ts, fc_map, global_signal, hdr, input_type, zero_mask)
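
# Sketch of `compute_fc_map` as the Pearson correlation between the seed
# time series and every vertex time series - the standard seed-based FC map.
# The helper's exact normalization is an assumption.
import numpy as np

def sketch_fc_map(seed_ts, data):
    # data: (time x vertex) with z-scored columns; z-score the seed too
    seed_z = (seed_ts - seed_ts.mean()) / seed_ts.std()
    return seed_z @ data / data.shape[0]   # Pearson r per vertex
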
def run_main(lh_vertices, rh_vertices, seed_ts_fp, n_clusters, norm, n_sub,
             global_signal, input_type, perc_thres, window_avg, window_size):
    vertex_input = (lh_vertices is not None) or (rh_vertices is not None)
    if not vertex_input and seed_ts_fp is None:
        raise Exception('At least one vertex index or a seed time series '
                        'should be supplied')
    if vertex_input and seed_ts_fp is not None:
        raise Exception('Either vertex indices or a seed time series should '
                        'be supplied, not both')
    group_data, hdr, zero_mask = load_data_and_stack(n_sub, input_type,
                                                     global_signal)
    # Normalize data
    group_data = zscore(group_data)
    if vertex_input:
        # Combined LH/RH data is concatenated LH then RH - RH indices are
        # offset by the number of LH vertices
        _, _, n_vert_L, _ = pull_gifti_data(hdr)
        # Ensure chosen vertices are not 'zeroed out' vertices - i.e. no
        # signal. A hemisphere with no supplied vertices passes trivially.
        cond_1 = all(v in zero_mask for v in (lh_vertices or []))
        cond_2 = all((v + n_vert_L) in zero_mask for v in (rh_vertices or []))
        if not cond_1 or not cond_2:
            raise Exception('A supplied vertex contains all zeros')
        seed_ts = compute_seed_ts(lh_vertices, rh_vertices, group_data,
                                  zero_mask, n_vert_L)
    else:
        seed_ts = np.loadtxt(seed_ts_fp)
    selected_tps, selected_maps = get_suprathreshold_maps(seed_ts, group_data,
                                                          perc_thres)
    cluster_centroid, cluster_indx = cluster_maps(selected_maps, norm,
                                                  n_clusters)
    if window_avg:
        cluster_win_avgs = compute_window_average(group_data, selected_tps,
                                                  cluster_indx, n_clusters,
                                                  window_size)
    else:
        cluster_win_avgs = None
    write_results(cluster_centroid, cluster_indx, selected_tps,
                  cluster_win_avgs, window_avg, norm, vertex_input,
                  global_signal, hdr, input_type, zero_mask)
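
# Sketch of the clustering step with scikit-learn KMeans. It assumes
# `cluster_maps` k-means-clusters the suprathreshold maps and returns
# centroids plus per-map cluster labels (the `norm` option is omitted here).
from sklearn.cluster import KMeans

def sketch_cluster_maps(maps, n_clusters, seed=0):
    # maps: (n_selected_timepoints x vertex)
    km = KMeans(n_clusters=n_clusters, n_init=10, random_state=seed).fit(maps)
    return km.cluster_centers_, km.labels_
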
def run_main(input_type, global_signal, n_sub):
    group_data, hdr, zero_mask = load_data_and_stack(n_sub, input_type,
                                                     global_signal)
    lag_results = run_lag_projection(group_data)
    write_results(input_type, lag_results, lag_results[0][np.newaxis, :],
                  hdr, global_signal, zero_mask)
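
# Sketch of the idea behind `run_lag_projection`: for each vertex, the lag
# (in TRs) at which its cross-correlation with the global mean signal peaks.
# This uses a simple circular cross-correlation for illustration; the project
# helper's exact method is an assumption and may differ.
import numpy as np

def sketch_lag_map(data, max_lag=5):
    # data: (time x vertex) with z-scored columns
    gs = data.mean(axis=1)
    gs = (gs - gs.mean()) / gs.std()
    lags = np.arange(-max_lag, max_lag + 1)
    # Circular cross-correlation of the global signal with every vertex
    xcorr = np.stack([np.roll(gs, lag) @ data / len(gs) for lag in lags])
    return lags[np.argmax(xcorr, axis=0)]  # lag of peak correlation per vertex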