import numpy as np
from scipy.spatial.distance import pdist
from sklearn import neighbors

# average the rates within consecutive, non-overlapping windows of d_idx samples
n_smooth_samples = np.floor(len(rates_all) / d_idx).astype(int)
sm_rates = np.zeros((n_smooth_samples, nCells_tot))
for i in range(n_smooth_samples):
    si = i * d_idx
    ei = (i + 1) * d_idx
    sm_rates[i] = np.mean(rates_all[si:ei], axis=0)

results = {'session': session, 'h0': [], 'h1': [], 'h2': []}

# if greater than 10 cells, dim reduce to 10 dims using Isomap
fit_dim = 10
dr_method = 'iso'
n_neighbors = 5
dim_red_params = {'n_neighbors': n_neighbors, 'target_dim': fit_dim}
if nCells_tot > 10:
    rates = run_dim_red(sm_rates, params=dim_red_params, method=dr_method)
else:
    rates = sm_rates

# threshold out outlier points with low neighborhood density
if thrsh:
    # a) find number of neighbors of each point within a radius equal to the
    #    1st percentile of all pairwise distances
    dist = pdist(rates, 'euclidean')
    rad = np.percentile(dist, 1)
    neigh = neighbors.NearestNeighbors()
    neigh.fit(rates)
    # list() is needed so np.array receives a concrete sequence rather than a
    # Python 3 map iterator
    num_nbrs = np.array(list(map(
        len, neigh.radius_neighbors(X=rates, radius=rad, return_distance=False))))
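# run_dim_red is called above but not defined in this fragment. A minimal
# sketch of what its 'iso' branch could look like, assuming it simply wraps
# sklearn's Isomap with the n_neighbors / target_dim values packed into
# `params` (the function name and dict keys follow the calls above; the body
# is an illustration, not necessarily the project's actual implementation):
from sklearn.manifold import Isomap

def run_dim_red(data, params, method='iso'):
    """Embed (n_samples, n_features) data into params['target_dim'] dimensions."""
    if method == 'iso':
        model = Isomap(n_neighbors=params['n_neighbors'],
                       n_components=params['target_dim'])
        return model.fit_transform(data)
    raise ValueError('unknown dim-reduction method: %r' % method)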
import time

# kernel-smoothed firing rates for ADn cells in this session
rate_params = {'dt': dt_kernel, 'sigma': sigma}
method = 'iso'
n_neighbors = 5
dim_red_params = {'n_neighbors': n_neighbors, 'target_dim': target_dim}
to_plot = True
session_rates = spike_counts(session, rate_params, count_type='rate',
                             anat_region='ADn')
t0 = time.time()

if condition == 'solo':
    counts, tmp_angles = session_rates.get_spike_matrix(state)
    sel_counts = counts[:desired_nSamples]
    proj = run_dim_red(sel_counts, params=dim_red_params, method=method)
    to_save = {
        'seed': sd,
        state: proj,
        'meas_angles': tmp_angles[:desired_nSamples]
    }
    fname = '%s_%s_kern_%dms_sigma_%dms_binsep_%s_embed_%s_%ddims_%dneighbors_%d.p' % (
        session, area, sigma * 1000, dt_kernel * 1000, state, method,
        target_dim, n_neighbors, sd)
elif condition == 'joint':
    counts1, _ = session_rates.get_spike_matrix(state)
    counts2, _ = session_rates.get_spike_matrix(state2)
    print('Counts for each ', len(counts1), len(counts2))
    nSamples = min(len(counts1), len(counts2), desired_nSamples)
    print('nSamples = ', nSamples)
    sel1 = counts1[:nSamples]
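# The to_save dict and the '.p' extension in fname above suggest the embedding
# is pickled to disk once each branch finishes. A hedged sketch of that save
# step, assuming a hypothetical save_embedding helper and an output directory
# save_dir (neither appears in this fragment):
import os
import pickle

def save_embedding(to_save, fname, save_dir='.'):
    # serialize the embedding dict to <save_dir>/<fname>
    with open(os.path.join(save_dir, fname), 'wb') as f:
        pickle.dump(to_save, f)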