# NOTE(review): this chunk begins mid-statement — the fragment below closes a call
# (presumably `region_lines.append(...)`) that starts above the visible region.
            sync_funcs.find_nearest( y_positions[position_sorted_indices] * const.POSITION_MULT, rp)[0])
# Collect the per-region line positions and sort the trial image rows by position.
region_lines = np.array(region_lines)
tns = tn[position_sorted_indices]
# Show the position-sorted trial matrix; flip vertically so the first sorted row is at the bottom.
plt.imshow(np.flipud(tns), aspect='auto')
# Overlay white horizontal lines marking region boundaries (y is counted from the top after flipud)
# and a vertical line at the temporal midpoint of each trial window.
plt.hlines(y=len(t) - region_lines, xmin=0, xmax=len(tns[0]) - 1, linewidth=3, color='w')
plt.vlines(x=int(len(tns[0]) / 2), ymin=0, ymax=len(tns) - 1)
# Browse the un-sorted trial matrix pane-by-pane; 'i' is the pane index the GUI mutates.
i = 0
sv.graph_pane(globals(), 'i', 'tn')
# ---- Set up live raster updating around beam-break events ----
time_around_beam_break = 8  # seconds of context on each side of the beam break
index = 0
fig1 = plt.figure(1)
fig2 = plt.figure(2)
output = None
all_indices = np.arange(len(avg_firing_rate_around_suc_trials))
# 120 frames/sec * seconds — assumes a 120 Hz video frame rate; TODO confirm against const
frames_around_beam_break = 120 * time_around_beam_break
# Argument bundle consumed by the live-update raster callback below.
args = [ all_indices, avg_firing_rate_around_suc_trials, template_info, spike_info, start_pokes_after_delay, frames_around_beam_break, fig1, fig2 ]
show_rasters_decrease = fr_funcs.show_rasters_for_live_update
# NOTE(review): this chunk begins mid-statement — the `label=` fragment closes a plot
# call begun above the visible region, and the `maxima.append(...)` line presumably
# belongs to the same per-template loop; original indentation was lost in this paste.
         label='Template = {}'.format(str(n)))
#labels.append(label)
# Peak time relative to window centre, in ms (8.33 ms = one frame at ~120 Hz; TODO confirm).
maxima.append((np.argmax(t_s) - 100) * 8.33)
# Fit a normal distribution to the collected peak times and plot it over the histogram.
avg = np.mean(maxima)
var = np.var(maxima)
pdf_x = np.linspace(np.min(maxima), np.max(maxima), 100)
pdf_y = 1.0 / np.sqrt(2 * np.pi * var) * np.exp(-0.5 * (pdf_x - avg)**2 / var)
plt.hist(maxima, 50, density=True)
plt.plot(pdf_x, pdf_y, 'k--')
plt.title('Mean = {}'.format(str(avg)))
# Browse the collected images pane-by-pane against a ms time axis (+-100 frames around centre).
ims = np.array(ims)
index = 0
x = np.arange(-100 * 8.33, 100 * 8.33, 8.33)
sv.graph_pane(globals(), 'index', 'ims', 'x')
# </editor-fold>
# -------------------------------------------------
# <editor-fold desc=CHECK IF DIFFERENT SETS OF NEURONS ARE CORRELATED WITH SPEED FOR SUCCESSFUL TRIALS VS OTHER PERIODS
# Load the pre-computed event time points (saved as .npy by an earlier stage).
time_points_of_trial_pokes = np.load( join(poke_folder, 'time_points_of_trial_pokes.npy'))
time_points_of_non_trial_pokes = np.load( join(poke_folder, 'time_points_of_non_trial_pokes.npy'))
time_points_of_touch_ball = np.load( join(poke_folder, 'time_points_of_touch_ball.npy'))
event_dataframes = ns_funcs.load_events_dataframes(events_folder, sync_funcs.event_types)
# NOTE(review): this chunk begins mid-statement — the fragment below closes an array
# allocation (presumably `imfs_around_tp = np.zeros((..., num_of_events, ...))`)
# started above the visible region.
    num_of_events, int(2 * window_timepoints / const.LFP_DOWNSAMPLE_FACTOR)))
# Cut a window of IMF samples around each event time point (event times are in
# full-rate timepoints, so they are divided down by the LFP downsample factor).
for i in np.arange(num_of_events):
    start_imfs = int((events[i] - window_timepoints) / const.LFP_DOWNSAMPLE_FACTOR)
    end_imfs = int((events[i] + window_timepoints) / const.LFP_DOWNSAMPLE_FACTOR)
    imfs_around_tp[:, :, i, :] = imfs[:, :, start_imfs:end_imfs]
# Average over the events axis, then swap the first two axes for the pane viewer.
avg_imfs_around_tp = np.mean(imfs_around_tp, axis=2)
avg_imfs_around_tp = np.swapaxes(avg_imfs_around_tp, 0, 1)
# NOTE(review): the ''' below opens a triple-quoted string that is not closed within
# this visible chunk — everything after it here is disabled (string content), likely a
# scratch block: IMF pane browsing and a random-trigger LFP control computation.
'''
def space(data):
    return cdf.space_data_factor(data, 2)
imf = 0
sv.graph_pane(globals(), 'imf', 'avg_imfs_around_tp', transform_name='space')
_ = plt.plot(space(avg_lfps_around_event).T)
random_times = np.random.choice( np.arange(2 * window_timepoints, lfps.shape[1] - 2 * window_timepoints, 1), num_of_events)
random_triggered_lfps = []
for spike in random_times:
    random_triggered_lfps.append(lfps[:, spike - window_timepoints:spike + window_timepoints])
random_triggered_lfps = np.array(random_triggered_lfps)
random_triggered_lfps_mean = random_triggered_lfps.mean(axis=0)
random_triggered_lfps_std = random_triggered_lfps.std(axis=0)
# _ = plt.plot(space(random_triggered_lfps_std).T)
large_channels_full = np.arange(int(largest_channel - 30), int(largest_channel + 30)) #plt.plot(mua_template_data[large_channels_full, :].T) spike_data = np.swapaxes( np.reshape(spike_data_one[large_channels_full, :], (len(large_channels_full), spike_time_windows.shape[0], spike_time_windows.shape[1])), 0, 1) spike_data = np.array(spike_data) spike_data_mean = np.mean(spike_data, axis=0) plt.plot(spike_data_mean.transpose()) s = 0 seq_v.graph_pane(globals(), 's', 'spike_data') c = 0 seq_v.graph_pane(globals(), 'c', 'spike_data_mean') # TSNE The 10 PCs of all the channels that are within the group of largest channels according to Kilosort # ------------------------------------------------------------- n_components = 10 principal_components = np.empty( (spike_data.shape[0], spike_data.shape[1] * n_components)) pca = PCA(n_components=n_components) i = 0 for index in range(len(spike_data)): pca.fit(spike_data[index, :, :].transpose()) components = pca.components_.flatten()
# NOTE(review): the ''' further down closes a triple-quoted string opened above the
# visible region — so everything before it here (including update_trajectory) appears
# to be disabled string content, kept verbatim.
def update_trajectory(f):
    # Expose the trajectory up to frame f through module globals so the
    # one-shot viewer (osv) bound to 'traj_y'/'traj_x' below can redraw it.
    global traj_x
    global traj_y
    traj_x = body_positions[:f, 0]
    traj_y = body_positions[:f, 1]
    return body_positions[:f, :]
traj = None
# Re-run update_trajectory whenever the GUI mutates 'frame', storing the result in 'traj'.
tr.connect_repl_var(globals(), 'frame', 'traj', 'update_trajectory')
osv.graph(globals(), 'traj_y', 'traj_x')
# -------------------------------------------------
'''
# -------------------------------------------------
# FITTING THE MARKERS TO GET BETTER ESTIMATES OF THE LOW LIKELIHOOD ONES
# Fitting 2d surface using multiple markers
# DID NOT WORK
# Select the body-part marker columns, then keep only their x/y coordinate columns.
body_markers_positions = markers.loc[:, markers.columns.get_level_values(1).isin(body_parts)]
# NOTE(review): `body_markers` is not defined in this chunk — this likely should be
# `body_markers_positions` (would raise NameError as written); left as-is since the
# section is marked DID NOT WORK. Verify against the full file.
body_markers_positions = body_markers.loc[:, body_markers.columns.get_level_values(2).isin(['x', 'y'])]
# Reshape to (seconds, frames_per_second, 3 markers * 2 coords) — assumes 3605 s at
# 120 fps with 3 body markers; TODO confirm.
t = np.reshape(body_markers_positions.loc[:3605*120-1, :].values, (3605, 120, 6))
# Browse one second of marker positions per pane.
sec = 0
im_lev = [0, 50]
cm = 'jet'
sv.graph_pane(globals(), 'sec', 't')
# NOTE(review): this chunk begins mid-loop — `data1[i] += i` is the body of a `for`
# loop (and the creation of data1) that starts above the visible region.
    data1[i] += i
data2 = np.random.random(500)
# Have a look at them
position1 = 0
range1 = 1000
sequence_viewer.graph_range(globals(), 'position1', 'range1', 'data1')
position2 = 0
range2 = 10
sequence_viewer.graph_range(globals(), 'position2', 'range2', 'data2')
# Also you can view data in panes
pane = 0
sequence_viewer.graph_pane(globals(), 'pane', 'data1')
# The pane viewer shows the last one or two dimensions of a 2d or 3d data set and iterates over the first one.
# Connect two guis
def pos1_to_pos2(pos1):
    # Map a position in data1's index range [0, 4000] onto data2's [0, 400],
    # clamping out-of-range values to the nearest endpoint.
    if pos1 >= 0 and pos1 <= 4000:
        return int(pos1 / 10)
    elif pos1 < 0:
        return 0
    elif pos1 > 4000:
        return 400
# Keep 'position2' synchronised to 'position1' through the mapping above.
transform.connect_repl_var(globals(), 'position1', 'pos1_to_pos2', 'position2')
# Press the Transform to deactivate the function from running and so stop the connection