def draw_noise(data_files: Dict[int, File], neuron_id: int, params: MotionParams):
    """Plot noise correlation across days within the motion-related (good) group,
    within the unrelated group, and between the two groups, together with the
    per-trial peak amplitude of one neuron (neuron_id) on a secondary axis."""
    last_day = max(data_files.keys())
    lever = load_mat(data_files[last_day]['response'])
    neuron_rate = data_files[last_day].attrs['frame_rate']
    neurons = common_axis([DataFrame.load(x['spike']) for x in data_files.values()])
    good, bad, anti = classify_cells(motion_corr(lever, neurons[-1], neuron_rate, 16000, params), 0.001)
    amp = list()
    corrs: Dict[str, List[List[float]]] = {'good': [], 'unrelated': [], 'between': []}
    for (day_id, data_file), neuron in zip(data_files.items(), neurons):
        if day_id == last_day:
            continue
        lever = load_mat(data_file['response'])
        corrs['good'].append(_take_triu(noise_autocorrelation(lever, neuron[good], neuron_rate)))
        corrs['unrelated'].append(_take_triu(noise_autocorrelation(lever, neuron[bad | anti], neuron_rate)))
        corrs['between'].append(_take_triu(noise_correlation(lever, neuron[good], neuron[bad | anti], neuron_rate)))
        lever.center_on("motion", **params)
        neuron_trials = fold_by(neuron, lever, neuron_rate, True)
        amp.append(neuron_trials.values[np.argwhere(neuron.axes[0] == neuron_id)[0, 0], :, :].max(axis=1))
    with Figure(join(project_folder, 'report', 'img', f'noise_corr_{neuron_id}.svg')) as (ax,):
        day_ids = [x for x in data_files.keys() if x != last_day]
        for idx, (group_str, group) in enumerate(corrs.items()):
            ax.errorbar(day_ids, [np.mean(x) for x in group], yerr=[_sem(x) for x in group],
                        color=COLORS[idx], label=group_str)
        ax2 = ax.twinx()
        ax2.errorbar(day_ids, [np.mean(x) for x in amp], [_sem(x) for x in amp], color=COLORS[-1])
        ax.set_title(str(neuron_id))
        ax.legend()
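# The helpers _take_triu and _sem used above are not defined in this section.
# The sketches below are assumptions about their behavior (upper-triangular
# extraction of a correlation matrix and standard error of the mean), given
# only for reference; the project's own implementations take precedence.
def _take_triu(mat: np.ndarray) -> np.ndarray:
    """Assumed helper: strictly upper-triangular entries of a square matrix."""
    return mat[np.triu_indices_from(mat, k=1)]

def _sem(x) -> float:
    """Assumed helper: standard error of the mean."""
    x = np.asarray(x, dtype=float)
    return float(np.std(x, ddof=1) / np.sqrt(x.size))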
def draw_hierarchy(data_files: Dict[int, File]):
    """Plot a dendrogram of the noise-correlation structure for each session."""
    neurons = common_axis([DataFrame.load(x['spike']) for x in data_files.values()])
    for (day_id, data_file), neuron in zip(data_files.items(), neurons):
        lever = load_mat(data_file['response'])
        corr_mat = noise_autocorrelation(lever, neuron, data_file.attrs['frame_rate'])
        with Figure() as (ax,):
            ax.set_title(f"day-{day_id:02d}")
            fancy_dendrogram(linkage(corr_mat, 'average'), ax=ax)
# Cell: Measure the inter-cell correlation between trials of typical pushes
# for single neurons on different days
def draw_neuron_corr(data_files: Dict[int, File], params: MotionParams, fov_id: str = None):
    """Plot, for each day, the across-trial reliability of motion-aligned
    responses of the motion-related (good) neurons."""
    neurons = common_axis([DataFrame.load(x['spike']) for x in data_files.values()])
    last_day = max(data_files.keys())
    lever = load_mat(data_files[last_day]['response'])
    neuron_rate = data_files[last_day].attrs['frame_rate']
    good, bad, anti = classify_cells(motion_corr(lever, neurons[-1], neuron_rate, 16000, params), 0.001)
    result_list = list()
    for (day, data_file), neuron in zip(data_files.items(), neurons):
        lever.center_on('motion')  # type: ignore
        motion_neurons = fold_by(neuron, lever, neuron_rate, True)
        result_list.append([reliability(motion_neuron) for motion_neuron in motion_neurons.values])
    result = np.array(result_list)
    with Figure(join(img_folder, ("neuron_corr.svg" if fov_id is None else f"{fov_id}.svg"))) as ax:
        ax[0].plot(list(data_files.keys()), result[:, good])
def draw_network_graph(data_files: Dict[int, File], params: MotionParams, threshold: int = 16000):
    """Draw the neuron functional-connection graph for each session, with neurons
    colored by their classification in the last session.

    Args:
        data_files: mapping from day_id (int) to that session's data File
        params: motion parameters required by classify_cells
            ("quiet_var", "window_size", "event_thres", "pre_time")
        threshold: threshold passed to motion_corr; single-linkage cluster distance
    """
    last_day = data_files[max(data_files.keys())]
    neurons = common_axis([DataFrame.load(x['spike']) for x in data_files.values()])
    neuron_rate = last_day.attrs['frame_rate']
    last_lever = load_mat(last_day['response'])
    final_corr_mat = noise_autocorrelation(last_lever, neurons[-1], neuron_rate)
    categories = classify_cells(motion_corr(last_lever, neurons[-1], neuron_rate, threshold, params), 0.001)
    layout = corr_graph.get_layout(final_corr_mat, neurons[-1].axes[0])
    for (day_id, data_file), neuron in zip(data_files.items(), neurons):
        corr_mat = noise_autocorrelation(load_mat(data_file['response']), neuron, neuron_rate)
        with Figure(join(img_folder, f"network-day-{day_id:02d}.svg")) as ax:
            corr_graph.corr_plot(ax[0], corr_mat, categories, neuron.axes[0], layout=layout)
    print('done')
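# Hypothetical usage (the run section below does not call draw_network_graph;
# `files` and `motion_params` are assumed to be defined as in that section):
# draw_network_graph(files, motion_params, threshold=16000)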
## actual running
common_id = common_axis([DataFrame.load(x['spike']) for x in files.values()])[-1].axes[0]
draw_classify_neurons(files[14], common_id)
draw_hierarchy(files)
draw_stacked_bar(toml.load(join(res_folder, 'cluster.toml')))  # type: ignore
neuron_ids = toml.load(join(res_folder, "0304-neurons.toml"))['neuron_id']
draw_noise(files, 27, motion_params)
draw_neuron_corr(files, motion_params)
##