def figure_2(cc_objects, epsp_file):
    """Build the two panels of figure 2.

    First figure: sweep-9 voltage traces with a spike raster above each
    trace for two current-clamp recordings (rows 0-1) and one EPSP
    recording (row 2), with colored bars marking analysis windows.
    Second figure: kernel-smoothed spike-rate estimates for the two CC
    recordings with spike tick marks underneath.

    Parameters
    ----------
    cc_objects : sequence of pyABF-like objects
        Current-clamp recordings; the first two entries are plotted.
    epsp_file : pyABF-like object
        The EPSP recording plotted in the bottom row.
    """
    fig, axs = plt.subplots(3, sharex=True)
    colors = ['C{}'.format(i) for i in range(3)]
    colors = colors[2:]  # keep only 'C2' for the event raster
    line_length = [30]
    for i in range(0, 2):
        spike_times = get_spike_times_for_cc(cc_objects[i], 9)
        for j in range(cc_objects[i].channelCount):
            cc_objects[i].setSweep(9, channel=j)
            axs[i].plot(cc_objects[i].sweepX, cc_objects[i].sweepY)
        # Raster of detected spikes, drawn just above the trace maximum.
        line_offset = [30 + max(cc_objects[i].sweepY)]
        axs[i].eventplot(np.unique(spike_times), colors=colors,
                         lineoffsets=line_offset, linelengths=line_length)
        # Red bar: early window (0.1-0.6 s); green bar: late window (1.6-2.1 s).
        axs[i].hlines(y=min(cc_objects[i].sweepY) - 10, xmin=0.1, xmax=0.6,
                      color="r", linewidth=5)
        axs[i].hlines(y=min(cc_objects[i].sweepY) - 10, xmin=1.6, xmax=2.1,
                      color="g", linewidth=5)
    spike_times = get_spike_times_for_epsp(epsp_file, 9)
    for j in range(epsp_file.channelCount):
        epsp_file.setSweep(9, channel=j)
        axs[2].plot(epsp_file.sweepX, epsp_file.sweepY)
    line_offset = [30 + max(epsp_file.sweepY)]
    axs[2].eventplot(np.unique(spike_times), colors=colors,
                     lineoffsets=line_offset, linelengths=line_length)
    axs[2].hlines(y=min(epsp_file.sweepY) - 30, xmin=0.5, xmax=0.75,
                  color="r", linewidth=5)
    # Axis cosmetics (was three copy-pasted blocks; also dropped the
    # f-prefix from strings that contain no placeholders).
    for ax in axs:
        ax.set_ylabel("V (mV)", fontsize=30)
        ax.tick_params(labelsize=25)
    axs[2].set_xlabel("Time (s)", fontsize=40)
    fig.set_size_inches(18.5, 20)
    plt.show()

    # Second figure: kernel-density spike-rate estimates for the CC files.
    fig, axs = plt.subplots(2, sharex=True)
    for i in range(0, 2):
        spike_times = get_spike_times_for_cc(cc_objects[i], 9)
        x_d = np.linspace(0, 2.5, 1000)
        dens = calculate_spike_rate_kernel_smoothing(spike_times, 2.5)
        axs[i].fill_between(x_d, dens)
        # Spike tick marks just below the baseline.
        axs[i].plot(spike_times, np.full_like(spike_times, -0.1), '|k',
                    markeredgewidth=1)
        axs[i].set_ylabel("Spike Rate (HZ)", fontsize=20)
        axs[i].tick_params(labelsize=25)
    axs[1].set_xlabel("Time (s)", fontsize=30)
    fig.set_size_inches(9.25, 10)
    plt.show()
def figure_3(cc_object):
    """Build figure 3: three rows of the same sweep-9 kernel-smoothed
    spike-rate estimate with different annotations.

    Row 0: local maxima of the density plus a linear fit through them.
    Row 1: the first local maximum and a horizontal line at the mean rate.
    Row 2: the bare density plot.

    Parameters
    ----------
    cc_object : pyABF-like object
        Current-clamp recording; sweep 9 is analysed.
    """
    fig, axs = plt.subplots(3, sharex=True)
    for i in range(0, 3):
        spike_times = get_spike_times_for_cc(cc_object, 9)
        x_d = np.linspace(0, 2.5, 1000)
        dens = calculate_spike_rate_kernel_smoothing(spike_times, 2.5)
        axs[i].fill_between(x_d, dens)
        axs[i].plot(spike_times, np.full_like(spike_times, -0.1), '|k',
                    markeredgewidth=1)
        # Local maxima of the density, interior points only.  The original
        # scanned range(1000): at index 0 it compared against dens[-1]
        # (wrap-around) and at index 999 it could raise IndexError on
        # dens[1000] — both fixed by restricting to 1..998.
        if i == 0:
            maxima = [[x, y] for k, x, y in
                      zip(range(1, 999), x_d[1:999], dens[1:999])
                      if dens[k - 1] < y > dens[k + 1]]
            new_b, new_c, new_e = fit_linear(maxima)
            maxima.append([0, new_c])  # add the fit's intercept point at t=0
            print(new_b, new_c)
            y = x_d * new_b + new_c
            axs[i].plot(x_d, y, "g")
            axs[i].scatter([m[0] for m in maxima], [m[1] for m in maxima])
        elif i == 1:
            maximum = [[x, y] for k, x, y in
                       zip(range(1, 999), x_d[1:999], dens[1:999])
                       if dens[k - 1] < y > dens[k + 1]][0]
            axs[i].hlines(y=np.mean(dens), color="r", linewidth=2,
                          xmin=0, xmax=2)
            axs[i].scatter([maximum[0]], [maximum[1]])
    # Axis cosmetics (consolidated; f-prefix dropped from placeholder-free
    # strings).  Note: the original also left row 2's ticks at the default
    # size — preserved here.
    for ax in axs:
        ax.set_ylabel("Spike Rate (HZ)", fontsize=20)
    axs[2].set_xlabel("Time (s)", fontsize=30)
    axs[0].tick_params(labelsize=25)
    axs[1].tick_params(labelsize=25)
    fig.set_size_inches(9.25, 15)
    plt.show()
def calculate_all_metrics_for_epsp(abf_object):
    """Pool spike times across every sweep of *abf_object* and return
    ``[IFC * 100, SFC]`` computed from the kernel-smoothed rate estimate,
    or ``None`` when fewer than two spikes were detected overall.

    NOTE(review): despite the "epsp" name this calls
    get_spike_times_for_cc — confirm that is intended for EPSP files.
    """
    spike_t = []
    for sweep in range(abf_object.sweepCount):
        abf_object.setSweep(sweep)
        spike_t = spike_t + get_spike_times_for_cc(abf_object)
    if len(spike_t) > 1:
        # Bug fix: the original referenced the undefined name `obj` here
        # (NameError at runtime); the parameter is `abf_object`.
        pdf = calculate_spike_rate_kernel_smoothing(
            spike_t, max(abf_object.sweepX))
        return [calculate_ifc(pdf) * 100, calculate_sfc(pdf)]
    else:
        return None
def plot_all_psth(abf_objects, function=False):
    """Create a PSTH for sweep 9 of every recording that produced at
    least two spikes; the plot is labelled with the recording's folder
    name.  *function* is forwarded to create_psth unchanged.
    """
    for recording in abf_objects:
        spikes = get_spike_times_for_cc(recording, 9)
        if len(spikes) < 2:
            continue
        neuron_label = recording.abfFolderPath.split("/")[-1]
        create_psth(spikes, max(recording.sweepX), function, neuron_label)
def create_nep_plot(abf_objects, plot_number):
    """Plot every sweep of up to two ABF recordings side by side: one
    subplot row per sweep, raw trace(s) per channel plus a spike raster
    above, labelled with the command current at the sweep midpoint.

    Parameters
    ----------
    abf_objects : sequence of pyABF-like objects
        One or two recordings; the first fills column 0, the second
        (if present) column 1.
    plot_number : used only in the figure suptitle.
    """
    n_subplots = max([o.sweepCount for o in abf_objects]) * 2
    if n_subplots > 2:
        fig, axs = plt.subplots(int(n_subplots / 2), 2, sharex=True)
    else:
        fig, axs = plt.subplots(2, 2, sharex=True)
    fig.suptitle(f"Plot group {plot_number}", fontsize=16)
    colors = ['C{}'.format(i) for i in range(3)][2:]  # single raster colour 'C2'
    line_length = [30]

    def _plot_column(abf, col):
        # One column of the grid: title, then per-sweep trace + raster.
        # (The original had this code copy-pasted for each column.)
        axs[0, col].title.set_text(
            re.sub("/home/samp/Granule-Data/", "", abf.abfFilePath))
        for sweep in range(abf.sweepCount):
            spike_times = get_spike_times_for_cc(abf, sweep)
            for channel in range(abf.channelCount):
                abf.setSweep(sweep, channel=channel)
                axs[sweep, col].plot(abf.sweepX, abf.sweepY)
            line_offset = [30 + max(abf.sweepY)]
            # np.unique was applied only to column 0 in the original;
            # duplicate spike times draw identically, so normalise both.
            axs[sweep, col].eventplot(np.unique(spike_times), colors=colors,
                                      lineoffsets=line_offset,
                                      linelengths=line_length)
            # Command current at the sweep midpoint, used as the row label.
            axs[sweep, col].set_ylabel(
                f"I={abf.sweepC[int(len(abf.sweepC) / 2)]}")
            axs[sweep, col].tick_params(labelsize=15)

    _plot_column(abf_objects[0], 0)
    if len(abf_objects) > 1:
        # Bug fix: the original re-plotted the second recording's trace a
        # second time after the eventplot (accidental duplicate call).
        _plot_column(abf_objects[1], 1)
    # Add graph annotations.  Fixed the broken label "t (" left from an
    # unfinished edit.
    axs[int(n_subplots / 2 - 1), 0].set_xlabel("t", fontsize=25)
    axs[int(n_subplots / 2 - 1), 1].set_xlabel("t", fontsize=25)
    fig.set_size_inches(18.5, 20)
    plt.show()
def get_f_initial(abf_objects):
    """For each recording, pool spike times across all sweeps and compute
    ``[mean rate over the first half of the density, IFC * 100]``.

    Recordings with fewer than two pooled spikes are skipped.

    Returns
    -------
    list of [float, float] pairs, one per qualifying recording.
    """
    results = []
    # Renamed the loop variable from `object` (shadowed a builtin).
    for abf in abf_objects:
        spike_t = []
        for sweep in range(abf.sweepCount):
            abf.setSweep(sweep)
            spike_t = spike_t + get_spike_times_for_cc(abf)
        if len(spike_t) > 1:
            # Bug fix: the original referenced the undefined name `obj`
            # here (NameError at runtime).
            pdf = calculate_spike_rate_kernel_smoothing(
                spike_t, max(abf.sweepX))
            # First 500 of 1000 samples = first half of the recording.
            results.append([np.mean(pdf[:500]), calculate_ifc(pdf) * 100])
    return results
def get_frequency_components(abf_objects):
    """Collect a down-sampled kernel-smoothed rate curve (every 10th of
    1000 points) for every sweep, of every recording, that produced at
    least two spikes.

    Returns
    -------
    list of 100-element lists, one per qualifying sweep.
    """
    freq_components = []
    sample_points = list(range(0, 1000, 10))
    for recording in abf_objects:
        for sweep_no in range(recording.sweepCount):
            recording.setSweep(sweep_no)
            spike_times = get_spike_times_for_cc(recording)
            if len(spike_times) < 2:
                continue
            kds = calculate_spike_rate_kernel_smoothing(
                spike_times, max(recording.sweepX))
            freq_components.append([kds[p] for p in sample_points])
    return freq_components
def calculate_ifc(abf_objs):
    """Average initial-to-final firing-rate change (IFC, in %) and average
    initial firing rate across a set of CC recordings, using sweep 9.

    Returns
    -------
    (mean IFC %, mean initial rate) tuple, or ``None`` for empty input.
    """
    if len(abf_objs) == 0:
        return None
    ifc = 0
    f_init = 0  # At 10pa
    for obj in abf_objs:
        spikes = get_spike_times_for_cc(obj, 9)
        # Early spikes (<= 0.6 s) and late spikes (> 1.6 s); the /0.5
        # converts counts to Hz assuming 0.5 s analysis windows.
        f_initial = len([spike for spike in spikes if spike <= 0.6]) / 0.5
        f_final = len([spike for spike in spikes if 1.6 < spike]) / 0.5
        # (Removed a leftover debugging branch here that set an unused
        # flag for "Subject18" recordings.)
        if f_initial > 0:
            ifc += ((f_final - f_initial) / f_initial) * 100
        else:
            # No initial spikes: count the recording as a +100 % change.
            ifc += 100
        f_init += f_initial
    return ifc / len(abf_objs), f_init / len(abf_objs)
def check_isi_normality(abf_data, cc=True, cleanup=True):
    """Histogram the pooled inter-spike intervals of every sweep of every
    recording, to eyeball whether the ISI distribution looks normal.

    Parameters
    ----------
    abf_data : iterable of pyABF-like objects.
    cc : True for current-clamp spike detection, False for EPSP.
    cleanup : forwarded to the CC spike detector; shown in the title.
    """
    isis = []
    for abf in abf_data:
        for sweep in range(abf.sweepCount):
            if cc:
                spike_times = get_spike_times_for_cc(abf, sweep,
                                                     cleanup=cleanup)
            else:
                abf.setSweep(sweep)
                spike_times = get_spike_times_for_epsp(abf)
            isis += get_isi_values(spike_times)
    sns.histplot(isis)
    data_kind = "cc" if cc else "EPSP"
    plt.title(f"ISI normality for {data_kind} Data, cleaned: {cleanup}")
    plt.show()
def get_all_kdfs(cc_objects, epsp_objects):
    """Return one kernel-density firing-rate curve per neuron, computed
    from sweep 9 of that neuron's single "CC step" recording.

    A neuron with no spikes gets a 1000-point zero curve; a neuron with
    zero or multiple CC-step files gets an empty list (and a console
    warning), preserving positional alignment with the neuron list.
    """
    neuron_names, neurons = sort_objects_by_neuron(cc_objects, epsp_objects)
    kdfs = []
    for neuron in neurons:
        kdf = []
        cc_files = [obj for obj in neuron if "CC step" in obj.abfFilePath]
        if len(cc_files) == 1:
            recording = cc_files[0]
            spikes = get_spike_times_for_cc(recording, 9)
            if len(spikes) == 0:
                kdf = np.zeros(shape=(1000))
            else:
                recording.setSweep(9)
                kdf = calculate_spike_rate_kernel_smoothing(
                    spikes, max(recording.sweepX))
        elif len(cc_files) > 1:
            print("Problemo")
        else:
            print("Double problemo")
        kdfs.append(kdf)
    return kdfs
def do_masoli_analysis(epsp_obj, cc_obj):
    """Build Masoli-style response metrics for a set of recordings.

    Fills two pandas DataFrames: `epsp_results` with a normalised EPSP
    response frequency per neuron, and `ifc_results` with a per-sweep
    initial-to-final rate change per CC recording.

    NOTE(review): the function builds both DataFrames but never returns
    them (and ends in a leftover `x = True`) — it looks unfinished;
    confirm intended use before relying on it.
    """
    # One entry per distinct neuron folder across both recording sets.
    neuron_names = set(
        [obj.abfFolderPath.split("/")[-1] for obj in epsp_obj + cc_obj])
    epsp_results = pd.DataFrame(index=neuron_names,
                                columns=["SFC", "average IFC"])
    for obj in epsp_obj:
        neuron_name = obj.abfFolderPath.split("/")[-1]
        fresp = []
        for sweep in range(obj.sweepCount):
            obj.setSweep(sweep)
            spikes = get_spike_times_for_epsp(obj)
            # Response window 0.5-0.75 s; /0.25 converts count to Hz.
            spikes = [spike for spike in spikes if 0.5 <= spike < 0.75]
            fresp.append(len(spikes) / 0.25)
        fresp = np.mean(fresp)
        # Normalised deviation from a 50 Hz reference rate.
        # (presumably 50 Hz is the Masoli reference — TODO confirm)
        epsp_results.loc[neuron_name]["SFC"] = (fresp - 50) / 50
    # Column per stimulus level; presumably pA steps from -8 to 24 in
    # steps of 2 — TODO confirm against the protocol.
    col_names = [i for i in range(-8, 26, 2)]
    neuron_names = [obj.abfFolderPath.split("/")[-1] for obj in cc_obj]
    # Disambiguate repeated neuron folders: first occurrence gets an "A"
    # suffix, a repeat gets "B".
    new_neuron_names = []
    for i, neuron in enumerate(neuron_names):
        if neuron in neuron_names[:i]:
            new_neuron_names.append(neuron + "B")
        else:
            new_neuron_names.append(neuron + "A")
    ifc_results = pd.DataFrame(index=new_neuron_names, columns=col_names)
    neurons = []  # NOTE(review): never used below.
    for i, obj in enumerate(cc_obj):
        # TODO: Fix indexing for negative nums.
        neuron_name = new_neuron_names[i]
        for sweep in range(obj.sweepCount):
            obj.setSweep(sweep)
            spikes = get_spike_times_for_cc(obj, sweep)
            # 0.5 s early/late windows; note these bounds (0.5 / 1.5)
            # differ from calculate_ifc's (0.6 / 1.6) — TODO confirm
            # which windows are correct.
            f_initial = len([spike for spike in spikes if spike <= 0.5]) / 0.5
            f_final = len([spike for spike in spikes if 1.5 < spike]) / 0.5
            # NOTE(review): assigns by integer sweep index, but columns
            # are labelled -8..24 — the TODO above refers to this mismatch.
            if f_initial > 0:
                ifc_results.loc[neuron_name][sweep] = (
                    f_final - f_initial) / f_initial
            else:
                ifc_results.loc[neuron_name][sweep] = f_final
    x = True  # NOTE(review): leftover debugger anchor; no effect.
def check_whole_rate_normality(abf_data, cc, cleanup):
    """Histogram the pooled per-sweep spike rates (2 s windows) of every
    recording, to eyeball whether the rate distribution looks normal.

    Parameters
    ----------
    abf_data : iterable of pyABF-like objects.
    cc : True for current-clamp spike detection, False for EPSP.
    cleanup : forwarded to the CC spike detector; shown in the title.
    """
    spike_rates = []
    for abf in abf_data:
        for sweep in range(abf.sweepCount):
            if cc:
                spike_times = get_spike_times_for_cc(abf, sweep,
                                                     cleanup=cleanup)
            else:
                abf.setSweep(sweep)
                spike_times = get_spike_times_for_epsp(abf)
            spike_rates += calculate_spike_rate(spike_times, 2)
    sns.histplot(spike_rates)
    data_kind = "cc" if cc else "EPSP"
    plt.title(f"Rate normality for {data_kind} Data, cleaned: {cleanup}")
    plt.show()
def compute_neuron_vectors(cc_objects, epsp_objects):
    """Build one feature vector per neuron from its CC-step and EPSP files.

    Vector layout (in append order): SFC, IFC, f_initial, then six
    CC-derived features averaged over that neuron's CC-step files
    (B_frac, max, mean, slope/mean, intercept, tau) — or six zeros when
    the neuron has no CC-step files.

    Returns
    -------
    (vectors, neuron_names) : numpy array of shape (n_neurons, 9) and the
    matching list of neuron names.
    """
    neuron_names, neurons = sort_objects_by_neuron(cc_objects, epsp_objects)
    vectors = []
    for n, neuron in enumerate(neurons):
        vector = []
        sub_cc = [obj for obj in neuron if "CC step" in obj.abfFilePath]
        sub_epsp = [obj for obj in neuron if "EPSP" in obj.abfFilePath]
        vector.append(calculate_sfc(sub_epsp))
        # NOTE(review): calculate_ifc returns None for an empty list, so
        # this unpack raises TypeError when a neuron has no CC files —
        # confirm sort_objects_by_neuron guarantees at least one.
        ifc, f_initial = calculate_ifc(sub_cc)
        vector.append(ifc)
        vector.append(f_initial)
        if len(sub_cc) > 0:
            # Accumulators summed over CC files, averaged after the loop.
            B = 0
            B_frac = 0
            max_v = 0
            mean = 0
            median = 0
            m = 0       # slope of the linear fit through density maxima
            c = 0       # intercept of that fit
            e = 0       # fit error term
            tau = 0
            for obj in sub_cc:
                spikes = get_spike_times_for_cc(obj, 9)
                if len(spikes) == 0:
                    # No spikes: contribute nothing (len(sub_cc) still
                    # divides the averages below).
                    pass
                else:
                    obj.setSweep(9)
                    kdf = calculate_spike_rate_kernel_smoothing(
                        spikes, max(obj.sweepX))
                    x_d = np.linspace(0, max(obj.sweepX), 1000)
                    indexes = range(1000)
                    # Edge-trimmed summary statistics of the rate curve.
                    B += min(kdf[100:-200])
                    # NOTE(review): passes the running SUM of B, not this
                    # file's baseline — confirm intended for multi-file
                    # neurons.
                    B_frac += calculate_bfrac(x_d, kdf, B)
                    max_v += max(kdf[100:-100])
                    mean += np.mean(kdf[100:-100])
                    median += np.median(kdf[100:-100])
                    # Local maxima of the curve.  NOTE(review): at i == 0
                    # this compares against kdf[-1] (wrap-around) and at
                    # i == 999 kdf[i + 1] can raise IndexError if the last
                    # point is rising.
                    maxima = [[x, y] for i, x, y in zip(indexes, x_d, kdf)
                              if kdf[i - 1] < y > kdf[i + 1]]
                    if len(maxima) > 1:
                        new_m, new_c, new_e = fit_linear(maxima)
                    else:
                        new_m, new_c, new_e = 0, 0, 0
                    m += new_m
                    c += new_c
                    e += new_e
                    tau += get_tau(kdf, x_d)
            # vector.append(B/len(sub_cc))
            vector.append(B_frac / len(sub_cc))
            vector.append(max_v / len(sub_cc))
            vector.append(mean / len(sub_cc))
            # vector.append(median/len(sub_cc))
            # Slope normalised by the mean rate; 0 when the mean is 0.
            if (mean / len(sub_cc)) != 0:
                vector.append((m / len(sub_cc)) / (mean / len(sub_cc)))
            else:
                vector.append(0)
            vector.append(c / len(sub_cc))
            # vector.append(e/len(sub_cc))
            vector.append(tau / len(sub_cc))
        else:
            # No CC files: pad with zeros so every vector has equal length.
            vector.append(0)
            vector.append(0)
            vector.append(0)
            vector.append(0)
            vector.append(0)
            vector.append(0)
            # vector.append(0)
            # vector.append(0)
            # vector.append(0)
        vectors.append(vector)
    return np.array(vectors), list(neuron_names)