def getSortedTimes(dirName, chanGroup):
    """Collect sorted per-cluster spike times for every segment of a dataset.

    Parameters
    ----------
    dirName : str
        tridesclous working directory passed to ``DataIO``.
    chanGroup : int
        Channel group whose catalogue is loaded.

    Returns
    -------
    numpy.ndarray (dtype=object), length ``dataio.nb_segment``
        One entry per segment. Each entry is a 2-D array whose first row is
        the cluster labels and whose remaining layout mirrors the original
        roll/transpose packing; segments with no spikes yield an empty array.
    """
    dataio = DataIO(dirname=dirName)
    dataio.load_catalogue(chan_grp=chanGroup)
    catalogueconstructor = CatalogueConstructor(dataio=dataio)
    sample_rate = dataio.sample_rate  # used to convert sample indices to seconds

    unitTimes = np.empty(dataio.nb_segment, dtype=object)
    for j in range(dataio.nb_segment):
        try:
            # Cluster labels known to the catalogue, and the (time, label)
            # pairs detected in this segment. get_spikes is called once and
            # both columns are pulled from the same result (the original
            # called it twice).
            cluster_ids = np.array(list(catalogueconstructor.cluster_labels))
            seg_spikes = dataio.get_spikes(j)
            clusters = np.array([s[1] for s in seg_spikes])
            spike_times = np.array([s[0] for s in seg_spikes])
        except Exception:
            # BUG FIX: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit. Fall back to empty data when the
            # segment has no spikes or no catalogue.
            cluster_ids = np.array([])
            clusters = np.array([])
            spike_times = np.array([])

        # Per-cluster row indices, then per-cluster spike times in seconds.
        idd = {i: np.argwhere(clusters == i) for i in cluster_ids}
        times = {i: spike_times[idd[i]] / sample_rate for i in cluster_ids}

        if not times:
            # BUG FIX: the original fed an empty list to np.max here and
            # raised ValueError, so the empty fallback above never survived.
            unitTimes[j] = np.empty((0,))
            continue

        # Pad every cluster's column to the same length (+1 slot for the
        # label row inserted below).
        mx = max(t.size for t in times.values())
        for t in times.values():
            t.resize(mx + 1, 1)

        timesArray = np.array([times[i] for i in times.keys()])
        # Roll the flattened array by one so slot 0 of each column is free,
        # then write the cluster labels into that first slot.
        timesArray = np.roll(timesArray, 1)
        timesArray[:, 0, :] = np.array(list(times.keys())).reshape(timesArray.shape[0], 1)
        timesArray = np.transpose(timesArray)
        unitTimes[j] = timesArray[0]
    return unitTimes
def run_peeler(dirname):
    """Run the tridesclous Peeler on channel group 0 of *dirname*.

    Loads the previously-built catalogue, runs the peeler (timing the run),
    then prints the number of detected spikes in every segment.
    """
    data_handle = DataIO(dirname=dirname)
    catalogue = data_handle.load_catalogue(chan_grp=0)

    peeler = Peeler(data_handle)
    peeler.change_params(catalogue=catalogue)

    started = time.perf_counter()
    peeler.run()
    elapsed = time.perf_counter() - started
    print('peeler.run', elapsed)
    print()

    # Per-segment spike counts.
    for seg_num in range(data_handle.nb_segment):
        spikes = data_handle.get_spikes(seg_num)
        print('seg_num', seg_num, 'nb_spikes', spikes.size)
def export_spikes(dirname, array_idx, chan_grp):
    """Export the spikes of one channel group to ``<array>_<grp>_spikes.csv``.

    Cluster labels are remapped to their cell labels via the catalogue;
    spikes with a negative (unassigned/noise) label are dropped.
    """
    print('Exporting ch %d' % chan_grp)

    columns = ['array', 'electrode', 'cell', 'segment', 'time']
    rows = {col: [] for col in columns}

    array = cfg['arrays'][array_idx]
    dataio = DataIO(dirname=dirname, ch_grp=chan_grp)
    catalogue = dataio.load_catalogue(chan_grp=chan_grp)
    dataio._open_processed_data(ch_grp=chan_grp)
    clusters = catalogue['clusters']

    for seg_num in range(dataio.nb_segment):
        spikes = dataio.get_spikes(seg_num=seg_num, chan_grp=chan_grp)

        # Rewrite each cluster label as its cell label, in catalogue order.
        spike_labels = spikes['cluster_label'].copy()
        for cluster in clusters:
            spike_labels[spike_labels == cluster['cluster_label']] = cluster['cell_label']

        for index, label in zip(spikes['index'], spike_labels):
            if label < 0:
                continue  # unassigned / noise spike
            rows['array'].append(array)
            rows['electrode'].append(chan_grp)
            rows['cell'].append(label)
            rows['segment'].append(seg_num)
            rows['time'].append(index)

        dataio.flush_processed_signals(seg_num=seg_num, chan_grp=chan_grp)

    df = pd.DataFrame(rows, columns=columns)
    df.to_csv(os.path.join(dirname, '%s_%d_spikes.csv' % (array, chan_grp)),
              index=False)
def make_animation():
    """
    Good example between 1.272 1.302 because collision

    Renders the peeler's template-subtraction process frame by frame:
    the filtered signals, then for each detected spike its template
    prediction and the residual after subtracting it. Frames are written
    as png/fig0.png, png/fig1.png, ... plus one summary figure of all
    predictions.

    NOTE(review): relies on module-level globals not visible in this
    block — ``dirname``, ``channels``, ``int32_to_rgba``,
    ``make_prediction_signals`` — confirm they are defined at import time.
    """
    dataio = DataIO(dirname=dirname)
    catalogue = dataio.load_catalogue(chan_grp=0)
    clusters = catalogue['clusters']
    sr = dataio.sample_rate

    # also a good one a 11.356 - 11.366
    # Window of interest (seconds) converted to sample indices.
    t1, t2 = 1.272, 1.295
    i1, i2 = int(t1 * sr), int(t2 * sr)

    # Keep only the spikes falling inside the window.
    spikes = dataio.get_spikes()
    spike_times = spikes['index'] / sr
    keep = (spike_times >= t1) & (spike_times <= t2)
    spikes = spikes[keep]
    print(spikes)

    sigs = dataio.get_signals_chunk(i_start=i1, i_stop=i2, signal_type='processed')
    sigs = sigs.copy()
    times = np.arange(sigs.shape[0]) / dataio.sample_rate

    def plot_spread_sigs(sigs, ax, ratioY=0.02, **kargs):
        # spread signals: scale each channel and offset it vertically so
        # all channels are stacked in one axes
        sigs2 = sigs * ratioY
        sigs2 += np.arange(0, len(channels))[np.newaxis, :]
        ax.plot(times, sigs2, **kargs)
        ax.set_ylim(-0.5, len(channels) - .5)
        ax.set_xticks([])
        ax.set_yticks([])

    # residuals starts as the raw chunk and has each spike's predicted
    # template subtracted in turn; spike indices are shifted to be
    # chunk-relative.
    residuals = sigs.copy()
    local_spikes = spikes.copy()
    local_spikes['index'] -= i1

    #~ fig, ax = plt.subplots()
    #~ plot_spread_sigs(sigs, ax, color='k')

    num_fig = 0  # sequential frame counter for png/fig{num_fig}.png

    # Summary figure accumulating every template prediction.
    fig_pred, ax_predictions = plt.subplots()
    ax_predictions.set_title('All detected templates from catalogue')

    # Frame 0: the initial filtered signals.
    fig, ax = plt.subplots()
    plot_spread_sigs(residuals, ax, color='k', lw=2)
    ax.set_title('Initial filtered signals with spikes')
    fig.savefig('../img/peeler_animation_sigs.png')
    fig.savefig('png/fig{}.png'.format(num_fig))
    num_fig += 1

    for i in range(local_spikes.size):
        label = local_spikes['cluster_label'][i]
        # Cluster color is stored packed as int32; unpack to an RGBA float tuple.
        color = clusters[clusters['cluster_label'] == label]['color'][0]
        color = int32_to_rgba(color, mode='float')

        # Template prediction for this single spike over the whole chunk.
        pred = make_prediction_signals(local_spikes[i:i + 1], 'float32', (i2 - i1, len(channels)), catalogue)

        # Frame: current residual with this spike's template overlaid.
        fig, ax = plt.subplots()
        plot_spread_sigs(residuals, ax, color='k', lw=2)
        plot_spread_sigs(pred, ax, color=color, lw=1.5)
        ax.set_title('Dected spike label {}'.format(label))
        fig.savefig('png/fig{}.png'.format(num_fig))
        num_fig += 1

        # Peel: subtract the prediction, and add it to the summary figure.
        residuals -= pred
        plot_spread_sigs(pred, ax_predictions, color=color, lw=1.5)

        # Frame: the new residual with the subtracted template dashed.
        fig, ax = plt.subplots()
        plot_spread_sigs(residuals, ax, color='k', lw=2)
        plot_spread_sigs(pred, ax, color=color, lw=1, ls='--')
        ax.set_title('New residual after substraction')
        fig.savefig('png/fig{}.png'.format(num_fig))
        num_fig += 1

    # Final frame: the accumulated predictions summary.
    fig_pred.savefig('png/fig{}.png'.format(num_fig))
    num_fig += 1