Example #1
    def _run(self, recording, output_folder):
        p = self.params

        # Detection stage: configure the HS2 spike detector for this probe
        self.H = hs.HSDetection(self.Probe,
                                file_directory_name=str(output_folder),
                                left_cutout_time=p['left_cutout_time'],
                                right_cutout_time=p['right_cutout_time'],
                                threshold=p['detection_threshold'],
                                to_localize=True,
                                num_com_centers=p['num_com_centers'],
                                maa=p['maa'],
                                ahpthr=p['ahpthr'],
                                out_file_name=p['out_file_name'],
                                decay_filtering=p['decay_filtering'],
                                save_all=p['save_all'],
                                amp_evaluation_time=p['amp_evaluation_time'],
                                spk_evaluation_time=p['spk_evaluation_time'])

        # Run threshold detection over the raw data in chunks of tInc frames
        self.H.DetectFromRaw(load=True, tInc=100000)

        sorted_file = str(output_folder / 'HS2_sorted.hdf5')
        # Cluster only if detection found any spikes
        if not self.H.spikes.empty:
            self.C = hs.HSClustering(self.H)
            # Reduce spike waveforms to PCA components used as shape features
            self.C.ShapePCA(pca_ncomponents=p['pca_ncomponents'],
                            pca_whiten=p['pca_whiten'])
            self.C.CombinedClustering(
                alpha=p['clustering_alpha'],
                cluster_subset=p['clustering_subset'],
                bandwidth=p['clustering_bandwidth'],
                bin_seeding=p['clustering_bin_seeding'],
                n_jobs=p['clustering_n_jobs'],
                min_bin_freq=p['clustering_min_bin_freq'])
        else:
            # No spikes detected: create the clustering object anyway so a
            # valid (empty) output file is still written below
            self.C = hs.HSClustering(self.H)

        print('Saving to', sorted_file)
        self.C.SaveHDF5(sorted_file, sampling=self.Probe.fps)
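
A note on the parameters: _run() pulls every tuning value from self.params. A minimal sketch of what that dictionary might look like, with the keys taken from the example above and placeholder values that are assumptions rather than the sorter's actual defaults:

# Hypothetical parameter dictionary for the _run() method above.
# Keys are from the example; the values are illustrative placeholders.
params = {
    'left_cutout_time': 0.3,        # ms of waveform kept before the spike peak
    'right_cutout_time': 1.8,       # ms of waveform kept after the spike peak
    'detection_threshold': 20,      # spike detection threshold
    'num_com_centers': 1,           # centres for centre-of-mass localisation
    'maa': 12,                      # minimum average amplitude
    'ahpthr': 11,                   # after-hyperpolarisation threshold
    'out_file_name': 'HS2_detected',
    'decay_filtering': False,
    'save_all': False,
    'amp_evaluation_time': 0.4,     # ms
    'spk_evaluation_time': 1.0,     # ms
    'pca_ncomponents': 2,           # PCA components kept per waveform
    'pca_whiten': True,
    'clustering_alpha': 5.5,        # weight of shape vs. location features
    'clustering_subset': None,      # cluster a random subset of spikes if set
    'clustering_bandwidth': 5.5,    # mean-shift kernel bandwidth
    'clustering_bin_seeding': True,
    'clustering_n_jobs': -1,
    'clustering_min_bin_freq': 1,
}
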
Example #2
    def test_03_run_clustering(self):
        # Load spikes written by an earlier detection test, then reduce the
        # waveform shapes to two whitened PCA components
        self.H.LoadDetected()
        self.C = hs.HSClustering(self.H)
        self.C.ShapePCA(pca_ncomponents=2, pca_whiten=True)

        self.C.CombinedClustering(
            alpha=4, bandwidth=5.0, bin_seeding=False, n_jobs=4, cluster_subset=1000
        )
        fname = os.path.join(FILEDIR, "sorted.hdf5")
        self.C.SaveHDF5(fname)
        self.assertTrue(os.path.isfile(fname))

        # Smoke-test the plotting helpers by writing figures to disk
        plt.figure()
        self.C.PlotShapes(range(2))
        plt.savefig(os.path.join(FILEDIR, "cl_shapes.png"))
        plt.figure()
        self.C.PlotNeighbourhood(1, radius=6, alpha=0.8)
        plt.savefig(os.path.join(FILEDIR, "cl_neigh.png"))
        plt.figure()
        self.C.PlotAll(invert=True)
        plt.savefig(os.path.join(FILEDIR, "locations_clustered.png"))
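
This method only runs as part of a larger test module: self.H, FILEDIR, os, hs and plt are all defined elsewhere in the file. A hedged sketch of the scaffolding it assumes (the class name, the Agg backend and the FILEDIR definition are guesses, not taken from the original tests):

import os
import unittest

import matplotlib
matplotlib.use('Agg')  # assumed: non-interactive backend so savefig() works headlessly
import matplotlib.pyplot as plt
import herdingspikes as hs

# Assumed: detection output and figures live next to the test file
FILEDIR = os.path.dirname(os.path.abspath(__file__))

class TestHS2Clustering(unittest.TestCase):  # hypothetical class name
    # An earlier test (test_01/test_02, by the numbering) is expected to have
    # run hs.HSDetection and written its results to FILEDIR, so that
    # self.H.LoadDetected() in test_03 can read them back.
    ...
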
Example #3
    @classmethod
    def _run_from_folder(cls, output_folder, params, verbose):
        # Lazy import so the module can be loaded without herdingspikes installed
        import herdingspikes as hs

        recording = load_extractor(output_folder /
                                   'spikeinterface_recording.json')

        p = params

        # Bandpass filter
        if (p['filter'] and p['freq_min'] is not None
                and p['freq_max'] is not None):
            recording = st.bandpass_filter(recording=recording,
                                           freq_min=p['freq_min'],
                                           freq_max=p['freq_max'])

        # Optionally rescale amplitudes based on the 5-95% quantile range
        if p['pre_scale']:
            recording = st.normalize_by_quantile(recording=recording,
                                                 scale=p['pre_scale_value'],
                                                 median=0.0,
                                                 q1=0.05,
                                                 q2=0.95)

        print('Herdingspikes uses the OLD spikeextractors with '
              'RecordingExtractorOldAPI')
        old_api_recording = RecordingExtractorOldAPI(recording)

        # this should have its name changed
        Probe = hs.probe.RecordingExtractor(
            old_api_recording,
            masked_channels=p['probe_masked_channels'],
            inner_radius=p['probe_inner_radius'],
            neighbor_radius=p['probe_neighbor_radius'],
            event_length=p['probe_event_length'],
            peak_jitter=p['probe_peak_jitter'])

        H = hs.HSDetection(Probe,
                           file_directory_name=str(output_folder),
                           left_cutout_time=p['left_cutout_time'],
                           right_cutout_time=p['right_cutout_time'],
                           threshold=p['detect_threshold'],
                           to_localize=True,
                           num_com_centers=p['num_com_centers'],
                           maa=p['maa'],
                           ahpthr=p['ahpthr'],
                           out_file_name=p['out_file_name'],
                           decay_filtering=p['decay_filtering'],
                           save_all=p['save_all'],
                           amp_evaluation_time=p['amp_evaluation_time'],
                           spk_evaluation_time=p['spk_evaluation_time'])

        H.DetectFromRaw(load=True, tInc=int(p['t_inc']))

        sorted_file = str(output_folder / 'HS2_sorted.hdf5')
        if not H.spikes.empty:
            C = hs.HSClustering(H)
            C.ShapePCA(pca_ncomponents=p['pca_ncomponents'],
                       pca_whiten=p['pca_whiten'])
            C.CombinedClustering(alpha=p['clustering_alpha'],
                                 cluster_subset=p['clustering_subset'],
                                 bandwidth=p['clustering_bandwidth'],
                                 bin_seeding=p['clustering_bin_seeding'],
                                 n_jobs=p['clustering_n_jobs'],
                                 min_bin_freq=p['clustering_min_bin_freq'])
        else:
            C = hs.HSClustering(H)

        if p['filter_duplicates']:
            # Within each cluster, drop spikes that follow the previous spike
            # by less than spk_evaluation_time (converted from ms to frames)
            uids = C.spikes.cl.unique()
            for u in uids:
                s = (C.spikes[C.spikes.cl == u].t.diff()
                     < p['spk_evaluation_time'] / 1000 * Probe.fps)
                C.spikes = C.spikes.drop(s.index[s])

        if verbose:
            print('Saving to', sorted_file)
        C.SaveHDF5(sorted_file, sampling=Probe.fps)
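
The filter_duplicates step at the end is plain pandas: within each cluster it drops any spike that follows its predecessor by less than spk_evaluation_time, converted from milliseconds to frames. The same logic as a standalone sketch; the 'cl' (cluster id) and 't' (spike time in frames) columns come from the example, while the function name is hypothetical:

import pandas as pd

def drop_duplicate_spikes(spikes: pd.DataFrame, window_frames: float) -> pd.DataFrame:
    # Per-cluster inter-spike intervals; the first spike of each cluster has
    # no predecessor (NaN diff) and is always kept, as in the loop above.
    isi = spikes.groupby('cl')['t'].diff()
    keep = isi.isna() | (isi >= window_frames)
    return spikes[keep]

# Equivalent to the loop above:
# C.spikes = drop_duplicate_spikes(C.spikes,
#                                  p['spk_evaluation_time'] / 1000 * Probe.fps)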