Code example #1
File: test_not_broken.py  Project: jamesjun/HS2
    def setUp(self):
        # Create the output directory, then build an emulated probe and a
        # spike detector that the tests can share.
        os.makedirs(FILEDIR, exist_ok=True)
        self.Probe = hs.probe.HierlmannVisapyEmulationProbe(
            DATA, inner_radius=60, neighbor_radius=100)
        self.H = hs.HSDetection(self.Probe,
                                out_file_name=FILENAME,
                                file_directory_name=FILEDIR)
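For orientation, here is a minimal sketch of how a test might exercise the detector built in this fixture. The method name, the assertion, and the tInc value are assumptions for illustration; DetectFromRaw, HSClustering, and SaveHDF5 are the same calls used in the examples below.

    def test_detect_and_cluster(self):
        # Run detection over the emulated recording in chunks of 100000 frames
        # (chunk size chosen arbitrarily for this sketch).
        self.H.DetectFromRaw(load=True, tInc=100000)
        # The emulated data is expected to contain at least one spike.
        self.assertFalse(self.H.spikes.empty)
        # Cluster the detected spikes and write them to HDF5.
        C = hs.HSClustering(self.H)
        C.SaveHDF5(os.path.join(FILEDIR, 'sorted.hdf5'),
                   sampling=self.Probe.fps)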
Code example #2
    def _run(self, recording, output_folder):
        recording = recover_recording(recording)
        p = self.params

        if recording.is_filtered and p['filter']:
            print(
                "Warning! The recording is already filtered, but the Herding Spikes filter is enabled. "
                "You can disable filtering by setting the 'filter' parameter to False.")

        self.H = hs.HSDetection(self.Probe,
                                file_directory_name=str(output_folder),
                                left_cutout_time=p['left_cutout_time'],
                                right_cutout_time=p['right_cutout_time'],
                                threshold=p['detect_threshold'],
                                to_localize=True,
                                num_com_centers=p['num_com_centers'],
                                maa=p['maa'],
                                ahpthr=p['ahpthr'],
                                out_file_name=p['out_file_name'],
                                decay_filtering=p['decay_filtering'],
                                save_all=p['save_all'],
                                amp_evaluation_time=p['amp_evaluation_time'],
                                spk_evaluation_time=p['spk_evaluation_time'])

        self.H.DetectFromRaw(load=True, tInc=100000)

        sorted_file = str(output_folder / 'HS2_sorted.hdf5')
        if not self.H.spikes.empty:
            self.C = hs.HSClustering(self.H)
            self.C.ShapePCA(pca_ncomponents=p['pca_ncomponents'],
                            pca_whiten=p['pca_whiten'])
            self.C.CombinedClustering(
                alpha=p['clustering_alpha'],
                cluster_subset=p['clustering_subset'],
                bandwidth=p['clustering_bandwidth'],
                bin_seeding=p['clustering_bin_seeding'],
                n_jobs=p['clustering_n_jobs'],
                min_bin_freq=p['clustering_min_bin_freq'])
        else:
            self.C = hs.HSClustering(self.H)

        if p['filter_duplicates']:
            # Within each cluster, drop spikes that follow their predecessor
            # by less than the spike evaluation window (converted to frames).
            uids = self.C.spikes.cl.unique()
            for u in uids:
                s = (self.C.spikes[self.C.spikes.cl == u].t.diff()
                     < p['spk_evaluation_time'] / 1000 * self.Probe.fps)
                self.C.spikes = self.C.spikes.drop(s.index[s])

        print('Saving to', sorted_file)
        self.C.SaveHDF5(sorted_file, sampling=self.Probe.fps)
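The duplicate filter above is plain pandas: within each cluster it computes successive spike-time differences and drops any event that falls inside the evaluation window of its predecessor. A self-contained sketch of the same idea on toy data follows; the column layout mirrors the HS2 spike table, while the times and threshold are made up.

import pandas as pd

# Toy spike table: cluster label (cl) and spike time in frames (t).
spikes = pd.DataFrame({'cl': [0, 0, 0, 1, 1],
                       't': [100, 103, 500, 200, 900]})
min_gap = 10  # assumed minimum inter-spike interval, in frames

for u in spikes.cl.unique():
    # True where a spike in cluster u follows its predecessor too closely.
    s = spikes[spikes.cl == u].t.diff() < min_gap
    spikes = spikes.drop(s.index[s])

print(spikes)  # the spike at t=103 is dropped as a duplicate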
Code example #3
    def _run(self, recording, output_folder):
        p = self.params

        self.H = hs.HSDetection(
            self.Probe, file_directory_name=str(output_folder),
            left_cutout_time=p['left_cutout_time'],
            right_cutout_time=p['right_cutout_time'],
            threshold=p['detection_threshold'],
            to_localize=True,
            num_com_centers=p['num_com_centers'],
            maa=p['maa'],
            ahpthr=p['ahpthr'],
            out_file_name=p['out_file_name'],
            decay_filtering=p['decay_filtering'],
            save_all=p['save_all'],
            amp_evaluation_time=p['amp_evaluation_time'],
            spk_evaluation_time=p['spk_evaluation_time']
        )

        self.H.DetectFromRaw(load=True, tInc=1000000)

        sorted_file = str(output_folder / 'HS2_sorted.hdf5')
        if not self.H.spikes.empty:
            self.C = hs.HSClustering(self.H)
            self.C.ShapePCA(pca_ncomponents=p['pca_ncomponents'],
                            pca_whiten=p['pca_whiten'])
            self.C.CombinedClustering(
                alpha=p['clustering_alpha'],
                cluster_subset=p['clustering_subset'],
                bandwidth=p['clustering_bandwidth'],
                bin_seeding=p['clustering_bin_seeding'],
                n_jobs=p['clustering_n_jobs'],
                min_bin_freq=p['clustering_min_bin_freq']
            )
        else:
            self.C = hs.HSClustering(self.H)

        print('Saving to', sorted_file)
        self.C.SaveHDF5(sorted_file, sampling=self.Probe.fps)
Code example #4
    def _run_from_folder(cls, output_folder, params, verbose):
        import herdingspikes as hs

        recording = load_extractor(output_folder /
                                   'spikeinterface_recording.json')

        p = params

        # Bandpass filter
        if p['filter'] and p['freq_min'] is not None and p['freq_max'] is not None:
            recording = st.bandpass_filter(recording=recording,
                                           freq_min=p['freq_min'],
                                           freq_max=p['freq_max'])

        if p['pre_scale']:
            recording = st.normalize_by_quantile(recording=recording,
                                                 scale=p['pre_scale_value'],
                                                 median=0.0,
                                                 q1=0.05,
                                                 q2=0.95)

        print(
            'Herdingspikes uses the OLD spikeextractors API via RecordingExtractorOldAPI'
        )
        old_api_recording = RecordingExtractorOldAPI(recording)

        # this should have its name changed
        Probe = hs.probe.RecordingExtractor(
            old_api_recording,
            masked_channels=p['probe_masked_channels'],
            inner_radius=p['probe_inner_radius'],
            neighbor_radius=p['probe_neighbor_radius'],
            event_length=p['probe_event_length'],
            peak_jitter=p['probe_peak_jitter'])

        H = hs.HSDetection(Probe,
                           file_directory_name=str(output_folder),
                           left_cutout_time=p['left_cutout_time'],
                           right_cutout_time=p['right_cutout_time'],
                           threshold=p['detect_threshold'],
                           to_localize=True,
                           num_com_centers=p['num_com_centers'],
                           maa=p['maa'],
                           ahpthr=p['ahpthr'],
                           out_file_name=p['out_file_name'],
                           decay_filtering=p['decay_filtering'],
                           save_all=p['save_all'],
                           amp_evaluation_time=p['amp_evaluation_time'],
                           spk_evaluation_time=p['spk_evaluation_time'])

        H.DetectFromRaw(load=True, tInc=int(p['t_inc']))

        sorted_file = str(output_folder / 'HS2_sorted.hdf5')
        if not H.spikes.empty:
            C = hs.HSClustering(H)
            C.ShapePCA(pca_ncomponents=p['pca_ncomponents'],
                       pca_whiten=p['pca_whiten'])
            C.CombinedClustering(alpha=p['clustering_alpha'],
                                 cluster_subset=p['clustering_subset'],
                                 bandwidth=p['clustering_bandwidth'],
                                 bin_seeding=p['clustering_bin_seeding'],
                                 n_jobs=p['clustering_n_jobs'],
                                 min_bin_freq=p['clustering_min_bin_freq'])
        else:
            C = hs.HSClustering(H)

        if p['filter_duplicates']:
            # Within each cluster, drop spikes that follow their predecessor
            # by less than the spike evaluation window (converted to frames).
            uids = C.spikes.cl.unique()
            for u in uids:
                s = (C.spikes[C.spikes.cl == u].t.diff()
                     < p['spk_evaluation_time'] / 1000 * Probe.fps)
                C.spikes = C.spikes.drop(s.index[s])

        if verbose:
            print('Saving to', sorted_file)
        C.SaveHDF5(sorted_file, sampling=Probe.fps)
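All four snippets read their settings from a flat params dictionary. For reference, here is a sketch of such a dictionary covering the keys consumed by example #4; every value below is an illustrative placeholder, not a recommended setting.

params = {
    # preprocessing (example #4 only); all values are assumptions
    'filter': True, 'freq_min': 300.0, 'freq_max': 6000.0,
    'pre_scale': True, 'pre_scale_value': 20.0,
    # probe construction
    'probe_masked_channels': [], 'probe_inner_radius': 70,
    'probe_neighbor_radius': 90, 'probe_event_length': 0.26,
    'probe_peak_jitter': 0.2,
    # detection
    'left_cutout_time': 0.3, 'right_cutout_time': 1.8,
    'detect_threshold': 20, 'num_com_centers': 1,
    'maa': 12, 'ahpthr': 11, 'out_file_name': 'HS2_detected',
    'decay_filtering': False, 'save_all': False,
    'amp_evaluation_time': 0.4, 'spk_evaluation_time': 1.7,
    't_inc': 100000,
    # clustering
    'pca_ncomponents': 2, 'pca_whiten': True,
    'clustering_alpha': 5.5, 'clustering_subset': None,
    'clustering_bandwidth': 5.5, 'clustering_bin_seeding': True,
    'clustering_n_jobs': -1, 'clustering_min_bin_freq': 16,
    # postprocessing
    'filter_duplicates': True,
}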