def get_features(self, cluster_id, load_all=False):
    """Return a ``Bunch`` of normalized features for one cluster.

    Overridden to take into account the sparse structure: when
    ``self.features_spike_ids`` is set, features are stored for only a
    subset of spikes and indices must be remapped into that subset.

    Parameters
    ----------
    cluster_id : int
        The cluster whose spikes to load.
    load_all : bool
        If True, do not subsample the spikes (dense case only).

    Returns
    -------
    Bunch
        With ``data`` (normalized features, shape
        ``(n_spikes, n_channels, n_features_per_channel)``),
        ``spike_ids``, ``spike_clusters``, and ``masks``.
    """
    # Only keep spikes belonging to the features spike ids.
    if self.features_spike_ids is not None:
        # All spikes of the cluster, restricted to those that actually
        # have features stored.
        spike_ids = self._select_spikes(cluster_id)
        spike_ids = np.intersect1d(spike_ids, self.features_spike_ids)
        # Relative indices of the spikes in the self.features_spike_ids
        # array, necessary to load features from all_features which only
        # contains the subset of the spikes.
        spike_ids_rel = _index_of(spike_ids, self.features_spike_ids)
    else:
        # Dense case: every spike has features; optionally subsample.
        spike_ids = self._select_spikes(
            cluster_id,
            self.n_spikes_features if not load_all else None)
        spike_ids_rel = spike_ids
    st = self.spike_templates[spike_ids]
    nc = self.n_channels
    nfpc = self.n_features_per_channel
    ns = len(spike_ids)
    # Densify the per-template sparse feature storage into a full
    # (n_spikes, n_features_per_channel, n_channels) array.
    f = _densify(spike_ids_rel, self.all_features,
                 self.features_ind[st, :], self.n_channels)
    # Reorder axes to (n_spikes, n_channels, n_features_per_channel).
    f = np.transpose(f, (0, 2, 1))
    assert f.shape == (ns, nc, nfpc)
    b = Bunch()
    # Normalize features symmetrically around zero.
    m = self.get_feature_lim()
    f = _normalize(f, -m, m)
    b.data = f
    b.spike_ids = spike_ids
    b.spike_clusters = self.spike_clusters[spike_ids]
    b.masks = self.all_masks[spike_ids]
    return b
def get_features(self, cluster_id, load_all=False):
    """Return normalized features for one cluster (sparse-aware).

    When ``self.features_spike_ids`` is set, only the spikes listed
    there have features stored, so the selection is intersected with
    that subset and remapped to relative indices before densifying.
    """
    if self.features_spike_ids is not None:
        # Restrict the cluster's spikes to those with stored features,
        # then compute their positions inside the stored subset.
        selected = self._select_spikes(cluster_id)
        selected = np.intersect1d(selected, self.features_spike_ids)
        relative = _index_of(selected, self.features_spike_ids)
    else:
        # Dense storage: subsample unless load_all is requested.
        limit = None if load_all else self.n_spikes_features
        selected = self._select_spikes(cluster_id, limit)
        relative = selected
    spike_templates = self.spike_templates[selected]
    dense = _densify(relative, self.all_features,
                     self.features_ind[spike_templates, :],
                     self.n_channels)
    # (n_spikes, n_features_per_channel, n_channels)
    # -> (n_spikes, n_channels, n_features_per_channel)
    dense = np.transpose(dense, (0, 2, 1))
    assert dense.shape == (len(selected),
                           self.n_channels,
                           self.n_features_per_channel)
    # Normalize symmetrically around zero.
    limit_val = self.get_feature_lim()
    out = Bunch()
    out.data = _normalize(dense, -limit_val, limit_val)
    out.spike_ids = selected
    out.spike_clusters = self.spike_clusters[selected]
    out.masks = self.all_masks[selected]
    return out
def get_features(self, cluster_id, load_all=False):
    """Return the normalized features of one cluster.

    If ``load_all`` is True, no subsampling of the spikes is done.
    """
    limit = None if load_all else self.n_spikes_features
    bunch = self._select_data(cluster_id, self.all_features, limit)
    m = self.get_feature_lim()
    # Copy before normalizing so the cached array is left untouched.
    bunch.data = _normalize(bunch.data.copy(), -m, +m)
    return bunch
def get_waveforms(self, cluster_id):
    """Return the normalized waveforms of one cluster as a one-item list."""
    bunch = self._select_data(cluster_id,
                              self.all_waveforms,
                              self.n_spikes_waveforms)
    lo, hi = self.get_waveform_lims()
    # Cache the normalized waveforms.
    bunch.data = _normalize(bunch.data, lo, hi)
    return [bunch]
def get_features(self, cluster_id, load_all=False):
    """Return a ``Bunch`` of normalized features for one cluster.

    Parameters
    ----------
    cluster_id : int
        The cluster whose spikes to load.
    load_all : bool
        If True, do not subsample the spikes.
    """
    data = self._select_data(
        cluster_id,
        self.all_features,
        (self.n_spikes_features if not load_all else None),
    )
    m = self.get_feature_lim()
    # Copy before normalizing so the cached feature array is not
    # modified in place.
    data.data = _normalize(data.data.copy(), -m, +m)
    return data
def get_waveforms(self, cluster_id): data = self._select_data( cluster_id, self.all_waveforms, self.n_spikes_waveforms, ) # Cache the normalized waveforms. m, M = self.get_waveform_lims() data.data = _normalize(data.data, m, M) return [data]
def get_waveforms(self, cluster_id):
    """Return waveform and template data for one cluster.

    Returns a list of ``Bunch`` objects: the sparsified, normalized
    waveforms (tag ``'waveforms'``) when raw waveforms are available,
    followed by the cluster's unwhitened templates (tag
    ``'templates'``) scaled by the cluster's mean spike amplitude.
    """
    m, M = self.get_waveform_lims()
    if self.all_waveforms is not None:
        # Waveforms.
        waveforms_b = self._select_data(
            cluster_id,
            self.all_waveforms,
            self.n_spikes_waveforms,
        )
        w = waveforms_b.data
        # Sparsify: keep only channels with a non-zero mean waveform.
        channels = np.nonzero(w.mean(axis=1).mean(axis=0))[0]
        w = w[:, :, channels]
        waveforms_b.channels = channels
        # Normalize: center each spike's waveform around its own mean,
        # then rescale to the global waveform limits.
        mean = w.mean(axis=1).mean(axis=1)
        w = w.astype(np.float64)
        w -= mean[:, np.newaxis, np.newaxis]
        w = _normalize(w, m, M)
        waveforms_b.data = w
        waveforms_b.cluster_id = cluster_id
        waveforms_b.tag = 'waveforms'
    else:
        # No raw waveform data available; return templates only.
        waveforms_b = None
    # Find the templates corresponding to the cluster.
    template_ids = np.nonzero(self.get_cluster_templates(cluster_id))[0]
    # Templates (unwhitened).
    templates = self.templates_unw[template_ids]
    assert templates.ndim == 3
    # Masks.
    masks = self.template_masks[template_ids]
    assert masks.ndim == 2
    assert templates.shape[0] == masks.shape[0]
    # Find mean amplitude over a (possibly subsampled) set of spikes.
    spike_ids = self._select_spikes(cluster_id,
                                    self.n_spikes_waveforms_lim)
    mean_amp = self.all_amplitudes[spike_ids].mean()
    # Normalize. Centering is deliberately skipped here (kept below for
    # reference); templates are only scaled.
    # mean = templates.mean(axis=1).mean(axis=1)
    templates = templates.astype(np.float64).copy()
    # templates -= mean[:, np.newaxis, np.newaxis]
    templates *= mean_amp
    # Rescale into the same [-1, 1]-style range used by _normalize.
    templates *= 2. / (M - m)
    template_b = Bunch(
        data=templates,
        masks=masks,
        alpha=1.,
        cluster_id=cluster_id,
        tag='templates',
    )
    if waveforms_b is not None:
        return [waveforms_b, template_b]
    else:
        return [template_b]
def get_background_features(self):
    """Return a regularly-subsampled, normalized view of all features."""
    # Take every k-th spike so roughly n_spikes_background_features
    # spikes remain.
    step = max(1, int(self.n_spikes // self.n_spikes_background_features))
    subset = slice(None, None, step)
    lim = self.get_feature_lim()
    bunch = Bunch()
    # Copy before normalizing so the cached array stays intact.
    bunch.data = _normalize(self.all_features[subset].copy(), -lim, +lim)
    bunch.spike_ids = subset
    bunch.spike_clusters = self.spike_clusters[subset]
    bunch.masks = self.all_masks[subset]
    return bunch
def get_waveforms(self, cluster_id):
    """Return waveform and template data for one cluster.

    Returns a list of ``Bunch`` objects: the normalized waveforms (when
    raw waveforms are available) followed by the cluster's unwhitened
    templates scaled by the cluster's mean spike amplitude.
    """
    m, M = self.get_waveform_lims()
    if self.all_waveforms is not None:
        # Waveforms.
        waveforms_b = self._select_data(
            cluster_id,
            self.all_waveforms,
            self.n_spikes_waveforms,
        )
        # Center each spike's waveform around its own mean, then
        # rescale to the global waveform limits.
        mean = waveforms_b.data.mean(axis=1).mean(axis=1)
        waveforms_b.data = waveforms_b.data.astype(np.float64)
        waveforms_b.data -= mean[:, np.newaxis, np.newaxis]
        waveforms_b.data = _normalize(waveforms_b.data, m, M)
    else:
        # No raw waveform data available; return templates only.
        waveforms_b = None
    # Find the templates corresponding to the cluster.
    template_ids = np.nonzero(self.get_cluster_templates(cluster_id))[0]
    # Templates (unwhitened).
    templates = self.templates_unw[template_ids]
    assert templates.ndim == 3
    masks = self.template_masks[template_ids]
    assert masks.ndim == 2
    assert templates.shape[0] == masks.shape[0]
    # Find mean amplitude over a (possibly subsampled) set of spikes.
    spike_ids = self._select_spikes(cluster_id,
                                    self.n_spikes_waveforms_lim)
    mean_amp = self.all_amplitudes[spike_ids].mean()
    # Scale templates by the mean amplitude and normalize them into the
    # same range as the waveforms.
    tmp = templates * mean_amp
    tmp = _normalize(tmp, m, M)
    n = len(template_ids)
    template_b = Bunch(
        spike_ids=template_ids,
        spike_clusters=cluster_id * np.ones(n),
        data=tmp,
        masks=masks,
        alpha=1.,
    )
    if waveforms_b is not None:
        return [waveforms_b, template_b]
    else:
        return [template_b]
def get_waveforms(self, cluster_id):
    """Return waveform and template data for one cluster.

    Returns a list of ``Bunch`` objects: the sparsified, normalized
    waveforms (tag ``'waveforms'``) when raw waveforms are available,
    followed by the cluster's unwhitened templates (tag
    ``'templates'``) scaled by the cluster's mean spike amplitude.
    """
    m, M = self.get_waveform_lims()
    if self.all_waveforms is not None:
        # Waveforms.
        waveforms_b = self._select_data(cluster_id,
                                        self.all_waveforms,
                                        self.n_spikes_waveforms,
                                        )
        w = waveforms_b.data
        # Sparsify: keep only channels with a non-zero mean waveform.
        channels = np.nonzero(w.mean(axis=1).mean(axis=0))[0]
        w = w[:, :, channels]
        waveforms_b.channels = channels
        # Normalize: center each spike's waveform around its own mean,
        # then rescale to the global waveform limits.
        mean = w.mean(axis=1).mean(axis=1)
        w = w.astype(np.float64)
        w -= mean[:, np.newaxis, np.newaxis]
        w = _normalize(w, m, M)
        waveforms_b.data = w
        waveforms_b.cluster_id = cluster_id
        waveforms_b.tag = 'waveforms'
    else:
        # No raw waveform data available; return templates only.
        waveforms_b = None
    # Find the templates corresponding to the cluster.
    template_ids = np.nonzero(self.get_cluster_templates(cluster_id))[0]
    # Templates (unwhitened).
    templates = self.templates_unw[template_ids]
    assert templates.ndim == 3
    # Masks.
    masks = self.template_masks[template_ids]
    assert masks.ndim == 2
    assert templates.shape[0] == masks.shape[0]
    # Find mean amplitude over a (possibly subsampled) set of spikes.
    spike_ids = self._select_spikes(cluster_id,
                                    self.n_spikes_waveforms_lim)
    mean_amp = self.all_amplitudes[spike_ids].mean()
    # Normalize. Centering is deliberately skipped here (kept below for
    # reference); templates are only scaled.
    # mean = templates.mean(axis=1).mean(axis=1)
    templates = templates.astype(np.float64).copy()
    # templates -= mean[:, np.newaxis, np.newaxis]
    templates *= mean_amp
    # Rescale into the same [-1, 1]-style range used by _normalize.
    templates *= 2. / (M - m)
    template_b = Bunch(data=templates,
                       masks=masks,
                       alpha=1.,
                       cluster_id=cluster_id,
                       tag='templates',
                       )
    if waveforms_b is not None:
        return [waveforms_b, template_b]
    else:
        return [template_b]
def get_waveforms(self, cluster_id):
    """Return waveform and template data for one cluster.

    Returns a list of ``Bunch`` objects: the normalized waveforms (when
    raw waveforms are available) followed by the cluster's unwhitened
    templates scaled by the cluster's mean spike amplitude.
    """
    m, M = self.get_waveform_lims()
    if self.all_waveforms is not None:
        # Waveforms.
        waveforms_b = self._select_data(cluster_id,
                                        self.all_waveforms,
                                        self.n_spikes_waveforms,
                                        )
        # Center each spike's waveform around its own mean, then
        # rescale to the global waveform limits.
        mean = waveforms_b.data.mean(axis=1).mean(axis=1)
        waveforms_b.data = waveforms_b.data.astype(np.float64)
        waveforms_b.data -= mean[:, np.newaxis, np.newaxis]
        waveforms_b.data = _normalize(waveforms_b.data, m, M)
    else:
        # No raw waveform data available; return templates only.
        waveforms_b = None
    # Find the templates corresponding to the cluster.
    template_ids = np.nonzero(self.get_cluster_templates(cluster_id))[0]
    # Templates (unwhitened).
    templates = self.templates_unw[template_ids]
    assert templates.ndim == 3
    masks = self.template_masks[template_ids]
    assert masks.ndim == 2
    assert templates.shape[0] == masks.shape[0]
    # Find mean amplitude over a (possibly subsampled) set of spikes.
    spike_ids = self._select_spikes(cluster_id,
                                    self.n_spikes_waveforms_lim)
    mean_amp = self.all_amplitudes[spike_ids].mean()
    # Scale templates by the mean amplitude and normalize them into the
    # same range as the waveforms.
    tmp = templates * mean_amp
    tmp = _normalize(tmp, m, M)
    n = len(template_ids)
    template_b = Bunch(spike_ids=template_ids,
                       spike_clusters=cluster_id * np.ones(n),
                       data=tmp,
                       masks=masks,
                       alpha=1.,
                       )
    if waveforms_b is not None:
        return [waveforms_b, template_b]
    else:
        return [template_b]
def get_waveforms(self, cluster_id):
    """Return sparsified, normalized waveforms for one cluster."""
    bunch = self._select_data(cluster_id,
                              self.all_waveforms,
                              self.n_spikes_waveforms,
                              batch_size=10)
    w = bunch.data
    # Sparsify: keep only channels whose mean waveform is non-zero.
    kept = np.nonzero(w.mean(axis=1).mean(axis=0))[0]
    # FIX when the waveforms are null because of absent raw data.
    if not len(kept):  # pragma: no cover
        kept = np.nonzero(self.get_mean_masks(cluster_id) > 0)[0]
    bunch.channels = kept
    # Cache the normalized waveforms.
    lo, hi = self.get_waveform_lims()
    bunch.data = _normalize(w[:, :, kept], lo, hi)
    bunch.cluster_id = cluster_id
    return bunch