def get_features(self, cluster_id, load_all=False):
    """Return the features of a cluster as a Bunch.

    Overridden to take the sparse storage into account: when
    ``self.features_spike_ids`` is set, features exist on disk only for
    that subset of spikes.
    """
    if self.features_spike_ids is not None:
        # Restrict the cluster's spikes to those that actually have
        # features stored.
        spike_ids = self._select_spikes(cluster_id)
        spike_ids = np.intersect1d(spike_ids, self.features_spike_ids)
        # Relative indices within features_spike_ids, required because
        # all_features only contains that subset of the spikes.
        spike_ids_rel = _index_of(spike_ids, self.features_spike_ids)
    else:
        limit = None if load_all else self.n_spikes_features
        spike_ids = self._select_spikes(cluster_id, limit)
        spike_ids_rel = spike_ids

    templates = self.spike_templates[spike_ids]
    dense = _densify(spike_ids_rel, self.all_features,
                     self.features_ind[templates, :], self.n_channels)
    dense = np.transpose(dense, (0, 2, 1))
    assert dense.shape == (len(spike_ids), self.n_channels,
                           self.n_features_per_channel)

    # Normalize features into the [-m, m] range.
    m = self.get_feature_lim()

    b = Bunch()
    b.data = _normalize(dense, -m, m)
    b.spike_ids = spike_ids
    b.spike_clusters = self.spike_clusters[spike_ids]
    b.masks = self.all_masks[spike_ids]
    return b
def get_features(self, cluster_id, load_all=False):
    """Return a Bunch with the (normalized) features of one cluster.

    Takes the sparse structure into account: if only a subset of spikes
    has features (``self.features_spike_ids``), the selection is limited
    to that subset.
    """
    sparse = self.features_spike_ids is not None
    if sparse:
        # Keep only spikes that belong to the features spike ids.
        spike_ids = np.intersect1d(self._select_spikes(cluster_id),
                                   self.features_spike_ids)
        # all_features holds just this subset, so convert the absolute
        # spike ids into positions within features_spike_ids.
        rel = _index_of(spike_ids, self.features_spike_ids)
    else:
        n_max = self.n_spikes_features if not load_all else None
        spike_ids = self._select_spikes(cluster_id, n_max)
        rel = spike_ids

    st = self.spike_templates[spike_ids]
    feats = _densify(rel, self.all_features,
                     self.features_ind[st, :], self.n_channels)
    feats = np.transpose(feats, (0, 2, 1))
    expected = (len(spike_ids), self.n_channels, self.n_features_per_channel)
    assert feats.shape == expected

    # Scale the features to [-m, m] before display.
    bound = self.get_feature_lim()
    feats = _normalize(feats, -bound, bound)

    out = Bunch()
    out.data = feats
    out.spike_ids = spike_ids
    out.spike_clusters = self.spike_clusters[spike_ids]
    out.masks = self.all_masks[spike_ids]
    return out
def get_amplitudes(self, cluster_id):
    """Return a Bunch with spike times (x) and amplitudes (y) of a cluster."""
    spike_ids = self._select_spikes(cluster_id, self.n_spikes_features)
    d = Bunch()
    d.spike_ids = spike_ids
    # Every selected spike belongs to this cluster by construction.
    d.spike_clusters = np.full(len(spike_ids), cluster_id, dtype=np.int32)
    d.x = self.spike_times[spike_ids]
    d.y = self.all_amplitudes[spike_ids]
    return d
def _select_data(self, cluster_id, arr, n_max=None):
    """Return a Bunch with rows of ``arr`` for (at most n_max) spikes
    of the given cluster, along with their clusters and masks."""
    sids = self._select_spikes(cluster_id, n_max)
    out = Bunch()
    out.data = arr[sids]
    out.spike_ids = sids
    out.spike_clusters = self.spike_clusters[sids]
    out.masks = self.all_masks[sids]
    return out
def _select_data(self, cluster_id, arr, n_max=None):
    """Slice ``arr`` (and the per-spike metadata) by a subselection of
    the cluster's spikes, returning everything in a Bunch."""
    spike_ids = self._select_spikes(cluster_id, n_max)
    bunch = Bunch()
    bunch.spike_ids = spike_ids
    bunch.data = arr[spike_ids]
    bunch.spike_clusters = self.spike_clusters[spike_ids]
    bunch.masks = self.all_masks[spike_ids]
    return bunch
def get_amplitudes(self, cluster_id):
    """Return times and amplitudes of a cluster's spikes as a Bunch
    (x = spike time, y = amplitude)."""
    sids = self._select_spikes(cluster_id, self.n_spikes_features)
    n = len(sids)
    bunch = Bunch()
    bunch.spike_ids = sids
    # All spikes come from the selected cluster; broadcast its id.
    bunch.spike_clusters = np.ones(n, dtype=np.int32) * cluster_id
    bunch.x = self.spike_times[sids]
    bunch.y = self.all_amplitudes[sids]
    return bunch
def get_background_features(self):
    """Return a regular subsample of all features, used as the
    background scatter in the feature view."""
    # Step chosen so that roughly n_spikes_background_features spikes
    # are kept; never smaller than 1.
    step = max(1, int(self.n_spikes // self.n_spikes_background_features))
    sel = slice(None, None, step)
    b = Bunch()
    b.data = self.all_features[sel]
    b.spike_ids = sel
    b.spike_clusters = self.spike_clusters[sel]
    b.masks = self.all_masks[sel]
    return b
def get_background_features(self):
    """Return every k-th spike's features as background data.

    k is picked so the subsample has about
    ``n_spikes_background_features`` spikes.
    """
    k = int(self.n_spikes // self.n_spikes_background_features)
    if k < 1:
        k = 1
    spike_ids = slice(None, None, k)
    out = Bunch()
    out.spike_ids = spike_ids
    out.data = self.all_features[spike_ids]
    out.spike_clusters = self.spike_clusters[spike_ids]
    out.masks = self.all_masks[spike_ids]
    return out
def get_background_features(self):
    """Return a normalized, regularly-subsampled set of all features
    for the background of the feature view."""
    stride = max(1, int(self.n_spikes // self.n_spikes_background_features))
    sel = slice(None, None, stride)
    b = Bunch()
    # Normalize a copy into [-m, m] so the stored array is untouched.
    m = self.get_feature_lim()
    b.data = _normalize(self.all_features[sel].copy(), -m, +m)
    b.spike_ids = sel
    b.spike_clusters = self.spike_clusters[sel]
    b.masks = self.all_masks[sel]
    return b
def get_cluster_pair_features(self, ci, cj):
    """Return template-feature projections for a pair of clusters.

    For spikes of both clusters, x/y are the template features projected
    onto cluster ci's and cj's template-count vectors (weighted means).
    """
    ni = self.get_cluster_templates(ci)
    nj = self.get_cluster_templates(cj)

    def _project(spikes):
        # Weighted mean of the template features across templates,
        # weighted by each cluster's template counts.
        t = self._get_template_features(spikes)
        x = np.sum(t * ni[np.newaxis, :], axis=1) / ni.sum()
        y = np.sum(t * nj[np.newaxis, :], axis=1) / nj.sum()
        return x, y

    si = self._select_spikes(ci, self.n_spikes_features)
    sj = self._select_spikes(cj, self.n_spikes_features)
    x0, y0 = _project(si)
    x1, y1 = _project(sj)

    d = Bunch()
    d.x = np.hstack((x0, x1))
    d.y = np.hstack((y0, y1))
    d.spike_ids = np.hstack((si, sj))
    d.spike_clusters = self.spike_clusters[d.spike_ids]
    return d
def get_cluster_pair_features(self, ci, cj):
    """Return a Bunch of x/y template-feature projections for the
    spikes of clusters ``ci`` and ``cj``."""
    spikes_i = self._select_spikes(ci, self.n_spikes_features)
    spikes_j = self._select_spikes(cj, self.n_spikes_features)

    # Per-cluster template count vectors used as projection weights.
    wi = self.get_cluster_templates(ci)
    wj = self.get_cluster_templates(cj)
    wi_sum = wi.sum()
    wj_sum = wj.sum()

    feats_i = self._get_template_features(spikes_i)
    xi = np.sum(feats_i * wi[np.newaxis, :], axis=1) / wi_sum
    yi = np.sum(feats_i * wj[np.newaxis, :], axis=1) / wj_sum

    feats_j = self._get_template_features(spikes_j)
    xj = np.sum(feats_j * wi[np.newaxis, :], axis=1) / wi_sum
    yj = np.sum(feats_j * wj[np.newaxis, :], axis=1) / wj_sum

    pair = Bunch()
    pair.x = np.hstack((xi, xj))
    pair.y = np.hstack((yi, yj))
    pair.spike_ids = np.hstack((spikes_i, spikes_j))
    pair.spike_clusters = self.spike_clusters[pair.spike_ids]
    return pair