def template_extract_waveforms(ctx, params_path):  # pragma: no cover
    """Extract spike waveforms."""
    from phylib.io.model import load_model
    model = load_model(params_path)
    model.save_spike_waveforms()
    model.close()
def convert(self, out_path, force=False, label='', ampfactor=1):
    """Convert from KS/phy format to ALF."""
    logger.info("Converting dataset to ALF.")
    self.out_path = Path(out_path)
    self.label = label
    self.ampfactor = ampfactor
    if self.out_path.resolve() == self.dir_path.resolve():
        raise IOError("The source and target directories cannot be the same.")
    if not self.out_path.exists():
        self.out_path.mkdir()
    with tqdm(desc="Converting to ALF", total=95) as bar:
        self.copy_files(force=force)
        bar.update(10)
        self.make_spike_times_amplitudes()
        bar.update(10)
        self.make_cluster_objects()
        bar.update(10)
        self.make_channel_objects()
        bar.update(5)
        self.make_depths()
        bar.update(20)
        self.make_template_object()
        bar.update(30)
        self.rm_files()
        bar.update(10)
        self.rename_with_label()
    # Return the TemplateModel of the converted ALF dataset if the params.py file exists.
    params_path = self.out_path / 'params.py'
    if params_path.exists():
        return load_model(params_path)
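# A minimal usage sketch of `convert` (paths are hypothetical; the enclosing
# class is not shown in this snippet, so `converter` stands in for an instance
# of whichever class defines the method above):
#
#   model = converter.convert('/data/session/alf', label='probe00', ampfactor=1)
#   if model is not None:  # params.py was written, so a TemplateModel is returned
#       model.describe()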
def merge(self):
    """Merge the probes data and return a TemplateModel instance of the merged data."""
    with tqdm(desc="Merging", total=100) as bar:
        self.write_params()
        self.write_probe_desc()
        bar.update(10)
        self.write_spike_times()
        bar.update(10)
        self.write_spike_data()
        bar.update(10)
        self.write_spike_clusters()
        bar.update(10)
        self.write_cluster_data()
        bar.update(10)
        self.write_channel_data()
        bar.update(10)
        self.write_channel_positions()
        bar.update(10)
        self.write_templates()
        bar.update(10)
        self.write_template_data()
        bar.update(10)
        self.write_misc()
        bar.update(10)
    return load_model(self.out_dir / 'params.py')
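# A minimal usage sketch of the merge step, matching how `Merger` is constructed
# elsewhere in this file (paths are hypothetical):
#
#   from phylib.io.merge import Merger
#   merger = Merger(['/data/probe00', '/data/probe01'], '/data/merged')
#   merged_model = merger.merge()  # a TemplateModel spanning both probes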
def get_spike_waveforms(self, units=None):
    """Extract per-unit spike waveforms (best channel only) from the sorted data."""
    from phylib.io.model import load_model
    waveforms = []
    if units is None:
        units = self.select_units()
    for rec_num, recording in enumerate(self.files):
        paramspy = self.processed / f'sorted_{rec_num}' / 'params.py'
        if not paramspy.exists():
            raise PixelsError(f"{self.name}: params.py not found")
        model = load_model(paramspy)
        rec_forms = {}
        for unit in units[rec_num]:
            # Get the waveforms from only the best channel.
            spike_ids = model.get_cluster_spikes(unit)
            best_chan = model.get_cluster_channels(unit)[0]
            u_waveforms = model.get_waveforms(spike_ids, [best_chan])
            if u_waveforms is None:
                raise PixelsError(f"{self.name}: unit {unit} - waveforms not read")
            rec_forms[unit] = pd.DataFrame(np.squeeze(u_waveforms).T)
        waveforms.append(pd.concat(rec_forms, axis=1))
    df = pd.concat(
        waveforms, axis=1,
        keys=range(len(self.files)),
        names=['rec_num', 'unit', 'spike'],
    )
    # Convert the index from samples to milliseconds.
    rate = 1000 / int(self.spike_meta[rec_num]['imSampRate'])
    df.index = df.index * rate
    return df
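# A usage sketch (the session object and unit ids are hypothetical; `units` is
# one list of unit ids per recording, matching the `units[rec_num]` indexing
# above):
#
#   df = session.get_spike_waveforms(units=[[12, 27]])
#   # Columns form a (rec_num, unit, spike) MultiIndex; the index is in ms.
#   unit_12 = df[0][12]  # all spike waveforms of unit 12 in recording 0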
def template_extract_waveforms(ctx, params_path, n_spikes_per_cluster, nc=None):  # pragma: no cover
    """Extract spike waveforms."""
    from phylib.io.model import load_model
    model = load_model(params_path)
    model.save_spikes_subset_waveforms(
        max_n_spikes_per_template=n_spikes_per_cluster, max_n_channels=nc)
    model.close()
def template_gui(params_path, **kwargs):  # pragma: no cover
    """Launch the Template GUI."""
    # Create a `phy.log` log file with DEBUG level.
    p = Path(params_path)
    dir_path = p.parent
    _add_log_file(dir_path / 'phy.log')

    create_app()
    controller = TemplateController(model=load_model(params_path), dir_path=dir_path, **kwargs)
    gui = controller.create_gui()
    gui.show()
    run_app()
    gui.close()
    controller.model.close()
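# Usage sketch: this is the entry point behind phy's `template-gui` command, so
# it can be called from Python or from the shell (the path is hypothetical):
#
#   template_gui('/data/session/ks_output/params.py')
#
#   $ phy template-gui /data/session/ks_output/params.py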
def cli_alf_convert(ctx, subdirs, out_dir):
    """Convert an ephys dataset into ALF.

    If several directories are specified, it is assumed that each directory
    contains the data for one probe of the same recording.
    """
    from phylib.io.alf import EphysAlfCreator
    from phylib.io.merge import Merger
    from phylib.io.model import load_model

    out_dir = Path(out_dir)
    if len(subdirs) >= 2:
        # Merge in the `_tmp_merged` subdirectory inside the output directory.
        m = Merger(subdirs, out_dir / '_tmp_merged')
        model = m.merge()
    else:
        model = load_model(Path(subdirs[0]) / 'params.py')
    c = EphysAlfCreator(model)
    c.convert(out_dir)
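# Usage sketch: the `ctx` parameter suggests this is a click command handler;
# assuming it is registered as `alf-convert` on the phy CLI, an invocation
# might look like this (paths hypothetical):
#
#   $ phy alf-convert probe00/ probe01/ alf_out/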
def phy_model_from_ks2_path(ks2_path):
    """Load a TemplateModel from a Kilosort2 output directory, falling back on
    SpikeGLX metadata (or Neuropixels defaults) when params.py is missing."""
    params_file = ks2_path.joinpath('params.py')
    if params_file.exists():
        m = model.load_model(params_file)
    else:
        meta_file = next(ks2_path.rglob('*.ap.meta'), None)
        if meta_file and meta_file.exists():
            meta = spikeglx.read_meta_data(meta_file)
            fs = spikeglx._get_fs_from_meta(meta)
            nch = (spikeglx._get_nchannels_from_meta(meta)
                   - len(spikeglx._get_sync_trace_indices_from_meta(meta)))
        else:
            fs = 30000
            nch = 384
        m = model.TemplateModel(dir_path=ks2_path, dat_path=[], sample_rate=fs, n_channels_dat=nch)
    return m
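# A minimal usage sketch (hypothetical path; assumes `model` is phylib.io.model
# and `spikeglx` is the ibllib SpikeGLX reader module, as the calls above
# suggest):
#
#   from pathlib import Path
#   m = phy_model_from_ks2_path(Path('/data/session/ks2'))
#   print(m.sample_rate, m.n_channels_dat)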
def test_probe_merge_1(tempdir):
    out_dir = tempdir / 'merged'

    # Create two identical datasets.
    probe_names = ('probe_left', 'probe_right')
    for name in probe_names:
        (tempdir / name).mkdir(exist_ok=True, parents=True)
        _make_dataset(tempdir / name, param='dense', has_spike_attributes=False)
    subdirs = [tempdir / name for name in probe_names]

    # Merge them.
    m = Merger(subdirs, out_dir)
    single = load_model(tempdir / probe_names[0] / 'params.py')

    # Test the merged dataset.
    merged = m.merge()
    for name in ('n_spikes', 'n_channels', 'n_templates'):
        assert getattr(merged, name) == getattr(single, name) * 2
    assert merged.sample_rate == single.sample_rate
def template_gui(params_path, **kwargs):  # pragma: no cover
    """Launch the Template GUI."""
    # Create a `phy.log` log file with DEBUG level.
    p = Path(params_path)
    dir_path = p.parent
    _add_log_file(dir_path / 'phy.log')

    model = load_model(params_path)
    # Automatically export spike waveforms when using compressed raw ephys.
    if model.spike_waveforms is None and isinstance(model.traces, MtscompEphysReader):
        # TODO: customizable values below.
        model.save_spikes_subset_waveforms(max_n_spikes_per_template=500, max_n_channels=16)

    create_app()
    controller = TemplateController(model=model, dir_path=dir_path, **kwargs)
    gui = controller.create_gui()
    gui.show()
    run_app()
    gui.close()
    controller.model.close()
def test_probe_merge_2(tempdir):
    out_dir = tempdir / 'merged'

    # Create two identical datasets.
    probe_names = ('probe_left', 'probe_right')
    for name in probe_names:
        (tempdir / name).mkdir(exist_ok=True, parents=True)
        _make_dataset(tempdir / name, param='dense', has_spike_attributes=False)
    subdirs = [tempdir / name for name in probe_names]

    # Add a small shift in the spike times of the second probe.
    single = load_model(tempdir / probe_names[0] / 'params.py')
    st_path = tempdir / 'probe_right/spike_times.npy'
    np.save(st_path, single.spike_samples + 1)
    # Make the amplitudes unique and growing so they can serve as keys and sorting indices.
    single.amplitudes = np.linspace(5, 15, single.n_spikes)
    # single.spike_clusters[single.spike_clusters == 0] = 12
    for i, subdir in enumerate(subdirs):
        np.save(subdir / 'amplitudes.npy', single.amplitudes + 20 * i)
        np.save(subdir / 'spike_clusters.npy', single.spike_clusters)

    # Merge them.
    m = Merger(subdirs, out_dir)
    merged = m.merge()

    # Test the merged dataset.
    for name in ('n_spikes', 'n_channels', 'n_templates'):
        assert getattr(merged, name) == getattr(single, name) * 2
    assert merged.sample_rate == single.sample_rate

    # Check the spikes.
    single = load_model(tempdir / probe_names[0] / 'params.py')

    def test_merged_single(merged, merged_original_amps=None):
        if merged_original_amps is None:
            merged_original_amps = merged.amplitudes
        _, im1, i1 = np.intersect1d(merged_original_amps, single.amplitudes, return_indices=True)
        _, im2, i2 = np.intersect1d(merged_original_amps, single.amplitudes + 20, return_indices=True)
        # The intersection spans the full vector.
        assert i1.size + i2.size == merged.amplitudes.size
        # Test the spikes.
        assert np.allclose(merged.spike_times[im1], single.spike_times[i1])
        assert np.allclose(merged.spike_times[im2], single.spike_times[i2] + 4e-5)
        # Test the clusters.
        assert np.allclose(merged.spike_clusters[im2], single.spike_clusters[i2] + 64)
        assert np.allclose(merged.spike_clusters[im1], single.spike_clusters[i1])
        # Test the templates.
        assert np.all(merged.spike_templates[im1] - single.spike_templates[i1] == 0)
        assert np.all(merged.spike_templates[im2] - single.spike_templates[i2] == 64)
        # Test the probes.
        assert np.all(merged.channel_probes == np.r_[single.channel_probes, single.channel_probes + 1])
        assert np.all(merged.templates_channels[merged.templates_probes == 0] < single.n_channels)
        assert np.all(merged.templates_channels[merged.templates_probes == 1] >= single.n_channels)
        spike_probes = merged.templates_probes[merged.spike_templates]
        assert np.all(merged_original_amps[spike_probes == 0] <= 15)
        assert np.all(merged_original_amps[spike_probes == 1] >= 20)
        assert np.all(merged.sparse_templates.data[:64, :, 0:32] == single.sparse_templates.data)

    # Convert into ALF and load.
    alf = EphysAlfCreator(merged).convert(tempdir / 'alf')
    test_merged_single(merged)
    test_merged_single(alf, merged_original_amps=merged.amplitudes)

    # Test the channel ids, specific to the merged ALF dataset: the raw indices are still
    # individual file indices; the merged channel mapping is in `channels._phy_ids.npy`.
    chid = np.load(tempdir.joinpath('alf', 'channels.rawInd.npy'))
    assert np.all(chid == np.r_[single.channel_mapping, single.channel_mapping])

    out_files = list(tempdir.joinpath('alf').glob('*.*'))
    cl_shape = [np.load(f).shape[0] for f in out_files
                if f.name.startswith('clusters.') and f.name.endswith('.npy')]
    sp_shape = [np.load(f).shape[0] for f in out_files if f.name.startswith('spikes.')]
    ch_shape = [np.load(f).shape[0] for f in out_files if f.name.startswith('channels.')]
    assert len(set(cl_shape)) == 1
    assert len(set(sp_shape)) == 1
    assert len(set(ch_shape)) == 1
def template_describe(params_path):
    """Describe a template dataset."""
    model = load_model(params_path)
    model.describe()
    model.close()
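# Usage sketch, assuming this handler is registered as phy's `template-describe`
# command (the path is hypothetical):
#
#   $ phy template-describe /data/session/ks_output/params.py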
def test_template_describe(qtbot, tempdir):
    model = load_model(_make_dataset(tempdir, param='dense', has_spike_attributes=False))
    with captured_output() as (stdout, stderr):
        template_describe(model.dir_path / 'params.py')
    assert '314' in stdout.getvalue()