# Assumed module-level imports for these fixtures (not shown in this excerpt):
#   import os, shutil
#   from pathlib import Path
#   import numpy as np
#   import numpy.random as nr
#   _read_tsv_simple / _write_tsv_simple from phylib (import path assumed)

def __init__(self, tempdir):
    np.random.seed(42)
    self.tmp_dir = tempdir
    p = Path(self.tmp_dir)

    # Dataset dimensions.
    self.ns = 100     # number of spikes
    self.nsamp = 25   # number of samples per extracted waveform
    self.ncmax = 42   # maximum number of channels per extracted waveform
    self.nc = 10      # number of channels
    self.nt = 5       # number of templates
    self.ncd = 1000   # number of rows in the raw data array

    # Spike data.
    np.save(p / 'spike_times.npy', .01 * np.cumsum(nr.exponential(size=self.ns)))
    np.save(p / 'spike_clusters.npy', nr.randint(low=1, high=self.nt, size=self.ns))
    shutil.copy(p / 'spike_clusters.npy', p / 'spike_templates.npy')
    np.save(p / 'amplitudes.npy', nr.uniform(low=0.5, high=1.5, size=self.ns))

    # Channel and template data.
    np.save(p / 'channel_positions.npy', np.c_[np.arange(self.nc), np.zeros(self.nc)])
    np.save(p / 'templates.npy', np.random.normal(size=(self.nt, 50, self.nc)))
    np.save(p / 'similar_templates.npy', np.tile(np.arange(self.nt), (self.nt, 1)))
    np.save(p / 'channel_map.npy', np.c_[np.arange(self.nc)])
    np.save(p / 'channel_probe.npy', np.zeros(self.nc))
    np.save(p / 'whitening_mat.npy', np.eye(self.nc, self.nc))

    # Subset waveforms.
    np.save(p / '_phy_spikes_subset.channels.npy', np.zeros([self.ns, self.ncmax]))
    np.save(p / '_phy_spikes_subset.spikes.npy', np.zeros([self.ns]))
    np.save(p / '_phy_spikes_subset.waveforms.npy',
            np.zeros([self.ns, self.nsamp, self.ncmax]))

    # Cluster metadata.
    _write_tsv_simple(p / 'cluster_group.tsv', 'group', {2: 'good', 3: 'mua', 5: 'noise'})
    _write_tsv_simple(
        p / 'cluster_Amplitude.tsv', field_name='Amplitude',
        data={str(n): np.random.rand() * 120 for n in np.arange(self.nt)})

    with open(p / 'probes.description.txt', 'w+') as fid:
        fid.writelines(['label\n'])

    # Raw data.
    self.dat_path = p / 'rawdata.npy'
    np.save(self.dat_path, np.random.normal(size=(self.ncd, self.nc)))

    # LFP data.
    lfdata = (100 * np.random.normal(size=(1000, self.nc))).astype(np.int16)
    with (p / 'mydata.lf.bin').open('wb') as f:
        lfdata.tofile(f)

    self.files = os.listdir(self.tmp_dir)
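# A minimal sanity-check sketch for the files written above. It only uses
# numpy and the shapes chosen in __init__ (ns=100, nt=5, nc=10, nsamp=25,
# ncmax=42); the helper name and default arguments are made up here.
def _check_fixture_shapes(tmp_dir, ns=100, nt=5, nc=10, nsamp=25, ncmax=42):
    """Reload the saved arrays and assert the expected shapes."""
    import numpy as np
    from pathlib import Path
    p = Path(tmp_dir)
    assert np.load(p / 'spike_times.npy').shape == (ns,)
    assert np.load(p / 'spike_clusters.npy').shape == (ns,)
    assert np.load(p / 'templates.npy').shape == (nt, 50, nc)
    assert np.load(p / '_phy_spikes_subset.waveforms.npy').shape == (ns, nsamp, ncmax)
    # Spike times are a cumulative sum of non-negative exponentials, so they
    # must be non-decreasing.
    assert np.all(np.diff(np.load(p / 'spike_times.npy')) >= 0)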
def write_cluster_data(self):
    """Load all cluster metadata from the TSV files, renumber the clusters,
    merge the dictionaries, and save them into a new merged TSV file."""
    cluster_data = [
        'cluster_Amplitude.tsv', 'cluster_ContamPct.tsv', 'cluster_KSLabel.tsv']
    for fn in cluster_data:
        metadata = {}
        for subdir, offset in zip(self.subdirs, self.cluster_offsets):
            try:
                field_name, metadata_loc = _read_tsv_simple(subdir / fn)
            except ValueError:
                # Skip non-existing file.
                continue
            # Renumber the cluster ids by adding the dataset's cluster offset.
            for k, v in metadata_loc.items():
                metadata[k + offset] = v
        if metadata:
            _write_tsv_simple(self.out_dir / fn, field_name, metadata)
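# An illustration of the renumbering logic above, independent of the TSV
# helpers: each dataset's cluster ids are shifted by that dataset's offset so
# that ids from different datasets cannot collide. The dicts and offsets below
# are made up for the example.
def _merge_metadata_example():
    datasets = [
        ({0: 'good', 1: 'mua'}, 0),      # (per-dataset metadata, cluster offset)
        ({0: 'noise', 1: 'good'}, 100),
    ]
    merged = {}
    for metadata_loc, offset in datasets:
        for k, v in metadata_loc.items():
            merged[k + offset] = v
    assert merged == {0: 'good', 1: 'mua', 100: 'noise', 101: 'good'}
    return merged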
def __init__(self, tempdir):
    self.tmp_dir = tempdir
    p = Path(self.tmp_dir)

    # Dataset dimensions.
    self.ns = 100    # number of spikes
    self.nc = 10     # number of channels
    self.nt = 5      # number of templates
    self.ncd = 1000  # number of rows in the raw data array

    # Spike data. Cluster ids lie in [10, 10 + nt).
    np.save(p / 'spike_times.npy', .01 * np.cumsum(nr.exponential(size=self.ns)))
    np.save(p / 'spike_clusters.npy', nr.randint(low=10, high=10 + self.nt, size=self.ns))
    shutil.copy(p / 'spike_clusters.npy', p / 'spike_templates.npy')
    np.save(p / 'amplitudes.npy', nr.uniform(low=0.5, high=1.5, size=self.ns))

    # Channel and template data.
    np.save(p / 'channel_positions.npy', np.c_[np.arange(self.nc), np.zeros(self.nc)])
    np.save(p / 'templates.npy', np.random.normal(size=(self.nt, 50, self.nc)))
    np.save(p / 'similar_templates.npy', np.tile(np.arange(self.nt), (self.nt, 1)))
    np.save(p / 'channel_map.npy', np.c_[np.arange(self.nc)])

    # Cluster metadata.
    _write_tsv_simple(p / 'cluster_group.tsv', 'group', {2: 'good', 3: 'mua', 5: 'noise'})

    # Raw data.
    self.dat_path = p / 'rawdata.npy'
    np.save(self.dat_path, np.random.normal(size=(self.ncd, self.nc)))

    # LFP data.
    lfdata = (100 * np.random.normal(size=(1000, self.nc))).astype(np.int16)
    with (p / 'mydata.lf.bin').open('wb') as f:
        lfdata.tofile(f)

    self.files = os.listdir(self.tmp_dir)
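# A sketch of reading back the flat int16 LFP file written above. The .lf.bin
# file has no header, so the channel count (nc=10 here) must be known to
# reshape it; the helper name is made up for illustration.
def _load_lf_bin(path, n_channels=10):
    """Load a headerless int16 binary file as an (n_samples, n_channels) array."""
    import numpy as np
    data = np.fromfile(path, dtype=np.int16)
    assert data.size % n_channels == 0
    return data.reshape((-1, n_channels))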
def save_metadata(filename, field_name, metadata):
    """Save metadata in a TSV file."""
    return _write_tsv_simple(filename, field_name, metadata)
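# Hedged usage example: save_metadata forwards to _write_tsv_simple, so the
# mapping keys are cluster ids and the values are the per-cluster field. The
# file name and values below are made up for illustration:
#
#   save_metadata('cluster_quality.tsv', 'quality', {2: 'good', 3: 'mua', 7: 'good'})
#
# This is assumed to produce one header line with the field name followed by
# one tab-separated row per cluster id.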