def test_append(self):
    """Round-trip test: write an NWB file, append an ElectricalSeries that
    links to an existing electrode table region, and verify the link
    identity survives both the append and a fresh read."""
    # Build a processing module holding an LFP container.
    proc_mod = self.nwbfile.create_processing_module(name='test_proc_mod', description='')
    proc_inter = LFP(name='test_proc_dset')
    proc_mod.add(proc_inter)
    # Minimal electrode setup: one device, one group, one electrode row.
    device = self.nwbfile.create_device(name='test_device')
    e_group = self.nwbfile.create_electrode_group(
        name='test_electrode_group', description='', location='', device=device)
    self.nwbfile.add_electrode(x=0.0, y=0.0, z=0.0, imp=np.nan, location='', filtering='', group=e_group)
    electrodes = self.nwbfile.create_electrode_table_region(region=[0], description='')
    e_series = ElectricalSeries(
        name='test_es',
        electrodes=electrodes,
        data=np.ones(shape=(100, )),
        rate=10000.0,
    )
    proc_inter.add_electrical_series(e_series)
    # Initial write; cache_spec=False so the append path re-handles the spec.
    with NWBHDF5IO(self.path, mode='w') as io:
        io.write(self.nwbfile, cache_spec=False)
    # Append: reuse the on-disk electrode region for a new acquisition series.
    with NWBHDF5IO(self.path, mode='a') as io:
        nwb = io.read()
        link_electrodes = nwb.processing['test_proc_mod'][
            'LFP'].electrical_series['test_es'].electrodes
        ts2 = ElectricalSeries(name='timeseries2',
                               data=[4., 5., 6.],
                               rate=1.0,
                               electrodes=link_electrodes)
        nwb.add_acquisition(ts2)
        io.write(nwb)  # also attempt to write same spec again
        # Both series must share the very same electrodes object (linked, not copied).
        self.assertIs(
            nwb.processing['test_proc_mod']
            ['LFP'].electrical_series['test_es'].electrodes,
            nwb.acquisition['timeseries2'].electrodes)
    # Re-open read-only and confirm data and link identity persisted.
    with NWBHDF5IO(self.path, mode='r') as io:
        nwb = io.read()
        np.testing.assert_equal(nwb.acquisition['timeseries2'].data[:],
                                ts2.data)
        self.assertIs(
            nwb.processing['test_proc_mod']
            ['LFP'].electrical_series['test_es'].electrodes,
            nwb.acquisition['timeseries2'].electrodes)
        # NOTE(review): validation errors are only printed, not asserted —
        # confirm whether this test should fail on validation errors.
        errors = validate(io)
        for e in errors:
            print('ERROR', e)
def test_add_electrical_series(self):
    """An ElectricalSeries added to an LFP container is retrievable by name."""
    container = LFP()
    electrode_table = make_electrode_table()
    subset = DynamicTableRegion('electrodes', [0, 2],
                                'the first and third electrodes',
                                electrode_table)
    series = ElectricalSeries('test_eS', [0, 1, 2, 3], subset,
                              timestamps=[0.1, 0.2, 0.3, 0.4])
    container.add_electrical_series(series)
    self.assertEqual(container.electrical_series.get('test_eS'), series)
def test_append(self):
    """Write an NWB file, append an acquisition series that reuses the
    LFP series' electrode region, and verify the data after a re-read.

    Fixes: uses ``nwb.processing`` instead of the deprecated ``nwb.modules``
    accessor, and names the ElectricalSeries ``'test_es'`` instead of
    ``'test_device'`` (which shadowed the device name and was misleading).
    """
    FILENAME = 'test_append.nwb'
    nwb = NWBFile(session_description='hi',
                  identifier='hi',
                  session_start_time=datetime(1970, 1, 1, 12,
                                              tzinfo=tzutc()))
    proc_mod = nwb.create_processing_module(name='test_proc_mod',
                                            description='')
    proc_inter = LFP(name='test_proc_dset')
    proc_mod.add_data_interface(proc_inter)
    # One device / group / electrode so a table region can be created.
    device = nwb.create_device(name='test_device')
    e_group = nwb.create_electrode_group(name='test_electrode_group',
                                         description='',
                                         location='',
                                         device=device)
    nwb.add_electrode(x=0.0, y=0.0, z=0.0, imp=np.nan, location='',
                      filtering='', group=e_group)
    electrodes = nwb.create_electrode_table_region(region=[0],
                                                   description='')
    e_series = ElectricalSeries(
        name='test_es',
        electrodes=electrodes,
        data=np.ones(shape=(100, )),
        rate=10000.0,
    )
    proc_inter.add_electrical_series(e_series)
    with NWBHDF5IO(FILENAME, mode='w') as io:
        io.write(nwb)
    # Append a second series that links to the on-disk electrode region.
    with NWBHDF5IO(FILENAME, mode='a') as io:
        nwb = io.read()
        elec = nwb.processing['test_proc_mod']['LFP'].electrical_series[
            'test_es'].electrodes
        ts2 = ElectricalSeries(name='timeseries2',
                               data=[4, 5, 6],
                               rate=1.0,
                               electrodes=elec)
        nwb.add_acquisition(ts2)
        io.write(nwb)
    # Verify the appended data round-trips.
    with NWBHDF5IO(FILENAME, mode='r') as io:
        nwb = io.read()
        np.testing.assert_equal(nwb.acquisition['timeseries2'].data[:],
                                ts2.data)
def test_add_electrical_series(self):
    """Dict-style access and .get() on an LFP container return the same series."""
    container = LFP()  # noqa: F405
    dev1 = Device('dev1')  # noqa: F405
    group = ElectrodeGroup(  # noqa: F405, F841
        'tetrode1', 'tetrode description', 'tetrode location', dev1)
    electrode_table = make_electrode_table()
    subset = DynamicTableRegion('electrodes', [0, 2],
                                'the first and third electrodes',
                                electrode_table)
    series = ElectricalSeries(  # noqa: F405
        'test_eS', [0, 1, 2, 3], subset,
        timestamps=[0.1, 0.2, 0.3, 0.4])
    container.add_electrical_series(series)
    fetched = container.electrical_series.get('test_eS')
    self.assertEqual(fetched, series)
    self.assertEqual(container['test_eS'], fetched)
def test_add_decomposition_series(self):
    """A DecompositionSeries can be added to an LFP container without error."""
    container = LFP()
    source_ts = TimeSeries(name='dummy timeseries',
                           description='desc',
                           data=np.ones((3, 3)),
                           unit='Volts',
                           timestamps=np.ones((3, )))
    decomposition = DecompositionSeries(name='LFPSpectralAnalysis',
                                        description='my description',
                                        data=np.ones((3, 3, 3)),
                                        timestamps=np.ones((3, )),
                                        source_timeseries=source_ts,
                                        metric='amplitude')
    container.add_decomposition_series(decomposition)
def write_lfp(
    nwbfile: NWBFile,
    data: ArrayLike,
    fs: float,
    electrode_inds: Optional[List[int]] = None,
    name: Optional[str] = "LFP",
    description: Optional[str] = "local field potential signal",
):
    """
    Add LFP from neuroscope to a "ecephys" processing module of an NWBFile.

    Parameters
    ----------
    nwbfile: pynwb.NWBFile
    data: array-like
        Raw samples, shape (time, channels).
    fs: float
        Sampling rate in Hz.
    electrode_inds: list(int), optional
        Electrode-table rows to reference. Defaults to one row per data
        column, capped at the size of the electrode table when the table
        is smaller than the channel count.
    name: str, optional
    description: str, optional

    Returns
    -------
    LFP
        pynwb.ecephys.ElectricalSeries
    """
    if electrode_inds is None:
        # BUGFIX: the previous fallback branch dereferenced
        # nwbfile.electrodes even when the file had no electrode table,
        # raising AttributeError. Treat a missing table like a large one.
        if nwbfile.electrodes is None or data.shape[1] <= len(
                nwbfile.electrodes.id.data[:]):
            electrode_inds = list(range(data.shape[1]))
        else:
            electrode_inds = list(range(len(nwbfile.electrodes.id.data[:])))

    table_region = nwbfile.create_electrode_table_region(
        electrode_inds, "electrode table reference")

    # Stream in ~1-hour chunks, gzip-compressed on write.
    data = H5DataIO(
        DataChunkIterator(tqdm(data, desc="writing lfp data"),
                          buffer_size=int(fs * 3600)),
        compression="gzip",
    )

    lfp_electrical_series = ElectricalSeries(
        name=name,
        description=description,
        data=data,
        electrodes=table_region,
        conversion=1e-6,  # data are in microvolts; stored unit is volts
        rate=fs,
        resolution=np.nan,
    )

    ecephys_mod = check_module(
        nwbfile,
        "ecephys",
        "intermediate data from extracellular electrophysiology recordings, e.g., LFP",
    )

    if "LFP" not in ecephys_mod.data_interfaces:
        ecephys_mod.add_data_interface(LFP(name="LFP"))
    ecephys_mod.data_interfaces["LFP"].add_electrical_series(
        lfp_electrical_series)

    return lfp_electrical_series
def setUp(self):
    """Create an in-memory NWB file with a two-channel ElectricalSeries
    wrapped in an LFP container, exposed as self.electrodes / self.ephys_ts
    / self.lfp for the tests."""
    nwbfile = NWBFile(
        "my first synthetic recording",
        "EXAMPLE_ID",
        datetime.now(tzlocal()),
        experimenter="Dr. Matthew Douglass",
        lab="Vision Neuroscience Laboratory",
        institution="University of Middle Earth at the Shire",
        experiment_description=
        "We recorded from macaque monkeys during memory-guided saccade task",
        session_id="LONELYMTL",
    )
    rig = nwbfile.create_device(name="trodes_rig123")
    tetrode = nwbfile.create_electrode_group(
        name="tetrode1",
        description="an example tetrode",
        location="somewhere in the hippocampus",
        device=rig,
    )
    # Four electrodes with dummy positions and impedances.
    for electrode_id in (1, 2, 3, 4):
        nwbfile.add_electrode(
            id=electrode_id,
            x=1.0,
            y=2.0,
            z=3.0,
            imp=float(-electrode_id),
            location="CA1",
            filtering="none",
            group=tetrode,
        )
    self.electrodes = nwbfile.create_electrode_table_region(
        [0, 2], "the first and third electrodes")
    # Deterministic random signal: 1000 samples x 2 channels at 10 Hz.
    sampling_rate = 10.0
    np.random.seed(1234)
    n_samples = 1000
    signal = np.random.rand(n_samples * 2).reshape((n_samples, 2))
    sample_times = np.arange(n_samples) / sampling_rate
    self.ephys_ts = ElectricalSeries(
        "test_ephys_data",
        signal,
        self.electrodes,
        timestamps=sample_times,
        resolution=0.001,
        description="Random numbers generated with numpy.random.rand",
    )
    self.lfp = LFP(electrical_series=self.ephys_ts, name="LFP data")
def write_lfp(nwbfile, data, fs, name='LFP',
              description='local field potential signal',
              electrode_inds=None):
    """
    Add LFP from neuroscope to a "ecephys" processing module of an NWBFile

    Parameters
    ----------
    nwbfile: pynwb.NWBFile
    data: array-like
        Raw samples, shape (time, channels).
    fs: float
        Sampling rate in Hz.
    name: str
    description: str
    electrode_inds: list(int)
        Electrode-table rows to reference; defaults to one row per column.

    Returns
    -------
    pynwb.ecephys.ElectricalSeries
    """
    if electrode_inds is None:
        electrode_inds = list(range(data.shape[1]))

    region = nwbfile.create_electrode_table_region(
        electrode_inds, 'electrode table reference')

    # Stream in ~1-hour chunks, gzip-compressed on write.
    wrapped = H5DataIO(
        DataChunkIterator(tqdm(data, desc='writing lfp data'),
                          buffer_size=int(fs * 3600)),
        compression='gzip')

    # NOTE(review): conversion=np.nan leaves the physical scale undefined;
    # a sibling implementation uses conversion=1e-6 — confirm which is intended.
    series = ElectricalSeries(name=name,
                              description=description,
                              data=wrapped,
                              electrodes=region,
                              conversion=np.nan,
                              rate=fs,
                              resolution=np.nan)

    module = check_module(
        nwbfile, 'ecephys',
        'intermediate data from extracellular electrophysiology recordings, e.g., LFP'
    )
    if 'LFP' not in module.data_interfaces:
        module.add_data_interface(LFP(name='LFP'))
    module.data_interfaces['LFP'].add_electrical_series(series)

    return series
def add_LFP(nwbfile, expt, count=1, region='CA1'):
    """Load a neuroscope .eeg recording for an experiment and add it to the
    NWB file as an LFP acquisition.

    Parameters
    ----------
    nwbfile : pynwb.NWBFile
        File to add the LFP acquisition to.
    expt : object
        Experiment handle; must provide LFPFilePath() pointing at a
        directory containing one .eeg file plus a matching .xml.
    count : int, optional
        Suffix for the created device name ('LFP_Device_{count}').
    region : str, optional
        Brain region label used for electrode location and position lookup.
    """
    # First .eeg file in the experiment's LFP directory.
    eeg_local = [
        x for x in os.listdir(expt.LFPFilePath()) if x.endswith('.eeg')
    ][0]
    eeg_file = os.path.join(expt.LFPFilePath(), eeg_local)
    eeg_base = eeg_file.replace('.eeg', '')
    eeg_dict = lfph.loadEEG(eeg_base)

    # Channel layout comes from the neuroscope XML next to the .eeg file;
    # only the first channel group is used for the LFP signal.
    lfp_xml_fpath = eeg_base + '.xml'
    channel_groups = get_channel_groups(lfp_xml_fpath)
    lfp_channels = channel_groups[0]
    # NOTE(review): 'sampeFreq' looks like a misspelling of 'sampleFreq',
    # but it is a key produced by lfph.loadEEG — verify against that helper
    # before "fixing" it.
    lfp_fs = eeg_dict['sampeFreq']
    nchannels = eeg_dict['nChannels']  # NOTE(review): currently unused

    # Transpose to (time, channels) for ElectricalSeries.
    lfp_signal = eeg_dict['EEG'][lfp_channels].T

    device_name = 'LFP_Device_{}'.format(count)
    device = nwbfile.create_device(device_name)
    electrode_group = nwbfile.create_electrode_group(name=device_name +
                                                     '_electrodes',
                                                     description=device_name,
                                                     device=device,
                                                     location=region)
    # All electrodes in the group share one nominal position for the region.
    x, y, z = get_position(region)

    for channel in channel_groups[0]:
        nwbfile.add_electrode(
            float(x),
            float(y),
            float(z),  # position?
            imp=np.nan,
            location=region,
            filtering='See lab.misc.lfp_helpers.ConvertFromRHD',
            group=electrode_group,
            id=channel)

    lfp_table_region = nwbfile.create_electrode_table_region(
        list(range(len(lfp_channels))), 'lfp electrodes')

    # TODO add conversion field for moving to V
    # TODO figure out how to link lfp data (zipping seems kludgey)
    lfp_elec_series = ElectricalSeries(name='LFP',
                                       data=H5DataIO(lfp_signal,
                                                     compression='gzip'),
                                       electrodes=lfp_table_region,
                                       conversion=np.nan,
                                       rate=lfp_fs,
                                       resolution=np.nan)

    nwbfile.add_acquisition(LFP(electrical_series=lfp_elec_series))
def add_lfp(nwbfile, lfp_path, electrodes, iterator_flag,
            all_electrode_labels):
    """Add LFP data to the NWB file's 'ecephys' processing module, either
    eagerly or via a DataChunkIterator for large recordings.

    Parameters
    ----------
    nwbfile : pynwb.NWBFile
    lfp_path : str
        Path to the on-disk LFP data read by get_lfp_data / lfp_generator.
    electrodes : DynamicTableRegion
        Electrode-table region the ElectricalSeries references.
    iterator_flag : bool
        If True, stream the data through a generator instead of loading it
        all into memory.
    all_electrode_labels : sequence
        Labels for every electrode; its length sets the channel count.
    """
    num_electrodes = len(all_electrode_labels)

    if iterator_flag:
        print('Adding LFP using data chunk iterator')
        # Load a single channel up front only to obtain timestamps/rate;
        # the actual samples come from the generator below.
        lfp, lfp_timestamps, lfp_rate = get_lfp_data(
            num_electrodes=1,
            lfp_path=lfp_path,
            all_electrode_labels=all_electrode_labels)
        lfp_gen = lfp_generator(lfp_path=lfp_path,
                                num_electrodes=num_electrodes,
                                all_electrode_labels=all_electrode_labels)
        # iter_axis=1: the generator yields one channel at a time.
        lfp_data = DataChunkIterator(data=lfp_gen, iter_axis=1)
    else:
        print('Adding LFP')
        lfp_data, lfp_timestamps, lfp_rate = get_lfp_data(
            num_electrodes=num_electrodes,
            lfp_path=lfp_path,
            all_electrode_labels=all_electrode_labels)

    # NOTE(review): lfp_timestamps_sq is only used by the commented-out
    # rate sanity check below; the series is stored with rate only.
    lfp_timestamps_sq = np.squeeze(lfp_timestamps)
    # if 1/(lfp_timestamps_sq[1]-lfp_timestamps_sq[0]) !=lfp_rate:
    #     print("not equal to rate!!")
    #     print(str(lfp_timestamps_sq[1]-lfp_timestamps_sq[0]))
    #     print(str(lfp_rate))

    # time x 120
    # add the lfp metadata - some in the lab metadata and some in the electrical series
    lfp_es = ElectricalSeries(
        name='ElectricalSeries',
        data=lfp_data,
        electrodes=electrodes,
        #starting_time=float(lfp_timestamps_sq[0]),
        rate=lfp_rate,
        description="LFP")

    proc_module = nwbfile.create_processing_module(
        name='ecephys',
        description='module for processed extracellular electrophysiology data'
    )

    # Store LFP data in ecephys
    lfp = LFP(name='LFP', electrical_series=lfp_es)
    proc_module.add(lfp)
def test_show_lfp(self):
    """Smoke test: an LFP container renders through the multi-container widget."""
    sampling_rate = 10.0
    np.random.seed(1234)
    n_samples = 1000
    signal = np.random.rand(n_samples * 2).reshape((n_samples, 2))
    sample_times = np.arange(n_samples) / sampling_rate
    series = ElectricalSeries(
        "test_ephys_data",
        signal,
        self.electrodes,
        timestamps=sample_times,
        resolution=0.001,
        description="Random numbers generated with numpy.random.rand",
    )
    container = LFP(electrical_series=series, name="LFP data")
    show_multi_container_interface(container, default_neurodata_vis_spec)
def test_show_lfp(self):
    """Smoke test: show_lfp renders an LFP container without raising."""
    sampling_rate = 10.0
    np.random.seed(1234)
    n_samples = 1000
    signal = np.random.rand(n_samples * 2).reshape((n_samples, 2))
    sample_times = np.arange(n_samples) / sampling_rate
    series = ElectricalSeries(
        'test_ephys_data',
        signal,
        self.electrodes,
        timestamps=sample_times,
        resolution=0.001,
        comments=
        "This data was randomly generated with numpy, using 1234 as the seed",
        description="Random numbers generated with numpy.random.rand")
    container = LFP(electrical_series=series, name='LFP data')
    show_lfp(container, default_neurodata_vis_spec)
def preprocess_raw_data(block_path, config):
    """
    Takes raw data and runs:
    1) CAR
    2) notch filters
    3) Downsampling

    Parameters
    ----------
    block_path : str
        subject file path
    config : dictionary
        'CAR' - Number of channels to use in CAR (default=16)
        'Notch' - Main frequency (Hz) for notch filters (default=60)
        'Downsample' - Downsampling frequency (Hz, default= 400)

    Returns
    -------
    Saves preprocessed signals (LFP) in the current NWB file.
    Only if containers for these data do not exist in the current file.
    """
    subj_path, block_name = os.path.split(block_path)
    block_name = os.path.splitext(block_path)[0]
    start = time.time()

    with NWBHDF5IO(block_path, 'r+', load_namespaces=True) as io:
        nwb = io.read()

        # Storage of processed signals on NWB file -----------------------------
        try:  # if ecephys module already exists
            ecephys_module = nwb.processing['ecephys']
        except KeyError:  # creates ecephys ProcessingModule
            # BUGFIX: was a bare `except:`; only a missing module key
            # should trigger creation.
            ecephys_module = ProcessingModule(
                name='ecephys',
                description='Extracellular electrophysiology data.')
            # Add module to NWB file
            nwb.add_processing_module(ecephys_module)
            print('Created ecephys')

        # LFP: Downsampled and power line signal removed -----------------------
        if 'LFP' in nwb.processing[
                'ecephys'].data_interfaces:  # if LFP already exists
            lfp = nwb.processing['ecephys'].data_interfaces['LFP']
            lfp_ts = nwb.processing['ecephys'].data_interfaces[
                'LFP'].electrical_series['preprocessed']
        else:  # creates LFP data interface container
            lfp = LFP()

            # Data source: last ElectricalSeries found in the acquisition group
            source = None
            lis = list(nwb.acquisition.keys())
            for i in lis:
                # Check if there is ElectricalSeries in acquisition group
                if type(nwb.acquisition[i]).__name__ == 'ElectricalSeries':
                    source = nwb.acquisition[i]
            if source is None:
                # BUGFIX: previously fell through to an UnboundLocalError.
                raise ValueError('No ElectricalSeries found in acquisition group.')
            nChannels = source.data.shape[1]

            # Downsampling
            extraBins0 = 0
            extraBins1 = 0  # BUGFIX: was undefined when config['Notch'] is None
            fs = source.rate
            if config['Downsample'] is not None:
                print("Downsampling signals to " + str(config['Downsample']) +
                      " Hz.")
                print("Please wait, this might take around 30 minutes.")
                start = time.time()
                # zeros to pad to make signal length a power of 2
                nBins = source.data.shape[0]
                extraBins0 = 2**(np.ceil(np.log2(nBins)).astype('int')) - nBins
                extraZeros = np.zeros(extraBins0)
                rate = config['Downsample']
                # One channel at a time, to improve memory usage for long signals
                for ch in np.arange(nChannels):
                    # 1e6 scaling helps with numerical accuracy
                    Xch = source.data[:, ch] * 1e6
                    # Make length a power of 2, improves performance
                    Xch = np.append(Xch, extraZeros)
                    Xch = resample(Xch, rate, fs)
                    if ch == 0:
                        X = Xch.reshape(1, -1)
                    else:
                        X = np.append(X, Xch.reshape(1, -1), axis=0)
                print('Downsampling finished in {} seconds'.format(time.time() -
                                                                   start))
            else:  # No downsample
                rate = fs
                X = source.data[:, :].T * 1e6

            # Subtract CAR
            if config['CAR'] is not None:
                print("Computing and subtracting Common Average Reference in " +
                      str(config['CAR']) + " channel blocks.")
                start = time.time()
                X = subtract_CAR(X, b_size=config['CAR'])
                print('CAR subtract time for {}: {} seconds'.format(
                    block_name, time.time() - start))

            # Apply Notch filters
            if config['Notch'] is not None:
                print("Applying Notch filtering of " + str(config['Notch']) +
                      " Hz")
                # zeros to pad to make signal length a power of 2
                nBins = X.shape[1]
                extraBins1 = 2**(np.ceil(np.log2(nBins)).astype('int')) - nBins
                extraZeros = np.zeros(extraBins1)
                start = time.time()
                for ch in np.arange(nChannels):
                    Xch = np.append(X[ch, :], extraZeros).reshape(1, -1)
                    Xch = linenoise_notch(Xch, rate,
                                          notch_freq=config['Notch'])
                    if ch == 0:
                        X2 = Xch.reshape(1, -1)
                    else:
                        X2 = np.append(X2, Xch.reshape(1, -1), axis=0)
                print('Notch filter time for {}: {} seconds'.format(
                    block_name, time.time() - start))
                X = np.copy(X2)
                del X2

            # Remove excess bins (because of zero padding on previous steps)
            excessBins = int(np.ceil(extraBins0 * rate / fs) + extraBins1)
            if excessBins > 0:
                # BUGFIX: `X[:, 0:-0]` would produce an empty array when
                # no padding was added.
                X = X[:, 0:-excessBins]
            X = X.astype('float32')  # signal (nChannels,nSamples)
            X /= 1e6  # Scales signals back to Volt

            # Add preprocessed downsampled signals as an electrical_series
            if config['CAR'] is None:
                car = 'None'
            else:
                car = str(config['CAR'])
            if config['Notch'] is None:
                notch = 'None'
            else:
                notch = str(config['Notch'])
            if config['Downsample'] is None:
                downs = 'No'
            else:
                downs = 'Yes'
            config_comment = 'CAR:' + car + ', Notch:' + notch + \
                ', Downsampled:' + downs

            lfp_ts = lfp.create_electrical_series(name='preprocessed',
                                                  data=X.T,
                                                  electrodes=source.electrodes,
                                                  rate=rate,
                                                  description='',
                                                  comments=config_comment)
            ecephys_module.add_data_interface(lfp)

        # Write LFP to NWB file
        io.write(nwb)
        print('LFP saved in ' + block_path)
def transform(block_path, filter='default', bands_vals=None):
    """
    Takes raw LFP data and does the standard Hilbert algorithm:
    1) CAR
    2) notch filters
    3) Hilbert transform on different bands
    Takes about 20 minutes to run on 1 10-min block.

    Parameters
    ----------
    block_path : str
        subject file path
    filter: str, optional
        Frequency bands to filter the signal.
        'default' for Chang lab default values (Gaussian filters)
        'custom' for user defined (Gaussian filters)
    bands_vals: 2D array, necessary only if filter='custom'
        [2,nBands] numpy array with Gaussian filter parameters, where:
        bands_vals[0,:] = filter centers [Hz]
        bands_vals[1,:] = filter sigmas [Hz]

    Returns
    -------
    Saves preprocessed signals (LFP) and spectral power (DecompositionSeries)
    in the current NWB file. Only if containers for these data do not exist
    in the file.
    """
    write_file = 1  # NOTE(review): currently unused
    rate = 400.  # target LFP sampling rate [Hz] unless the file carries its own

    # Define filter parameters
    if filter == 'default':
        band_param_0 = bands.chang_lab['cfs']
        band_param_1 = bands.chang_lab['sds']
    elif filter == 'high_gamma':
        # Restrict the Chang-lab filter bank to the 70-150 Hz range.
        band_param_0 = bands.chang_lab['cfs'][(bands.chang_lab['cfs'] > 70)
                                              & (bands.chang_lab['cfs'] < 150)]
        band_param_1 = bands.chang_lab['sds'][(bands.chang_lab['cfs'] > 70)
                                              & (bands.chang_lab['cfs'] < 150)]
        #band_param_0 = [ bands.neuro['min_freqs'][-1] ]  #for hamming window filter
        #band_param_1 = [ bands.neuro['max_freqs'][-1] ]
        #band_param_0 = bands.chang_lab['cfs'][29:37]  #for average of gaussian filters
        #band_param_1 = bands.chang_lab['sds'][29:37]
    elif filter == 'custom':
        band_param_0 = bands_vals[0, :]
        band_param_1 = bands_vals[1, :]

    block_name = os.path.splitext(block_path)[0]
    start = time.time()

    with NWBHDF5IO(block_path, 'a') as io:
        nwb = io.read()

        # Storage of processed signals on NWB file -----------------------------
        # NOTE(review): nwb.modules is a deprecated alias of nwb.processing
        # in pynwb — consider migrating.
        if 'ecephys' not in nwb.modules:
            # Add module to NWB file
            nwb.create_processing_module(
                name='ecephys',
                description='Extracellular electrophysiology data.')
        ecephys_module = nwb.modules['ecephys']

        # LFP: Downsampled and power line signal removed
        if 'LFP' in nwb.modules['ecephys'].data_interfaces:
            # Reuse the already-preprocessed LFP stored in the file.
            lfp_ts = nwb.modules['ecephys'].data_interfaces[
                'LFP'].electrical_series['preprocessed']
            X = lfp_ts.data[:].T
            rate = lfp_ts.rate
        else:
            # 1e6 scaling helps with numerical accuracy
            X = nwb.acquisition['ECoG'].data[:].T * 1e6
            fs = nwb.acquisition['ECoG'].rate
            bad_elects = load_bad_electrodes(nwb)
            print('Load time for h5 {}: {} seconds'.format(
                block_name, time.time() - start))
            print('rates {}: {} {}'.format(block_name, rate, fs))
            # Downsample only if acquisition rate differs from the target rate.
            if not np.allclose(rate, fs):
                assert rate < fs
                start = time.time()
                X = resample(X, rate, fs)
                print('resample time for {}: {} seconds'.format(
                    block_name, time.time() - start))
            # Mask bad electrodes so they don't contaminate the CAR.
            if bad_elects.sum() > 0:
                X[bad_elects] = np.nan

            # Subtract CAR
            start = time.time()
            X = subtract_CAR(X)
            print('CAR subtract time for {}: {} seconds'.format(
                block_name, time.time() - start))

            # Apply Notch filters
            start = time.time()
            X = linenoise_notch(X, rate)
            print('Notch filter time for {}: {} seconds'.format(
                block_name, time.time() - start))

            lfp = LFP()
            # Add preprocessed downsampled signals as an electrical_series
            lfp_ts = lfp.create_electrical_series(
                name='preprocessed',
                data=X.T,
                electrodes=nwb.acquisition['ECoG'].electrodes,
                rate=rate,
                description='')
            ecephys_module.add_data_interface(lfp)

        # Spectral band power
        if 'Bandpower_' + filter not in nwb.modules['ecephys'].data_interfaces:

            # Apply Hilbert transform
            X = X.astype('float32')  # signal (nChannels,nSamples)
            nChannels = X.shape[0]
            nSamples = X.shape[1]
            nBands = len(band_param_0)
            Xp = np.zeros((nBands, nChannels,
                           nSamples))  # power (nBands,nChannels,nSamples)
            # X_fft_h caches the FFT of X across bands to avoid recomputation.
            X_fft_h = None
            for ii, (bp0, bp1) in enumerate(zip(band_param_0, band_param_1)):
                # if filter=='high_gamma':
                #     kernel = hamming(X, rate, bp0, bp1)
                # else:
                kernel = gaussian(X, rate, bp0, bp1)
                X_analytic, X_fft_h = hilbert_transform(X,
                                                        rate,
                                                        kernel,
                                                        phase=None,
                                                        X_fft_h=X_fft_h)
                # Band power = magnitude of the analytic signal.
                Xp[ii] = abs(X_analytic).astype('float32')

            # Scales signals back to Volt
            X /= 1e6

            band_param_0V = VectorData(
                name='filter_param_0',
                description='frequencies for bandpass filters',
                data=band_param_0)
            band_param_1V = VectorData(
                name='filter_param_1',
                description='frequencies for bandpass filters',
                data=band_param_1)
            bandsTable = DynamicTable(
                name='bands',
                description='Series of filters used for Hilbert transform.',
                columns=[band_param_0V, band_param_1V],
                colnames=['filter_param_0', 'filter_param_1'])

            # data: (ndarray) dims: num_times * num_channels * num_bands
            Xp = np.swapaxes(Xp, 0, 2)
            decs = DecompositionSeries(
                name='Bandpower_' + filter,
                data=Xp,
                description='Band power estimated with Hilbert transform.',
                metric='power',
                unit='V**2/Hz',
                bands=bandsTable,
                rate=rate,
                source_timeseries=lfp_ts)
            ecephys_module.add_data_interface(decs)
        io.write(nwb)
        print('done', flush=True)
description='lfp electrode {}'.format(channel), group=electrode_group) lfp_table_region = nwbfile.create_electrode_table_region( list(range(4)), 'lfp electrodes') lfp_elec_series = ElectricalSeries('lfp', 'lfp', gzip(lfp_signal), lfp_table_region, conversion=np.nan, starting_time=0.0, rate=lfp_fs, resolution=np.nan) nwbfile.add_acquisition(LFP(source=source, electrical_series=lfp_elec_series)) optical_channel = OpticalChannel( name='Optical Channel', source=NA, description=NA, emission_lambda=NA, ) imaging_h5_filepath = '/Users/bendichter/Desktop/Losonczy/from_sebi/example_data/TSeries-05042017-001_Cycle00001_Element00001.h5' with h5py.File(imaging_h5_filepath, 'r') as f: if SHORTEN: all_imaging_data = f['imaging'][:100, ...] else: all_imaging_data = f['imaging'][:]
device = nwbfile.create_device(device_label) electrode_group = nwbfile.create_electrode_group(name=device_label + ' electrode group', description=' ', device=device, location='unknown') nwbfile.add_electrode(i, x, y, z, imp=np.nan, location='unknown', filtering='unknown', description=label, group=electrode_group) electrode_table_region = nwbfile.create_electrode_table_region( list(range(len(electrode_positions))), 'all ECoG electrodes') nwbfile.add_acquisition( LFP(electrical_series=ElectricalSeries('lfp', 'lfp signal for all electrodes', lfp, electrode_table_region, starting_time=0.0, rate=lfp_rate))) with NWBHDF5IO('resting_state.nwb') as io: io.write(nwbfile)
# NWB provides the concept of a *data interface*--an object for a standard # storage location of specific types of data--through the :py:class:`~pynwb.base.NWBDataInterface` class. # For example, :py:class:`~pynwb.ecephys.LFP` provides a container for holding one or more # :py:class:`~pynwb.ecephys.ElectricalSeries` objects that store local-field potential data. By putting # your LFP data into an :py:class:`~pynwb.ecephys.LFP` container, downstream users and tools know where # to look to retrieve LFP data. For a comprehensive list of available data interfaces, see the # :ref:`overview page <modules_overview>` # # :py:class:`~pynwb.base.NWBDataInterface` objects can be added as acquisition data, or as members # of a :ref:`ProcessingModule <basic_procmod>` # # For the purposes of demonstration, we will use a :py:class:`~pynwb.ecephys.LFP` data interface. from pynwb.ecephys import LFP lfp = LFP('PyNWB tutorial') nwbfile.add_acquisition(lfp) #################### # Each data interface stores its own type of data. We suggest you read the documentation for the # data interface of interest in the :ref:`API documentation <api_docs>` to figure out what data the # data interface allows and/or requires and what methods you will need to call to add this data. #################### # .. _basic_procmod: # # Processing modules # ------------------ # # *Processing modules* are used for storing a set of data interfaces that are related to a particular # processing workflow. For example, if you want to store intermediate and final results of a spike sorting workflow,
def setUpContainer(self):
    """Build and return the LFP container used for the read/write test."""
    series = self.setUpTwoElectricalSeries()
    return LFP(series)
def preprocess_raw_data(block_path, config):
    """
    Takes raw data and runs:
    1) CAR
    2) notch filters
    3) Downsampling

    Parameters
    ----------
    block_path : str
        subject file path
    config : dictionary
        'referencing' - tuple specifying electrode referencing (type, options)
            ('CAR', N_channels_per_group)
            ('CMR', N_channels_per_group)
            ('bipolar', INCLUDE_OBLIQUE_NBHD)
        'Notch' - Main frequency (Hz) for notch filters (default=60)
        'Downsample' - Downsampling frequency (Hz, default= 400)

    Returns
    -------
    Saves preprocessed signals (LFP) in the current NWB file.
    Only if containers for these data do not exist in the current file.
    """
    subj_path, block_name = os.path.split(block_path)
    block_name = os.path.splitext(block_path)[0]
    start = time.time()

    with NWBHDF5IO(block_path, 'r+', load_namespaces=True) as io:
        nwb = io.read()

        # Storage of processed signals on NWB file ----------------------------
        if 'ecephys' in nwb.processing:
            ecephys_module = nwb.processing['ecephys']
        else:  # creates ecephys ProcessingModule
            ecephys_module = ProcessingModule(
                name='ecephys',
                description='Extracellular electrophysiology data.')
            # Add module to NWB file
            nwb.add_processing_module(ecephys_module)
            print('Created ecephys')

        # LFP: Downsampled and power line signal removed ----------------------
        if 'LFP' in nwb.processing['ecephys'].data_interfaces:
            # Preprocessed LFP already present: nothing to do. (The previous
            # version fetched the container and its 'preprocessed' series here
            # without using them, which could raise KeyError pointlessly.)
            pass
        else:
            # creates LFP data interface container
            lfp = LFP()

            # Data source
            source_list = [
                acq for acq in nwb.acquisition.values()
                if type(acq) == ElectricalSeries
            ]
            assert len(source_list) == 1, (
                'Not precisely one ElectricalSeries in acquisition!')
            source = source_list[0]
            nChannels = source.data.shape[1]

            # Downsampling
            if config['Downsample'] is not None:
                print("Downsampling signals to " + str(config['Downsample']) +
                      " Hz.")
                print("Please wait, this might take around 30 minutes.")
                start = time.time()
                # zeros to pad to make signal length a power of 2
                nBins = source.data.shape[0]
                extraBins0 = 2**(np.ceil(np.log2(nBins)).astype('int')) - nBins
                extraZeros = np.zeros(extraBins0)
                rate = config['Downsample']

                # malloc the downsampled array up front
                T = int(np.ceil((nBins + extraBins0) * rate / source.rate))
                X = np.zeros((source.data.shape[1], T))

                # One channel at a time, to improve memory usage for long signals
                for ch in np.arange(nChannels):
                    # 1e6 scaling helps with numerical accuracy
                    Xch = source.data[:, ch] * 1e6
                    # Make length a power of 2, improves performance
                    Xch = np.append(Xch, extraZeros)
                    X[ch, :] = resample(Xch, rate, source.rate)
                print(
                    'Downsampling finished in {} seconds'.format(time.time() -
                                                                 start))
            else:  # No downsample
                extraBins0 = 0
                rate = source.rate
                X = source.data[()].T * 1e6

            # re-reference the (scaled by 1e6!) data
            electrodes = source.electrodes
            if config['referencing'] is not None:
                if config['referencing'][0] == 'CAR':
                    print(
                        "Computing and subtracting Common Average Reference in "
                        + str(config['referencing'][1]) + " channel blocks.")
                    start = time.time()
                    X = subtract_CAR(X, b_size=config['referencing'][1])
                    print('CAR subtract time for {}: {} seconds'.format(
                        block_name, time.time() - start))
                elif config['referencing'][0] == 'bipolar':
                    X, bipolarTable, electrodes = get_bipolar_referenced_electrodes(
                        X, electrodes, rate, grid_step=1)
                    # add data interface for the metadata for saving
                    ecephys_module.add_data_interface(bipolarTable)
                    print('bipolarElectrodes stored for saving in ' +
                          block_path)
                else:
                    print('UNRECOGNIZED REFERENCING SCHEME; ', end='')
                    print('SKIPPING REFERENCING!')

            # Apply Notch filters
            if config['Notch'] is not None:
                print("Applying notch filtering of " + str(config['Notch']) +
                      " Hz")
                # zeros to pad to make signal length a power of 2
                nBins = X.shape[1]
                extraBins1 = 2**(np.ceil(np.log2(nBins)).astype('int')) - nBins
                extraZeros = np.zeros(extraBins1)
                start = time.time()
                for ch in np.arange(nChannels):
                    Xch = np.append(X[ch, :], extraZeros).reshape(1, -1)
                    Xch = linenoise_notch(Xch, rate,
                                          notch_freq=config['Notch'])
                    if ch == 0:
                        X2 = Xch.reshape(1, -1)
                    else:
                        X2 = np.append(X2, Xch.reshape(1, -1), axis=0)
                print('Notch filter time for {}: {} seconds'.format(
                    block_name, time.time() - start))
                X = np.copy(X2)
                del X2
            else:
                extraBins1 = 0

            # Remove excess bins (because of zero padding on previous steps)
            excessBins = int(
                np.ceil(extraBins0 * rate / source.rate) + extraBins1)
            if excessBins > 0:
                # BUGFIX: with no downsampling and no notch filtering,
                # excessBins == 0 and `X[:, 0:-0]` emptied the signal.
                X = X[:, 0:-excessBins]
            X = X.astype('float32')  # signal (nChannels,nSamples)
            X /= 1e6  # Scales signals back to volts

            # Add preprocessed downsampled signals as an electrical_series
            referencing = 'None' if config['referencing'] is None else config[
                'referencing'][0]
            notch = 'None' if config['Notch'] is None else str(config['Notch'])
            downs = 'No' if config['Downsample'] is None else 'Yes'
            config_comment = ('referencing:' + referencing + ',Notch:' +
                              notch + ', Downsampled:' + downs)

            # create an electrical series for the LFP and store it in lfp
            lfp_ts = lfp.create_electrical_series(name='preprocessed',
                                                  data=X.T,
                                                  electrodes=electrodes,
                                                  rate=rate,
                                                  description='',
                                                  comments=config_comment)
            ecephys_module.add_data_interface(lfp)

        # Write LFP to NWB file
        io.write(nwb)
        print('LFP saved in ' + block_path)
def preprocess_raw_data(block_path, config):
    """
    Takes raw data and runs:
    1) CAR
    2) notch filters
    3) Downsampling

    Parameters
    ----------
    block_path : str
        subject file path
    config : dictionary
        'referencing' - tuple specifying electrode referencing (type, options)
            ('CAR', N_channels_per_group)
            ('CMR', N_channels_per_group)
            ('bipolar', INCLUDE_OBLIQUE_NBHD)
        'Notch' - Main frequency (Hz) for notch filters (default=60)
        'Downsample' - Downsampling frequency (Hz, default= 400)

    Returns
    -------
    Saves preprocessed signals (LFP) in the current NWB file.
    Only if containers for these data do not exist in the current file.
    """
    subj_path, block_name = os.path.split(block_path)
    # NOTE(review): this immediately overwrites block_name with the full path
    # minus extension; os.path.splitext(block_name)[0] may have been intended.
    # block_name is only used in progress prints below — confirm before changing.
    block_name = os.path.splitext(block_path)[0]
    start = time.time()

    # Open read/write so the preprocessed LFP can be appended to the same file.
    with NWBHDF5IO(block_path, 'r+', load_namespaces=True) as io:
        nwb = io.read()

        # Storage of processed signals on NWB file ----------------------------
        if 'ecephys' in nwb.processing:
            ecephys_module = nwb.processing['ecephys']
        else:  # creates ecephys ProcessingModule
            ecephys_module = ProcessingModule(
                name='ecephys',
                description='Extracellular electrophysiology data.')
            # Add module to NWB file
            nwb.add_processing_module(ecephys_module)
            print('Created ecephys')

        # LFP: Downsampled and power line signal removed ----------------------
        # Idempotence guard: never reprocess a file that already holds LFP data.
        if 'LFP' in nwb.processing['ecephys'].data_interfaces:
            warnings.warn(
                'LFP data already exists in the nwb file. Skipping preprocessing.'
            )
        else:  # creates LFP data interface container
            lfp = LFP()

            # Data source: exactly one raw ElectricalSeries in acquisition.
            source_list = [
                acq for acq in nwb.acquisition.values()
                if type(acq) == ElectricalSeries
            ]
            assert len(source_list) == 1, (
                'Not precisely one ElectricalSeries in acquisition!')
            source = source_list[0]
            nChannels = source.data.shape[1]

            # Downsampling
            if config['Downsample'] is not None:
                print("Downsampling signals to " + str(config['Downsample']) +
                      " Hz.")
                print("Please wait...")
                start = time.time()
                # Note: zero padding the signal to make the length
                # a power of 2 won't help, since resample will further pad it
                # (breaking the power of 2)
                nBins = source.data.shape[0]
                rate = config['Downsample']
                # malloc: output is (nChannels, T) — channels-first from here on
                T = int(np.ceil(nBins * rate / source.rate))
                X = np.zeros((source.data.shape[1], T))
                # One channel at a time, to improve memory usage for long signals
                for ch in np.arange(nChannels):
                    # 1e6 scaling helps with numerical accuracy
                    Xch = source.data[:, ch] * 1e6
                    X[ch, :] = resample(Xch, rate, source.rate)
                print(
                    'Downsampling finished in {} seconds'.format(time.time() -
                                                                 start))
            else:  # No downsample
                rate = source.rate
                X = source.data[()].T * 1e6

            # re-reference the (scaled by 1e6!) data
            electrodes = source.electrodes
            if config['referencing'] is not None:
                if config['referencing'][0] == 'CAR':
                    print(
                        "Computing and subtracting Common Average Reference in "
                        + str(config['referencing'][1]) + " channel blocks.")
                    start = time.time()
                    X = subtract_CAR(X, b_size=config['referencing'][1])
                    print('CAR subtract time for {}: {} seconds'.format(
                        block_name, time.time() - start))
                elif config['referencing'][0] == 'bipolar':
                    # Bipolar referencing replaces the electrodes region with a
                    # new table describing the derived channel pairs.
                    X, bipolarTable, electrodes = get_bipolar_referenced_electrodes(
                        X, electrodes, rate, grid_step=1)
                    # add data interface for the metadata for saving
                    ecephys_module.add_data_interface(bipolarTable)
                    print('bipolarElectrodes stored for saving in ' + block_path)
                else:
                    # NOTE(review): the docstring advertises 'CMR' but there is
                    # no branch for it — it falls through to this warning.
                    print('UNRECOGNIZED REFERENCING SCHEME; ', end='')
                    print('SKIPPING REFERENCING!')

            # Apply Notch filters
            if config['Notch'] is not None:
                print("Applying notch filtering of " + str(config['Notch']) +
                      " Hz")
                # Note: zero padding the signal to make the length a power
                # of 2 won't help, since notch filtering will further pad it
                start = time.time()
                for ch in np.arange(nChannels):
                    # NOTE: apply_linenoise_notch takes a signal that is
                    # (n_timePoints, n_channels). The documentation may be wrong
                    Xch = X[ch, :].reshape(-1, 1)
                    Xch = apply_linenoise_notch(Xch, rate)
                    X[ch, :] = Xch[:, 0]
                print('Notch filter time for {}: {} seconds'.format(
                    block_name, time.time() - start))

            X = X.astype('float32')  # signal (nChannels,nSamples)
            X /= 1e6  # Scales signals back to volts

            # Add preprocessed downsampled signals as an electrical_series
            referencing = 'None' if config['referencing'] is None else config[
                'referencing'][0]
            notch = 'None' if config['Notch'] is None else str(config['Notch'])
            downs = 'No' if config['Downsample'] is None else 'Yes'
            config_comment = ('referencing:' + referencing + ', Notch:' + notch +
                              ', Downsampled:' + downs)

            # create an electrical series for the LFP and store it in lfp
            lfp.create_electrical_series(name='preprocessed',
                                         data=X.T,
                                         electrodes=electrodes,
                                         rate=rate,
                                         description='',
                                         comments=config_comment)
            ecephys_module.add_data_interface(lfp)

            # Write LFP to NWB file
            io.write(nwb)
            print('LFP saved in ' + block_path)
# NWB provides the concept of a *data interface*--an object for a standard # storage location of specific types of data--through the :py:class:`~pynwb.base.NWBDataInterface` class. # For example, :py:class:`~pynwb.ecephys.LFP` provides a container for holding one or more # :py:class:`~pynwb.ecephys.ElectricalSeries` objects that store local-field potential data. By putting # your LFP data into an :py:class:`~pynwb.ecephys.LFP` container, downstream users and tools know where # to look to retrieve LFP data. For a comprehensive list of available data interfaces, see the # :ref:`overview page <modules_overview>` # # :py:class:`~pynwb.base.NWBDataInterface` objects can be added as acquisition data, or as members # of a :ref:`ProcessingModule <basic_procmod>` # # For the purposes of demonstration, we will use a :py:class:`~pynwb.ecephys.LFP` data interface. from pynwb.ecephys import LFP lfp = LFP() nwbfile.add_acquisition(lfp) #################### # Each data interface stores its own type of data. We suggest you read the documentation for the # data interface of interest in the :ref:`API documentation <api_docs>` to figure out what data the # data interface allows and/or requires and what methods you will need to call to add this data. #################### # .. _basic_procmod: # # Processing modules # ------------------ # # *Processing modules* are used for storing a set of data interfaces that are related to a particular # processing workflow. For example, if you want to store intermediate and final results of a spike sorting workflow,
all_table_region = nwbfile.create_electrode_table_region( list(range(electrode_counter)), 'all electrodes') ### from pynwb.ecephys import ElectricalSeries, LFP lfp_data = np.random.randn(100, 7) all_lfp = nwbfile.add_acquisition( LFP( 'source', ElectricalSeries( 'name', 'source', lfp_data, all_table_region, starting_time=0.0, rate=1000., # Hz resolution=.001, conversion=1., unit='V'))) ### from pynwb.misc import UnitTimes # gen spiking data all_spikes = [] for unit in range(20): n_spikes = np.random.poisson(lam=10) all_spikes.append(np.random.randn(n_spikes))
def chang2nwb(blockpath, out_file_path=None, save_to_file=False,
              htk_config=None):
    """
    Convert a Chang-lab HTK block into an NWB file (electrodes, ECoG data,
    analog channels, bad-time intervals, subject info).

    Parameters
    ----------
    blockpath: str
    out_file_path: None | str
        if None, output = [blockpath]/[blockname].nwb
    save_to_file : bool
        If True, saves to file. If False, just returns nwbfile object
    htk_config : dict
        Dictionary containing HTK conversion paths and options. Example:
        {
            ecephys_path: 'path_to/ecephys_htk_files',
            ecephys_type: 'raw', 'preprocessed' or 'high_gamma',
            analog_path: 'path_to/analog_htk_files',
            anin1: {present: True, name: 'microphone', type: 'acquisition'},
            anin2: {present: True, name: 'speaker1', type: 'stimulus'},
            anin3: {present: False, name: 'speaker2', type: 'stimulus'},
            anin4: {present: False, name: 'custom', type: 'acquisition'},
            metadata: metadata,
            electrodes_file: electrodes_file,
            bipolar_file: bipolar_file
        }

    Returns
    -------
    tuple of (nwbfile, out_file_path, subject_id, blockname)

    NOTE(review): htk_config is dereferenced unconditionally below
    (htk_config['analog_path'] etc.), so despite the default the function
    cannot actually run with htk_config=None — confirm intent.
    """
    metadata = {}

    if htk_config is None:
        blockpath = Path(blockpath)
    else:
        blockpath = Path(htk_config['ecephys_path'])
        metadata = htk_config['metadata']
    # Directory layout assumption: .../EC<subject>/<blockname>/<htk files>
    blockname = blockpath.parent.name
    subject_id = blockpath.parent.parent.name[2:]

    if out_file_path is None:
        out_file_path = blockpath.resolve().parent / ''.join(
            ['EC', subject_id, '_', blockname, '.nwb'])

    # file paths
    ecog_path = blockpath
    anin_path = htk_config['analog_path']
    bad_time_file = path.join(blockpath, 'Artifacts', 'badTimeSegments.mat')

    # Create the NWB file object; user-supplied metadata overrides defaults.
    nwbfile_dict = {
        'session_description': blockname,
        'identifier': blockname,
        'session_start_time': datetime.now().astimezone(),
        'institution': 'University of California, San Francisco',
        'lab': 'Chang Lab'
    }
    if 'NWBFile' in metadata:
        nwbfile_dict.update(metadata['NWBFile'])
    nwbfile = NWBFile(**nwbfile_dict)

    # Read electrophysiology data from HTK files
    print('reading htk acquisition...', flush=True)
    ecog_rate, data = readhtks(ecog_path)
    data = data.squeeze()
    print('done', flush=True)

    # Get electrodes info from mat file
    if htk_config['electrodes_file'] is not None:
        # Electrode table comes from the provided .mat file.
        nwbfile = elecs_to_electrode_table(
            nwbfile=nwbfile,
            elecspath=htk_config['electrodes_file'],
        )
        n_electrodes = nwbfile.electrodes[:].shape[0]
        all_elecs = list(range(n_electrodes))
        elecs_region = nwbfile.create_electrode_table_region(
            region=all_elecs,
            description='ECoG electrodes on brain'
        )
    else:
        # No electrodes file: build a minimal device/group/table from defaults
        # (optionally overridden by metadata['Ecephys']).
        ecephys_dict = {
            'Device': [{'name': 'auto_device'}],
            'ElectricalSeries': [{'name': 'ECoG',
                                  'description': 'description'}],
            'ElectrodeGroup': [{'name': 'auto_group',
                                'description': 'auto_group',
                                'location': 'location',
                                'device': 'auto_device'}]
        }
        if 'Ecephys' in metadata:
            ecephys_dict.update(metadata['Ecephys'])

        # Create devices
        for dev in ecephys_dict['Device']:
            device = nwbfile.create_device(dev['name'])

        # Electrode groups
        for el_grp in ecephys_dict['ElectrodeGroup']:
            device = nwbfile.devices[el_grp['device']]
            electrode_group = nwbfile.create_electrode_group(
                name=el_grp['name'],
                description=el_grp['description'],
                location=el_grp['location'],
                device=device
            )

        # Electrodes table: one row per data column, positions unknown (nan).
        n_electrodes = data.shape[1]
        nwbfile.add_electrode_column('label', 'label of electrode')
        nwbfile.add_electrode_column('bad', 'electrode identified as too noisy')
        nwbfile.add_electrode_column('x_warped',
                                     'x warped onto cvs_avg35_inMNI152')
        nwbfile.add_electrode_column('y_warped',
                                     'y warped onto cvs_avg35_inMNI152')
        nwbfile.add_electrode_column('z_warped',
                                     'z warped onto cvs_avg35_inMNI152')
        nwbfile.add_electrode_column('null',
                                     'if not connected to real electrode')
        bad_elecs_inds = get_bad_elecs(blockpath)
        for elec_counter in range(n_electrodes):
            bad = elec_counter in bad_elecs_inds
            nwbfile.add_electrode(
                id=elec_counter,
                x=np.nan,
                y=np.nan,
                z=np.nan,
                imp=np.nan,
                x_warped=np.nan,
                y_warped=np.nan,
                z_warped=np.nan,
                location='',
                filtering='none',
                group=electrode_group,
                label='',
                bad=bad,
                null=False,
            )

        all_elecs = list(range(n_electrodes))
        elecs_region = nwbfile.create_electrode_table_region(
            region=all_elecs,
            description='ECoG electrodes on brain'
        )

    # Get Bipolar table from file
    if htk_config['bipolar_file'] is not None:
        df = pd.read_csv(htk_config['bipolar_file'], index_col='id', sep='\t')

        # Create bipolar scheme table
        bipolar_scheme_table = BipolarSchemeTable(
            name='bipolar_scheme_table',
            description='desc'
        )

        # Columns for bipolar scheme - all anodes and cathodes within the same
        # bipolar row are considered to have the same group and location
        bipolar_scheme_table.add_column(
            name='group_name',
            description='electrode group name'
        )
        bipolar_scheme_table.add_column(
            name='location',
            description='electrode location'
        )

        # Iterate over anode / cathode rows; cells may hold either a single
        # electrode id or a comma-separated list of ids.
        for i, r in df.iterrows():
            if isinstance(r['anodes'], str):
                anodes = [int(a) for a in r['anodes'].split(',')]
            else:
                anodes = [int(r['anodes'])]
            if isinstance(r['cathodes'], str):
                cathodes = [int(a) for a in r['cathodes'].split(',')]
            else:
                cathodes = [int(r['cathodes'])]
            bipolar_scheme_table.add_row(
                anodes=anodes,
                cathodes=cathodes,
                group_name=nwbfile.electrodes['group_name'][anodes[0]],
                location=nwbfile.electrodes['location'][anodes[0]]
            )

        bipolar_scheme_table.anodes.table = nwbfile.electrodes
        bipolar_scheme_table.cathodes.table = nwbfile.electrodes

        # Creates bipolar table region (replaces the plain electrodes region)
        elecs_region = DynamicTableRegion(
            name='electrodes',
            data=np.arange(0, df.shape[0]),
            description='desc',
            table=bipolar_scheme_table
        )

        ecephys_ext = EcephysExt(name='ecephys_ext')
        ecephys_ext.bipolar_scheme_table = bipolar_scheme_table
        nwbfile.add_lab_meta_data(ecephys_ext)

    # Stores HTK electrophysiology data as raw, preprocessed or high gamma
    if htk_config['ecephys_type'] == 'raw':
        ecog_es = ElectricalSeries(name='ECoG',
                                   data=H5DataIO(data[:, 0:n_electrodes],
                                                 compression='gzip'),
                                   electrodes=elecs_region,
                                   rate=ecog_rate,
                                   description='all Wav data')
        nwbfile.add_acquisition(ecog_es)
    elif htk_config['ecephys_type'] == 'preprocessed':
        lfp = LFP()
        ecog_es = ElectricalSeries(name='preprocessed',
                                   data=H5DataIO(data[:, 0:n_electrodes],
                                                 compression='gzip'),
                                   electrodes=elecs_region,
                                   rate=ecog_rate,
                                   description='all Wav data')
        lfp.add_electrical_series(ecog_es)
        # Creates the ecephys processing module
        ecephys_module = nwbfile.create_processing_module(
            name='ecephys',
            description='preprocessed electrophysiology data'
        )
        ecephys_module.add_data_interface(lfp)
    elif htk_config['ecephys_type'] == 'high_gamma':
        ecog_es = ElectricalSeries(name='high_gamma',
                                   data=H5DataIO(data[:, 0:n_electrodes],
                                                 compression='gzip'),
                                   electrodes=elecs_region,
                                   rate=ecog_rate,
                                   description='all Wav data')
        # Creates the ecephys processing module
        ecephys_module = nwbfile.create_processing_module(
            name='ecephys',
            description='preprocessed electrophysiology data'
        )
        ecephys_module.add_data_interface(ecog_es)

    # Add ANIN 1
    if htk_config['anin1']['present']:
        fs, data = get_analog(anin_path, 1)
        ts = TimeSeries(
            name=htk_config['anin1']['name'],
            data=data,
            unit='NA',
            rate=fs,
        )
        if htk_config['anin1']['type'] == 'acquisition':
            nwbfile.add_acquisition(ts)
        else:
            nwbfile.add_stimulus(ts)
        print('ANIN1 saved with name "', htk_config['anin1']['name'],
              '" in ', htk_config['anin1']['type'])

    # Add ANIN 2
    if htk_config['anin2']['present']:
        fs, data = get_analog(anin_path, 2)
        ts = TimeSeries(
            name=htk_config['anin2']['name'],
            data=data,
            unit='NA',
            rate=fs,
        )
        if htk_config['anin2']['type'] == 'acquisition':
            nwbfile.add_acquisition(ts)
        else:
            nwbfile.add_stimulus(ts)
        print('ANIN2 saved with name "', htk_config['anin2']['name'],
              '" in ', htk_config['anin2']['type'])

    # Add ANIN 3
    if htk_config['anin3']['present']:
        fs, data = get_analog(anin_path, 3)
        ts = TimeSeries(
            name=htk_config['anin3']['name'],
            data=data,
            unit='NA',
            rate=fs,
        )
        if htk_config['anin3']['type'] == 'acquisition':
            nwbfile.add_acquisition(ts)
        else:
            nwbfile.add_stimulus(ts)
        print('ANIN3 saved with name "', htk_config['anin3']['name'],
              '" in ', htk_config['anin3']['type'])

    # Add ANIN 4
    if htk_config['anin4']['present']:
        fs, data = get_analog(anin_path, 4)
        ts = TimeSeries(
            name=htk_config['anin4']['name'],
            data=data,
            unit='NA',
            rate=fs,
        )
        if htk_config['anin4']['type'] == 'acquisition':
            nwbfile.add_acquisition(ts)
        else:
            nwbfile.add_stimulus(ts)
        print('ANIN4 saved with name "', htk_config['anin4']['name'],
              '" in ', htk_config['anin4']['type'])

    # Add bad time segments
    if os.path.exists(bad_time_file) and os.stat(bad_time_file).st_size:
        bad_time = sio.loadmat(bad_time_file)['badTimeSegments']
        for row in bad_time:
            nwbfile.add_invalid_time_interval(start_time=row[0],
                                              stop_time=row[1],
                                              tags=('ECoG artifact',),
                                              timeseries=ecog_es)

    # Subject
    subject_dict = {'subject_id': subject_id}
    if 'Subject' in metadata:
        subject_dict.update(metadata['Subject'])
    subject = ECoGSubject(**subject_dict)
    nwbfile.subject = subject

    if save_to_file:
        print('Saving HTK content to NWB file...')
        # Export the NWB file
        with NWBHDF5IO(str(out_file_path), manager=manager, mode='w') as io:
            io.write(nwbfile)
        # read check
        with NWBHDF5IO(str(out_file_path), manager=manager, mode='r') as io:
            io.read()
        print('NWB file saved: ', str(out_file_path))

    return nwbfile, out_file_path, subject_id, blockname
def setUp(self):
    """Build a fixture NWBFile with one tetrode, an LFP interface holding a
    frozen 50x2 'preprocessed' ElectricalSeries in an 'ecephys' processing
    module, plus an empty second NWBFile (self.nwbfile_new)."""
    self.nwbfile = NWBFile(
        'my first synthetic recording',
        'EXAMPLE_ID',
        datetime.now(tzlocal()),
        experimenter='Dr. Bilbo Baggins',
        lab='Bag End Laboratory',
        institution='University of Middle Earth at the Shire',
        experiment_description='I went on an adventure with thirteen dwarves to reclaim vast treasures.',
        session_id='LONELYMTN'
    )
    device = self.nwbfile.create_device(name='trodes_rig123')
    electrode_name = 'tetrode1'
    description = "an example tetrode"
    location = "somewhere in the hippocampus"
    electrode_group = self.nwbfile.create_electrode_group(
        electrode_name,
        description=description,
        location=location,
        device=device
    )
    # Four electrodes with arbitrary deterministic metadata (ids 1-4).
    for idx in [1, 2, 3, 4]:
        self.nwbfile.add_electrode(
            id=idx,
            x=1.0, y=2.0, z=3.0,
            imp=float(-idx),
            location='CA1', filtering='none',
            group=electrode_group
        )
    # The series below references only rows 0 and 2 of the electrode table.
    electrode_table_region = self.nwbfile.create_electrode_table_region([0, 2], 'the first and third electrodes')

    rate = 5.0
    np.random.seed(1234)
    data_len = 50
    # Frozen (data_len, 2) sample block; per the series description, these
    # values were produced with np.random.rand and seed 1234, then inlined.
    ephys_data = np.array([[0.76711663, 0.70811536],
                           [0.79686718, 0.55776083],
                           [0.96583653, 0.1471569],
                           [0.029647, 0.59389349],
                           [0.1140657, 0.95080985],
                           [0.32570741, 0.19361869],
                           [0.45781165, 0.92040257],
                           [0.87906916, 0.25261576],
                           [0.34800879, 0.18258873],
                           [0.90179605, 0.70652816],
                           [0.72665846, 0.90008784],
                           [0.7791638, 0.59915478],
                           [0.29112524, 0.15139526],
                           [0.33517466, 0.65755178],
                           [0.07334254, 0.0550064],
                           [0.32319481, 0.5904818],
                           [0.85389857, 0.28706243],
                           [0.17306723, 0.13402121],
                           [0.99465383, 0.17949787],
                           [0.31754682, 0.5682914],
                           [0.00934857, 0.90064862],
                           [0.97724143, 0.55689468],
                           [0.08477384, 0.33300247],
                           [0.72842868, 0.14243537],
                           [0.55246894, 0.27304326],
                           [0.97449514, 0.66778691],
                           [0.25565329, 0.10831149],
                           [0.77618072, 0.78247799],
                           [0.76160391, 0.91440311],
                           [0.65862278, 0.56836758],
                           [0.20175569, 0.69829638],
                           [0.95219541, 0.88996329],
                           [0.99356736, 0.81870351],
                           [0.54512217, 0.45125405],
                           [0.89055719, 0.97326479],
                           [0.59341133, 0.3660745],
                           [0.32309469, 0.87142326],
                           [0.21563406, 0.73494519],
                           [0.36561909, 0.8016026],
                           [0.78273559, 0.70135538],
                           [0.62277659, 0.49368265],
                           [0.8405377, 0.71209699],
                           [0.44390898, 0.03103486],
                           [0.36323976, 0.73072179],
                           [0.47556657, 0.34441697],
                           [0.64088043, 0.12620532],
                           [0.17146526, 0.73708649],
                           [0.12702939, 0.36964987],
                           [0.604334, 0.10310444],
                           [0.80237418, 0.94555324]])
    ephys_timestamps = np.arange(data_len) / rate
    ephys_ts = ElectricalSeries('preprocessed',
                                ephys_data,
                                electrode_table_region,
                                starting_time=ephys_timestamps[0],
                                rate=rate,
                                resolution=0.001,
                                comments="This data was randomly generated with numpy, using 1234 as the seed",
                                description="Random numbers generated with numpy.random.rand")
    # NOTE(review): LFP(ephys_ts) passes the series positionally; recent pynwb
    # takes LFP(electrical_series=...) — confirm against the pinned version.
    lfp = LFP(ephys_ts)
    self.nwbfile.create_processing_module(
        name='ecephys',
        description='preprocessed ecephys data'
    )
    self.nwbfile.processing['ecephys'].add(lfp)

    # Second, empty file used as a copy/export destination in the tests.
    self.nwbfile_new = NWBFile(
        'my first synthetic recording',
        'EXAMPLE_ID',
        datetime.now(tzlocal()),
        experimenter='Dr. Bilbo Baggins',
        lab='Bag End Laboratory',
        institution='University of Middle Earth at the Shire',
        experiment_description='I went on an adventure with thirteen dwarves to reclaim vast treasures.',
        session_id='LONELYMTN'
    )
print('done.') print('making ElectricalSeries objects for LFP...', end='', flush=True) all_lfp_electrical_series = ElectricalSeries( 'all_lfp', 'lfp signal for all shank electrodes', data, all_table_region, conversion=np.nan, starting_time=0.0, rate=lfp_fs, resolution=np.nan) all_ts.append(all_lfp_electrical_series) all_lfp = nwbfile.add_acquisition( LFP(name='all_lfp', source='source', electrical_series=all_lfp_electrical_series)) print('done.') electrical_series = ElectricalSeries('reference_lfp', 'signal used as the reference lfp', gzip(all_channels[:, lfp_channel]), lfp_table_region, conversion=np.nan, starting_time=0.0, rate=lfp_fs, resolution=np.nan) lfp = nwbfile.add_acquisition( LFP(source='source', name='reference_lfp',
def setUpContainer(self):
    """Return the LFP container under test, wrapping the prepared
    ElectricalSeries fixtures."""
    series = self.setUpElectricalSeriesContainers()
    return LFP(series)
def main():
    """Legacy PyNWB tutorial walkthrough: creates a synthetic NWB file with
    ephys and spatial series, data interfaces, epochs, and gzip-compressed
    series, writing (and deleting) an intermediate example.h5.

    NOTE(review): the positional 'source' arguments and the
    pynwb.form.backends.hdf5 import are pre-1.0 pynwb APIs — this script only
    runs against that legacy release.
    """
    import os.path

    # prerequisites: start
    import numpy as np

    rate = 10.0
    np.random.seed(1234)
    data_len = 1000
    ephys_data = np.random.rand(data_len)
    ephys_timestamps = np.arange(data_len) / rate
    # Spatial data sampled at 1/10th the ephys rate; 2D random walk.
    spatial_timestamps = ephys_timestamps[::10]
    spatial_data = np.cumsum(np.random.normal(size=(2,
                                                    len(spatial_timestamps))),
                             axis=-1).T
    # prerequisites: end

    # create-nwbfile: start
    from datetime import datetime
    from dateutil.tz import tzlocal
    from pynwb import NWBFile

    f = NWBFile(
        'the PyNWB tutorial',
        'my first synthetic recording',
        'EXAMPLE_ID',
        datetime.now(tzlocal()),
        experimenter='Dr. Bilbo Baggins',
        lab='Bag End Laboratory',
        institution='University of Middle Earth at the Shire',
        experiment_description=
        'I went on an adventure with thirteen dwarves to reclaim vast treasures.',
        session_id='LONELYMTN')
    # create-nwbfile: end

    # save-nwbfile: start
    from pynwb import NWBHDF5IO

    filename = "example.h5"
    io = NWBHDF5IO(filename, mode='w')
    io.write(f)
    io.close()
    # save-nwbfile: end

    # The write above is only for demonstration; remove the file again.
    os.remove(filename)

    # create-device: start
    device = f.create_device(name='trodes_rig123', source="a source")
    # create-device: end

    # create-electrode-groups: start
    electrode_name = 'tetrode1'
    source = "an hypothetical source"
    description = "an example tetrode"
    location = "somewhere in the hippocampus"
    electrode_group = f.create_electrode_group(electrode_name,
                                               source=source,
                                               description=description,
                                               location=location,
                                               device=device)
    # create-electrode-groups: end

    # create-electrode-table-region: start
    for idx in [1, 2, 3, 4]:
        f.add_electrode(idx,
                        x=1.0,
                        y=2.0,
                        z=3.0,
                        imp=float(-idx),
                        location='CA1',
                        filtering='none',
                        description='channel %s' % idx,
                        group=electrode_group)
    electrode_table_region = f.create_electrode_table_region(
        [0, 2], 'the first and third electrodes')
    # create-electrode-table-region: end

    # create-timeseries: start
    from pynwb.ecephys import ElectricalSeries
    from pynwb.behavior import SpatialSeries

    ephys_ts = ElectricalSeries(
        'test_ephys_data',
        'an hypothetical source',
        ephys_data,
        electrode_table_region,
        timestamps=ephys_timestamps,
        # Alternatively, could specify starting_time and rate as follows
        # starting_time=ephys_timestamps[0],
        # rate=rate,
        resolution=0.001,
        comments=
        "This data was randomly generated with numpy, using 1234 as the seed",
        description="Random numbers generated with numpy.random.rand")
    f.add_acquisition(ephys_ts)

    spatial_ts = SpatialSeries(
        'test_spatial_timeseries',
        'a stumbling rat',
        spatial_data,
        'origin on x,y-plane',
        timestamps=spatial_timestamps,
        resolution=0.1,
        comments="This data was generated with numpy, using 1234 as the seed",
        description="This 2D Brownian process generated with "
        "np.cumsum(np.random.normal(size=(2, len(spatial_timestamps))), axis=-1).T"
    )
    f.add_acquisition(spatial_ts)
    # create-timeseries: end

    # create-data-interface: start
    from pynwb.ecephys import LFP
    from pynwb.behavior import Position

    # Same data as above, but routed through LFP / Position data interfaces.
    lfp = f.add_acquisition(LFP('a hypothetical source'))
    ephys_ts = lfp.create_electrical_series(
        'test_ephys_data',
        'an hypothetical source',
        ephys_data,
        electrode_table_region,
        timestamps=ephys_timestamps,
        resolution=0.001,
        comments=
        "This data was randomly generated with numpy, using 1234 as the seed",  # noqa: E501
        description="Random numbers generated with numpy.random.rand")

    pos = f.add_acquisition(Position('a hypothetical source'))
    spatial_ts = pos.create_spatial_series(
        'test_spatial_timeseries',
        'a stumbling rat',
        spatial_data,
        'origin on x,y-plane',
        timestamps=spatial_timestamps,
        resolution=0.1,
        comments="This data was generated with numpy, using 1234 as the seed",
        description="This 2D Brownian process generated with "
        "np.cumsum(np.random.normal(size=(2, len(spatial_timestamps))), axis=-1).T"
    )  # noqa: E501
    # create-data-interface: end

    # create-epochs: start
    epoch_tags = ('example_epoch', )

    f.add_epoch(name='epoch1',
                start_time=0.0,
                stop_time=1.0,
                tags=epoch_tags,
                description="the first test epoch",
                timeseries=[ephys_ts, spatial_ts])
    f.add_epoch(name='epoch2',
                start_time=0.0,
                stop_time=1.0,
                tags=epoch_tags,
                description="the second test epoch",
                timeseries=[ephys_ts, spatial_ts])
    # create-epochs: end

    # create-compressed-timeseries: start
    from pynwb.ecephys import ElectricalSeries
    from pynwb.behavior import SpatialSeries
    from pynwb.form.backends.hdf5 import H5DataIO

    # Same series again, wrapped in H5DataIO for gzip compression on write.
    ephys_ts = ElectricalSeries(
        'test_compressed_ephys_data',
        'an hypothetical source',
        H5DataIO(ephys_data, compress=True),
        electrode_table_region,
        timestamps=H5DataIO(ephys_timestamps, compress=True),
        resolution=0.001,
        comments=
        "This data was randomly generated with numpy, using 1234 as the seed",
        description="Random numbers generated with numpy.random.rand")
    f.add_acquisition(ephys_ts)

    spatial_ts = SpatialSeries(
        'test_compressed_spatial_timeseries',
        'a stumbling rat',
        H5DataIO(spatial_data, compress=True),
        'origin on x,y-plane',
        timestamps=H5DataIO(spatial_timestamps, compress=True),
        resolution=0.1,
        comments="This data was generated with numpy, using 1234 as the seed",
        description="This 2D Brownian process generated with "
        "np.cumsum(np.random.normal(size=(2, len(spatial_timestamps))), axis=-1).T"
    )
    f.add_acquisition(spatial_ts)
def setUpContainer(self):
    """Return the LFP container used by the roundtrip test."""
    # First positional argument is presumably the legacy 'source' string of
    # older pynwb constructors — confirm against the pinned pynwb version.
    return LFP('LFP roundtrip test', self.setUpElectricalSeriesContainers())
def copy_obj(obj_old, nwb_old, nwb_new):
    """ Creates a copy of obj_old.

    Dispatches on the exact type of obj_old (ElectricalSeries, DynamicTable,
    LFP, TimeSeries, DecompositionSeries, Spectrum) and rebuilds it with
    electrode regions pointing at nwb_new's tables. Returns None for any
    unhandled type.

    NOTE(review): nwb_old is never used in this function — confirm whether it
    can be dropped from callers or is kept for interface symmetry.
    """
    # ElectricalSeries --------------------------------------------------------
    if type(obj_old) is ElectricalSeries:
        # Channel count inferred from the electrode table's 'x' column length.
        nChannels = obj_old.electrodes.table['x'].data.shape[0]
        elecs_region = nwb_new.electrodes.create_region(
            name='electrodes',
            region=np.arange(nChannels).tolist(),
            description=''
        )
        return ElectricalSeries(
            name=obj_old.name,
            data=obj_old.data[:],
            electrodes=elecs_region,
            rate=obj_old.rate,
            description=obj_old.description
        )

    # DynamicTable ------------------------------------------------------------
    if type(obj_old) is DynamicTable:
        # NOTE(review): reuses obj_old's column objects rather than copying
        # them — the "copy" shares data with the source table; confirm intent.
        return DynamicTable(
            name=obj_old.name,
            description=obj_old.description,
            colnames=obj_old.colnames,
            columns=obj_old.columns,
        )

    # LFP ---------------------------------------------------------------------
    if type(obj_old) is LFP:
        obj = LFP(name=obj_old.name)
        assert len(obj_old.electrical_series) == 1, (
            'Expected precisely one electrical series, got %i!' %
            len(obj_old.electrical_series))
        els = list(obj_old.electrical_series.values())[0]
        nChannels = els.data.shape[1]

        ####
        # first check for a table among the new file's data_interfaces
        if els.electrodes.table.name in nwb_new.processing[
                'ecephys'].data_interfaces:
            LFP_dynamic_table = nwb_new.processing['ecephys'].data_interfaces[
                els.electrodes.table.name]
        else:
            # otherwise use the electrodes as the table
            LFP_dynamic_table = nwb_new.electrodes
        ####

        elecs_region = LFP_dynamic_table.create_region(
            name='electrodes',
            region=[i for i in range(nChannels)],
            description=els.electrodes.description
        )

        obj_ts = obj.create_electrical_series(
            name=els.name,
            comments=els.comments,
            conversion=els.conversion,
            data=els.data[:],
            description=els.description,
            electrodes=elecs_region,
            rate=els.rate,
            resolution=els.resolution,
            starting_time=els.starting_time
        )

        return obj

    # TimeSeries --------------------------------------------------------------
    if type(obj_old) is TimeSeries:
        return TimeSeries(
            name=obj_old.name,
            description=obj_old.description,
            data=obj_old.data[:],
            rate=obj_old.rate,
            resolution=obj_old.resolution,
            conversion=obj_old.conversion,
            starting_time=obj_old.starting_time,
            unit=obj_old.unit
        )

    # DecompositionSeries -----------------------------------------------------
    if type(obj_old) is DecompositionSeries:
        # Rebuild the bands table column-by-column with copied data.
        list_columns = []
        for item in obj_old.bands.columns:
            bp = VectorData(
                name=item.name,
                description=item.description,
                data=item.data[:]
            )
            list_columns.append(bp)
        bandsTable = DynamicTable(
            name=obj_old.bands.name,
            description=obj_old.bands.description,
            columns=list_columns,
            colnames=obj_old.bands.colnames
        )
        return DecompositionSeries(
            name=obj_old.name,
            data=obj_old.data[:],
            description=obj_old.description,
            metric=obj_old.metric,
            unit=obj_old.unit,
            rate=obj_old.rate,
            # source_timeseries=lfp,
            bands=bandsTable,
        )

    # Spectrum ----------------------------------------------------------------
    if type(obj_old) is Spectrum:
        file_elecs = nwb_new.electrodes
        nChannels = len(file_elecs['x'].data[:])
        elecs_region = file_elecs.create_region(
            name='electrodes',
            region=np.arange(nChannels).tolist(),
            description=''
        )
        # NOTE(review): 'power' is passed without the [:] slice used for other
        # data fields — may keep a reference to the old file's dataset; verify.
        return Spectrum(
            name=obj_old.name,
            frequencies=obj_old.frequencies[:],
            power=obj_old.power,
            electrodes=elecs_region
        )
# Assemble an NWB file for this recording directory and write it to disk.
nwbfile = NWBFile(session_start_time=session_start_time,
                  identifier=this_dir,
                  session_description='unknown')

device = nwbfile.create_device(name='Neuronexus Probe Buzsaki32/H32Package')
group = nwbfile.create_electrode_group(name='all_channels_group',
                                       description='all channels',
                                       device=device,
                                       location='unknown')
# One electrode row per channel; positions are unknown (nan).
for i in range(nchannels):
    nwbfile.add_electrode(np.nan, np.nan, np.nan,  # position
                          imp=np.nan,
                          location='unknown',
                          filtering='unknown',
                          group=group)
electrode_table_region = nwbfile.create_electrode_table_region(
    list(range(nchannels)), 'all electrodes')

# Amplifier data stored under an LFP container; aux/supply as plain TimeSeries.
electrical_series = ElectricalSeries(data=amp_data,
                                     rate=amp_fs,
                                     electrodes=electrode_table_region,
                                     name='amp_data')
nwbfile.add_acquisition(LFP(name='amp_data',
                            electrical_series=electrical_series))
nwbfile.add_acquisition(TimeSeries('auxiliary', data=aux_data, rate=amp_fs,
                                   unit='na'))
nwbfile.add_acquisition(TimeSeries('supply', data=supply_data, rate=amp_fs,
                                   unit='na'))

out_fname = this_dir + '.nwb'
with NWBHDF5IO(out_fname, 'w') as io:
    io.write(nwbfile)