def add_running_speed_to_nwbfile(nwbfile, running_speed, name='speed', unit='cm/s'):
    ''' Adds running speed data to an NWBFile as a timeseries in acquisition

    Parameters
    ----------
    nwbfile : pynwb.NWBFile
        File to which running speeds will be written
    running_speed : RunningSpeed
        Contains attributes 'values' and 'timestamps'
    name : str, optional
        used as name of timeseries object
    unit : str, optional
        SI units of running speed values

    Returns
    -------
    nwbfile : pynwb.NWBFile
    '''
    running_speed_series = pynwb.base.TimeSeries(
        name=name,
        data=running_speed.values,
        timestamps=running_speed.timestamps,
        unit=unit
    )

    # BUG FIX: reuse an existing 'running' module when present. The previous
    # version always created a new module, which raises in pynwb if a module
    # with the same name was already added (other writers in this file also
    # target 'running').
    if 'running' in nwbfile.processing:
        running_mod = nwbfile.processing['running']
    else:
        running_mod = ProcessingModule('running', 'Running speed processing module')
        nwbfile.add_processing_module(running_mod)

    running_mod.add_data_interface(running_speed_series)
    return nwbfile
def add_image(nwbfile, image_data, image_name, module_name, module_description, image_api=None):
    """Add a grayscale image to a processing module of an NWBFile.

    Parameters
    ----------
    nwbfile : pynwb.NWBFile
        File to which the image will be written.
    image_data : Union[sitk.Image, Image]
        Image to store; either a SimpleITK image or a project `Image`
        (with `data`, `spacing`, `unit` attributes).
    image_name : str
        Name under which the image is stored.
    module_name : str
        Name of the processing module to create or reuse.
    module_description : str
        Description used if the module must be created.
    image_api : optional
        Object providing `deserialize` for sitk images; defaults to ImageApi.

    Returns
    -------
    nwbfile : pynwb.NWBFile
    """
    description = '{} image at pixels/cm resolution'.format(image_name)

    if image_api is None:
        image_api = ImageApi

    if isinstance(image_data, sitk.Image):
        # BUG FIX: previously called ImageApi.deserialize directly, silently
        # ignoring any caller-supplied image_api.
        data, spacing, unit = image_api.deserialize(image_data)
    elif isinstance(image_data, Image):
        data = image_data.data
        spacing = image_data.spacing
        unit = image_data.unit
    else:
        raise ValueError("Not a supported image_data type: {}".format(type(image_data)))

    # Only square-pixel, 2D, millimeter-spaced images are supported.
    assert spacing[0] == spacing[1] and len(spacing) == 2 and unit == 'mm'

    if module_name not in nwbfile.modules:
        ophys_mod = ProcessingModule(module_name, module_description)
        nwbfile.add_processing_module(ophys_mod)
    else:
        ophys_mod = nwbfile.modules[module_name]

    # spacing is mm/pixel; GrayscaleImage.resolution expects cm-based units.
    image = GrayscaleImage(image_name, data, resolution=spacing[0] / 10, description=description)

    if 'images' not in ophys_mod.containers:
        images = Images(name='images')
        ophys_mod.add_data_interface(images)
    else:
        images = ophys_mod['images']

    images.add_image(image)

    return nwbfile
def create_module_from_activity(name, description, activity, frequency):
    """Build a ProcessingModule holding one firing-rate TimeSeries per electrode."""
    module = ProcessingModule(name=name, description=description)
    # One series per electrode; each row of the data is one trial.
    for electrode, rates in activity.items():
        series = pynwb.base.TimeSeries(
            'electrode' + str(electrode),
            rates,
            unit="spikes/s",
            rate=frequency,
            comments="Each row corresponds to a specific trial")
        module.add(series)
    return module
def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
    """Write this running-speed table into *nwbfile* under the 'running' module."""
    running_speed: pd.DataFrame = self.value

    # Filtered speeds are stored as 'speed'; raw ones as 'speed_unfiltered'.
    interface_name = "speed" if self._filtered else "speed_unfiltered"

    speed_series = TimeSeries(
        name=interface_name,
        data=running_speed['speed'].values,
        timestamps=running_speed['timestamps'].values,
        unit='cm/s')

    running_mod = nwbfile.processing.get('running')
    if running_mod is None:
        running_mod = ProcessingModule('running', 'Running speed processing module')
        nwbfile.add_processing_module(running_mod)

    running_mod.add_data_interface(speed_series)
    return nwbfile
def test_task_creator_create_task_and_write_to_nwb_successfully(self):
    """Create two tasks from mocked FlTask objects, write them to an NWB
    file, and verify they read back container-equal."""
    nwb_content = NWBFile(
        session_description='demonstrate external files',
        identifier='NWBE1',
        session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()),
        file_create_date=datetime(2017, 4, 15, 12, tzinfo=tzlocal()))
    processing_module = ProcessingModule('pm', 'none')

    mock_fl_task_0 = Mock(spec=FlTask)
    mock_fl_task_0.name = 'task_0'
    mock_fl_task_0.description = ''
    mock_fl_task_0.columns = [
        VectorData(name='task_name', description='', data=['Sleep']),
        VectorData(name='task_description', description='',
                   data=['The animal sleeps in a small empty box.']),
        VectorData(name='camera_id', description='', data=[[0]]),
        VectorData(name='task_epochs', description='', data=[[1, 3, 5]]),
    ]
    mock_fl_task_1 = Mock(spec=FlTask)
    mock_fl_task_1.name = 'task_1'
    mock_fl_task_1.description = ''
    mock_fl_task_1.columns = [
        VectorData(name='task_name', description='', data=['Stem+Leaf']),
        VectorData(name='task_description', description='',
                   data=['Spatial Bandit']),
        VectorData(name='camera_id', description='', data=[[1, 2]]),
        VectorData(name='task_epochs', description='', data=[[2, 4]]),
    ]

    task_0 = TaskCreator.create(mock_fl_task_0)
    task_1 = TaskCreator.create(mock_fl_task_1)

    processing_module.add(task_0)
    processing_module.add(task_1)
    nwb_content.add_processing_module(processing_module)

    # FIX: dropped the explicit close() that sat inside the `with` block —
    # the context manager already closes the file on exit, so the extra call
    # was redundant.
    with NWBHDF5IO(path='task.nwb', mode='w') as nwb_file_io:
        nwb_file_io.write(nwb_content)

    with NWBHDF5IO(path='task.nwb', mode='r') as nwb_file_io:
        nwb_content = nwb_file_io.read()
        self.assertContainerEqual(
            nwb_content.processing['pm'].data_interfaces['task_0'], task_0)
        self.assertContainerEqual(
            nwb_content.processing['pm'].data_interfaces['task_1'], task_1)

    os.remove('task.nwb')
def save(self, nwb_file):
    """Replace the 'spikes' processing module of *nwb_file* with current data,
    then let each registered processor rewrite its own module."""
    # Drop any stale module so the rewrite below does not collide on the name.
    if "spikes" in nwb_file.processing:
        nwb_file.processing.pop("spikes")

    spikes_module = ProcessingModule(name='spikes',
                                     description='All extracted spikes')
    spikes_table = DynamicTable.from_dataframe(self.spikes, name="spikes")
    spikes_module.add_container(spikes_table)
    nwb_file.add_processing_module(spikes_module)

    for processor in self.processors:
        processor.replace_module(nwb_file)
def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
    """Store stimulus times in a 'stimulus' processing module of *nwbfile*."""
    # The timestamp vector doubles as both the data and the time axis.
    series = TimeSeries(
        name="timestamps",
        data=self._value,
        timestamps=self._value,
        unit="s")

    module = ProcessingModule("stimulus", "Stimulus Times processing")
    module.add_data_interface(series)
    nwbfile.add_processing_module(module)
    return nwbfile
def create_module(self):
    """Build a ProcessingModule with one firing-rate TimeSeries per electrode."""
    module = ProcessingModule(name=self.name, description=self.description)
    for electrode in self.electrodes:
        # Each series is named after its electrode; rows are trials.
        series = pynwb.base.TimeSeries(
            'electrode' + str(electrode),
            self.activity[electrode],
            unit="spikes/s",
            rate=self.frequency,
            comments="Each row corresponds to a specific trial")
        module.add(series)
    return module
class ProcessingModuleCreator:
    """Thin wrapper around a ProcessingModule that logs, rather than raises,
    when incompatible data is inserted."""

    def __init__(self, name, description):
        # name/description are forwarded verbatim to the pynwb module.
        self.processing_module = ProcessingModule(name, description)

    def insert(self, data):
        """Add *data* to the wrapped module, swallowing type errors."""
        try:
            self.processing_module.add(data)
        except TypeError as err:
            # log error instead
            logger.error('Inserting data into processing module has failed: ' + str(err))
def append_spike_times(input_nwb_path: PathLike,
                       sweep_spike_times: Dict[int, List[float]],
                       output_nwb_path: Optional[PathLike] = None):
    """
    Appends spiketimes to an nwb2 file

    Parameters
    ----------
    input_nwb_path: location of input nwb file without spiketimes
    sweep_spike_times: Dict of sweep_num: spiketimes
    output_nwb_path: optional location to write new nwb file with spiketimes,
        otherwise appends spiketimes to input file

    Raises
    ------
    ValueError
        If the file already contains a 'spikes' processing module.
    """
    # Copy to new location
    if output_nwb_path and output_nwb_path != input_nwb_path:
        shutil.copy(input_nwb_path, output_nwb_path)
        nwb_path = output_nwb_path
    else:
        nwb_path = input_nwb_path

    nwb_io = pynwb.NWBHDF5IO(nwb_path, mode='a', load_namespaces=True)
    try:
        nwbfile = nwb_io.read()

        spikes_module = "spikes"
        # Add spikes only if not previously added
        if spikes_module not in nwbfile.processing.keys():
            spike_module = ProcessingModule(name=spikes_module,
                                            description='detected spikes')
            for sweep_num, spike_times in sweep_spike_times.items():
                # Wrap with H5DataIO so the dataset is written compressed.
                wrapped_spike_times = H5DataIO(data=np.asarray(spike_times),
                                               compression=True)
                ts = TimeSeries(timestamps=wrapped_spike_times,
                                unit='seconds',
                                data=wrapped_spike_times,
                                name=f"Sweep_{sweep_num}")
                spike_module.add_data_interface(ts)

            nwbfile.add_processing_module(spike_module)
            nwb_io.write(nwbfile)
        else:
            raise ValueError("Cannot add spikes times to the nwb file: "
                             "spikes times already exist!")
    finally:
        # BUG FIX: previously the handle leaked when the ValueError above was
        # raised, because close() was only reached on the success path.
        nwb_io.close()
def add_spike_times(self, sweep_spike_times):
    """Add a 'spikes' processing module (one TimeSeries per sweep) to
    self.nwbfile and write the file to self.nwb_file_name.

    Parameters
    ----------
    sweep_spike_times : dict
        Maps sweep number -> sequence of spike timestamps.
    """
    spike_module = ProcessingModule(name='spikes',
                                    description='detected spikes')
    for sweep_num, spike_times in sweep_spike_times.items():
        ts = TimeSeries(timestamps=spike_times, name=f"Sweep_{sweep_num}")
        spike_module.add_data_interface(ts)

    self.nwbfile.add_processing_module(spike_module)

    # BUG FIX: use a context manager so the HDF5 handle is closed even when
    # write() raises; the previous manual open/close leaked it on failure.
    with pynwb.NWBHDF5IO(self.nwb_file_name, 'w') as io:
        io.write(self.nwbfile)
def add_stimulus_timestamps(nwbfile, stimulus_timestamps, module_name='stimulus'):
    """Record stimulus timestamps in a processing module of *nwbfile*.

    The timestamp vector doubles as both the data and the time axis.
    """
    timestamps_series = TimeSeries(
        name='timestamps',
        data=stimulus_timestamps,
        timestamps=stimulus_timestamps,
        unit='s')

    module = ProcessingModule(module_name, 'Stimulus Times processing')
    nwbfile.add_processing_module(module)
    module.add_data_interface(timestamps_series)

    return nwbfile
def add_running_speed_to_nwbfile(nwbfile, running_speed, name='speed',
                                 unit='cm/s', from_dataframe=False):
    '''Add running speed data to an NWBFile as a timeseries.

    Parameters
    ----------
    nwbfile : pynwb.NWBFile
        File to which running speeds will be written
    running_speed : Union[RunningSpeed, pd.DataFrame]
        Either a RunningSpeed object or pandas DataFrame.
        Contains attributes 'values' and 'timestamps'
    name : str, optional
        Used as name of timeseries object
    unit : str, optional
        SI units of running speed values
    from_dataframe : bool, optional
        Whether `running_speed` is a dataframe or not. Default is False.

    Returns
    -------
    nwbfile : pynwb.NWBFile
    '''
    if from_dataframe:
        # DataFrame input: pull the columns by name.
        data = running_speed['speed'].values
        timestamps = running_speed['timestamps'].values
    else:
        # RunningSpeed input: read the attributes directly.
        data = running_speed.values
        timestamps = running_speed.timestamps

    speed_series = pynwb.base.TimeSeries(name=name,
                                         data=data,
                                         timestamps=timestamps,
                                         unit=unit)

    # Reuse an existing 'running' module if one was already added.
    running_mod = nwbfile.processing.get('running')
    if running_mod is None:
        running_mod = ProcessingModule('running', 'Running speed processing module')
        nwbfile.add_processing_module(running_mod)

    running_mod.add_data_interface(speed_series)
    return nwbfile
def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
    """Write lick events (frame indices keyed by timestamps) to *nwbfile*."""
    licks = self.value
    lick_ts = TimeSeries(
        name='licks',
        data=licks['frame'].values,
        timestamps=licks['timestamps'].values,
        description=('Timestamps and stimulus presentation '
                     'frame indices for lick events'),
        unit='N/A')

    # Add lick interface to nwb file, by way of a processing module:
    module = ProcessingModule('licking', 'Licking behavior processing module')
    module.add_data_interface(lick_ts)
    nwbfile.add_processing_module(module)

    return nwbfile
def make_file(curr_dir, identifier, age, genotype, sex, date):
    """
    :param curr_dir: current directory
    :param identifier: ID
    :param age: age in days
    :param genotype: genotype
    :param sex: subject sex
    :param date: list [year, month, day]
    :return:
    """
    nwb_file = create(identifier, age, genotype, sex, date)

    behavior_module = ProcessingModule('behavior', 'behavior module')
    nwb_file.add_processing_module(behavior_module)

    # Each writer targets either the behavior module or the file itself;
    # call order is preserved from the original implementation.
    writers = [
        (eye, behavior_module),
        (face_nwb, behavior_module),
        (lick_piezo, nwb_file),
        (lick_times, behavior_module),
        (spontaneous, nwb_file),
        (wheel, nwb_file),
        (wheel_moves, behavior_module),
        (trial_table, nwb_file),
        (sparse_noise, nwb_file),
        (passive_stimulus, nwb_file),
        (neural_data, nwb_file),
    ]
    for writer, target in writers:
        writer(target)

    filename = curr_dir + '/Files/Steinmetz2019_' + identifier + '.nwb'
    with NWBHDF5IO(filename, 'w') as io:
        io.write(nwb_file)
    print('Saved', filename)
def add_licks(nwbfile, licks):
    """Attach lick frame indices/timestamps to *nwbfile* under 'licking'."""
    lick_ts = TimeSeries(
        name='licks',
        data=licks.frame.values,
        timestamps=licks.timestamps.values,
        description=('Timestamps and stimulus presentation '
                     'frame indices for lick events'),
        unit='N/A')

    # Route the series into the file via a dedicated processing module.
    module = ProcessingModule('licking', 'Licking behavior processing module')
    module.add_data_interface(lick_ts)
    nwbfile.add_processing_module(module)

    return nwbfile
def add_licks(nwbfile, licks):
    """Store lick event times in a BehavioralEvents interface on *nwbfile*."""
    # Lick times serve as both the data and the time axis of the series.
    event_series = TimeSeries(
        name='timestamps',
        data=licks.time.values,
        timestamps=licks.time.values,
        unit='s')

    # Wrap the event series in a behavioral-events interface...
    licks_interface = BehavioralEvents([event_series], 'licks')

    # ...and attach it through a 'licking' processing module.
    licks_mod = ProcessingModule('licking', 'Licking behavior processing module')
    licks_mod.add_data_interface(licks_interface)
    nwbfile.add_processing_module(licks_mod)

    return nwbfile
def add_running_acquisition_to_nwbfile(nwbfile, running_acquisition_df: pd.DataFrame):
    """Write raw running-wheel acquisition signals to *nwbfile*.

    'dx' goes into the 'running' processing module; the raw encoder voltages
    'v_sig' and 'v_in' are stored as acquisition timeseries.
    """
    times = running_acquisition_df.index.values

    # (column, unit, description) for each signal in the dataframe.
    specs = [
        ('dx', 'cm',
         'Running wheel angular change, computed during data collection'),
        ('v_sig', 'V',
         'Voltage signal from the running wheel encoder'),
        ('v_in', 'V',
         'The theoretical maximum voltage that the running wheel encoder '
         'will reach prior to "wrapping". This should '
         'theoretically be 5V (after crossing 5V goes to 0V, or '
         'vice versa). In practice the encoder does not always '
         'reach this value before wrapping, which can cause '
         'transient spikes in speed at the voltage "wraps".'),
    ]
    series = {
        column: TimeSeries(
            name=column,
            data=running_acquisition_df[column].values,
            timestamps=times,
            unit=unit,
            description=description)
        for column, unit, description in specs
    }

    running_mod = nwbfile.processing.get('running')
    if running_mod is None:
        running_mod = ProcessingModule('running', 'Running speed processing module')
        nwbfile.add_processing_module(running_mod)

    running_mod.add_data_interface(series['dx'])
    nwbfile.add_acquisition(series['v_sig'])
    nwbfile.add_acquisition(series['v_in'])

    return nwbfile
def add_image_to_nwb(nwbfile: NWBFile, image_data: Image, image_name: str):
    """
    Adds image given by image_data with name image_name to nwbfile

    Parameters
    ----------
    nwbfile
        nwbfile to add image to
    image_data
        The image data
    image_name
        Image name

    Returns
    -------
    None
    """
    module_name = 'ophys'
    description = '{} image at pixels/cm resolution'.format(image_name)
    data, spacing, unit = image_data

    # Only square-pixel, 2D, millimeter-spaced images are supported.
    assert spacing[0] == spacing[1] and len(spacing) == 2 and unit == 'mm'

    ophys_mod = nwbfile.processing.get(module_name)
    if ophys_mod is None:
        ophys_mod = ProcessingModule(module_name, 'Ophys processing module')
        nwbfile.add_processing_module(ophys_mod)

    # spacing is mm/pixel; resolution is expressed on a cm basis.
    image = GrayscaleImage(image_name, data,
                           resolution=spacing[0] / 10,
                           description=description)

    if 'images' in ophys_mod.containers:
        images = ophys_mod['images']
    else:
        images = Images(name='images')
        ophys_mod.add_data_interface(images)

    images.add_image(image)
def add_motion_correction(nwbfile, motion_correction):
    """Attach x/y motion-correction traces, aligned to the ophys timestamps,
    as a 'motion_correction' processing module."""
    twop_module = nwbfile.modules['two_photon_imaging']
    # Reuse the dF/F trace timestamps so the correction series stays aligned.
    ophys_timestamps = twop_module.get_data_interface(
        'dff').roi_response_series['traces'].timestamps

    motion_module = ProcessingModule('motion_correction',
                                     'Motion Correction processing module')
    for axis in ('x', 'y'):
        motion_module.add_data_interface(
            TimeSeries(name=axis,
                       data=motion_correction[axis].values,
                       timestamps=ophys_timestamps,
                       unit='pixels'))

    nwbfile.add_processing_module(motion_module)
def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
    """Write reward volumes and autoreward flags to a 'rewards' module."""
    # Passive sessions have no rewards; leave the file untouched.
    if len(self.value['timestamps']) == 0:
        return nwbfile

    volume_ts = TimeSeries(
        name='volume',
        data=self.value['volume'].values,
        timestamps=self.value['timestamps'].values,
        unit='mL')
    # The autoreward flags share the volume series' time axis.
    autorewarded_ts = TimeSeries(
        name='autorewarded',
        data=self.value['autorewarded'].values,
        timestamps=volume_ts.timestamps,
        unit='mL')

    rewards_mod = ProcessingModule('rewards', 'Licking behavior processing module')
    for ts in (volume_ts, autorewarded_ts):
        rewards_mod.add_data_interface(ts)
    nwbfile.add_processing_module(rewards_mod)

    return nwbfile
def test_processing_module(self):
    """Render a behavior ProcessingModule through the default vis spec."""
    session_start = datetime(2020, 1, 29, 11, tzinfo=tzlocal())
    nwbfile = NWBFile(session_description='Test Session',
                      identifier='NWBPM',
                      session_start_time=session_start)

    behavior = ProcessingModule(name='behavior',
                                description='preprocessed behavioral data')
    nwbfile.add_processing_module(behavior)
    nwbfile.processing['behavior'].add(self.position)

    processing_module(nwbfile.processing['behavior'], default_neurodata_vis_spec)
def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
    """Write lick events to *nwbfile*; no-op when there are no licks."""
    # Passive sessions produce no lick data; leave the file untouched.
    if len(self.value['frame']) == 0:
        return nwbfile

    lick_ts = TimeSeries(
        name='licks',
        data=self.value['frame'].values,
        timestamps=self.value['timestamps'].values,
        description=('Timestamps and stimulus presentation '
                     'frame indices for lick events'),
        unit='N/A')

    # Add lick interface to nwb file, by way of a processing module:
    module = ProcessingModule('licking', 'Licking behavior processing module')
    module.add_data_interface(lick_ts)
    nwbfile.add_processing_module(module)

    return nwbfile
def test_read_nwb_nwb_image_series_successfully(self):
    """Write an NwbImageSeries (as both a stimulus template and a behavioral
    event series) to disk and verify it reads back container-equal."""
    device_1 = Device('device1')
    device_2 = Device('device2')
    mock_timestamps = [1, 2, 3]
    mock_external_file = ['some file']

    nwb_image_series = NwbImageSeries(name='NwbImageSeries1',
                                      timestamps=mock_timestamps,
                                      external_file=mock_external_file,
                                      devices=[device_1, device_2])

    behavioral_time_series = BehavioralEvents(name="BehavioralTimeSeries")
    behavioral_time_series.add_timeseries(nwb_image_series)

    processing_module = ProcessingModule(name='ProcessingModule', description='')
    processing_module.add_data_interface(behavioral_time_series)
    self.nwb_file_content.add_processing_module(processing_module)

    self.nwb_file_content.add_stimulus_template(nwb_image_series)

    # BUG FIX: use a context manager so the write handle is closed even if
    # write() raises; the previous manual open/close leaked it on failure.
    with NWBHDF5IO('nwb_image_series.nwb', mode='w') as nwb_file_handler:
        nwb_file_handler.write(self.nwb_file_content)

    self.assertTrue(os.path.exists('nwb_image_series.nwb'))
    with pynwb.NWBHDF5IO('nwb_image_series.nwb', 'r', load_namespaces=True) as nwb_file_handler:
        nwb_file = nwb_file_handler.read()

        self.assertContainerEqual(
            nwb_file.stimulus_template['NwbImageSeries1'], nwb_image_series)
        self.assertContainerEqual(
            nwb_file.processing['ProcessingModule'].data_interfaces[
                'BehavioralTimeSeries'].time_series['NwbImageSeries1'],
            nwb_image_series)

    self.delete_nwb('nwb_image_series')
def setUp(self):
    """Build a minimal NWBFile plus one processing module for DIO tests."""
    self.nwb_content = NWBFile(
        session_description='session_description',
        experimenter='experimenter_name',
        lab='lab',
        institution='institution',
        session_start_time=start_time,
        identifier='identifier',
        experiment_description='experiment_description')

    module = ProcessingModule(name='test_processing_module_name',
                              description='test_description')
    self.nwb_content.add_processing_module(module)

    self.behavioral_events = BehavioralEvents(name='test_BehavioralEvents_name')
    self.dio_injector = DioInjector(self.nwb_content)
def test_processing_module(self):
    """Render a behavior ProcessingModule through the default vis spec."""
    nwbfile = NWBFile(
        session_description="Test Session",
        identifier="NWBPM",
        session_start_time=datetime(2020, 1, 29, 11, tzinfo=tzlocal()),
    )

    nwbfile.add_processing_module(
        ProcessingModule(name="behavior",
                         description="preprocessed behavioral data"))
    nwbfile.processing["behavior"].add(self.position)

    processing_module(nwbfile.processing["behavior"], default_neurodata_vis_spec)
def create_module(self):
    """Bundle TRF results, fit parameters, and processed traces into a
    ProcessingModule as three DynamicTables."""
    parameters = pd.DataFrame(self.parameters)
    processed_trf = pd.DataFrame({
        "filtered_trf": self.filtered_trf,
        "cleaned_trf": self.cleaned_trf,
        "electrodes": self.list_electrodes,
    })

    trf_module = ProcessingModule(name=self.name, description=self.description)
    # (dataframe, table name) pairs, added in a fixed order.
    tables = [
        (self.trf.reset_index(), self.name),
        (parameters.set_index("electrode").reset_index(), "trf_parameters"),
        (processed_trf.reset_index(), "processed_trf"),
    ]
    for frame, table_name in tables:
        trf_module.add(DynamicTable.from_dataframe(frame, name=table_name))
    return trf_module
def test_dummy(tmpdir_factory):
    """Round-trip a DummyClass container through an NWB file and verify
    its array and float survive."""
    arr = np.linspace(0, 1, 300)
    dummy_instance = DummyClass(name='my_instance',
                                source='imagination',
                                afloat=5.0,
                                anarray=arr)
    module = ProcessingModule(name='my_module',
                              source='imagination',
                              description='foo',
                              containers=[dummy_instance])
    nwbfile = NWBFile('name', 'source', '?', datetime.now(), modules=[module])

    target = tmpdir_factory.mktemp('data').join('test.nwb')
    roundtrip_test(nwbfile, target)

    stored = nwbfile.modules['my_module'].containers[0]
    assert (np.array_equal(stored.anarray, arr))
    assert (np.array_equal(stored.afloat, 5.0))
def test_read_nwb_associated_files_successfully(self):
    """Write an AssociatedFiles object into a processing module and verify
    its fields survive a file round trip."""
    associated_files = AssociatedFiles(
        name='file1',
        description='description of file1',
        content='1 2 3 content of file test',
        task_epochs='1, 2')
    self.nwb_file_content.add_processing_module(
        ProcessingModule('associated_files', 'description_of_associaed_files'))
    self.nwb_file_content.processing['associated_files'].add(associated_files)

    # BUG FIX: use a context manager so the write handle is closed even if
    # write() raises; the previous manual open/close leaked it on failure.
    with NWBHDF5IO('associated_files.nwb', mode='w') as nwb_file_handler:
        nwb_file_handler.write(self.nwb_file_content)

    self.assertTrue(os.path.exists('associated_files.nwb'))
    with pynwb.NWBHDF5IO('associated_files.nwb', 'r') as nwb_file_handler:
        nwb_file = nwb_file_handler.read()
        file1 = nwb_file.processing['associated_files']['file1']

        self.assertIsInstance(file1, AssociatedFiles)
        self.assertEqual('file1', file1.name)
        self.assertEqual('description of file1', file1.fields['description'])
        self.assertEqual('1 2 3 content of file test', file1.fields['content'])
        self.assertEqual('1, 2', file1.fields['task_epochs'])

    self.delete_nwb('associated_files')
def add_rewards(nwbfile, rewards_df):
    """Add reward volume and autoreward TimeSeries under a 'rewards' module."""
    volume_ts = TimeSeries(name='volume',
                           data=rewards_df.volume.values,
                           timestamps=rewards_df['timestamps'].values,
                           unit='mL')
    # The autoreward flags share the volume series' time axis.
    autorewarded_ts = TimeSeries(name='autorewarded',
                                 data=rewards_df.autorewarded.values,
                                 timestamps=volume_ts.timestamps,
                                 unit='mL')

    rewards_mod = ProcessingModule('rewards', 'Licking behavior processing module')
    for ts in (volume_ts, autorewarded_ts):
        rewards_mod.add_data_interface(ts)
    nwbfile.add_processing_module(rewards_mod)

    return nwbfile