def test_dataframe_roundtrip(self):
    """Round-trip a DataFrame through TimeIntervals and compare with the source."""
    source = self.get_dataframe()
    intervals = TimeIntervals.from_dataframe(source, name='test epochs')
    result = intervals.to_dataframe()
    # The TimeSeries object reference must survive the round trip as the
    # identical object, not a copy.
    self.assertIs(result.loc[3, 'timeseries'][1], source.loc[3, 'timeseries'][1])
    self.assertEqual(result.loc[2, 'foo'], source.loc[2, 'foo'])
def test_from_dataframe(self):
    """from_dataframe should preserve column order and column data."""
    frame = pd.DataFrame(
        {'start_time': [1., 2., 3.],
         'stop_time': [2., 3., 4.],
         'label': ['a', 'b', 'c']},
        columns=('start_time', 'stop_time', 'label'),
    )
    intervals = TimeIntervals.from_dataframe(frame, name='ti_name')
    self.assertEqual(intervals.colnames, ('start_time', 'stop_time', 'label'))
    self.assertEqual(intervals.columns[0].data, [1.0, 2.0, 3.0])
    self.assertEqual(intervals.columns[2].data, ['a', 'b', 'c'])
def inject_all(self, valid_times, nwb_content):
    """Build an 'mda_valid_times' TimeIntervals from valid_times and attach it
    to nwb_content."""
    container = TimeIntervals(
        name='mda_valid_times',
        description='Valid times based on mda timestamps',
    )
    # Delegate per-interval insertion to self.inject.
    for interval in valid_times:
        self.inject(interval, container)
    nwb_content.add_time_intervals(container)
def test_create_custom_intervals(self):
    """A custom TimeIntervals table can be added and retrieved by name."""
    word_frame = pd.DataFrame({
        'start_time': [.1, 2.],
        'stop_time': [.8, 2.3],
        'label': ['hello', 'there']
    })
    words = TimeIntervals.from_dataframe(word_frame, name='words')
    self.nwbfile.add_time_intervals(words)
    self.assertEqual(self.nwbfile.intervals['words'], words)
def run_conversion(self, nwbfile: NWBFile, metadata: dict = None,
                   stub_test: bool = False):
    """Add TTL-derived stimulus intervals and the raw pressure trace to nwbfile.

    Parameters
    ----------
    nwbfile : NWBFile
        Target file; gets a 'stimulus' processing module and a stimulus series.
    metadata : dict, optional
        Unused here; kept for interface compatibility with other converters.
    stub_test : bool
        If True, convert only a subset recording (see self.subset_recording).
    """
    conditions = intervals_from_traces(self.recording_extractor)
    mech_stim = TimeIntervals(
        name='MechanicalStimulus',
        description=
        "Activation times inferred from TTL commands for mechanical stimulus."
    )
    laser_stim = TimeIntervals(
        name='LaserStimulus',
        description=
        "Activation times inferred from TTL commands for cortical laser stimulus."
    )
    # conditions[0] -> mechanical, conditions[1] -> laser; each row is
    # presumably a (start, stop) pair — TODO confirm against intervals_from_traces.
    for j, table in enumerate([mech_stim, laser_stim]):
        for row in conditions[j]:
            table.add_row(
                dict(start_time=float(row[0]), stop_time=float(row[1])))
    # TODO - these really should be IntervalSeries added to stimulus, rather than processing
    check_module(nwbfile, 'stimulus', "Contains stimuli data.").add(mech_stim)
    check_module(nwbfile, 'stimulus', "Contains stimuli data.").add(laser_stim)
    if stub_test or self.subset_channels is not None:
        recording = self.subset_recording(stub_test=stub_test)
    else:
        recording = self.recording_extractor
    # Pressure values: channel 0 of the recording is the pressure sensor trace.
    nwbfile.add_stimulus(
        TimeSeries(
            name='MechanicalPressure',
            data=H5DataIO(recording.get_traces(0), compression="gzip"),
            # Unit is read from the full extractor even for subset recordings.
            unit=self.recording_extractor._channel_smrxinfo[0]['unit'],
            conversion=recording.get_channel_property(0, 'gain'),
            rate=recording.get_sampling_frequency(),
            description=
            "Pressure sensor attached to the mechanical stimulus used to repeatedly evoke spiking."
        ))
def textgriddf_converter(text_df):
    """Converts data into TimeIntervals

    For a given DataFrame this function converts the data into TimeIntervals

    Parameters
    ----------
    text_df : pandas.DataFrame
        Data related to an item; must have 'text', 'xmin' and 'xmax' columns.

    Returns
    ----------
    pynwb.epoch.TimeIntervals
    """
    textgrid_sentences = TimeIntervals(
        name='sentences',
        description='desc'
    )
    textgrid_sentences.add_column('label', 'text of sentences')
    # Iterate by position. The original looped over text_df.index (labels)
    # but indexed with .iloc (positions), which raises or returns wrong rows
    # for any DataFrame whose index is not 0..n-1 (e.g. after filtering).
    for pos in range(len(text_df)):
        row = text_df.iloc[pos]
        textgrid_sentences.add_interval(label=row['text'],
                                        start_time=float(row['xmin']),
                                        stop_time=float(row['xmax']))
    return textgrid_sentences
def setUp(self):
    super().setUp()
    # Populate the file with four contiguous 2-second intervals plus one
    # categorical column for the widget to select from.
    intervals = TimeIntervals(name='intervals',
                              description='experimental intervals')
    for begin in (0.0, 2.0, 4.0, 6.0):
        intervals.add_interval(start_time=begin, stop_time=begin + 2.0)
    intervals.add_column(name='var1', data=['a', 'b', 'a', 'b'],
                         description='no description')
    self.nwbfile.add_time_intervals(intervals)
    self.widget = ExtendedTimeIntervalSelector(input_data=self.nwbfile.units)
class TestAlignMultiTraceTimeSeriesByTrials(unittest.TestCase):
    """Tests for the multi-trace trial-alignment widgets, both the
    constant-rate and the explicit-timestamps variants."""

    def setUp(self):
        # 100 samples x 10 traces of random data shared by both TimeSeries.
        data = np.random.rand(100, 10)
        # Irregular timestamps: successive gaps drawn from [0.75, 1.0).
        timestamps = [0.0]
        for _ in range(data.shape[0]):
            timestamps.append(timestamps[-1] + 0.75 + 0.25 * np.random.rand())
        self.ts_rate = TimeSeries(
            name="test_timeseries_rate",
            data=data,
            unit="m",
            starting_time=0.0,
            rate=1.0,
        )
        self.ts_timestamps = TimeSeries(
            name="test_timeseries_timestamps",
            data=data,
            unit="m",
            timestamps=np.array(timestamps),
        )
        # Ten random trials of roughly 6-7 s spread over [0, 75).
        self.time_intervals = TimeIntervals(name="Test Time Interval")
        n_intervals = 10
        for start_time in np.linspace(0, 75, n_intervals + 1):
            if start_time < 75:
                stt = start_time + np.random.rand()
                spt = stt + 7 - np.random.rand()
                self.time_intervals.add_interval(start_time=stt, stop_time=spt)
        # Two categorical columns consumed by the group-selection controls.
        self.time_intervals.add_column(
            name="temp", description="desc",
            data=np.random.randint(2, size=n_intervals)
        )
        self.time_intervals.add_column(
            name="temp2",
            description="desc",
            data=np.random.randint(10, size=n_intervals),
        )

    def test_align_by_timestamps(self):
        # Widget built on the timestamps-based series.
        amt = AlignMultiTraceTimeSeriesByTrialsVariable(
            time_series=self.ts_timestamps, trials=self.time_intervals
        )
        gas = amt.controls['gas']
        # Select the first categorical column and its first group value.
        gas.group_dd.value = list(gas.categorical_columns.keys())[0]
        gas.group_sm.value = (gas.group_sm.options[0],)
        # Expect one plotted trace per selected group value.
        fig = amt.children[-1]
        assert len(fig.data) == len(gas.group_sm.value)

    def test_align_by_rate(self):
        # Widget built on the fixed-rate series.
        amt = AlignMultiTraceTimeSeriesByTrialsConstant(
            time_series=self.ts_rate, trials=self.time_intervals
        )
        gas = amt.controls['gas']
        gas.group_dd.value = list(gas.categorical_columns)[0]
        gas.group_sm.value = (gas.group_sm.options[0],)
        fig = amt.children[-1]
        assert len(fig.data) == len(gas.group_sm.value)
def addContainer(self, nwbfile):
    """ Add the test epochs with TimeSeries objects to the given NWBFile """
    tsa, tsb = [
        TimeSeries(name='a', data=np.arange(11), unit='flubs',
                   timestamps=np.linspace(0, 1, 11)),
        TimeSeries(name='b', data=np.arange(13), unit='flubs',
                   timestamps=np.linspace(0.1, 5, 13)),
    ]

    nwbfile.add_acquisition(tsa)
    nwbfile.add_acquisition(tsb)

    # Build the epochs table directly from a DataFrame. Each 'timeseries'
    # entry is a list of (idx_start, count, TimeSeries) tuples — presumably
    # the region-reference convention of the epochs table; TODO confirm.
    nwbfile.epochs = TimeIntervals.from_dataframe(pd.DataFrame({
        'foo': [1, 2, 3, 4],
        'bar': ['fish', 'fowl', 'dog', 'cat'],
        'start_time': [0.2, 0.25, 0.30, 0.35],
        'stop_time': [0.25, 0.30, 0.40, 0.45],
        'timeseries': [[(2, 1, tsa)], [(3, 1, tsa)], [(3, 1, tsa)],
                       [(4, 1, tsa)]],
        'tags': [[''], [''], ['fizz', 'buzz'], ['qaz']]
    }), 'epochs', columns=[
        {
            'name': 'foo',
            'description': 'a column of integers'
        },
        {
            'name': 'bar',
            'description': 'a column of strings'
        },
    ])

    # reset the thing
    self.container = nwbfile.epochs
def out_close(self, val):
    """Close the detection dialog; when val == 1, persist the auto-detected
    speaker/microphone event times into the NWB file on disk."""
    self.value = val
    if val == 1:  # finished auto-detection of events
        self.stimTimes = self.thread.stimTimes
        self.respTimes = self.thread.respTimes
        fname = self.parent.model.fullpath
        # Reopen the file read/write so the new interval tables can be saved.
        with NWBHDF5IO(fname, 'r+', load_namespaces=True) as io:
            nwb = io.read()
            # Speaker stimuli times: flat array reshaped to (n, 2) start/stop pairs.
            ti_stim = TimeIntervals(name='TimeIntervals_speaker')
            times = self.stimTimes.reshape((-1, 2)).astype('float')
            for start, stop in times:
                ti_stim.add_interval(start, stop)
            # Microphone responses times
            ti_resp = TimeIntervals(name='TimeIntervals_mic')
            times = self.respTimes.reshape((-1, 2)).astype('float')
            for start, stop in times:
                ti_resp.add_interval(start, stop)
            # Add both to file
            nwb.add_time_intervals(ti_stim)
            nwb.add_time_intervals(ti_resp)
            # Write file
            io.write(nwb)
    # NOTE(review): source was collapsed to one line; accept() is placed
    # unconditionally so the dialog closes for every val — confirm intent.
    self.accept()
class SpatialSeriesTrialsAlign(unittest.TestCase):
    """Smoke tests for trial_align_spatial_series with rate-based and
    timestamps-based SpatialSeries inputs."""

    def setUp(self) -> None:
        # 100 samples of 3-D position data shared by both series.
        data = np.random.rand(100, 3)
        # Irregular timestamps: successive gaps drawn from [0.75, 1.0).
        timestamps = [0.0]
        for _ in range(data.shape[0]):
            timestamps.append(timestamps[-1] + 0.75 + 0.25 * np.random.rand())
        self.spatial_series_rate = SpatialSeries(
            name="position_rate",
            data=data,
            starting_time=0.0,
            rate=1.0,
            reference_frame="starting gate",
        )
        self.spatial_series_ts = SpatialSeries(
            name="position_ts",
            data=data,
            timestamps=np.array(timestamps),
            reference_frame="starting gate",
        )
        # Ten random trials of roughly 6-7 s spread over [0, 75).
        self.time_intervals = TimeIntervals(name="Test Time Interval")
        n_intervals = 10
        for start_time in np.linspace(0, 75, n_intervals + 1):
            if start_time < 75:
                stt = start_time + np.random.rand()
                spt = stt + 7 - np.random.rand()
                self.time_intervals.add_interval(start_time=stt, stop_time=spt)
        # Two categorical columns attached to the trials table.
        self.time_intervals.add_column(name="temp", description="desc",
                                       data=np.random.randint(
                                           2, size=n_intervals))
        self.time_intervals.add_column(
            name="temp2",
            description="desc",
            data=np.random.randint(10, size=n_intervals),
        )

    def test_spatial_series_trials_align_rate(self):
        # Should run without raising for a rate-based series.
        trial_align_spatial_series(self.spatial_series_rate, self.time_intervals)

    def test_spatial_series_trials_align_ts(self):
        # Should run without raising for a timestamps-based series.
        trial_align_spatial_series(self.spatial_series_ts, self.time_intervals)
def test_align_by_time_intervals_Nonetrials_select(self):
    """Alignment covers every interval row when no row selection is given."""
    intervals = TimeIntervals(name='Test Time Interval')
    for begin, finish in ((21.0, 28.0), (22.0, 26.0), (22.0, 28.4)):
        intervals.add_interval(start_time=begin, stop_time=finish)
    aligned = align_by_time_intervals(self.nwbfile.units,
                                      index=1,
                                      intervals=intervals,
                                      stop_label=None,
                                      before=20.,
                                      after=30.)
    expected = [
        np.array([-18.8, -18., 4., 5.]),
        np.array([-19.8, -19., 3., 4.]),
        np.array([-19.8, -19., 3., 4.])
    ]
    np.testing.assert_array_equal(aligned, expected)
def test_align_by_time_intervals(self):
    """Alignment restricted to the first two rows via rows_select."""
    intervals = TimeIntervals(name='Test Time Interval')
    for begin, finish in ((21.0, 28.0), (22.0, 26.0), (22.0, 28.4)):
        intervals.add_interval(start_time=begin, stop_time=finish)
    # snake_case locals replace the original ATI / ComparedtoATI names.
    aligned = align_by_time_intervals(self.nwbfile.units,
                                      index=1,
                                      intervals=intervals,
                                      stop_label=None,
                                      before=20.,
                                      after=30.,
                                      rows_select=[0, 1])
    expected = [
        np.array([-18.8, -18., 4., 5.]),
        np.array([-19.8, -19., 3., 4.])
    ]
    np.testing.assert_array_equal(aligned, expected)
def test_align_by_time_intervals(self):
    """Alignment with explicit start/end offsets and a two-row subset."""
    intervals = TimeIntervals(name="Test Time Interval")
    for t0, t1 in ((21.0, 28.0), (22.0, 26.0), (22.0, 28.4)):
        intervals.add_interval(start_time=t0, stop_time=t1)
    aligned = align_by_time_intervals(
        self.nwbfile.units,
        index=1,
        intervals=intervals,
        stop_label=None,
        start=-20.0,
        end=30.0,
        rows_select=[0, 1],
    )
    expected = [
        np.array([-18.8, -18.0, 4.0, 5.0]),
        np.array([-19.8, -19.0, 3.0, 4.0]),
    ]
    np.testing.assert_array_equal(aligned, expected)
from pynwb import NWBHDF5IO, NWBFile
from datetime import datetime
from pynwb.epoch import TimeIntervals
from ndx_speech import Transcription
import pandas as pd

# Build a small 'words' table of transcribed speech intervals.
words = TimeIntervals.from_dataframe(pd.DataFrame({
    'start_time': [.1, 2.],
    'stop_time': [.8, 2.3],
    'label': ['hello', 'there']
}), name='words')

nwbfile = NWBFile('aa', 'aa', datetime.now().astimezone())
nwbfile.add_acquisition(Transcription(words=words))

# Write the file (caching the extension spec), then read it back and confirm
# the words table round-trips unchanged.
with NWBHDF5IO('test_transcription.nwb', 'w') as io:
    io.write(nwbfile, cache_spec=True)

with NWBHDF5IO('test_transcription.nwb', 'r', load_namespaces=True) as io:
    nwbfile2 = io.read()
    assert (nwbfile.acquisition['transcription'].words.to_dataframe().equals(
        nwbfile2.acquisition['transcription'].words.to_dataframe()))
def setUpContainer(self):
    """Return a placeholder epochs object.

    The epochs actually under test are added directly to the NWBFile
    in addContainer.
    """
    placeholder = TimeIntervals('epochs')
    return placeholder
def setUpContainer(self):
    """Return a throwaway epochs table; the test framework ignores it."""
    ignored = TimeIntervals('epochs')
    return ignored
def test_align_by_time_intervals(self):
    """An empty intervals table yields an empty alignment result."""
    empty_intervals = TimeIntervals(name='Time Intervals')
    result = align_by_time_intervals(timeseries=self.ts,
                                     intervals=empty_intervals,
                                     stop_label=None)
    np.testing.assert_array_equal(result, np.array([]))
def nwb_copy_file(old_file, new_file, cp_objs={}):
    """
    Copy fields defined in 'obj', from existing NWB file to new NWB file.

    Parameters
    ----------
    old_file : str, path
        String such as '/path/to/old_file.nwb'.
    new_file : str, path
        String such as '/path/to/new_file.nwb'.
    cp_objs : dict
        Name:Value pairs (Group:Children) listing the groups and respective
        children from the current NWB file to be copied. Children can be:
        - Boolean, indicating an attribute (e.g. for institution, lab)
        - List of strings, containing several children names
        Example:
        {'institution':True,
         'lab':True,
         'acquisition':['microphone'],
         'ecephys':['LFP','DecompositionSeries']}
    """
    # NOTE(review): mutable default `cp_objs={}` is shared across calls; it is
    # only read here, but `cp_objs=None` with an in-body default would be safer.
    manager = get_manager()

    # Open original signal file; the old file must stay open while copying so
    # lazily-loaded datasets remain readable.
    with NWBHDF5IO(old_file, 'r', manager=manager,
                   load_namespaces=True) as io1:
        nwb_old = io1.read()

        # Creates new file
        nwb_new = NWBFile(session_description=str(nwb_old.session_description),
                          identifier='',
                          session_start_time=datetime.now(tzlocal()))
        with NWBHDF5IO(new_file, mode='w', manager=manager,
                       load_namespaces=False) as io2:
            # Institution name ------------------------------------------------
            if 'institution' in cp_objs:
                nwb_new.institution = str(nwb_old.institution)

            # Lab name --------------------------------------------------------
            if 'lab' in cp_objs:
                nwb_new.lab = str(nwb_old.lab)

            # Session id ------------------------------------------------------
            if 'session' in cp_objs:
                nwb_new.session_id = nwb_old.session_id

            # Devices ---------------------------------------------------------
            if 'devices' in cp_objs:
                for aux in list(nwb_old.devices.keys()):
                    dev = Device(nwb_old.devices[aux].name)
                    nwb_new.add_device(dev)

            # Electrode groups ------------------------------------------------
            if 'electrode_groups' in cp_objs:
                for aux in list(nwb_old.electrode_groups.keys()):
                    nwb_new.create_electrode_group(
                        name=str(nwb_old.electrode_groups[aux].name),
                        description=str(nwb_old.electrode_groups[
                            aux].description),
                        location=str(nwb_old.electrode_groups[aux].location),
                        device=nwb_new.get_device(
                            nwb_old.electrode_groups[aux].device.name)
                    )

            # Electrodes ------------------------------------------------------
            if 'electrodes' in cp_objs:
                nElec = len(nwb_old.electrodes['x'].data[:])
                for aux in np.arange(nElec):
                    nwb_new.add_electrode(
                        x=nwb_old.electrodes['x'][aux],
                        y=nwb_old.electrodes['y'][aux],
                        z=nwb_old.electrodes['z'][aux],
                        imp=nwb_old.electrodes['imp'][aux],
                        location=str(nwb_old.electrodes['location'][aux]),
                        filtering=str(nwb_old.electrodes['filtering'][aux]),
                        group=nwb_new.get_electrode_group(
                            nwb_old.electrodes['group'][aux].name),
                        group_name=str(nwb_old.electrodes['group_name'][aux])
                    )
                # if there are custom variables
                new_vars = list(nwb_old.electrodes.colnames)
                default_vars = ['x', 'y', 'z', 'imp', 'location',
                                'filtering', 'group', 'group_name']
                # NOTE(review): list comprehension used purely for the
                # .remove() side effect.
                [new_vars.remove(var) for var in default_vars]
                for var in new_vars:
                    if var == 'label':
                        var_data = [str(elem) for elem in nwb_old.electrodes[
                            var].data[:]]
                    else:
                        var_data = np.array(nwb_old.electrodes[var].data[:])

                    nwb_new.add_electrode_column(name=str(var),
                                                 description=
                                                 str(nwb_old.electrodes[
                                                     var].description),
                                                 data=var_data)

            # Epochs ----------------------------------------------------------
            if 'epochs' in cp_objs:
                nEpochs = len(nwb_old.epochs['start_time'].data[:])
                for i in np.arange(nEpochs):
                    nwb_new.add_epoch(
                        start_time=nwb_old.epochs['start_time'].data[i],
                        stop_time=nwb_old.epochs['stop_time'].data[i])
                # if there are custom variables
                new_vars = list(nwb_old.epochs.colnames)
                default_vars = ['start_time', 'stop_time', 'tags',
                                'timeseries']
                [new_vars.remove(var) for var in default_vars if
                 var in new_vars]
                for var in new_vars:
                    nwb_new.add_epoch_column(name=var,
                                             description=nwb_old.epochs[
                                                 var].description,
                                             data=nwb_old.epochs[var].data[:])

            # Invalid times ---------------------------------------------------
            if 'invalid_times' in cp_objs:
                nInvalid = len(nwb_old.invalid_times['start_time'][:])
                for aux in np.arange(nInvalid):
                    nwb_new.add_invalid_time_interval(
                        start_time=nwb_old.invalid_times['start_time'][aux],
                        stop_time=nwb_old.invalid_times['stop_time'][aux])

            # Trials ----------------------------------------------------------
            if 'trials' in cp_objs:
                nTrials = len(nwb_old.trials['start_time'])
                for aux in np.arange(nTrials):
                    nwb_new.add_trial(
                        start_time=nwb_old.trials['start_time'][aux],
                        stop_time=nwb_old.trials['stop_time'][aux])
                # if there are custom variables
                new_vars = list(nwb_old.trials.colnames)
                default_vars = ['start_time', 'stop_time']
                [new_vars.remove(var) for var in default_vars]
                for var in new_vars:
                    nwb_new.add_trial_column(name=var,
                                             description=nwb_old.trials[
                                                 var].description,
                                             data=nwb_old.trials[var].data[:])

            # Intervals -------------------------------------------------------
            if 'intervals' in cp_objs:
                all_objs_names = list(nwb_old.intervals.keys())
                for obj_name in all_objs_names:
                    obj_old = nwb_old.intervals[obj_name]
                    # create and add TimeIntervals; only start/stop columns
                    # are copied, custom interval columns are dropped here.
                    obj = TimeIntervals(name=obj_old.name,
                                        description=obj_old.description)
                    nInt = len(obj_old['start_time'])
                    for ind in np.arange(nInt):
                        obj.add_interval(start_time=obj_old['start_time'][ind],
                                         stop_time=obj_old['stop_time'][ind])
                    # Add to file
                    nwb_new.add_time_intervals(obj)

            # Stimulus --------------------------------------------------------
            if 'stimulus' in cp_objs:
                all_objs_names = list(nwb_old.stimulus.keys())
                for obj_name in all_objs_names:
                    obj_old = nwb_old.stimulus[obj_name]
                    obj = TimeSeries(name=obj_old.name,
                                     description=obj_old.description,
                                     data=obj_old.data[:],
                                     rate=obj_old.rate,
                                     resolution=obj_old.resolution,
                                     conversion=obj_old.conversion,
                                     starting_time=obj_old.starting_time,
                                     unit=obj_old.unit)
                    nwb_new.add_stimulus(obj)

            # Processing modules ----------------------------------------------
            if 'ecephys' in cp_objs:
                if cp_objs['ecephys'] is True:
                    interfaces = nwb_old.processing[
                        'ecephys'].data_interfaces.keys()
                else:  # list of items
                    interfaces = [
                        nwb_old.processing['ecephys'].data_interfaces[key]
                        for key in cp_objs['ecephys']
                    ]
                # Add ecephys module to NWB file
                ecephys_module = ProcessingModule(
                    name='ecephys',
                    description='Extracellular electrophysiology data.'
                )
                nwb_new.add_processing_module(ecephys_module)
                for interface_old in interfaces:
                    obj = copy_obj(interface_old, nwb_old, nwb_new)
                    if obj is not None:
                        ecephys_module.add_data_interface(obj)

            # Acquisition -----------------------------------------------------
            if 'acquisition' in cp_objs:
                if cp_objs['acquisition'] is True:
                    all_acq_names = list(nwb_old.acquisition.keys())
                else:  # list of items
                    all_acq_names = cp_objs['acquisition']
                for acq_name in all_acq_names:
                    obj_old = nwb_old.acquisition[acq_name]
                    obj = copy_obj(obj_old, nwb_old, nwb_new)
                    if obj is not None:
                        nwb_new.add_acquisition(obj)

            # Subject ---------------------------------------------------------
            if 'subject' in cp_objs:
                try:
                    cortical_surfaces = CorticalSurfaces()
                    surfaces = nwb_old.subject.cortical_surfaces.surfaces
                    for sfc in list(surfaces.keys()):
                        cortical_surfaces.create_surface(
                            name=surfaces[sfc].name,
                            faces=surfaces[sfc].faces,
                            vertices=surfaces[sfc].vertices)
                    nwb_new.subject = ECoGSubject(
                        cortical_surfaces=cortical_surfaces,
                        subject_id=nwb_old.subject.subject_id,
                        age=nwb_old.subject.age,
                        description=nwb_old.subject.description,
                        genotype=nwb_old.subject.genotype,
                        sex=nwb_old.subject.sex,
                        species=nwb_old.subject.species,
                        weight=nwb_old.subject.weight,
                        date_of_birth=nwb_old.subject.date_of_birth)
                # NOTE(review): bare except silently falls back to a plain
                # Subject on ANY error — narrow to the expected exception.
                except:
                    nwb_new.subject = Subject(age=nwb_old.subject.age,
                                              description=nwb_old.subject.description,
                                              genotype=nwb_old.subject.genotype,
                                              sex=nwb_old.subject.sex,
                                              species=nwb_old.subject.species,
                                              subject_id=nwb_old.subject.subject_id,
                                              weight=nwb_old.subject.weight,
                                              date_of_birth=nwb_old.subject.date_of_birth)

            # Write new file with copied fields
            io2.write(nwb_new, link_data=False)
def test_frorm_dataframe_missing_supplied_col(self):
    """from_dataframe must reject a column spec naming a column absent
    from the DataFrame."""
    # NOTE(review): "frorm" in the method name looks like a typo for "from";
    # kept as-is to preserve the external interface.
    with self.assertRaises(ValueError):
        frame = pd.DataFrame({'start_time': [1., 2., 3.],
                              'stop_time': [2., 3., 4.],
                              'label': ['a', 'b', 'c']})
        TimeIntervals.from_dataframe(frame, name='ti_name',
                                     columns=[{'name': 'not there'}])
def test_from_dataframe_missing_required_cols(self):
    """from_dataframe must reject a DataFrame lacking the stop_time column."""
    with self.assertRaises(ValueError):
        frame = pd.DataFrame({'start_time': [1., 2., 3.],
                              'label': ['a', 'b', 'c']})
        TimeIntervals.from_dataframe(frame, name='ti_name')
])
nwbfile.add_epoch(6.0, 8.0, ['second', 'example'], [
    test_ts,
])

####################
# Other time intervals
# ~~~~~~~~~~~~~~~~~~~~~~
# Both ``epochs`` and ``trials`` are of data type :py:class:`~pynwb.epoch.TimeIntervals`, which is a type of
# ``DynamicTable`` for storing information about time intervals. ``"epochs"`` and ``"trials"``
# are the two default names for :py:class:`~pynwb.epoch.TimeIntervals` objects, but you can also add your own

from pynwb.epoch import TimeIntervals

sleep_stages = TimeIntervals(
    name="sleep_stages",
    description="intervals for each sleep stage as determined by EEG",
)

# Custom columns must be declared before rows that use them are added.
sleep_stages.add_column(name="stage", description="stage of sleep")
sleep_stages.add_column(name="confidence", description="confidence in stage (0-1)")

sleep_stages.add_row(start_time=0.3, stop_time=0.5, stage=1, confidence=.5)
sleep_stages.add_row(start_time=0.7, stop_time=0.9, stage=2, confidence=.99)
sleep_stages.add_row(start_time=1.3, stop_time=3.0, stage=3, confidence=0.7)

nwbfile.add_time_intervals(sleep_stages)

####################
# .. _basic_units:
#
def nwb_copy_file(old_file, new_file, cp_objs={}, save_to_file=True):
    """
    Copy fields defined in 'obj', from existing NWB file to new NWB file.

    Parameters
    ----------
    old_file : str, path, nwbfile
        String or path to nwb file '/path/to/old_file.nwb'. Alternatively, the
        nwbfile object.
    new_file : str, path
        String such as '/path/to/new_file.nwb'.
    cp_objs : dict
        Name:Value pairs (Group:Children) listing the groups and respective
        children from the current NWB file to be copied. Children can be:
        - Boolean, indicating an attribute (e.g. for institution, lab)
        - List of strings, containing several children names
        Example:
        {'institution':True,
         'lab':True,
         'acquisition':['microphone'],
         'ecephys':['LFP','DecompositionSeries']}
    save_to_file: Boolean
        If True, saves directly to new_file.nwb. If False, only returns
        nwb_new.

    Returns:
    --------
    nwb_new : nwbfile object
    """
    # NOTE(review): mutable default `cp_objs={}` is shared across calls; it is
    # only read here, but `cp_objs=None` would be the safer idiom.
    manager = get_manager()

    # Get from nwbfile object in memory or from file
    if isinstance(old_file, NWBFile):
        nwb_old = old_file
        io1 = False
    else:
        io1 = NWBHDF5IO(str(old_file), 'r', manager=manager,
                        load_namespaces=True)
        nwb_old = io1.read()

    # Creates new file
    nwb_new = NWBFile(
        session_description=str(nwb_old.session_description),
        identifier=id_generator(),
        session_start_time=nwb_old.session_start_time,
    )

    with NWBHDF5IO(new_file, mode='w', manager=manager,
                   load_namespaces=False) as io2:
        # Institution name ------------------------------------------------
        if 'institution' in cp_objs:
            nwb_new.institution = str(nwb_old.institution)

        # Lab name --------------------------------------------------------
        if 'lab' in cp_objs:
            nwb_new.lab = str(nwb_old.lab)

        # Session id ------------------------------------------------------
        if 'session' in cp_objs:
            nwb_new.session_id = nwb_old.session_id

        # Devices ---------------------------------------------------------
        if 'devices' in cp_objs:
            for aux in list(nwb_old.devices.keys()):
                dev = Device(nwb_old.devices[aux].name)
                nwb_new.add_device(dev)

        # Electrode groups ------------------------------------------------
        if 'electrode_groups' in cp_objs and nwb_old.electrode_groups is not None:
            for aux in list(nwb_old.electrode_groups.keys()):
                nwb_new.create_electrode_group(
                    name=str(nwb_old.electrode_groups[aux].name),
                    description=str(nwb_old.electrode_groups[aux].description),
                    location=str(nwb_old.electrode_groups[aux].location),
                    device=nwb_new.get_device(
                        nwb_old.electrode_groups[aux].device.name))

        # Electrodes ------------------------------------------------------
        if 'electrodes' in cp_objs and nwb_old.electrodes is not None:
            nElec = len(nwb_old.electrodes['x'].data[:])
            for aux in np.arange(nElec):
                nwb_new.add_electrode(
                    x=nwb_old.electrodes['x'][aux],
                    y=nwb_old.electrodes['y'][aux],
                    z=nwb_old.electrodes['z'][aux],
                    imp=nwb_old.electrodes['imp'][aux],
                    location=str(nwb_old.electrodes['location'][aux]),
                    filtering=str(nwb_old.electrodes['filtering'][aux]),
                    group=nwb_new.get_electrode_group(
                        nwb_old.electrodes['group'][aux].name),
                    group_name=str(nwb_old.electrodes['group_name'][aux]))
            # if there are custom variables
            new_vars = list(nwb_old.electrodes.colnames)
            default_vars = [
                'x', 'y', 'z', 'imp', 'location', 'filtering', 'group',
                'group_name'
            ]
            # NOTE(review): list comprehension used purely for the .remove()
            # side effect.
            [new_vars.remove(var) for var in default_vars]
            for var in new_vars:
                if var == 'label':
                    var_data = [
                        str(elem) for elem in nwb_old.electrodes[var].data[:]
                    ]
                else:
                    var_data = np.array(nwb_old.electrodes[var].data[:])

                nwb_new.add_electrode_column(
                    name=str(var),
                    description=str(nwb_old.electrodes[var].description),
                    data=var_data)

            # If Bipolar scheme for electrodes
            # NOTE(review): only an empty BipolarSchemeTable shell is created
            # here — the table's rows are not copied; confirm intent.
            for v in nwb_old.lab_meta_data.values():
                if isinstance(v, EcephysExt) and hasattr(
                        v, 'bipolar_scheme_table'):
                    bst_old = v.bipolar_scheme_table
                    bst_new = BipolarSchemeTable(
                        name=bst_old.name, description=bst_old.description)
                    ecephys_ext = EcephysExt(name=v.name)
                    ecephys_ext.bipolar_scheme_table = bst_new
                    nwb_new.add_lab_meta_data(ecephys_ext)

        # Epochs ----------------------------------------------------------
        if 'epochs' in cp_objs and nwb_old.epochs is not None:
            nEpochs = len(nwb_old.epochs['start_time'].data[:])
            for i in np.arange(nEpochs):
                nwb_new.add_epoch(
                    start_time=nwb_old.epochs['start_time'].data[i],
                    stop_time=nwb_old.epochs['stop_time'].data[i])
            # if there are custom variables
            new_vars = list(nwb_old.epochs.colnames)
            default_vars = ['start_time', 'stop_time', 'tags', 'timeseries']
            [new_vars.remove(var) for var in default_vars if var in new_vars]
            for var in new_vars:
                nwb_new.add_epoch_column(
                    name=var,
                    description=nwb_old.epochs[var].description,
                    data=nwb_old.epochs[var].data[:])

        # Invalid times ---------------------------------------------------
        if 'invalid_times' in cp_objs and nwb_old.invalid_times is not None:
            nInvalid = len(nwb_old.invalid_times['start_time'][:])
            for aux in np.arange(nInvalid):
                nwb_new.add_invalid_time_interval(
                    start_time=nwb_old.invalid_times['start_time'][aux],
                    stop_time=nwb_old.invalid_times['stop_time'][aux])

        # Trials ----------------------------------------------------------
        if 'trials' in cp_objs and nwb_old.trials is not None:
            nTrials = len(nwb_old.trials['start_time'])
            for aux in np.arange(nTrials):
                nwb_new.add_trial(start_time=nwb_old.trials['start_time'][aux],
                                  stop_time=nwb_old.trials['stop_time'][aux])
            # if there are custom variables
            new_vars = list(nwb_old.trials.colnames)
            default_vars = ['start_time', 'stop_time']
            [new_vars.remove(var) for var in default_vars]
            for var in new_vars:
                nwb_new.add_trial_column(
                    name=var,
                    description=nwb_old.trials[var].description,
                    data=nwb_old.trials[var].data[:])

        # Intervals -------------------------------------------------------
        if 'intervals' in cp_objs and nwb_old.intervals is not None:
            all_objs_names = list(nwb_old.intervals.keys())
            for obj_name in all_objs_names:
                obj_old = nwb_old.intervals[obj_name]
                # create and add TimeIntervals; only start/stop columns are
                # copied — custom interval columns are dropped here.
                obj = TimeIntervals(name=obj_old.name,
                                    description=obj_old.description)
                nInt = len(obj_old['start_time'])
                for ind in np.arange(nInt):
                    obj.add_interval(start_time=obj_old['start_time'][ind],
                                     stop_time=obj_old['stop_time'][ind])
                # Add to file
                nwb_new.add_time_intervals(obj)

        # Stimulus --------------------------------------------------------
        if 'stimulus' in cp_objs:
            all_objs_names = list(nwb_old.stimulus.keys())
            for obj_name in all_objs_names:
                obj_old = nwb_old.stimulus[obj_name]
                obj = TimeSeries(name=obj_old.name,
                                 description=obj_old.description,
                                 data=obj_old.data[:],
                                 rate=obj_old.rate,
                                 resolution=obj_old.resolution,
                                 conversion=obj_old.conversion,
                                 starting_time=obj_old.starting_time,
                                 unit=obj_old.unit)
                nwb_new.add_stimulus(obj)

        # Processing modules ----------------------------------------------
        if 'ecephys' in cp_objs:
            interfaces = [
                nwb_old.processing['ecephys'].data_interfaces[key]
                for key in cp_objs['ecephys']
            ]
            # Add ecephys module to NWB file
            ecephys_module = ProcessingModule(
                name='ecephys',
                description='Extracellular electrophysiology data.')
            nwb_new.add_processing_module(ecephys_module)
            for interface_old in interfaces:
                obj = copy_obj(interface_old, nwb_old, nwb_new)
                if obj is not None:
                    ecephys_module.add_data_interface(obj)

        if 'behavior' in cp_objs:
            interfaces = [
                nwb_old.processing['behavior'].data_interfaces[key]
                for key in cp_objs['behavior']
            ]
            if 'behavior' not in nwb_new.processing:
                # Add behavior module to NWB file
                behavior_module = ProcessingModule(
                    name='behavior', description='behavioral data.')
                nwb_new.add_processing_module(behavior_module)
            for interface_old in interfaces:
                obj = copy_obj(interface_old, nwb_old, nwb_new)
                if obj is not None:
                    behavior_module.add_data_interface(obj)

        # Acquisition -----------------------------------------------------
        # Can get raw ElecetricalSeries and Mic recording
        if 'acquisition' in cp_objs:
            for acq_name in cp_objs['acquisition']:
                obj_old = nwb_old.acquisition[acq_name]
                acq = copy_obj(obj_old, nwb_old, nwb_new)
                nwb_new.add_acquisition(acq)

        # Surveys ---------------------------------------------------------
        if 'surveys' in cp_objs and 'behavior' in nwb_old.processing:
            surveys_list = [
                v for v in
                nwb_old.processing['behavior'].data_interfaces.values()
                if v.neurodata_type == 'SurveyTable'
            ]
            if cp_objs['surveys'] and len(surveys_list) > 0:
                if 'behavior' not in nwb_new.processing:
                    # Add behavior module to NWB file
                    behavior_module = ProcessingModule(
                        name='behavior', description='behavioral data.')
                    nwb_new.add_processing_module(behavior_module)
                for obj_old in surveys_list:
                    srv = copy_obj(obj_old, nwb_old, nwb_new)
                    behavior_module.add_data_interface(srv)

        # Subject ---------------------------------------------------------
        if nwb_old.subject is not None:
            if 'subject' in cp_objs:
                try:
                    cortical_surfaces = CorticalSurfaces()
                    surfaces = nwb_old.subject.cortical_surfaces.surfaces
                    for sfc in list(surfaces.keys()):
                        cortical_surfaces.create_surface(
                            name=surfaces[sfc].name,
                            faces=surfaces[sfc].faces,
                            vertices=surfaces[sfc].vertices)
                    nwb_new.subject = ECoGSubject(
                        cortical_surfaces=cortical_surfaces,
                        subject_id=nwb_old.subject.subject_id,
                        age=nwb_old.subject.age,
                        description=nwb_old.subject.description,
                        genotype=nwb_old.subject.genotype,
                        sex=nwb_old.subject.sex,
                        species=nwb_old.subject.species,
                        weight=nwb_old.subject.weight,
                        date_of_birth=nwb_old.subject.date_of_birth)
                # NOTE(review): bare except silently falls back to a plain
                # Subject on ANY error — narrow to the expected exception.
                except:
                    nwb_new.subject = Subject(**nwb_old.subject.fields)

        # Write new file with copied fields
        if save_to_file:
            io2.write(nwb_new, link_data=False)

    # Close old file and return new nwbfile object
    if io1:
        io1.close()

    return nwb_new