def test_cache_spec(self):
    """Write a file with the spec cached and verify the cached namespaces round-trip."""
    self.test_temp_file = tempfile.NamedTemporaryFile()
    # On Windows h5py cannot truncate an open file in write mode, so close
    # the handle first; the file itself is removed during the tearDown step.
    self.test_temp_file.close()
    self.io = NWBHDF5IO(self.test_temp_file.name)

    # Build a minimal file containing a single acquisition TimeSeries.
    session_start = datetime(2017, 4, 3, 11, tzinfo=tzlocal())
    created = datetime(2017, 4, 15, 12, tzinfo=tzlocal())
    values = np.arange(1000).reshape((100, 10))
    times = np.arange(100)
    nwbfile1 = NWBFile(session_description='demonstrate external files',
                       identifier='NWBE1',
                       session_start_time=session_start,
                       file_create_date=created)
    nwbfile1.add_acquisition(TimeSeries(name='test_timeseries',
                                        data=values,
                                        unit='SIunit',
                                        timestamps=times))

    # Write with spec caching enabled, then load the namespaces back.
    self.io.write(nwbfile1, cache_spec=True)
    self.io.close()
    ns_catalog = NamespaceCatalog(group_spec_cls=NWBGroupSpec,
                                  dataset_spec_cls=NWBDatasetSpec,
                                  spec_namespace_cls=NWBNamespace)
    NWBHDF5IO.load_namespaces(ns_catalog, self.test_temp_file.name)
    self.assertEqual(ns_catalog.namespaces, ('core',))

    # The cached types must match the types the writer knew about.
    source_types = self.__get_types(self.io.manager.namespace_catalog)
    read_types = self.__get_types(ns_catalog)
    self.assertSetEqual(source_types, read_types)
def test_add_ic_electrode_deprecation(self):
    """NWBFile.add_ic_electrode must emit a DeprecationWarning."""
    nwbfile = NWBFile(
        session_description='NWBFile icephys test',
        identifier='NWB123',  # required
        session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()),
    )
    expected = "deprecated, use NWBFile.add_icephys_electrode instead"
    with self.assertWarnsWith(DeprecationWarning, expected):
        nwbfile.add_ic_electrode(self.icephys_electrode)
def test_icephys_electrodes_parameter(self):
    """Electrodes passed via the icephys_electrodes parameter are retrievable by name."""
    nwbfile = NWBFile(
        session_description='NWBFile icephys test',
        identifier='NWB123',  # required
        session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()),
        icephys_electrodes=[self.icephys_electrode],
    )
    self.assertEqual(nwbfile.get_icephys_electrode('test_iS'),
                     self.icephys_electrode)
def test_link_resolve(self):
    """Round-trip a file whose ElectricalSeries share a single electrode table region."""
    nwbfile = NWBFile("a file with header data", "NB123A",
                      datetime(2018, 6, 1, tzinfo=tzlocal()))
    device = nwbfile.create_device('device_name')
    group = nwbfile.create_electrode_group(
        name='electrode_group_name',
        description='desc',
        device=device,
        location='unknown')
    nwbfile.add_electrode(id=0,
                          x=1.0, y=2.0, z=3.0,  # position?
                          imp=2.718,
                          location='unknown',
                          filtering='unknown',
                          group=group)
    region = nwbfile.create_electrode_table_region([0], 'etr_name')
    # Two series referencing the same region -> link resolution on read.
    for band in ('theta', 'gamma'):
        series = ElectricalSeries(name=band + '_phase',
                                  data=[1., 2., 3.],
                                  rate=0.0,
                                  electrodes=region)
        nwbfile.add_acquisition(series)
    with NWBHDF5IO(self.path, 'w') as io:
        io.write(nwbfile)
    with NWBHDF5IO(self.path, 'r') as io:
        io.read()
def test_link_resolve(self):
    """Round-trip a file whose ElectricalSeries share one electrode table region
    (legacy API variant that still passes 'source' arguments).

    Fix: removed a leftover debug print of the test name.
    """
    nwbfile = NWBFile("source", "a file with header data", "NB123A",
                      '2018-06-01T00:00:00')
    device = nwbfile.create_device('device_name', 'source')
    electrode_group = nwbfile.create_electrode_group(
        name='electrode_group_name',
        source='source',
        description='desc',
        device=device,
        location='unknown')
    nwbfile.add_electrode(0,
                          1.0, 2.0, 3.0,  # position?
                          imp=2.718,
                          location='unknown',
                          filtering='unknown',
                          description='desc',
                          group=electrode_group)
    etr = nwbfile.create_electrode_table_region([0], 'etr_name')
    # Two series referencing the same region -> link resolution on read.
    for passband in ('theta', 'gamma'):
        electrical_series = ElectricalSeries(name=passband + '_phase',
                                             source='ephys_analysis',
                                             data=[1., 2., 3.],
                                             rate=0.0,
                                             electrodes=etr)
        nwbfile.add_acquisition(electrical_series)
    with NWBHDF5IO(self.path, 'w') as io:
        io.write(nwbfile)
    with NWBHDF5IO(self.path, 'r') as io:
        io.read()
def test_copy_file_with_external_links(self):
    """Copy a file containing an external link and verify the link is expanded.

    Fixes: the three h5py ``File`` handles were opened without an explicit
    mode and never closed (a resource leak, and h5py >= 3 no longer defaults
    to read mode); they are now opened read-only via context managers.
    """
    # Setup all the data we need
    start_time = datetime(2017, 4, 3, 11, 0, 0)
    create_date = datetime(2017, 4, 15, 12, 0, 0)
    data = np.arange(1000).reshape((100, 10))
    timestamps = np.arange(100)

    # Create and write the first file
    nwbfile1 = NWBFile(source='PyNWB tutorial',
                       session_description='demonstrate external files',
                       identifier='NWBE1',
                       session_start_time=start_time,
                       file_create_date=create_date)
    test_ts1 = TimeSeries(name='test_timeseries',
                          source='PyNWB tutorial',
                          data=data,
                          unit='SIunit',
                          timestamps=timestamps)
    nwbfile1.add_acquisition(test_ts1)
    self.io[0].write(nwbfile1)
    nwbfile1_read = self.io[0].read()

    # Create the second file; its data references the first file's dataset,
    # which the writer stores as an external link
    nwbfile2 = NWBFile(source='PyNWB tutorial',
                       session_description='demonstrate external files',
                       identifier='NWBE1',
                       session_start_time=start_time,
                       file_create_date=create_date)
    test_ts2 = TimeSeries(name='test_timeseries',
                          source='PyNWB tutorial',
                          data=nwbfile1_read.get_acquisition('test_timeseries').data,
                          unit='SIunit',
                          timestamps=timestamps)
    nwbfile2.add_acquisition(test_ts2)
    self.io[1].write(nwbfile2)
    self.io[1].close()
    self.io[0].close()  # Don't forget to close the first file too

    # Copy the second file, expanding external links into real datasets
    self.io[2].close()
    HDF5IO.copy_file(source_filename=self.test_temp_files[1].name,
                     dest_filename=self.test_temp_files[2].name,
                     expand_external=True,
                     expand_soft=False,
                     expand_refs=False)

    # Confirm that our original data file is correct
    with File(self.test_temp_files[0].name, 'r') as f1:
        self.assertTrue(isinstance(
            f1.get('/acquisition/test_timeseries/data', getlink=True), HardLink))
    # Confirm that we successfully created an ExternalLink in our second file
    with File(self.test_temp_files[1].name, 'r') as f2:
        self.assertTrue(isinstance(
            f2.get('/acquisition/test_timeseries/data', getlink=True), ExternalLink))
    # Confirm that we successfully resolved the External Link when we copied our second file
    with File(self.test_temp_files[2].name, 'r') as f3:
        self.assertTrue(isinstance(
            f3.get('/acquisition/test_timeseries/data', getlink=True), HardLink))
def test_create_ic_electrode_deprecation(self):
    """NWBFile.create_ic_electrode must emit a DeprecationWarning."""
    nwbfile = NWBFile(
        session_description='NWBFile icephys test',
        identifier='NWB123',  # required
        session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()),
    )
    device = Device(name='device_name')
    expected = "deprecated, use NWBFile.create_icephys_electrode instead"
    with self.assertWarnsWith(DeprecationWarning, expected):
        nwbfile.create_ic_electrode(
            name='test_iS',
            device=device,
            description='description',
            slice='slice',
            seal='seal',
            location='location',
            resistance='resistance',
            filtering='filtering',
            initial_access_resistance='initial_access_resistance',
        )
def test_ic_electrodes_attribute_deprecation(self):
    """Deprecated ic_electrodes accessors on NWBFile must warn."""
    nwbfile = NWBFile(
        session_description='NWBFile icephys test',
        identifier='NWB123',  # required
        session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()),
        icephys_electrodes=[self.icephys_electrode],
    )
    # make sure NWBFile.ic_electrodes property warns
    with self.assertWarnsWith(DeprecationWarning,
                              "deprecated. use NWBFile.icephys_electrodes instead"):
        nwbfile.ic_electrodes
    # make sure NWBFile.get_ic_electrode warns
    with self.assertWarnsWith(DeprecationWarning,
                              "deprecated, use NWBFile.get_icephys_electrode instead"):
        nwbfile.get_ic_electrode(self.icephys_electrode.name)
def test_ic_electrodes_parameter_deprecation(self):
    """Constructing an NWBFile with the ic_electrodes parameter must warn."""
    expected = ("Use of the ic_electrodes parameter is deprecated. "
                "Use the icephys_electrodes parameter instead")
    with self.assertWarnsWith(DeprecationWarning, expected):
        _ = NWBFile(
            session_description='NWBFile icephys test',
            identifier='NWB123',  # required
            session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()),
            ic_electrodes=[self.icephys_electrode],
        )
def test_sweep_table_depractation_warn(self):
    """Constructing an NWBFile with a SweepTable must emit a DeprecationWarning.

    NOTE(review): the method name misspells "deprecation" but is kept as-is
    since it is the test's public identifier.
    """
    expected = (
        "Use of SweepTable is deprecated. Use the IntracellularRecordingsTable "
        "instead. See also the NWBFile.add_intracellular_recordings function."
    )
    with self.assertWarnsWith(DeprecationWarning, expected):
        _ = NWBFile(
            session_description='NWBFile icephys test',
            identifier='NWB123',  # required
            session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()),
            ic_electrodes=[self.icephys_electrode],
            sweep_table=SweepTable(),
        )
def create_icephys_testfile(filename=None, add_custom_columns=True, randomize_data=True, with_missing_stimulus=True):
    """
    Create a small but relatively complex icephys test file that we can use for testing of queries.

    :param filename: The name of the output file to be generated. If set to None then the file is
                     not written but only created in memory
    :type filename: str, None
    :param add_custom_columns: Add custom metadata columns to each table
    :type add_custom_columns: bool
    :param randomize_data: Randomize data values in the stimulus and response
    :type randomize_data: bool
    :param with_missing_stimulus: Set the stimulus to None for the recordings with ids 0 and 10
    :type with_missing_stimulus: bool

    :returns: ICEphysFile NWBFile object created for writing. NOTE: If filename is provided then
              the file is written to disk, but the function does not read the file back. If
              you want to use the file from disk then you will need to read it with NWBHDF5IO.
    :rtype: ICEphysFile
    """
    nwbfile = NWBFile(
        session_description='my first synthetic recording',
        identifier='EXAMPLE_ID',
        session_start_time=datetime.now(tzlocal()),
        experimenter='Dr. Bilbo Baggins',
        lab='Bag End Laboratory',
        institution='University of Middle Earth at the Shire',
        experiment_description='I went on an adventure with thirteen dwarves to reclaim vast treasures.',
        session_id='LONELYMTN')
    # Add a device
    device = nwbfile.create_device(name='Heka ITC-1600')
    # Add an intracellular electrode
    electrode0 = nwbfile.create_icephys_electrode(
        name="elec0",
        description='a mock intracellular electrode',
        device=device)
    # Add an intracellular electrode
    electrode1 = nwbfile.create_icephys_electrode(
        name="elec1",
        description='another mock intracellular electrode',
        device=device)
    # Add the intracellular recordings: 20 sweeps alternating between the two electrodes
    for sweep_number in range(20):
        elec = (electrode0 if (sweep_number % 2 == 0) else electrode1)
        stim, resp = create_icephys_stimulus_and_response(
            sweep_number=np.uint64(sweep_number),
            electrode=elec,
            randomize_data=randomize_data)
        # Optionally drop the stimulus for two sweeps to exercise missing-stimulus handling
        if with_missing_stimulus and sweep_number in [0, 10]:
            stim = None
        nwbfile.add_intracellular_recording(electrode=elec,
                                            stimulus=stim,
                                            response=resp,
                                            id=sweep_number)
    nwbfile.intracellular_recordings.add_column(
        name='recording_tags',
        data=['A1', 'A2',
              'B1', 'B2',
              'C1', 'C2', 'C3',
              'D1', 'D2', 'D3',
              'A1', 'A2',
              'B1', 'B2',
              'C1', 'C2', 'C3',
              'D1', 'D2', 'D3'],
        description='String with a set of recording tags')
    # Add simultaneous_recordings
    nwbfile.add_icephys_simultaneous_recording(recordings=[0, 1], id=np.int64(100))
    nwbfile.add_icephys_simultaneous_recording(recordings=[2, 3], id=np.int64(101))
    nwbfile.add_icephys_simultaneous_recording(recordings=[4, 5, 6], id=np.int64(102))
    nwbfile.add_icephys_simultaneous_recording(recordings=[7, 8, 9], id=np.int64(103))
    nwbfile.add_icephys_simultaneous_recording(recordings=[10, 11], id=np.int64(104))
    nwbfile.add_icephys_simultaneous_recording(recordings=[12, 13], id=np.int64(105))
    nwbfile.add_icephys_simultaneous_recording(recordings=[14, 15, 16], id=np.int64(106))
    nwbfile.add_icephys_simultaneous_recording(recordings=[17, 18, 19], id=np.int64(107))
    if add_custom_columns:
        nwbfile.icephys_simultaneous_recordings.add_column(
            name='tag',
            data=np.arange(8),
            description='some integer tag for a sweep')
    # Add sequential recordings
    nwbfile.add_icephys_sequential_recording(simultaneous_recordings=[0, 1],
                                             id=np.int64(1000),
                                             stimulus_type="StimType_1")
    nwbfile.add_icephys_sequential_recording(simultaneous_recordings=[2, ],
                                             id=np.int64(1001),
                                             stimulus_type="StimType_2")
    nwbfile.add_icephys_sequential_recording(simultaneous_recordings=[3, ],
                                             id=np.int64(1002),
                                             stimulus_type="StimType_3")
    nwbfile.add_icephys_sequential_recording(simultaneous_recordings=[4, 5],
                                             id=np.int64(1003),
                                             stimulus_type="StimType_1")
    nwbfile.add_icephys_sequential_recording(simultaneous_recordings=[6, ],
                                             id=np.int64(1004),
                                             stimulus_type="StimType_2")
    nwbfile.add_icephys_sequential_recording(simultaneous_recordings=[7, ],
                                             id=np.int64(1005),
                                             stimulus_type="StimType_3")
    if add_custom_columns:
        nwbfile.icephys_sequential_recordings.add_column(
            name='type',
            data=['T1', 'T2', 'T3', 'T1', 'T2', 'T3'],
            description='type of the sequential recording')
    # Add repetitions
    nwbfile.add_icephys_repetition(sequential_recordings=[0, ], id=np.int64(10000))
    nwbfile.add_icephys_repetition(sequential_recordings=[1, 2], id=np.int64(10001))
    nwbfile.add_icephys_repetition(sequential_recordings=[3, ], id=np.int64(10002))
    nwbfile.add_icephys_repetition(sequential_recordings=[4, 5], id=np.int64(10003))
    if add_custom_columns:
        nwbfile.icephys_repetitions.add_column(
            name='type',
            data=['R1', 'R2', 'R1', 'R2'],
            description='some repetition type indicator')
    # Add experimental_conditions
    nwbfile.add_icephys_experimental_condition(repetitions=[0, 1], id=np.int64(100000))
    nwbfile.add_icephys_experimental_condition(repetitions=[2, 3], id=np.int64(100001))
    if add_custom_columns:
        nwbfile.icephys_experimental_conditions.add_column(
            name='temperature',
            data=[32., 24.],
            description='Temperatur in C')
    # Write our test file
    if filename is not None:
        with NWBHDF5IO(filename, 'w') as io:
            io.write(nwbfile)
    # Return our in-memory NWBFile
    return nwbfile
def run_conversion(self, nwbfile: NWBFile, metadata: dict):
    """
    Add behavior data for one session to ``nwbfile`` (modified in place).

    Reads ``<session_id>.behavior.mat`` from the source folder and adds:
    trials (with optional direction/trial_type columns), position and
    orientation tracking, sleep-state intervals (when the SleepState file
    exists), and before/during/after-trial epochs.

    :param nwbfile: In-memory NWBFile to append data to.
    :param metadata: Conversion metadata dict (not used directly here).
    """
    session_path = Path(self.source_data["folder_path"])
    session_id = session_path.stem

    # Load the file with behavioral data
    behavior_file_path = Path(session_path) / f"{session_id}.behavior.mat"
    behavior_mat = read_matlab_file(str(behavior_file_path))["behavior"]

    # Add trials in chronological order
    events = behavior_mat["events"]
    trial_interval_list = events["trialIntervals"]
    data = [dict(start_time=float(start_time), stop_time=float(stop_time))
            for start_time, stop_time in trial_interval_list]
    for row in sorted(data, key=lambda x: x["start_time"]):
        nwbfile.add_trial(**row)

    trial_list = events["trials"]
    direction_list = [trial.get("direction", "") for trial in trial_list]
    trial_type_list = [trial.get("type", "") for trial in trial_list]
    # Only add the optional columns when at least one trial carries a value
    if not all(direction == "" for direction in direction_list):
        nwbfile.add_trial_column(name="direction",
                                 description="direction of the trial",
                                 data=direction_list)
    if not all(trial_type == "" for trial_type in trial_type_list):
        nwbfile.add_trial_column(name="trial_type",
                                 description="type of trial",
                                 data=trial_type_list)

    # Position
    module_name = "behavior"
    module_description = "Contains behavioral data concerning position."
    processing_module = get_module(nwbfile=nwbfile,
                                   name=module_name,
                                   description=module_description)

    timestamps = np.array(behavior_mat["timestamps"])[..., 0]
    position = behavior_mat["position"]
    # BUGFIX: the z column previously reused position["y"]; use position["z"]
    # so the third coordinate really is z. TODO(review): confirm the source
    # .mat files always provide a "z" field.
    pos_data = [[x, y, z]
                for (x, y, z) in zip(position["x"], position["y"], position["z"])]
    pos_data = np.array(pos_data)[..., 0]

    unit = behavior_mat.get("units", "")
    # BUGFIX: was `unit == ["m", "meter", "meters"]`, comparing a string to a
    # list (always False, so conversion was always nan); membership was meant.
    if unit in ["m", "meter", "meters"]:
        conversion = 1.0
    else:
        warnings.warn(f"Spatial units {unit} not listed in meters; "
                      "setting conversion to nan.")
        conversion = np.nan

    description = behavior_mat.get("description", "generic_position_tracking").replace("/", "-")
    rotation_type = behavior_mat.get("rotationType", "non_specified")

    pos_obj = Position(name=f"{description}_task".replace(" ", "_"))
    spatial_series_object = SpatialSeries(
        name="position",
        description="(x,y,z) coordinates tracking subject movement.",
        data=H5DataIO(pos_data, compression="gzip"),
        reference_frame="unknown",
        unit=unit,
        conversion=conversion,
        timestamps=timestamps,
        resolution=np.nan,
    )
    pos_obj.add_spatial_series(spatial_series_object)

    # Add error if available
    errorPerMarker = behavior_mat.get("errorPerMarker", None)
    if errorPerMarker:
        error_data = np.array(errorPerMarker)[..., 0]
        spatial_series_object = SpatialSeries(
            name="error_per_marker",
            description="Estimated error for marker tracking from optitrack system.",
            data=H5DataIO(error_data, compression="gzip"),
            reference_frame="unknown",
            conversion=conversion,
            timestamps=timestamps,
            resolution=np.nan,
        )
        pos_obj.add_spatial_series(spatial_series_object)
    processing_module.add_data_interface(pos_obj)

    # Compass: orientation quaternions, optional in the source file
    try:
        orientation = behavior_mat["orientation"]
        orientation_data = [[x, y, z, w]
                            for (x, y, z, w) in zip(orientation["x"],
                                                    orientation["y"],
                                                    orientation["z"],
                                                    orientation["w"])]
        orientation_data = np.array(orientation_data)[..., 0]
        compass_obj = CompassDirection(name="allocentric_frame_tracking")
        spatial_series_object = SpatialSeries(
            name="orientation",
            description=f"(x, y, z, w) orientation coordinates, orientation type: {rotation_type}",
            data=H5DataIO(orientation_data, compression="gzip"),
            reference_frame="unknown",
            conversion=conversion,
            timestamps=timestamps,
            resolution=np.nan,
        )
        compass_obj.add_spatial_series(spatial_series_object)
        processing_module.add_data_interface(compass_obj)
    except KeyError:
        warnings.warn("Orientation data not found")

    # States
    module_name = "ecephys"
    module_description = "Contains behavioral data concerning classified states."
    processing_module = get_module(nwbfile=nwbfile,
                                   name=module_name,
                                   description=module_description)

    # Sleep states
    sleep_file_path = session_path / f"{session_id}.SleepState.states.mat"
    if Path(sleep_file_path).exists():
        mat_file = read_matlab_file(sleep_file_path)
        state_label_names = dict(WAKEstate="Awake", NREMstate="Non-REM",
                                 REMstate="REM", MAstate="MA")
        sleep_state_dic = mat_file["SleepState"]["ints"]
        table = TimeIntervals(name="sleep_states",
                              description="Sleep state of the animal.")
        table.add_column(name="label", description="Sleep state.")

        data = []
        for sleep_state in state_label_names:
            values = sleep_state_dic[sleep_state]
            # A single interval comes back as a flat [start, stop] pair
            if len(values) != 0 and isinstance(values[0], int):
                values = [values]
            for start_time, stop_time in values:
                data.append(dict(start_time=float(start_time),
                                 stop_time=float(stop_time),
                                 label=state_label_names[sleep_state]))
        for row in sorted(data, key=lambda x: x["start_time"]):
            table.add_row(**row)
        processing_module.add(table)

    # Add epochs around the trial block, spanning the whole recording
    lfp_file_path = session_path / f"{session_path.name}.lfp"
    raw_file_path = session_path / f"{session_id}.dat"
    xml_file_path = session_path / f"{session_id}.xml"
    # Prefer the raw recording; fall back to the LFP file to get the duration
    if raw_file_path.is_file():
        recorder = NeuroscopeRecordingExtractor(file_path=raw_file_path,
                                                xml_file_path=xml_file_path)
    else:
        recorder = NeuroscopeRecordingExtractor(file_path=lfp_file_path,
                                                xml_file_path=xml_file_path)
    num_frames = recorder.get_num_frames()
    sampling_frequency = recorder.get_sampling_frequency()
    end_of_the_session = num_frames / sampling_frequency

    session_start = 0.0
    start_trials_time = min(interval[0] for interval in trial_interval_list)
    end_trials_time = max(interval[1] for interval in trial_interval_list)
    nwbfile.add_epoch(start_time=session_start,
                      stop_time=start_trials_time,
                      tags="before trials")
    nwbfile.add_epoch(start_time=start_trials_time,
                      stop_time=end_trials_time,
                      tags="during trials")
    nwbfile.add_epoch(start_time=end_trials_time,
                      stop_time=end_of_the_session,
                      tags="after trials")