def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
    self._behavior_metadata.subject_metadata.to_nwb(nwbfile=nwbfile)
    self._behavior_metadata.equipment.to_nwb(nwbfile=nwbfile)

    nwb_extension = load_pynwb_extension(OphysBehaviorMetadataSchema,
                                         'ndx-aibs-behavior-ophys')

    behavior_meta = self._behavior_metadata
    ophys_meta = self._ophys_metadata

    if isinstance(ophys_meta, MultiplaneMetadata):
        imaging_plane_group = ophys_meta.imaging_plane_group
        imaging_plane_group_count = ophys_meta.imaging_plane_group_count
    else:
        imaging_plane_group_count = 0
        imaging_plane_group = -1

    nwb_metadata = nwb_extension(
        name='metadata',
        ophys_session_id=ophys_meta.ophys_session_id,
        field_of_view_width=ophys_meta.field_of_view_shape.width,
        field_of_view_height=ophys_meta.field_of_view_shape.height,
        imaging_plane_group=imaging_plane_group,
        imaging_plane_group_count=imaging_plane_group_count,
        stimulus_frame_rate=behavior_meta.stimulus_frame_rate,
        experiment_container_id=ophys_meta.experiment_container_id,
        ophys_experiment_id=ophys_meta.ophys_experiment_id,
        session_type=behavior_meta.session_type,
        equipment_name=behavior_meta.equipment.value,
        imaging_depth=ophys_meta.imaging_depth,
        behavior_session_uuid=str(behavior_meta.behavior_session_uuid),
        behavior_session_id=behavior_meta.behavior_session_id)
    nwbfile.add_lab_meta_data(nwb_metadata)

    return nwbfile
def test_lab_meta(self):
    ns_builder = NWBNamespaceBuilder('Extension for use in my Lab',
                                     self.prefix, version='0.1.0')
    test_meta_ext = NWBGroupSpec(
        neurodata_type_def='MyTestMetaData',
        neurodata_type_inc='LabMetaData',
        doc='my test meta data',
        attributes=[
            NWBAttributeSpec(name='test_attr', dtype='float',
                             doc='test_dtype')
        ])
    ns_builder.add_spec(self.ext_source, test_meta_ext)
    ns_builder.export(self.ns_path, outdir=self.tempdir)
    ns_abs_path = os.path.join(self.tempdir, self.ns_path)

    load_namespaces(ns_abs_path)

    @register_class('MyTestMetaData', self.prefix)
    class MyTestMetaData(LabMetaData):
        __nwbfields__ = ('test_attr',)

        @docval({'name': 'name', 'type': str, 'doc': 'name'},
                {'name': 'test_attr', 'type': float, 'doc': 'test attribute'})
        def __init__(self, **kwargs):
            test_attr = popargs('test_attr', kwargs)
            super(MyTestMetaData, self).__init__(**kwargs)
            self.test_attr = test_attr

    nwbfile = NWBFile("a file with header data", "NB123A",
                      datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal()))
    nwbfile.add_lab_meta_data(MyTestMetaData(name='test_name', test_attr=5.))
class CompartmentsTest(unittest.TestCase):

    def setUp(self):
        self.nwbfile = NWBFile('description', 'id',
                               datetime.now().astimezone())

    def test_add_compartments(self):
        compartments = Compartments()
        compartments.add_row(number=[0, 1, 2, 3, 4],
                             position=[0.1, 0.2, 0.3, 0.4, 0.5])
        compartments.add_row(number=[0], position=[np.nan])
        self.nwbfile.add_lab_meta_data(
            SimulationMetaData(compartments=compartments))
        cs = CompartmentSeries('membrane_potential', np.random.randn(10, 6),
                               compartments=compartments, unit='V', rate=100.)
        self.nwbfile.add_acquisition(cs)

        filename = 'test_compartment_series.nwb'
        with NWBHDF5IO(filename, 'w') as io:
            io.write(self.nwbfile)

        with NWBHDF5IO(filename, mode='r') as io:
            io.read()

        assert all(cs.find_compartments(0, [1, 3]) == [1, 3])
        assert all(cs.find_compartments(1) == 5)
        os.remove(filename)
def test_lab_meta_auto(self):
    ns_builder = NWBNamespaceBuilder('Extension for use in my Lab',
                                     self.prefix, version='0.1.0')
    test_meta_ext = NWBGroupSpec(
        neurodata_type_def='MyTestMetaData',
        neurodata_type_inc='LabMetaData',
        doc='my test meta data',
        attributes=[
            NWBAttributeSpec(name='test_attr', dtype='float',
                             doc='test_dtype')
        ])
    ns_builder.add_spec(self.ext_source, test_meta_ext)
    ns_builder.export(self.ns_path, outdir=self.tempdir)
    ns_abs_path = os.path.join(self.tempdir, self.ns_path)

    load_namespaces(ns_abs_path)

    MyTestMetaData = get_class('MyTestMetaData', self.prefix)

    nwbfile = NWBFile("a file with header data", "NB123A",
                      datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal()))
    nwbfile.add_lab_meta_data(MyTestMetaData(name='test_name', test_attr=5.))
def test_io():
    nwbfile = NWBFile('description', 'id', datetime.now().astimezone())
    device = nwbfile.create_device('device_test')
    group = nwbfile.create_electrode_group(name='electrodes',
                                           description='label',
                                           device=device,
                                           location='brain')
    for i in range(4):
        nwbfile.add_electrode(x=float(i), y=float(i), z=float(i),
                              imp=np.nan, location='', filtering='',
                              group=group)

    bipolar_scheme_table = BipolarSchemeTable(name='bipolar_scheme_table',
                                              description='desc')
    bipolar_scheme_table.anodes.table = nwbfile.electrodes
    bipolar_scheme_table.cathodes.table = nwbfile.electrodes
    bipolar_scheme_table.add_row(anodes=[0], cathodes=[1])
    bipolar_scheme_table.add_row(anodes=[0, 1], cathodes=[2, 3])

    ecephys_ext = EcephysExt(name='ecephys_ext')
    ecephys_ext.bipolar_scheme_table = bipolar_scheme_table
    nwbfile.add_lab_meta_data(ecephys_ext)

    st = StimTable(
        name='stimtable',
        description='stimulation parameters',
        # bipolar_table=bipolar_scheme_table
    )
    # Calling add_time_intervals before `add_run` obviates passing
    # bipolar_table=bipolar_scheme_table above. You can add the table to the
    # NWBFile later, but then you will need to specify bipolar_table manually.
    nwbfile.add_time_intervals(st)

    frequencies = [10., 10.]
    amplitudes = [5., 5.]
    pulse_widths = [2., 3.]
    for i in range(2):
        st.add_run(start_time=np.nan, stop_time=np.nan,
                   frequency=frequencies[i], amplitude=amplitudes[i],
                   pulse_width=pulse_widths[i], bipolar_pair=i)

    with NWBHDF5IO('test_file.nwb', 'w') as io:
        io.write(nwbfile)

    # Make a 300-timepoint waveform time series for 2 electrodes (one
    # cathode and one anode).
    current_data = np.random.randn(300, 2)
def test_ext():
    nwbfile = NWBFile('description', 'id', datetime.now().astimezone())
    device = nwbfile.create_device('device_name')
    electrode_group = nwbfile.create_electrode_group('electrode_group',
                                                     'desc', 'loc',
                                                     device=device)
    for i in np.arange(20.):
        nwbfile.add_electrode(i, i, i, np.nan, 'loc', 'filt', electrode_group)

    bipolar_scheme = DynamicTable(name='bipolar_scheme', description='desc')
    bipolar_scheme.add_column(name='anode', description='desc',
                              index=True, table=nwbfile.electrodes)
    bipolar_scheme.add_column(name='cathode', description='desc',
                              index=True, table=nwbfile.electrodes)
    bipolar_scheme.add_row(anode=[0], cathode=[1])
    bipolar_scheme.add_row(anode=[0, 1], cathode=[2, 3])
    bipolar_scheme.add_row(anode=[0, 1], cathode=[2])

    ecephys_ext = EcephysExt(bipolar_scheme=bipolar_scheme)
    nwbfile.add_lab_meta_data(ecephys_ext)

    bipolar_scheme_region = DynamicTableRegion(
        name='electrodes',
        data=np.arange(0, 3),
        description='desc',
        table=nwbfile.lab_meta_data['extracellular_ephys_extensions']
        .bipolar_scheme)

    ec_series = ElectricalSeries(name='test_ec_series',
                                 description='desc',
                                 data=np.random.rand(100, 3),
                                 rate=1000.,
                                 electrodes=bipolar_scheme_region)
    nwbfile.add_acquisition(ec_series)

    with NWBHDF5IO('test_nwb.nwb', 'w') as io:
        io.write(nwbfile)

    with NWBHDF5IO('test_nwb.nwb', 'r', load_namespaces=True) as io:
        nwbfile = io.read()
        assert_array_equal(
            nwbfile.acquisition['test_ec_series'].electrodes
            .table['anode'][2]['x'],
            [0., 1.])

    os.remove('test_nwb.nwb')
def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
    nwb_extension = load_pynwb_extension(BehaviorTaskParametersSchema,
                                         'ndx-aibs-behavior-ophys')
    task_parameters = self.to_dict()['task_parameters']
    task_parameters_clean = BehaviorTaskParametersSchema().dump(
        task_parameters)

    new_task_parameters_dict = {}
    for key, val in task_parameters_clean.items():
        if isinstance(val, list):
            new_task_parameters_dict[key] = np.array(val)
        else:
            new_task_parameters_dict[key] = val

    nwb_task_parameters = nwb_extension(name='task_parameters',
                                        **new_task_parameters_dict)
    nwbfile.add_lab_meta_data(nwb_task_parameters)

    return nwbfile
def test_nwbfileio(self):
    testdir = tempfile.mkdtemp()
    nwbfile = NWBFile(**temp_session_nwbfile)
    nwbfile.add_lab_meta_data(IblSessionData(**temp_sessions))
    nwbfile.subject = IblSubject(**temp_subject)
    for i, name in zip(temp_probes, probe_names):
        nwbfile.add_device(IblProbes(name, **i))
    saveloc = os.path.join(testdir, 'test.nwb')

    with NWBHDF5IO(saveloc, mode='w') as io:
        io.write(nwbfile)

    with NWBHDF5IO(saveloc, mode='r', load_namespaces=True) as io:
        read_nwbfile = io.read()

        for i, j in temp_sessions.items():
            attr_loop = getattr(
                read_nwbfile.lab_meta_data['Ibl_session_data'], i, None)
            if attr_loop:
                if isinstance(attr_loop, h5py._hl.dataset.Dataset):
                    assert all(
                        getattr(read_nwbfile.lab_meta_data['Ibl_session_data'],
                                i).value == j)
                else:
                    assert getattr(
                        read_nwbfile.lab_meta_data['Ibl_session_data'],
                        i) == j

        for i, j in temp_subject.items():
            attr_loop = getattr(read_nwbfile.subject, i, None)
            if attr_loop:
                if isinstance(attr_loop, h5py._hl.dataset.Dataset):
                    assert all(getattr(read_nwbfile.subject, i).value == j)
                else:
                    assert getattr(read_nwbfile.subject, i) == j

        for no, probe_name in enumerate(probe_names):
            for i, j in temp_probes[no].items():
                attr_loop = getattr(read_nwbfile.devices[probe_name], i, None)
                if attr_loop:
                    if isinstance(attr_loop, h5py._hl.dataset.Dataset):
                        assert all(
                            getattr(read_nwbfile.devices[probe_name],
                                    i).value == j)
                    else:
                        assert getattr(read_nwbfile.devices[probe_name],
                                       i) == j

    shutil.rmtree(testdir)
def chang2nwb(blockpath, out_file_path=None, save_to_file=False,
              htk_config=None):
    """
    Parameters
    ----------
    blockpath : str
    out_file_path : None | str
        if None, output = [blockpath]/[blockname].nwb
    save_to_file : bool
        If True, saves to file. If False, just returns the nwbfile object.
    htk_config : dict
        Dictionary containing HTK conversion paths and options. Example:
        {
            ecephys_path: 'path_to/ecephys_htk_files',
            ecephys_type: 'raw', 'preprocessed' or 'high_gamma',
            analog_path: 'path_to/analog_htk_files',
            anin1: {present: True, name: 'microphone', type: 'acquisition'},
            anin2: {present: True, name: 'speaker1', type: 'stimulus'},
            anin3: {present: False, name: 'speaker2', type: 'stimulus'},
            anin4: {present: False, name: 'custom', type: 'acquisition'},
            metadata: metadata,
            electrodes_file: electrodes_file,
            bipolar_file: bipolar_file
        }

    Returns
    -------
    nwbfile, out_file_path, subject_id, blockname
    """
    metadata = {}

    if htk_config is None:
        blockpath = Path(blockpath)
    else:
        blockpath = Path(htk_config['ecephys_path'])
        metadata = htk_config['metadata']
    blockname = blockpath.parent.name
    subject_id = blockpath.parent.parent.name[2:]

    if out_file_path is None:
        out_file_path = blockpath.resolve().parent / ''.join(
            ['EC', subject_id, '_', blockname, '.nwb'])

    # file paths
    ecog_path = blockpath
    anin_path = htk_config['analog_path']
    bad_time_file = path.join(blockpath, 'Artifacts', 'badTimeSegments.mat')

    # Create the NWB file object
    nwbfile_dict = {
        'session_description': blockname,
        'identifier': blockname,
        'session_start_time': datetime.now().astimezone(),
        'institution': 'University of California, San Francisco',
        'lab': 'Chang Lab'
    }
    if 'NWBFile' in metadata:
        nwbfile_dict.update(metadata['NWBFile'])
    nwbfile = NWBFile(**nwbfile_dict)

    # Read electrophysiology data from HTK files
    print('reading htk acquisition...', flush=True)
    ecog_rate, data = readhtks(ecog_path)
    data = data.squeeze()
    print('done', flush=True)

    # Get electrodes info from mat file
    if htk_config['electrodes_file'] is not None:
        nwbfile = elecs_to_electrode_table(
            nwbfile=nwbfile,
            elecspath=htk_config['electrodes_file'],
        )
        n_electrodes = nwbfile.electrodes[:].shape[0]
        all_elecs = list(range(n_electrodes))
        elecs_region = nwbfile.create_electrode_table_region(
            region=all_elecs,
            description='ECoG electrodes on brain'
        )
    else:
        ecephys_dict = {
            'Device': [{'name': 'auto_device'}],
            'ElectricalSeries': [{'name': 'ECoG',
                                  'description': 'description'}],
            'ElectrodeGroup': [{'name': 'auto_group',
                                'description': 'auto_group',
                                'location': 'location',
                                'device': 'auto_device'}]
        }
        if 'Ecephys' in metadata:
            ecephys_dict.update(metadata['Ecephys'])

        # Create devices
        for dev in ecephys_dict['Device']:
            device = nwbfile.create_device(dev['name'])

        # Electrode groups
        for el_grp in ecephys_dict['ElectrodeGroup']:
            device = nwbfile.devices[el_grp['device']]
            electrode_group = nwbfile.create_electrode_group(
                name=el_grp['name'],
                description=el_grp['description'],
                location=el_grp['location'],
                device=device
            )

        # Electrodes table
        n_electrodes = data.shape[1]
        nwbfile.add_electrode_column('label', 'label of electrode')
        nwbfile.add_electrode_column('bad',
                                     'electrode identified as too noisy')
        nwbfile.add_electrode_column('x_warped',
                                     'x warped onto cvs_avg35_inMNI152')
        nwbfile.add_electrode_column('y_warped',
                                     'y warped onto cvs_avg35_inMNI152')
        nwbfile.add_electrode_column('z_warped',
                                     'z warped onto cvs_avg35_inMNI152')
        nwbfile.add_electrode_column('null',
                                     'if not connected to real electrode')
        bad_elecs_inds = get_bad_elecs(blockpath)
        for elec_counter in range(n_electrodes):
            bad = elec_counter in bad_elecs_inds
            nwbfile.add_electrode(
                id=elec_counter,
                x=np.nan,
                y=np.nan,
                z=np.nan,
                imp=np.nan,
                x_warped=np.nan,
                y_warped=np.nan,
                z_warped=np.nan,
                location='',
                filtering='none',
                group=electrode_group,
                label='',
                bad=bad,
                null=False,
            )
        all_elecs = list(range(n_electrodes))
        elecs_region = nwbfile.create_electrode_table_region(
            region=all_elecs,
            description='ECoG electrodes on brain'
        )

    # Get Bipolar table from file
    if htk_config['bipolar_file'] is not None:
        df = pd.read_csv(htk_config['bipolar_file'], index_col='id', sep='\t')

        # Create bipolar scheme table
        bipolar_scheme_table = BipolarSchemeTable(
            name='bipolar_scheme_table',
            description='desc'
        )

        # Columns for bipolar scheme - all anodes and cathodes within the
        # same bipolar row are considered to have the same group and location
        bipolar_scheme_table.add_column(
            name='group_name',
            description='electrode group name'
        )
        bipolar_scheme_table.add_column(
            name='location',
            description='electrode location'
        )

        # Iterate over anode / cathode rows
        for i, r in df.iterrows():
            if isinstance(r['anodes'], str):
                anodes = [int(a) for a in r['anodes'].split(',')]
            else:
                anodes = [int(r['anodes'])]
            if isinstance(r['cathodes'], str):
                cathodes = [int(a) for a in r['cathodes'].split(',')]
            else:
                cathodes = [int(r['cathodes'])]
            bipolar_scheme_table.add_row(
                anodes=anodes,
                cathodes=cathodes,
                group_name=nwbfile.electrodes['group_name'][anodes[0]],
                location=nwbfile.electrodes['location'][anodes[0]]
            )

        bipolar_scheme_table.anodes.table = nwbfile.electrodes
        bipolar_scheme_table.cathodes.table = nwbfile.electrodes

        # Creates bipolar table region
        elecs_region = DynamicTableRegion(
            name='electrodes',
            data=np.arange(0, df.shape[0]),
            description='desc',
            table=bipolar_scheme_table
        )

        ecephys_ext = EcephysExt(name='ecephys_ext')
        ecephys_ext.bipolar_scheme_table = bipolar_scheme_table
        nwbfile.add_lab_meta_data(ecephys_ext)

    # Stores HTK electrophysiology data as raw, preprocessed or high gamma
    if htk_config['ecephys_type'] == 'raw':
        ecog_es = ElectricalSeries(
            name='ECoG',
            data=H5DataIO(data[:, 0:n_electrodes], compression='gzip'),
            electrodes=elecs_region,
            rate=ecog_rate,
            description='all Wav data')
        nwbfile.add_acquisition(ecog_es)
    elif htk_config['ecephys_type'] == 'preprocessed':
        lfp = LFP()
        ecog_es = ElectricalSeries(
            name='preprocessed',
            data=H5DataIO(data[:, 0:n_electrodes], compression='gzip'),
            electrodes=elecs_region,
            rate=ecog_rate,
            description='all Wav data')
        lfp.add_electrical_series(ecog_es)
        # Creates the ecephys processing module
        ecephys_module = nwbfile.create_processing_module(
            name='ecephys',
            description='preprocessed electrophysiology data'
        )
        ecephys_module.add_data_interface(lfp)
    elif htk_config['ecephys_type'] == 'high_gamma':
        ecog_es = ElectricalSeries(
            name='high_gamma',
            data=H5DataIO(data[:, 0:n_electrodes], compression='gzip'),
            electrodes=elecs_region,
            rate=ecog_rate,
            description='all Wav data')
        # Creates the ecephys processing module
        ecephys_module = nwbfile.create_processing_module(
            name='ecephys',
            description='preprocessed electrophysiology data'
        )
        ecephys_module.add_data_interface(ecog_es)

    # Add ANIN 1
    if htk_config['anin1']['present']:
        fs, data = get_analog(anin_path, 1)
        ts = TimeSeries(
            name=htk_config['anin1']['name'],
            data=data,
            unit='NA',
            rate=fs,
        )
        if htk_config['anin1']['type'] == 'acquisition':
            nwbfile.add_acquisition(ts)
        else:
            nwbfile.add_stimulus(ts)
        print('ANIN1 saved with name "', htk_config['anin1']['name'],
              '" in ', htk_config['anin1']['type'])

    # Add ANIN 2
    if htk_config['anin2']['present']:
        fs, data = get_analog(anin_path, 2)
        ts = TimeSeries(
            name=htk_config['anin2']['name'],
            data=data,
            unit='NA',
            rate=fs,
        )
        if htk_config['anin2']['type'] == 'acquisition':
            nwbfile.add_acquisition(ts)
        else:
            nwbfile.add_stimulus(ts)
        print('ANIN2 saved with name "', htk_config['anin2']['name'],
              '" in ', htk_config['anin2']['type'])

    # Add ANIN 3
    if htk_config['anin3']['present']:
        fs, data = get_analog(anin_path, 3)
        ts = TimeSeries(
            name=htk_config['anin3']['name'],
            data=data,
            unit='NA',
            rate=fs,
        )
        if htk_config['anin3']['type'] == 'acquisition':
            nwbfile.add_acquisition(ts)
        else:
            nwbfile.add_stimulus(ts)
        print('ANIN3 saved with name "', htk_config['anin3']['name'],
              '" in ', htk_config['anin3']['type'])

    # Add ANIN 4
    if htk_config['anin4']['present']:
        fs, data = get_analog(anin_path, 4)
        ts = TimeSeries(
            name=htk_config['anin4']['name'],
            data=data,
            unit='NA',
            rate=fs,
        )
        if htk_config['anin4']['type'] == 'acquisition':
            nwbfile.add_acquisition(ts)
        else:
            nwbfile.add_stimulus(ts)
        print('ANIN4 saved with name "', htk_config['anin4']['name'],
              '" in ', htk_config['anin4']['type'])

    # Add bad time segments
    if os.path.exists(bad_time_file) and os.stat(bad_time_file).st_size:
        bad_time = sio.loadmat(bad_time_file)['badTimeSegments']
        for row in bad_time:
            nwbfile.add_invalid_time_interval(start_time=row[0],
                                              stop_time=row[1],
                                              tags=('ECoG artifact',),
                                              timeseries=ecog_es)

    # Subject
    subject_dict = {'subject_id': subject_id}
    if 'Subject' in metadata:
        subject_dict.update(metadata['Subject'])
    subject = ECoGSubject(**subject_dict)
    nwbfile.subject = subject

    if save_to_file:
        print('Saving HTK content to NWB file...')
        # Export the NWB file
        with NWBHDF5IO(str(out_file_path), manager=manager, mode='w') as io:
            io.write(nwbfile)
        # read check
        with NWBHDF5IO(str(out_file_path), manager=manager, mode='r') as io:
            io.read()
        print('NWB file saved: ', str(out_file_path))

    return nwbfile, out_file_path, subject_id, blockname
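# Hedged usage sketch for chang2nwb() above (not from the source): every path
# below is a hypothetical placeholder for a Chang-lab style block directory,
# and the htk_config keys mirror the example in the docstring.
example_htk_config = {
    'ecephys_path': '/data/EC1/B1/ecephys_htk_files',
    'ecephys_type': 'raw',
    'analog_path': '/data/EC1/B1/analog_htk_files',
    'anin1': {'present': True, 'name': 'microphone', 'type': 'acquisition'},
    'anin2': {'present': True, 'name': 'speaker1', 'type': 'stimulus'},
    'anin3': {'present': False, 'name': 'speaker2', 'type': 'stimulus'},
    'anin4': {'present': False, 'name': 'custom', 'type': 'acquisition'},
    'metadata': {},
    'electrodes_file': None,
    'bipolar_file': None,
}
# nwbfile, out_path, subject_id, blockname = chang2nwb(
#     blockpath='/data/EC1/B1', save_to_file=True,
#     htk_config=example_htk_config)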
class Alyx2NWBConverter:

    def __init__(self, saveloc=None, nwb_metadata_file=None,
                 metadata_obj: Alyx2NWBMetadata = None,
                 one_object: ONE = None, save_raw=False,
                 save_camera_raw=False, complevel=4, shuffle=False,
                 buffer_size=1):
        """
        Retrieve all Alyx session and subject metadata and raw data for an
        eid using the ONE API's load method, map that to NWB-supported
        datatypes, and create an NWB file.

        Parameters
        ----------
        saveloc: str, Path
            save location of nwbfile
        nwb_metadata_file: [dict, str]
            output of Alyx2NWBMetadata as a dict/json location str
        metadata_obj: Alyx2NWBMetadata
        one_object: ONE()
        save_raw: bool
            will load and save large raw files: ecephys.raw.ap/lf.cbin to nwb
        save_camera_raw: bool
            will load and save mice camera movie .mp4: _iblrig_Camera.raw
        complevel: int
            level of compression to apply to raw datasets (0-9, low to high).
            https://docs.h5py.org/en/latest/high/dataset.html
        shuffle: bool
            Enable shuffle I/O filter.
            http://docs.h5py.org/en/latest/high/dataset.html#dataset-shuffle
        """
        self.buffer_size = buffer_size
        self.complevel = complevel
        self.shuffle = shuffle
        if nwb_metadata_file is not None:
            if isinstance(nwb_metadata_file, dict):
                self.nwb_metadata = nwb_metadata_file
            elif isinstance(nwb_metadata_file, str):
                with open(nwb_metadata_file, 'r') as f:
                    self.nwb_metadata = json.load(f)
        elif metadata_obj is not None:
            self.nwb_metadata = metadata_obj.complete_metadata
        else:
            raise Exception(
                'required one of argument: nwb_metadata_file OR metadata_obj')
        if one_object is not None:
            self.one_object = one_object
        elif metadata_obj is not None:
            self.one_object = metadata_obj.one_obj
        else:
            Warning('creating a ONE object and continuing')
            self.one_object = ONE()
        if saveloc is None:
            Warning('saving nwb file in current working directory')
            self.saveloc = str(Path.cwd())
        else:
            self.saveloc = str(saveloc)
        self.eid = self.nwb_metadata["eid"]
        if not isinstance(self.nwb_metadata['NWBFile']['session_start_time'],
                          datetime):
            self.nwb_metadata['NWBFile']['session_start_time'] = \
                datetime.strptime(
                    self.nwb_metadata['NWBFile']['session_start_time'],
                    '%Y-%m-%dT%X').replace(tzinfo=pytz.utc)
            self.nwb_metadata['IBLSubject']['date_of_birth'] = \
                datetime.strptime(
                    self.nwb_metadata['IBLSubject']['date_of_birth'],
                    '%Y-%m-%dT%X').replace(tzinfo=pytz.utc)
        # create nwbfile:
        self.initialize_nwbfile()
        self.no_probes = len(self.nwb_metadata['Probes'])
        if self.no_probes == 0:
            warnings.warn(
                'could not find probe information, will create trials, '
                'behavior, acquisition')
        self.electrode_table_exist = False
        self._one_data = _OneData(self.one_object, self.eid, self.no_probes,
                                  self.nwb_metadata,
                                  save_raw=save_raw,
                                  save_camera_raw=save_camera_raw)

    def initialize_nwbfile(self):
        """
        Creates self.nwbfile, devices and electrode group of nwb file.
        """
        nwbfile_args = dict(identifier=str(uuid.uuid4()))
        nwbfile_args.update(**self.nwb_metadata['NWBFile'])
        self.nwbfile = NWBFile(**nwbfile_args)
        # create devices
        for idevice_meta in self.nwb_metadata['Ecephys']['Device']:
            self.nwbfile.create_device(**idevice_meta)
        if 'ElectrodeGroup' in self.nwb_metadata['Ecephys']:
            self.create_electrode_groups(self.nwb_metadata['Ecephys'])

    def create_electrode_groups(self, metadata_ecephys):
        """
        This method is called at __init__. Use metadata to create
        ElectrodeGroup object(s) in the NWBFile.

        Parameters
        ----------
        metadata_ecephys : dict
            Dict with key:value pairs for defining the Ecephys group from
            where this ElectrodeGroup belongs. This should contain keys for
            required groups such as 'Device', 'ElectrodeGroup', etc.
        """
        for metadata_elec_group in metadata_ecephys['ElectrodeGroup']:
            eg_name = metadata_elec_group['name']
            # Tests if ElectrodeGroup already exists
            aux = [i.name == eg_name for i in self.nwbfile.children]
            if any(aux):
                print(eg_name + ' already exists in current NWBFile.')
            else:
                device_name = metadata_elec_group['device']
                if device_name in self.nwbfile.devices:
                    device = self.nwbfile.devices[device_name]
                else:
                    print('Device ', device_name, ' for ElectrodeGroup ',
                          eg_name, ' does not exist.')
                    print('Make sure ', device_name,
                          ' is defined in metadata.')
                eg_description = metadata_elec_group['description']
                eg_location = metadata_elec_group['location']
                self.nwbfile.create_electrode_group(
                    name=eg_name,
                    location=eg_location,
                    device=device,
                    description=eg_description)

    def check_module(self, name, description=None):
        """
        Check if processing module exists. If not, create it. Then return
        the module.

        Parameters
        ----------
        name: str
        description: str | None (optional)

        Returns
        -------
        pynwb.module
        """
        if name in self.nwbfile.processing:
            return self.nwbfile.processing[name]
        else:
            if description is None:
                description = name
            return self.nwbfile.create_processing_module(name, description)

    def create_stimulus(self):
        """
        Creates stimulus data in nwbfile
        """
        stimulus_list = self._get_data(
            self.nwb_metadata['Stimulus'].get('time_series'))
        for i in stimulus_list:
            self.nwbfile.add_stimulus(pynwb.TimeSeries(**i))

    def create_units(self):
        """
        Units table in nwbfile
        """
        if self.no_probes == 0:
            return
        if not self.electrode_table_exist:
            self.create_electrode_table_ecephys()
        unit_table_list = self._get_data(self.nwb_metadata['Units'])
        # no required arguments for units table. Below are default columns
        # in the table.
        default_args = [
            'id', 'waveform_mean', 'electrodes', 'electrode_group',
            'spike_times', 'obs_intervals'
        ]
        default_ids = _get_default_column_ids(
            default_args, [i['name'] for i in unit_table_list])
        if len(default_ids) != len(default_args):
            warnings.warn(f'could not find all of {default_args} clusters')
        non_default_ids = list(
            set(range(len(unit_table_list))).difference(set(default_ids)))
        default_dict = {
            unit_table_list[id]['name']: unit_table_list[id]['data']
            for id in default_ids
        }
        for cluster_no in range(len(unit_table_list[0]['data'])):
            add_dict = dict()
            for ibl_dataset_name in default_dict:
                if ibl_dataset_name == 'electrodes':
                    add_dict.update({
                        ibl_dataset_name:
                        [default_dict[ibl_dataset_name][cluster_no]]
                    })
                if ibl_dataset_name == 'spike_times':
                    add_dict.update({
                        ibl_dataset_name:
                        default_dict[ibl_dataset_name][cluster_no]
                    })
                elif ibl_dataset_name == 'obs_intervals':
                    # common across all clusters
                    add_dict.update(
                        {ibl_dataset_name: default_dict[ibl_dataset_name]})
                elif ibl_dataset_name == 'electrode_group':
                    add_dict.update({
                        ibl_dataset_name:
                        self.nwbfile.electrode_groups[
                            self.nwb_metadata['Probes'][
                                default_dict[ibl_dataset_name][cluster_no]]
                            ['name']]
                    })
                elif ibl_dataset_name == 'id':
                    if cluster_no >= self._one_data.data_attrs_dump[
                            'unit_table_length'][0]:
                        add_dict.update({
                            ibl_dataset_name:
                            default_dict[ibl_dataset_name][cluster_no] +
                            self._one_data.data_attrs_dump[
                                'unit_table_length'][0]
                        })
                    else:
                        add_dict.update({
                            ibl_dataset_name:
                            default_dict[ibl_dataset_name][cluster_no]
                        })
                elif ibl_dataset_name == 'waveform_mean':
                    # finding the mean along all the channels of the cluster
                    add_dict.update({
                        ibl_dataset_name:
                        np.mean(default_dict[ibl_dataset_name][cluster_no],
                                axis=1)
                    })
            self.nwbfile.add_unit(**add_dict)

        for id in non_default_ids:
            if isinstance(unit_table_list[id]['data'], object):
                unit_table_list[id]['data'] = unit_table_list[id][
                    'data'].tolist()  # convert string numpy
            self.nwbfile.add_unit_column(
                name=unit_table_list[id]['name'],
                description=unit_table_list[id]['description'],
                data=unit_table_list[id]['data'])

    def create_electrode_table_ecephys(self):
        """
        Creates electrode table
        """
        if self.no_probes == 0:
            return
        if self.electrode_table_exist:
            return
        electrode_table_list = self._get_data(
            self.nwb_metadata['ElectrodeTable'])
        # electrode table has required arguments:
        required_args = ['group', 'x', 'y']
        default_ids = _get_default_column_ids(
            required_args, [i['name'] for i in electrode_table_list])
        non_default_ids = list(
            set(range(len(electrode_table_list))).difference(
                set(default_ids)))
        default_dict = {
            electrode_table_list[id]['name']: electrode_table_list[id]['data']
            for id in default_ids
        }
        if 'group' in default_dict:
            group_labels = default_dict['group']
        else:  # else fill with probe zero data.
            group_labels = np.concatenate([
                np.ones(self._one_data.data_attrs_dump[
                    'electrode_table_length'][i], dtype=int) * i
                for i in range(self.no_probes)
            ])
        for electrode_no in range(len(electrode_table_list[0]['data'])):
            if 'x' in default_dict:
                x = default_dict['x'][electrode_no][0]
                y = default_dict['y'][electrode_no][1]
            else:
                x = float('NaN')
                y = float('NaN')
            group_data = self.nwbfile.electrode_groups[
                self.nwb_metadata['Probes'][
                    group_labels[electrode_no]]['name']]
            self.nwbfile.add_electrode(x=x,
                                       y=y,
                                       z=float('NaN'),
                                       imp=float('NaN'),
                                       location='None',
                                       group=group_data,
                                       filtering='none')
        for id in non_default_ids:
            self.nwbfile.add_electrode_column(
                name=electrode_table_list[id]['name'],
                description=electrode_table_list[id]['description'],
                data=electrode_table_list[id]['data'])
        # create probes specific DynamicTableRegion:
        self.probe_dt_region = [
            self.nwbfile.create_electrode_table_region(
                region=list(range(self._one_data.data_attrs_dump[
                    'electrode_table_length'][j])),
                description=i['name'])
            for j, i in enumerate(self.nwb_metadata['Probes'])
        ]
        self.probe_dt_region_all = self.nwbfile.create_electrode_table_region(
            region=list(range(sum(self._one_data.data_attrs_dump[
                'electrode_table_length']))),
            description='AllProbes')
        self.electrode_table_exist = True

    def create_timeseries_ecephys(self):
        """
        create SpikeEventSeries, ElectricalSeries, Spectrum datatypes within
        nwbfile>processing>ecephys
        """
        if self.no_probes == 0:
            return
        if not self.electrode_table_exist:
            self.create_electrode_table_ecephys()
        if 'ecephys' not in self.nwbfile.processing:
            mod = self.nwbfile.create_processing_module(
                'ecephys', 'Processed electrophysiology data of IBL')
        else:
            mod = self.nwbfile.get_processing_module('ecephys')
        for neurodata_type_name, neurodata_type_args_list in \
                self.nwb_metadata['Ecephys']['Ecephys'].items():
            data_retrieved_args_list = self._get_data(
                neurodata_type_args_list
            )  # list of dicts with keys as argument names
            for no, neurodata_type_args in enumerate(
                    data_retrieved_args_list):
                ibl_dataset_name = neurodata_type_args_list[no]['data']
                if 'ElectricalSeries' in neurodata_type_name:
                    timestamps_names = self._one_data.data_attrs_dump[
                        '_iblqc_ephysTimeRms.timestamps']
                    data_names = self._one_data.data_attrs_dump[
                        '_iblqc_ephysTimeRms.rms']
                    for data_idx, data in enumerate(
                            neurodata_type_args['data']):
                        probe_no = [
                            j for j in range(self.no_probes)
                            if self.nwb_metadata['Probes'][j]['name']
                            in data_names[data_idx]
                        ][0]
                        if data.shape[1] > self._one_data.data_attrs_dump[
                                'electrode_table_length'][probe_no]:
                            if 'channels.rawInd' in \
                                    self._one_data.loaded_datasets:
                                channel_idx = self._one_data.loaded_datasets[
                                    'channels.rawInd'][probe_no].data.astype(
                                        'int')
                            else:
                                warnings.warn(
                                    'could not find channels.rawInd')
                                break
                        else:
                            channel_idx = slice(None)
                        mod.add(
                            ElectricalSeries(
                                name=data_names[data_idx],
                                description=neurodata_type_args[
                                    'description'],
                                timestamps=neurodata_type_args['timestamps'][
                                    timestamps_names.index(
                                        data_names[data_idx])],
                                data=data[:, channel_idx],
                                electrodes=self.probe_dt_region[probe_no]))
                elif 'Spectrum' in neurodata_type_name:
                    if ibl_dataset_name in \
                            '_iblqc_ephysSpectralDensity.power':
                        freqs_names = self._one_data.data_attrs_dump[
                            '_iblqc_ephysSpectralDensity.freqs']
                        data_names = self._one_data.data_attrs_dump[
                            '_iblqc_ephysSpectralDensity.power']
                        for data_idx, data in enumerate(
                                neurodata_type_args['data']):
                            mod.add(
                                Spectrum(
                                    name=data_names[data_idx],
                                    frequencies=neurodata_type_args[
                                        'frequencies'][freqs_names.index(
                                            data_names[data_idx])],
                                    power=data))
                elif 'SpikeEventSeries' in neurodata_type_name:
                    neurodata_type_args.update(
                        dict(electrodes=self.probe_dt_region_all))
                    mod.add(
                        pynwb.ecephys.SpikeEventSeries(
                            **neurodata_type_args))

    def create_behavior(self):
        """
        Create behavior processing module
        """
        self.check_module('behavior')
        for behavior_datatype in self.nwb_metadata['Behavior']:
            if behavior_datatype == 'Position':
                position_cont = pynwb.behavior.Position()
                time_series_list_details = self._get_data(
                    self.nwb_metadata['Behavior'][behavior_datatype]
                    ['spatial_series'])
                if len(time_series_list_details) == 0:
                    continue
                # rate_list = [150.0, 60.0, 60.0]  # based on the google doc
                # for _iblrig_body/left/rightCamera.raw
                dataname_list = self._one_data.data_attrs_dump['camera.dlc']
                data_list = time_series_list_details[0]['data']
                timestamps_list = time_series_list_details[0]['timestamps']
                for dataname, data, timestamps in zip(dataname_list,
                                                      data_list,
                                                      timestamps_list):
                    colnames = data.columns
                    data_np = data.to_numpy()
                    x_column_ids = [
                        n for n, k in enumerate(colnames) if 'x' in k
                    ]
                    for x_column_id in x_column_ids:
                        data_loop = data_np[:, x_column_id:x_column_id + 2]
                        position_cont.create_spatial_series(
                            name=dataname + colnames[x_column_id][:-2],
                            data=data_loop,
                            reference_frame='none',
                            timestamps=timestamps,
                            conversion=1e-3)
                self.nwbfile.processing['behavior'].add(position_cont)
            elif not (behavior_datatype == 'BehavioralEpochs'):
                time_series_func = pynwb.TimeSeries
                time_series_list_details = self._get_data(
                    self.nwb_metadata['Behavior'][behavior_datatype]
                    ['time_series'])
                if len(time_series_list_details) == 0:
                    continue
                time_series_list_obj = []
                for i in time_series_list_details:
                    unit = ('radians/sec' if 'velocity' in i['name']
                            else 'radians')
                    time_series_list_obj.append(
                        time_series_func(**i, unit=unit))
                func = getattr(pynwb.behavior, behavior_datatype)
                self.nwbfile.processing['behavior'].add(
                    func(time_series=time_series_list_obj))
            else:
                time_series_func = pynwb.epoch.TimeIntervals
                time_series_list_details = self._get_data(
                    self.nwb_metadata['Behavior'][behavior_datatype]
                    ['time_intervals'])
                if len(time_series_list_details) == 0:
                    continue
                for k in time_series_list_details:
                    time_intervals = time_series_func('BehavioralEpochs')
                    for time_interval in k['timestamps']:
                        time_intervals.add_interval(
                            start_time=time_interval[0],
                            stop_time=time_interval[1])
                    time_intervals.add_column(k['name'],
                                              k['description'],
                                              data=k['data'])
                    self.nwbfile.processing['behavior'].add(time_intervals)

    def create_acquisition(self):
        """
        Acquisition data like audiospectrogram (raw beh data), nidq (raw
        ephys data), raw camera data. These are independent of probe type.
        """
        for neurodata_type_name, neurodata_type_args_list in \
                self.nwb_metadata['Acquisition'].items():
            data_retrieved_args_list = self._get_data(
                neurodata_type_args_list)
            for neurodata_type_args in data_retrieved_args_list:
                if neurodata_type_name == 'ImageSeries':
                    for types, times in zip(
                            neurodata_type_args['data'],
                            neurodata_type_args['timestamps']):
                        customargs = dict(name='camera_raw',
                                          external_file=[str(types)],
                                          format='external',
                                          timestamps=times,
                                          unit='n.a.')
                        self.nwbfile.add_acquisition(
                            ImageSeries(**customargs))
                elif neurodata_type_name == 'DecompositionSeries':
                    neurodata_type_args['bands'] = np.squeeze(
                        neurodata_type_args['bands'])
                    freqs = DynamicTable(
                        'bands', 'spectrogram frequencies',
                        id=np.arange(neurodata_type_args['bands'].shape[0]))
                    freqs.add_column('freq', 'frequency value',
                                     data=neurodata_type_args['bands'])
                    neurodata_type_args.update(dict(bands=freqs))
                    temp = neurodata_type_args['data'][:, :, np.newaxis]
                    neurodata_type_args['data'] = np.moveaxis(
                        temp, [0, 1, 2], [0, 2, 1])
                    ts = neurodata_type_args.pop('timestamps')
                    starting_time = ts[0][0] if isinstance(
                        ts[0], np.ndarray) else ts[0]
                    neurodata_type_args.update(
                        dict(starting_time=np.float64(starting_time),
                             rate=1 / np.mean(np.diff(ts.squeeze())),
                             unit='sec'))
                    self.nwbfile.add_acquisition(
                        DecompositionSeries(**neurodata_type_args))
                elif neurodata_type_name == 'ElectricalSeries':
                    if not self.electrode_table_exist:
                        self.create_electrode_table_ecephys()
                    if neurodata_type_args['name'] in ['raw.lf', 'raw.ap']:
                        for probe_no in range(self.no_probes):
                            if neurodata_type_args['data'][probe_no].shape[1] \
                                    > self._one_data.data_attrs_dump[
                                        'electrode_table_length'][probe_no]:
                                if 'channels.rawInd' in \
                                        self._one_data.loaded_datasets:
                                    channel_idx = \
                                        self._one_data.loaded_datasets[
                                            'channels.rawInd'][
                                            probe_no].data.astype('int')
                                else:
                                    warnings.warn(
                                        'could not find channels.rawInd')
                                    break
                            else:
                                channel_idx = slice(None)
                            self.nwbfile.add_acquisition(
                                ElectricalSeries(
                                    name=neurodata_type_args['name'] + '_' +
                                    self.nwb_metadata['Probes'][probe_no]
                                    ['name'],
                                    # round starting times of the order of 1e-5
                                    starting_time=np.abs(
                                        np.round(
                                            neurodata_type_args['timestamps']
                                            [probe_no][0, 1], 2)),
                                    rate=neurodata_type_args['data']
                                    [probe_no].fs,
                                    data=H5DataIO(
                                        DataChunkIterator(
                                            _iter_datasetview(
                                                neurodata_type_args['data']
                                                [probe_no],
                                                channel_ids=channel_idx),
                                            buffer_size=self.buffer_size),
                                        compression=True,
                                        shuffle=self.shuffle,
                                        compression_opts=self.complevel),
                                    electrodes=self.probe_dt_region[probe_no],
                                    channel_conversion=neurodata_type_args[
                                        'data'][probe_no]
                                    .channel_conversion_sample2v[
                                        neurodata_type_args['data']
                                        [probe_no].type][channel_idx]))
                    elif neurodata_type_args['name'] in ['raw.nidq']:
                        self.nwbfile.add_acquisition(
                            ElectricalSeries(**neurodata_type_args))

    def create_probes(self):
        """
        Fills in all the probes metadata into the custom NeuroPixels
        extension.
        """
        for i in self.nwb_metadata['Probes']:
            self.nwbfile.add_device(IblProbes(**i))

    def create_iblsubject(self):
        """
        Populates the custom subject extension for IBL mice data
        """
        self.nwbfile.subject = IblSubject(**self.nwb_metadata['IBLSubject'])

    def create_lab_meta_data(self):
        """
        Populates the custom lab_meta_data extension for IBL sessions data
        """
        self.nwbfile.add_lab_meta_data(
            IblSessionData(**self.nwb_metadata['IBLSessionsData']))

    def create_trials(self):
        table_data = self._get_data(self.nwb_metadata['Trials'])
        required_fields = ['start_time', 'stop_time']
        required_data = [i for i in table_data
                         if i['name'] in required_fields]
        optional_data = [i for i in table_data
                         if i['name'] not in required_fields]
        if len(required_fields) != len(required_data):
            warnings.warn(
                'could not find required datasets: trials.start_time, '
                'trials.stop_time, skipping trials table')
            return
        for start_time, stop_time in zip(required_data[0]['data'][:, 0],
                                         required_data[1]['data'][:, 1]):
            self.nwbfile.add_trial(start_time=start_time,
                                   stop_time=stop_time)
        for op_data in optional_data:
            if op_data['data'].shape[0] == required_data[0]['data'].shape[0]:
                self.nwbfile.add_trial_column(
                    name=op_data['name'],
                    description=op_data['description'],
                    data=op_data['data'])
            else:
                warnings.warn(
                    f'shape of trials.{op_data["name"]} does not match '
                    f'other trials.* datasets')

    def _get_data(self, sub_metadata):
        """
        Uses OneData class to query ONE datasets on server and download them
        locally.

        Parameters
        ----------
        sub_metadata: [list, dict]
            list of metadata dicts containing a data key with a dataset type
            string as value to retrieve data from (npy, tsv etc)

        Returns
        -------
        out_dict: dict
            dictionary with actual data loaded in the data field
        """
        include_idx = []
        out_dict_trim = []
        alt_datatypes = ['bands', 'power', 'frequencies', 'timestamps']
        if isinstance(sub_metadata, list):
            out_dict = deepcopy(sub_metadata)
        elif isinstance(sub_metadata, dict):
            out_dict = deepcopy(list(sub_metadata))
        else:
            return []
        req_datatypes = ['data']
        for count, neurodata_type_args in enumerate(out_dict):
            for alt_names in alt_datatypes:
                if neurodata_type_args.get(alt_names):
                    # in case of DecompositionSeries, Spectrum
                    neurodata_type_args[alt_names] = \
                        self._one_data.download_dataset(
                            neurodata_type_args[alt_names],
                            neurodata_type_args['name'])
                    req_datatypes.append(alt_names)
            if neurodata_type_args['name'] == 'id':
                # valid in case of units table.
                neurodata_type_args['data'] = \
                    self._one_data.download_dataset(
                        neurodata_type_args['data'], 'cluster_id')
            else:
                out_dict[count]['data'] = self._one_data.download_dataset(
                    neurodata_type_args['data'],
                    neurodata_type_args['name'])
            if all([out_dict[count][i] is not None for i in req_datatypes]):
                include_idx.extend([count])
        out_dict_trim.extend([out_dict[j0] for j0 in include_idx])
        return out_dict_trim

    def run_conversion(self):
        """
        Single method to create all datasets and metadata in nwbfile in one
        go.
        """
        execute_list = [
            self.create_stimulus, self.create_trials,
            self.create_electrode_table_ecephys,
            self.create_timeseries_ecephys, self.create_units,
            self.create_behavior, self.create_probes,
            self.create_iblsubject, self.create_lab_meta_data,
            self.create_acquisition
        ]
        t = tqdm(execute_list)
        for i in t:
            t.set_postfix(current='creating nwb ' + i.__name__.split('_')[-1])
            i()
        print('done converting')

    def write_nwb(self, read_check=True):
        """
        After run_conversion(), write nwbfile to disk with the loaded
        nwbfile.

        Parameters
        ----------
        read_check: bool
            Round trip verification
        """
        print('Saving to file, please wait...')
        with NWBHDF5IO(self.saveloc, 'w') as io:
            io.write(self.nwbfile)
            print('File successfully saved at: ', str(self.saveloc))

        if read_check:
            with NWBHDF5IO(self.saveloc, 'r') as io:
                io.read()
                print('Read check: OK')
class LabMetaDataExtensionTest(unittest.TestCase):

    def setUp(self):
        self.nwbfile = NWBFile('description', 'id',
                               datetime.now().astimezone())

    def test_add_lab_metadata(self):
        # Add rig information
        rig = {
            'name': 'rig',
            'rig': 'VRTrain6',
            'simulationMode': 0,
            'hasDAQ': 1,
            'hasSyncComm': 0,
            'minIterationDT': 0.01,
            'arduinoPort': 'COM5',
            'sensorDotsPerRev': [1967.6, 1967.6, 1967.6, 1967.6],
            'ballCircumference': 63.8,
            'toroidXFormP1': 0.5193,
            'toroidXFormP2': 0.5171,
            'colorAdjustment': [0., 0.4, 0.5],
            'soundAdjustment': 0.2,
            'nidaqDevice': 1,
            'nidaqPort': 1,
            'nidaqLines': [0, 11],
            'syncClockChannel': 5,
            'syncDataChannel': 6,
            'rewardChannel': 0,
            'rewardSize': 0.004,
            'rewardDuration': 0.05,
            'laserChannel': 1,
            'rightPuffChannel': 2,
            'leftPuffChannel': 3,
            'webcam_name': 'Live! Cam Sync HD VF0770'
        }
        rig_extension = RigExtension(**rig)

        # Create mazes table
        maze_extension = MazeExtension(name='mazes',
                                       description='description of the mazes')
        mazes_dict = {k: 'test' for k in maze_extension.mazes_attr}
        maze_extension.add_row(**mazes_dict)

        # Create LabMetaData container
        lab_metadata_dict = dict(
            name='LabMetaData',
            experiment_name='test',
            world_file_name='test',
            protocol_name='test',
            stimulus_bank_path='test',
            commit_id='test',
            location='test',
            num_trials=245,
            session_end_time=datetime.utcnow().isoformat(),
            rig=rig_extension,
            mazes=maze_extension)
        lab_metadata = LabMetaDataExtension(**lab_metadata_dict)

        # Add to file
        self.nwbfile.add_lab_meta_data(lab_metadata)

        filename = 'test_labmetadata.nwb'
        with NWBHDF5IO(filename, 'w') as io:
            io.write(self.nwbfile)

        with NWBHDF5IO(filename, mode='r', load_namespaces=True) as io:
            nwbfile = io.read()
            for metadata_key, metadata_value in lab_metadata_dict.items():
                if isinstance(metadata_value, RigExtension):
                    for rig_key, rig_value in rig.items():
                        self.assertEqual(
                            rig_value,
                            getattr(metadata_value, rig_key, None))
                elif isinstance(metadata_value, MazeExtension):
                    for mazes_key, mazes_value in mazes_dict.items():
                        assert mazes_value in getattr(metadata_value,
                                                      mazes_key, None).data
                else:
                    self.assertEqual(
                        metadata_value,
                        getattr(nwbfile.lab_meta_data['LabMetaData'],
                                metadata_key, None))

        os.remove(filename)
from datetime import datetime

import numpy as np
from pynwb import NWBHDF5IO, NWBFile
from ndx_simulation_output import (CompartmentSeries, Compartments,
                                   SimulationMetaData)

compartments = Compartments()
compartments.add_row(number=[0, 1, 2, 3, 4],
                     position=[0.1, 0.2, 0.3, 0.4, 0.5])
compartments.add_row(number=[0], position=[np.nan])

nwbfile = NWBFile('description', 'id', datetime.now().astimezone())
nwbfile.add_lab_meta_data(SimulationMetaData(compartments=compartments))
cs = CompartmentSeries('membrane_potential', np.random.randn(10, 6),
                       compartments=compartments, unit='V', rate=100.)
nwbfile.add_acquisition(cs)

with NWBHDF5IO('test_compartment_series.nwb', 'w') as io:
    io.write(nwbfile)

with NWBHDF5IO('test_compartment_series.nwb', mode='r') as io:
    io.read()

assert all(cs.find_compartments(0, [1, 3]) == [1, 3])
assert all(cs.find_compartments(1) == 5)
def test_ext():
    nwbfile = NWBFile('description', 'id', datetime.now().astimezone())
    device = nwbfile.create_device('device_name')
    electrode_group = nwbfile.create_electrode_group('electrode_group',
                                                     'desc', 'loc',
                                                     device=device)
    for i in np.arange(20.):
        nwbfile.add_electrode(i, i, i, np.nan, 'loc', 'filt', electrode_group)

    electrodes = DynamicTableRegion(
        name='electrodes',
        data=np.arange(0, 3),
        description='desc',
        table=nwbfile.electrodes,
    )

    source_ec_series = ElectricalSeries(
        name='source_ec_series',
        description='desc',
        data=np.random.rand(100, 3),
        rate=1000.,
        electrodes=electrodes,
    )
    nwbfile.add_acquisition(source_ec_series)

    bipolar_scheme_table = BipolarSchemeTable(name='bipolar_scheme',
                                              description='desc')
    bipolar_scheme_table.add_row(anodes=[0], cathodes=[1])
    bipolar_scheme_table.add_row(anodes=[0, 1], cathodes=[2, 3])
    bipolar_scheme_table.add_row(anodes=[0, 1], cathodes=[2])

    bipolar_scheme_table['anodes'].target.table = nwbfile.electrodes
    bipolar_scheme_table['cathodes'].target.table = nwbfile.electrodes

    bipolar_scheme_region = DynamicTableRegion(
        name='electrodes',
        data=np.arange(0, 3),
        description='desc',
        table=bipolar_scheme_table,
    )

    ec_series = ElectricalSeries(
        name='dest_ec_series',
        description='desc',
        data=np.random.rand(100, 3),
        rate=1000.,
        electrodes=bipolar_scheme_region,
    )
    nwbfile.add_acquisition(ec_series)

    ndx_bipolar_scheme = NdxBipolarScheme(
        bipolar_scheme_tables=[bipolar_scheme_table], source=source_ec_series)
    nwbfile.add_lab_meta_data(ndx_bipolar_scheme)

    with NWBHDF5IO('test_nwb.nwb', 'w') as io:
        io.write(nwbfile)

    with NWBHDF5IO('test_nwb.nwb', 'r', load_namespaces=True) as io:
        nwbfile = io.read()
        assert_array_equal(
            nwbfile.acquisition['dest_ec_series'].electrodes
            .table['anodes'][2]['x'],
            [0., 1.])

    os.remove('test_nwb.nwb')
def convert(input_file,
            session_start_time,
            subject_date_of_birth,
            subject_id='I5',
            subject_description='naive',
            subject_genotype='wild-type',
            subject_sex='M',
            subject_weight='11.6g',
            subject_species='Mus musculus',
            subject_brain_region='Medial Entorhinal Cortex',
            surgery='Probe: +/-3.3mm ML, 0.2mm A of sinus, then as deep as possible',
            session_id='npI5_0417_baseline_1',
            experimenter='Kei Masuda',
            experiment_description='Virtual Hallway Task',
            institution='Stanford University School of Medicine',
            lab_name='Giocomo Lab'):
    """
    Read in the .mat file specified by input_file and convert to .nwb format.

    Parameters
    ----------
    input_file : str
        path to the .mat file to be converted
    subject_id : string
        the unique subject ID number for the subject of the experiment
    subject_date_of_birth : datetime, ISO 8601
        the date and time the subject was born
    subject_description : string
        important information specific to this subject that differentiates
        it from other members of its species
    subject_genotype : string
        the genetic strain of this species
    subject_sex : string
        Male or Female
    subject_weight : string
        the weight of the subject around the time of the experiment
    subject_species : string
        the name of the species of the subject
    subject_brain_region : string
        the name of the brain region where the electrode probe is
        recording from
    surgery : str
        information about the subject's surgery to implant electrodes
    session_id : string
        human-readable ID for the experiment session that has a one-to-one
        relationship with a recording session
    session_start_time : datetime
        date and time that the experiment started
    experimenter : string
        who ran the experiment, first and last name
    experiment_description : string
        what task was being run during the session
    institution : string
        what institution the experiment was performed in
    lab_name : string
        the lab where the experiment was performed

    Returns
    -------
    nwbfile : NWBFile
        The contents of the .mat file converted into the NWB format.
        The nwbfile is saved to disk using NWBHDF5IO.
    """
    # input matlab data
    matfile = hdf5storage.loadmat(input_file)

    # output path for nwb data
    def replace_last(source_string, replace_what, replace_with):
        head, _sep, tail = source_string.rpartition(replace_what)
        return head + replace_with + tail

    outpath = replace_last(input_file, '.mat', '.nwb')

    create_date = datetime.today()
    timezone_cali = pytz.timezone('US/Pacific')
    create_date_tz = timezone_cali.localize(create_date)

    # if loading data from config.yaml, convert string dates into datetime
    if isinstance(session_start_time, str):
        session_start_time = datetime.strptime(session_start_time,
                                               '%B %d, %Y %I:%M%p')
        session_start_time = timezone_cali.localize(session_start_time)

    if isinstance(subject_date_of_birth, str):
        subject_date_of_birth = datetime.strptime(subject_date_of_birth,
                                                  '%B %d, %Y %I:%M%p')
        subject_date_of_birth = timezone_cali.localize(subject_date_of_birth)

    # create unique identifier for this experimental session
    uuid_identifier = uuid.uuid1()

    # Create NWB file
    nwbfile = NWBFile(
        session_description=experiment_description,  # required
        identifier=uuid_identifier.hex,  # required
        session_id=session_id,
        experiment_description=experiment_description,
        experimenter=experimenter,
        surgery=surgery,
        institution=institution,
        lab=lab_name,
        session_start_time=session_start_time,  # required
        file_create_date=create_date_tz)  # optional

    # add information about the subject of the experiment
    experiment_subject = Subject(subject_id=subject_id,
                                 species=subject_species,
                                 description=subject_description,
                                 genotype=subject_genotype,
                                 date_of_birth=subject_date_of_birth,
                                 weight=subject_weight,
                                 sex=subject_sex)
    nwbfile.subject = experiment_subject

    # adding constants via LabMetaData container
    # constants
    sample_rate = float(matfile['sp'][0]['sample_rate'][0][0][0])
    n_channels_dat = int(matfile['sp'][0]['n_channels_dat'][0][0][0])
    dat_path = matfile['sp'][0]['dat_path'][0][0][0]
    offset = int(matfile['sp'][0]['offset'][0][0][0])
    data_dtype = matfile['sp'][0]['dtype'][0][0][0]
    hp_filtered = bool(matfile['sp'][0]['hp_filtered'][0][0][0])
    vr_session_offset = matfile['sp'][0]['vr_session_offset'][0][0][0]
    # container
    lab_metadata = LabMetaData_ext(name='LabMetaData',
                                   acquisition_sampling_rate=sample_rate,
                                   number_of_electrodes=n_channels_dat,
                                   file_path=dat_path,
                                   bytes_to_skip=offset,
                                   raw_data_dtype=data_dtype,
                                   high_pass_filtered=hp_filtered,
                                   movie_start_time=vr_session_offset)
    nwbfile.add_lab_meta_data(lab_metadata)

    # Adding trial information
    nwbfile.add_trial_column(
        'trial_contrast',
        'visual contrast of the maze through which the mouse is running')
    trial = np.ravel(matfile['trial'])
    trial_nums = np.unique(trial)
    position_time = np.ravel(matfile['post'])
    # Matlab trial numbers start at 1. To correctly index the trial_contrast
    # vector, subtract 1 from 'num' so the index starts at 0.
    for num in trial_nums:
        trial_times = position_time[trial == num]
        nwbfile.add_trial(start_time=trial_times[0],
                          stop_time=trial_times[-1],
                          trial_contrast=matfile['trial_contrast'][num - 1][0])

    # Add mouse position inside the virtual environment:
    position = Position()
    position_virtual = np.ravel(matfile['posx'])
    sampling_rate = 1 / (position_time[1] - position_time[0])
    position.create_spatial_series(
        name='Position',
        data=position_virtual,
        starting_time=position_time[0],
        rate=sampling_rate,
        reference_frame='The start of the trial, which begins at the start '
                        'of the virtual hallway.',
        conversion=0.01,
        description='Subject position in the virtual hallway.',
        comments='The values should be >0 and <400cm. Values greater than '
                 '400cm mean that the mouse briefly exited the maze.',
    )

    # physical position on the mouse wheel
    physical_posx = position_virtual
    trial_gain = np.ravel(matfile['trial_gain'])
    for num in trial_nums:
        physical_posx[trial == num] = \
            physical_posx[trial == num] / trial_gain[num - 1]

    position.create_spatial_series(
        name='PhysicalPosition',
        data=physical_posx,
        starting_time=position_time[0],
        rate=sampling_rate,
        reference_frame='Location on wheel re-referenced to zero '
                        'at the start of each trial.',
        conversion=0.01,
        description='Physical location on the wheel measured '
                    'since the beginning of the trial.',
        comments='Physical location found by dividing the '
                 'virtual position by the "trial_gain"')
    nwbfile.add_acquisition(position)

    # Add timing of lick events, as well as mouse's virtual position during
    # lick event
    lick_events = BehavioralEvents()
    lick_events.create_timeseries(
        'LickEvents',
        data=np.ravel(matfile['lickx']),
        timestamps=np.ravel(matfile['lickt']),
        unit='centimeter',
        description='Subject position in virtual hallway during the lick.')
    nwbfile.add_acquisition(lick_events)

    # Add information on the visual stimulus that was shown to the subject.
    # Assumed rate=60 [Hz]. Update if necessary.
    # Update external_file to link to Unity environment file.
    visualization = ImageSeries(
        name='ImageSeries',
        unit='seconds',
        format='external',
        external_file=list(['https://unity.com/VR-and-AR-corner']),
        starting_time=vr_session_offset,
        starting_frame=[[0]],
        rate=float(60),
        description='virtual Unity environment that the mouse navigates '
                    'through')
    nwbfile.add_stimulus(visualization)

    # Add the recording device, a neuropixel probe
    recording_device = nwbfile.create_device(name='neuropixel_probes')
    electrode_group_description = \
        'single neuropixels probe http://www.open-ephys.org/neuropixelscorded'
    electrode_group_name = 'probe1'
    electrode_group = nwbfile.create_electrode_group(
        electrode_group_name,
        description=electrode_group_description,
        location=subject_brain_region,
        device=recording_device)

    # Add information about each electrode
    xcoords = np.ravel(matfile['sp'][0]['xcoords'][0])
    ycoords = np.ravel(matfile['sp'][0]['ycoords'][0])
    data_filtered_flag = matfile['sp'][0]['hp_filtered'][0][0]
    if data_filtered_flag:
        filter_desc = ('The raw voltage signals from the electrodes were '
                       'high-pass filtered')
    else:
        filter_desc = ('The raw voltage signals from the electrodes were '
                       'not high-pass filtered')

    num_recording_electrodes = xcoords.shape[0]
    recording_electrodes = range(0, num_recording_electrodes)

    # create electrode columns for the x,y location on the neuropixel probe;
    # the standard x,y,z locations are reserved for the Allen Brain Atlas
    # location
    nwbfile.add_electrode_column('rel_x', 'electrode x-location on the probe')
    nwbfile.add_electrode_column('rel_y', 'electrode y-location on the probe')
    for idx in recording_electrodes:
        nwbfile.add_electrode(id=idx,
                              x=np.nan,
                              y=np.nan,
                              z=np.nan,
                              rel_x=float(xcoords[idx]),
                              rel_y=float(ycoords[idx]),
                              imp=np.nan,
                              location='medial entorhinal cortex',
                              filtering=filter_desc,
                              group=electrode_group)

    # Add information about each unit, termed 'cluster' in giocomo data.
    # create new columns in unit table.
    nwbfile.add_unit_column(
        'quality',
        'labels given to clusters during manual sorting in phy (1=MUA, '
        '2=Good, 3=Unsorted)')

    # cluster information
    cluster_ids = matfile['sp'][0]['cids'][0][0]
    cluster_quality = matfile['sp'][0]['cgs'][0][0]
    # spikes in time
    spike_times = np.ravel(
        matfile['sp'][0]['st'][0])  # the time of each spike
    spike_cluster = np.ravel(
        matfile['sp'][0]['clu'][0])  # the cluster_id that spiked at that time

    for i, cluster_id in enumerate(cluster_ids):
        unit_spike_times = spike_times[spike_cluster == cluster_id]
        waveforms = matfile['sp'][0]['temps'][0][cluster_id]
        nwbfile.add_unit(id=int(cluster_id),
                         spike_times=unit_spike_times,
                         quality=cluster_quality[i],
                         waveform_mean=waveforms,
                         electrode_group=electrode_group)

    # Trying to add another Units table to hold the results of the automatic
    # spike sorting: create TemplateUnits units table
    template_units = Units(
        name='TemplateUnits',
        description='units assigned during automatic spike sorting')
    template_units.add_column(
        'tempScalingAmps',
        'scaling amplitude applied to the template when extracting spike',
        index=True)

    # information on extracted spike templates
    spike_templates = np.ravel(matfile['sp'][0]['spikeTemplates'][0])
    spike_template_ids = np.unique(spike_templates)
    # template scaling amplitudes
    temp_scaling_amps = np.ravel(matfile['sp'][0]['tempScalingAmps'][0])

    for i, spike_template_id in enumerate(spike_template_ids):
        template_spike_times = spike_times[
            spike_templates == spike_template_id]
        temp_scaling_amps_per_template = temp_scaling_amps[
            spike_templates == spike_template_id]
        template_units.add_unit(
            id=int(spike_template_id),
            spike_times=template_spike_times,
            electrode_group=electrode_group,
            tempScalingAmps=temp_scaling_amps_per_template)

    # create ecephys processing module
    spike_template_module = nwbfile.create_processing_module(
        name='ecephys',
        description='units assigned during automatic spike sorting')

    # add template_units table to processing module
    spike_template_module.add(template_units)

    print(nwbfile)
    print('converted to NWB:N')
    print('saving ...')

    with NWBHDF5IO(outpath, 'w') as io:
        io.write(nwbfile)
    print('saved', outpath)
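# Hedged usage sketch for convert() above (not from the source): the .mat
# path and date strings are hypothetical placeholders; dates follow the
# '%B %d, %Y %I:%M%p' format the function parses.
# convert(
#     input_file='/data/npI5_0417_baseline_1.mat',
#     session_start_time='April 17, 2019 11:00AM',
#     subject_date_of_birth='February 15, 2019 12:00PM',
# )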
from datetime import datetime

from pynwb import NWBHDF5IO, NWBFile
from ndx_task import Task, Tasks

task1 = Task(name='rest', description='animal is resting', rest=True)
task2 = Task(name='theta_maze',
             description='animal is doing a figure-8 task in the theta maze',
             navigation=True)
tasks = Tasks(name='tasks', tasks=[task1, task2])

session_start_time = datetime.now().astimezone()
nwb = NWBFile('session_description', 'identifier', session_start_time)
nwb.add_lab_meta_data(tasks)

with NWBHDF5IO('test_task.nwb', 'w') as io:
    io.write(nwb)

with NWBHDF5IO('test_task.nwb', 'r') as io:
    nwb2 = io.read()
    assert nwb2.lab_meta_data['tasks']['rest'].rest
import numpy as np
from pynwb import NWBFile, NWBHDF5IO, ProcessingModule, TimeSeries, get_manager
from pynwb.device import Device
from pynwb.epoch import TimeIntervals
from pynwb.file import Subject
from ndx_bipolar_scheme import BipolarSchemeTable, EcephysExt
from ndx_ecog import CorticalSurfaces, ECoGSubject

# id_generator() and copy_obj() are helper functions defined elsewhere in
# the module this function comes from.


def nwb_copy_file(old_file, new_file, cp_objs=None, save_to_file=True):
    """
    Copy fields defined in 'cp_objs' from an existing NWB file to a new NWB file.

    Parameters
    ----------
    old_file : str, path or NWBFile
        String or path to an NWB file, e.g. '/path/to/old_file.nwb'.
        Alternatively, an NWBFile object already in memory.
    new_file : str or path
        String such as '/path/to/new_file.nwb'.
    cp_objs : dict
        Name:Value pairs (Group:Children) listing the groups and the
        respective children to copy from the current NWB file.
        Children can be:
        - Boolean, indicating an attribute (e.g. for institution, lab)
        - List of strings, containing several children names
        Example:
        {'institution': True,
         'lab': True,
         'acquisition': ['microphone'],
         'ecephys': ['LFP', 'DecompositionSeries']}
    save_to_file : bool
        If True, saves directly to new_file.nwb. If False, only returns
        nwb_new.

    Returns
    -------
    nwb_new : NWBFile object
    """
    if cp_objs is None:  # avoid a mutable default argument
        cp_objs = {}
    manager = get_manager()

    # Get from an NWBFile object in memory or from file
    if isinstance(old_file, NWBFile):
        nwb_old = old_file
        io1 = None
    else:
        io1 = NWBHDF5IO(str(old_file), 'r', manager=manager,
                        load_namespaces=True)
        nwb_old = io1.read()

    # Create new file
    nwb_new = NWBFile(
        session_description=str(nwb_old.session_description),
        identifier=id_generator(),
        session_start_time=nwb_old.session_start_time,
    )

    with NWBHDF5IO(new_file, mode='w', manager=manager,
                   load_namespaces=False) as io2:
        # Institution name --------------------------------------------------
        if 'institution' in cp_objs:
            nwb_new.institution = str(nwb_old.institution)

        # Lab name ------------------------------------------------------------
        if 'lab' in cp_objs:
            nwb_new.lab = str(nwb_old.lab)

        # Session id ----------------------------------------------------------
        if 'session' in cp_objs:
            nwb_new.session_id = nwb_old.session_id

        # Devices ---------------------------------------------------------
        if 'devices' in cp_objs:
            for aux in list(nwb_old.devices.keys()):
                dev = Device(nwb_old.devices[aux].name)
                nwb_new.add_device(dev)

        # Electrode groups ------------------------------------------------
        if ('electrode_groups' in cp_objs
                and nwb_old.electrode_groups is not None):
            for aux in list(nwb_old.electrode_groups.keys()):
                nwb_new.create_electrode_group(
                    name=str(nwb_old.electrode_groups[aux].name),
                    description=str(nwb_old.electrode_groups[aux].description),
                    location=str(nwb_old.electrode_groups[aux].location),
                    device=nwb_new.get_device(
                        nwb_old.electrode_groups[aux].device.name))

        # Electrodes --------------------------------------------------------
        if 'electrodes' in cp_objs and nwb_old.electrodes is not None:
            nElec = len(nwb_old.electrodes['x'].data[:])
            for aux in np.arange(nElec):
                nwb_new.add_electrode(
                    x=nwb_old.electrodes['x'][aux],
                    y=nwb_old.electrodes['y'][aux],
                    z=nwb_old.electrodes['z'][aux],
                    imp=nwb_old.electrodes['imp'][aux],
                    location=str(nwb_old.electrodes['location'][aux]),
                    filtering=str(nwb_old.electrodes['filtering'][aux]),
                    group=nwb_new.get_electrode_group(
                        nwb_old.electrodes['group'][aux].name),
                    group_name=str(nwb_old.electrodes['group_name'][aux]))
            # If there are custom columns, copy them too
            new_vars = list(nwb_old.electrodes.colnames)
            default_vars = ['x', 'y', 'z', 'imp', 'location', 'filtering',
                            'group', 'group_name']
            for var in default_vars:
                if var in new_vars:
                    new_vars.remove(var)
            for var in new_vars:
                if var == 'label':
                    var_data = [str(elem)
                                for elem in nwb_old.electrodes[var].data[:]]
                else:
                    var_data = np.array(nwb_old.electrodes[var].data[:])
                nwb_new.add_electrode_column(
                    name=str(var),
                    description=str(nwb_old.electrodes[var].description),
                    data=var_data)

        # If there is a bipolar scheme for electrodes
        for v in nwb_old.lab_meta_data.values():
            if (isinstance(v, EcephysExt)
                    and hasattr(v, 'bipolar_scheme_table')):
                bst_old = v.bipolar_scheme_table
                bst_new = BipolarSchemeTable(
                    name=bst_old.name, description=bst_old.description)
                ecephys_ext = EcephysExt(name=v.name)
                ecephys_ext.bipolar_scheme_table = bst_new
                nwb_new.add_lab_meta_data(ecephys_ext)

        # Epochs ------------------------------------------------------------
        if 'epochs' in cp_objs and nwb_old.epochs is not None:
            nEpochs = len(nwb_old.epochs['start_time'].data[:])
            for i in np.arange(nEpochs):
                nwb_new.add_epoch(
                    start_time=nwb_old.epochs['start_time'].data[i],
                    stop_time=nwb_old.epochs['stop_time'].data[i])
            # If there are custom columns, copy them too
            new_vars = list(nwb_old.epochs.colnames)
            default_vars = ['start_time', 'stop_time', 'tags', 'timeseries']
            for var in default_vars:
                if var in new_vars:
                    new_vars.remove(var)
            for var in new_vars:
                nwb_new.add_epoch_column(
                    name=var,
                    description=nwb_old.epochs[var].description,
                    data=nwb_old.epochs[var].data[:])

        # Invalid times -----------------------------------------------------
        if 'invalid_times' in cp_objs and nwb_old.invalid_times is not None:
            nInvalid = len(nwb_old.invalid_times['start_time'][:])
            for aux in np.arange(nInvalid):
                nwb_new.add_invalid_time_interval(
                    start_time=nwb_old.invalid_times['start_time'][aux],
                    stop_time=nwb_old.invalid_times['stop_time'][aux])

        # Trials --------------------------------------------------------------
        if 'trials' in cp_objs and nwb_old.trials is not None:
            nTrials = len(nwb_old.trials['start_time'])
            for aux in np.arange(nTrials):
                nwb_new.add_trial(
                    start_time=nwb_old.trials['start_time'][aux],
                    stop_time=nwb_old.trials['stop_time'][aux])
            # If there are custom columns, copy them too
            new_vars = list(nwb_old.trials.colnames)
            default_vars = ['start_time', 'stop_time']
            for var in default_vars:
                if var in new_vars:
                    new_vars.remove(var)
            for var in new_vars:
                nwb_new.add_trial_column(
                    name=var,
                    description=nwb_old.trials[var].description,
                    data=nwb_old.trials[var].data[:])

        # Intervals -----------------------------------------------------------
        if 'intervals' in cp_objs and nwb_old.intervals is not None:
            all_objs_names = list(nwb_old.intervals.keys())
            for obj_name in all_objs_names:
                obj_old = nwb_old.intervals[obj_name]
                # Create and add a TimeIntervals table
                obj = TimeIntervals(name=obj_old.name,
                                    description=obj_old.description)
                nInt = len(obj_old['start_time'])
                for ind in np.arange(nInt):
                    obj.add_interval(start_time=obj_old['start_time'][ind],
                                     stop_time=obj_old['stop_time'][ind])
                # Add to file
                nwb_new.add_time_intervals(obj)

        # Stimulus --------------------------------------------------------
        if 'stimulus' in cp_objs:
            all_objs_names = list(nwb_old.stimulus.keys())
            for obj_name in all_objs_names:
                obj_old = nwb_old.stimulus[obj_name]
                obj = TimeSeries(name=obj_old.name,
                                 description=obj_old.description,
                                 data=obj_old.data[:],
                                 rate=obj_old.rate,
                                 resolution=obj_old.resolution,
                                 conversion=obj_old.conversion,
                                 starting_time=obj_old.starting_time,
                                 unit=obj_old.unit)
                nwb_new.add_stimulus(obj)

        # Processing modules ----------------------------------------------
        if 'ecephys' in cp_objs:
            interfaces = [nwb_old.processing['ecephys'].data_interfaces[key]
                          for key in cp_objs['ecephys']]
            # Add ecephys module to the new NWB file
            ecephys_module = ProcessingModule(
                name='ecephys',
                description='Extracellular electrophysiology data.')
            nwb_new.add_processing_module(ecephys_module)
            for interface_old in interfaces:
                obj = copy_obj(interface_old, nwb_old, nwb_new)
                if obj is not None:
                    ecephys_module.add_data_interface(obj)

        if 'behavior' in cp_objs:
            interfaces = [nwb_old.processing['behavior'].data_interfaces[key]
                          for key in cp_objs['behavior']]
            if 'behavior' not in nwb_new.processing:
                # Add behavior module to the new NWB file
                behavior_module = ProcessingModule(
                    name='behavior', description='Behavioral data.')
                nwb_new.add_processing_module(behavior_module)
            else:
                # Reuse the existing module (the original code left
                # behavior_module undefined in this case)
                behavior_module = nwb_new.processing['behavior']
            for interface_old in interfaces:
                obj = copy_obj(interface_old, nwb_old, nwb_new)
                if obj is not None:
                    behavior_module.add_data_interface(obj)

        # Acquisition -----------------------------------------------------
        # Can copy raw ElectricalSeries and microphone recordings
        if 'acquisition' in cp_objs:
            for acq_name in cp_objs['acquisition']:
                obj_old = nwb_old.acquisition[acq_name]
                acq = copy_obj(obj_old, nwb_old, nwb_new)
                nwb_new.add_acquisition(acq)

        # Surveys ---------------------------------------------------------
        if 'surveys' in cp_objs and 'behavior' in nwb_old.processing:
            surveys_list = [
                v for v in
                nwb_old.processing['behavior'].data_interfaces.values()
                if v.neurodata_type == 'SurveyTable']
            if cp_objs['surveys'] and len(surveys_list) > 0:
                if 'behavior' not in nwb_new.processing:
                    # Add behavior module to the new NWB file
                    behavior_module = ProcessingModule(
                        name='behavior', description='Behavioral data.')
                    nwb_new.add_processing_module(behavior_module)
                else:
                    behavior_module = nwb_new.processing['behavior']
                for obj_old in surveys_list:
                    srv = copy_obj(obj_old, nwb_old, nwb_new)
                    behavior_module.add_data_interface(srv)

        # Subject ---------------------------------------------------------
        if nwb_old.subject is not None and 'subject' in cp_objs:
            try:
                cortical_surfaces = CorticalSurfaces()
                surfaces = nwb_old.subject.cortical_surfaces.surfaces
                for sfc in list(surfaces.keys()):
                    cortical_surfaces.create_surface(
                        name=surfaces[sfc].name,
                        faces=surfaces[sfc].faces,
                        vertices=surfaces[sfc].vertices)
                nwb_new.subject = ECoGSubject(
                    cortical_surfaces=cortical_surfaces,
                    subject_id=nwb_old.subject.subject_id,
                    age=nwb_old.subject.age,
                    description=nwb_old.subject.description,
                    genotype=nwb_old.subject.genotype,
                    sex=nwb_old.subject.sex,
                    species=nwb_old.subject.species,
                    weight=nwb_old.subject.weight,
                    date_of_birth=nwb_old.subject.date_of_birth)
            except Exception:
                # Fall back to a plain Subject if there are no cortical surfaces
                nwb_new.subject = Subject(**nwb_old.subject.fields)

        # Write new file with copied fields
        if save_to_file:
            io2.write(nwb_new, link_data=False)

    # Close old file and return new NWBFile object
    if io1:
        io1.close()

    return nwb_new
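# A minimal usage sketch for nwb_copy_file. The file names and the children
# listed in cp_objs are hypothetical: 'original.nwb' is assumed to contain a
# 'microphone' acquisition and an 'LFP' interface under its 'ecephys'
# processing module, per the cp_objs format described in the docstring above.
cp_objs = {
    'institution': True,
    'lab': True,
    'devices': True,
    'electrode_groups': True,
    'electrodes': True,
    'acquisition': ['microphone'],
    'ecephys': ['LFP'],
}
nwb_new = nwb_copy_file(old_file='original.nwb', new_file='copy.nwb',
                        cp_objs=cp_objs, save_to_file=True)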
# (excitationsources_table, photodetectors_table, multi_commanded_voltage,
# fluorophores_table and nwbfile are created earlier in the original script;
# FibersTable and FiberPhotometry come from the ndx-photometry extension.)
fluorophores_table.add_row(
    label='dlight',
    location='VTA',
    coordinates=(3.0, 2.0, 1.0)
)

fibers_table = FibersTable(description="fibers table")

# Here we add the metadata tables to the metadata section
nwbfile.add_lab_meta_data(
    FiberPhotometry(
        fibers=fibers_table,
        excitation_sources=excitationsources_table,
        photodetectors=photodetectors_table,
        fluorophores=fluorophores_table,
        commanded_voltages=multi_commanded_voltage
    )
)

# Important: we add the fibers to the fibers table _after_ adding the metadata.
# This ensures that we can find this data in their tables of origin.
fibers_table.add_fiber(
    excitation_source=0,  # integers indicate rows of the excitation sources table
    photodetector=0,
    fluorophores=[0],  # potentially multiple fluorophores, so a list of indices
    location='my location',
    notes='notes'
)
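# A round-trip sketch for the metadata above, following the write/read
# pattern used by the other examples in this document. The file name is
# illustrative, and NWBHDF5IO is assumed imported from pynwb.
with NWBHDF5IO('test_fiber_photometry.nwb', 'w') as io:
    io.write(nwbfile)

with NWBHDF5IO('test_fiber_photometry.nwb', 'r') as io:
    nwb2 = io.read()
    # LabMetaData containers are retrieved by name from lab_meta_data
    print(list(nwb2.lab_meta_data.keys()))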
from datetime import datetime

from numpy.testing import assert_array_equal
from pynwb import NWBFile, NWBHDF5IO

# Environment, Environments, Edge and PolygonNode come from the maze
# extension (import elided in the original snippet), as do edge_pairs,
# segments, points and sleep_box_polygon_coords.
w_maze = Environment(
    name='w_maze',
    edges=[Edge(name=n[0] + '<->' + n[1], edge_nodes=n) for n in edge_pairs],
    nodes=segments + points)
sleep_box = Environment(
    name='sleep_box',
    nodes=[
        PolygonNode(
            name='sleep_box',
            coords=sleep_box_polygon_coords
        )
    ]
)
environments = Environments(name='environments',
                            environments=[w_maze, sleep_box])

session_start_time = datetime.now().astimezone()
nwb = NWBFile('session_description', 'identifier', session_start_time)
nwb.add_lab_meta_data(environments)

with NWBHDF5IO('test_maze.nwb', 'w') as io:
    io.write(nwb)

with NWBHDF5IO('test_maze.nwb', 'r') as io:
    nwb2 = io.read()
    assert_array_equal(
        nwb2.lab_meta_data['environments']['w_maze'].nodes['left_arm'].coords[:],
        w_maze.nodes['left_arm'].coords[:])