def generate_position_data(filename):
    """Build a pynwb.behavior.Position container from an Axona position file.

    Parameters
    ----------
    filename : Path or str
        Full filename of the Axona file, any extension; it is handed
        straight to ``read_bin_file_position_data``.

    Returns
    -------
    pynwb.behavior.Position
        One SpatialSeries per column of the raw position array, named
        after the Axona tracker channels (t, x1, y1, x2, y2, numpix1,
        numpix2, unused), all sharing the timestamps taken from the
        first column.
    """
    channel_names = 't,x1,y1,x2,y2,numpix1,numpix2,unused'.split(',')
    raw_position = read_bin_file_position_data(filename)
    # Column 0 carries the sample times shared by every channel.
    shared_timestamps = raw_position[:, 0]

    container = Position()
    for column in range(raw_position.shape[1]):
        container.add_spatial_series(
            SpatialSeries(
                name=channel_names[column],
                timestamps=shared_timestamps,
                data=raw_position[:, column],
                reference_frame='start of raw aquisition (.bin file)',
            )
        )
    return container
def yuta2nwb(session_path='/Users/bendichter/Desktop/Buzsaki/SenzaiBuzsaki2017/YutaMouse41/YutaMouse41-150903',
             subject_xls=None, include_spike_waveforms=True, stub=True):
    """Convert one YutaMouse session (Senzai & Buzsaki 2017) to an NWB file on disk.

    Reads subject metadata from an Excel sheet, raw position, LFP, spike
    waveforms, task epochs, trials and sleep states from the session folder,
    then writes ``<session_path>.nwb`` (or ``<session_path>_stub.nwb`` when
    ``stub`` is True) and re-opens it as a read test.

    Parameters:
    -------
    session_path (str): folder whose basename encodes subject id and date
        (e.g. 'YutaMouse41-150903').
    subject_xls (str or None): path to the 'YM.. exp_sheet.xlsx' metadata
        sheet; derived from session_path when None.
    include_spike_waveforms (bool): also write per-shank spike waveforms.
    stub (bool): passed through to the LFP/waveform readers; also selects
        the '_stub.nwb' output name.

    NOTE(review): relies on module-level names not visible in this block
    (ns, task_types, special_electrode_dict, celltype_dict, max_shanks).
    """
    subject_path, session_id = os.path.split(session_path)
    fpath_base = os.path.split(subject_path)[0]
    identifier = session_id
    mouse_number = session_id[9:11]
    # Session ids are either '<subject>-<date>' or '<subject>b<date>';
    # b records which separator was found (used by get_reference_elec below).
    if '-' in session_id:
        subject_id, date_text = session_id.split('-')
        b = False
    else:
        subject_id, date_text = session_id.split('b')
        b = True

    if subject_xls is None:
        subject_xls = os.path.join(subject_path, 'YM' + mouse_number + ' exp_sheet.xlsx')
    else:
        # A directory was supplied: append the conventional sheet name.
        if not subject_xls[-4:] == 'xlsx':
            subject_xls = os.path.join(subject_xls, 'YM' + mouse_number + ' exp_sheet.xlsx')

    session_start_time = dateparse(date_text, yearfirst=True)

    # Pull subject metadata rows (first column = key, second = value).
    df = pd.read_excel(subject_xls)

    subject_data = {}
    for key in ['genotype', 'DOB', 'implantation', 'Probe', 'Surgery', 'virus injection', 'mouseID']:
        names = df.iloc[:, 0]
        if key in names.values:
            # np.argmax on the boolean mask returns the first matching row.
            subject_data[key] = df.iloc[np.argmax(names == key), 1]

    if isinstance(subject_data['DOB'], datetime):
        age = session_start_time - subject_data['DOB']
    else:
        age = None

    subject = Subject(subject_id=subject_id, age=str(age),
                      genotype=subject_data['genotype'], species='mouse')

    nwbfile = NWBFile(session_description='mouse in open exploration and theta maze',
                      identifier=identifier,
                      session_start_time=session_start_time.astimezone(),
                      file_create_date=datetime.now().astimezone(),
                      experimenter='Yuta Senzai',
                      session_id=session_id,
                      institution='NYU',
                      lab='Buzsaki',
                      subject=subject,
                      related_publications='DOI:10.1016/j.neuron.2016.12.011')

    print('reading and writing raw position data...', end='', flush=True)
    ns.add_position_data(nwbfile, session_path)

    shank_channels = ns.get_shank_channels(session_path)[:8]
    all_shank_channels = np.concatenate(shank_channels)

    print('setting up electrodes...', end='', flush=True)
    hilus_csv_path = os.path.join(fpath_base, 'early_session_hilus_chans.csv')
    lfp_channel = get_reference_elec(subject_xls, hilus_csv_path, session_start_time, session_id, b=b)
    print(lfp_channel)
    # Boolean column marking the theta-reference electrode in the table.
    custom_column = [{'name': 'theta_reference',
                      'description': 'this electrode was used to calculate LFP canonical bands',
                      'data': all_shank_channels == lfp_channel}]
    ns.write_electrode_table(nwbfile, session_path, custom_columns=custom_column, max_shanks=max_shanks)

    print('reading LFPs...', end='', flush=True)
    lfp_fs, all_channels_data = ns.read_lfp(session_path, stub=stub)
    # Keep only channels belonging to the (up to 8) shanks selected above.
    lfp_data = all_channels_data[:, all_shank_channels]
    print('writing LFPs...', flush=True)
    # lfp_data[:int(len(lfp_data)/4)]
    lfp_ts = ns.write_lfp(nwbfile, lfp_data, lfp_fs, name='lfp',
                          description='lfp signal for all shank electrodes')

    # Non-neural channels (e.g. environmental sensors) recorded inline.
    for name, channel in special_electrode_dict.items():
        ts = TimeSeries(name=name, description='environmental electrode recorded inline with neural data',
                        data=all_channels_data[channel], rate=lfp_fs, unit='V',
                        conversion=np.nan, resolution=np.nan)
        nwbfile.add_acquisition(ts)

    # compute filtered LFP: instantaneous theta and gamma phase of the
    # reference channel, stacked depth-wise into one (time, 1, 2) array.
    print('filtering LFP...', end='', flush=True)
    all_lfp_phases = []
    for passband in ('theta', 'gamma'):
        lfp_fft = filter_lfp(lfp_data[:, all_shank_channels == lfp_channel].ravel(), lfp_fs, passband=passband)
        lfp_phase, _ = hilbert_lfp(lfp_fft)
        all_lfp_phases.append(lfp_phase[:, np.newaxis])
    data = np.dstack(all_lfp_phases)
    print('done.', flush=True)

    if include_spike_waveforms:
        print('writing waveforms...', end='', flush=True)
        for shankn in np.arange(1, 9, dtype=int):
            ns.write_spike_waveforms(nwbfile, session_path, shankn, stub=stub)
        print('done.', flush=True)

    decomp_series = DecompositionSeries(name='LFPDecompositionSeries',
                                        description='Theta and Gamma phase for reference LFP',
                                        data=data, rate=lfp_fs,
                                        source_timeseries=lfp_ts,
                                        metric='phase', unit='radians')
    decomp_series.add_band(band_name='theta', band_limits=(4, 10))
    decomp_series.add_band(band_name='gamma', band_limits=(30, 80))
    check_module(nwbfile, 'ecephys', 'contains processed extracellular electrophysiology data').add_data_interface(decomp_series)

    [nwbfile.add_stimulus(x) for x in ns.get_events(session_path)]

    # create epochs corresponding to experiments/environments for the mouse
    sleep_state_fpath = os.path.join(session_path, '{}--StatePeriod.mat'.format(session_id))

    exist_pos_data = any(os.path.isfile(os.path.join(session_path, '{}__{}.mat'.format(session_id, task_type['name'])))
                         for task_type in task_types)

    if exist_pos_data:
        nwbfile.add_epoch_column('label', 'name of epoch')

    for task_type in task_types:
        label = task_type['name']

        file = os.path.join(session_path, session_id + '__' + label + '.mat')
        if os.path.isfile(file):
            print('loading position for ' + label + '...', end='', flush=True)
            pos_obj = Position(name=label + '_position')

            matin = loadmat(file)
            tt = matin['twhl_norm'][:, 0]
            # Gaps in the timestamp stream delimit separate runs of this task.
            exp_times = find_discontinuities(tt)

            if 'conversion' in task_type:
                conversion = task_type['conversion']
            else:
                conversion = np.nan

            for pos_type in ('twhl_norm', 'twhl_linearized'):
                if pos_type in matin:
                    pos_data_norm = matin[pos_type][:, 1:]
                    spatial_series_object = SpatialSeries(
                        name=label + '_{}_spatial_series'.format(pos_type),
                        data=H5DataIO(pos_data_norm, compression='gzip'),
                        reference_frame='unknown', conversion=conversion,
                        resolution=np.nan,
                        timestamps=H5DataIO(tt, compression='gzip'))
                    pos_obj.add_spatial_series(spatial_series_object)
            check_module(nwbfile, 'behavior', 'contains processed behavioral data').add_data_interface(pos_obj)
            for i, window in enumerate(exp_times):
                nwbfile.add_epoch(start_time=window[0], stop_time=window[1],
                                  label=label + '_' + str(i))
            print('done.')

    # there are occasional mismatches between the matlab struct and the neuroscope files
    # regions: 3: 'CA3', 4: 'DG'
    df_unit_features = get_UnitFeatureCell_features(fpath_base, session_id, session_path)

    celltype_names = []
    for celltype_id, region_id in zip(df_unit_features['fineCellType'].values,
                                      df_unit_features['region'].values):
        if celltype_id == 1:
            if region_id == 3:
                celltype_names.append('pyramidal cell')
            elif region_id == 4:
                celltype_names.append('granule cell')
            else:
                raise Exception('unknown type')
        elif not np.isfinite(celltype_id):
            celltype_names.append('missing')
        else:
            celltype_names.append(celltype_dict[celltype_id])

    custom_unit_columns = [
        {
            'name': 'cell_type',
            'description': 'name of cell type',
            'data': celltype_names},
        {
            'name': 'global_id',
            'description': 'global id for cell for entire experiment',
            'data': df_unit_features['unitID'].values},
        {
            'name': 'max_electrode',
            'description': 'electrode that has the maximum amplitude of the waveform',
            'data': get_max_electrodes(nwbfile, session_path),
            'table': nwbfile.electrodes
        }]
    ns.add_units(nwbfile, session_path, custom_unit_columns, max_shanks=max_shanks)

    trialdata_path = os.path.join(session_path, session_id + '__EightMazeRun.mat')
    if os.path.isfile(trialdata_path):
        trials_data = loadmat(trialdata_path)['EightMazeRun']

        trialdatainfo_path = os.path.join(fpath_base, 'EightMazeRunInfo.mat')
        trialdatainfo = [x[0] for x in loadmat(trialdatainfo_path)['EightMazeRunInfo'][0]]

        features = trialdatainfo[:7]
        # First two matlab feature names are replaced by the NWB-required ones.
        features[:2] = 'start_time', 'stop_time',
        [nwbfile.add_trial_column(x, 'description') for x in features[4:] + ['condition']]

        for trial_data in trials_data:
            if trial_data[3]:
                cond = 'run_left'
            else:
                cond = 'run_right'
            nwbfile.add_trial(start_time=trial_data[0], stop_time=trial_data[1], condition=cond,
                              error_run=trial_data[4], stim_run=trial_data[5], both_visit=trial_data[6])
    """
    mono_syn_fpath = os.path.join(session_path, session_id+'-MonoSynConvClick.mat')

    matin = loadmat(mono_syn_fpath)
    exc = matin['FinalExcMonoSynID']
    inh = matin['FinalInhMonoSynID']

    #exc_obj = CatCellInfo(name='excitatory_connections',
    #                      indices_values=[], cell_index=exc[:, 0] - 1, indices=exc[:, 1] - 1)
    #module_cellular.add_container(exc_obj)
    #inh_obj = CatCellInfo(name='inhibitory_connections',
    #                      indices_values=[], cell_index=inh[:, 0] - 1, indices=inh[:, 1] - 1)
    #module_cellular.add_container(inh_obj)
    """

    if os.path.isfile(sleep_state_fpath):
        matin = loadmat(sleep_state_fpath)['StatePeriod']

        table = TimeIntervals(name='states', description='sleep states of animal')
        table.add_column(name='label', description='sleep state')

        data = []
        for name in matin.dtype.names:
            for row in matin[name][0][0]:
                data.append({'start_time': row[0], 'stop_time': row[1], 'label': name})
        # Rows must be inserted in start_time order for a valid TimeIntervals table.
        [table.add_row(**row) for row in sorted(data, key=lambda x: x['start_time'])]

        check_module(nwbfile, 'behavior', 'contains behavioral data').add_data_interface(table)

    if stub:
        out_fname = session_path + '_stub.nwb'
    else:
        out_fname = session_path + '.nwb'

    print('writing NWB file...', end='', flush=True)
    with NWBHDF5IO(out_fname, mode='w') as io:
        io.write(nwbfile)
    print('done.')

    print('testing read...', end='', flush=True)
    # test read
    with NWBHDF5IO(out_fname, mode='r') as io:
        io.read()
    print('done.')
def convert_data(
    self,
    nwbfile: NWBFile,
    metadata_dict: dict,
    stub_test: bool = False,
    include_spike_waveforms: bool = False,
):
    """Convert the behavioral portion of a particular session of the GrosmarkAD dataset.

    Adds, in order: stimulus events, sleep-state intervals (when the
    ``.SleepState.states.mat`` file exists), raw and linearized position
    series from ``.position.behavior.mat``, and one epoch per named entry
    in the position file's ``Epochs`` struct.

    Parameters
    ----------
    nwbfile : NWBFile
        File object to populate in place.
    metadata_dict : dict
        Unused in this body.
    stub_test, include_spike_waveforms : bool
        Unused in this body.
    """
    session_path = self.input_args["folder_path"]
    subject_path, session_id = os.path.split(session_path)

    # Stimuli
    [nwbfile.add_stimulus(x) for x in get_events(session_path)]

    # States
    # BUGFIX: this string was missing its f-prefix, so the literal name
    # "{session_id}.SleepState.states.mat" was searched and never found,
    # silently skipping the sleep-state block.
    sleep_state_fpath = os.path.join(session_path, f"{session_id}.SleepState.states.mat")
    # label renaming specific to Watson
    state_label_names = dict(WAKEstate="Awake", NREMstate="Non-REM", REMstate="REM")
    if os.path.isfile(sleep_state_fpath):
        matin = loadmat(sleep_state_fpath)["SleepState"]["ints"][0][0]

        table = TimeIntervals(name="states", description="Sleep states of animal.")
        table.add_column(name="label", description="Sleep state.")

        data = []
        for name in matin.dtype.names:
            for row in matin[name][0][0]:
                data.append(
                    dict(
                        start_time=row[0],
                        stop_time=row[1],
                        label=state_label_names[name],
                    ))
        # TimeIntervals rows must be added in ascending start_time order.
        [
            table.add_row(**row)
            for row in sorted(data, key=lambda x: x["start_time"])
        ]
        check_module(nwbfile, "behavior",
                     "contains behavioral data").add_data_interface(table)

    # Position
    pos_filepath = Path(session_path) / f"{session_id}.position.behavior.mat"
    pos_mat = loadmat(str(pos_filepath.absolute()))
    starting_time = float(
        pos_mat["position"]["timestamps"][0][0]
        [0])  # confirmed to be a regularly sampled series
    # NOTE(review): this is t1 - t0, i.e. the sampling *interval*, yet it is
    # passed as rate= below — confirm whether 1/(t1 - t0) was intended.
    rate = float(
        pos_mat["position"]["timestamps"][0][0][1]) - starting_time
    if pos_mat["position"]["units"][0][0][0] == "m":
        conversion = 1.0
    else:
        warnings.warn(
            f"Spatial units ({pos_mat['position']['units'][0][0][0]}) not listed in meters; "
            "setting conversion to nan.")
        conversion = np.nan
    pos_data = [[x[0], y[0]] for x, y in zip(
        pos_mat["position"]["position"][0][0]["x"][0][0],
        pos_mat["position"]["position"][0][0]["y"][0][0],
    )]
    linearized_data = [[
        lin[0]
    ] for lin in pos_mat["position"]["position"][0][0]["lin"][0][0]]

    # Maze name (spaces stripped) prefixes all position object names.
    label = pos_mat["position"]["behaviorinfo"][0][0]["MazeType"][0][0][
        0].replace(" ", "")
    pos_obj = Position(name=f"{label}Position")
    spatial_series_object = SpatialSeries(
        name=f"{label}SpatialSeries",
        description=
        "(x,y) coordinates tracking subject movement through the maze.",
        data=H5DataIO(pos_data, compression="gzip"),
        reference_frame="unknown",
        conversion=conversion,
        starting_time=starting_time,
        rate=rate,
        resolution=np.nan,
    )
    pos_obj.add_spatial_series(spatial_series_object)
    check_module(
        nwbfile, "behavior",
        "contains processed behavioral data").add_data_interface(pos_obj)

    lin_pos_obj = Position(name=f"{label}LinearizedPosition")
    lin_spatial_series_object = SpatialSeries(
        name=f"{label}LinearizedTimeSeries",
        description=
        "Linearized position, defined as starting at the edge of reward area, "
        "and increasing clockwise, terminating at the opposing edge of the reward area.",
        data=H5DataIO(linearized_data, compression="gzip"),
        reference_frame="unknown",
        conversion=conversion,
        starting_time=starting_time,
        rate=rate,
        resolution=np.nan,
    )
    lin_pos_obj.add_spatial_series(lin_spatial_series_object)
    check_module(nwbfile, "behavior",
                 "contains processed behavioral data").add_data_interface(
                     lin_pos_obj)

    # Epochs: one (start, stop) window per named entry in the Epochs struct.
    epoch_names = list(pos_mat["position"]["Epochs"][0][0].dtype.names)
    epoch_windows = [[float(start), float(stop)]
                     for x in pos_mat["position"]["Epochs"][0][0][0][0]
                     for start, stop in x]
    nwbfile.add_epoch_column("label", "name of epoch")
    for j, epoch_name in enumerate(epoch_names):
        nwbfile.add_epoch(
            start_time=epoch_windows[j][0],
            stop_time=epoch_windows[j][1],
            label=epoch_name,
        )
def run_conversion(self, nwbfile: NWBFile, metadata_dict: dict, stub_test: bool = False):
    """Populate *nwbfile* with the behavioral data of one Peyrache session.

    Adds stimulus events, sleep-state intervals, raw (.whl) and processed
    (.pos) LED tracking, and — when the raw recordings are present —
    epochs whose boundaries are reconstructed from the .dat file lengths.

    Parameters
    ----------
    nwbfile : NWBFile
        File object to populate in place.
    metadata_dict : dict
        Unused in this body.
    stub_test : bool
        Unused in this body.
    """
    session_path = Path(self.source_data["folder_path"])
    session_id = session_path.stem

    # Stimuli
    [nwbfile.add_stimulus(x) for x in get_events(session_path)]

    # States
    sleep_state_fpath = session_path / f"{session_id}.SleepState.states.mat"
    # label renaming specific to Peyrache
    state_label_names = dict(WAKEstate="Awake", NREMstate="Non-REM", REMstate="REM")
    if sleep_state_fpath.is_file():
        matin = loadmat(sleep_state_fpath)["SleepState"]["ints"][0][0]

        table = TimeIntervals(name="states", description="Sleep states of animal.")
        table.add_column(name="label", description="Sleep state.")

        data = []
        for name in matin.dtype.names:
            for row in matin[name][0][0]:
                data.append(
                    dict(start_time=row[0],
                         stop_time=row[1],
                         label=state_label_names[name]))
        # TimeIntervals rows must be added in ascending start_time order.
        [
            table.add_row(**row)
            for row in sorted(data, key=lambda x: x["start_time"])
        ]
        check_module(nwbfile, "behavior", "Contains behavioral data.").add(table)

    # Position: two tracking LEDs, each occupying two (x, y) columns.
    pos_names = ["RedLED", "BlueLED"]
    pos_idx_from = [0, 2]
    pos_idx_to = [2, 4]

    # Raw position
    whlfile_path = session_path / f"{session_id}.whl"
    whl_data = np.loadtxt(whlfile_path)
    for name, idx_from, idx_to in zip(pos_names, pos_idx_from, pos_idx_to):
        nwbfile.add_acquisition(
            peyrache_spatial_series(
                name=name,
                description=
                "Raw sensor data. Values of -1 indicate that LED detection failed.",
                data=whl_data[:, idx_from:idx_to],
                conversion=np.nan,  # whl file is in arbitrary grid units
            ))

    # Processed position
    posfile_path = session_path / f"{session_id}.pos"
    if posfile_path.is_file(
    ):  # at least Mouse32-140820 was missing a .pos file
        try:
            pos_data = np.loadtxt(posfile_path)
            pos_obj = Position(name="SubjectPosition")
            for name, idx_from, idx_to in zip(pos_names, pos_idx_from,
                                              pos_idx_to):
                pos_obj.add_spatial_series(
                    peyrache_spatial_series(
                        name=name,
                        description=
                        ("(x,y) coordinates tracking subject movement through the maze."
                         "Values of -1 indicate that LED detection failed."
                         ),
                        data=pos_data[:, idx_from:idx_to],
                        conversion=1e-2,  # from cm to m
                    ))
            check_module(nwbfile, "behavior",
                         "Contains behavioral data.").add(pos_obj)
        except ValueError:  # data issue present in at least Mouse17-170201
            warn(f"Skipping .pos file for session {session_id}!")

    # Epochs - only available for sessions with raw data
    epoch_file = session_path / "raw" / f"{session_id}-raw-info" / f"{session_id}-behaviors.txt"
    if epoch_file.is_file():
        epoch_data = pd.read_csv(epoch_file, header=1)[f"{session_id}:"]
        epoch_dat_inds = []
        epoch_names = []
        # Each row looks like "<ind> <ind> ...: <name>".
        for epochs in epoch_data:
            inds, name = epochs.split(": ")
            epoch_dat_inds.append(inds.split(" "))
            epoch_names.append(name)

        # Epoch boundaries are cumulative .dat durations; each boundary is
        # appended twice so consecutive pairs form (start, stop) windows.
        epoch_windows = [0]
        for epoch in epoch_dat_inds:
            exp_end_times = []
            for dat_ind in epoch:
                recording_file = session_path / "raw" / f"{session_id}{dat_ind}.dat"
                info_extractor = NeuroscopeRecordingExtractor(
                    recording_file)
                dat_end_time = info_extractor.get_num_frames(
                ) / info_extractor.get_sampling_frequency()  # seconds
                exp_end_times.extend([dat_end_time])
            epoch_windows.extend([epoch_windows[-1] + sum(exp_end_times)] *
                                 2)
        epoch_windows = np.array(epoch_windows[:-1]).reshape(-1, 2)

        for j, epoch_name in enumerate(epoch_names):
            nwbfile.add_epoch(start_time=epoch_windows[j][0],
                              stop_time=epoch_windows[j][1],
                              tags=[epoch_name])
position.create_spatial_series(name='position1', data=np.linspace(0, 1, 20), rate=50., reference_frame='starting gate') #################### # or you can add pre-existing objects: from pynwb.behavior import SpatialSeries spatial_series = SpatialSeries(name='position2', data=np.linspace(0, 1, 20), rate=50., reference_frame='starting gate') position.add_spatial_series(spatial_series) #################### # or include the object during construction: spatial_series = SpatialSeries(name='position2', data=np.linspace(0, 1, 20), rate=50., reference_frame='starting gate') position = Position(spatial_series=spatial_series) #################### # Each data interface stores its own type of data. We suggest you read the documentation for the # data interface of interest in the :ref:`API documentation <api_docs>` to figure out what data the # data interface allows and/or requires and what methods you will need to call to add this data.
def run_conversion(self, nwbfile: NWBFile, metadata: dict):
    """Add trials and position data from the session's SessionNP .mat file.

    Adds two custom trial columns (reward_time, left_or_right), one trial
    per row of ``SessionNP``, two raw position SpatialSeries from ``whlrl``,
    and a linearized position series from ``whlrld`` column 6, all placed
    in the "behavior" processing module.

    Parameters
    ----------
    nwbfile : NWBFile
        File object to populate in place.
    metadata : dict
        Unused in this body.
    """
    mat_file_path = self.source_data["mat_file_path"]
    mat_file = loadmat(mat_file_path)
    trial_info = mat_file["SessionNP"]

    nwbfile.add_trial_column(
        name="reward_time",
        description="Time when subject began consuming reward.")
    # BUGFIX: this description was a copy-paste of reward_time's
    # ("Time when subject began consuming reward.").
    nwbfile.add_trial_column(
        name="left_or_right",
        description="Whether the subject went left or right during the trial.")
    l_r_dict = {1: "Right", 2: "Left"}
    for trial in trial_info:
        nwbfile.add_trial(start_time=trial[0],
                          stop_time=trial[1],
                          reward_time=trial[2],
                          left_or_right=l_r_dict[int(trial[3])])

    # Position
    pos_info = mat_file["whlrl"]
    # NOTE(review): columns 0 and 2 only — each series gets a single column,
    # though the description below says "(x,y)". Confirm the whlrl layout.
    pos_data = [pos_info[:, 0:1], pos_info[:, 2:3]]

    starting_time = 0.0
    rate = 20000 / 512  # from CRCNS info
    conversion = np.nan  # whl are arbitrary units

    pos_obj = Position(name="Position")
    for j in range(2):
        spatial_series_object = SpatialSeries(
            name=f"SpatialSeries{j+1}",
            description=
            "(x,y) coordinates tracking subject movement through the maze.",
            data=H5DataIO(pos_data[j], compression="gzip"),
            reference_frame="unknown",
            conversion=conversion,
            starting_time=starting_time,
            rate=rate,
            resolution=np.nan,
        )
        pos_obj.add_spatial_series(spatial_series_object)
    get_module(nwbfile=nwbfile,
               name="behavior",
               description="Contains processed behavioral data."
               ).add_data_interface(pos_obj)

    linearized_pos = mat_file["whlrld"][:, 6]
    lin_pos_obj = Position(name="LinearizedPosition")
    lin_spatial_series_object = SpatialSeries(
        name="LinearizedTimeSeries",
        description=
        ("Linearized position, with '1' defined as start position (the position at the time of last nose-poking "
         "in the trial), and d=2 being the end position (position at the time just before reward consumption). "
         "d=0 means subject is not performing working memory trials."),
        data=H5DataIO(linearized_pos, compression="gzip"),
        reference_frame="unknown",
        conversion=conversion,
        starting_time=starting_time,
        rate=rate,
        resolution=np.nan,
    )
    lin_pos_obj.add_spatial_series(lin_spatial_series_object)
    get_module(nwbfile=nwbfile,
               name="behavior").add_data_interface(lin_pos_obj)
def convert_data(
    self, nwbfile: NWBFile, metadata_dict: dict, stub_test: bool = False, include_spike_waveforms: bool = False
):
    """Populate *nwbfile* with this Watson session's behavioral data.

    Adds stimulus events; per-task position series and epochs (from
    ``<session_id>__<task>.mat`` files listed in metadata_dict's
    "task_types"); EightMazeRun trials; and sleep-state intervals.

    Parameters
    ----------
    nwbfile : NWBFile
        File object to populate in place.
    metadata_dict : dict
        May contain "task_types", a list of dicts with "name" and optional
        "conversion" keys.
    stub_test, include_spike_waveforms : bool
        Unused in this body.
    """
    session_path = self.input_args["folder_path"]
    # TODO: check/enforce format?
    task_types = metadata_dict.get("task_types", [])

    subject_path, session_id = os.path.split(session_path)
    fpath_base = os.path.split(subject_path)[0]

    [nwbfile.add_stimulus(x) for x in get_events(session_path)]

    # Only add the custom epoch column if at least one task file exists.
    exist_pos_data = any(
        os.path.isfile(os.path.join(session_path, "{}__{}.mat".format(session_id, task_type["name"])))
        for task_type in task_types
    )

    if exist_pos_data:
        nwbfile.add_epoch_column("label", "name of epoch")

    for task_type in task_types:
        label = task_type["name"]

        file = os.path.join(session_path, session_id + "__" + label + ".mat")
        if os.path.isfile(file):
            pos_obj = Position(name=label + "_position")

            matin = loadmat(file)
            tt = matin["twhl_norm"][:, 0]
            # Gaps in the timestamp stream delimit separate runs of this task.
            exp_times = find_discontinuities(tt)

            if "conversion" in task_type:
                conversion = task_type["conversion"]
            else:
                conversion = np.nan

            for pos_type in ("twhl_norm", "twhl_linearized"):
                if pos_type in matin:
                    pos_data_norm = matin[pos_type][:, 1:]

                    spatial_series_object = SpatialSeries(
                        name=label + "_{}_spatial_series".format(pos_type),
                        data=H5DataIO(pos_data_norm, compression="gzip"),
                        reference_frame="unknown",
                        conversion=conversion,
                        resolution=np.nan,
                        timestamps=H5DataIO(tt, compression="gzip"),
                    )
                    pos_obj.add_spatial_series(spatial_series_object)

            check_module(nwbfile, "behavior", "contains processed behavioral data").add_data_interface(pos_obj)

            for i, window in enumerate(exp_times):
                nwbfile.add_epoch(start_time=window[0], stop_time=window[1], label=label + "_" + str(i))

    trialdata_path = os.path.join(session_path, session_id + "__EightMazeRun.mat")
    if os.path.isfile(trialdata_path):
        trials_data = loadmat(trialdata_path)["EightMazeRun"]

        trialdatainfo_path = os.path.join(fpath_base, "EightMazeRunInfo.mat")
        trialdatainfo = [x[0] for x in loadmat(trialdatainfo_path)["EightMazeRunInfo"][0]]

        features = trialdatainfo[:7]
        # First two matlab feature names are replaced by the NWB-required ones.
        features[:2] = (
            "start_time",
            "stop_time",
        )
        [nwbfile.add_trial_column(x, "description") for x in features[4:] + ["condition"]]

        for trial_data in trials_data:
            if trial_data[3]:
                cond = "run_left"
            else:
                cond = "run_right"
            nwbfile.add_trial(
                start_time=trial_data[0],
                stop_time=trial_data[1],
                condition=cond,
                error_run=trial_data[4],
                stim_run=trial_data[5],
                both_visit=trial_data[6],
            )

    sleep_state_fpath = os.path.join(session_path, "{}.SleepState.states.mat".format(session_id))
    # label renaming specific to Watson
    state_label_names = {"WAKEstate": "Awake", "NREMstate": "Non-REM", "REMstate": "REM"}
    if os.path.isfile(sleep_state_fpath):
        matin = loadmat(sleep_state_fpath)["SleepState"]["ints"][0][0]

        table = TimeIntervals(name="states", description="Sleep states of animal.")
        table.add_column(name="label", description="Sleep state.")

        data = []
        for name in matin.dtype.names:
            for row in matin[name][0][0]:
                data.append({"start_time": row[0], "stop_time": row[1], "label": state_label_names[name]})
        # TimeIntervals rows must be added in ascending start_time order.
        [table.add_row(**row) for row in sorted(data, key=lambda x: x["start_time"])]

        check_module(nwbfile, "behavior", "contains behavioral data").add_data_interface(table)
def run_conversion(self, nwbfile: NWBFile, metadata: dict):
    """Populate *nwbfile* from the session's ``.behavior.mat`` file.

    Adds trials (with optional direction/type columns), position and
    tracking-error series, an orientation CompassDirection, sleep-state
    intervals, and three epochs (before/during/after trials) whose end is
    derived from the raw or LFP recording length.

    Parameters
    ----------
    nwbfile : NWBFile
        File object to populate in place.
    metadata : dict
        Unused in this body.
    """
    session_path = Path(self.source_data["folder_path"])
    session_id = session_path.stem

    # Load the file with behavioral data
    behavior_file_path = Path(session_path) / f"{session_id}.behavior.mat"
    behavior_mat = read_matlab_file(str(behavior_file_path))["behavior"]

    # Add trials
    events = behavior_mat["events"]
    trial_interval_list = events["trialIntervals"]

    data = []
    for start_time, stop_time in trial_interval_list:
        data.append(
            dict(
                start_time=float(start_time),
                stop_time=float(stop_time),
            ))
    # Trials must be added in ascending start_time order.
    [
        nwbfile.add_trial(**row)
        for row in sorted(data, key=lambda x: x["start_time"])
    ]

    trial_list = events["trials"]
    direction_list = [trial.get("direction", "") for trial in trial_list]
    trial_type_list = [trial.get("type", "") for trial in trial_list]

    # Only add the optional columns when at least one trial carries a value.
    if not all([direction == "" for direction in direction_list]):
        nwbfile.add_trial_column(name="direction",
                                 description="direction of the trial",
                                 data=direction_list)

    if not all([trial_type == "" for trial_type in trial_type_list]):
        nwbfile.add_trial_column(name="trial_type",
                                 description="type of trial",
                                 data=trial_type_list)

    # Position
    module_name = "behavior"
    module_description = "Contains behavioral data concerning position."
    processing_module = get_module(nwbfile=nwbfile,
                                   name=module_name,
                                   description=module_description)

    timestamps = np.array(behavior_mat["timestamps"])[..., 0]

    position = behavior_mat["position"]
    # NOTE(review): the third coordinate zips position["y"] again, not
    # position["z"], though the series is described as (x,y,z) — confirm
    # whether a "z" key exists upstream before changing.
    pos_data = [[x, y, z] for (x, y, z) in zip(position["x"], position["y"],
                                               position["y"])]
    pos_data = np.array(pos_data)[..., 0]

    unit = behavior_mat.get("units", "")

    # BUGFIX: the original compared the unit string to the whole list
    # (`unit == ["m", "meter", "meters"]`), which is always False, so
    # conversion was unconditionally nan. Membership was clearly intended.
    if unit in ["m", "meter", "meters"]:
        conversion = 1.0
    else:
        warnings.warn(f"Spatial units {unit} not listed in meters; "
                      "setting conversion to nan.")
        conversion = np.nan

    description = behavior_mat.get("description",
                                   "generic_position_tracking").replace(
                                       "/", "-")

    rotation_type = behavior_mat.get("rotationType", "non_specified")

    pos_obj = Position(name=f"{description}_task".replace(" ", "_"))

    spatial_series_object = SpatialSeries(
        name="position",
        description="(x,y,z) coordinates tracking subject movement.",
        data=H5DataIO(pos_data, compression="gzip"),
        reference_frame="unknown",
        unit=unit,
        conversion=conversion,
        timestamps=timestamps,
        resolution=np.nan,
    )

    pos_obj.add_spatial_series(spatial_series_object)

    # Add error if available
    errorPerMarker = behavior_mat.get("errorPerMarker", None)
    # NOTE(review): truthiness test assumes a list/None; would raise on a
    # non-empty numpy array — confirm read_matlab_file's return type.
    if errorPerMarker:
        error_data = np.array([error for error in errorPerMarker])[..., 0]

        spatial_series_object = SpatialSeries(
            name="error_per_marker",
            description=
            "Estimated error for marker tracking from optitrack system.",
            data=H5DataIO(error_data, compression="gzip"),
            reference_frame="unknown",
            conversion=conversion,
            timestamps=timestamps,
            resolution=np.nan,
        )
        pos_obj.add_spatial_series(spatial_series_object)

    processing_module.add_data_interface(pos_obj)

    # Compass
    try:
        orientation = behavior_mat["orientation"]
        orientation_data = [[
            x, y, z, w
        ] for (x, y, z, w) in zip(orientation["x"], orientation["y"],
                                  orientation["z"], orientation["w"])]
        orientation_data = np.array(orientation_data)[..., 0]
        compass_obj = CompassDirection(name="allocentric_frame_tracking")

        spatial_series_object = SpatialSeries(
            name="orientation",
            description=
            f"(x, y, z, w) orientation coordinates, orientation type: {rotation_type}",
            data=H5DataIO(orientation_data, compression="gzip"),
            reference_frame="unknown",
            conversion=conversion,
            timestamps=timestamps,
            resolution=np.nan,
        )
        compass_obj.add_spatial_series(spatial_series_object)
        processing_module.add_data_interface(compass_obj)

    except KeyError:
        warnings.warn("Orientation data not found")

    # States
    module_name = "ecephys"
    module_description = "Contains behavioral data concerning classified states."
    processing_module = get_module(nwbfile=nwbfile,
                                   name=module_name,
                                   description=module_description)

    # Sleep states
    sleep_file_path = session_path / f"{session_id}.SleepState.states.mat"
    if Path(sleep_file_path).exists():
        mat_file = read_matlab_file(sleep_file_path)

        state_label_names = dict(WAKEstate="Awake",
                                 NREMstate="Non-REM",
                                 REMstate="REM",
                                 MAstate="MA")
        sleep_state_dic = mat_file["SleepState"]["ints"]
        table = TimeIntervals(name="sleep_states",
                              description="Sleep state of the animal.")
        table.add_column(name="label", description="Sleep state.")

        data = []
        for sleep_state in state_label_names:
            values = sleep_state_dic[sleep_state]
            # A single interval arrives as a flat [start, stop] pair; wrap it.
            if len(values) != 0 and isinstance(values[0], int):
                values = [values]
            for start_time, stop_time in values:
                data.append(
                    dict(
                        start_time=float(start_time),
                        stop_time=float(stop_time),
                        label=state_label_names[sleep_state],
                    ))
        # TimeIntervals rows must be added in ascending start_time order.
        [
            table.add_row(**row)
            for row in sorted(data, key=lambda x: x["start_time"])
        ]
        processing_module.add(table)

    # Add epochs: prefer the raw .dat to measure session length, fall back
    # to the .lfp file when raw data is absent.
    lfp_file_path = session_path / f"{session_path.name}.lfp"
    raw_file_path = session_path / f"{session_id}.dat"
    xml_file_path = session_path / f"{session_id}.xml"

    if raw_file_path.is_file():
        recorder = NeuroscopeRecordingExtractor(
            file_path=raw_file_path, xml_file_path=xml_file_path)
    else:
        recorder = NeuroscopeRecordingExtractor(
            file_path=lfp_file_path, xml_file_path=xml_file_path)

    num_frames = recorder.get_num_frames()
    sampling_frequency = recorder.get_sampling_frequency()
    end_of_the_session = num_frames / sampling_frequency

    session_start = 0.0
    start_trials_time = min(
        [interval[0] for interval in trial_interval_list])
    end_trials_time = max(
        [interval[1] for interval in trial_interval_list])

    # BUGFIX: tags were passed as bare strings; epoch tags are a list of
    # strings (consistent with the other converters in this file).
    nwbfile.add_epoch(start_time=session_start,
                      stop_time=start_trials_time,
                      tags=["before trials"])

    nwbfile.add_epoch(start_time=start_trials_time,
                      stop_time=end_trials_time,
                      tags=["during trials"])

    nwbfile.add_epoch(start_time=end_trials_time,
                      stop_time=end_of_the_session,
                      tags=["after trials"])
def run_conversion(self, nwbfile: NWBFile, metadata: dict, stub_test: bool = False):
    """Populate *nwbfile* with task epochs, EightMazeRun trials and sleep states.

    Looks for ``<session_id>__<task>.mat`` position files for a fixed set
    of open-field/eight-maze task types, adds their position series and
    per-run epochs, then EightMazeRun trials and StatePeriod sleep states.

    Parameters
    ----------
    nwbfile : NWBFile
        File object to populate in place.
    metadata : dict
        Unused in this body.
    stub_test : bool
        Unused in this body.
    """
    session_path = Path(self.source_data["folder_path"])
    # Known task files; "conversion" is the spatial scale factor per task.
    task_types = [
        dict(name="OpenFieldPosition_ExtraLarge"),
        dict(name="OpenFieldPosition_New_Curtain", conversion=0.46),
        dict(name="OpenFieldPosition_New", conversion=0.46),
        dict(name="OpenFieldPosition_Old_Curtain", conversion=0.46),
        dict(name="OpenFieldPosition_Old", conversion=0.46),
        dict(name="OpenFieldPosition_Oldlast", conversion=0.46),
        dict(name="EightMazePosition", conversion=0.65 / 2),
    ]

    subject_path = session_path.parent
    session_id = session_path.stem

    [nwbfile.add_stimulus(x) for x in get_events(session_path)]

    sleep_state_fpath = session_path / f"{session_id}--StatePeriod.mat"

    # BUGFIX: this string was missing its f-prefix, so the literal name
    # "{session_id}__{task_type['name']}.mat" was tested and never matched,
    # and the epoch "label" column was never added.
    exist_pos_data = any([
        (session_path / f"{session_id}__{task_type['name']}.mat").is_file()
        for task_type in task_types
    ])

    if exist_pos_data:
        nwbfile.add_epoch_column("label", "Name of epoch.")

    # Epoch intervals
    for task_type in task_types:
        label = task_type["name"]

        file = session_path / f"{session_id}__{label}.mat"
        if file.is_file():
            pos_obj = Position(name=f"{label}_position")

            matin = loadmat(file)
            tt = matin["twhl_norm"][:, 0]
            # Gaps in the timestamp stream delimit separate runs of this task.
            exp_times = find_discontinuities(tt)

            if "conversion" in task_type:
                conversion = task_type["conversion"]
            else:
                conversion = np.nan

            for pos_type in ("twhl_norm", "twhl_linearized"):
                if pos_type in matin:
                    pos_data_norm = matin[pos_type][:, 1:]

                    spatial_series_object = SpatialSeries(
                        name=f"{label}_{pos_type}_spatial_series",
                        data=H5DataIO(pos_data_norm, compression="gzip"),
                        reference_frame="unknown",
                        conversion=conversion,
                        resolution=np.nan,
                        timestamps=H5DataIO(tt, compression="gzip"),
                    )
                    pos_obj.add_spatial_series(spatial_series_object)

            check_module(
                nwbfile, "behavior",
                "Contains processed behavioral data.").add_data_interface(
                    pos_obj)

            # NOTE(review): epochs are added via tags= although the column
            # prepared above is "label" — confirm which was intended.
            for i, window in enumerate(exp_times):
                nwbfile.add_epoch(
                    start_time=window[0],
                    stop_time=window[1],
                    tags=f"{label}_{str(i)}",
                )

    # Trial intervals
    trialdata_path = session_path / f"{session_id}__EightMazeRun.mat"
    if trialdata_path.is_file():
        trials_data = loadmat(trialdata_path)["EightMazeRun"]

        trialdatainfo_path = subject_path / "EightMazeRunInfo.mat"
        trialdatainfo = [
            x[0]
            for x in loadmat(trialdatainfo_path)["EightMazeRunInfo"][0]
        ]

        features = trialdatainfo[:7]
        # First two matlab feature names are replaced by the NWB-required ones.
        features[:2] = (
            "start_time",
            "stop_time",
        )
        [
            nwbfile.add_trial_column(x, "description")
            for x in features[4:] + ["condition"]
        ]

        for trial_data in trials_data:
            if trial_data[3]:
                cond = "run_left"
            else:
                cond = "run_right"
            nwbfile.add_trial(
                start_time=trial_data[0],
                stop_time=trial_data[1],
                condition=cond,
                error_run=trial_data[4],
                stim_run=trial_data[5],
                both_visit=trial_data[6],
            )

    # Sleep states
    if sleep_state_fpath.is_file():
        matin = loadmat(sleep_state_fpath)["StatePeriod"]

        table = TimeIntervals(name="states",
                              description="sleep states of animal")
        table.add_column(name="label", description="sleep state")

        data = []
        for name in matin.dtype.names:
            for row in matin[name][0][0]:
                data.append(
                    dict(start_time=row[0], stop_time=row[1], label=name))
        # TimeIntervals rows must be added in ascending start_time order.
        [
            table.add_row(**row)
            for row in sorted(data, key=lambda x: x["start_time"])
        ]
        check_module(nwbfile, "behavior",
                     "Contains behavioral data.").add_data_interface(table)
def convert_data(self, nwbfile: NWBFile, metadata_dict: dict,
                 stub_test: bool = False,
                 include_spike_waveforms: bool = False):
    """Add behavioral data from the session folder to ``nwbfile``.

    Reads Matlab ``.mat`` files from the session folder and adds, in order:
    event stimuli, per-task position traces with their epoch intervals,
    eight-maze trial intervals, and sleep-state intervals.

    Parameters
    ----------
    nwbfile : NWBFile
        In-memory NWB file to append data to.
    metadata_dict : dict
        Must contain a 'task_types' list of dicts with keys 'name' and
        (optionally) 'conversion'.
    stub_test : bool, optional
        Unused here; present to satisfy the converter interface.
    include_spike_waveforms : bool, optional
        Unused here; present to satisfy the converter interface.
    """
    session_path = self.input_args['folder_path']  # TODO: check/enforce format?
    task_types = metadata_dict['task_types']
    subject_path, session_id = os.path.split(session_path)
    fpath_base = os.path.split(subject_path)[0]
    # Side-effect comprehension: registers each event series as a stimulus.
    [nwbfile.add_stimulus(x) for x in get_events(session_path)]
    sleep_state_fpath = os.path.join(
        session_path, '{}--StatePeriod.mat'.format(session_id))
    # Only declare the custom epoch column if at least one task file exists.
    exist_pos_data = any(
        os.path.isfile(
            os.path.join(
                session_path,
                '{}__{}.mat'.format(session_id, task_type['name'])))
        for task_type in task_types)
    if exist_pos_data:
        nwbfile.add_epoch_column('label', 'name of epoch')
    # Position traces and epoch intervals, one Position container per task.
    for task_type in task_types:
        label = task_type['name']
        file = os.path.join(session_path, session_id + '__' + label + '.mat')
        if os.path.isfile(file):
            pos_obj = Position(name=label + '_position')
            matin = loadmat(file)
            # Column 0 of 'twhl_norm' is the timestamp vector.
            tt = matin['twhl_norm'][:, 0]
            # Gaps in the timestamps delimit the experiment windows.
            exp_times = find_discontinuities(tt)
            if 'conversion' in task_type:
                conversion = task_type['conversion']
            else:
                conversion = np.nan
            for pos_type in ('twhl_norm', 'twhl_linearized'):
                if pos_type in matin:
                    # Remaining columns hold the position coordinates.
                    pos_data_norm = matin[pos_type][:, 1:]
                    spatial_series_object = SpatialSeries(
                        name=label + '_{}_spatial_series'.format(pos_type),
                        data=H5DataIO(pos_data_norm, compression='gzip'),
                        reference_frame='unknown',
                        conversion=conversion,
                        resolution=np.nan,
                        timestamps=H5DataIO(tt, compression='gzip'))
                    pos_obj.add_spatial_series(spatial_series_object)
            check_module(
                nwbfile, 'behavior',
                'contains processed behavioral data').add_data_interface(
                    pos_obj)
            for i, window in enumerate(exp_times):
                nwbfile.add_epoch(start_time=window[0],
                                  stop_time=window[1],
                                  label=label + '_' + str(i))
    # Eight-maze trial intervals.
    trialdata_path = os.path.join(session_path,
                                  session_id + '__EightMazeRun.mat')
    if os.path.isfile(trialdata_path):
        trials_data = loadmat(trialdata_path)['EightMazeRun']
        trialdatainfo_path = os.path.join(fpath_base, 'EightMazeRunInfo.mat')
        trialdatainfo = [
            x[0] for x in loadmat(trialdatainfo_path)['EightMazeRunInfo'][0]
        ]
        features = trialdatainfo[:7]
        # First two feature names become the required NWB trial columns.
        features[:2] = 'start_time', 'stop_time',
        # Side-effect comprehension: declares the remaining trial columns.
        [
            nwbfile.add_trial_column(x, 'description')
            for x in features[4:] + ['condition']
        ]
        for trial_data in trials_data:
            # Index 3 flags left vs right run; indices 4-6 are per-trial flags.
            if trial_data[3]:
                cond = 'run_left'
            else:
                cond = 'run_right'
            nwbfile.add_trial(start_time=trial_data[0],
                              stop_time=trial_data[1],
                              condition=cond,
                              error_run=trial_data[4],
                              stim_run=trial_data[5],
                              both_visit=trial_data[6])
    # Sleep-state intervals, sorted chronologically before insertion.
    if os.path.isfile(sleep_state_fpath):
        matin = loadmat(sleep_state_fpath)['StatePeriod']
        table = TimeIntervals(name='states',
                              description='sleep states of animal')
        table.add_column(name='label', description='sleep state')
        data = []
        for name in matin.dtype.names:
            # Each struct field holds an array of [start, stop] rows.
            for row in matin[name][0][0]:
                data.append({
                    'start_time': row[0],
                    'stop_time': row[1],
                    'label': name
                })
        [
            table.add_row(**row)
            for row in sorted(data, key=lambda x: x['start_time'])
        ]
        check_module(nwbfile, 'behavior',
                     'contains behavioral data').add_data_interface(table)
def yuta2nwb(
        session_path='D:/BuzsakiData/SenzaiY/YutaMouse41/YutaMouse41-150903',
        # '/Users/bendichter/Desktop/Buzsaki/SenzaiBuzsaki2017/YutaMouse41/YutaMouse41-150903',
        subject_xls=None,
        include_spike_waveforms=True,
        stub=True,
        cache_spec=True):
    """Convert a SenzaiBuzsaki2017 session folder to a single NWB file.

    Reads the subject spreadsheet, raw/LFP electrophysiology, spike units,
    position, epochs, trials, and sleep states, then writes
    ``<session_path>.nwb`` (or ``<session_path>_stub.nwb`` when ``stub``)
    and re-opens it as a read test.

    Parameters
    ----------
    session_path : str
        Folder named ``<subject_id>-<date>`` (or ``<subject_id>b<date>``).
    subject_xls : str, optional
        Path to the subject 'exp_sheet.xlsx'; derived from the session
        folder when None.
    include_spike_waveforms : bool, optional
        Also write per-shank spike waveforms from the .spk files.
    stub : bool, optional
        Use small random stand-in data for fast testing.
    cache_spec : bool, optional
        Passed through to NWBHDF5IO.write.

    NOTE(review): relies on module-level names ``max_shanks``,
    ``task_types``, ``special_electrode_dict`` and ``celltype_dict``
    defined elsewhere in this file.
    """
    subject_path, session_id = os.path.split(session_path)
    fpath_base = os.path.split(subject_path)[0]
    identifier = session_id
    # Session folders are named like 'YutaMouse41-150903'; chars 9:11 are the
    # mouse number used to locate the experiment spreadsheet.
    mouse_number = session_id[9:11]
    # 'b' sessions use a different separator; the flag is forwarded to
    # get_reference_elec below.
    if '-' in session_id:
        subject_id, date_text = session_id.split('-')
        b = False
    else:
        subject_id, date_text = session_id.split('b')
        b = True
    if subject_xls is None:
        subject_xls = os.path.join(subject_path,
                                   'YM' + mouse_number + ' exp_sheet.xlsx')
    else:
        # Treat a non-.xlsx argument as a directory containing the sheet.
        if not subject_xls[-4:] == 'xlsx':
            subject_xls = os.path.join(subject_xls,
                                       'YM' + mouse_number + ' exp_sheet.xlsx')
    session_start_time = dateparse(date_text, yearfirst=True)
    df = pd.read_excel(subject_xls)
    # Pull known subject fields from the first spreadsheet column.
    subject_data = {}
    for key in [
            'genotype', 'DOB', 'implantation', 'Probe', 'Surgery',
            'virus injection', 'mouseID'
    ]:
        names = df.iloc[:, 0]
        if key in names.values:
            subject_data[key] = df.iloc[np.argmax(names == key), 1]
    # Age is only computable when the spreadsheet DOB parsed as a datetime.
    if isinstance(subject_data['DOB'], datetime):
        age = session_start_time - subject_data['DOB']
    else:
        age = None
    subject = Subject(subject_id=subject_id,
                      age=str(age),
                      genotype=subject_data['genotype'],
                      species='mouse')
    nwbfile = NWBFile(
        session_description='mouse in open exploration and theta maze',
        identifier=identifier,
        session_start_time=session_start_time.astimezone(),
        file_create_date=datetime.now().astimezone(),
        experimenter='Yuta Senzai',
        session_id=session_id,
        institution='NYU',
        lab='Buzsaki',
        subject=subject,
        related_publications='DOI:10.1016/j.neuron.2016.12.011')
    print('reading and writing raw position data...', end='', flush=True)
    ns.add_position_data(nwbfile, session_path)
    # NOTE(review): hard cap of 8 shanks here, while later code uses the
    # module-level ``max_shanks`` — confirm these agree.
    shank_channels = ns.get_shank_channels(session_path)[:8]
    nshanks = len(shank_channels)
    all_shank_channels = np.concatenate(shank_channels)
    print('setting up electrodes...', end='', flush=True)
    hilus_csv_path = os.path.join(fpath_base, 'early_session_hilus_chans.csv')
    lfp_channel = get_reference_elec(subject_xls,
                                     hilus_csv_path,
                                     session_start_time,
                                     session_id,
                                     b=b)
    # Boolean column marking the theta-reference electrode.
    custom_column = [{
        'name': 'theta_reference',
        'description':
        'this electrode was used to calculate LFP canonical bands',
        'data': all_shank_channels == lfp_channel
    }]
    ns.write_electrode_table(nwbfile,
                             session_path,
                             custom_columns=custom_column,
                             max_shanks=max_shanks)
    print('reading raw electrode data...', end='', flush=True)
    if stub:
        # example recording extractor for fast testing
        xml_filepath = os.path.join(session_path, session_id + '.xml')
        xml_root = et.parse(xml_filepath).getroot()
        acq_sampling_frequency = float(
            xml_root.find('acquisitionSystem').find('samplingRate').text)
        num_channels = 4
        num_frames = 10000
        X = np.random.normal(0, 1, (num_channels, num_frames))
        geom = np.random.normal(0, 1, (num_channels, 2))
        X = (X * 100).astype(int)
        sre = se.NumpyRecordingExtractor(
            timeseries=X,
            sampling_frequency=acq_sampling_frequency,
            geom=geom)
    else:
        nre = se.NeuroscopeRecordingExtractor('{}/{}.dat'.format(
            session_path, session_id))
        sre = se.SubRecordingExtractor(nre, channel_ids=all_shank_channels)
    print('writing raw electrode data...', end='', flush=True)
    se.NwbRecordingExtractor.add_electrical_series(sre, nwbfile)
    print('done.')
    print('reading spiking units...', end='', flush=True)
    if stub:
        # Random spike trains per shank; unit j gets spike_times[j] spikes.
        spike_times = [200, 300, 400]
        num_frames = 10000
        allshanks = []
        for k in range(nshanks):
            SX = se.NumpySortingExtractor()
            for j in range(len(spike_times)):
                SX.add_unit(unit_id=j + 1,
                            times=np.sort(
                                np.random.uniform(0, num_frames,
                                                  spike_times[j])))
            allshanks.append(SX)
        se_allshanks = se.MultiSortingExtractor(allshanks)
        se_allshanks.set_sampling_frequency(acq_sampling_frequency)
    else:
        se_allshanks = se.NeuroscopeMultiSortingExtractor(session_path,
                                                          keep_mua_units=False)
    # Map every unit to its shank's electrode group, in shank order.
    electrode_group = []
    for shankn in np.arange(1, nshanks + 1, dtype=int):
        # NOTE: 'id' shadows the builtin; left unchanged here.
        for id in se_allshanks.sortings[shankn - 1].get_unit_ids():
            electrode_group.append(nwbfile.electrode_groups['shank' +
                                                            str(shankn)])
    df_unit_features = get_UnitFeatureCell_features(fpath_base, session_id,
                                                    session_path)
    # Derive a cell-type name per unit from fineCellType/region codes.
    celltype_names = []
    for celltype_id, region_id in zip(df_unit_features['fineCellType'].values,
                                      df_unit_features['region'].values):
        if celltype_id == 1:
            # regions: 3 -> CA3 (pyramidal), 4 -> DG (granule)
            if region_id == 3:
                celltype_names.append('pyramidal cell')
            elif region_id == 4:
                celltype_names.append('granule cell')
            else:
                raise Exception('unknown type')
        elif not np.isfinite(celltype_id):
            celltype_names.append('missing')
        else:
            celltype_names.append(celltype_dict[celltype_id])
    # Add custom column data into the SortingExtractor so it can be written by the converter
    # Note there is currently a hidden assumption that the way in which the NeuroscopeSortingExtractor
    # merges the cluster IDs matches one-to-one with the get_UnitFeatureCell_features extraction
    property_descriptions = {
        'cell_type': 'name of cell type',
        'global_id': 'global id for cell for entire experiment',
        'shank_id': '0-indexed id of cluster of shank',
        'electrode_group':
        'the electrode group that each spike unit came from'
    }
    property_values = {
        'cell_type': celltype_names,
        'global_id': df_unit_features['unitID'].values,
        'shank_id': [x - 2 for x in df_unit_features['unitIDshank'].values],
        # - 2 b/c the get_UnitFeatureCell_features removes 0 and 1 IDs from each shank
        'electrode_group': electrode_group
    }
    for unit_id in se_allshanks.get_unit_ids():
        for property_name in property_descriptions.keys():
            se_allshanks.set_unit_property(
                unit_id, property_name, property_values[property_name][unit_id])
    se.NwbSortingExtractor.write_sorting(
        se_allshanks,
        nwbfile=nwbfile,
        property_descriptions=property_descriptions)
    print('done.')
    # Read and write LFP's
    print('reading LFPs...', end='', flush=True)
    lfp_fs, all_channels_lfp_data = ns.read_lfp(session_path, stub=stub)
    lfp_data = all_channels_lfp_data[:, all_shank_channels]
    print('writing LFPs...', flush=True)
    # lfp_data[:int(len(lfp_data)/4)]
    lfp_ts = ns.write_lfp(nwbfile,
                          lfp_data,
                          lfp_fs,
                          name='lfp',
                          description='lfp signal for all shank electrodes')
    # Read and add special environmental electrodes
    for name, channel in special_electrode_dict.items():
        ts = TimeSeries(
            name=name,
            description=
            'environmental electrode recorded inline with neural data',
            data=all_channels_lfp_data[:, channel],
            rate=lfp_fs,
            unit='V',
            #conversion=np.nan,
            resolution=np.nan)
        nwbfile.add_acquisition(ts)
    # compute filtered LFP
    print('filtering LFP...', end='', flush=True)
    # Phase of the reference channel in each canonical band.
    all_lfp_phases = []
    for passband in ('theta', 'gamma'):
        lfp_fft = filter_lfp(
            lfp_data[:, all_shank_channels == lfp_channel].ravel(),
            lfp_fs,
            passband=passband)
        lfp_phase, _ = hilbert_lfp(lfp_fft)
        all_lfp_phases.append(lfp_phase[:, np.newaxis])
    data = np.dstack(all_lfp_phases)
    print('done.', flush=True)
    if include_spike_waveforms:
        print('writing waveforms...', end='', flush=True)
        nshanks = min((max_shanks, len(ns.get_shank_channels(session_path))))
        for shankn in np.arange(nshanks, dtype=int) + 1:
            # Get spike activty from .spk file on a per-shank and per-sample basis
            ns.write_spike_waveforms(nwbfile, session_path, shankn, stub=stub)
        print('done.', flush=True)
    # Get the LFP Decomposition Series
    decomp_series = DecompositionSeries(
        name='LFPDecompositionSeries',
        description='Theta and Gamma phase for reference LFP',
        data=data,
        rate=lfp_fs,
        source_timeseries=lfp_ts,
        metric='phase',
        unit='radians')
    decomp_series.add_band(band_name='theta', band_limits=(4, 10))
    decomp_series.add_band(band_name='gamma', band_limits=(30, 80))
    check_module(nwbfile, 'ecephys',
                 'contains processed extracellular electrophysiology data'
                 ).add_data_interface(decomp_series)
    # Side-effect comprehension: registers each event series as a stimulus.
    [nwbfile.add_stimulus(x) for x in ns.get_events(session_path)]
    # create epochs corresponding to experiments/environments for the mouse
    sleep_state_fpath = os.path.join(session_path,
                                     '{}--StatePeriod.mat'.format(session_id))
    exist_pos_data = any(
        os.path.isfile(
            os.path.join(session_path,
                         '{}__{}.mat'.format(session_id, task_type['name'])))
        for task_type in task_types)
    if exist_pos_data:
        nwbfile.add_epoch_column('label', 'name of epoch')
    for task_type in task_types:
        label = task_type['name']
        file = os.path.join(session_path, session_id + '__' + label + '.mat')
        if os.path.isfile(file):
            print('loading position for ' + label + '...', end='', flush=True)
            pos_obj = Position(name=label + '_position')
            matin = loadmat(file)
            # Column 0 of 'twhl_norm' is the timestamp vector; gaps in it
            # delimit the experiment windows.
            tt = matin['twhl_norm'][:, 0]
            exp_times = find_discontinuities(tt)
            if 'conversion' in task_type:
                conversion = task_type['conversion']
            else:
                conversion = np.nan
            for pos_type in ('twhl_norm', 'twhl_linearized'):
                if pos_type in matin:
                    pos_data_norm = matin[pos_type][:, 1:]
                    spatial_series_object = SpatialSeries(
                        name=label + '_{}_spatial_series'.format(pos_type),
                        data=H5DataIO(pos_data_norm, compression='gzip'),
                        reference_frame='unknown',
                        conversion=conversion,
                        resolution=np.nan,
                        timestamps=H5DataIO(tt, compression='gzip'))
                    pos_obj.add_spatial_series(spatial_series_object)
            check_module(
                nwbfile, 'behavior',
                'contains processed behavioral data').add_data_interface(
                    pos_obj)
            for i, window in enumerate(exp_times):
                nwbfile.add_epoch(start_time=window[0],
                                  stop_time=window[1],
                                  label=label + '_' + str(i))
            print('done.')
    # there are occasional mismatches between the matlab struct and the neuroscope files
    # regions: 3: 'CA3', 4: 'DG'
    trialdata_path = os.path.join(session_path,
                                  session_id + '__EightMazeRun.mat')
    if os.path.isfile(trialdata_path):
        trials_data = loadmat(trialdata_path)['EightMazeRun']
        trialdatainfo_path = os.path.join(fpath_base, 'EightMazeRunInfo.mat')
        trialdatainfo = [
            x[0] for x in loadmat(trialdatainfo_path)['EightMazeRunInfo'][0]
        ]
        features = trialdatainfo[:7]
        # First two feature names become the required NWB trial columns.
        features[:2] = 'start_time', 'stop_time',
        [
            nwbfile.add_trial_column(x, 'description')
            for x in features[4:] + ['condition']
        ]
        for trial_data in trials_data:
            # Index 3 flags left vs right run; indices 4-6 are per-trial flags.
            if trial_data[3]:
                cond = 'run_left'
            else:
                cond = 'run_right'
            nwbfile.add_trial(start_time=trial_data[0],
                              stop_time=trial_data[1],
                              condition=cond,
                              error_run=trial_data[4],
                              stim_run=trial_data[5],
                              both_visit=trial_data[6])
    """
    mono_syn_fpath = os.path.join(session_path, session_id+'-MonoSynConvClick.mat')

    matin = loadmat(mono_syn_fpath)
    exc = matin['FinalExcMonoSynID']
    inh = matin['FinalInhMonoSynID']

    #exc_obj = CatCellInfo(name='excitatory_connections',
    #                      indices_values=[], cell_index=exc[:, 0] - 1, indices=exc[:, 1] - 1)
    #module_cellular.add_container(exc_obj)
    #inh_obj = CatCellInfo(name='inhibitory_connections',
    #                      indices_values=[], cell_index=inh[:, 0] - 1, indices=inh[:, 1] - 1)
    #module_cellular.add_container(inh_obj)
    """
    # Sleep-state intervals, sorted chronologically before insertion.
    if os.path.isfile(sleep_state_fpath):
        matin = loadmat(sleep_state_fpath)['StatePeriod']
        table = TimeIntervals(name='states',
                              description='sleep states of animal')
        table.add_column(name='label', description='sleep state')
        data = []
        for name in matin.dtype.names:
            for row in matin[name][0][0]:
                data.append({
                    'start_time': row[0],
                    'stop_time': row[1],
                    'label': name
                })
        [
            table.add_row(**row)
            for row in sorted(data, key=lambda x: x['start_time'])
        ]
        check_module(nwbfile, 'behavior',
                     'contains behavioral data').add_data_interface(table)
    print('writing NWB file...', end='', flush=True)
    if stub:
        out_fname = session_path + '_stub.nwb'
    else:
        out_fname = session_path + '.nwb'
    with NWBHDF5IO(out_fname, mode='w') as io:
        io.write(nwbfile, cache_spec=cache_spec)
    print('done.')
    print('testing read...', end='', flush=True)
    # test read
    with NWBHDF5IO(out_fname, mode='r') as io:
        io.read()
    print('done.')
def run_conversion(
    self,
    nwbfile: NWBFile,
    metadata_dict: dict,
    stub_test: bool = False,
):
    """Add trial intervals and processed behavior to ``nwbfile``.

    Reads the OptiTrack 'Take' CSV (frame-to-time mapping), the
    ``<session_id>.trials.behavior.mat`` trial struct, and ``animal.mat``
    (position, speed, acceleration, temperature) from the session folder.

    Parameters
    ----------
    nwbfile : NWBFile
        In-memory NWB file to append data to.
    metadata_dict : dict
        Unused here; present to satisfy the converter interface.
    stub_test : bool, optional
        Unused here; present to satisfy the converter interface.

    NOTE(review): the repeated ``[0][0]`` indexing unwraps scipy.io.loadmat
    Matlab-struct nesting — assumed struct layout; confirm against the data.
    """
    session_path = Path(self.source_data["folder_path"])
    session_id = session_path.name

    # Trials
    take_file_paths = [x for x in session_path.iterdir() if "Take" in x.name]
    # Some sessions had duplicate/non-corresponding Take files
    if len(take_file_paths) == 1:
        take_file_path = take_file_paths[0]
        # Header row is at line 6 of the OptiTrack export.
        take_file = pd.read_csv(take_file_path, header=5)
        take_file_time_name = [x for x in take_file if "Time" in x][0]  # Can be either 'Time' or 'Time (Seconds)'
        take_frame_to_time = {x: y for x, y in zip(take_file["Frame"], take_file[take_file_time_name])}
        trial_info = loadmat(str(session_path / f"{session_id}.trials.behavior.mat"))["trials"]
        trial_start_frames = trial_info["start"][0][0]
        n_trials = len(trial_start_frames)
        trial_end_frames = trial_info["end"][0][0]
        trial_stat = trial_info["stat"][0][0]
        trial_stat_labels = [x[0][0] for x in trial_info["labels"][0][0]]
        cooling_info = trial_info["cooling"][0][0]
        cooling_map = dict({0: "Cooling off", 1: "Pre-Cooling", 2: "Cooling on", 3: "Post-Cooling"})
        # 'error' holds 1-based indices of the error trials.
        trial_error = trial_info["error"][0][0]
        error_trials = np.array([False] * n_trials)
        error_trials[np.array(trial_error).astype(int) - 1] = True  # -1 from Matlab indexing
        trial_starts = []
        trial_ends = []
        trial_condition = []
        for k in range(n_trials):
            # Map trial frame numbers to Take-file timestamps.
            trial_starts.append(take_frame_to_time[trial_start_frames[k]])
            trial_ends.append(take_frame_to_time[trial_end_frames[k]])
            nwbfile.add_trial(start_time=trial_starts[k], stop_time=trial_ends[k])
            # 'stat' is a 1-based index into the stat labels.
            trial_condition.append(trial_stat_labels[int(trial_stat[k]) - 1])
        nwbfile.add_trial_column(
            name="condition",
            description="Whether the maze condition was left or right.",
            data=trial_condition,
        )
        nwbfile.add_trial_column(
            name="error",
            description="Whether the subject made a mistake.",
            data=error_trials,
        )
        if "temperature" in trial_info:  # Some sessions don't have this for some reason
            trial_temperature = trial_info["temperature"][0][0]
            nwbfile.add_trial_column(
                name="temperature",
                description="Average brain temperature for the trial.",
                data=trial_temperature,
            )
        if len(cooling_info) == n_trials:  # some sessions had incomplete cooling info
            trial_cooling = [cooling_map[int(cooling_info[k])] for k in range(n_trials)]
            nwbfile.add_trial_column(
                name="cooling state",
                description="The labeled cooling state of the subject during the trial.",
                data=trial_cooling,
            )

    # Position
    animal_file_path = session_path / "animal.mat"
    if animal_file_path.is_file():
        behavioral_processing_module = get_module(nwbfile, "behavior", "Contains processed behavioral data.")
        animal_mat = loadmat(str(animal_file_path))["animal"]
        animal_time = animal_mat["time"][0][0][0]
        # Prefer rate/starting_time over explicit timestamps when the clock
        # is regular; these kwargs are shared by all series below.
        animal_time_kwargs = dict()
        if check_regular_timestamps(animal_time):
            animal_time_kwargs.update(rate=animal_time[1] - animal_time[0], starting_time=animal_time[0])
        else:
            animal_time_kwargs.update(timestamps=H5DataIO(animal_time, compression="gzip"))
        # Processed (x,y,z) position
        pos_obj = Position(name="SubjectPosition")
        pos_obj.add_spatial_series(
            SpatialSeries(
                name="SpatialSeries",
                description="(x,y,z) coordinates tracking subject movement through the maze.",
                reference_frame="Unknown",
                conversion=1e-2,
                resolution=np.nan,
                data=H5DataIO(np.array(animal_mat["pos"][0][0]).T, compression="gzip"),
                **animal_time_kwargs,
            )
        )
        behavioral_processing_module.add(pos_obj)
        # Linearized position
        if "pos_linearized" in animal_mat:  # Some sessions don't have this for some reason
            lin_pos_obj = Position(name="LinearizedPosition")
            lin_pos_obj.add_spatial_series(
                SpatialSeries(
                    name="LinearizedSpatialSeries",
                    description="Linearization of the (x,y,z) coordinates tracking subject movement through maze.",
                    reference_frame="Unknown",
                    conversion=1e-2,
                    resolution=np.nan,
                    data=H5DataIO(animal_mat["pos_linearized"][0][0][0], compression="gzip"),
                    **animal_time_kwargs,
                )
            )
            behavioral_processing_module.add(lin_pos_obj)
        # Speed
        behavioral_processing_module.add(
            TimeSeries(
                name="SubjectSpeed",
                description="Instantaneous speed of subject through the maze.",
                unit="cm/s",
                resolution=np.nan,
                data=H5DataIO(animal_mat["speed"][0][0][0], compression="gzip"),
                **animal_time_kwargs,
            )
        )
        # Acceleration
        behavioral_processing_module.add(
            TimeSeries(
                name="Acceleration",
                description="Instantaneous acceleration of subject through the maze.",
                unit="cm/s^2",
                resolution=np.nan,
                data=H5DataIO(animal_mat["acceleration"][0][0][0], compression="gzip"),
                **animal_time_kwargs,
            )
        )
        # Temperature
        behavioral_processing_module.add(
            TimeSeries(
                name="Temperature",
                description="Internal brain temperature throughout the session.",
                unit="Celsius",
                resolution=np.nan,
                data=H5DataIO(animal_mat["temperature"][0][0][0], compression="gzip"),
                **animal_time_kwargs,
            )
        )