def createAxonaData_for_NWBfile(OpenEphysDataPath, spike_name='first_available', channel_map=None, subfolder='AxonaData', eegChans=None, pixels_per_metre=None, show_output=False, clustering_name=None):
    """Export a single NWB recording to Axona format, one dataset per recording area.

    Output is written to a sibling folder of the recording (named by ``subfolder``).
    If ``channel_map`` is not given, it is loaded from the recording's settings;
    a missing channel map raises ValueError.

    Parameters mirror the downstream loaders: ``spike_name`` selects the spike
    source ('first_available' picks the first usable one), ``eegChans`` lists
    channels to export as LFP (only those belonging to each area are used),
    ``clustering_name`` selects which clustering to read.
    """
    # Axona output lives next to the recording file, in `subfolder`.
    output_path = os.path.join(os.path.dirname(OpenEphysDataPath), subfolder)
    experiment_info = getExperimentInfo(OpenEphysDataPath)

    # Resolve the channel map from recording settings when not supplied.
    if channel_map is None:
        if not NWBio.check_if_settings_available(OpenEphysDataPath, '/General/channel_map/'):
            raise ValueError('Channel map could not be generated. Enter channels to process.')
        channel_map = NWBio.load_settings(OpenEphysDataPath, '/General/channel_map/')

    # Recording time span and (optional) processed position data.
    data_time_edges = NWBio.get_processed_tracking_data_timestamp_edges(OpenEphysDataPath)
    if NWBio.check_if_processed_position_data_available(OpenEphysDataPath):
        posdata = NWBio.load_processed_tracking_data(OpenEphysDataPath)
    else:
        posdata = None

    # One Axona dataset per recording area.
    for area in channel_map:
        area_channels = channel_map[area]['list']
        tetrode_nrs = hfunct.get_tetrode_nrs(area_channels)
        print('Loading spikes for tetrodes nr: ' + ', '.join(str(nr) for nr in tetrode_nrs))
        if spike_name == 'first_available':
            spike_data = get_first_available_spike_data(
                OpenEphysDataPath, tetrode_nrs, use_idx_keep=True,
                use_badChan=True, clustering_name=clustering_name)
        else:
            spike_data = NWBio.load_spikes(
                OpenEphysDataPath, tetrode_nrs=tetrode_nrs, spike_name=spike_name,
                use_idx_keep=True, use_badChan=True, clustering_name=clustering_name)

        # LFP: only the requested channels that belong to this area.
        eegData = None
        if eegChans is not None:
            area_eeg_chans = [chan for chan in eegChans if chan in area_channels]
            if area_eeg_chans:
                print('Loading LFP data for channels: ' + ', '.join(str(chan) for chan in area_eeg_chans))
                eegData = load_eegData(OpenEphysDataPath, area_eeg_chans)

        createAxonaData(output_path, spike_data, data_time_edges, posdata=posdata,
                        experiment_info=experiment_info, axona_file_name=area,
                        eegData=eegData, pixels_per_metre=pixels_per_metre,
                        show_output=show_output)
def createAxonaData_for_multiple_NWBfiles(OpenEphysDataPaths, AxonaDataPath, spike_name='first_available', channel_map=None, eegChans=None, pixels_per_metre=None, show_output=False, clustering_name=None):
    """Export multiple NWB recordings as one concatenated Axona dataset per area.

    Recordings are laid end-to-end on a common time axis: for each recording the
    tracking-data time edges define its duration, and ``recording_edges`` holds
    the [start, end] of each segment in the combined timeline. Position, spike
    and LFP data are loaded per recording and concatenated across recordings
    before export. A 'recording_edges' text file mapping segments to source
    paths is written alongside the Axona output.

    NOTE: experiment_info and (when not supplied) channel_map are taken from the
    first recording only.

    Raises:
        ValueError: if no channel_map is supplied and none is stored in the
            first recording's settings.
    """
    # Get experiment info (first recording only).
    if len(OpenEphysDataPaths) > 1:
        print('Using experiment_info from first recording only.')
    experiment_info = getExperimentInfo(OpenEphysDataPaths[0])
    # Get channel_map for this dataset (first recording only).
    if channel_map is None:
        if len(OpenEphysDataPaths) > 1:
            print('Using channel_map from first recording only.')
        if NWBio.check_if_settings_available(OpenEphysDataPaths[0], '/General/channel_map/'):
            channel_map = NWBio.load_settings(OpenEphysDataPaths[0], '/General/channel_map/')
        else:
            raise ValueError('Channel map could not be generated. Enter channels to process.')
    # Compute start and end times of each segment of the recording.
    data_time_edges = []
    for OpenEphysDataPath in OpenEphysDataPaths:
        data_time_edges.append(NWBio.get_processed_tracking_data_timestamp_edges(OpenEphysDataPath))
    # Map each recording onto a cumulative combined timeline.
    recording_edges = []
    recording_duration = 0
    for dte in data_time_edges:
        end_of_this_recording = recording_duration + (dte[1] - dte[0])
        recording_edges.append([recording_duration, end_of_this_recording])
        recording_duration = end_of_this_recording
    combined_data_time_edges = [recording_edges[0][0], recording_edges[-1][1]]
    # Get position data for these recordings. If any recording lacks processed
    # position data, position data is dropped entirely for the combined set.
    print('Loading position data.')
    posdata = []
    for OpenEphysDataPath in OpenEphysDataPaths:
        if NWBio.check_if_processed_position_data_available(OpenEphysDataPath):
            posdata.append(NWBio.load_processed_tracking_data(OpenEphysDataPath))
        else:
            posdata.append(None)
    if any([x is None for x in posdata]):
        posdata = None
    else:
        posdata = concatenate_posdata_across_recordings(posdata, data_time_edges, recording_edges)
    # Create AxonaData separately for each recording area.
    for area in channel_map.keys():
        # Load spike data from each recording and concatenate in time.
        tetrode_nrs = hfunct.get_tetrode_nrs(channel_map[area]['list'])
        print('Loading spikes for tetrodes nr: ' + ', '.join(map(str, tetrode_nrs)))
        spike_data = []
        for OpenEphysDataPath in OpenEphysDataPaths:
            if spike_name == 'first_available':
                # FIX: forward clustering_name here as well, matching both the
                # load_spikes branch below and createAxonaData_for_NWBfile;
                # previously the argument was silently dropped on this path.
                spike_data.append(get_first_available_spike_data(
                    OpenEphysDataPath, tetrode_nrs, use_idx_keep=True,
                    use_badChan=True, clustering_name=clustering_name))
            else:
                print([hfunct.time_string(), 'DEBUG: loading spikes of tet ', tetrode_nrs, ' from ', OpenEphysDataPath])
                spike_data.append(NWBio.load_spikes(
                    OpenEphysDataPath, tetrode_nrs=tetrode_nrs, spike_name=spike_name,
                    use_idx_keep=True, use_badChan=True, clustering_name=clustering_name))
        spike_data = concatenate_spike_data_across_recordings(spike_data, data_time_edges, recording_edges)
        # Load eeg data: only requested channels that belong to this area.
        if eegChans is None:
            eegData = None
        else:
            eegChansInArea_Bool = [x in channel_map[area]['list'] for x in eegChans]
            if any(eegChansInArea_Bool):
                eegChansInArea = [x for (x, y) in zip(eegChans, eegChansInArea_Bool) if y]
                print('Loading LFP data for channels: ' + ', '.join(map(str, eegChansInArea)))
                eegData = []
                for OpenEphysDataPath in OpenEphysDataPaths:
                    print([hfunct.time_string(), 'DEBUG: loading eegData for ', OpenEphysDataPath])
                    eegData.append(load_eegData(OpenEphysDataPath, eegChansInArea))
                print([hfunct.time_string(), 'DEBUG: concatenating eeg data'])
                eegData = concatenate_eegData_across_recordings(eegData, data_time_edges, recording_edges)
            else:
                eegData = None
        createAxonaData(AxonaDataPath, spike_data, combined_data_time_edges,
                        posdata=posdata, experiment_info=experiment_info,
                        axona_file_name=area, eegData=eegData,
                        pixels_per_metre=pixels_per_metre, show_output=show_output)
    # Record which time segment of the combined data came from which recording.
    with open(os.path.join(AxonaDataPath, 'recording_edges'), 'w') as file:
        for edges, OpenEphysDataPath in zip(recording_edges, OpenEphysDataPaths):
            file.write(str(edges) + ' path: ' + OpenEphysDataPath + '\n')