def extract_nidq_trial_data(session_key, channel):
    """Extract per-trial NIDQ analog data (e.g. breathing) for one session.

    Args:
        session_key: restriction dict identifying one Session.
        channel: NIDQ channel index to read (wrapped in a one-element list
            for ``read_SGLX_bin``).

    Returns:
        list of dicts — one per ephys trial whose bitcode matched a behavior
        trial — each containing the session key plus 'trial', 'data' and
        'timestamps' entries.

    Raises:
        FileNotFoundError: if no session folder or no ``*.nidq.bin`` file
            is found under any of the rig paths.
    """
    from pipeline.ingest import ephys as ephys_ingest  # avoid circular import

    h2o = (lab.WaterRestriction & session_key).fetch1('water_restriction_number')
    sess_datetime = (Session & session_key).proj(
        sess_datetime="cast(concat(session_date, ' ', session_time) as datetime)"
    ).fetch1('sess_datetime')

    rigpaths = ephys_ingest.get_ephys_paths()
    for rigpath in rigpaths:
        session_ephys_dir, dglob = ephys_ingest._get_sess_dir(
            rigpath, h2o, sess_datetime)
        if session_ephys_dir is not None:
            break
    else:
        raise FileNotFoundError(
            'Error - No session folder found for {}/{}'.format(
                h2o, sess_datetime))

    try:
        nidq_bin_fp = next(session_ephys_dir.glob('*.nidq.bin'))
    except StopIteration:
        raise FileNotFoundError(
            '*.nidq.bin file not found in {}'.format(session_ephys_dir))

    ephys_bitcodes, trial_start_times = ephys_ingest.build_bitcode(
        session_ephys_dir)

    behav_trials, behavior_bitcodes = (
        TrialNote & {**session_key, 'trial_note_type': 'bitcode'}
    ).fetch('trial', 'trial_note', order_by='trial')

    chan_list = [channel]
    breathing_data, sampling_rate = ephys_ingest.read_SGLX_bin(
        nidq_bin_fp, chan_list)

    # convert trial start times (seconds) into sample indices
    trial_starts_indices = (trial_start_times * sampling_rate).astype(int)

    # segment to per-trial
    all_trials_data = []
    for idx, start_idx in enumerate(trial_starts_indices):
        # BUGFIX: the last trial previously sliced to -1, silently dropping
        # the final sample; slice to None to keep the full trace. Also test
        # the index position (idx + 1 < len) rather than comparing sample
        # values, which misbehaves if two trials share a start index.
        end_idx = (trial_starts_indices[idx + 1]
                   if idx + 1 < len(trial_starts_indices) else None)
        trial_data = breathing_data[:, start_idx:end_idx].flatten()

        # match this ephys trial to a behavior trial via its bitcode
        ephys_bitcode = ephys_bitcodes[idx]
        matched_trial_idx = np.where(behavior_bitcodes == ephys_bitcode)[0]
        if len(matched_trial_idx):
            all_trials_data.append({
                **session_key,
                'trial': behav_trials[matched_trial_idx[0]],
                'data': trial_data,
                'timestamps': np.arange(len(trial_data)) / sampling_rate})

    return all_trials_data
def make(self, key):
    """Populate curated cluster notes for the probe insertion in ``key``.

    Locates the Kilosort output directory from the ingested ephys file
    path, extracts curated per-cluster notes, and inserts one row per
    (curation source, unit), skipping the aggregate 'group' source.

    Raises:
        FileNotFoundError: if the ephys data directory is not found under
            any rig path.
    """
    from pipeline.ingest import ephys as ephys_ingest  # import here to avoid circular imports

    ephys_file = (ephys_ingest.EphysIngest.EphysFile.proj(
        insertion_number='probe_insertion_number') & key).fetch1('ephys_file')

    rigpaths = ephys_ingest.get_ephys_paths()
    for rigpath in rigpaths:
        # BUGFIX/consistency: rig paths may be plain strings; coerce to
        # pathlib.Path so the '/' join works (matches the sibling make()
        # implementation in this file).
        rigpath = pathlib.Path(rigpath)
        if (rigpath / ephys_file).exists():
            session_ephys_dir = rigpath / ephys_file
            break
    else:
        raise FileNotFoundError(
            'Error - No ephys data directory found for {}'.format(ephys_file))

    ks = ephys_ingest.Kilosort(session_ephys_dir)
    curated_cluster_notes = ks.extract_curated_cluster_notes()

    cluster_notes = []
    for curation_source, cluster_note in curated_cluster_notes.items():
        # 'group' holds aggregate labels, not per-unit notes — skip it
        if curation_source == 'group':
            continue
        cluster_notes.extend([{**key,
                               'note_source': curation_source,
                               'unit': unit,
                               'unit_quality': note}
                              for unit, note in zip(cluster_note['cluster_ids'],
                                                    cluster_note['cluster_notes'])])
    self.insert(cluster_notes)
def _update_one_session(key):
    """Re-extract spike sites/depths for every probe of one session.

    Loads the session's clustering results from disk and re-runs
    ``_add_spike_sites_and_depths`` for each probe inside a single
    database transaction, then deletes the session's cached
    ``ephys.UnitCellType`` entries so they can be repopulated.

    Returns:
        bool: True on success; False when the session folder or clustering
        files are missing, or loading any probe fails (in which case the
        whole transaction is cancelled — all probes or none).
    """
    log.info('\n======================================================')
    log.info('Waveform update for key: {k}'.format(k=key))

    #
    # Find Ephys Recording
    #
    key = (experiment.Session & key).fetch1()
    sinfo = ((lab.WaterRestriction * lab.Subject.proj()
              * experiment.Session.proj(..., '-session_time')) & key).fetch1()

    rigpaths = get_ephys_paths()
    h2o = sinfo['water_restriction_number']

    # session_time is a timedelta — convert to a time-of-day, then combine
    # with session_date into a full datetime for directory matching.
    sess_time = (datetime.min + key['session_time']).time()
    sess_datetime = datetime.combine(key['session_date'], sess_time)

    for rigpath in rigpaths:
        dpath, dglob = _get_sess_dir(rigpath, h2o, sess_datetime)
        if dpath is not None:
            break
    # NOTE(review): if rigpaths is empty, dpath is unbound here — presumably
    # get_ephys_paths() always returns at least one path; verify.

    if dpath is not None:
        log.info('Found session folder: {}'.format(dpath))
    else:
        log.warning('Error - No session folder found for {}/{}. Skipping...'.format(h2o, key['session_date']))
        return False

    try:
        clustering_files = _match_probe_to_ephys(h2o, dpath, dglob)
    except FileNotFoundError as e:
        log.warning(str(e) + '. Skipping...')
        return False

    # All probes are updated inside one transaction so a failure on any
    # probe leaves the database untouched.
    with ephys.Unit.connection.transaction:
        for probe_no, (f, cluster_method, npx_meta) in clustering_files.items():
            try:
                log.info('------ Start loading clustering results for probe: {} ------'.format(probe_no))
                loader = cluster_loader_map[cluster_method]
                dj.conn().ping()  # keep the connection alive through slow file loads
                _add_spike_sites_and_depths(loader(sinfo, *f), probe_no, npx_meta, rigpath)
            except (ProbeInsertionError, ClusterMetricError, FileNotFoundError) as e:
                dj.conn().cancel_transaction()  # either successful fix of all probes, or none at all
                if isinstance(e, ProbeInsertionError):
                    log.warning('Probe Insertion Error: \n{}. \nSkipping...'.format(str(e)))
                else:
                    log.warning('Error: {}'.format(str(e)))
                return False

    # Invalidate cached cell-type classifications so they get recomputed.
    with dj.config(safemode=False):
        (ephys.UnitCellType & key).delete()

    return True
def get_session_ephys_data_directory(session_key):
    """Locate the on-disk ephys data directory for one session.

    Args:
        session_key: restriction dict identifying one Session.

    Returns:
        The session's ephys directory (as returned by
        ``ephys_ingest._get_sess_dir``).

    Raises:
        FileNotFoundError: if no rig path contains a matching folder.
    """
    from pipeline.ingest import ephys as ephys_ingest  # deferred: circular import

    water_restriction = (lab.WaterRestriction & session_key).fetch1(
        'water_restriction_number')
    session_dt = (Session & session_key).proj(
        sess_datetime="cast(concat(session_date, ' ', session_time) as datetime)"
    ).fetch1('sess_datetime')

    # Return the first rig root containing a matching session directory.
    for rig_root in ephys_ingest.get_ephys_paths():
        found_dir, _ = ephys_ingest._get_sess_dir(
            rig_root, water_restriction, session_dt)
        if found_dir is not None:
            return found_dir

    raise FileNotFoundError(
        'Error - No session folder found for {}/{}'.format(
            water_restriction, session_dt))
def make(self, key):
    """Insert curated cluster notes for the probe insertion in ``key``.

    Only notes whose unit exists in ``Unit`` for this key and whose label
    is a recognized ``UnitQualityType`` are inserted; the aggregate
    'group' curation source is skipped.

    Raises:
        FileNotFoundError: if the ephys data directory is not found under
            any rig path.
    """
    # deferred imports: avoid circular dependency at module load time
    from pipeline.ingest import ephys as ephys_ingest
    from pipeline.util import _get_clustering_method

    ephys_file = (ephys_ingest.EphysIngest.EphysFile.proj(
        insertion_number='probe_insertion_number') & key).fetch1('ephys_file')

    session_ephys_dir = None
    for candidate_root in ephys_ingest.get_ephys_paths():
        candidate_root = pathlib.Path(candidate_root)
        if (candidate_root / ephys_file).exists():
            session_ephys_dir = candidate_root / ephys_file
            break
    if session_ephys_dir is None:
        raise FileNotFoundError(
            'Error - No ephys data directory found for {}'.format(
                ephys_file))

    key['clustering_method'] = _get_clustering_method(key)
    units = (Unit & key).fetch('unit')
    unit_quality_types = UnitQualityType.fetch('unit_quality')

    ks = ephys_ingest.Kilosort(session_ephys_dir)
    curated_cluster_notes = ks.extract_curated_cluster_notes()

    cluster_notes = []
    for note_source, notes in curated_cluster_notes.items():
        if note_source == 'group':
            continue
        for unit_id, quality in zip(notes['cluster_ids'],
                                    notes['cluster_notes']):
            # keep only known units with recognized quality labels
            if unit_id in units and quality in unit_quality_types:
                cluster_notes.append({**key,
                                      'note_source': note_source,
                                      'unit': unit_id,
                                      'unit_quality': quality})
    self.insert(cluster_notes)