def test_get_ephys_files(self):
    """glob_ephys_files should find AP and LF files whether given a string or a Path."""
    # first test at the root directory level, with a string input
    ephys_files = spikeglx.glob_ephys_files(self.dir.name)
    for ef in ephys_files:
        # assertIn reports the offending label on failure, unlike assertTrue(x in ...)
        self.assertIn(ef.label, ['probe_right', 'probe_left'])
        self.assertTrue(ef.ap.exists() and ef.lf.exists())
    # second test at the ephys directory level, with a pathlib.Path input
    ephys_files = spikeglx.glob_ephys_files(Path(self.dir.name) / 'raw_ephys_data')
    for ef in ephys_files:
        self.assertIn(ef.label, ['probe_right', 'probe_left'])
        self.assertTrue(ef.ap.exists() and ef.lf.exists())
def _get_all_probes_sync(session_path, bin_exists=True):
    """
    Round up all binary ephys files of a session, infer the neuropixel revision
    and attach the extracted sync arrays and sync channel map to each file record.

    :param session_path: path of the session folder
    :param bin_exists: bool, whether the binary file is required to be present
    :return: list of ephys file records, each with 'sync' and 'sync_map' attached
    """
    efiles = spikeglx.glob_ephys_files(session_path, bin_exists=bin_exists)
    neuropixel_version = spikeglx.get_neuropixel_version_from_files(efiles)
    # attach the sync information to each binary file found
    for efile in efiles:
        efile['sync'] = alfio.load_object(efile.path, 'sync', namespace='spikeglx', short_keys=True)
        efile['sync_map'] = get_ibl_sync_map(efile, neuropixel_version)
    return efiles
def raw_qc_session(session_path, overwrite=False):
    """
    Wrapper that executes QC from a session folder and outputs the results
    within the same folder as the original raw data.

    :param session_path: path of the session (Subject/yyyy-mm-dd/number)
    :param overwrite: bool (False) Force means overwriting an existing QC file
    :return: list of QC output files
    """
    qc_files = []
    for efile in spikeglx.glob_ephys_files(session_path):
        # run the RMS map extraction on each band (AP first, then LF) when present
        for band in ('ap', 'lf'):
            bin_file = efile.get(band)
            if bin_file and bin_file.exists():
                qc_files.extend(extract_rmsmap(bin_file, out_folder=None, overwrite=overwrite))
    return qc_files
def extract_sync(session_path, overwrite=False, ephys_files=None):
    """
    Reads ephys binary file(s) and extracts sync within the binary file folder.
    Assumes ephys data is within a `raw_ephys_data` folder.

    :param session_path: '/path/to/subject/yyyy-mm-dd/001'
    :param overwrite: Bool on re-extraction, forces overwrite instead of loading existing files
    :param ephys_files: optional pre-computed list of ephys file records; when falsy they
     are globbed from the session path
    :return: (syncs, outputs): list of sync dictionaries and list of output file paths
    """
    session_path = Path(session_path)
    if not ephys_files:
        ephys_files = spikeglx.glob_ephys_files(session_path)
    syncs = []
    outputs = []
    for efi in ephys_files:
        # the sync lives alongside the AP band file for probes, or the nidq file otherwise
        bin_file = efi.get('ap', efi.get('nidq', None))
        if not bin_file:
            continue
        alfname = dict(object='sync', namespace='spikeglx')
        if efi.label:
            alfname['extra'] = efi.label
        file_exists = alfio.exists(bin_file.parent, **alfname)
        if not overwrite and file_exists:
            # re-use the previously extracted sync instead of re-reading the binary
            _logger.warning(
                f'Skipping raw sync: SGLX sync found for probe {efi.label}!')
            sync = alfio.load_object(bin_file.parent, **alfname)
            out_files, _ = alfio._ls(bin_file.parent, **alfname)
        else:
            # NOTE(review): the Reader is never explicitly closed here — if spikeglx.Reader
            # supports the context-manager protocol, a `with` block would be safer; confirm
            sr = spikeglx.Reader(bin_file)
            sync, out_files = _sync_to_alf(sr, bin_file.parent, save=True, parts=efi.label)
        outputs.extend(out_files)
        syncs.append(sync)  # was extend([sync]); append is the direct idiom for one item
    return syncs, outputs
def probes_description(ses_path, one=None, bin_exists=True):
    """
    Aggregate probes information into ALF files.
    Register alyx probe insertions and Micro-manipulator trajectories.

    Input: raw_ephys_data/probeXX/
    Output: alf/probes.description.json, alf/probes.trajectory.json

    :param ses_path: session path (Subject/yyyy-mm-dd/number)
    :param one: ONE instance used for the alyx REST calls (required; no default handling)
    :param bin_exists: NOTE(review): this parameter is not used anywhere in the body
    :return: list [probes.trajectory.json, probes.description.json] paths, or [] when
     no probe data is found in the Bpod settings
    """
    eid = one.path2eid(ses_path, query_type='remote')
    ses_path = Path(ses_path)
    # only the .meta files are needed here, the binary files may be absent
    ephys_files = spikeglx.glob_ephys_files(ses_path, ext='meta')
    # sort by parent folder so labels and files stay aligned
    subdirs, labels, efiles_sorted = zip(
        *sorted([(ep.ap.parent, ep.label, ep) for ep in ephys_files if ep.get('ap')]))
    # Outputs the probes description file
    probe_description = []
    alyx_insertions = []
    for label, ef in zip(labels, efiles_sorted):
        md = spikeglx.read_meta_data(ef.ap.with_suffix('.meta'))
        probe_description.append({
            'label': label,
            'model': md.neuropixelVersion,
            'serial': int(md.serial),
            'raw_file_name': md.fileName,
        })
        # create or update alyx probe insertions
        alyx_insertion = {
            'session': eid,
            'model': md.neuropixelVersion,
            'serial': md.serial,
            'name': label
        }
        pi = one.alyx.rest('insertions', 'list', session=eid, name=label)
        if len(pi) == 0:
            # new insertion: seed an empty QC record
            qc_dict = {'qc': 'NOT_SET', 'extended_qc': {}}
            alyx_insertion.update({'json': qc_dict})
            alyx_insertions.append(
                one.alyx.rest('insertions', 'create', data=alyx_insertion))
        else:
            alyx_insertions.append(
                one.alyx.rest('insertions', 'partial_update',
                              data=alyx_insertion, id=pi[0]['id']))

    # write the local ALF description file
    alf_path = ses_path.joinpath('alf')
    alf_path.mkdir(exist_ok=True, parents=True)
    probe_description_file = alf_path.joinpath('probes.description.json')
    with open(probe_description_file, 'w+') as fid:
        fid.write(json.dumps(probe_description))

    # Outputs the probes trajectory file
    bpod_meta = raw_data_loaders.load_settings(ses_path)
    if not bpod_meta.get('PROBE_DATA'):
        _logger.error('No probe information in settings JSON. Skipping probes.trajectory')
        return []

    def prb2alf(prb, label):
        # map a Bpod micro-manipulator record to the ALF trajectory dict
        return {'label': label, 'x': prb['X'], 'y': prb['Y'], 'z': prb['Z'],
                'phi': prb['A'], 'theta': prb['P'], 'depth': prb['D'], 'beta': prb['T']}

    def prb2alyx(prb, probe_insertion):
        # map a Bpod micro-manipulator record to the alyx trajectory payload
        return {'probe_insertion': probe_insertion, 'x': prb['X'], 'y': prb['Y'],
                'z': prb['Z'], 'phi': prb['A'], 'theta': prb['P'], 'depth': prb['D'],
                'roll': prb['T'], 'provenance': 'Micro-manipulator',
                'coordinate_system': 'Needles-Allen'}

    # the labels may not match, in which case throw a warning and work in alphabetical order
    if labels != ('probe00', 'probe01'):
        _logger.warning("Probe names do not match the json settings files. Will match coordinates"
                        " per alphabetical order !")
        _ = [_logger.warning(f"  probe0{i} ----------  {lab} ") for i, lab in enumerate(labels)]
    trajs = []
    keys = sorted(bpod_meta['PROBE_DATA'].keys())
    for i, k in enumerate(keys):
        if i >= len(labels):
            break
        # NOTE(review): indexes by f'probe0{i}' rather than the sorted key `k` — these only
        # agree when keys are exactly probe00/probe01...; confirm against the settings schema
        pdict = bpod_meta['PROBE_DATA'][f'probe0{i}']
        trajs.append(prb2alf(pdict, labels[i]))
        pid = next((ai['id'] for ai in alyx_insertions if ai['name'] == k), None)
        if pid:
            # here we don't update the micro-manipulator coordinates if the trajectory already
            # exists as it may have been entered manually through admin interface
            trj = one.alyx.rest('trajectories', 'list', probe_insertion=pid,
                                provenance='Micro-manipulator')
            if len(trj) == 0:
                one.alyx.rest('trajectories', 'create', data=prb2alyx(pdict, pid))

    probe_trajectory_file = alf_path.joinpath('probes.trajectory.json')
    with open(probe_trajectory_file, 'w+') as fid:
        fid.write(json.dumps(trajs))
    return [probe_trajectory_file, probe_description_file]
def version3A(ses_path, display=True, type='smooth', tol=2.1):
    """
    From a session path with _spikeglx_sync arrays extracted, locate ephys files for 3A and
    outputs one sync.timestamps.probeN.npy file per acquired probe. By convention the reference
    probe is the one with the most synchronisation pulses.
    Assumes the _spikeglx_sync datasets are already extracted from binary data

    :param ses_path: session path
    :param display: bool, forwarded to sync_probe_front_times for plotting
    :param type: linear, exact or smooth
    :param tol: tolerance forwarded to sync_probe_front_times
    :return: bool True on a successful sync, and the output files
    """
    ephys_files = spikeglx.glob_ephys_files(ses_path, ext='meta', bin_exists=False)
    nprobes = len(ephys_files)
    if nprobes == 1:
        # single probe: identity mapping, nothing to synchronise against
        timestamps = np.array([[0., 0.], [1., 1.]])
        sr = _get_sr(ephys_files[0])
        out_files = _save_timestamps_npy(ephys_files[0], timestamps, sr)
        return True, out_files

    def get_sync_fronts(auxiliary_name):
        # collect the sync front times of one auxiliary channel across all probes;
        # returns None when the channel is missing or has no fronts on any probe
        d = Bunch({'times': [], 'nsync': np.zeros(nprobes, )})
        # auxiliary_name: frame2ttl or right_camera
        for ind, ephys_file in enumerate(ephys_files):
            sync = alfio.load_object(ephys_file.ap.parent, 'sync',
                                     namespace='spikeglx', short_keys=True)
            sync_map = get_ibl_sync_map(ephys_file, '3A')
            # exits if sync label not found for current probe
            if auxiliary_name not in sync_map:
                return
            isync = np.in1d(sync['channels'], np.array([sync_map[auxiliary_name]]))
            # only returns syncs if we get fronts for all probes
            if np.all(~isync):
                return
            d.nsync[ind] = len(sync.channels)
            d['times'].append(sync['times'][isync])
        return d

    d = get_sync_fronts('frame2ttl')
    if not d:
        _logger.warning('Ephys sync: frame2ttl not detected on both probes, using camera sync')
        d = get_sync_fronts('right_camera')
    # cameras must start after ephys for the fronts to be usable as a common reference
    if not min([t[0] for t in d['times']]) > 0.2:
        raise (ValueError('Cameras started before ephys, no sync possible'))
    # chop off to the lowest number of sync points
    nsyncs = [t.size for t in d['times']]
    if len(set(nsyncs)) > 1:
        _logger.warning("Probes don't have the same number of synchronizations pulses")
    d['times'] = np.r_[[t[:min(nsyncs)] for t in d['times']]].transpose()
    # the reference probe is the one with the most sync pulses detected
    iref = np.argmax(d.nsync)
    # islave = np.setdiff1d(np.arange(nprobes), iref)
    # get the sampling rate from the reference probe using metadata file
    sr = _get_sr(ephys_files[iref])
    qc_all = True
    # output timestamps files as per ALF convention
    for ind, ephys_file in enumerate(ephys_files):
        if ind == iref:
            # the reference probe maps to itself: identity transform
            timestamps = np.array([[0., 0.], [1., 1.]])
        else:
            timestamps, qc = sync_probe_front_times(d.times[:, ind], d.times[:, iref], sr,
                                                    display=display, type=type, tol=tol)
            qc_all &= qc
        # NOTE(review): out_files is reassigned each iteration, so only the last probe's
        # files are returned — looks like this should accumulate; confirm against callers
        out_files = _save_timestamps_npy(ephys_file, timestamps, sr)
    return qc_all, out_files
def version3B(ses_path, display=True, type=None, tol=2.5):
    """
    From a session path with _spikeglx_sync arrays extracted, locate ephys files for 3B and
    outputs one sync.timestamps.probeN.npy file per acquired probe. By convention the reference
    is the nidq sync channel.
    Assumes the _spikeglx_sync datasets are already extracted from binary data

    :param ses_path: session path
    :param display: bool, forwarded to sync_probe_front_times for plotting
    :param type: linear, exact or smooth; None selects per-probe based on the diff QC
    :param tol: tolerance forwarded to sync_probe_front_times
    :return: (qc_all, out_files): overall QC bool and list of output timestamp files
    """
    DEFAULT_TYPE = 'smooth'
    ephys_files = spikeglx.glob_ephys_files(ses_path, ext='meta', bin_exists=False)
    for ef in ephys_files:
        ef['sync'] = alfio.load_object(ef.path, 'sync', namespace='spikeglx', short_keys=True)
        ef['sync_map'] = get_ibl_sync_map(ef, '3B')
    # separate the nidq record (the sync reference) from the probe records
    nidq_file = [ef for ef in ephys_files if ef.get('nidq')]
    ephys_files = [ef for ef in ephys_files if not ef.get('nidq')]
    # should have at least 2 probes and only one nidq
    assert (len(nidq_file) == 1)
    nidq_file = nidq_file[0]
    sync_nidq = get_sync_fronts(nidq_file.sync, nidq_file.sync_map['imec_sync'])

    qc_all = True
    out_files = []
    for ef in ephys_files:
        sync_probe = get_sync_fronts(ef.sync, ef.sync_map['imec_sync'])
        sr = _get_sr(ef)
        try:
            # we say that the number of pulses should be within 10 %
            assert (np.isclose(sync_nidq.times.size, sync_probe.times.size, rtol=0.1))
        except AssertionError:
            raise Neuropixel3BSyncFrontsNonMatching(f"{ses_path}")
        # Find the indexes in case the sizes don't match
        if sync_nidq.times.size != sync_probe.times.size:
            _logger.warning(f'Sync mismatch by {np.abs(sync_nidq.times.size - sync_probe.times.size)} '
                            f'NIDQ sync times: {sync_nidq.times.size}, Probe sync times {sync_probe.times.size}')
        # truncate both front trains to the common length
        sync_idx = np.min([sync_nidq.times.size, sync_probe.times.size])

        # if the qc of the diff finds anomalies, do not attempt to smooth the interp function
        qcdiff = _check_diff_3b(sync_probe)
        if not qcdiff:
            qc_all = False
            type_probe = type or 'exact'
        else:
            type_probe = type or DEFAULT_TYPE
        timestamps, qc = sync_probe_front_times(sync_probe.times[:sync_idx],
                                                sync_nidq.times[:sync_idx], sr,
                                                display=display, type=type_probe, tol=tol)
        qc_all &= qc
        out_files.extend(_save_timestamps_npy(ef, timestamps, sr))
    return qc_all, out_files