def testReadChannelMap(self):
    """Channel maps built from meta data expose the shank-geometry keys."""
    expected_keys = {'shank', 'col', 'row', 'flag'}
    for meta_file in self.meta_files:
        meta = spikeglx.read_meta_data(meta_file)
        channel_map = spikeglx._map_channels_from_meta(meta)
        # only meta files carrying a shank map describe the probe geometry
        if 'snsShankMap' in meta.keys():
            self.assertEqual(set(channel_map.keys()), expected_keys)
def testReadChannelGain(self):
    """Gains parsed from every meta file: 250 on LF channels, 500 on AP channels."""
    for meta_file in self.meta_files:
        meta = spikeglx.read_meta_data(meta_file)
        gains = spikeglx._gain_channels_from_meta(meta)
        # last entry is the sync channel, hence the [0:-1] slices
        self.assertTrue(np.all(gains['lf'][:-1] == 250))
        self.assertTrue(np.all(gains['ap'][:-1] == 500))
        n_expected = int(sum(meta.get('snsApLfSy')))
        self.assertTrue(len(gains['ap']) == len(gains['lf']) == n_expected)
def testReadChannelGain(self):
    """Gains for a single known LF meta file: 250 on LF, 500 on AP channels."""
    meta_path = self.workdir / 'FC034_g0_t0.imec.lf.meta'
    meta = spikeglx.read_meta_data(meta_path)
    gains = spikeglx._gain_channels(meta)
    # last entry is the sync channel, hence the [0:-1] slices
    self.assertTrue(np.all(gains['lf'][:-1] == 250))
    self.assertTrue(np.all(gains['ap'][:-1] == 500))
    n_expected = int(sum(meta.get('snsApLfSy')))
    self.assertTrue(len(gains['ap']) == len(gains['lf']) == n_expected)
def _check_session_sync(ses_path, channel):
    """
    Resync the original cam pulses.

    :param ses_path: session path containing extracted `sync` ALF objects
    :param channel: sync channel number whose fronts are compared across probes
    :return: difference between the interpolated sync times of the first two probes
    """
    efiles = spikeglx.glob_ephys_files(ses_path, bin_exists=False)
    tprobe = []
    tinterp = []
    for ef in efiles:
        if not ef.get('ap'):
            continue
        sync_events = alf.io.load_object(ef.ap.parent, 'sync', short_keys=True)
        # the first step is to construct list arrays with probe sync
        sync_file = ef.ap.parent.joinpath(
            ef.ap.name.replace('.ap.', '.sync.')).with_suffix('.npy')
        t = sync_events.times[sync_events.channels == channel]
        tsync = sync_probes.apply_sync(sync_file, t, forward=True)
        tprobe.append(t)
        tinterp.append(tsync)
        # the second step is to make sure sample / time_ref files match time / time_ref files
        ts_file = ef.ap.parent.joinpath(
            ef.ap.name.replace('.ap.', '.timestamps.')).with_suffix('.npy')
        fs = spikeglx._get_fs_from_meta(
            spikeglx.read_meta_data(ef.ap.with_suffix('.meta')))
        tstamp = sync_probes.apply_sync(ts_file, t * fs, forward=True)
        # bugfix: compare the ABSOLUTE deviation — the original one-sided test
        # (tstamp - tsync < 1e-12) would pass for arbitrarily large negative errors
        assert np.all(np.abs(tstamp - tsync) < 1e-12)
    return tinterp[0] - tinterp[1]
def testGetAnalogSyncIndex(self):
    """Analog sync trace indices: empty for ap/lf streams, [0] otherwise."""
    for meta_file in self.meta_files:
        meta = spikeglx.read_meta_data(meta_file)
        indices = spikeglx._get_analog_sync_trace_indices_from_meta(meta)
        if spikeglx._get_type_from_meta(meta) in ['ap', 'lf']:
            self.assertTrue(indices == [])
        else:
            self.assertEqual(indices, [0])
def _sample2v(ap_file):
    """
    Convert raw ephys data to Volts: return the AP-band sample-to-volts
    factor read from the binary file's companion .meta file.
    """
    meta = spikeglx.read_meta_data(ap_file.with_suffix('.meta'))
    conversions = spikeglx._conversion_sample2v_from_meta(meta)
    # all AP channels share the same factor; the first one is returned
    return conversions['ap'][0]
def testGetSerialNumber(self):
    """Serial numbers parsed from the (sorted) meta files match the known list."""
    self.meta_files.sort()
    expected = [641251510, 641251510, 641251510, 18005116811, 18005116811, None]
    for meta_file, serial in zip(self.meta_files, expected):
        meta = spikeglx.read_meta_data(meta_file)
        self.assertEqual(meta.serial, serial)
def testGetRevisionAndType(self):
    """Neuropixel version and stream type inferred from meta match the filename."""
    for meta_data_file in self.meta_files:
        md = spikeglx.read_meta_data(meta_data_file)
        self.assertTrue(len(md.keys()) >= 37)
        # test getting revision: the filename encodes it at characters [6:8]
        revision = meta_data_file.name[6:8]
        self.assertEqual(spikeglx._get_neuropixel_version_from_meta(md)[0:2], revision)
        # test getting acquisition type from the penultimate filename suffix
        # (renamed from `type` so the builtin is not shadowed)
        stream_type = meta_data_file.name.split('.')[-2]
        self.assertEqual(spikeglx._get_type_from_meta(md), stream_type)
def probes_description(ses_path, bin_exists=True):
    """
    Aggregate probes information into ALF files.

    Input:
        raw_ephys_data/probeXX/
    Output:
        alf/probes.description.json
        alf/probes.trajectory.json

    :param ses_path: session folder
    :param bin_exists: only consider ephys files whose binary file exists
    :return: [probes.trajectory file, probes.description file], or None when the
        settings JSON contains no probe information
    """
    ses_path = Path(ses_path)
    ephys_files = spikeglx.glob_ephys_files(ses_path, bin_exists=bin_exists)
    subdirs, labels, efiles_sorted = zip(
        *sorted([(ep.ap.parent, ep.label, ep) for ep in ephys_files if ep.get('ap')]))

    # Outputs the probes description file
    probe_description = []
    for label, ef in zip(labels, efiles_sorted):
        md = spikeglx.read_meta_data(ef.ap.with_suffix('.meta'))
        probe_description.append({'label': label,
                                  'model': md.neuropixelVersion,
                                  'serial': int(md.serial),
                                  'raw_file_name': md.fileName,
                                  })
    alf_path = ses_path.joinpath('alf')
    alf_path.mkdir(exist_ok=True, parents=True)
    probe_description_file = alf_path.joinpath('probes.description.json')
    with open(probe_description_file, 'w+') as fid:
        fid.write(json.dumps(probe_description))

    # Outputs the probes trajectory file
    bpod_meta = raw_data_loaders.load_settings(ses_path)
    if not bpod_meta.get('PROBE_DATA'):
        _logger.error('No probe information in settings JSON. Skipping probes.trajectory')
        return

    def prb2alf(prb, label):
        # map one micro-manipulator entry from the settings JSON to the ALF schema
        return {'label': label, 'x': prb['X'], 'y': prb['Y'], 'z': prb['Z'], 'phi': prb['A'],
                'theta': prb['P'], 'depth': prb['D'], 'beta': prb['T']}

    # the labels may not match, in which case throw a warning and work in alphabetical order
    # (fix: the warning string was broken across a line break in the source)
    if labels != ('probe00', 'probe01'):
        _logger.warning("Probe names do not match the json settings files. Will match coordinates"
                        " per alphabetical order !")
        _ = [_logger.warning(f" probe0{i} ---------- {lab} ") for i, lab in enumerate(labels)]
    trajs = []
    keys = sorted(bpod_meta['PROBE_DATA'].keys())
    for i, k in enumerate(keys):
        if i >= len(labels):
            break
        # entries are read by positional index; `keys` only bounds the iteration
        trajs.append(prb2alf(bpod_meta['PROBE_DATA'][f'probe0{i}'], labels[i]))
    probe_trajectory_file = alf_path.joinpath('probes.trajectory.json')
    with open(probe_trajectory_file, 'w+') as fid:
        fid.write(json.dumps(trajs))
    return [probe_trajectory_file, probe_description_file]
def testReadChannelGainNIDQ(self):
    """NIDQ conversion factors: analog channels scale by i2v, digital are 1."""
    for meta_file in self.meta_files:
        if meta_file.name.split('.')[-2] not in ['nidq']:
            continue
        meta = spikeglx.read_meta_data(meta_file)
        nchannels = spikeglx._get_nchannels_from_meta(meta)
        conv = spikeglx._conversion_sample2v_from_meta(meta)
        # int16 full-scale maps to the NI analog input range maximum
        i2v = meta.get('niAiRangeMax') / 32768
        n_analog = int(np.sum(meta.acqMnMaXaDw[:3]))
        self.assertTrue(np.all(conv['nidq'][:n_analog] == i2v))
        n_dw = int(np.sum(meta.acqMnMaXaDw[-1]))
        self.assertTrue(np.all(conv['nidq'][n_dw:] == 1.))
        self.assertTrue(len(conv['nidq']) == nchannels)
def testReadChannelGainAPLF(self):
    """AP/LF conversion factors follow the imec range divided by the band gain."""
    for meta_file in self.meta_files:
        if meta_file.name.split('.')[-2] not in ['lf', 'ap']:
            continue
        meta = spikeglx.read_meta_data(meta_file)
        conv = spikeglx._conversion_sample2mv_from_meta(meta)
        i2v = meta.get('imAiRangeMax') / 512
        # last entry is the sync channel, hence the [0:-1] slices
        self.assertTrue(np.all(conv['lf'][:-1] == i2v / 250))
        self.assertTrue(np.all(conv['ap'][:-1] == i2v / 500))
        # also test consistent dimension with nchannels
        nchannels = spikeglx._get_nchannels_from_meta(meta)
        self.assertTrue(len(conv['ap']) == len(conv['lf']) == nchannels)
def phy_model_from_ks2_path(ks2_path, bin_path, bin_file=None):
    """
    Build a phy TemplateModel from a kilosort2 output folder, pulling the
    sampling rate and channel count from the spikeglx meta file when present.
    """
    if not bin_file:
        bin_file = next(bin_path.rglob('*.ap.*bin'), None)
    meta_file = next(bin_path.rglob('*.ap.meta'), None)
    if meta_file and meta_file.exists():
        meta = spikeglx.read_meta_data(meta_file)
        fs = spikeglx._get_fs_from_meta(meta)
        # data channels = total channels minus the sync traces
        nch = (spikeglx._get_nchannels_from_meta(meta) -
               len(spikeglx._get_sync_trace_indices_from_meta(meta)))
    else:
        # fall back to standard Neuropixel values when no meta file is found
        fs, nch = 30000, 384
    tmodel = model.TemplateModel(dir_path=ks2_path,
                                 dat_path=bin_file,  # this assumes the raw data is in the same folder
                                 sample_rate=fs,
                                 n_channels_dat=nch,
                                 n_closest_channels=NCH_WAVEFORMS)
    tmodel.depths = tmodel.get_depths()
    return tmodel
def phy_model_from_ks2_path(ks2_path):
    """
    Load a phy TemplateModel from a kilosort2 folder: use params.py when it
    exists, otherwise fall back to parameters read from the .ap.meta file.
    """
    params_file = ks2_path.joinpath('params.py')
    if params_file.exists():
        return model.load_model(params_file)
    meta_file = next(ks2_path.rglob('*.ap.meta'), None)
    if meta_file and meta_file.exists():
        meta = spikeglx.read_meta_data(meta_file)
        fs = spikeglx._get_fs_from_meta(meta)
        # data channels = total channels minus the sync traces
        nch = (spikeglx._get_nchannels_from_meta(meta) -
               len(spikeglx._get_sync_trace_indices_from_meta(meta)))
    else:
        # standard Neuropixel defaults when no meta file is found
        fs, nch = 30000, 384
    return model.TemplateModel(dir_path=ks2_path,
                               dat_path=[],
                               sample_rate=fs,
                               n_channels_dat=nch)
def sync_probe_folders_3A(ses_path):
    """
    From a session path with _spikeglx_sync arrays extracted, locate ephys files for 3A and
    outputs one sync.timestamps.probeN.npy file per acquired probe. By convention the reference
    probe is the one with the most synchronisation pulses.

    :param ses_path: session path containing extracted _spikeglx_sync ALF objects per probe
    :return: None (writes sync.timestamps files next to each probe's ap file)
    """
    ephys_files = ibllib.io.spikeglx.glob_ephys_files(ses_path)
    nprobes = len(ephys_files)
    # syncing requires at least two probes to compare against each other
    assert (nprobes >= 2)
    d = Bunch({'times': None, 'nsync': np.zeros(nprobes, )})
    for ind, ephys_file in enumerate(ephys_files):
        sync = alf.io.load_object(ephys_file.ap.parent, '_spikeglx_sync', short_keys=True)
        sync_map = ibllib.io.spikeglx.get_sync_map(ephys_file.ap.parent)
        # keep only the fronts coming from the three camera channels
        isync = np.in1d(sync['channels'], np.array([sync_map['right_camera'],
                                                    sync_map['left_camera'],
                                                    sync_map['body_camera']]))
        # total front count per probe, used below to pick the reference
        d.nsync[ind] = len(sync.channels)
        # this is designed to break if the number of fronts per probe are not equal
        if ind == 0:
            d['times'] = np.zeros((np.sum(isync), nprobes))
        d['times'][:, ind] = sync['times'][isync]
    # the reference probe is the one with the most sync pulses detected
    iref = np.argmax(d.nsync)
    # islave = np.setdiff1d(np.arange(nprobes), iref)
    # get the sampling rate from the reference probe using metadata file
    meta = spikeglx.read_meta_data(Path(ephys_files[iref].ap).with_suffix('.meta'))
    sr = meta['imSampRate']
    # output timestamps files as per ALF convention
    for ind, ephys_file in enumerate(ephys_files):
        if ind == iref:
            # the reference probe maps onto itself: identity [sample, time] pairs
            timestamps = np.array([[0, 0], [sr, 1]])
        else:
            timestamps = sync_probe_front_times(d.times[:, iref], d.times[:, ind], sr)
        alf.io.save_object_npy(ephys_file.ap.parent, {'timestamps': timestamps},
                               object='sync', parts=ephys_file.label)
def testReadMetaData(self):
    """A known LF meta file parses into exactly 37 key/value pairs."""
    meta = spikeglx.read_meta_data(self.workdir / 'FC034_g0_t0.imec.lf.meta')
    self.assertTrue(len(meta.keys()) == 37)
def _get_sr(ephys_file):
    """Sampling rate read from the meta file sitting next to the ap binary."""
    md = spikeglx.read_meta_data(ephys_file.ap.with_suffix('.meta'))
    return spikeglx._get_fs_from_meta(md)
def probes_description(ses_path, one=None, bin_exists=True):
    """
    Aggregate probes information into ALF files
    Register alyx probe insertions and Micro-manipulator trajectories
    Input:
        raw_ephys_data/probeXX/
    Output:
        alf/probes.description.npy
        alf/probes.trajectory.npy

    :param ses_path: session folder
    :param one: ONE instance used for alyx REST calls — assumed non-None, no guard here
    :param bin_exists: only consider ephys files whose binary file exists
    :return: [probes.trajectory file, probes.description file], or None when the
        settings JSON contains no probe information
    """
    eid = one.eid_from_path(ses_path)
    ses_path = Path(ses_path)
    ephys_files = spikeglx.glob_ephys_files(ses_path, bin_exists=bin_exists)
    subdirs, labels, efiles_sorted = zip(*sorted([(ep.ap.parent, ep.label, ep)
                                                  for ep in ephys_files if ep.get('ap')]))
    # Outputs the probes description file
    probe_description = []
    alyx_insertions = []
    for label, ef in zip(labels, efiles_sorted):
        md = spikeglx.read_meta_data(ef.ap.with_suffix('.meta'))
        probe_description.append({
            'label': label,
            'model': md.neuropixelVersion,
            'serial': int(md.serial),
            'raw_file_name': md.fileName,
        })
        # create or update alyx probe insertions
        alyx_insertion = {
            'session': eid,
            'model': md.neuropixelVersion,
            'serial': md.serial,
            'name': label
        }
        pi = one.alyx.rest('insertions', 'list', session=eid, name=label)
        if len(pi) == 0:
            # no insertion registered for this probe yet: create it
            alyx_insertions.append(
                one.alyx.rest('insertions', 'create', data=alyx_insertion))
        else:
            # insertion exists: refresh its fields in place
            alyx_insertions.append(
                one.alyx.rest('insertions', 'partial_update',
                              data=alyx_insertion, id=pi[0]['id']))
    alf_path = ses_path.joinpath('alf')
    alf_path.mkdir(exist_ok=True, parents=True)
    probe_description_file = alf_path.joinpath('probes.description.json')
    with open(probe_description_file, 'w+') as fid:
        fid.write(json.dumps(probe_description))
    # Outputs the probes trajectory file
    bpod_meta = raw_data_loaders.load_settings(ses_path)
    if not bpod_meta.get('PROBE_DATA'):
        _logger.error('No probe information in settings JSON. Skipping probes.trajectory')
        return

    def prb2alf(prb, label):
        # map a micro-manipulator entry from the settings JSON to the ALF schema
        return {
            'label': label,
            'x': prb['X'],
            'y': prb['Y'],
            'z': prb['Z'],
            'phi': prb['A'],
            'theta': prb['P'],
            'depth': prb['D'],
            'beta': prb['T']
        }

    def prb2alyx(prb, probe_insertion):
        # map the same entry to the alyx trajectory REST payload
        return {
            'probe_insertion': probe_insertion,
            'x': prb['X'],
            'y': prb['Y'],
            'z': prb['Z'],
            'phi': prb['A'],
            'theta': prb['P'],
            'depth': prb['D'],
            'roll': prb['T'],
            'provenance': 'Micro-manipulator',
            'coordinate_system': 'Needles-Allen'
        }

    # the labels may not match, in which case throw a warning and work in alphabetical order
    if labels != ('probe00', 'probe01'):
        _logger.warning(
            "Probe names do not match the json settings files. Will match coordinates"
            " per alphabetical order !")
        _ = [
            _logger.warning(f" probe0{i} ---------- {lab} ")
            for i, lab in enumerate(labels)
        ]
    trajs = []
    keys = sorted(bpod_meta['PROBE_DATA'].keys())
    for i, k in enumerate(keys):
        if i >= len(labels):
            break
        pdict = bpod_meta['PROBE_DATA'][f'probe0{i}']
        trajs.append(prb2alf(pdict, labels[i]))
        pid = next((ai['id'] for ai in alyx_insertions if ai['name'] == k), None)
        if pid:
            # here we don't update the micro-manipulator coordinates if the trajectory already
            # exists as it may have been entered manually through admin interface
            trj = one.alyx.rest('trajectories', 'list', probe_insertion=pid,
                                provenance='Micro-manipulator')
            if len(trj) == 0:
                one.alyx.rest('trajectories', 'create', data=prb2alyx(pdict, pid))
    probe_trajectory_file = alf_path.joinpath('probes.trajectory.json')
    with open(probe_trajectory_file, 'w+') as fid:
        fid.write(json.dumps(trajs))
    return [probe_trajectory_file, probe_description_file]
def _sr(ap_file):
    """Sampling rate read from the binary file's companion .meta file."""
    meta = spikeglx.read_meta_data(ap_file.with_suffix('.meta'))
    return spikeglx._get_fs_from_meta(meta)
def _sample2v(ap_file):
    """AP-band sample-to-volts conversion factor read from the sibling .meta file."""
    meta = spikeglx.read_meta_data(ap_file.with_suffix('.meta'))
    # all AP channels share the same factor; the first one is returned
    return spikeglx._conversion_sample2v_from_meta(meta)['ap'][0]
def _sr(ap_file):
    """Return the sampling rate stored in the binary file's companion .meta."""
    meta = spikeglx.read_meta_data(ap_file.with_suffix('.meta'))
    fs = spikeglx._get_fs_from_meta(meta)
    return fs
def testReadMetaData(self):
    """Every meta file parses into at least 37 key/value pairs."""
    for meta_file in self.meta_files:
        meta = spikeglx.read_meta_data(meta_file)
        self.assertTrue(len(meta.keys()) >= 37)