Example #1
    def test_glob_ephys(self):
        def dict_equals(d1, d2):
            return all([x in d1 for x in d2]) and all([x in d2 for x in d1])

        ef3b = spikeglx.glob_ephys_files(self.dir3b)
        ef3a = spikeglx.glob_ephys_files(self.dir3a)
        ef3b_ch = spikeglx.glob_ephys_files(self.dir3b, ext='ch')
        # test glob
        self.assertTrue(dict_equals(self.dict3a, ef3a))
        self.assertTrue(dict_equals(self.dict3b, ef3b))
        self.assertTrue(dict_equals(self.dict3b_ch, ef3b_ch))
        # test the version from glob
        self.assertTrue(
            spikeglx.get_neuropixel_version_from_files(ef3a) == '3A')
        self.assertTrue(
            spikeglx.get_neuropixel_version_from_files(ef3b) == '3B')
        # test the version from paths
        self.assertTrue(
            spikeglx.get_neuropixel_version_from_folder(self.dir3a) == '3A')
        self.assertTrue(
            spikeglx.get_neuropixel_version_from_folder(self.dir3b) == '3B')
        self.dir3b.joinpath('imec1',
                            'sync_testing_g0_t0.imec1.ap.bin').unlink()
        self.assertEqual(
            spikeglx.glob_ephys_files(self.dir3b.joinpath('imec1')), [])
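
Usage note (a sketch, not from the original code): the tests above suggest `glob_ephys_files` accepts either a string or a `pathlib.Path` and returns a list of Bunch-like dictionaries supporting both key and attribute access, with `ap`, `lf`, `nidq`, `label` and `path` entries. The session path below is hypothetical.

from ibllib.io import spikeglx

ephys_files = spikeglx.glob_ephys_files('/path/to/subject/yyyy-mm-dd/001')
for ef in ephys_files:
    # each entry exposes the probe label and the paths to the binary files
    print(ef.label, ef.get('ap'), ef.get('lf'))
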
Example #2
    def _run(self):
        """
        Compress ephys files looking for `compress_ephys.flag` whithin the probes folder
        Original bin file will be removed
        The registration flag created contains targeted file names at the root of the session
        """
        out_files = []
        ephys_files = spikeglx.glob_ephys_files(self.session_path)
        ephys_files += spikeglx.glob_ephys_files(self.session_path, ext="ch")
        ephys_files += spikeglx.glob_ephys_files(self.session_path, ext="meta")

        for ef in ephys_files:
            for typ in ["ap", "lf", "nidq"]:
                bin_file = ef.get(typ)
                if not bin_file:
                    continue
                if bin_file.suffix == ".bin":  # only attempt compression on uncompressed binaries
                    sr = spikeglx.Reader(bin_file)
                    if sr.is_mtscomp:
                        out_files.append(bin_file)
                    else:
                        _logger.info(f"Compressing binary file {bin_file}")
                        out_files.append(sr.compress_file(keep_original=False))
                        out_files.append(bin_file.with_suffix('.ch'))
                else:
                    out_files.append(bin_file)

        return out_files
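
A minimal sketch of the compression pattern this task relies on, with a hypothetical path to an uncompressed `.bin` file:

from ibllib.io import spikeglx

sr = spikeglx.Reader('/path/to/probe00/_spikeglx_ephysData_g0_t0.imec0.ap.bin')  # hypothetical path
if not sr.is_mtscomp:
    # compress_file returns the path of the resulting .cbin; the original .bin is removed
    cbin_file = sr.compress_file(keep_original=False)
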
Example #3
 def test_get_ephys_files(self):
     # first test at the root directory level, with a string input
     ephys_files = spikeglx.glob_ephys_files(self.dir.name)
     for ef in ephys_files:
         self.assertTrue(ef.label in ['probe_right', 'probe_left'])
         self.assertTrue(ef.ap.exists() and ef.lf.exists())
     # second test at the ephys directory level, with a pathlib.Path input
     ephys_files = spikeglx.glob_ephys_files(
         Path(self.dir.name) / 'raw_ephys_data')
     for ef in ephys_files:
         self.assertTrue(ef.label in ['probe_right', 'probe_left'])
         self.assertTrue(ef.ap.exists() and ef.lf.exists())
Example #4
def compress_ephys(root_data_folder, dry=False, max_sessions=5):
    """
    Compress ephys files looking for `compress_ephys.flag` whithin the probes folder
    Original bin file will be removed
    The registration flag created contains targeted file names at the root of the session
    """
    qcflags = Path(root_data_folder).rglob('compress_ephys.flag')
    c = 0
    for qcflag in qcflags:
        probe_path = qcflag.parent
        c += 1
        if c > max_sessions:
            return
        if dry:
            print(qcflag.parent)
            continue
        # no rglob: only the folder in which the flag is located gets searched
        ephys_files = spikeglx.glob_ephys_files(probe_path, recursive=False)
        out_files = []
        for ef in ephys_files:
            for typ in ['ap', 'lf', 'nidq']:
                bin_file = ef.get(typ)
                if not bin_file:
                    continue
                sr = spikeglx.Reader(bin_file)
                if not sr.is_mtscomp:
                    out_files.append(sr.compress_file(keep_original=False))
        qcflag.unlink()
        if out_files:
            session_path = probe_path.parents[1]
            file_list = [str(f.relative_to(session_path)) for f in out_files]
            flags.write_flag_file(probe_path.joinpath('register_me.flag'), file_list=file_list)
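
To illustrate how the flag-driven workflow above is triggered (paths are hypothetical, `compress_ephys` is the function defined above):

from pathlib import Path

probe_path = Path('/data/subject/yyyy-mm-dd/001/raw_ephys_data/probe00')
# an empty flag file is enough for the rglob discovery above
probe_path.joinpath('compress_ephys.flag').touch()
compress_ephys('/data', dry=True)  # a dry run only prints the flagged probe folders
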
Example #5
def extract_sync(session_path, save=False, force=False, ephys_files=None):
    """
    Reads ephys binary file (s) and extract sync within the binary file folder
    Assumes ephys data is within a `raw_ephys_data` folder

    :param session_path: '/path/to/subject/yyyy-mm-dd/001'
    :param save: Bool, defaults to False
    :param force: Bool on re-extraction, forces overwrite instead of loading existing sync files
    :return: list of sync dictionaries
    """
    session_path = Path(session_path)
    if not ephys_files:
        ephys_files = glob_ephys_files(session_path)
    syncs = []
    for efi in ephys_files:
        glob_filter = f'*{efi.label}*' if efi.label else '*'
        bin_file = efi.get('ap', efi.get('nidq', None))
        if not bin_file:
            continue
        file_exists = alf.io.exists(bin_file.parent, object='_spikeglx_sync', glob=glob_filter)
        if not force and file_exists:
            _logger.warning(f'Skipping raw sync: SGLX sync found for probe {efi.label} !')
            sync = alf.io.load_object(bin_file.parent, object='_spikeglx_sync', glob=glob_filter)
        else:
            sr = ibllib.io.spikeglx.Reader(bin_file)
            sync = _sync_to_alf(sr, bin_file.parent, save=save, parts=efi.label)
        syncs.extend([sync])
    return syncs
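
A usage sketch for `extract_sync` above (hypothetical session path); each returned sync is an ALF object exposing at least `times` and `channels` arrays, as used by `_check_session_sync` below:

syncs = extract_sync('/path/to/subject/yyyy-mm-dd/001', save=True)
for sync in syncs:
    # sync fronts detected on channel 0, for instance
    print(sync.times[sync.channels == 0][:5])
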
Example #6
def _check_session_sync(ses_path, channel):
    """
    Resyncs the original camera pulses and checks that the probe timestamps match.
    :param ses_path: session path
    :param channel: sync channel carrying the camera pulses
    :return: difference between the interpolated pulse times of the two probes
    """
    efiles = spikeglx.glob_ephys_files(ses_path, bin_exists=False)
    tprobe = []
    tinterp = []
    for ef in efiles:
        if not ef.get('ap'):
            continue
        sync_events = alf.io.load_object(ef.ap.parent, 'sync', short_keys=True)
        # the first step is to construct list arrays with probe sync
        sync_file = ef.ap.parent.joinpath(ef.ap.name.replace(
            '.ap.', '.sync.')).with_suffix('.npy')
        t = sync_events.times[sync_events.channels == channel]
        tsync = sync_probes.apply_sync(sync_file, t, forward=True)
        tprobe.append(t)
        tinterp.append(tsync)
        # the second step is to make sure sample / time_ref files match time / time_ref files
        ts_file = ef.ap.parent.joinpath(
            ef.ap.name.replace('.ap.', '.timestamps.')).with_suffix('.npy')
        fs = spikeglx._get_fs_from_meta(
            spikeglx.read_meta_data(ef.ap.with_suffix('.meta')))
        tstamp = sync_probes.apply_sync(ts_file, t * fs, forward=True)
        assert (np.all(tstamp - tsync < 1e-12))
    return tinterp[0] - tinterp[1]
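
For context on the `.sync.npy` files consumed by `apply_sync`: Example #30 at the bottom of this page suggests they hold a two-column array mapping probe time to synchronized time, so the interpolation is roughly equivalent to the sketch below (an assumption for illustration; the path is hypothetical):

import numpy as np
import scipy.interpolate

sync = np.load('/path/to/probe00/_spikeglx_ephysData_g0_t0.imec0.sync.npy')  # hypothetical path
interp = scipy.interpolate.interp1d(sync[:, 0], sync[:, 1], fill_value='extrapolate')
t_synced = interp(np.array([1.0, 2.0]))  # probe seconds -> reference seconds
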
Example #7
    def _run(self, overwrite=False):
        """
        Multiple steps. For each probe:
        - Runs ks2 (skips if it already ran)
        - synchronize the spike sorting
        - output the probe description files
        :param overwrite:
        :return: list of files to be registered on database
        """
        efiles = spikeglx.glob_ephys_files(self.session_path)
        ap_files = [(ef.get('ap'), ef.get('label')) for ef in efiles
                    if 'ap' in ef.keys()]
        out_files = []
        for ap_file, label in ap_files:
            try:
                ks2_dir = self._run_ks2(
                    ap_file)  # runs ks2, skips if it already ran
                probe_out_path = self.session_path.joinpath('alf', label)
                probe_out_path.mkdir(parents=True, exist_ok=True)
                spikes.ks2_to_alf(ks2_dir,
                                  bin_path=ap_file.parent,
                                  out_path=probe_out_path,
                                  bin_file=ap_file,
                                  ampfactor=self._sample2v(ap_file))
                out, _ = spikes.sync_spike_sorting(ap_file=ap_file,
                                                   out_path=probe_out_path)
                out_files.extend(out)
            except BaseException as err:
                _logger.error(err)
                self.status = -1
                continue

        probe_files = spikes.probes_description(self.session_path,
                                                one=self.one)
        return out_files + probe_files
Example #8
    def phy2alf_conversion(session_path, ks2_path, alf_path, probe_label):
        try:
            # Find spikeglx meta data files associated with the session and probe
            files = spikeglx.glob_ephys_files(session_path, ext='meta')
            ap_files = [(ef.get("ap"), ef.get("label")) for ef in files
                        if "ap" in ef.keys()]
            meta_file = next(ap[0] for ap in ap_files if ap[1] == probe_label)

            # The .cbin file doesn't always exist on the server, so derive its path from the meta file
            ap_file = meta_file.with_suffix('.cbin')

            # Convert to alf format
            spikes.ks2_to_alf(
                ks2_path,
                bin_path=meta_file.parent,
                out_path=alf_path,
                bin_file=None,
                ampfactor=SpikeSorting_KS2_Matlab._sample2v(ap_file))

            # Sync the probes
            out_files, _ = spikes.sync_spike_sorting(ap_file=ap_file,
                                                     out_path=alf_path)

            return 0, out_files, None

        except BaseException as err:
            _logger.error(
                f'{session_path} and {probe_label} errored with message: {err}'
            )

            return -1, None, err
Example #9
def sorting_sync_and_alf(session_path, overwrite=False):
    """
    Multiple steps. For each probe:
    - Runs ks2 (skips if it already ran)
    - synchronize the spike sorting
    - output the probe description files
    :param overwrite:
    :return: list of files to be registered on database
    """
    efiles = spikeglx.glob_ephys_files(session_path)
    ap_files = [(ef.get("ap"), ef.get("label")) for ef in efiles
                if "ap" in ef.keys()]
    out_files = []
    for ap_file, label in ap_files:
        ks2_dir = session_path.joinpath("spike_sorters", "ks2_matlab", label)
        probe_out_path = session_path.joinpath("alf", label)
        probe_out_path.mkdir(parents=True, exist_ok=True)
        spikes.ks2_to_alf(
            ks2_dir,
            bin_path=ap_file.parent,
            out_path=probe_out_path,
            bin_file=ap_file,
            ampfactor=_sample2v(ap_file),
        )
        out, _ = spikes.sync_spike_sorting(ap_file=ap_file,
                                           out_path=probe_out_path)
        out_files.extend(out)
        # convert the ks2 output into a tar file and register it as well
        # do this in case the spike sorting is in old raw_ephys_data folders;
        # for new sessions the directory should already exist
        tar_dir = session_path.joinpath('spike_sorters', 'ks2_matlab', label)
        tar_dir.mkdir(parents=True, exist_ok=True)
        out = spikes.ks2_to_tar(ks2_dir, tar_dir)
        out_files.extend(out)
    return out_files
Example #10
def extract_sync(session_path, overwrite=False, ephys_files=None):
    """
    Reads the ephys binary file(s) and extracts the sync traces within the binary file folder.
    Assumes ephys data is within a `raw_ephys_data` folder.

    :param session_path: '/path/to/subject/yyyy-mm-dd/001'
    :param overwrite: Bool on re-extraction, forces overwrite instead of loading existing files
    :return: list of sync dictionaries, list of output files
    """
    session_path = Path(session_path)
    if not ephys_files:
        ephys_files = spikeglx.glob_ephys_files(session_path)
    syncs = []
    outputs = []
    for efi in ephys_files:
        bin_file = efi.get('ap', efi.get('nidq', None))
        if not bin_file:
            continue
        alfname = dict(object='sync', namespace='spikeglx')
        if efi.label:
            alfname['extra'] = efi.label
        file_exists = alfio.exists(bin_file.parent, **alfname)
        if not overwrite and file_exists:
            _logger.warning(f'Skipping raw sync: SGLX sync found for probe {efi.label} !')
            sync = alfio.load_object(bin_file.parent, **alfname)
            out_files, _ = alfio._ls(bin_file.parent, **alfname)
        else:
            sr = spikeglx.Reader(bin_file)
            sync, out_files = _sync_to_alf(sr, bin_file.parent, save=True, parts=efi.label)
        outputs.extend(out_files)
        syncs.extend([sync])

    return syncs, outputs
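
A sketch of calling this variant, which returns both the sync dictionaries and the written files (hypothetical session path):

syncs, out_files = extract_sync('/path/to/subject/yyyy-mm-dd/001', overwrite=False)
print(len(syncs), 'sync objects;', len(out_files), 'files written or found')
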
Example #11
def raw_qc_session(session_path, dry=False, force=False):
    """
    Wrapper that exectutes QC from a session folder and outputs the results whithin the same folder
    as the original raw data.
    :param session_path: path of the session (Subject/yyyy-mm-dd/number
    :param dry: bool (False) Dry run if True
    :param force: bool (False) Force means overwriting an existing QC file
    :return: None
    """
    efiles = spikeglx.glob_ephys_files(session_path)
    for efile in efiles:
        if dry:
            print(efile.ap)
            print(efile.lf)
            continue
        if efile.ap and efile.ap.exists():
            extract_rmsmap(efile.ap,
                           out_folder=None,
                           force=force,
                           label=efile.label)
        if efile.lf and efile.lf.exists():
            extract_rmsmap(efile.lf,
                           out_folder=None,
                           force=force,
                           label=efile.label)
Example #12
def sync_spike_sortings(ses_path):
    """
    Converts the KS2 outputs for each probe into ALF format. Creates:
    alf/probeXX/spikes.*
    alf/probeXX/clusters.*
    alf/probeXX/templates.*
    :param ses_path: session containing probes to be merged
    :return: None
    """
    def _sr(ap_file):
        # gets sampling rate from data
        md = spikeglx.read_meta_data(ap_file.with_suffix('.meta'))
        return spikeglx._get_fs_from_meta(md)

    def _sample2v(ap_file):
        md = spikeglx.read_meta_data(ap_file.with_suffix('.meta'))
        s2v = spikeglx._conversion_sample2v_from_meta(md)
        return s2v['ap'][0]

    ses_path = Path(ses_path)
    ephys_files = spikeglx.glob_ephys_files(ses_path)
    subdirs, labels, efiles_sorted, srates = zip(
        *sorted([(ep.ap.parent, ep.label, ep, _sr(ep.ap)) for ep in ephys_files if ep.get('ap')]))

    _logger.info('converting spike-sorting outputs to ALF')
    out_files = []
    for subdir, label, ef, sr in zip(subdirs, labels, efiles_sorted, srates):
        if not subdir.joinpath('spike_times.npy').exists():
            _logger.warning(f"No KS2 spike sorting found in {subdir}, skipping probe !")
            continue
        probe_out_path = ses_path.joinpath('alf', label)
        probe_out_path.mkdir(parents=True, exist_ok=True)
        # handles the probes synchronization
        sync_file = ef.ap.parent.joinpath(ef.ap.name.replace('.ap.', '.sync.')
                                          ).with_suffix('.npy')
        if not sync_file.exists():
            """
            if there is no sync file it means something went wrong. Outputs the spike sorting
            in time according the the probe by following ALF convention on the times objects
            """
            error_msg = f'No synchronisation file for {label}: {sync_file}. The spike-' \
                        f'sorting is not synchronized and data not uploaded on Flat-Iron'
            _logger.error(error_msg)
            # remove the alf folder if the sync failed
            shutil.rmtree(probe_out_path)
            continue
        # converts the folder to ALF
        ks2_to_alf(subdir, probe_out_path, ampfactor=_sample2v(ef.ap), label=None, force=True)
        # patch the spikes.times files manually
        st_file = ses_path.joinpath(probe_out_path, 'spikes.times.npy')
        spike_samples = np.load(ses_path.joinpath(probe_out_path, 'spikes.samples.npy'))
        interp_times = apply_sync(sync_file, spike_samples / sr, forward=True)
        np.save(st_file, interp_times)
        # get the list of output files
        out_files.extend([f for f in ses_path.joinpath(probe_out_path).glob("*.*") if
                          f.name.startswith(('channels.', 'clusters.', 'spikes.', 'templates.',
                                             '_kilosort_', '_phy_spikes_subset'))])
    return out_files
Example #13
def _get_all_probes_sync(session_path, bin_exists=True):
    # round up all binary ephys files in the session, infer the version and get the sync map
    ephys_files = spikeglx.glob_ephys_files(session_path, bin_exists=bin_exists)
    version = spikeglx.get_neuropixel_version_from_files(ephys_files)
    # attach the sync information to each binary file found
    for ef in ephys_files:
        ef['sync'] = alfio.load_object(ef.path, 'sync', namespace='spikeglx', short_keys=True)
        ef['sync_map'] = get_ibl_sync_map(ef, version)
    return ephys_files
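
For illustration, a hedged sketch of what the helper above produces: each file entry comes back with `sync` and `sync_map` attached, where `sync_map` appears to map sync labels to channel indices (e.g. `sync_map['imec_sync']` elsewhere on this page). The session path is hypothetical.

efs = _get_all_probes_sync('/path/to/subject/yyyy-mm-dd/001')
for ef in efs:
    print(ef.label, ef.sync_map)
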
Example #14
 def test_compress_session(self):
     EphysMtscomp(self.main_folder).run()
     ephys_files = spikeglx.glob_ephys_files(self.main_folder)
     for ef in ephys_files:
         # there is only one compressed file afterwards
         self.assertTrue(ef.ap.suffix == '.cbin')
         self.assertFalse(ef.ap.with_suffix('.bin').exists())
         # the compressed file is readable
         sr = spikeglx.Reader(ef.ap)
         self.assertTrue(sr.is_mtscomp)
Example #15
def probes_description(ses_path, bin_exists=True):
    """
    Aggregate probes information into ALF files
    Input:
        raw_ephys_data/probeXX/
    Output:
        alf/probes.description.npy
        alf/probes.trajecory.npy
    """

    ses_path = Path(ses_path)
    ephys_files = spikeglx.glob_ephys_files(ses_path, bin_exists=bin_exists)
    subdirs, labels, efiles_sorted = zip(
        *sorted([(ep.ap.parent, ep.label, ep) for ep in ephys_files if ep.get('ap')]))

    """Ouputs the probes description file"""
    probe_description = []
    for label, ef in zip(labels, efiles_sorted):
        md = spikeglx.read_meta_data(ef.ap.with_suffix('.meta'))
        probe_description.append({'label': label,
                                  'model': md.neuropixelVersion,
                                  'serial': int(md.serial),
                                  'raw_file_name': md.fileName,
                                  })
    alf_path = ses_path.joinpath('alf')
    alf_path.mkdir(exist_ok=True, parents=True)
    probe_description_file = alf_path.joinpath('probes.description.json')
    with open(probe_description_file, 'w+') as fid:
        fid.write(json.dumps(probe_description))

    """Ouputs the probes trajectory file"""
    bpod_meta = raw_data_loaders.load_settings(ses_path)
    if not bpod_meta.get('PROBE_DATA'):
        _logger.error('No probe information in settings JSON. Skipping probes.trajectory')
        return

    def prb2alf(prb, label):
        return {'label': label, 'x': prb['X'], 'y': prb['Y'], 'z': prb['Z'], 'phi': prb['A'],
                'theta': prb['P'], 'depth': prb['D'], 'beta': prb['T']}

    # the labels may not match, in which case throw a warning and work in alphabetical order
    if labels != ('probe00', 'probe01'):
        _logger.warning("Probe names do not match the json settings files. Will match coordinates"
                        " per alphabetical order !")
        _ = [_logger.warning(f"  probe0{i} ----------  {lab} ") for i, lab in enumerate(labels)]
    trajs = []
    keys = sorted(bpod_meta['PROBE_DATA'].keys())
    for i, k in enumerate(keys):
        if i >= len(labels):
            break
        trajs.append(prb2alf(bpod_meta['PROBE_DATA'][f'probe0{i}'], labels[i]))
    probe_trajectory_file = alf_path.joinpath('probes.trajectory.json')
    with open(probe_trajectory_file, 'w+') as fid:
        fid.write(json.dumps(trajs))
    return [probe_trajectory_file, probe_description_file]
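
Reading the description file back is straightforward (hypothetical path):

import json

with open('/path/to/subject/yyyy-mm-dd/001/alf/probes.description.json') as fid:
    probes = json.load(fid)
# each entry carries 'label', 'model', 'serial' and 'raw_file_name'
print([p['label'] for p in probes])
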
Example #16
 def _run(self, overwrite=False):
     efiles = spikeglx.glob_ephys_files(self.session_path)
     apfiles = [ef.get('ap') for ef in efiles if 'ap' in ef.keys()]
     for apfile in apfiles:
         ks2log = apfile.parent.joinpath('spike_sorting_ks2.log')
         if not ks2log.exists():
             # this will label the job with "empty" status in the database
             return None
         with open(ks2log) as fid:
             line = fid.readline()
         self.version = re.compile("[a-f0-9]{36}").findall(line)[0]
         return []  # the job will be labeled as complete with empty string
Example #17
def _get_all_probes_sync(session_path):
    # round-up of all bin ephys files in the session, infer revision and get sync map
    ephys_files = glob_ephys_files(session_path)
    version = get_neuropixel_version_from_files(ephys_files)
    extract_sync(session_path, save=True)
    # attach the sync information to each binary file found
    for ef in ephys_files:
        ef['sync'] = alf.io.load_object(ef.path,
                                        '_spikeglx_sync',
                                        short_keys=True)
        ef['sync_map'] = get_ibl_sync_map(ef, version)

    return ephys_files
Example #18
def version3B(ses_path, display=True, type=None, tol=2.5):
    """
    From a session path with _spikeglx_sync arrays extraccted, locate ephys files for 3A and
     outputs one sync.timestamps.probeN.npy file per acquired probe. By convention the reference
     probe is the one with the most synchronisation pulses.
     Assumes the _spikeglx_sync datasets are already extracted from binary data
    :param ses_path:
    :param type: linear, exact or smooth
    :return: None
    """
    DEFAULT_TYPE = 'smooth'
    ephys_files = spikeglx.glob_ephys_files(ses_path, bin_exists=False)
    for ef in ephys_files:
        ef['sync'] = alf.io.load_object(ef.path,
                                        'sync',
                                        namespace='spikeglx',
                                        short_keys=True)
        ef['sync_map'] = get_ibl_sync_map(ef, '3B')
    nidq_file = [ef for ef in ephys_files if ef.get('nidq')]
    ephys_files = [ef for ef in ephys_files if not ef.get('nidq')]
    # should have at least 2 probes and only one nidq
    assert (len(nidq_file) == 1)
    nidq_file = nidq_file[0]
    sync_nidq = _get_sync_fronts(nidq_file.sync,
                                 nidq_file.sync_map['imec_sync'])

    qc_all = True
    out_files = []
    for ef in ephys_files:
        sync_probe = _get_sync_fronts(ef.sync, ef.sync_map['imec_sync'])
        sr = _get_sr(ef)
        try:
            assert (sync_nidq.times.size == sync_probe.times.size)
        except AssertionError:
            raise Neuropixel3BSyncFrontsNonMatching(f"{ses_path}")
        # if the qc of the diff finds anomalies, do not attempt to smooth the interp function
        qcdiff = _check_diff_3b(sync_probe)
        if not qcdiff:
            qc_all = False
            type_probe = type or 'exact'
        else:
            type_probe = type or DEFAULT_TYPE
        timestamps, qc = sync_probe_front_times(sync_probe.times,
                                                sync_nidq.times,
                                                sr,
                                                display=display,
                                                type=type_probe,
                                                tol=tol)
        qc_all &= qc
        out_files.extend(_save_timestamps_npy(ef, timestamps, sr))
    return qc_all, out_files
Example #19
def version3A(ses_path, display=True, linear=False, tol=1.5):
    """
    From a session path with _spikeglx_sync arrays extracted, locate ephys files for 3A and
     outputs one sync.timestamps.probeN.npy file per acquired probe. By convention the reference
     probe is the one with the most synchronisation pulses.
     Assumes the _spikeglx_sync datasets are already extracted from binary data
    :param ses_path:
    :return: bool True on a a successful sync
    """
    ephys_files = spikeglx.glob_ephys_files(ses_path)
    nprobes = len(ephys_files)
    if nprobes <= 1:
        _logger.warning(f"Skipping single probe session: {ses_path}")
        return True
    d = Bunch({'times': [], 'nsync': np.zeros(nprobes, )})
    for ind, ephys_file in enumerate(ephys_files):
        sync = alf.io.load_object(ephys_file.ap.parent,
                                  '_spikeglx_sync',
                                  short_keys=True)
        sync_map = get_ibl_sync_map(ephys_file, '3A')
        isync = np.in1d(sync['channels'], np.array([sync_map['right_camera']]))
        d.nsync[ind] = len(sync.channels)
        d['times'].append(sync['times'][isync])
    # chop off to the lowest number of sync points
    nsyncs = [t.size for t in d['times']]
    if len(set(nsyncs)) > 1:
        _logger.warning(
            "Probes don't have the same number of synchronizations pulses")
    d['times'] = np.r_[[t[:min(nsyncs)] for t in d['times']]].transpose()

    # the reference probe is the one with the most sync pulses detected
    iref = np.argmax(d.nsync)
    # islave = np.setdiff1d(np.arange(nprobes), iref)
    # get the sampling rate from the reference probe using metadata file
    sr = _get_sr(ephys_files[iref])
    qc_all = True
    # output timestamps files as per ALF convention
    for ind, ephys_file in enumerate(ephys_files):
        if ind == iref:
            timestamps = np.array([[0, 0], [1, 1]])
        else:
            timestamps, qc = sync_probe_front_times(d.times[:, iref],
                                                    d.times[:, ind],
                                                    sr,
                                                    display=display,
                                                    linear=linear,
                                                    tol=tol)
            qc_all &= qc
        _save_timestamps_npy(ephys_file, timestamps)
    return qc_all
Example #20
def extract_data(ks_path, ephys_path, out_path):
    efiles = spikeglx.glob_ephys_files(ephys_path)

    for efile in efiles:
        if efile.get('ap') and efile.ap.exists():
            ks2_to_alf(ks_path,
                       ephys_path,
                       out_path,
                       bin_file=efile.ap,
                       ampfactor=_sample2v(efile.ap),
                       label=None,
                       force=True)
            extract_rmsmap(efile.ap, out_folder=out_path, spectra=False)
        if efile.get('lf') and efile.lf.exists():
            extract_rmsmap(efile.lf, out_folder=out_path)
Example #21
def raw_qc_session(session_path, overwrite=False):
    """
    Wrapper that exectutes QC from a session folder and outputs the results whithin the same folder
    as the original raw data.
    :param session_path: path of the session (Subject/yyyy-mm-dd/number
    :param overwrite: bool (False) Force means overwriting an existing QC file
    :return: None
    """
    efiles = spikeglx.glob_ephys_files(session_path)
    qc_files = []
    for efile in efiles:
        if efile.get('ap') and efile.ap.exists():
            qc_files.extend(extract_rmsmap(efile.ap, out_folder=None, overwrite=overwrite))
        if efile.get('lf') and efile.lf.exists():
            qc_files.extend(extract_rmsmap(efile.lf, out_folder=None, overwrite=overwrite))
    return qc_files
Example #22
def check_ephys_file(root_path, hash=False):
    root_path = Path(root_path)
    efiles = spikeglx.glob_ephys_files(root_path)
    for ef in efiles:
        for lab in ['nidq', 'ap', 'lf']:
            if not ef.get(lab, None):
                continue
            try:
                sr = spikeglx.Reader(ef[lab])
                if hash:
                    ok = sr.verify_hash()
                    if not ok:
                        raise ValueError("hashes don't match")
                _logger.info(f"PASS {ef[lab]}")
            except Exception as e:
                _logger.error(f"FAILED {ef[lab]} is corrupt: {e}")
Example #23
File: one.py Project: ablot/ibllib
def load_lfp(eid, one=None, dataset_types=None):
    """
    From an eid, hits the Alyx database and downloads the standard set of datasets
    needed for LFP
    :param eid:
    :param dataset_types: additional dataset types to add to the list
    :return: spikeglx.Reader
    """
    if dataset_types is None:
        dataset_types = []
    dtypes = dataset_types + ['ephysData.raw.lf', 'ephysData.raw.meta', 'ephysData.raw.ch']
    one.load(eid, dataset_types=dtypes, download_only=True)
    session_path = one.path_from_eid(eid)

    efiles = [ef for ef in spikeglx.glob_ephys_files(session_path, bin_exists=False)
              if ef.get('lf', None)]
    return [spikeglx.Reader(ef['lf']) for ef in efiles]
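
A usage sketch for `load_lfp`, assuming an ONE instance as in Example #30 below (the query values are illustrative):

from oneibl.one import ONE

one = ONE()
eid = one.search(subject='ZM_2240', date_range='2020-01-22')[0]
lfp_readers = load_lfp(eid, one=one)  # one spikeglx.Reader per probe LFP file
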
Example #24
 def _run(self, overwrite=False):
     """
     Multiple steps. For each probe:
     - Runs ks2 (skips if it already ran)
     - synchronize the spike sorting
     - output the probe description files
     :param overwrite:
     :return: list of files to be registered on database
     """
     efiles = spikeglx.glob_ephys_files(self.session_path)
     ap_files = [(ef.get("ap"), ef.get("label")) for ef in efiles
                 if "ap" in ef.keys()]
     out_files = []
     for ap_file, label in ap_files:
         try:
             ks2_dir = self._run_ks2(
                 ap_file)  # runs ks2, skips if it already ran
             probe_out_path = self.session_path.joinpath("alf", label)
             probe_out_path.mkdir(parents=True, exist_ok=True)
             spikes.ks2_to_alf(
                 ks2_dir,
                 bin_path=ap_file.parent,
                 out_path=probe_out_path,
                 bin_file=ap_file,
                 ampfactor=self._sample2v(ap_file),
             )
             out, _ = spikes.sync_spike_sorting(ap_file=ap_file,
                                                out_path=probe_out_path)
             out_files.extend(out)
              # convert the ks2 output into a tar file and register it as well
              # do this in case the spike sorting is in old raw_ephys_data folders;
              # for new sessions the directory should already exist
             tar_dir = self.session_path.joinpath('spike_sorters',
                                                  'ks2_matlab', label)
             tar_dir.mkdir(parents=True, exist_ok=True)
             out = spikes.ks2_to_tar(ks2_dir, tar_dir)
             out_files.extend(out)
         except BaseException:
             _logger.error(traceback.format_exc())
             self.status = -1
             continue
     probe_files = spikes.probes_description(self.session_path,
                                             one=self.one)
     return out_files + probe_files
Example #25
 def _run(self):
     """
     Compress ephys files looking for `compress_ephys.flag` whithin the probes folder
     Original bin file will be removed
     The registration flag created contains targeted file names at the root of the session
     """
     ephys_files = spikeglx.glob_ephys_files(self.session_path)
     out_files = []
     for ef in ephys_files:
         for typ in ['ap', 'lf', 'nidq']:
             bin_file = ef.get(typ)
             if not bin_file:
                 continue
             sr = spikeglx.Reader(bin_file)
             if sr.is_mtscomp:
                 out_files.append(bin_file)
             else:
                 _logger.info(f"Compressing binary file {bin_file}")
                 out_files.append(sr.compress_file(keep_original=False))
     return out_files
Example #26
def version3B(ses_path, display=True, linear=False, tol=2.5):
    """
    From a session path with _spikeglx_sync arrays extraccted, locate ephys files for 3A and
     outputs one sync.timestamps.probeN.npy file per acquired probe. By convention the reference
     probe is the one with the most synchronisation pulses.
     Assumes the _spikeglx_sync datasets are already extracted from binary data
    :param ses_path:
    :return: None
    """
    ephys_files = spikeglx.glob_ephys_files(ses_path)
    for ef in ephys_files:
        ef['sync'] = alf.io.load_object(ef.path,
                                        '_spikeglx_sync',
                                        short_keys=True)
        ef['sync_map'] = get_ibl_sync_map(ef, '3B')
    nidq_file = [ef for ef in ephys_files if ef.get('nidq')]
    ephys_files = [ef for ef in ephys_files if not ef.get('nidq')]
    nprobes = len(ephys_files)
    # should have at least 2 probes and only one nidq
    if nprobes <= 1:
        return True
    assert (len(nidq_file) == 1)
    nidq_file = nidq_file[0]
    sync_nidq = _get_sync_fronts(nidq_file.sync,
                                 nidq_file.sync_map['imec_sync'])

    qc_all = True
    for ef in ephys_files:
        sync_probe = _get_sync_fronts(ef.sync, ef.sync_map['imec_sync'])
        sr = _get_sr(ef)
        assert (sync_nidq.times.size == sync_probe.times.size)
        timestamps, qc = sync_probe_front_times(sync_probe.times,
                                                sync_nidq.times,
                                                sr,
                                                display=display,
                                                linear=linear,
                                                tol=tol)
        qc_all &= qc
        _save_timestamps_npy(ef, timestamps)
    return qc_all
Example #27
def _get_task_sync(session_path):
    """
    From 3A or 3B multiprobe session, returns the main probe (3A) or nidq sync pulses
    with the attached channel map (default chmap if none)
    :param session_path:
    :return:
    """
    def _get_probe_version_from_files(ephys_files):
        if any([ef.get('nidq') for ef in ephys_files]):
            return '3B'
        else:
            return '3A'

    # round-up of all bin ephys files in the session, infer revision and get sync map
    ephys_files = glob_ephys_files(session_path)
    version = _get_probe_version_from_files(ephys_files)

    sync_chmap = CHMAPS[version]
    extract_sync(session_path, save=True)
    # attach the sync information to each binary file found
    for ef in ephys_files:
        ef['sync'] = alf.io.load_object(ef.path,
                                        '_spikeglx_sync',
                                        short_keys=True)
        ef['sync_map'] = ibllib.io.spikeglx.get_sync_map(ef['path'])

    if version == '3A':
        # the sync master is the probe with the most sync pulses
        sync_box_ind = np.argmax([ef.sync.times.size for ef in ephys_files])
    elif version == '3B':
        # the sync master is the nidq breakout box
        sync_box_ind = np.argmax(
            [1 if ef.get('nidq') else 0 for ef in ephys_files])

    sync = ephys_files[sync_box_ind].sync
    return sync, sync_chmap
Example #28
def version3A(ses_path, display=True, type='smooth', tol=2.1):
    """
    From a session path with _spikeglx_sync arrays extracted, locate ephys files for 3A and
     outputs one sync.timestamps.probeN.npy file per acquired probe. By convention the reference
     probe is the one with the most synchronisation pulses.
     Assumes the _spikeglx_sync datasets are already extracted from binary data
    :param ses_path:
    :param type: linear, exact or smooth
    :return: bool True on a a successful sync
    """
    ephys_files = spikeglx.glob_ephys_files(ses_path, bin_exists=False)
    nprobes = len(ephys_files)
    if nprobes == 1:
        timestamps = np.array([[0., 0.], [1., 1.]])
        sr = _get_sr(ephys_files[0])
        out_files = _save_timestamps_npy(ephys_files[0], timestamps, sr)
        return True, out_files

    def get_sync_fronts(auxiliary_name):
        d = Bunch({'times': [], 'nsync': np.zeros(nprobes, )})
        # auxiliary_name: frame2ttl or right_camera
        for ind, ephys_file in enumerate(ephys_files):
            sync = alf.io.load_object(ephys_file.ap.parent,
                                      '_spikeglx_sync',
                                      short_keys=True)
            sync_map = get_ibl_sync_map(ephys_file, '3A')
            # exits if sync label not found for current probe
            if auxiliary_name not in sync_map:
                return
            isync = np.in1d(sync['channels'],
                            np.array([sync_map[auxiliary_name]]))
            # only returns syncs if we get fronts for all probes
            if np.all(~isync):
                return
            d.nsync[ind] = len(sync.channels)
            d['times'].append(sync['times'][isync])
        return d

    d = get_sync_fronts('frame2ttl')
    if not d:
        _logger.warning(
            'Ephys sync: frame2ttl not detected on both probes, using camera sync'
        )
        d = get_sync_fronts('right_camera')
        if not min([t[0] for t in d['times']]) > 0.2:
            raise ValueError('Cameras started before ephys, no sync possible')
    # chop off to the lowest number of sync points
    nsyncs = [t.size for t in d['times']]
    if len(set(nsyncs)) > 1:
        _logger.warning(
            "Probes don't have the same number of synchronizations pulses")
    d['times'] = np.r_[[t[:min(nsyncs)] for t in d['times']]].transpose()

    # the reference probe is the one with the most sync pulses detected
    iref = np.argmax(d.nsync)
    # islave = np.setdiff1d(np.arange(nprobes), iref)
    # get the sampling rate from the reference probe using metadata file
    sr = _get_sr(ephys_files[iref])
    qc_all = True
    # output timestamps files as per ALF convention
    for ind, ephys_file in enumerate(ephys_files):
        if ind == iref:
            timestamps = np.array([[0., 0.], [1., 1.]])
        else:
            timestamps, qc = sync_probe_front_times(d.times[:, ind],
                                                    d.times[:, iref],
                                                    sr,
                                                    display=display,
                                                    type=type,
                                                    tol=tol)
            qc_all &= qc
        out_files = _save_timestamps_npy(ephys_file, timestamps, sr)
    return qc_all, out_files
Example #29
 def test_glob_ephys(self):
     def dict_equals(d1, d2):
          return all([x in d1 for x in d2]) and all([x in d2 for x in d1])
     self.assertTrue(dict_equals(self.dict3a, spikeglx.glob_ephys_files(self.dir3a)))
     self.assertTrue(dict_equals(self.dict3b, spikeglx.glob_ephys_files(self.dir3b)))
Example #30
# imports needed to run this example
import numpy as np
import scipy.interpolate

from ibllib.io import spikeglx
from oneibl.one import ONE

one = ONE()

# Get a specific session eID
eid = one.search(subject='ZM_2240', date_range='2020-01-22')[0]

# Define and load dataset types of interest
dtypes = [
    'ephysData.raw.lf', 'ephysData.raw.meta', 'ephysData.raw.ch',
    'ephysData.raw.sync'
]
one.load(eid, dataset_types=dtypes, download_only=True)

# === Option 1 === Get the file information from the datasets downloaded above via ONE
session_path = one.path_from_eid(eid)
efiles = [
    ef for ef in spikeglx.glob_ephys_files(session_path, bin_exists=False)
    if ef.get('lf', None)
]
efile = efiles[0]['lf']

# === Option 2 === You can also input a file locally, e.g.
# efile = ('/datadisk/FlatIron/churchlandlab/Subjects/CSHL045/2020-02-26/001/'
#            'raw_ephys_data/probe01/_spikeglx_ephysData_g0_t0.imec.lf.cbin')

# === Read the files and get the data ===
sr = spikeglx.Reader(efile)

sync_file = sr.file_bin.parent.joinpath(
    sr.file_bin.stem.replace('.lf', '.sync.npy'))
sync = np.load(sync_file)
sample2time = scipy.interpolate.interp1d(sync[:, 0] * sr.fs, sync[:, 1])
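
To close the loop (a sketch using only quantities defined above): `sample2time` maps raw sample indices to the synchronized session clock, so the first point of the sync map converts back to its reference time:

print(sample2time(sync[0, 0] * sr.fs))  # should equal sync[0, 1]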