Example No. 1
0
def sync_merge_ephys(root_data_folder, dry=False):
    """
    Post spike-sorting processing:
    - synchronization of probes
    - ks2 to ALF conversion for each probe in alf/probeXX folder
    - computes spike sorting QC
    - creates probes object in alf folder
    To start the job for a session, all electrophysiology ap files from the session need to be
    associated with a `sync_merge_ephys.flag` file.
    Outputs individual probes.

    :param root_data_folder: folder searched recursively for `sync_merge_ephys.flag` files
    :param dry: if True, only print the session paths that would be processed
    """
    syncflags = list(Path(root_data_folder).rglob('sync_merge_ephys.flag'))
    # flag files live 3 levels below the session folder; deduplicate sessions
    session_paths = list(set([f.parents[2] for f in syncflags]))
    for session_path in session_paths:
        print(session_path)
        if dry:
            continue
        # remove the flags for this session (plain loop: unlink is a side effect,
        # a list comprehension built a throwaway list of Nones)
        for f in syncflags:
            if f.parents[2] == session_path:
                f.unlink()
        # first sync the probes
        sync_probes.sync(session_path)
        # then convert ks2 to ALF and resync spike sorting data
        spikes.sync_spike_sortings(session_path)
        # outputs the probes object in the ALF folder
        spikes.probes_description(session_path)
        # wrap up by creating the register_me flag for the next pipeline stage
        flags.write_flag_file(session_path.joinpath('register_me.flag'))
Example No. 2
0
def extract_pulses(session_path, overwrite=False):
    """
    Extract synchronisation pulses from the raw electrophysiology data and sync the probes.

    :param session_path: session folder containing the raw ephys data
    :param overwrite: if True, re-extract pulses even if output files already exist
    :return: list of extracted pulse files plus the probe synchronisation files
    """
    # outputs numpy files; the sync structures themselves are not needed here
    _, out_files = ephys_fpga.extract_sync(session_path, overwrite=overwrite)
    for out_file in out_files:
        _logger.info(f"extracted pulses for {out_file}")

    # NOTE(review): the sync status is discarded — consider checking it before
    # returning, as other callers of sync_probes.sync do
    _, sync_files = sync_probes.sync(session_path)
    return out_files + sync_files
Example No. 3
0
def sync_spike_sorting(ap_file, out_path):
    """
    Synchronizes the spikes.times using the previously computed sync files.

    Loads spikes.samples.npy from `out_path`, converts samples to seconds using the
    sampling rate from the ap meta-data, applies the probe sync model and writes
    spikes.times.npy back to `out_path`.

    :param ap_file: raw binary data file for the probe insertion
    :param out_path: probe output path (usually {session_path}/alf/{probe_label})
    :return: (list of output files, 0) on success; (None, 1) if no sync file could
        be found or created (in which case `out_path` is deleted)
    """
    def _sr(ap_file):
        # gets the sampling rate (Hz) from the spikeglx meta-data file
        md = spikeglx.read_meta_data(ap_file.with_suffix('.meta'))
        return spikeglx._get_fs_from_meta(md)

    out_files = []
    label = ap_file.parts[
        -1]  # NOTE(review): parts[-1] is the file NAME; the original comment said the
    # bin file sits in a folder named after the probe, which would be parts[-2] —
    # label is only used in the error message below, so behavior is unaffected
    sync_file = ap_file.parent.joinpath(ap_file.name.replace(
        '.ap.', '.sync.')).with_suffix('.npy')
    # try to compute the probe sync if the sync file doesn't exist yet
    if not sync_file.exists():
        _, sync_files = sync_probes.sync(get_session_path(ap_file))
        out_files.extend(sync_files)
    # if it is still not there, this is a full blown error
    if not sync_file.exists():
        # if there is no sync file it means something went wrong. Outputs the spike sorting
        # in time according to the probe by following the ALF convention on the times objects
        error_msg = f'No synchronisation file for {label}: {sync_file}. The spike-' \
                    f'sorting is not synchronized and data not uploaded on Flat-Iron'
        _logger.error(error_msg)
        # remove the alf folder if the sync failed
        shutil.rmtree(out_path)
        return None, 1
    # patch the spikes.times file manually: samples -> seconds -> synced times
    st_file = out_path.joinpath('spikes.times.npy')
    spike_samples = np.load(out_path.joinpath('spikes.samples.npy'))
    interp_times = apply_sync(sync_file,
                              spike_samples / _sr(ap_file),
                              forward=True)
    np.save(st_file, interp_times)
    # collect the list of output files produced by the spike sorting
    out_files.extend([
        f for f in out_path.glob("*.*")
        if f.name.startswith(('channels.', 'drift', 'clusters.', 'spikes.',
                              'templates.', '_kilosort_', '_phy_spikes_subset',
                              '_ibl_log.info'))
    ])
    # the QC files computed during spike sorting stay within the raw ephys data folder
    out_files.extend(list(ap_file.parent.glob('_iblqc_*AP.*.npy')))
    return out_files, 0
Example No. 4
0
 def _run(self):
     """
     Run the post spike-sorting processing for the session:
     - synchronization of probes
     - ks2 to ALF conversion for each probe in alf/probeXX folder
     - computes spike sorting QC
     - creates probes object in alf folder

     :return: list of files produced by the three processing steps
     """
     session_path = self.session_path
     # probe synchronisation (the status flag is not acted upon here)
     _, sync_files = sync_probes.sync(session_path)
     # ks2 to ALF conversion with resynchronised spike sorting data
     sorting_files = spikes.sync_spike_sortings(session_path)
     # probes description object written to the ALF folder
     description_files = spikes.probes_description(session_path)
     output_files = []
     for produced in (sync_files, sorting_files, description_files):
         output_files = output_files + produced
     return output_files
Example No. 5
0
def job_run_ks2():
    """
    Run ks2 spike-sorting on the oldest session flagged with a `sort_me.flag` file.

    Looks for flag files under the module-level `root_path`, syncs the probes,
    runs the SpikeSorting task and registers/uploads its outputs via the FTP patcher.
    Returns early (after logging) when there is nothing to do or a step fails.
    """
    # Look for flag files in /mnt/s0/Data and sort them in order of date they were created
    flag_files = list(Path(root_path).glob('**/sort_me.flag'))
    flag_files.sort(key=os.path.getmtime)

    # guard: without this, flag_files[0] below raises IndexError when no session is flagged
    if not flag_files:
        _logger.info('No sort_me.flag file found: nothing to do')
        return

    # Start with the oldest flag; consume it so the session is not picked up again
    session_path = flag_files[0].parent
    session = str(PosixPath(*session_path.parts[4:]))
    flag_files[0].unlink()

    # Instantiate one
    one = ONE(cache_rest=None)

    # sync the probes
    status, sync_files = sync_probes.sync(session_path)

    if not status:
        _logger.error(f'{session}: Could not sync probes')
        return
    _logger.info(f'{session}: Probes successfully synced')

    # run ks2
    task = ephys.SpikeSorting(session_path, one=one)
    status = task.run()

    # NOTE(review): sync uses a truthy status, the task a 0-success code — confirm conventions
    if status != 0:
        _logger.error(f'{session}: Could not run ks2')
        return
    _logger.info(f'{session}: ks2 successfully completed')

    # Register and upload files to FTP Patcher
    outfiles = task.outputs
    ftp_patcher = FTPPatcher(one=one)
    ftp_patcher.create_dataset(path=outfiles,
                               created_by=one._par.ALYX_LOGIN)