Example #1
def patch_dataset(self, file_list, dry=False, ftp=False, **kwargs):
    """
    Creates a new dataset on FlatIron and uploads it from an arbitrary location.
    Rules for creation/patching are the same as those that apply for registration via Alyx,
    as this uses the registration endpoint to get the dataset.
    An existing file (same session and path relative to the session) will be patched.
    :param file_list: full file path, which must be within an ALF session folder
     (subject/date/number); can also be a list of full file paths belonging to the same session
    :param server_repository: Alyx server repository name
    :param created_by: Alyx username for the dataset (optional, defaults to root)
    :param ftp: flag for the case when using the FTP patcher; do not adjust Windows paths in
     _patch_dataset when ftp=True
    :return: the registration response, a list of dataset records
    """
    # first register the file
    if not isinstance(file_list, list):
        file_list = [Path(file_list)]
    # all files must belong to the same session and exist on disk
    assert len(set([get_session_path(f) for f in file_list])) == 1
    assert all([Path(f).exists() for f in file_list])
    response = self.register_dataset(file_list, dry=dry, **kwargs)
    if dry:
        return
    # from the dataset info, set the FlatIron flag to exists=True
    for p, d in zip(file_list, response):
        self._patch_dataset(p, dset_id=d['id'], dry=dry, ftp=ftp)
    return response
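A minimal usage sketch, assuming a hypothetical patcher object that exposes this method and hypothetical local file paths; created_by and server_repository are simply forwarded to register_dataset through **kwargs:

# hypothetical paths and patcher object, for illustration only
files = [
    '/data/Subjects/ZM_1234/2021-03-01/001/alf/spikes.times.npy',
    '/data/Subjects/ZM_1234/2021-03-01/001/alf/spikes.clusters.npy',
]
records = patcher.patch_dataset(files, created_by='root',
                                server_repository='flatiron_example', dry=False)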
Example #2
def rename_session(session_path: str) -> Path:
    """
    Rename a session.  Prompts the user for the new subject name, date and number, then moves
    the session path to the new session path.
    :param session_path: A session path to rename
    :return: The renamed session path
    """
    session_path = get_session_path(session_path)
    if session_path is None:
        raise ValueError('Session path not valid ALF session folder')
    mouse = session_path.parts[-3]
    date = session_path.parts[-2]
    sess = session_path.parts[-1]
    new_mouse = input(
        f"Please insert subject NAME [current value: {mouse}]> ") or mouse
    new_date = input(
        f"Please insert new session DATE [current value: {date}]> ") or date
    new_sess = input(
        f"Please insert new session NUMBER [current value: {sess}]> ") or sess
    new_session_path = Path(
        *session_path.parts[:-3]) / new_mouse / new_date / new_sess.zfill(3)
    assert is_session_path(new_session_path), 'invalid subject, date or number'

    shutil.move(str(session_path), str(new_session_path))
    print(session_path, "--> renamed to:")
    print(new_session_path)

    return new_session_path
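An interactive usage sketch with a hypothetical session folder; pressing Enter at any prompt keeps the current value:

# hypothetical path; the user is prompted for the new subject, date and number
new_path = rename_session('/data/Subjects/ZM_1234/2021-03-01/001')
print(new_path)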
Example #3
def get_task_protocol(session_path):
    """
    Reads the task settings for a session and returns the PYBPOD_PROTOCOL entry,
    or None if the settings are missing or unreadable.
    """
    try:
        settings = load_settings(get_session_path(session_path))
    except json.decoder.JSONDecodeError:
        _logger.error(f"Can't read settings for {session_path}")
        return
    if settings:
        return settings.get('PYBPOD_PROTOCOL', None)
    else:
        return
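A usage sketch with a hypothetical session path; the function returns None when the settings file cannot be read or has no protocol entry:

protocol = get_task_protocol('/data/Subjects/ZM_1234/2021-03-01/001')
if protocol is None:
    print('no task protocol found for this session')
else:
    print(f'task protocol: {protocol}')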
Example #4
def sync_spike_sorting(ap_file, out_path):
    """
    Synchronizes the spike.times using the previously computed sync files
    :param ap_file: raw binary data file for the probe insertion
    :param out_path: probe output path (usually {session_path}/alf/{probe_label})
    """
    def _sr(ap_file):
        # gets sampling rate from data
        md = spikeglx.read_meta_data(ap_file.with_suffix('.meta'))
        return spikeglx._get_fs_from_meta(md)

    out_files = []
    # the bin file is always in a folder bearing the name of the probe
    label = ap_file.parts[-1]
    sync_file = ap_file.parent.joinpath(ap_file.name.replace(
        '.ap.', '.sync.')).with_suffix('.npy')
    # try to get probe sync if it doesn't exist
    if not sync_file.exists():
        _, sync_files = sync_probes.sync(get_session_path(ap_file))
        out_files.extend(sync_files)
    # if it is still not there, raise a full-blown error
    if not sync_file.exists():
        # if there is no sync file it means something went wrong; the spike sorting output
        # stays in probe time, following the ALF convention for times objects
        error_msg = f'No synchronisation file for {label}: {sync_file}. The spike-' \
                    f'sorting is not synchronized and the data are not uploaded to FlatIron'
        _logger.error(error_msg)
        # remove the alf folder if the sync failed
        shutil.rmtree(out_path)
        return None, 1
    # patch the spikes.times files manually
    st_file = out_path.joinpath('spikes.times.npy')
    spike_samples = np.load(out_path.joinpath('spikes.samples.npy'))
    interp_times = apply_sync(sync_file,
                              spike_samples / _sr(ap_file),
                              forward=True)
    np.save(st_file, interp_times)
    # get the list of output files
    out_files.extend([
        f for f in out_path.glob("*.*")
        if f.name.startswith(('channels.', 'drift', 'clusters.', 'spikes.',
                              'templates.', '_kilosort_', '_phy_spikes_subset',
                              '_ibl_log.info'))
    ])
    # the QC files computed during spike sorting stay within the raw ephys data folder
    out_files.extend(list(ap_file.parent.glob('_iblqc_*AP.*.npy')))
    return out_files, 0
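A usage sketch under assumed paths: ap_file points at a hypothetical raw .ap.bin file for one probe and out_path at that probe's alf output folder:

from pathlib import Path

# hypothetical session layout, for illustration only
ap_file = Path('/data/Subjects/ZM_1234/2021-03-01/001/raw_ephys_data/probe00/'
               '_spikeglx_ephysData_g0_t0.imec0.ap.bin')
out_path = Path('/data/Subjects/ZM_1234/2021-03-01/001/alf/probe00')

out_files, retcode = sync_spike_sorting(ap_file, out_path)
if retcode != 0:
    print('synchronisation failed, the alf folder was removed')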
Example #5
def patch_datasets(self, file_list, **kwargs):
    """
    Same as the patch_dataset method but accepts files belonging to several sessions
    """
    register_dict = {}
    # creates a dictionary of sessions with one file list per session
    for f in file_list:
        session_path = get_session_path(f)
        label = '_'.join(session_path.parts[-3:])
        if label in register_dict:
            register_dict[label]['files'].append(f)
        else:
            register_dict[label] = {
                'session_path': session_path,
                'files': [f]
            }
    responses = []
    nses = len(register_dict)
    for i, label in enumerate(register_dict):
        _files = register_dict[label]['files']
        _logger.info(
            f"{i}/{nses} {label}, registering {len(_files)} files")
        responses.extend(self.patch_dataset(_files, **kwargs))
    return responses
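A sketch showing files from two different sessions patched in one call (hypothetical paths, same assumed patcher object as above); files are grouped by session before being forwarded to patch_dataset:

# hypothetical paths, for illustration only
files = [
    '/data/Subjects/ZM_1234/2021-03-01/001/alf/spikes.times.npy',
    '/data/Subjects/ZM_5678/2021-03-02/002/alf/spikes.times.npy',
]
responses = patcher.patch_datasets(files, created_by='root')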
Example #6
def session_name(path) -> str:
    """Returns the session name (subject/date/number) string for any filepath
    using session_path"""
    return '/'.join(get_session_path(path).parts[-3:])
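A quick sketch with a hypothetical file path anywhere inside a session folder:

name = session_name('/data/Subjects/ZM_1234/2021-03-01/001/alf/spikes.times.npy')
# name == 'ZM_1234/2021-03-01/001'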
Example #7
def register_dataset(file_list,
                     one=None,
                     created_by=None,
                     repository=None,
                     server_only=False,
                     versions=None,
                     default=True,
                     dry=False,
                     max_md5_size=None):
    """
    Registers a set of files belonging to a single session on the Alyx server only;
    no file data is transferred.
    :param file_list: (list of pathlib.Path or pathlib.Path)
    :param one: optional (one.api.One), current one object, will create an instance if not provided
    :param created_by: (string) name of user in Alyx (defaults to 'root')
    :param repository: optional: (string) name of the repository in Alyx
    :param server_only: optional: (bool) if True only creates on the Flatiron (defaults to False)
    :param versions: optional (list of strings): versions tags (defaults to ibllib version)
    :param default: optional (bool) whether to set as default dataset (defaults to True)
    :param dry: (bool) if True, nothing is posted to Alyx (defaults to False)
    :param max_md5_size: optional (int) maximum file size in bytes for which the md5 sum is
     computed; larger files get a None hash (defaults to None, i.e. always compute)
    :return: the Alyx response from the register-file endpoint (None when dry=True)
    """
    if created_by is None:
        created_by = one.alyx.user
    if file_list is None or file_list == '' or file_list == []:
        return
    elif not isinstance(file_list, list):
        file_list = [Path(file_list)]

    assert len(set([get_session_path(f) for f in file_list])) == 1
    assert all([Path(f).exists() for f in file_list])
    if versions is None:
        versions = version.ibllib()
    if isinstance(versions, str):
        versions = [versions for _ in file_list]
    assert isinstance(versions, list) and len(versions) == len(file_list)

    # computing the md5 can take a very long time, so this is an option to skip it
    # for files bigger than a certain threshold
    if max_md5_size:
        hashes = [
            hashfile.md5(p) if p.stat().st_size < max_md5_size else None
            for p in file_list
        ]
    else:
        hashes = [hashfile.md5(p) for p in file_list]

    session_path = get_session_path(file_list[0])
    # first register the file
    r = {
        'created_by': created_by,
        'path': session_path.relative_to((session_path.parents[2])).as_posix(),
        'filenames':
        [p.relative_to(session_path).as_posix() for p in file_list],
        'name': repository,
        'server_only': server_only,
        'hashes': hashes,
        'filesizes': [p.stat().st_size for p in file_list],
        'versions': versions,
        'default': default
    }
    if not dry:
        if one is None:
            one = ONE(cache_rest=None)
        response = one.alyx.rest('register-file',
                                 'create',
                                 data=r,
                                 no_cache=True)
        for p in file_list:
            _logger.info(f"ALYX REGISTERED DATA: {p}")
        return response
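A dry-run sketch with hypothetical files and an assumed, already-configured ONE connection (the one.api client referenced in the docstring); with dry=True nothing is posted to Alyx and the call only validates the inputs:

from pathlib import Path
from one.api import ONE

one = ONE()  # assumes Alyx credentials are already set up
files = [Path('/data/Subjects/ZM_1234/2021-03-01/001/alf/spikes.times.npy')]  # hypothetical
register_dataset(files, one=one, created_by='root',
                 repository='flatiron_example', dry=True)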