# Code example #1
def process_sorting_record(sorting_record, comparison_result_list):
    """Queue quality-metric and ground-truth-comparison jobs for one sorting record.

    On success, builds a comparison entry (via ``make_comparison_entry``) from
    two freshly created hither jobs and appends it to ``comparison_result_list``.
    Records missing any of the required URI keys are reported and skipped.

    Parameters
    ----------
    sorting_record : dict
        Expected to contain ``RECORDING_URI_KEY``, ``GROUND_TRUTH_URI_KEY``
        and ``SORTING_FIRINGS_URI_KEY``.
    comparison_result_list : list
        Mutated in place; one entry is appended per successfully processed record.
    """
    try:
        # Keep the try body minimal: only these key lookups should trigger the
        # "skip this record" path. The original wrapped job creation and
        # make_comparison_entry too, so a KeyError raised anywhere downstream
        # would have been silently misreported as a missing-key record.
        params = {
            'recording_uri': sorting_record[RECORDING_URI_KEY],
            'gt_uri': sorting_record[GROUND_TRUTH_URI_KEY],
            'firings_uri': sorting_record[SORTING_FIRINGS_URI_KEY]
        }
    except KeyError:
        print(
            f"One of sorting/recording/gt-sorting keys missing from {json.dumps(sorting_record)}. Skipping..."
        )
        return
    quality_metric_job = hi.Job(compute_quality_metrics_hi, params)
    ground_truth_comparison_job = hi.Job(
        compute_ground_truth_comparison_hi, params)
    comparison = make_comparison_entry(sorting_record, quality_metric_job,
                                       ground_truth_comparison_job)
    comparison_result_list.append(comparison)
# Code example #2
def queue_sort(sorter: SorterRecord, recording: RecordingRecord) -> hi.Job:
    """Create (but do not execute) a hither Job that sorts *recording* with *sorter*.

    Parameters
    ----------
    sorter : SorterRecord
        Its ``sorter_name`` selects the sorting function from ``KNOWN_SORTERS``.
    recording : RecordingRecord
        Its ``recording_uri`` identifies the recording to sort.

    Raises
    ------
    ValueError
        If ``sorter.sorter_name`` is not a key of ``KNOWN_SORTERS``.
        (ValueError subclasses Exception, so callers catching the old bare
        Exception still work.)
    """
    # Membership test directly on the dict; .keys() was redundant.
    if sorter.sorter_name not in KNOWN_SORTERS:
        raise ValueError(
            f'Sorter {sorter.sorter_name} was requested but is not recognized.'
        )
    sort_fn = KNOWN_SORTERS[sorter.sorter_name]

    # download=True fetches the recording data locally before the job is built.
    base_recording = sv.LabboxEphysRecordingExtractor(recording.recording_uri,
                                                      download=True)
    params = {'recording_object': base_recording.object()}
    return hi.Job(sort_fn, params)
# Code example #3
def test_sorting(sorter_func):
    """Smoke-test *sorter_func* against the paired_kampff 2014_11_25_Pair_3_0 dataset.

    Downloads the recording, prints its basic stats and the first ground-truth
    unit's event count, runs the sorter inside a containerized hither job using
    a 4-worker parallel job handler, and finally prints the event count of the
    first sorted unit.
    """
    import sortingview as sv

    recording_name = 'paired_kampff/2014_11_25_Pair_3_0'
    recording_uri = 'sha1://a205f87cef8b7f86df7a09cddbc79a1fbe5df60f/2014_11_25_Pair_3_0.json'
    sorting_uri = 'sha1://c656add63d85a17840980084a1ff1cdc662a2cd5/2014_11_25_Pair_3_0.firings_true.json'

    recording = sv.LabboxEphysRecordingExtractor(recording_uri, download=True)
    sorting_true = sv.LabboxEphysSortingExtractor(sorting_uri)

    # Describe the recording we are about to sort.
    chan_ids = recording.get_channel_ids()
    freq = recording.get_sampling_frequency()
    n_frames = recording.get_num_frames()
    print(f'{recording_name}')
    print(
        f'Recording has {len(chan_ids)} channels and {n_frames} timepoints (samplerate: {freq})'
    )

    # Report the ground-truth sorting's first unit as a sanity check.
    true_units = sorting_true.get_unit_ids()
    true_train = sorting_true.get_unit_spike_train(unit_id=true_units[0])
    print(f'Unit {true_units[0]} has {len(true_train)} events')

    jh = hi.ParallelJobHandler(num_workers=4)
    # jh = hi.SlurmJobHandler(num_jobs_per_allocation=4, max_simultaneous_allocations=4, srun_command='')
    log = hi.Log()
    with hi.Config(use_container=True,
                   job_handler=jh,
                   log=log,
                   show_console=True):
        sort_job = hi.Job(sorter_func,
                          {'recording_object': recording.object()})
        sorting_object = sort_job.wait().return_value
        sorting = sv.LabboxEphysSortingExtractor(sorting_object)

    # Report the sorter's output for its first unit.
    found_units = sorting.get_unit_ids()
    found_train = sorting.get_unit_spike_train(unit_id=found_units[0])
    print(f'Unit {found_units[0]} has {len(found_train)} events')
# Code example #4
def main():
    """Drive the full sorting pipeline: load studies, sort, and post results.

    Builds the sorting matrix from the configured study/sorter spec files,
    drops entries already present in the workspace, runs the sorting loop
    under the configured hither context, then queues one (uncached,
    locally-handled) job per sorting to post its result to the workspace.
    Cleanup of the hither configuration always runs, even on failure.
    """
    from typing import List  # local import: only needed for the annotation below

    (params, std_args) = init_configuration()
    study_sets = load_study_records(params.study_source_file)
    study_matrix = parse_sorters(params.sorter_spec_file,
                                 list(study_sets.keys()))
    sorting_matrix = populate_sorting_matrix(study_matrix, study_sets)
    sorting_matrix = remove_preexisting_records(sorting_matrix,
                                                params.workspace_uri)
    hither_config = extract_hither_config(std_args)
    # Fixed annotation: this is a list of jobs, not a single hi.Job.
    jobs: List[hi.Job] = []

    try:
        with hi.Config(**hither_config):
            sortings = list(sorting_loop(sorting_matrix))
            # Post-result jobs run locally and uncached, regardless of the
            # outer handler/cache configuration.
            with hi.Config(job_handler=None, job_cache=None):
                for sorting in sortings:
                    p = {
                        'sorting_entry': sorting,
                        'workspace_uri': params.workspace_uri
                    }
                    jobs.append(hi.Job(hi_post_result_to_workspace, p))
        hi.wait(None)
    finally:
        call_cleanup(hither_config)
# Code example #5
def test_sorting(sorter_func,
                 *,
                 show_console=True,
                 job_handler: Union[None, hi.JobHandler] = None):
    """Smoke-test *sorter_func* on the paired_kampff 2014_11_25_Pair_3_0 dataset.

    Keyword-only options forward to the hither configuration: *show_console*
    toggles job console output, *job_handler* selects where jobs execute
    (None = default). Prints recording stats, the first ground-truth unit's
    event count, and the first sorted unit's event count.
    """
    import sortingview as sv

    recording_name = 'paired_kampff/2014_11_25_Pair_3_0'
    recording_uri = 'sha1://a205f87cef8b7f86df7a09cddbc79a1fbe5df60f/2014_11_25_Pair_3_0.json'
    sorting_uri = 'sha1://c656add63d85a17840980084a1ff1cdc662a2cd5/2014_11_25_Pair_3_0.firings_true.json'

    recording = sv.LabboxEphysRecordingExtractor(recording_uri, download=True)
    sorting_true = sv.LabboxEphysSortingExtractor(sorting_uri)

    # Describe the recording we are about to sort.
    chan_ids = recording.get_channel_ids()
    freq = recording.get_sampling_frequency()
    n_frames = recording.get_num_frames()
    print(f'{recording_name}')
    print(
        f'Recording has {len(chan_ids)} channels and {n_frames} timepoints (samplerate: {freq})'
    )

    # Report the ground-truth sorting's first unit as a sanity check.
    true_units = sorting_true.get_unit_ids()
    true_train = sorting_true.get_unit_spike_train(unit_id=true_units[0])
    print(f'Unit {true_units[0]} has {len(true_train)} events')

    with hi.Config(use_container=True,
                   show_console=show_console,
                   job_handler=job_handler):
        sort_job = hi.Job(sorter_func,
                          {'recording_object': recording.object()})
        sorting_object = sort_job.wait().return_value
        sorting = sv.LabboxEphysSortingExtractor(sorting_object)

    # Report the sorter's output for its first unit.
    found_units = sorting.get_unit_ids()
    found_train = sorting.get_unit_spike_train(unit_id=found_units[0])
    print(f'Unit {found_units[0]} has {len(found_train)} events')
# Code example #6
def run_sorter_docker(sorter_name,
                      recording,
                      output_folder,
                      delete_output_folder=False,
                      grouping_property=None,
                      parallel=False,
                      verbose=False,
                      raise_error=True,
                      n_jobs=-1,
                      joblib_backend='loky',
                      use_docker=True,
                      container=None,
                      **params):
    """Run a spike sorter either inside docker (via a hither job) or directly.

    When ``use_docker`` is True, the recording dict is rewritten so its file
    paths are relative to the container's /input folder, a hither job runs
    ``run_sorter_docker_with_container``, and the result is read back from
    ``output_folder/sorting_docker.npz``. Otherwise the call is forwarded
    verbatim to ``ss.run_sorter``.

    NOTE(review): ``container`` is currently unused — the default-image lookup
    below is commented out, so the docker path relies entirely on whatever
    ``run_sorter_docker_with_container`` does internally. Confirm intent.

    NOTE(review): in the docker branch, ``delete_output_folder`` is hard-coded
    to False in the job kwargs, so the caller's value only takes effect in the
    non-docker branch — confirm this asymmetry is deliberate.

    Returns a sorting extractor with the sorter's output.
    """
    if use_docker:
        # if container is None:
        #     assert sorter_name in default_docker_images, f"Default docker image for {sorter_name} not found"
        #     docker_image = default_docker_images[sorter_name]
        #
        # print(f"Running in docker image {docker_image.get_name()}")
        output_folder = Path(output_folder).absolute()
        output_folder.mkdir(exist_ok=True, parents=True)

        # dump recording with relative file paths to docker container /input folder
        dump_dict_container, input_directory = modify_input_folder(
            recording.dump_to_dict(), '/input')

        # use_container=False here: docker execution is handled by the job
        # function itself, not by hither's own container mechanism.
        with hither.Config(use_container=False, show_console=True):
            kwargs = dict(recording_dict=dump_dict_container,
                          sorter_name=sorter_name,
                          output_folder=str(output_folder),
                          delete_output_folder=False,
                          grouping_property=grouping_property,
                          parallel=parallel,
                          verbose=verbose,
                          raise_error=raise_error,
                          n_jobs=n_jobs,
                          joblib_backend=joblib_backend)
            # Sorter-specific params first, then the directory mappings
            # (applied last so they cannot be overridden by **params).
            kwargs.update(params)
            kwargs.update({
                'input_directory': str(input_directory),
                'output_directory': str(output_folder)
            })

            # Block until the containerized sort completes.
            sorting_job = hither.Job(run_sorter_docker_with_container, kwargs)
            sorting_job.wait()
        sorting = se.NpzSortingExtractor(output_folder / "sorting_docker.npz")
    else:
        # standard call
        sorting = ss.run_sorter(sorter_name,
                                recording,
                                output_folder=output_folder,
                                delete_output_folder=delete_output_folder,
                                grouping_property=grouping_property,
                                parallel=parallel,
                                verbose=verbose,
                                raise_error=raise_error,
                                n_jobs=n_jobs,
                                joblib_backend=joblib_backend,
                                **params)

    return sorting