import os
import shutil
import time
import pickle
from pathlib import Path

import spikeextractors as se
# run_sorters is assumed to be importable from the spikesorters package here;
# older releases exposed it through spiketoolkit.sorters instead.
from spikesorters import run_sorters


def test_run_sorters_multiprocessing():
    recording_dict = {}
    for i in range(8):
        rec, _ = se.example_datasets.toy_example(num_channels=8, duration=30, seed=0)
        recording_dict['rec_' + str(i)] = rec

    # sorter_list = ['mountainsort4', 'klusta', 'tridesclous']
    sorter_list = ['tridesclous', 'klusta']
    #~ sorter_list = ['tridesclous', 'herdingspikes']

    working_folder = 'test_run_sorters_mp'
    if os.path.exists(working_folder):
        shutil.rmtree(working_folder)

    # run the sorters in parallel with the multiprocessing engine (4 worker processes)
    t0 = time.perf_counter()
    run_sorters(sorter_list, recording_dict, working_folder,
                engine='multiprocessing', engine_kargs={'processes': 4})
    t1 = time.perf_counter()
    print(t1 - t0)
def test_run_sorters_with_list():
    rec0, _ = se.example_datasets.toy_example(num_channels=4, duration=30)
    rec1, _ = se.example_datasets.toy_example(num_channels=8, duration=30)
    recording_list = [rec0, rec1]

    sorter_list = ['tridesclous']

    working_folder = 'test_run_sorters_list'
    if os.path.exists(working_folder):
        shutil.rmtree(working_folder)

    run_sorters(sorter_list, recording_list, working_folder, debug=False)
def run_study_sorters(study_folder, sorter_list, sorter_params={}, mode='keep',
                      engine='loop', engine_kargs={}):
    """
    Run all sorters on all recordings.

    Wrapper on top of st.sorter.run_sorters(...)

    Parameters
    ----------
    study_folder: str
        The study folder.
    sorter_list: list of str
        List of sorter names to run.
    sorter_params: dict of dict with sorter_name as key
        Allows overwriting the default params of a sorter.
    mode: 'raise_if_exists' or 'overwrite' or 'keep'
        Behaviour when the recording/sorter subfolder already exists:
          * 'raise_if_exists' : raise an error if the subfolder exists
          * 'overwrite' : force recompute
          * 'keep' : do not compute again if the subfolder exists and the log is OK
    engine: str
        'loop' or 'multiprocessing'
    engine_kargs: dict
        Kargs specific to the launcher engine:
          * 'loop' : no kargs
          * 'multiprocessing' : {'processes' : } number of processes
    """
    study_folder = Path(study_folder)
    sorter_folders = study_folder / 'sorter_folders'

    # get_recordings / copy_sorting / collect_run_times are study-tools helpers
    # defined elsewhere in this package
    recording_dict = get_recordings(study_folder)

    run_sorters(sorter_list, recording_dict, sorter_folders, sorter_params=sorter_params,
                grouping_property=None, mode=mode, engine=engine, engine_kargs=engine_kargs,
                with_output=False)

    # results are copied so that the heavy sorter_folders can be removed
    copy_sorting(study_folder)
    collect_run_times(study_folder)
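# Example usage (a minimal sketch): run two sorters over an existing study folder
# with the multiprocessing engine. The folder path, sorter names and params below
# are hypothetical and assume the study folder has already been prepared with the
# package's study setup helpers.
def _example_run_study_sorters():
    run_study_sorters('my_study_folder',  # hypothetical, must already contain the study layout
                      sorter_list=['tridesclous', 'herdingspikes'],
                      sorter_params={'tridesclous': dict(relative_threshold=5.)},
                      mode='keep',
                      engine='multiprocessing',
                      engine_kargs={'processes': 4})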
def test_run_sorters_with_dict():
    rec0, _ = se.example_datasets.toy_example(num_channels=4, duration=30)
    rec1, _ = se.example_datasets.toy_example(num_channels=8, duration=30)
    recording_dict = {'toy_tetrode': rec0, 'toy_octotrode': rec1}

    # sorter_list = ['mountainsort4', 'klusta', 'tridesclous']
    sorter_list = ['tridesclous', 'klusta']
    #~ sorter_list = ['tridesclous', ]
    #~ sorter_list = ['tridesclous', 'herdingspikes']

    working_folder = 'test_run_sorters_dict'
    if os.path.exists(working_folder):
        shutil.rmtree(working_folder)

    # simple loop
    t0 = time.perf_counter()
    results = run_sorters(sorter_list, recording_dict, working_folder, engine=None)
    t1 = time.perf_counter()
    print(t1 - t0)
    print(results)
def test_run_sorters_with_dict():
    rec0, _ = se.example_datasets.toy_example(num_channels=4, duration=30, seed=0)
    rec1, _ = se.example_datasets.toy_example(num_channels=8, duration=30, seed=0)
    recording_dict = {'toy_tetrode': rec0, 'toy_octotrode': rec1}

    # sorter_list = ['mountainsort4', 'klusta', 'tridesclous']
    #~ sorter_list = ['tridesclous', 'klusta']
    #~ sorter_list = ['tridesclous', 'mountainsort4']
    sorter_list = ['tridesclous', 'herdingspikes']

    working_folder = 'test_run_sorters_dict'
    if os.path.exists(working_folder):
        shutil.rmtree(working_folder)

    sorter_params = {
        'tridesclous': dict(relative_threshold=5.6),
        'herdingspikes': dict(detection_threshold=20.1),
    }

    # simple loop
    t0 = time.perf_counter()
    results = run_sorters(sorter_list, recording_dict, working_folder,
                          sorter_params=sorter_params, engine=None)
    t1 = time.perf_counter()
    print(t1 - t0)
    print(results)

    # remove one result and re-run with mode='keep': only the missing output is recomputed
    shutil.rmtree(working_folder + '/toy_tetrode/tridesclous')
    results = run_sorters(sorter_list, recording_dict, working_folder,
                          engine=None, mode='keep')
def _run_sorters():
    path = 'test_TDC_vs_HS2/'
    if os.path.exists(path):
        shutil.rmtree(path)
    os.mkdir(path)

    # create several recordings/sortings and save the ground truth to disk
    rec0, gt_sorting0 = se.example_datasets.toy_example(num_channels=4, duration=30)
    rec1, gt_sorting1 = se.example_datasets.toy_example(num_channels=32, duration=30)
    pickle.dump(gt_sorting0, open(path + 'gt_sorting0', mode='wb'))
    pickle.dump(gt_sorting1, open(path + 'gt_sorting1', mode='wb'))

    # run all sorters
    recording_dict = {'toy_tetrode': rec0, 'toy_probe32': rec1}
    sorter_list = ['tridesclous', 'herdingspikes']
    #~ sorter_list = ['tridesclous', 'klusta']
    working_folder = path + 'sorter_folders'  # output folder for the sorter results
    t0 = time.perf_counter()
    run_sorters(sorter_list, recording_dict, working_folder, engine=None)
    t1 = time.perf_counter()
    print('total run time', t1 - t0)