def createjob_get_similar_units(labbox, recording_object, sorting_object):
    """Dispatch a hither job computing unit-similarity data for a sorting.

    Prepares the snippets HDF5 for the recording/sorting pair, then runs
    get_similar_units on it under the module's standard hither configuration.
    Returns the result of get_similar_units.run(...).
    """
    from labbox_ephys import prepare_snippets_h5
    jh = labbox.get_job_handler('partition3')
    # Consistency fix: the other helpers in this module use get_job_cache();
    # this one called get_default_job_cache().
    jc = labbox.get_job_cache()
    with hi.Config(
        job_cache=jc,
        job_handler=jh,
        # Bug fix: original passed container=jh.is_remote — the uncalled bound
        # method (always truthy) under the wrong keyword. The intact helpers
        # in this module use use_container=jh.is_remote().
        use_container=jh.is_remote()
    ):
        snippets_h5 = prepare_snippets_h5.run(
            recording_object=recording_object,
            sorting_object=sorting_object
        )
        return get_similar_units.run(snippets_h5=snippets_h5)
def preload_extract_snippets(labbox, recording_object, sorting_object):
    """Precompute the snippets HDF5 for a recording/sorting pair.

    Runs prepare_snippets_h5 under the module's standard hither configuration
    so the result lands in the job cache for downstream views, and returns it.
    """
    from labbox_ephys import prepare_snippets_h5
    jh = labbox.get_job_handler('partition2')
    jc = labbox.get_job_cache()
    with hi.Config(
        job_cache=jc,
        job_handler=jh,
        # Bug fix: original passed container=jh.is_remote — the uncalled bound
        # method (always truthy) under the wrong keyword; the intended form,
        # used by the intact helpers here, is use_container=jh.is_remote().
        use_container=jh.is_remote()
    ):
        snippets_h5 = prepare_snippets_h5.run(
            recording_object=recording_object,
            sorting_object=sorting_object
        )
        return snippets_h5
def createjob_fetch_pca_features(labbox, recording_object, sorting_object, unit_ids):
    """Dispatch a hither job computing PCA features for the given unit ids.

    Prepares the snippets HDF5 for the recording/sorting pair, then runs
    fetch_pca_features on it. Returns the result of fetch_pca_features.run(...).
    """
    # Bug fix: prepare_snippets_h5 was referenced without the local import
    # every sibling helper performs.
    from labbox_ephys import prepare_snippets_h5
    jh = labbox.get_job_handler('partition2')
    # Consistency fix: get_job_cache(), as used by the other helpers
    # (was get_default_job_cache()).
    jc = labbox.get_job_cache()
    with hi.Config(
        job_cache=jc,
        job_handler=jh,
        # Bug fix: was container=jh.is_remote (uncalled bound method under the
        # wrong keyword); use_container=jh.is_remote() matches the intact helpers.
        use_container=jh.is_remote()
    ):
        snippets_h5 = prepare_snippets_h5.run(
            recording_object=recording_object,
            sorting_object=sorting_object
        )
        return fetch_pca_features.run(snippets_h5=snippets_h5, unit_ids=unit_ids)
def createjob_fetch_spike_amplitudes(labbox, recording_object, sorting_object, unit_id):
    """Dispatch a hither job fetching spike amplitudes for one unit.

    Prepares the snippets HDF5 for the recording/sorting pair, then runs
    fetch_spike_amplitudes on it. Returns the result of
    fetch_spike_amplitudes.run(...).
    """
    from labbox_ephys import prepare_snippets_h5
    jh = labbox.get_job_handler('partition1')
    jc = labbox.get_job_cache()
    with hi.Config(
        job_cache=jc,
        job_handler=jh,
        # Bug fix: was container=jh.is_remote (uncalled bound method under the
        # wrong keyword); use_container=jh.is_remote() matches the intact helpers.
        use_container=jh.is_remote()
    ):
        snippets_h5 = prepare_snippets_h5.run(
            recording_object=recording_object,
            sorting_object=sorting_object
        )
        return fetch_spike_amplitudes.run(snippets_h5=snippets_h5, unit_id=unit_id)
def createjob_get_unit_snrs(labbox, sorting_object, recording_object, configuration=None):
    """Dispatch a hither job computing per-unit SNR values.

    Prepares the snippets HDF5 for the recording/sorting pair, then runs
    get_unit_snrs on it. Returns the result of get_unit_snrs.run(...).

    configuration is accepted for interface compatibility with callers but
    is not used by this helper.
    """
    # Bug fix: prepare_snippets_h5 was referenced without the local import
    # every sibling helper performs.
    from labbox_ephys import prepare_snippets_h5
    # Fix mutable-default anti-pattern: default was `configuration={}`.
    if configuration is None:
        configuration = {}
    jh = labbox.get_job_handler('partition1')
    jc = labbox.get_job_cache()
    with hi.Config(
        job_cache=jc,
        job_handler=jh,
        use_container=jh.is_remote()
    ):
        snippets_h5 = prepare_snippets_h5.run(
            recording_object=recording_object,
            sorting_object=sorting_object
        )
        return get_unit_snrs.run(snippets_h5=snippets_h5)
def createjob_fetch_average_waveform_plot_data(labbox, recording_object, sorting_object, unit_id):
    """Dispatch a hither job producing average-waveform plot data for one unit.

    Prepares the snippets HDF5 for the recording/sorting pair, then runs
    fetch_average_waveform_plot_data on it. Returns the result of
    fetch_average_waveform_plot_data.run(...).
    """
    from labbox_ephys import prepare_snippets_h5
    jh = labbox.get_job_handler('partition2')
    # Consistency fix: get_job_cache(), as used by the other helpers
    # (was get_default_job_cache()).
    jc = labbox.get_job_cache()
    with hi.Config(
        job_cache=jc,
        job_handler=jh,
        # Bug fix: was container=jh.is_remote (uncalled bound method under the
        # wrong keyword); use_container=jh.is_remote() matches the intact helpers.
        use_container=jh.is_remote()
    ):
        snippets_h5 = prepare_snippets_h5.run(
            recording_object=recording_object,
            sorting_object=sorting_object
        )
        return fetch_average_waveform_plot_data.run(snippets_h5=snippets_h5, unit_id=unit_id)
def createjob_individual_cluster_features(labbox, recording_object, sorting_object, unit_id):
    """Dispatch a hither job computing individual-cluster features for one unit.

    Prepares the snippets HDF5 for the recording/sorting pair and hands it to
    individual_cluster_features under this module's standard hither
    configuration (partition1 handler, job cache, container when remote).
    Returns the result of individual_cluster_features.run(...).
    """
    from labbox_ephys import prepare_snippets_h5
    handler = labbox.get_job_handler('partition1')
    cache = labbox.get_job_cache()
    hither_config = dict(
        job_cache=cache,
        job_handler=handler,
        use_container=handler.is_remote()
    )
    with hi.Config(**hither_config):
        prepared_h5 = prepare_snippets_h5.run(
            recording_object=recording_object,
            sorting_object=sorting_object
        )
        return individual_cluster_features.run(
            snippets_h5=prepared_h5,
            unit_id=unit_id
        )
def createjob_fetch_spike_waveforms(labbox, recording_object, sorting_object, unit_ids, spike_indices):
    """Dispatch a hither job fetching waveforms for specific spikes of given units.

    Prepares the snippets HDF5 for the recording/sorting pair, then runs
    fetch_spike_waveforms on it. Returns the result of
    fetch_spike_waveforms.run(...).
    """
    # Bug fix: prepare_snippets_h5 was referenced without the local import
    # every sibling helper performs.
    from labbox_ephys import prepare_snippets_h5
    jh = labbox.get_job_handler('partition1')
    jc = labbox.get_job_cache()
    with hi.Config(
        job_cache=jc,
        job_handler=jh,
        # Bug fix: was container=jh.is_remote (uncalled bound method under the
        # wrong keyword); use_container=jh.is_remote() matches the intact helpers.
        use_container=jh.is_remote()
    ):
        snippets_h5 = prepare_snippets_h5.run(
            recording_object=recording_object,
            sorting_object=sorting_object
        )
        return fetch_spike_waveforms.run(
            snippets_h5=snippets_h5,
            unit_ids=unit_ids,
            spike_indices=spike_indices
        )
def createjob_fetch_average_waveform_2(labbox: LabboxContext, recording_object, sorting_object, unit_id):
    """Dispatch a hither job computing the average waveform for one unit.

    Prepares the snippets HDF5 for the recording/sorting pair and passes it to
    fetch_average_waveform_2 under this module's standard hither configuration
    (partition1 handler, job cache, container when remote). Returns the result
    of fetch_average_waveform_2.run(...).
    """
    from labbox_ephys import prepare_snippets_h5
    job_cache = labbox.get_job_cache()
    job_handler = labbox.get_job_handler('partition1')
    run_in_container = job_handler.is_remote()
    with hi.Config(job_cache=job_cache, job_handler=job_handler, use_container=run_in_container):
        prepared_h5 = prepare_snippets_h5.run(
            sorting_object=sorting_object,
            recording_object=recording_object
        )
        return fetch_average_waveform_2.run(unit_id=unit_id, snippets_h5=prepared_h5)
def createjob_get_peak_channels(labbox, sorting_object, recording_object, configuration=None):
    """Dispatch a hither job computing the peak channel for each unit.

    Prepares the snippets HDF5 for the recording/sorting pair, then runs
    get_peak_channels on it. Returns the result of get_peak_channels.run(...).

    configuration is accepted for interface compatibility with callers but
    is not used by this helper.
    """
    # Bug fix: prepare_snippets_h5 was referenced without the local import
    # every sibling helper performs.
    from labbox_ephys import prepare_snippets_h5
    # Fix mutable-default anti-pattern: default was `configuration={}`.
    if configuration is None:
        configuration = {}
    jh = labbox.get_job_handler('partition2')
    # Consistency fix: get_job_cache(), as used by the other helpers
    # (was get_default_job_cache()).
    jc = labbox.get_job_cache()
    with hi.Config(
        job_cache=jc,
        job_handler=jh,
        # Bug fix: was container=jh.is_remote (uncalled bound method under the
        # wrong keyword); use_container=jh.is_remote() matches the intact helpers.
        use_container=jh.is_remote()
    ):
        snippets_h5 = prepare_snippets_h5.run(
            recording_object=recording_object,
            sorting_object=sorting_object
        )
        return get_peak_channels.run(snippets_h5=snippets_h5)
def createjob_get_sorting_unit_snippets(labbox, recording_object, sorting_object, unit_id, time_range, max_num_snippets):
    """Dispatch a hither job fetching snippets for one unit within a time range.

    Prepares the snippets HDF5 for the recording/sorting pair, then runs
    get_sorting_unit_snippets on it with the requested unit, time range, and
    snippet cap. Returns the result of get_sorting_unit_snippets.run(...).
    """
    from labbox_ephys import prepare_snippets_h5
    jh = labbox.get_job_handler('partition1')
    jc = labbox.get_job_cache()
    with hi.Config(
        job_cache=jc,
        job_handler=jh,
        # Bug fix: was container=jh.is_remote (uncalled bound method under the
        # wrong keyword); use_container=jh.is_remote() matches the intact helpers.
        use_container=jh.is_remote()
    ):
        snippets_h5 = prepare_snippets_h5.run(
            recording_object=recording_object,
            sorting_object=sorting_object
        )
        return get_sorting_unit_snippets.run(
            snippets_h5=snippets_h5,
            unit_id=unit_id,
            time_range=time_range,
            max_num_snippets=max_num_snippets
        )