def _run(self, recording, output_folder):
    """Build a catalogue and run the Peeler (template matching) for every
    channel group of the exported recording.

    Parameters
    ----------
    recording : RecordingExtractor
        Recording being sorted; only queried for its channel count here.
    output_folder : str or Path
        Tridesclous DataIO working directory.
    """
    num_channels = recording.get_num_channels()

    # With many channels, switch the preprocessor to OpenCL and use sparse
    # templates when pyopencl is installed; otherwise warn and stay dense.
    use_sparse_template = False
    use_opencl_with_sparse = False
    if num_channels > 64:  # this limit depend on the platform of course
        if tdc.cltools.HAVE_PYOPENCL:
            # force opencl
            self.params['fullchain_kargs']['preprocessor']['signalpreprocessor_engine'] = 'opencl'
            use_sparse_template = True
            use_opencl_with_sparse = True
        else:
            print('OpenCL is not available processing will be slow, try install it')

    tdc_dataio = tdc.DataIO(dirname=str(output_folder))

    # make catalogue
    nested_params = make_nested_tdc_params(**self.params)
    for chan_grp in list(tdc_dataio.channel_groups.keys()):
        constructor = tdc.CatalogueConstructor(dataio=tdc_dataio, chan_grp=chan_grp)
        tdc.apply_all_catalogue_steps(constructor, verbose=self.debug, **nested_params)
        if self.debug:
            print(constructor)
        constructor.make_catalogue_for_peeler()

        # apply Peeler (template matching)
        initial_catalogue = tdc_dataio.load_catalogue(chan_grp=chan_grp)
        peeler = tdc.Peeler(tdc_dataio)
        peeler.change_params(
            catalogue=initial_catalogue,
            use_sparse_template=use_sparse_template,
            sparse_threshold_mad=1.5,
            use_opencl_with_sparse=use_opencl_with_sparse,
        )
        peeler.run(duration=None, progressbar=self.debug)
def _run(self, recording, output_folder):
    """Run tridesclous: build a catalogue per channel group, optionally let
    the user clean it in a GUI, then apply the Peeler (template matching).

    Parameters
    ----------
    recording : RecordingExtractor
        Recording to sort; re-recovered from its dumped form first.
    output_folder : str or Path
        Tridesclous DataIO working directory.
    """
    recording = recover_recording(recording)
    tdc_dataio = tdc.DataIO(dirname=str(output_folder))

    params = dict(self.params)
    # These keys drive the earlier recording-export step only and are not
    # understood by make_nested_tdc_params.
    del params["chunk_mb"], params["n_jobs_bin"]
    clean_catalogue_gui = params.pop('clean_catalogue_gui')

    # make catalogue
    chan_grps = list(tdc_dataio.channel_groups.keys())
    for chan_grp in chan_grps:
        # parameters can change depending the group
        catalogue_nested_params = make_nested_tdc_params(tdc_dataio, chan_grp, **params)
        if self.verbose:
            print('catalogue_nested_params')
            pprint(catalogue_nested_params)

        peeler_params = tdc.get_auto_params_for_peelers(tdc_dataio, chan_grp)
        if self.verbose:
            print('peeler_params')
            pprint(peeler_params)

        cc = tdc.CatalogueConstructor(dataio=tdc_dataio, chan_grp=chan_grp)
        tdc.apply_all_catalogue_steps(cc, catalogue_nested_params, verbose=self.verbose)

        if clean_catalogue_gui:
            # Optional manual curation of the catalogue in a Qt window
            # (blocks until the window is closed).
            import pyqtgraph as pg
            app = pg.mkQApp()
            win = tdc.CatalogueWindow(cc)
            win.show()
            app.exec_()

        if self.verbose:
            print(cc)

        # NOTE(review): distutils is deprecated since Python 3.10; consider
        # packaging.version if that dependency is already available.
        if distutils.version.LooseVersion(tdc.__version__) < '1.6.0':
            print('You should upgrade tridesclous')

        t0 = time.perf_counter()
        cc.make_catalogue_for_peeler()
        if self.verbose:
            t1 = time.perf_counter()
            print('make_catalogue_for_peeler', t1 - t0)

        # apply Peeler (template matching)
        initial_catalogue = tdc_dataio.load_catalogue(chan_grp=chan_grp)
        peeler = tdc.Peeler(tdc_dataio)
        peeler.change_params(catalogue=initial_catalogue, **peeler_params)

        t0 = time.perf_counter()
        peeler.run(duration=None, progressbar=False)
        if self.verbose:
            t1 = time.perf_counter()
            # BUGFIX: timing label previously read 'peeler.tun'.
            print('peeler.run', t1 - t0)
def run_all_catalogues():
    """Build a tridesclous catalogue for every recording group listed in
    the `file_list.xlsx` spreadsheet under `basedir`."""
    file_table = pd.read_excel(basedir + 'file_list.xlsx')
    for group_name in np.unique(file_table['group_name']):
        print('*******')
        print(group_name)

        dataio = tdc.DataIO(dirname=tdc_workdir + group_name)
        print(dataio)

        for chan_grp in dataio.channel_groups.keys():
            constructor = tdc.CatalogueConstructor(dataio=dataio, chan_grp=chan_grp)
            print(constructor)

            # Fixed processing chain: 300 s of signal, 400-5000 Hz band-pass,
            # negative peaks at 4.5 MAD, 20k random waveforms.
            fullchain_kargs = {
                'duration': 300.,
                'preprocessor': {
                    'highpass_freq': 400.,
                    'lowpass_freq': 5000.,
                    'smooth_size': 0,
                    'chunksize': 1024,
                    'lostfront_chunksize': 128,
                    'signalpreprocessor_engine': 'numpy',
                },
                'peak_detector': {
                    'peakdetector_engine': 'numpy',
                    'peak_sign': '-',
                    'relative_threshold': 4.5,
                    'peak_span': 0.0002,
                },
                'noise_snippet': {
                    'nb_snippet': 300,
                },
                'extract_waveforms': {
                    'n_left': -20,
                    'n_right': 30,
                    'mode': 'rand',
                    'nb_max': 20000,
                    'align_waveform': False,
                },
                'clean_waveforms': {
                    'alien_value_threshold': 25.,
                },
            }

            # Global PCA features, then the default 'sawchaincut' clustering.
            tdc.apply_all_catalogue_steps(constructor, fullchain_kargs,
                                          'global_pca', {},
                                          'sawchaincut', {})
            print(constructor)
def _run(self, recording, output_folder):
    """Build a catalogue per channel group, optionally curate it in a GUI,
    then apply the Peeler (template matching).

    Parameters
    ----------
    recording : RecordingExtractor
        Recording being sorted; only queried for its channel count here.
    output_folder : str or Path
        Tridesclous DataIO working directory.
    """
    nb_chan = recording.get_num_channels()
    tdc_dataio = tdc.DataIO(dirname=str(output_folder))

    params = dict(self.params)
    clean_catalogue_gui = params.pop('clean_catalogue_gui')

    # make catalogue
    chan_grps = list(tdc_dataio.channel_groups.keys())
    for chan_grp in chan_grps:
        # parameters can change depending the group
        catalogue_nested_params = make_nested_tdc_params(tdc_dataio, chan_grp, **params)
        if self.verbose:
            print('catalogue_nested_params')
            pprint(catalogue_nested_params)

        peeler_params = tdc.get_auto_params_for_peelers(tdc_dataio, chan_grp)
        if self.verbose:
            print('peeler_params')
            pprint(peeler_params)

        # With many channels the peeler is only fast with sparse templates;
        # warn when the auto params could not enable them.
        # (Removed two locals that were assigned False here and never read.)
        if nb_chan > 64 and not peeler_params['use_sparse_template']:
            print('OpenCL is not available processing will be slow, try install it')

        cc = tdc.CatalogueConstructor(dataio=tdc_dataio, chan_grp=chan_grp)
        tdc.apply_all_catalogue_steps(cc, catalogue_nested_params, verbose=self.verbose)

        if clean_catalogue_gui:
            # Optional manual curation of the catalogue in a Qt window
            # (blocks until the window is closed).
            import pyqtgraph as pg
            app = pg.mkQApp()
            win = tdc.CatalogueWindow(cc)
            win.show()
            app.exec_()

        if self.verbose:
            print(cc)

        t0 = time.perf_counter()
        cc.make_catalogue_for_peeler()
        if self.verbose:
            t1 = time.perf_counter()
            print('make_catalogue_for_peeler', t1 - t0)

        # apply Peeler (template matching)
        initial_catalogue = tdc_dataio.load_catalogue(chan_grp=chan_grp)
        peeler = tdc.Peeler(tdc_dataio)
        peeler.change_params(catalogue=initial_catalogue, **peeler_params)

        t0 = time.perf_counter()
        peeler.run(duration=None, progressbar=False)
        if self.verbose:
            t1 = time.perf_counter()
            # BUGFIX: timing label previously read 'peeler.tun'.
            print('peeler.run', t1 - t0)
def setup_catalogue():
    """Create a fresh 'test_peeler' working directory holding a catalogue
    built from the 'olfactory_bulb' test dataset (channels 5-9)."""
    if os.path.exists('test_peeler'):
        shutil.rmtree('test_peeler')

    dataio = DataIO(dirname='test_peeler')
    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)
    dataio.add_one_channel_group(channels=[5, 6, 7, 8, 9])

    cc = CatalogueConstructor(dataio=dataio)

    # 60 s of signal, 300 Hz high-pass, negative peaks at 7 MAD.
    fullchain_kargs = {
        'duration': 60.,
        'preprocessor': {
            'highpass_freq': 300.,
            'chunksize': 1024,
            'lostfront_chunksize': 100,
        },
        'peak_detector': {
            'peak_sign': '-',
            'relative_threshold': 7.,
            'peak_span': 0.0005,
        },
        'extract_waveforms': {
            'n_left': -25,
            'n_right': 40,
            'nb_max': 10000,
        },
        'clean_waveforms': {
            'alien_value_threshold': 60.,
        },
        'noise_snippet': {
            'nb_snippet': 300,
        },
    }

    # 12-component global PCA followed by 12-cluster k-means.
    apply_all_catalogue_steps(cc, fullchain_kargs,
                              'global_pca', {'n_components': 12},
                              'kmeans', {'n_clusters': 12},
                              verbose=True)

    cc.trash_small_cluster()
    cc.make_catalogue_for_peeler()
def _run_from_folder(cls, output_folder, params, verbose):
    """Sort the data already exported to `output_folder` with tridesclous.

    Parameters
    ----------
    output_folder : str or Path
        Tridesclous DataIO working directory.
    params : dict
        Sorter parameters; copied so the caller's dict is left untouched.
    verbose : bool
        Print parameter dumps, catalogue summaries and peeler timing.
    """
    import tridesclous as tdc

    tdc_dataio = tdc.DataIO(dirname=str(output_folder))

    params = params.copy()

    # make catalogue
    chan_grps = list(tdc_dataio.channel_groups.keys())
    for chan_grp in chan_grps:
        # parameters can change depending the group
        catalogue_nested_params = make_nested_tdc_params(tdc_dataio, chan_grp, **params)
        if verbose:
            print('catalogue_nested_params')
            pprint(catalogue_nested_params)

        peeler_params = tdc.get_auto_params_for_peelers(tdc_dataio, chan_grp)
        if verbose:
            print('peeler_params')
            pprint(peeler_params)

        cc = tdc.CatalogueConstructor(dataio=tdc_dataio, chan_grp=chan_grp)
        tdc.apply_all_catalogue_steps(cc, catalogue_nested_params, verbose=verbose)

        if verbose:
            print(cc)

        # apply Peeler (template matching)
        initial_catalogue = tdc_dataio.load_catalogue(chan_grp=chan_grp)
        peeler = tdc.Peeler(tdc_dataio)
        peeler.change_params(catalogue=initial_catalogue, **peeler_params)

        t0 = time.perf_counter()
        peeler.run(duration=None, progressbar=False)
        if verbose:
            t1 = time.perf_counter()
            # BUGFIX: timing label previously read 'peeler.tun'.
            print('peeler.run', t1 - t0)
def _run(self, recording, output_folder):
    """Build a catalogue and run the Peeler for each channel group.

    Parameters
    ----------
    recording : RecordingExtractor
        Recording being sorted; only queried for its channel count here.
    output_folder : str or Path
        Tridesclous DataIO working directory.
    """
    nb_chan = recording.get_num_channels()
    tdc_dataio = tdc.DataIO(dirname=str(output_folder))

    # make catalogue
    chan_grps = list(tdc_dataio.channel_groups.keys())
    for chan_grp in chan_grps:
        # parameters can change depending the group
        catalogue_nested_params = make_nested_tdc_params(tdc_dataio, chan_grp, **self.params)
        peeler_params = tdc.get_auto_params_for_peelers(tdc_dataio, chan_grp)

        # With many channels the peeler is only fast with sparse templates;
        # warn when the auto params could not enable them.
        # (Removed two locals that were assigned False here and never read.)
        if nb_chan > 64 and not peeler_params['use_sparse_template']:
            print(
                'OpenCL is not available processing will be slow, try install it'
            )

        cc = tdc.CatalogueConstructor(dataio=tdc_dataio, chan_grp=chan_grp)
        tdc.apply_all_catalogue_steps(
            cc,
            catalogue_nested_params,
            verbose=self.debug,
        )
        if self.debug:
            print(cc)
        cc.make_catalogue_for_peeler()

        # apply Peeler (template matching)
        initial_catalogue = tdc_dataio.load_catalogue(chan_grp=chan_grp)
        peeler = tdc.Peeler(tdc_dataio)
        peeler.change_params(catalogue=initial_catalogue, **peeler_params)
        peeler.run(duration=None, progressbar=self.debug)
def tdc_helper(*, tmpdir, params, recording):
    """Run a full tridesclous pass (catalogue + peeler) inside `tmpdir`
    and return the resulting sorting extractor.

    Templates stay dense on the CPU: the sparse/OpenCL acceleration path
    is deliberately disabled here.
    """
    import tridesclous as tdc

    dataio = tdc.DataIO(dirname=str(tmpdir))

    # make catalogue
    for grp in list(dataio.channel_groups.keys()):
        constructor = tdc.CatalogueConstructor(dataio=dataio, chan_grp=grp)
        tdc.apply_all_catalogue_steps(constructor, verbose=True, **params)
        constructor.make_catalogue_for_peeler()

        # apply Peeler (template matching)
        catalogue = dataio.load_catalogue(chan_grp=grp)
        peeler = tdc.Peeler(dataio)
        peeler.change_params(
            catalogue=catalogue,
            use_sparse_template=False,
            sparse_threshold_mad=1.5,
            use_opencl_with_sparse=False,
        )
        peeler.run(duration=None, progressbar=False)

    return se.TridesclousSortingExtractor(tmpdir)