def main():
    """Record data acquired from a remote listener into an HDF5 file.

    Builds a two-block circusort network (listener -> writer), runs it to
    completion, then tears it down. Relies on the module-level `directory`,
    `circusort` and `DEBUG` names.
    """
    # Directory layout.
    output_directory = os.path.join(directory, "output")
    log_directory = os.path.join(directory, "log")
    # Create both directories (if necessary).
    for path in (output_directory, log_directory):
        if not os.path.isdir(path):
            os.makedirs(path)

    # Everything runs on the local master host.
    master_host = '127.0.0.1'

    # Keyword arguments of the network elements.
    director_kwargs = {
        'log_path': os.path.join(log_directory, "log.txt"),
    }
    listener_kwargs = {
        'name': "listener",
        'acq_host': '192.168.0.253',
        'acq_port': 40006,
        'acq_dtype': 'uint16',
        'acq_nb_samp': 2000,
        'acq_nb_chan': 261,
        'dtype': 'float32',
        'log_level': DEBUG,
    }
    writer_kwargs = {
        'name': "writer",
        'data_path': os.path.join(output_directory, "data.h5"),
        'nb_samples': 1024,
        'sampling_rate': 20e+3,
        'log_level': DEBUG,
    }

    # Instantiate the elements of the network.
    director = circusort.create_director(host=master_host, **director_kwargs)
    manager = director.create_manager(host=master_host)
    listener = manager.create_block('listener', **listener_kwargs)
    writer = manager.create_block('writer', **writer_kwargs)

    # Initialize, wire and run the network.
    director.initialize()
    director.connect(listener.get_output('data'), [
        writer.get_input('data'),
    ])
    director.start()
    director.join()
    director.destroy()

    return
def sorting(configuration_name):
    """Create the 1st sorting subnetwork.

    Parameter:
        configuration_name: string
            The name of the configuration (i.e. context).
    """
    # Define directories.
    # NOTE(review): `directory` comes from module scope — not visible in this chunk.
    generation_directory = os.path.join(directory, "generation", configuration_name)
    sorting_directory = os.path.join(directory, "sorting", configuration_name)
    introspection_directory = os.path.join(directory, "introspection", configuration_name)
    # Load generation parameters.
    parameters = circusort.io.get_data_parameters(generation_directory)
    # Define parameters.
    host = '127.0.0.1'  # i.e. run the test locally
    dtype = parameters['general']['dtype']
    nb_channels = parameters['probe']['nb_channels']
    nb_samples = parameters['general']['buffer_width']
    sampling_rate = parameters['general']['sampling_rate']
    # Create directories (if necessary).
    if not os.path.isdir(sorting_directory):
        os.makedirs(sorting_directory)
    if not os.path.isdir(introspection_directory):
        os.makedirs(introspection_directory)
    # Define keyword arguments.
    reader_kwargs = {
        'name': "reader",
        'data_path': os.path.join(generation_directory, "data.raw"),
        'dtype': dtype,
        'nb_channels': nb_channels,
        'nb_samples': nb_samples,
        'sampling_rate': sampling_rate,
        'is_realistic': True,
        'introspection_path': introspection_directory,
    }
    filter_kwargs = {
        'name': "filter",
        'cut_off': 100.0,  # Hz
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    signal_writer_kwargs = {
        'name': "writer",
        'data_path': os.path.join(sorting_directory, "data_filtered.raw"),
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    # Define the elements of the network (reader -> filter -> writer).
    director = circusort.create_director(host=host)
    manager = director.create_manager(host=host)
    reader = manager.create_block('reader', **reader_kwargs)
    filter_ = manager.create_block('filter', **filter_kwargs)
    writer = manager.create_block('writer', **signal_writer_kwargs)
    # Initialize the elements of the network.
    director.initialize()
    # Connect the elements of the network.
    director.connect(reader.output, filter_.input)
    director.connect(filter_.output, writer.input)
    # Launch the network (blocks until the reader runs out of data).
    director.start()
    director.join()
    director.destroy()
def sorting(configuration_name):
    """Create the 1st sorting subnetwork.

    Parameter:
        configuration_name: string
            The name of the configuration (i.e. context).
    """
    # Define directories.
    # NOTE(review): `directory` comes from module scope — not visible in this chunk.
    generation_directory = os.path.join(directory, "generation", configuration_name)
    sorting_directory = os.path.join(directory, "sorting", configuration_name)
    log_directory = os.path.join(directory, "log", configuration_name)
    introspection_directory = os.path.join(directory, "introspection", configuration_name)
    # Load generation parameters.
    parameters = circusort.io.get_data_parameters(generation_directory)
    # Define parameters.
    # hosts = OrderedDict([
    #     ('master', '192.168.0.254'),
    #     ('slave_1', '192.168.0.1'),
    #     ('slave_2', '192.168.0.4'),
    #     ('slave_3', '192.168.0.7'),
    # ])
    # One master plus three slave machines for the distributed run.
    hosts = OrderedDict([
        ('master', '192.168.0.254'),
        ('slave_1', '192.168.0.1'),
        ('slave_2', '192.168.0.2'),
        ('slave_3', '192.168.0.3'),
    ])
    dtype = parameters['general']['dtype']
    nb_channels = parameters['probe']['nb_channels']
    nb_samples = parameters['general']['buffer_width']
    sampling_rate = parameters['general']['sampling_rate']
    threshold_factor = 7.0
    alignment = True
    spike_width = 5.0  # ms
    spike_jitter = 1.0  # ms
    spike_sigma = 2.75  # µV
    probe_path = os.path.join(generation_directory, "probe.prb")
    # Ground-truth templates generated alongside the synthetic data.
    precomputed_template_paths = [
        cell.template.path for cell in circusort.io.load_cells(generation_directory)
    ]
    # Create directories (if necessary).
    if not os.path.isdir(sorting_directory):
        os.makedirs(sorting_directory)
    if not os.path.isdir(log_directory):
        os.makedirs(log_directory)
    if not os.path.isdir(introspection_directory):
        os.makedirs(introspection_directory)
    # Define keyword arguments.
    director_kwarg = {
        'log_path': os.path.join(log_directory, "log.txt"),
        'log_level': INFO,
    }
    reader_kwargs = {
        'name': "reader",
        'data_path': os.path.join(generation_directory, "data.raw"),
        'dtype': dtype,
        'nb_channels': nb_channels,
        'nb_samples': nb_samples,
        'sampling_rate': sampling_rate,
        'is_realistic': True,
        'speed_factor': 1.0,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    filter_kwargs = {
        'name': "filter",
        # NOTE(review): `nb_filters` is expected at module scope — confirm.
        'degree': nb_filters,
        'cut_off': 1.0,  # Hz
        'order': 1,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    mad_kwargs = {
        'name': "mad",
        'time_constant': 10.0,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    detector_kwargs = {
        'name': "detector",
        # NOTE(review): `nb_detectors` is expected at module scope — confirm.
        'degree': nb_detectors,
        'threshold_factor': threshold_factor,
        'sampling_rate': sampling_rate,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    pca_kwargs = {
        'name': "pca",
        'spike_width': spike_width,
        'spike_jitter': spike_jitter,
        'spike_sigma': spike_sigma,
        'nb_waveforms': 2000,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    cluster_kwargs = {
        'name': "cluster",
        'threshold_factor': threshold_factor,
        'alignment': alignment,
        'sampling_rate': sampling_rate,
        'spike_width': spike_width,
        'spike_jitter': spike_jitter,
        'spike_sigma': spike_sigma,
        'nb_waveforms': 100000,
        'probe_path': probe_path,
        'two_components': False,
        'local_merges': 3,
        'introspection_path': introspection_directory,
        'log_level': INFO,
    }
    updater_bis_kwargs = {
        'name': "updater",
        'probe_path': probe_path,
        'templates_path': os.path.join(sorting_directory, "templates.h5"),
        'overlaps_path': os.path.join(sorting_directory, "overlaps.p"),
        'precomputed_template_paths': precomputed_template_paths,
        'sampling_rate': sampling_rate,
        'nb_samples': nb_samples,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    fitter_bis_kwargs = {
        'name': "fitter",
        # NOTE(review): `nb_fitters` is expected at module scope — confirm.
        'degree': nb_fitters,
        'templates_init_path': os.path.join(sorting_directory, "templates.h5"),
        'overlaps_init_path': os.path.join(sorting_directory, "overlaps.p"),
        'sampling_rate': sampling_rate,
        'discarding_eoc_from_updater': True,
        'introspection_path': introspection_directory,
        'introspection_factor': 1.0 / float(nb_fitters),
        'log_level': DEBUG,
    }
    writer_kwargs = {
        'name': "writer",
        'data_path': os.path.join(sorting_directory, "spikes.h5"),
        'sampling_rate': sampling_rate,
        'nb_samples': nb_samples,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    # Define the elements of the network (one manager per host).
    director = circusort.create_director(host=hosts['master'], **director_kwarg)
    managers = OrderedDict([(key, director.create_manager(host=host)) for key, host in iter(hosts.items())])
    reader = managers['master'].create_block('reader', **reader_kwargs)
    filter_ = managers['slave_1'].create_network('filter', **filter_kwargs)
    mad = managers['slave_1'].create_block('mad_estimator', **mad_kwargs)
    detector = managers['slave_2'].create_network('peak_detector', **detector_kwargs)
    pca = managers['slave_2'].create_block('pca', **pca_kwargs)
    cluster = managers['slave_2'].create_block('density_clustering', **cluster_kwargs)
    updater = managers['slave_2'].create_block('template_updater_bis', **updater_bis_kwargs)
    fitter = managers['slave_3'].create_network('fitter_bis', **fitter_bis_kwargs)
    writer = managers['master'].create_block('spike_writer', **writer_kwargs)
    # Initialize the elements of the network.
    director.initialize()
    # Connect the elements of the network.
    director.connect(reader.get_output('data'), [
        filter_.get_input('data'),
    ])
    director.connect_network(filter_)
    # The filtered signal feeds every downstream stage.
    director.connect(filter_.get_output('data'), [
        mad.get_input('data'),
        detector.get_input('data'),
        pca.get_input('data'),
        cluster.get_input('data'),
        fitter.get_input('data'),
    ])
    director.connect(mad.get_output('mads'), [
        detector.get_input('mads'),
        cluster.get_input('mads'),
    ])
    director.connect_network(detector)
    director.connect(detector.get_output('peaks'), [
        pca.get_input('peaks'),
        cluster.get_input('peaks'),
        fitter.get_input('peaks'),
    ])
    director.connect(pca.get_output('pcs'), [
        cluster.get_input('pcs'),
    ])
    director.connect(cluster.get_output('templates'), [
        updater.get_input('templates'),
    ])
    director.connect(updater.get_output('updater'), [
        fitter.get_input('updater'),
    ])
    director.connect_network(fitter)
    director.connect(fitter.get_output('spikes'), [
        writer.get_input('spikes'),
    ])
    # Launch the network.
    director.start()
    director.join()
    director.destroy()
help="distributed computation or not") args = parser.parse_args() if args.mode == 'local': master = '127.0.0.1' slaves = ['127.0.0.1', '127.0.0.1', '127.0.0.1'] elif args.mode == 'remote': master = '192.168.0.254' slaves = ['192.168.0.1', '192.168.0.2', '192.168.0.3'] data_path = '/tmp/output.dat' peak_path = '/tmp/peaks.dat' thres_path = '/tmp/thresholds.dat' temp_path = '/tmp/templates' director = circusort.create_director(host=master) manager = {} for computer in slaves + [master]: manager[computer] = director.create_manager(host=computer) sampling_rate = 20000 two_components = True nb_channels = 4 probe_file = generate_fake_probe(nb_channels, radius=2, prb_file='test.prb') generator = manager[master].create_block('fake_spike_generator', nb_channels=nb_channels) filter = manager[master].create_block('filter', cut_off=100) whitening = manager[master].create_block('whitening')
# Test to measure the computational efficiency of two operations in one block
# associated to one manager.
import circusort
import logging

host = '127.0.0.1'  # to run the test locally
nb_groups = 4

director = circusort.create_director(host=host)
manager = director.create_manager(host=host, log_level=logging.INFO)

# Build a noise source, dispatch its chunks into `nb_groups` parallel
# filters, then regroup the filtered chunks.
noise = manager.create_block('noise_generator')
dispatcher = manager.create_block('chunk_dispatcher', nb_groups=nb_groups)
# Fix: `xrange` does not exist in Python 3; `range` is equivalent here
# (and still works under Python 2 for this small, fully-iterated loop).
filters = [manager.create_block('filter', name='Filter %d' % i) for i in range(nb_groups)]
regrouper = manager.create_block('chunk_grouper', nb_groups=nb_groups)

manager.initialize()
manager.connect(noise.output, dispatcher.input)
for i in range(nb_groups):
    manager.connect(dispatcher.get_output('data_%d' % i), filters[i].input)
    manager.connect(filters[i].output, regrouper.get_input('data_%d' % i))
manager.start()
def sorting(configuration_name):
    """Create the 1st sorting subnetwork.

    Parameter:
        configuration_name: string
            The name of the configuration (i.e. context).
    """
    # Define directories.
    # NOTE(review): `directory` comes from module scope — not visible in this chunk.
    generation_directory = os.path.join(directory, "generation", configuration_name)
    sorting_directory = os.path.join(directory, "sorting", configuration_name)
    introspection_directory = os.path.join(directory, "introspection", configuration_name)
    log_directory = os.path.join(directory, "log", configuration_name)
    # Load generation parameters.
    parameters = circusort.io.get_data_parameters(generation_directory)
    # Define parameters.
    host = '127.0.0.1'  # i.e. run the test locally
    dtype = parameters['general']['dtype']
    nb_channels = parameters['probe']['nb_channels']
    nb_samples = parameters['general']['buffer_width']
    sampling_rate = parameters['general']['sampling_rate']
    threshold_factor = 7.0
    probe_path = os.path.join(generation_directory, "probe.prb")
    # Ground-truth templates generated alongside the synthetic data.
    precomputed_template_paths = [
        cell.template.path for cell in circusort.io.load_cells(generation_directory)
    ]
    # Create directories (if necessary).
    if not os.path.isdir(sorting_directory):
        os.makedirs(sorting_directory)
    if not os.path.isdir(introspection_directory):
        os.makedirs(introspection_directory)
    if not os.path.isdir(log_directory):
        os.makedirs(log_directory)
    # Define keyword arguments.
    director_kwargs = {
        'log_path': os.path.join(log_directory, "log.txt"),
    }
    reader_kwargs = {
        'name': "reader",
        'data_path': os.path.join(generation_directory, "data.raw"),
        'dtype': dtype,
        'nb_channels': nb_channels,
        'nb_samples': nb_samples,
        'sampling_rate': sampling_rate,
        'is_realistic': True,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    filter_kwargs = {
        'name': "filter",
        'cut_off': 1.0,  # Hz
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    # filter_writer_kwargs = {
    #     'name': "filter_writer",
    #     'data_path': os.path.join(sorting_directory, "data.raw"),
    #     'introspection_path': introspection_directory,
    #     'log_level': DEBUG,
    # }
    mad_kwargs = {
        'name': "mad",
        'time_constant': 10.0,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    # mad_writer_kwargs = {
    #     'data_path': os.path.join(sorting_directory, "mad.raw"),
    #     'introspection_path': introspection_directory,
    #     'log_level': DEBUG,
    # }
    detector_kwargs = {
        'name': "detector",
        'threshold_factor': threshold_factor,
        'sampling_rate': sampling_rate,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    # peak_writer_kwargs = {
    #     'data_path': os.path.join(sorting_directory, "peaks.h5"),
    #     'introspection_path': introspection_directory,
    #     'log_level': DEBUG,
    # }
    pca_kwargs = {
        'name': "pca",
        'nb_waveforms': 100000,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    cluster_kwargs = {
        'name': "cluster",
        'threshold_factor': threshold_factor,
        'sampling_rate': sampling_rate,
        'nb_waveforms': 100000,
        'probe_path': probe_path,
        'two_components': False,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    updater_kwargs = {
        'name': "updater",
        'probe_path': probe_path,
        'data_path': os.path.join(sorting_directory, "templates.h5"),
        'precomputed_template_paths': precomputed_template_paths,
        'sampling_rate': sampling_rate,
        'nb_samples': nb_samples,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    fitter_kwargs = {
        'name': "fitter",
        'sampling_rate': sampling_rate,
        'discarding_eoc_from_updater': True,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    writer_kwargs = {
        'name': "writer",
        'data_path': os.path.join(sorting_directory, "spikes.h5"),
        'sampling_rate': sampling_rate,
        'nb_samples': nb_samples,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    # Define the elements of the network.
    director = circusort.create_director(host=host, **director_kwargs)
    manager = director.create_manager(host=host)
    reader = manager.create_block('reader', **reader_kwargs)
    filter_ = manager.create_block('filter', **filter_kwargs)
    # filter_writer = manager.create_block('writer', **filter_writer_kwargs)
    mad = manager.create_block('mad_estimator', **mad_kwargs)
    # mad_writer = manager.create_block('writer', **mad_writer_kwargs)
    detector = manager.create_block('peak_detector', **detector_kwargs)
    # peak_writer = manager.create_block('peak_writer', **peak_writer_kwargs)
    pca = manager.create_block('pca', **pca_kwargs)
    cluster = manager.create_block('density_clustering', **cluster_kwargs)
    updater = manager.create_block('template_updater', **updater_kwargs)
    # The fitter is a sub-network (not a plain block) — hence connect_network below.
    fitter = manager.create_network('fitter', **fitter_kwargs)
    writer = manager.create_block('spike_writer', **writer_kwargs)
    # Initialize the elements of the network.
    director.initialize()
    # Connect the elements of the network.
    director.connect(reader.output, [filter_.input])
    director.connect(
        filter_.output, [
            mad.input,
            detector.get_input('data'),
            pca.get_input('data'),
            cluster.get_input('data'),
            fitter.get_input('data'),
            # filter_writer.get_input('data'),
        ])
    director.connect(
        mad.output, [
            detector.get_input('mads'),
            cluster.get_input('mads'),
            # mad_writer.get_input('data'),
        ])
    director.connect(
        detector.get_output('peaks'), [
            pca.get_input('peaks'),
            cluster.get_input('peaks'),
            fitter.get_input('peaks'),
            # peak_writer.get_input('peaks'),
        ])
    director.connect(pca.get_output('pcs'), [
        cluster.get_input('pcs'),
    ])
    director.connect(cluster.get_output('templates'), [
        updater.get_input('templates'),
    ])
    director.connect(updater.get_output('updater'), [
        fitter.get_input('updater'),
    ])
    director.connect_network(fitter)
    director.connect(fitter.get_output('spikes'), [
        writer.input,
    ])
    # Launch the network.
    director.start()
    director.join()
    director.destroy()
def detection(directory):
    """Run the peak-detection network (reader -> filter -> mad/detector -> writer).

    Parameter:
        directory: string
            Working directory; note that this parameter shadows the
            module-level `directory` used by the other functions.
    """
    # Define directories.
    generation_directory = os.path.join(directory, "generation")
    detection_directory = os.path.join(directory, "detection")
    log_directory = os.path.join(directory, "log")
    # Load generation parameters.
    parameters = circusort.io.get_data_parameters(generation_directory)
    # Define parameters.
    hosts = {
        'master': '127.0.0.1',
    }
    hosts_keys = [  # ordered
        'master',
    ]
    dtype = parameters['general']['dtype']
    nb_channels = parameters['probe']['nb_channels']
    nb_samples = parameters['general']['buffer_width']
    sampling_rate = parameters['general']['sampling_rate']
    threshold_factor = 7.0
    # Create directories (if necessary).
    if not os.path.isdir(detection_directory):
        os.makedirs(detection_directory)
    if not os.path.isdir(log_directory):
        os.makedirs(log_directory)
    # Define keyword arguments.
    director_kwargs = {
        'log_path': os.path.join(log_directory, "log.txt"),
    }
    reader_kwargs = {
        'name': "reader",
        'data_path': os.path.join(generation_directory, "data.raw"),
        'dtype': dtype,
        'nb_channels': nb_channels,
        'nb_samples': nb_samples,
        'sampling_rate': sampling_rate,
        'is_realistic': True,
        'log_level': DEBUG,
    }
    filter_kwargs = {
        'name': "filter",
        'cut_off': 1.0,  # Hz
        'log_level': DEBUG,
    }
    mad_kwargs = {
        'name': "mad",
        'time_constant': 10.0,
        'log_level': DEBUG,
    }
    detector_kwargs = {
        'name': "detector",
        'threshold_factor': threshold_factor,
        'sampling_rate': sampling_rate,
        'log_level': DEBUG,
    }
    peak_writer_kwargs = {
        'name': "peak_writer",
        'data_path': os.path.join(detection_directory, "peaks.h5"),
        'log_level': DEBUG,
    }
    # Define the elements of the network.
    director = circusort.create_director(host=hosts['master'], **director_kwargs)
    managers = {
        key: director.create_manager(host=hosts[key])
        for key in hosts_keys
    }
    reader = managers['master'].create_block('reader', **reader_kwargs)
    filter_ = managers['master'].create_block('filter', **filter_kwargs)
    mad = managers['master'].create_block('mad_estimator', **mad_kwargs)
    detector = managers['master'].create_block('peak_detector', **detector_kwargs)
    peak_writer = managers['master'].create_block('peak_writer', **peak_writer_kwargs)
    # Initialize the elements of the network.
    director.initialize()
    # Connect the elements of the network.
    director.connect(reader.output, [filter_.input])
    director.connect(filter_.output, [
        mad.input,
        detector.get_input('data'),
    ])
    director.connect(mad.output, [
        detector.get_input('mads'),
    ])
    director.connect(detector.get_output('peaks'), [
        peak_writer.get_input('peaks'),
    ])
    # Launch the network.
    director.start()
    director.join()
    director.destroy()

    return
def main():
    """Drive the configuration, generation and display steps.

    With no command-line flag all three steps run; with any of
    `--configuration`, `--generation`, `--display`, only the requested
    steps run.
    """
    # Define the working directory.
    # NOTE(review): `directory` comes from module scope — not visible in this chunk.
    if not os.path.isdir(directory):
        os.makedirs(directory)
    # Parse command line.
    parser = argparse.ArgumentParser()
    parser.add_argument('--configuration', dest='pending_configuration', action='store_true', default=None)
    parser.add_argument('--generation', dest='pending_generation', action='store_true', default=None)
    parser.add_argument('--display', dest='pending_display', action='store_true', default=None)
    args = parser.parse_args()
    # No flag at all means "run every step"; otherwise coerce None -> False.
    if args.pending_configuration is None and args.pending_generation is None and args.pending_display is None:
        args.pending_configuration = True
        args.pending_generation = True
        args.pending_display = True
    else:
        args.pending_configuration = args.pending_configuration is True
        args.pending_generation = args.pending_generation is True
        args.pending_display = args.pending_display is True
    configuration_directory = os.path.join(directory, "configuration")
    if args.pending_configuration:
        # Clean configuration directory (if necessary).
        if os.path.isdir(configuration_directory):
            shutil.rmtree(configuration_directory)
        os.makedirs(configuration_directory)
        # Generate configuration.
        kwargs = {
            'general': {
                'duration': 10.0,  # s
                'name': "read_n_display",
            },
            'probe': {
                'mode': 'mea',
                'nb_rows': 16,
                'nb_columns': 16,
                'radius': 100.0,  # µm
            },
            'cells': {
                'nb_cells': 256,
            }
        }
        configuration = circusort.io.generate_configuration(**kwargs)
        configuration.save(configuration_directory)
    generation_directory = os.path.join(directory, "generation")
    if args.pending_generation:
        # Clean generation directory (if necessary).
        if os.path.isdir(generation_directory):
            shutil.rmtree(generation_directory)
        os.makedirs(generation_directory)
        # Pre-generate the synthetic data from the configuration.
        circusort.net.pregenerator(
            configuration_directory=configuration_directory,
            generation_directory=generation_directory,
        )
    if args.pending_display:
        # Define log directory.
        log_directory = os.path.join(directory, "log")
        # Clean log directory (if necessary).
        if os.path.isdir(log_directory):
            shutil.rmtree(log_directory)
        os.makedirs(log_directory)
        # Load generation parameters.
        params = circusort.io.get_data_parameters(generation_directory)
        # Define parameters.
        host = '127.0.0.1'
        # Define keyword arguments.
        director_kwargs = {'log_path': os.path.join(log_directory, "log.txt")}
        reader_kwargs = {
            'name': "reader",
            'data_path': os.path.join(generation_directory, "data.raw"),
            'dtype': params['general']['dtype'],
            'nb_channels': params['probe']['nb_channels'],
            'nb_samples': params['general']['buffer_width'],
            'sampling_rate': params['general']['sampling_rate'],
            'is_realistic': True,
            'log_level': DEBUG,
        }
        qt_displayer_kwargs = {
            'name': "displayer",
            'probe_path': os.path.join(generation_directory, "probe.prb"),
            'log_level': DEBUG,
        }
        # Define the elements of the network (reader -> Qt displayer).
        director = circusort.create_director(host=host, **director_kwargs)
        manager = director.create_manager(host=host)
        reader = manager.create_block('reader', **reader_kwargs)
        qt_displayer = manager.create_block('qt_displayer', **qt_displayer_kwargs)
        # Initialize the elements of the network.
        director.initialize()
        # Connect the elements of the network.
        director.connect(reader.get_output('data'), [
            qt_displayer.get_input('data'),
        ])
        # Launch the network.
        director.start()
        director.join()
        director.destroy()

    return
def sorting(configuration_name):
    """Create the 1st sorting subnetwork.

    Parameter:
        configuration_name: string
            The name of the configuration (i.e. context).
    """
    # Define directories.
    # NOTE(review): `directory` comes from module scope — not visible in this chunk.
    generation_directory = os.path.join(directory, "generation", configuration_name)
    sorting_directory = os.path.join(directory, "sorting", configuration_name)
    log_directory = os.path.join(directory, "log", configuration_name)
    introspection_directory = os.path.join(directory, "introspection", configuration_name)
    # Load generation parameters.
    parameters = circusort.io.get_data_parameters(generation_directory)
    # Define parameters.
    host = '127.0.0.1'  # i.e. run the test locally
    dtype = parameters['general']['dtype']
    nb_channels = parameters['probe']['nb_channels']
    nb_samples = parameters['general']['buffer_width']
    sampling_rate = parameters['general']['sampling_rate']
    # Create directories (if necessary).
    if not os.path.isdir(sorting_directory):
        os.makedirs(sorting_directory)
    if not os.path.isdir(log_directory):
        os.makedirs(log_directory)
    if not os.path.isdir(introspection_directory):
        os.makedirs(introspection_directory)
    # Define keyword arguments.
    director_kwargs = {
        'log_path': os.path.join(log_directory, "log.txt"),
        'log_level': INFO,
    }
    reader_kwargs = {
        'name': "reader",
        'data_path': os.path.join(generation_directory, "data.raw"),
        'dtype': dtype,
        'nb_channels': nb_channels,
        'nb_samples': nb_samples,
        'sampling_rate': sampling_rate,
        'is_realistic': True,
        'speed_factor': 1.0,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    filter_kwargs = {
        'name': "filter",
        'cut_off': 1.0,  # Hz
        'order': 1,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    mad_estimator_kwargs = {
        'name': "mad",
        'time_constant': 10.0,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    peak_detector_kwargs = {
        'name': "detector",
        'threshold_factor': 7.0,
        'sampling_rate': sampling_rate,
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    peak_writer_kwargs = {
        'name': "writer",
        'data_path': os.path.join(sorting_directory, "peaks.h5"),
        'sampling_rate': sampling_rate,
        'nb_samples': nb_samples,
        'introspection_path': introspection_directory,
        'log_level': INFO,
    }
    # Define the elements of the network.
    director = circusort.create_director(host=host, **director_kwargs)
    manager = director.create_manager(host=host)
    reader = manager.create_block('reader', **reader_kwargs)
    filter_ = manager.create_block('filter', **filter_kwargs)
    mad_estimator = manager.create_block('mad_estimator', **mad_estimator_kwargs)
    peak_detector = manager.create_block('peak_detector', **peak_detector_kwargs)
    peak_writer = manager.create_block('peak_writer', **peak_writer_kwargs)
    # Initialize the elements of the network.
    director.initialize()
    # Connect the elements of the network.
    director.connect(reader.output, [filter_.input])
    director.connect(filter_.output, [
        mad_estimator.input,
        peak_detector.get_input('data'),
    ])
    director.connect(mad_estimator.output, [peak_detector.get_input('mads')])
    director.connect(peak_detector.get_output('peaks'), [
        peak_writer.input,
    ])
    # Launch the network.
    director.start()
    director.join()
    director.destroy()
def sorting(configuration_name, with_precomputed_templates=True, nb_waveforms_clustering=400, nb_replay=1):
    """Create the 1st sorting subnetwork.

    Parameter:
        configuration_name: string
            The name of the configuration (i.e. context).
        with_precomputed_templates: bool (default True)
            Seed the template updater with the ground-truth templates.
        nb_waveforms_clustering: int (default 400)
            Number of waveforms accumulated before clustering.
        nb_replay: int (default 1)
            Number of times the reader replays the data file.
    """
    # Define directories.
    # NOTE(review): `directory` comes from module scope — not visible in this chunk.
    generation_directory = os.path.join(directory, "generation", configuration_name)
    sorting_directory = os.path.join(directory, "sorting", configuration_name)
    introspection_directory = os.path.join(directory, "introspection", configuration_name)
    log_directory = os.path.join(directory, "log", configuration_name)
    output_directory = os.path.join(directory, "output", configuration_name)
    debug_directory = os.path.join(directory, "debug", configuration_name)
    # Load generation parameters.
    parameters = circusort.io.get_data_parameters(generation_directory)
    # Define parameters.
    hosts = {
        'master': '127.0.0.1',
    }
    hosts_keys = [  # ordered
        'master',
    ]
    dtype = parameters['general']['dtype']
    nb_channels = parameters['probe']['nb_channels']
    nb_samples = parameters['general']['buffer_width']
    sampling_rate = parameters['general']['sampling_rate']
    threshold_factor = 7.0
    probe_path = os.path.join(generation_directory, "probe.prb")
    precomputed_template_paths = [
        cell.template.path for cell in circusort.io.load_cells(generation_directory)
    ]
    # Create directories (if necessary).
    if not os.path.isdir(sorting_directory):
        os.makedirs(sorting_directory)
    if not os.path.isdir(introspection_directory):
        os.makedirs(introspection_directory)
    if not os.path.isdir(log_directory):
        os.makedirs(log_directory)
    if not os.path.isdir(output_directory):
        os.makedirs(output_directory)
    # Fix: `debug_directory` is handed to the clustering block via
    # 'debug_plots' below but was never created, unlike every other
    # directory this function uses.
    if not os.path.isdir(debug_directory):
        os.makedirs(debug_directory)
    # Define keyword arguments.
    director_kwargs = {
        'log_path': os.path.join(log_directory, "log.txt"),
    }
    reader_kwargs = {
        'name': "reader",
        'data_path': os.path.join(generation_directory, "data.raw"),
        'dtype': dtype,
        'nb_channels': nb_channels,
        'nb_samples': nb_samples,
        'sampling_rate': sampling_rate,
        'is_realistic': True,
        'speed_factor': 2.0,
        'introspection_path': introspection_directory,
        'log_level': INFO,
        'nb_replay': nb_replay
    }
    filter_kwargs = {
        'name': "filter",
        'cut_off': 1.0,  # Hz
        'introspection_path': introspection_directory,
        'log_level': INFO,
    }
    mad_kwargs = {
        'name': "mad",
        'time_constant': 10.0,
        'introspection_path': introspection_directory,
        'log_level': INFO,
    }
    detector_kwargs = {
        'name': "detector",
        'threshold_factor': threshold_factor,
        'sampling_rate': sampling_rate,
        'introspection_path': introspection_directory,
        'log_level': INFO,
    }
    peak_writer_kwargs = {
        'name': "peak_writer",
        'data_path': os.path.join(sorting_directory, "peaks.h5"),
        'introspection_path': introspection_directory,
        'log_level': INFO,
    }
    pca_kwargs = {
        'name': "pca",
        'nb_waveforms': 1000,
        'introspection_path': introspection_directory,
        'log_level': INFO,
    }
    cluster_kwargs = {
        'name': "cluster",
        'threshold_factor': threshold_factor,
        'sampling_rate': sampling_rate,
        'nb_waveforms': nb_waveforms_clustering,
        'nb_waveforms_tracking': 2 * nb_waveforms_clustering,
        'probe_path': probe_path,
        'two_components': False,
        'local_merges': 3,
        'debug_plots': debug_directory,
        # 'debug_ground_truth_templates': precomputed_template_paths
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    cluster_writer_kwargs = {
        'name': "cluster_writer",
        'output_directory': sorting_directory,
        'introspection_path': introspection_directory,
        'log_level': INFO,
    }
    updater_bis_kwargs = {
        'name': "updater_bis",
        'probe_path': probe_path,
        'templates_path': os.path.join(sorting_directory, "templates.h5"),
        'overlaps_path': os.path.join(sorting_directory, "overlaps.p"),
        'precomputed_template_paths': precomputed_template_paths if with_precomputed_templates else None,
        'sampling_rate': sampling_rate,
        'nb_samples': nb_samples,
        'introspection_path': introspection_directory,
        'log_level': INFO,
    }
    updater_writer_kwargs = {
        'name': "updater_writer",
        'output_directory': sorting_directory,
        'introspection_path': introspection_directory,
        'log_level': INFO,
    }
    # Define the elements of the network.
    director = circusort.create_director(host=hosts['master'], **director_kwargs)
    managers = {
        key: director.create_manager(host=hosts[key])
        for key in hosts_keys
    }
    reader = managers['master'].create_block('reader', **reader_kwargs)
    filter_ = managers['master'].create_block('filter', **filter_kwargs)
    mad = managers['master'].create_block('mad_estimator', **mad_kwargs)
    detector = managers['master'].create_block('peak_detector', **detector_kwargs)
    peak_writer = managers['master'].create_block('peak_writer', **peak_writer_kwargs)
    pca = managers['master'].create_block('pca', **pca_kwargs)
    cluster = managers['master'].create_block('density_clustering', **cluster_kwargs)
    cluster_writer = managers['master'].create_block('cluster_writer', **cluster_writer_kwargs)
    updater = managers['master'].create_block('template_updater_bis', **updater_bis_kwargs)
    updater_writer = managers['master'].create_block('updater_writer', **updater_writer_kwargs)
    # Initialize the elements of the network.
    director.initialize()
    # Connect the elements of the network.
    director.connect(reader.output, [filter_.input])
    director.connect(filter_.output, [
        mad.input,
        detector.get_input('data'),
        pca.get_input('data'),
        cluster.get_input('data'),
    ])
    director.connect(mad.output, [
        detector.get_input('mads'),
        cluster.get_input('mads'),
    ])
    director.connect(detector.get_output('peaks'), [
        peak_writer.get_input('peaks'),
        pca.get_input('peaks'),
        cluster.get_input('peaks'),
    ])
    director.connect(pca.get_output('pcs'), [
        cluster.get_input('pcs'),
    ])
    director.connect(cluster.get_output('templates'), [
        cluster_writer.get_input('templates'),
        updater.get_input('templates'),
    ])
    director.connect(updater.get_output('updater'), [
        updater_writer.get_input('updater'),
    ])
    # Launch the network.
    director.start()
    director.join()
    director.destroy()
def sorting(configuration_name):
    """Create and run the sorting network for one configuration.

    Parameter:
        configuration_name: string
            The name of the configuration (i.e. context).

    Raises:
        OSError: if the working directory does not exist.
    """

    # Check that the working directory exists.
    if not os.path.isdir(directory):
        message = "Directory does not exist: {}".format(directory)
        raise OSError(message)

    # Define directories.
    generation_directory = os.path.join(directory, "generation", configuration_name)
    sorting_directory = os.path.join(directory, "sorting", configuration_name)
    introspection_directory = os.path.join(directory, "introspection", configuration_name)

    # Load generation parameters.
    parameters = circusort.io.get_data_parameters(generation_directory)

    # Define parameters.
    host = '127.0.0.1'  # i.e. run the test locally
    dtype = parameters['general']['dtype']
    nb_channels = parameters['probe']['nb_channels']
    nb_samples = parameters['general']['buffer_width']
    sampling_rate = parameters['general']['sampling_rate']
    is_template_dictionary_initialized = False
    probe_path = os.path.join(generation_directory, "probe.prb")

    # Create directories (if necessary).
    if not os.path.isdir(sorting_directory):
        os.makedirs(sorting_directory)
    if not os.path.isdir(introspection_directory):
        os.makedirs(introspection_directory)

    # Define keyword arguments.
    reader_kwargs = {
        'name': "reader",
        # BUG FIX: the generated data lives in the generation directory (see
        # the other sorting networks in this file), not in the top-level
        # working directory.
        'data_path': os.path.join(generation_directory, "data.raw"),
        'dtype': dtype,
        'nb_channels': nb_channels,
        'nb_samples': nb_samples,
        'sampling_rate': sampling_rate,
        'is_realistic': True,
        'introspection_path': introspection_directory,
    }
    filter_kwargs = {
        'name': "filter",
        'cut_off': 100.0,  # Hz
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    signal_writer_kwargs = {
        'name': "writer",
        'data_path': os.path.join(sorting_directory, "data_filtered.raw"),
        'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    mad_writer_kwargs = {
        'data_path': os.path.join(sorting_directory, "mads.h5"),
        'dataset_name': 'mads',
    }
    peak_writer_kwargs = {
        'data_path': os.path.join(sorting_directory, "peaks.h5"),
        'sampling_rate': sampling_rate,
    }
    if is_template_dictionary_initialized:
        cluster_kwargs = {
            'nb_waveforms': 100000,  # i.e. delay clustering (template already exists)
        }
    else:
        cluster_kwargs = {
            'nb_waveforms': 100,  # i.e. precipitate clustering (template does not exist)
        }
    updater_kwargs = {
        'data_path': os.path.join(sorting_directory, "templates.h5"),
    }
    if is_template_dictionary_initialized:
        fitter_kwargs = {
            # TODO correct the following line.
            'init_path': os.path.join(directory, "initial_templates.h5"),
            'with_rejected_times': True,
        }
    else:
        fitter_kwargs = {}
    spike_writer_kwargs = {
        'data_path': os.path.join(sorting_directory, "spikes.h5"),
        'sampling_rate': sampling_rate,
    }

    # Define the elements of the network.
    director = circusort.create_director(host=host)
    manager = director.create_manager(host=host)
    reader = manager.create_block('reader', **reader_kwargs)
    filtering = manager.create_block('filter', **filter_kwargs)
    signal_writer = manager.create_block('writer', **signal_writer_kwargs)
    mad_estimator = manager.create_block('mad_estimator', log_level=DEBUG, time_constant=10)
    mad_writer = manager.create_block('writer', log_level=DEBUG, **mad_writer_kwargs)
    peak_detector = manager.create_block('peak_detector', threshold_factor=7.0, log_level=DEBUG)
    peak_writer = manager.create_block('peak_writer', log_level=INFO, **peak_writer_kwargs)
    pca = manager.create_block('pca', nb_waveforms=2000, log_level=DEBUG)
    cluster = manager.create_block('density_clustering', threshold_factor=7.0,
                                   probe_path=probe_path, two_components=False,
                                   log_level=DEBUG, **cluster_kwargs)
    # BUG FIX: pass the channel count loaded from the generation parameters
    # instead of the hard-coded value 16.
    updater = manager.create_block('template_updater', probe_path=probe_path,
                                   nb_channels=nb_channels, log_level=DEBUG,
                                   **updater_kwargs)
    fitter = manager.create_block('template_fitter', two_components=False,
                                  log_level=DEBUG, **fitter_kwargs)
    spike_writer = manager.create_block('spike_writer', log_level=DEBUG, **spike_writer_kwargs)

    # Initialize the elements of the network.
    director.initialize()

    # Connect the elements of the network.
    director.connect(reader.output, [filtering.input])
    director.connect(filtering.output, [
        mad_estimator.input,
        peak_detector.get_input('data'),
        cluster.get_input('data'),
        pca.get_input('data'),
        fitter.get_input('data'),
        signal_writer.input
    ])
    director.connect(mad_estimator.output, [
        peak_detector.get_input('mads'),
        cluster.get_input('mads'),
        mad_writer.input
    ])
    director.connect(peak_detector.get_output('peaks'), [
        pca.get_input('peaks'),
        cluster.get_input('peaks'),
        fitter.get_input('peaks'),
        peak_writer.input
    ])
    director.connect(pca.get_output('pcs'), cluster.get_input('pcs'))
    director.connect(cluster.get_output('templates'), updater.get_input('templates'))
    director.connect(updater.get_output('updater'), fitter.get_input('updater'))
    director.connect(fitter.output, spike_writer.input)

    # Launch the network.
    director.start()
    director.join()
    director.destroy()

    # TODO remove the following lines (i.e. analysis)?
    # # TODO load data_filtered.raw?
    # # TODO load mads.h5?
    # templates = circusort.io.load_templates(updater_kwargs['data_path'])
    # nb_templates = len(templates)
    # # TODO save initial_template.h5?
    # # TODO load initial_template.h5?
    # spikes = circusort.io.load_spikes(spike_writer_kwargs['data_path'], nb_units=nb_templates)
    # sorted_cells = spikes.to_units()
    # sorted_cells.save(sorting_directory)
    # # generated_cells = circusort.io.load_cells(generation_directory)
    # # matching = circusort.utils.find_matching(sorted_cells, generated_cells, t_min=220.0, t_max=300.0)

    return
def main():
    """Generate synthetic data and/or sort it, depending on command-line flags.

    With no flag, both the generation and the sorting stages run; with
    `--generation` and/or `--sorting`, only the requested stages run.
    """

    # Parse command line.
    parser = argparse.ArgumentParser()
    parser.add_argument('--generation', dest='pending_generation', action='store_true', default=None)
    parser.add_argument('--sorting', dest='pending_sorting', action='store_true', default=None)
    args = parser.parse_args()
    if args.pending_generation is None and args.pending_sorting is None:
        # No flag given: run both stages.
        args.pending_generation = True
        args.pending_sorting = True
    else:
        args.pending_generation = args.pending_generation is True
        args.pending_sorting = args.pending_sorting is True

    # Define the working directory.
    directory = os.path.join("~", ".spyking-circus-ort", "benchmarks", data_path)
    directory = os.path.expanduser(directory)
    if not os.path.isdir(directory):
        os.makedirs(directory)
    configuration_directory = os.path.join(directory, "configuration")
    if not os.path.isdir(configuration_directory):
        os.makedirs(configuration_directory)

    # TODO remove the following commented lines.
    # # Define probe path.
    # probe_path = os.path.join(configuration_directory, "probe.prb")
    # # Generate probe.
    # probe = circusort.io.generate_probe(mode='mea', nb_rows=nb_rows, nb_columns=nb_columns)
    # # Save probe.
    # probe.save(probe_path)

    # Define cells directory.
    cells_directory = os.path.join(configuration_directory, "cells")

    # Generate configuration.
    kwargs = {
        'general': {
            'duration': duration,
        },
        'probe': {
            'mode': 'mea',
            'nb_rows': nb_rows,
            'nb_columns': nb_columns,
            'radius': radius
        },
        'cells': {
            'mode': "default",
            'nb_cells': nb_cells,
            'path': cells_directory,
        }
    }
    configuration = circusort.io.generate_configuration(**kwargs)
    # Save configuration.
    configuration.save(configuration_directory)

    # Create cells directory (if necessary).
    # ROBUSTNESS FIX: guard the creation so that re-running the script does
    # not crash with FileExistsError (consistent with the other makedirs
    # calls in this file).
    if not os.path.isdir(cells_directory):
        os.makedirs(cells_directory)
    # cells_parameters = circusort.io.generate_cells_parameters()  # TODO enable this line.
    # cells_parameters.save(cells_directory)  # TODO enable this line.

    # For each cell...
    for k in range(0, nb_cells):
        # Define cell directory.
        cell_directory = os.path.join(cells_directory, str(k))
        # Each cell fires at 2 Hz, then at 2 + 5 = 7 Hz after a
        # cell-specific onset time.
        cell_parameters = [
            ('train', [
                ('rate', "2 + 5.0*(t > %g)" % ((k + 0.5) * (2 * duration / 3.) / float(nb_cells))),
            ]),
            ('position', []),  # TODO be able to remove this line.
            ('template', []),  # TODO be able to remove this line.
        ]
        cell_parameters = circusort.obj.Parameters(cell_parameters)
        cell_parameters.save(cell_directory)

    # Define directories.
    generation_directory = os.path.join(directory, "generation")
    sorting_directory = os.path.join(directory, "sorting")

    # Generate data (if necessary).
    if args.pending_generation:
        circusort.net.pregenerator(
            configuration_directory=configuration_directory,
            generation_directory=generation_directory)

    # Sort data (if necessary).
    if args.pending_sorting:

        # Load generation parameters.
        parameters = circusort.io.get_data_parameters(generation_directory)
        introspect_path = os.path.join(directory, 'introspection')

        # Define parameters.
        host = '127.0.0.1'  # i.e. run the test locally
        dtype = parameters['general']['dtype']
        nb_channels = parameters['probe']['nb_channels']
        nb_samples = parameters['general']['buffer_width']
        sampling_rate = parameters['general']['sampling_rate']
        threshold_factor = 7.0
        probe_path = os.path.join(generation_directory, "probe.prb")
        # NOTE(review): `probe` is loaded but not used below — confirm whether
        # it can be removed or was meant to be passed to a block.
        probe = circusort.io.load_probe(probe_path)
        precomputed_template_paths = [
            os.path.join(e, 'template.h5')
            for e in list_cells(os.path.join(generation_directory, 'cells'))
        ]

        # Create sorting directory (if necessary).
        if not os.path.isdir(sorting_directory):
            os.makedirs(sorting_directory)

        # Define keyword arguments.
        reader_kwargs = {
            'name': "reader",
            'data_path': os.path.join(generation_directory, "data.raw"),
            'dtype': dtype,
            'nb_channels': nb_channels,
            'nb_samples': nb_samples,
            'sampling_rate': sampling_rate,
            'is_realistic': True,
            'nb_replay': nb_replay
        }
        filter_kwargs = {
            'name': "filter",
            'cut_off': 0.1,  # Hz
        }
        mad_kwargs = {
            'name': "mad",
            'time_constant': 10.0,
        }
        detector_kwargs = {
            'name': "detector",
            'threshold_factor': threshold_factor,
            'sampling_rate': sampling_rate,
        }
        pca_kwargs = {
            'name': "pca",
            'nb_waveforms': 10000,
        }
        cluster_kwargs = {
            'name': "cluster",
            'threshold_factor': threshold_factor,
            'sampling_rate': sampling_rate,
            'nb_waveforms': nb_waveforms_clustering,
            'probe_path': probe_path,
            'two_components': False,
            'log_level': INFO,
            'debug_plots': os.path.join(directory, 'clustering_plots')
        }
        if preload_templates:
            cluster_kwargs['channels'] = []
        updater_kwargs = {
            'name': "updater",
            'probe_path': probe_path,
            'templates_path': os.path.join(sorting_directory, "templates.h5"),
            'sampling_rate': sampling_rate,
            'nb_samples': nb_samples,
            'log_level': DEBUG,
            'overlaps_path': os.path.join(sorting_directory, "overlaps.pck")
        }
        if preload_templates:
            updater_kwargs[
                'precomputed_template_paths'] = precomputed_template_paths
        fitter_kwargs = {
            'name': "fitter",
            'degree': nb_fitters,
            'sampling_rate': sampling_rate,
            'log_level': DEBUG,
            'introspection_path': introspect_path,
            'discarding_eoc_from_updater': True,
        }
        # if preload_templates:
        #     fitter_kwargs['templates_init_path'] = os.path.join(sorting_directory, "templates.h5")
        #     fitter_kwargs['overlaps_init_path'] = os.path.join(sorting_directory, "overlaps.pck")
        writer_kwargs = {
            'name': "writer",
            'data_path': os.path.join(sorting_directory, "spikes.h5"),
            'sampling_rate': sampling_rate,
            'nb_samples': nb_samples,
        }

        # Define the elements of the network.
        director = circusort.create_director(host=host)
        manager = director.create_manager(host=host)
        reader = manager.create_block('reader', **reader_kwargs)
        filter_ = manager.create_block('filter', **filter_kwargs)
        mad = manager.create_block('mad_estimator', **mad_kwargs)
        detector = manager.create_block('peak_detector', **detector_kwargs)
        pca = manager.create_block('pca', **pca_kwargs)
        cluster = manager.create_block('density_clustering', **cluster_kwargs)
        updater = manager.create_block('template_updater', **updater_kwargs)
        fitter = manager.create_network('fitter', **fitter_kwargs)
        # BUG FIX: removed a stray
        #     cluster = manager.create_network('cluster', **cluster_kwargs)
        # which silently overwrote the `density_clustering` block created
        # above; the replacement network was never passed to
        # `director.connect_network`, so its internal blocks were left
        # unconnected.
        writer = manager.create_block('spike_writer', **writer_kwargs)

        # Initialize the elements of the network.
        director.initialize()

        # Connect the elements of the network.
        director.connect(reader.output, [filter_.input])
        director.connect(filter_.output, [
            mad.input,
            detector.get_input('data'),
            pca.get_input('data'),
            cluster.get_input('data'),
            fitter.get_input('data'),
        ])
        director.connect(mad.output, [
            detector.get_input('mads'),
            cluster.get_input('mads'),
        ])
        director.connect(detector.get_output('peaks'), [
            pca.get_input('peaks'),
            cluster.get_input('peaks'),
            fitter.get_input('peaks'),
        ])
        director.connect(pca.get_output('pcs'), [
            cluster.get_input('pcs'),
        ])
        director.connect(cluster.get_output('templates'), [
            updater.get_input('templates'),
        ])
        director.connect(updater.get_output('updater'), [
            fitter.get_input('updater'),
        ])
        # The fitter is itself a sub-network: connect its internal blocks too.
        director.connect_network(fitter)
        director.connect(fitter.get_output('spikes'), [
            writer.input,
        ])

        # Launch the network.
        director.start()
        director.join()
        director.destroy()

    return
def sorting(nb_waveforms_clustering=1000):
    """Create and run the sorting network.

    Parameter:
        nb_waveforms_clustering: integer (optional)
            Number of waveforms accumulated before clustering.
            The default value is 1000.

    NOTE(review): relies on module-level names defined elsewhere in this file
    (`directory`, `nb_channels`, `hosts`, `hosts_keys`, `managers_keys`,
    `DEBUG`, `INFO`) -- confirm they are in scope before calling.
    """

    # Define directories.
    recording_directory = os.path.join(directory, "recording")
    sorting_directory = os.path.join(directory, "sorting")
    # introspection_directory = os.path.join(directory, "introspection")
    log_directory = os.path.join(directory, "log")
    output_directory = os.path.join(directory, "output")
    debug_directory = os.path.join(directory, "debug")

    # Define parameters.
    dtype = 'uint16'
    nb_samples = 1024  # i.e. buffer width (in samples)
    sampling_rate = 20e+3  # Hz
    threshold_factor = 7.0  # detection threshold factor (see detector kwargs)
    alignment = True
    spike_width = 5.0  # ms
    spike_jitter = 1.0  # ms
    spike_sigma = 2.75  # µV
    probe_path = os.path.join(recording_directory, "probe.prb")
    nb_fitters = 2  # degree of the fitter sub-network

    # Create directories (if necessary).
    if not os.path.isdir(sorting_directory):
        os.makedirs(sorting_directory)
    # if not os.path.isdir(introspection_directory):
    #     os.makedirs(introspection_directory)
    if not os.path.isdir(log_directory):
        os.makedirs(log_directory)
    if not os.path.isdir(output_directory):
        os.makedirs(output_directory)

    # Define keyword arguments.
    director_kwargs = {
        'log_path': os.path.join(log_directory, "log.txt"),
        'log_level': INFO,
    }
    reader_kwargs = {
        'name': "reader",
        'data_path': os.path.join(recording_directory, "data.raw"),
        'dtype': dtype,
        'nb_channels': nb_channels,
        'nb_samples': nb_samples,
        'sampling_rate': sampling_rate,
        'is_realistic': True,
        'speed_factor': 1.0,
        # 'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    filter_kwargs = {
        'name': "filter",
        'cut_off': 500.0,  # Hz
        'order': 1,
        # 'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    # writer_kwargs = {
    #     'name': "writer",
    #     'data_path': os.path.join(recording_directory, "filtered_data.raw"),
    #     # 'introspection_path': introspection_directory,
    #     'log_level': DEBUG,
    # }
    mad_kwargs = {
        'name': "mad",
        'time_constant': 10.0,
        # 'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    detector_kwargs = {
        'name': "detector",
        'threshold_factor': threshold_factor,
        'sampling_rate': sampling_rate,
        # 'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    peak_writer_kwargs = {
        'name': "peak_writer",
        'data_path': os.path.join(sorting_directory, "peaks.h5"),
        # 'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    pca_kwargs = {
        'name': "pca",
        'spike_width': spike_width,
        'spike_jitter': spike_jitter,
        'spike_sigma': spike_sigma,
        'alignment': alignment,
        'nb_waveforms': 1000,
        # 'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    cluster_kwargs = {
        'name': "cluster",
        'threshold_factor': threshold_factor,
        'alignment': alignment,
        'sampling_rate': sampling_rate,
        'spike_width': spike_width,
        'spike_jitter': spike_jitter,
        'spike_sigma': spike_sigma,
        'nb_waveforms': nb_waveforms_clustering,
        'nb_waveforms_tracking': 100000,  # i.e. block tracking
        'probe_path': probe_path,
        'two_components': False,
        'local_merges': 3,
        'debug_plots': debug_directory,
        # 'debug_ground_truth_templates': precomputed_template_paths,
        # 'introspection_path': introspection_directory,
        'log_level': INFO,
    }
    # cluster_writer_kwargs = {
    #     'name': "cluster_writer",
    #     'output_directory': sorting_directory,
    #     # 'introspection_path': introspection_directory,
    #     'log_level': DEBUG,
    # }
    updater_bis_kwargs = {
        'name': "updater_bis",
        'probe_path': probe_path,
        'templates_path': os.path.join(sorting_directory, "templates.h5"),
        'overlaps_path': os.path.join(sorting_directory, "overlaps.p"),
        # 'precomputed_template_paths': precomputed_template_paths if with_precomputed_templates else None,
        'sampling_rate': sampling_rate,
        'nb_samples': nb_samples,
        # 'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    # updater_writer_kwargs = {
    #     'name': "updater_writer",
    #     'output_directory': sorting_directory,
    #     # 'introspection_path': introspection_directory,
    #     'log_level': DEBUG,
    # }
    fitter_bis_kwargs = {
        'name': "fitter_bis",
        'degree': nb_fitters,
        # 'templates_init_path': os.path.join(sorting_directory, "templates.h5"),
        # 'overlaps_init_path': os.path.join(sorting_directory, "overlaps.p"),
        'sampling_rate': sampling_rate,
        'discarding_eoc_from_updater': True,
        # 'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }
    spike_writer_kwargs = {
        'name': "spike_writer",
        'data_path': os.path.join(sorting_directory, "spikes.h5"),
        'sampling_rate': sampling_rate,
        'nb_samples': nb_samples,
        # 'introspection_path': introspection_directory,
        'log_level': DEBUG,
    }

    # Define the elements of the network (one manager per host; each block is
    # created on the manager designated by `managers_keys`).
    director = circusort.create_director(host=hosts['master'], **director_kwargs)
    managers = {
        key: director.create_manager(host=hosts[key])
        for key in hosts_keys
    }
    reader = managers[managers_keys['reader']].create_block(
        'reader', **reader_kwargs)
    filter_ = managers[managers_keys['filter']].create_block(
        'filter', **filter_kwargs)
    # writer = managers[managers_keys['writer']].create_block('writer', **writer_kwargs)
    mad = managers[managers_keys['mad']].create_block('mad_estimator',
                                                      **mad_kwargs)
    detector = managers[managers_keys['detector']].create_block(
        'peak_detector', **detector_kwargs)
    peak_writer = managers[managers_keys['peak_writer']].create_block(
        'peak_writer', **peak_writer_kwargs)
    pca = managers[managers_keys['pca']].create_block('pca', **pca_kwargs)
    cluster = managers[managers_keys['cluster']].create_block(
        'density_clustering', **cluster_kwargs)
    # cluster_writer = managers[managers_keys['cluster_writer']].create_block('cluster_writer', **cluster_writer_kwargs)
    updater = managers[managers_keys['updater']].create_block(
        'template_updater_bis', **updater_bis_kwargs)
    # updater_writer = managers[managers_keys['updater_writer']].create_block('updater_writer', **updater_writer_kwargs)
    # The fitter is a sub-network (not a plain block): see `connect_network` below.
    fitter = managers[managers_keys['fitter']].create_network(
        'fitter_bis', **fitter_bis_kwargs)
    spike_writer = managers[managers_keys['spike_writer']].create_block(
        'spike_writer', **spike_writer_kwargs)

    # Initialize the elements of the network.
    director.initialize()

    # Connect the elements of the network.
    director.connect(reader.output, [
        filter_.get_input('data'),
    ])
    director.connect(
        filter_.output, [
            # writer.get_input('data'),
            mad.get_input('data'),
            detector.get_input('data'),
            pca.get_input('data'),
            cluster.get_input('data'),
            fitter.get_input('data'),
        ])
    director.connect(mad.output, [
        detector.get_input('mads'),
        cluster.get_input('mads'),
    ])
    director.connect(detector.get_output('peaks'), [
        peak_writer.get_input('peaks'),
        pca.get_input('peaks'),
        cluster.get_input('peaks'),
        fitter.get_input('peaks'),
    ])
    director.connect(pca.get_output('pcs'), [
        cluster.get_input('pcs'),
    ])
    director.connect(
        cluster.get_output('templates'), [
            # cluster_writer.get_input('templates'),
            updater.get_input('templates'),
        ])
    director.connect(
        updater.get_output('updater'), [
            # updater_writer.get_input('updater'),
            fitter.get_input('updater'),
        ])
    # Connect the internal blocks of the fitter sub-network.
    director.connect_network(fitter)
    director.connect(fitter.get_output('spikes'), [
        spike_writer.get_input('spikes'),
    ])

    # Launch the network.
    director.start()
    director.join()
    director.destroy()
def main():
    """Run a live peak-display network on a recording given on the command line."""

    # Make sure that the working directory exists.
    if not os.path.isdir(directory):
        os.makedirs(directory)

    # Read the data and probe paths from the command line.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--data-path', dest='data_path', required=True)
    arg_parser.add_argument('--probe-path', dest='probe_path', required=True)
    cli_args = arg_parser.parse_args()

    # Start from an empty log directory.
    log_directory = os.path.join(directory, "log")
    if os.path.isdir(log_directory):
        shutil.rmtree(log_directory)
    os.makedirs(log_directory)

    # TODO remove the 2 following lines.
    # # Load generation parameters.
    # params = circusort.io.get_data_parameters(generation_directory)

    host = '127.0.0.1'  # i.e. run locally

    # Keyword arguments of each block of the network.
    director_kwargs = {'log_path': os.path.join(log_directory, "log.txt")}
    reader_kwargs = {
        'name': "reader",
        'data_path': cli_args.data_path,
        'dtype': 'int16',
        # 'nb_channels': 30,  # TODO remove?
        'nb_channels': 256,  # TODO auto?
        'nb_samples': 2000,
        'sampling_rate': 20e+3,
        'is_realistic': True,
        'log_level': DEBUG,
    }
    filter_kwargs = {
        'name': "filter",
        'cut_off': 20.0,  # Hz
        'log_level': DEBUG,
    }
    mad_kwargs = {
        'name': "mad",
        'time_constant': 10.0,
        'log_level': DEBUG,
    }
    detector_kwargs = {
        'name': "detector",
        'threshold_factor': 7.0,
        'sampling_rate': 20e+3,
        'log_level': DEBUG,
    }
    peak_displayer_kwargs = {
        'name': "displayer",
        'probe_path': cli_args.probe_path,
        'log_level': DEBUG,
    }

    # Instantiate the blocks of the network.
    director = circusort.create_director(host=host, **director_kwargs)
    manager = director.create_manager(host=host)
    reader = manager.create_block('reader', **reader_kwargs)
    signal_filter = manager.create_block('filter', **filter_kwargs)
    mad_estimator = manager.create_block('mad_estimator', **mad_kwargs)
    peak_detector = manager.create_block('peak_detector', **detector_kwargs)
    displayer = manager.create_block('peak_displayer', **peak_displayer_kwargs)

    # Initialize the blocks.
    director.initialize()

    # Wire the blocks together.
    director.connect(reader.get_output('data'), [
        signal_filter.get_input('data'),
    ])
    director.connect(signal_filter.get_output('data'), [
        mad_estimator.get_input('data'),
        peak_detector.get_input('data'),
        displayer.get_input('data'),
    ])
    director.connect(mad_estimator.get_output('mads'), [
        peak_detector.get_input('mads'),
        displayer.get_input('mads'),
    ])
    director.connect(peak_detector.get_output('peaks'), [
        displayer.get_input('peaks'),
    ])

    # Run the network until completion.
    director.start()
    director.join()
    director.destroy()

    return
# Test to measure the computational efficiency of two operations in one block
# associated to one manager.

import circusort
import logging

host = '127.0.0.1'  # to run the test locally
nb_channels = 10

# Build and run a minimal reader -> writer network for one second.
director = circusort.create_director(host=host, log_level=logging.INFO)
manager = director.create_manager(host=host, log_level=logging.INFO)
reader = manager.create_block('reader', data_path='/tmp/input.dat', nb_channels=nb_channels)
writer = manager.create_block('writer', data_path='/tmp/output.dat')
manager.initialize()
director.connect(reader.output, writer.input)
manager.start()
director.sleep(duration=1)

# Memory-map the input and output files as (nb_frames, nb_channels) arrays.
import pylab
import numpy
x1 = numpy.memmap('/tmp/input.dat', dtype=numpy.float32, mode='r')
# BUG FIX: use floor division -- under Python 3, `/` yields a float and
# numpy's reshape rejects non-integer dimensions.
x1 = x1.reshape(x1.size // nb_channels, nb_channels)
x2 = numpy.memmap('/tmp/output.dat', dtype=numpy.float32, mode='r')
x2 = x2.reshape(x2.size // nb_channels, nb_channels)