def patch_adaptor(adaptor, node_group, network):
    """Create a node adaptor for *node_group*, turning off batch processing
    whenever cells must be built one at a time.

    Returns the patched NodeAdaptor instance.
    """
    patched = NodeAdaptor.patch_adaptor(adaptor, node_group, network)

    # Dynamics params stored directly in nodes.h5 force per-node construction.
    if node_group.has_dynamics_params:
        patched.batch_process = False

    # A non-null value in the model_processing column potentially means every
    # cell is uniquely built (model_processing is applied per individual cell),
    # so the nodes cannot be batched.
    if 'model_processing' in node_group.columns:
        patched.batch_process = False
    elif ('model_processing' in node_group.all_columns
          and not all_null(node_group, 'model_processing')):
        patched.batch_process = False

    if patched.batch_process:
        io.log_info('Batch processing nodes for {}/{}.'.format(
            node_group.parent.name, node_group.group_id))

    return patched
def run(self, tstop=None):
    """Run the NEST simulation for *tstop* ms (defaults to ``self._tstop``),
    optionally splitting it into fixed-size blocks via _get_block_trial.
    """
    if tstop is None:
        tstop = self._tstop

    for mod in self._mods:
        mod.initialize(self)

    io.barrier()
    io.log_info('Starting Simulation')

    # n_blocks < 0 signals "no block splitting": simulate tstop in one go.
    n_blocks, remainder, block_dur = self._get_block_trial(tstop)
    if n_blocks > 0:
        for _ in moves.range(n_blocks):
            nest.Simulate(block_dur)
    if remainder > 0:
        nest.Simulate(remainder * self.dt)
    if n_blocks < 0:
        nest.Simulate(tstop)

    io.barrier()
    io.log_info('Simulation finished, finalizing results.')
    for mod in self._mods:
        mod.finalize(self)
    io.barrier()
    io.log_info('Done.')
def run(self, duration=None):
    """Run the NEST simulation for *duration* ms (defaults to
    ``self.duration``), optionally in fixed-size blocks from
    _get_block_trial.
    """
    if duration is None:
        duration = self.duration

    for mod in self._mods:
        mod.initialize(self)

    io.barrier()
    io.log_info('Starting Simulation')

    # n < 0 signals "no block splitting": simulate the full duration at once.
    n, res, data_res = self._get_block_trial(duration)
    if n > 0:
        # BUG FIX: original used xrange, which does not exist on Python 3
        # (NameError); the builtin range works on both 2 and 3.
        for _ in range(n):
            nest.Simulate(data_res)
    if res > 0:
        nest.Simulate(res * self.dt)
    if n < 0:
        nest.Simulate(duration)

    io.barrier()
    io.log_info('Simulation finished, finalizing results.')
    for mod in self._mods:
        mod.finalize(self)
    io.barrier()
    io.log_info('Done.')
def from_config(cls, configure, graph):
    """Build and configure a network simulation from a config.

    :param configure: path to a simulation config json file, or an
        already-parsed config dict.
    :param graph: network/graph object used to build nodes and edges.
    :return: the configured network instance.
    :raises Exception: if the config cannot be interpreted or required
        keys ('run', 'block_size' when block_run is set) are missing.
    """
    # Load the json file, or accept an already-built dict.
    if isinstance(configure, string_types):
        config = Config.from_json(configure, validate=True)
    elif isinstance(configure, dict):
        config = configure
    else:
        raise Exception('Could not convert {} (type "{}") to json.'.format(
            configure, type(configure)))

    if 'run' not in config:
        raise Exception('Json file is missing "run" entry. Unable to build Bionetwork.')
    run_dict = config['run']

    # Get network parameters.
    # Step time (dt) is set in the kernel and should be passed.
    overwrite = run_dict.get('overwrite_output_dir', True)
    dt = run_dict['dt']  # TODO: make sure dt exists
    network = cls(graph, dt=dt, overwrite=overwrite)

    if 'output_dir' in config['output']:
        network.output_dir = config['output']['output_dir']

    if run_dict.get('block_run', False):
        if 'block_size' not in run_dict:
            raise Exception('"block_run" is set to True but "block_size" not found.')
        network._block_size = run_dict['block_size']

    if 'duration' in run_dict:
        network.tstop = run_dict['duration']
    elif 'tstop' in run_dict:
        network.tstop = run_dict['tstop']

    if 'precise_times' in run_dict:
        network.set_spike_generator_params(precise_times=run_dict['precise_times'])
    if run_dict.get('allow_offgrid_spikes', False):
        network.set_spike_generator_params(allow_offgrid_spikes=True)

    # Create the output directory, or delete existing result files if it
    # already exists and overwriting is enabled.
    graph.io.log_info('Setting up output directory')
    if not os.path.exists(config['output']['output_dir']):
        os.mkdir(config['output']['output_dir'])
    elif overwrite:
        for gfile in glob.glob(os.path.join(config['output']['output_dir'], '*.gdf')):
            os.remove(gfile)

    graph.io.log_info('Building cells.')
    graph.build_nodes()

    graph.io.log_info('Building recurrent connections')
    graph.build_recurrent_edges()

    for sim_input in inputs.from_config(config):
        node_set = graph.get_node_set(sim_input.node_set)
        if sim_input.input_type == 'spikes':
            io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
            path = sim_input.params['input_file']
            spikes = SpikeTrains.load(path=path, file_type=sim_input.module,
                                      **sim_input.params)
            graph.add_spike_trains(spikes, node_set,
                                   network.get_spike_generator_params())

        elif sim_input.input_type == 'current_clamp':
            # TODO: Need to make this more robust
            # Copy the lists so the appends below do not mutate the caller's
            # sim_input.params in place.
            amp_times = list(sim_input.params.get('amplitude_times', []))
            amp_values = list(sim_input.params.get('amplitude_values', []))

            if 'delay' in sim_input.params:
                amp_times.append(sim_input.params['delay'])
                amp_values.append(sim_input.params['amp'])

                if 'duration' in sim_input.params:
                    # Step back to 0.0 amp once the pulse duration has elapsed.
                    amp_times.append(sim_input.params['delay'] + sim_input.params['duration'])
                    amp_values.append(0.0)

            network.add_step_currents(amp_times, amp_values, node_set, sim_input.name)

        else:
            graph.io.log_warning('Unknown input type {}'.format(sim_input.input_type))

    sim_reports = reports.from_config(config)
    for report in sim_reports:
        if report.module == 'spikes_report':
            mod = mods.SpikesMod(**report.params)
        elif isinstance(report, reports.MembraneReport):
            # For convenience and for compliance with the SONATA format:
            # "membrane_report" and "multimeter_report" are the same in pointnet.
            mod = mods.MultimeterMod(**report.params)
        else:
            graph.io.log_exception('Unknown report type {}'.format(report.module))

        network.add_mod(mod)

    io.log_info('Network created.')
    return network
def from_config(cls, configure, graph):
    """Build and configure a network simulation from a config.

    :param configure: path to a simulation config json file, or an
        already-parsed config dict.
    :param graph: network/graph object used to build nodes and edges.
    :return: the configured network instance.
    :raises Exception: if the config cannot be interpreted or required
        keys ('run', 'block_size' when block_run is set) are missing.
    """
    # Load the json file, or accept an already-built dict.
    # BUG FIX: original checked isinstance(configure, basestring), which is a
    # NameError on Python 3; str is the Python 3 equivalent (NOTE(review): on
    # Python 2 this would no longer match unicode paths — confirm target version).
    if isinstance(configure, str):
        config = Config.from_json(configure, validate=True)
    elif isinstance(configure, dict):
        config = configure
    else:
        raise Exception('Could not convert {} (type "{}") to json.'.format(
            configure, type(configure)))

    if 'run' not in config:
        raise Exception('Json file is missing "run" entry. Unable to build Bionetwork.')
    run_dict = config['run']

    # Get network parameters.
    # Step time (dt) is set in the kernel and should be passed.
    overwrite = run_dict.get('overwrite_output_dir', True)
    dt = run_dict['dt']  # TODO: make sure dt exists
    network = cls(graph, dt=dt, overwrite=overwrite)

    if 'output_dir' in config['output']:
        network.output_dir = config['output']['output_dir']

    if run_dict.get('block_run', False):
        if 'block_size' not in run_dict:
            raise Exception('"block_run" is set to True but "block_size" not found.')
        network._block_size = run_dict['block_size']

    if 'duration' in run_dict:
        network.duration = run_dict['duration']
    elif 'tstop' in run_dict:
        network.duration = run_dict['tstop']

    # Create the output directory, or delete existing result files if it
    # already exists and overwriting is enabled.
    graph.io.log_info('Setting up output directory')
    if not os.path.exists(config['output']['output_dir']):
        os.mkdir(config['output']['output_dir'])
    elif overwrite:
        for gfile in glob.glob(os.path.join(config['output']['output_dir'], '*.gdf')):
            os.remove(gfile)

    graph.io.log_info('Building cells.')
    graph.build_nodes()

    graph.io.log_info('Building recurrent connections')
    graph.build_recurrent_edges()

    for sim_input in inputs.from_config(config):
        node_set = graph.get_node_set(sim_input.node_set)
        if sim_input.input_type == 'spikes':
            spikes = spike_trains.SpikesInput.load(
                name=sim_input.name, module=sim_input.module,
                input_type=sim_input.input_type, params=sim_input.params)
            io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
            graph.add_spike_trains(spikes, node_set)

    sim_reports = reports.from_config(config)
    for report in sim_reports:
        if report.module == 'spikes_report':
            mod = mods.SpikesMod(**report.params)
        else:
            graph.io.log_exception('Unknown report type {}'.format(report.module))

        network.add_mod(mod)

    io.log_info('Network created.')
    return network