def test_parallel() -> None:
    proton = _make_proton(parallel=True)
    experiment = Engine(**proton)

    log.debug(pf(experiment.state.get_config(True)))

    experiment.update(10.0)

    log.debug(pf(experiment.state.get_config(True)))
    log.debug(pf(experiment.state.divide_value()))

    experiment.end()
def test_profiler() -> None:
    engine = Engine(
        processes={
            'processA': ProcessA(),
            'processB': ProcessB(),
        },
        topology={
            'processA': {},
            'processB': {},
        },
        profile=True,
    )
    engine.update(3)
    engine.end()

    assert engine.stats is not None
    stats = engine.stats.strip_dirs()
    process_a_runtime = stats.stats[  # type: ignore
        ('test_profiler.py', 17, 'next_update')][3]
    process_b_runtime = stats.stats[  # type: ignore
        ('test_profiler.py', 32, 'next_update')][3]

    assert 0.6 <= process_a_runtime <= 0.7
    assert 0.3 <= process_b_runtime <= 0.4
def simulate_experiment(
        experiment: Engine,
        settings: Optional[Dict[str, Any]] = None,
) -> Dict:
    """Simulate an :term:`Engine`.

    Args:
        experiment: a configured experiment
        settings: simulation settings, including ``total_time`` and
            ``return_raw_data``

    Returns:
        A timeseries of variables from all ports. If ``return_raw_data``
        is True, return the raw data instead.
    """
    settings = settings or {}
    total_time = settings.get('total_time', 10)
    return_raw_data = settings.get('return_raw_data', False)

    # run simulation
    experiment.update(total_time)
    experiment.end()

    # return data from emitter
    if return_raw_data:
        return experiment.emitter.get_data()
    return experiment.emitter.get_timeseries()
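
def _example_simulate_experiment(experiment: Engine) -> Dict:
    # Hedged usage sketch (illustrative, not part of the original module):
    # run a pre-configured Engine for 30 seconds and return the raw emitter
    # data instead of the default timeseries. The duration is an assumption.
    return simulate_experiment(
        experiment,
        settings={'total_time': 30, 'return_raw_data': True},
    )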
class ModelProfiler:
    """Profile Bioscrape-COBRA composites"""

    # model complexity
    n_agents = 1
    experiment_time = DEFAULT_EXPERIMENT_TIME
    parallel = False
    reuse_processes = False
    stochastic = False
    division = False
    spatial = False
    emit_step = 1

    # initialize
    composite = None
    experiment = None
    initial_state = None

    def set_parameters(
            self,
            n_agents=None,
            experiment_time=None,
            parallel=None,
            reuse_processes=None,
            emit_step=None,
            stochastic=None,
            division=None,
            spatial=None,
    ):
        self.n_agents = \
            n_agents if n_agents is not None else self.n_agents
        self.experiment_time = \
            experiment_time or self.experiment_time
        self.parallel = \
            parallel or self.parallel
        self.reuse_processes = \
            reuse_processes or self.reuse_processes
        self.emit_step = \
            emit_step or self.emit_step
        self.stochastic = \
            stochastic or self.stochastic
        self.division = \
            division or self.division
        self.spatial = \
            spatial or self.spatial

    def _generate_composite(self, **kwargs):
        initial_agent_states = [{
            'rates': {
                'k_leak': 0.005  # less leak -> less spontaneous expression
            }
        }]

        self.composite, _, self.initial_state = get_bioscrape_cobra_composite(
            n_agents=self.n_agents,
            initial_agent_states=initial_agent_states,
            stochastic=self.stochastic,
            division=self.division,
            spatial=self.spatial,
            initial_glucose=1e1,
            initial_lactose=5e1,
            depth=0.5,
            diffusion_rate=2e-2,
            jitter_force=1e-5,
            bounds=[30, 30],
            n_bins=[30, 30],
            sbml_file=STOCHASTIC_FILE if self.stochastic
            else DETERMINISTIC_FILE,
            parallel=self.parallel,
            reuse_processes=self.reuse_processes,
        )

    def _initialize_experiment(self, **kwargs):
        self.experiment = Engine(
            processes=self.composite['processes'],
            topology=self.composite['topology'],
            initial_state=self.initial_state,
            **kwargs)

    def _run_experiment(self, **kwargs):
        self.experiment.update(kwargs['experiment_time'])
        self.experiment.end()

    def _get_emitter_data(self, **kwargs):
        _ = kwargs
        data = self.experiment.emitter.get_data()
        return data

    def _get_emitter_timeseries(self, **kwargs):
        _ = kwargs
        timeseries = self.experiment.emitter.get_timeseries()
        return timeseries

    def _profile_method(self, method, **kwargs):
        """The main profiling method for the simulation steps.

        Args:
            method: the simulation step to profile,
                for example ``self._run_experiment``.
        """
        profiler = cProfile.Profile()
        profiler.enable()
        method(**kwargs)
        profiler.disable()
        stats = pstats.Stats(profiler)
        return stats

    def run_profile(self):
        print('GENERATE COMPOSITE')
        self._profile_method(
            self._generate_composite)

        print('INITIALIZE EXPERIMENT')
        self._profile_method(
            self._initialize_experiment)

        print('RUN EXPERIMENT')
        self._profile_method(
            self._run_experiment, experiment_time=self.experiment_time)

        print('GET EMITTER DATA')
        self._profile_method(
            self._get_emitter_data)

    def profile_communication_latency(self):
        self._generate_composite()
        self._initialize_experiment(display_info=False)

        # profile the experiment
        stats = self._profile_method(
            self._run_experiment,
            experiment_time=self.experiment_time,
        )

        # get next_update runtime
        next_update_amount = ("next_update",)
        _, stats_list = stats.get_print_list(next_update_amount)

        process_update_time = 0
        for s in stats_list:
            process_update_time += stats.stats[s][3]

        # get total runtime
        experiment_time = stats.total_tt
        store_update_time = experiment_time - process_update_time

        # print_stats = stats.strip_dirs().sort_stats(-1).print_stats()
        # looping_stats = stats.sort_stats(SortKey.TIME).print_stats(20)

        return process_update_time, store_update_time
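
def _example_model_profiler():
    # Hedged usage sketch (illustrative, not part of the original module):
    # profile communication latency for a small stochastic run. The parameter
    # values below are assumptions chosen to keep the run short.
    profiler = ModelProfiler()
    profiler.set_parameters(
        n_agents=2,
        experiment_time=100,
        stochastic=True,
    )
    process_time, store_time = profiler.profile_communication_latency()
    print(f'process updates: {process_time:.2f} s, '
          f'store updates: {store_time:.2f} s')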
def test_hyperdivision(profile: bool = True) -> None:
    total_time = 10
    n_agents = 100
    division_thresholds = [3, 4, 5, 6, 7]  # which values of x trigger division

    # initialize agent composer
    agent_composer = ToyDivider()

    # make the composite
    composite = Composite()
    agent_ids = [str(agent_idx) for agent_idx in range(n_agents)]
    for agent_id in agent_ids:
        divider_config = {
            'divider': {
                'x_division_threshold': random.choice(division_thresholds),
            }
        }
        agent_composite = agent_composer.generate(
            config={
                'agent_id': agent_id,
                **divider_config,
            },
            path=('agents', agent_id))
        composite.merge(agent_composite)

    # add an environment
    environment_process: Processes = {'environment': ToyEnvironment()}
    environment_topology: Topology = {
        'environment': {
            'agents': {
                '_path': ('agents',),
                '*': {
                    'external': ('external', 'GLC')
                }
            },
        }
    }

    # combine the environment and agents
    composite.merge(
        processes=environment_process,
        topology=environment_topology,
    )

    # make the sim, run the sim, retrieve the data
    experiment = Engine(
        processes=composite.processes,
        steps=composite.steps,
        flow=composite.flow,
        topology=composite.topology,
        profile=profile,
    )
    experiment.update(total_time)
    experiment.end()
    data = experiment.emitter.get_data()

    print(f"n agents initial: {n_agents}")
    print(f"n agents final: {len(data[total_time]['agents'].keys())}")
    assert len(data[total_time]['agents'].keys()) > n_agents

    if profile:
        stats = experiment.stats
        stats.strip_dirs().sort_stats(  # type: ignore
            'cumulative', 'cumtime').print_stats(20)

        # make sure view_values is fast
        stats_view_values = stats.get_print_list(  # type: ignore
            ('view_values',))[1]
        view_values_times = stats.stats[  # type: ignore
            stats_view_values[0]][3]
        total_runtime = stats.total_tt  # type: ignore
        assert view_values_times < 0.1 * total_runtime
class ComplexModelSim:
    """Profile Complex Models

    This class lets you initialize and profile the simulation of
    composite models with arbitrary numbers of processes, variables
    per process, and total stores.
    """

    # model complexity
    number_of_processes = DEFAULT_N_PROCESSES
    number_of_variables = DEFAULT_N_VARIABLES
    process_sleep = DEFAULT_PROCESS_SLEEP
    number_of_parallel_processes = 0
    number_of_stores = 10
    number_of_ports = 1
    hierarchy_depth = 1
    experiment_time = DEFAULT_EXPERIMENT_TIME

    # display
    print_top_stats = 4

    # initialize
    composite = None
    experiment = None

    def set_parameters(
            self,
            number_of_processes=None,
            number_of_parallel_processes=None,
            number_of_stores=None,
            number_of_ports=None,
            number_of_variables=None,
            hierarchy_depth=None,
            process_sleep=None,
            print_top_stats=None,
            experiment_time=None,
    ):
        self.number_of_processes = \
            number_of_processes or self.number_of_processes
        self.number_of_parallel_processes = \
            number_of_parallel_processes or self.number_of_parallel_processes
        self.number_of_ports = \
            number_of_ports or self.number_of_ports
        self.number_of_variables = \
            number_of_variables or self.number_of_variables
        self.number_of_stores = \
            number_of_stores or self.number_of_stores
        self.hierarchy_depth = \
            hierarchy_depth or self.hierarchy_depth
        self.process_sleep = \
            process_sleep or self.process_sleep
        self.print_top_stats = \
            print_top_stats or self.print_top_stats
        self.experiment_time = \
            experiment_time or self.experiment_time

    def _generate_composite(self, **kwargs):
        number_of_processes = kwargs.get(
            'number_of_processes', self.number_of_processes)
        number_of_parallel_processes = kwargs.get(
            'number_of_parallel_processes', self.number_of_parallel_processes)
        number_of_stores = kwargs.get(
            'number_of_stores', self.number_of_stores)
        number_of_ports = kwargs.get(
            'number_of_ports', self.number_of_ports)
        number_of_variables = kwargs.get(
            'number_of_variables', self.number_of_variables)
        hierarchy_depth = kwargs.get(
            'hierarchy_depth', self.hierarchy_depth)
        process_sleep = kwargs.get(
            'process_sleep', self.process_sleep)

        composer = ManyVariablesComposite({
            'number_of_processes': number_of_processes,
            'number_of_parallel_processes': number_of_parallel_processes,
            'number_of_stores': number_of_stores,
            'number_of_ports': number_of_ports,
            'number_of_variables': number_of_variables,
            'hierarchy_depth': hierarchy_depth,
            'process_sleep': process_sleep,
        })

        self.composite = composer.generate(**kwargs)

    def _initialize_experiment(self, **kwargs):
        self.experiment = Engine(
            processes=self.composite['processes'],
            topology=self.composite['topology'],
            **kwargs)

    def _run_experiment(self, **kwargs):
        self.experiment.update(kwargs['experiment_time'])
        self.experiment.end()

    def _get_emitter_data(self, **kwargs):
        _ = kwargs
        data = self.experiment.emitter.get_data()
        return data

    def _get_emitter_timeseries(self, **kwargs):
        _ = kwargs
        timeseries = self.experiment.emitter.get_timeseries()
        return timeseries

    def _profile_method(self, method, **kwargs):
        """The main profiling method for the simulation steps.

        Args:
            method: the simulation step to profile,
                for example ``self._run_experiment``.
        """
        print_top_stats = kwargs.get(
            'print_top_stats', self.print_top_stats)
        profiler = cProfile.Profile()
        profiler.enable()
        method(**kwargs)
        profiler.disable()
        stats = pstats.Stats(profiler)
        if print_top_stats:
            stats.sort_stats('tottime').print_stats(print_top_stats)
        return stats

    def profile_communication_latency(self):
        self._generate_composite()
        self._initialize_experiment(display_info=False)

        # profile the experiment
        stats = self._profile_method(
            self._run_experiment,
            experiment_time=self.experiment_time,
            print_top_stats=None)

        # get next_update runtime
        next_update_amount = ("next_update",)
        _, stats_list = stats.get_print_list(next_update_amount)

        process_update_time = 0
        for s in stats_list:
            process_update_time += stats.stats[s][3]

        # get total runtime
        experiment_time = stats.total_tt
        store_update_time = experiment_time - process_update_time

        return process_update_time, store_update_time
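
def _example_complex_model_sim():
    # Hedged usage sketch (illustrative, not part of the original module):
    # compare process update time against store update time while scaling
    # the number of processes. The sizes below are assumptions.
    sim = ComplexModelSim()
    for n_processes in (10, 50, 100):
        sim.set_parameters(
            number_of_processes=n_processes,
            number_of_variables=10,
            experiment_time=10,
        )
        process_time, store_time = sim.profile_communication_latency()
        print(f'{n_processes} processes: '
              f'process updates {process_time:.2f} s, '
              f'store updates {store_time:.2f} s')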
def simulate_bioscrape_cobra(
        division=False,
        stochastic=False,
        initial_glucose=1e1,
        initial_lactose=1e1,
        initial_agent_states=None,
        bounds=None,
        n_bins=None,
        depth=DEPTH,
        diffusion_rate=1e-1,
        jitter_force=1e-5,
        divide_threshold=2000 * units.fg,
        spatial=False,
        external_volume=None,
        n_agents=1,
        halt_threshold=100,
        total_time=100,
        sbml_file=None,
        emitter='timeseries',
        output_type=None,
        parallel=False,
):
    """Main simulation function for BioscrapeCOBRA.

    Args:
        * division (bool): sets whether the agents divide
        * stochastic (bool): load the stochastic lac operon model
        * initial_glucose (float): initial external glucose concentration
        * initial_lactose (float): initial external lactose concentration
        * initial_agent_states (dict): set initial state values
        * bounds (list): size of the environment [x, y] in microns
        * n_bins (list): number of bins in the [x, y] dimensions
        * depth (float): depth of the environment in microns
        * diffusion_rate (float): diffusion rate constant for all molecules,
          in micron^2/sec
        * divide_threshold (float): mass at which cells divide, in fg
        * spatial (bool): use a spatial environment
        * external_volume (float): volume of the external bin, for a
          non-spatial environment
        * n_agents (int): number of initial agents in the environment
        * halt_threshold (int): number of agents at which the simulation
          terminates
        * total_time (float): total simulation time, in seconds
        * sbml_file (str): the file for the Bioscrape process.
          Uses the default if None.
        * emitter (str): type of emitter, 'timeseries' or 'database'
        * output_type (str): 'timeseries' or 'unitless'. If None, return
          the experiment instance
        * parallel (bool): run processes in parallel, useful for large
          compute machines
        * jitter_force (float): random force applied to cell bodies, in pN
    """
    biocobra_composite, initial_composite, initial_state_full = \
        get_bioscrape_cobra_composite(
            division=division,
            stochastic=stochastic,
            initial_glucose=initial_glucose,
            initial_lactose=initial_lactose,
            initial_agent_states=initial_agent_states,
            bounds=bounds,
            n_bins=n_bins,
            depth=depth,
            diffusion_rate=diffusion_rate,
            jitter_force=jitter_force,
            divide_threshold=divide_threshold,
            spatial=spatial,
            external_volume=external_volume,
            n_agents=n_agents,
            sbml_file=sbml_file,
            parallel=parallel)

    # make the experiment
    experiment_id = (f"{'stochastic' if stochastic else 'deterministic'}"
                     f"{'_division' if division else ''}"
                     f"{'_spatial' if spatial else ''}"
                     f"_{timestamp()}")
    experiment_config = {
        'processes': biocobra_composite.processes,
        'topology': biocobra_composite.topology,
        'initial_state': initial_state_full,
        'display_info': False,
        'experiment_id': experiment_id,
        'emit_step': max(BIOSCRAPE_TIMESTEP, COBRA_TIMESTEP),
        'emitter': {'type': emitter}}
    print(f'Initializing experiment {experiment_id}')
    biocobra_experiment = Engine(**experiment_config)

    # run the experiment
    clock_start = clock.time()
    if division:
        # terminate upon reaching total_time or halt_threshold
        sim_step = max(BIOSCRAPE_TIMESTEP, COBRA_TIMESTEP) * 10
        for _ in tqdm(range(0, total_time, sim_step)):
            n_agents = len(biocobra_experiment.state.get_value()['agents'])
            if n_agents < halt_threshold:
                biocobra_experiment.update(sim_step)
    else:
        biocobra_experiment.update(total_time)

    # print runtime and finalize
    clock_finish = clock.time() - clock_start
    print(f'Completed in {clock_finish:.2f} seconds')
    biocobra_experiment.end()

    # retrieve the data
    if output_type == 'timeseries':
        return biocobra_experiment.emitter.get_timeseries(), initial_composite
    if output_type == 'unitless':
        return biocobra_experiment.emitter.get_data_unitless(), initial_composite
    return biocobra_experiment, initial_composite
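
def _example_simulate_bioscrape_cobra():
    # Hedged usage sketch (illustrative, not part of the original module):
    # run the deterministic, non-dividing model for one hour of simulated
    # time and return a timeseries. The arguments shown are assumptions;
    # the remaining parameters fall back to their defaults.
    output, _ = simulate_bioscrape_cobra(
        stochastic=False,
        division=False,
        total_time=3600,
        output_type='timeseries',
    )
    return output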