def test_run_inserted_store() -> None:
    """Build a store through the API, then run it as a simulation."""
    root = Store({})
    # insert two toy processes directly into the store hierarchy
    for proc_name in ('p1', 'p2'):
        root[proc_name] = ToyProcess({'name': proc_name})
    simulation = Engine(store=root)
    simulation.update(1.0)
def test_complex_topology() -> None:
    """Generate the PoQo composite at a nested path and run one second."""
    # make the experiment
    agent_path = ('universe', 'agent')
    composer = PoQo({})
    composite = composer.generate(path=agent_path)
    composite.pop('_schema')
    experiment = Engine(composite=composite)

    # capture the state before the update
    state_before = experiment.state.get_value()
    print('time 0:')
    pp(state_before)

    # simulate for 1 second and capture the state again
    experiment.update(1)
    state_after = experiment.state.get_value()
    print('time 1:')
    pp(state_after)

    # compare the agent sub-states before and after
    agent_before = state_before['universe']['agent']
    agent_after = state_after['universe']['agent']
    assert agent_after['aaa']['a1'] == agent_before['aaa']['a1'] + 1
    assert agent_after['aaa']['x'] == agent_before['aaa']['x'] - 9
    assert agent_after['ccc']['a3'] == agent_before['ccc']['a3'] + 1
def test_add_delete() -> None:
    """AddDelete replaces every sub-store on each timestep.

    Runs the process for 10 time units and asserts that the sets of
    sub-store ids at consecutive emit times never overlap.
    """
    process = AddDelete()
    topology = {
        'sub_stores': ('sub_stores', ),
        'expected': ('expected', ),
    }

    # initial state: n_initial randomly-named sub-stores
    n_initial = 10
    initial_substores = [
        str(random.randint(0, 2**63)) for _ in range(n_initial)
    ]
    initial_state = {
        'sub_stores': {sub_store: 1 for sub_store in initial_substores},
        'expected': initial_substores,
    }

    experiment = Engine(
        processes={'process': process},
        topology={'process': topology},
        initial_state=initial_state,
    )
    experiment.update(10)

    # assert that no sub-store overlaps between time steps: all sub
    # stores should get deleted, and new sub-stores added.
    # (The original used an index loop guarded by `t_index < n_times - 1`
    # and re-wrapped already-set values in set(); zip over consecutive
    # pairs is the idiomatic equivalent.)
    data = experiment.emitter.get_data()
    times = list(data.keys())
    for current_time, next_time in zip(times, times[1:]):
        current_ids = set(data[current_time]['sub_stores'].keys())
        next_ids = set(data[next_time]['sub_stores'].keys())
        assert not current_ids & next_ids
def test_bigraph_view() -> None:
    """Run a TopView step whose 'top' port connects to the hierarchy root."""
    agent_id = '1'
    top_view_steps = {'top_view': TopView()}
    top_view_flow: Flow = {'top_view': []}
    top_view_topology: Topology = {
        'top_view': {
            'top': (),  # connect to the top of the hierarchy
            'other': ('other', ),
        }
    }

    composite = get_toy_transport_in_env_composite(agent_id=agent_id)
    composite.merge(
        steps=top_view_steps,
        topology=top_view_topology,
        flow=top_view_flow)

    # run the simulation
    sim = Engine(
        composite=composite,
        initial_state={'agents': {agent_id: {'external': {'GLC': 10.0}}}})
    sim.update(20)
    data = sim.emitter.get_data()
    print(pf(data))

    # BUG FIX: the original evaluated len(...) and discarded the result;
    # assert the agents store is present and non-empty at the final time
    assert len(data[20.0]['agents']) > 0
def test_run_rewired_store() -> None:
    """Build a store through the API, rewire a port, then simulate it."""
    root = Store({})
    root["p1"] = ToyProcess({'name': 'p1'})
    root["p2"] = ToyProcess({'name': 'p2'})
    # point p1's port1 at the store behind p2's port2
    root["p1"].connect(('port1', ), root['p2', "port2"])
    simulation = Engine(store=root)
    simulation.update(1.0)
def test_topology_ports() -> None:
    """Build the proton composite, run it, and log state snapshots."""
    network = _make_proton()
    sim = Engine(**network)
    # log the configuration before and after the run, plus a divided copy
    log.debug(pf(sim.state.get_config(True)))
    sim.update(10.0)
    log.debug(pf(sim.state.get_config(True)))
    log.debug(pf(sim.state.divide_value()))
def test_multi_port_merge() -> None:
    """Two ports wired to a single store merge their updates."""
    # build and run the experiment
    network = MergePort({}).generate()
    experiment = Engine(
        processes=network['processes'],
        topology=network['topology'],
    )
    experiment.update(2)

    # both ports write to 'aaa/a', so it advances by 3 per step
    actual = experiment.emitter.get_timeseries()
    expected = {'aaa': {'a': [0, 3, 6]}, 'time': [0.0, 1.0, 2.0]}
    assert actual == expected
def test_emit_config() -> None:
    """Exercise the alternate emit options accepted by Engine."""
    network = MergePort({}).generate()
    simulation = Engine(
        processes=network['processes'],
        topology=network['topology'],
        emit_topology=False,
        emit_processes=True,
        emit_config=True,
        progress_bar=True,
        emit_step=2,
    )
    simulation.update(10)
def run_large_initial_emit():
    """Run a large experiment to test the database emitter.

    This requires MongoDB to be configured and running.
    """
    config = {'number_of_processes': 1000, 'number_of_parameters': 1000}
    composer = ManyParametersComposite(config)
    composite = composer.generate()

    settings = {
        'experiment_name': 'large database experiment',
        # f-strings stringify automatically; the str() call was redundant
        'experiment_id': f'large_{uuid.uuid4()}',
        'emitter': 'database',
    }
    experiment = Engine(
        processes=composite['processes'],
        topology=composite['topology'],
        **settings)

    # run the experiment
    experiment.update(10)

    # retrieve the data with data_from_database
    experiment_id = experiment.experiment_id

    # retrieve the data from the emitter
    data = experiment.emitter.get_data()
    assert list(data.keys()) == [
        0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0
    ]

    # retrieve the data directly from the database
    db = get_experiment_database()
    data, experiment_config = data_from_database(experiment_id, db)
    assert 'processes' in experiment_config
    assert 0.0 in data

    # delete the experiment
    delete_experiment_from_database(experiment_id)
def test_run_store_in_experiment() -> None:
    """Put a store in an experiment and run it."""
    store = get_toy_store()

    # retrieve the processes and topology
    processes = store.get_processes()
    topology = store.get_topology()
    _ = processes  # set to _ to pass lint test
    _ = topology

    # run the experiment built directly from the store
    experiment = Engine(store=store)
    experiment.update(10)
    data = experiment.emitter.get_data()

    # the engine's processes must be the store's process values
    for proc_name in ('process1', 'process2'):
        assert experiment.processes[proc_name] == store[proc_name].value
    assert data[10.0] != data[0.0]
    print(data)
def test_environment_view_with_division() -> None:
    """Check that the environment's view tracks agents across divisions.

    The environment view is allowed to lag the agents store by one
    update, but must never be out of date for two updates in a row.
    """
    composite = get_env_view_composite()
    experiment = Engine(
        processes=composite.processes,
        topology=composite.topology)
    experiment.update(10)
    data = experiment.emitter.get_data()

    # confirm that the environment sees the new agents
    once_different = False
    for state in data.values():
        agent_ids = set(state['agents'].keys())
        env_agents = set(state['log_update'].get('agents', {}).keys())
        if env_agents != agent_ids:
            if not once_different:
                # first mismatch is tolerated: the view may lag one update
                once_different = True
            else:
                # BUG FIX: the original constructed this ValueError
                # without raising it, so the check could never fail
                raise ValueError(
                    f'environment sees {env_agents} instead of {agent_ids}')
        else:
            once_different = False
def test_rewire_ports() -> None:
    """Connect a process' ports to different stores."""
    # NOTE: the original built an extra store here and immediately
    # discarded it with a duplicate call; the dead assignment is removed.

    # connect process1's port1 to the store at process3's port1
    store = test_insert_process()
    store['process1'].connect('port1', store['process3']['port1'])
    assert store['process1']['port1'] == store['process3']['port1']

    # connect process2's port2 to store store_A
    store = test_insert_process()
    store['process2'].connect('port2', store['store_A'])
    assert store['process2', 'port2', 'var_a'] == store['store_A', 'var_a']

    # turn variable 'var_a' into 'var_b'
    store = test_insert_process()
    store['process2'].connect(['port2', 'var_a'], store['store_A', 'var_b'])
    assert store['process2', 'port2', 'var_a'] == store['store_A', 'var_b']

    # the last rewired store must still simulate
    sim = Engine(store=store)
    sim.update(1.0)
def run_mother_machine(time=10, out_dir='out'):
    """Run the mother-machine simulation and save plots and a video.

    Args:
        time: total simulation time.
        out_dir: output directory for the plots and the video.
    """
    config = get_mother_machine_config()
    # shared output name (the original used f-strings with no placeholders)
    filename = 'mother_machine'

    # configure the experiment
    agent_ids = config.get('agent_ids', ['0'])

    # get the environment composite
    environment = Lattice(config.get('environment', {}))
    composite = environment.generate({})

    # add the agents
    growth_division = GrowDivide(config.get('growth_division', {}))
    for agent_id in agent_ids:
        agent = growth_division.generate({'agent_id': agent_id})
        composite.merge(composite=agent, path=('agents', agent_id))

    experiment = Engine(
        processes=composite['processes'],
        topology=composite['topology'],
        initial_state=config.get('initial_state', {}),
        progress_bar=True,
    )

    # simulate
    settings = {'total_time': time, 'return_raw_data': True}
    data = simulate_experiment(experiment, settings)

    # agents plot
    plot_settings = {'agents_key': 'agents'}
    plot_agents_multigen(data, plot_settings, out_dir)

    # snapshot plot
    agents, fields = format_snapshot_data(data)
    bounds = config['environment']['multibody']['bounds']
    plot_snapshots(
        bounds,
        agents=agents,
        fields=fields,
        n_snapshots=4,
        out_dir=out_dir,
        filename=filename)

    # make snapshot video
    make_video(
        data,
        bounds,
        plot_type='fields',
        step=100,
        out_dir=out_dir,
        filename=filename,
    )
def simulate_experiment(experiment: Engine,
                        settings: Optional[Dict[str, Any]] = None) -> Dict:
    """Simulate an :term:`Engine`.

    Args:
        experiment: a configured experiment.
        settings: simulation options. Supported keys are ``total_time``
            (simulation duration, default 10) and ``return_raw_data``
            (default False).

    Returns:
        A timeseries of variables from all ports. If ``return_raw_data``
        is True, return the raw data instead.
    """
    settings = settings or {}
    total_time = settings.get('total_time', 10)
    return_raw_data = settings.get('return_raw_data', False)

    # run simulation and shut down any process resources
    experiment.update(total_time)
    experiment.end()

    # return data from emitter
    if return_raw_data:
        return experiment.emitter.get_data()
    return experiment.emitter.get_timeseries()
def test_engine_run_for() -> None:
    """Advance an Engine in fixed increments with ``run_for``.

    Two ToyTransport processes with incommensurate time steps (0.75 and
    1.25) share one topology. After each 1.0-second ``run_for`` call,
    the engine front must hold each process at the last multiple of its
    own time step, and ``complete()`` must then bring every process up
    to the global time.
    """
    total_time = 10.0
    time_interval = 1.0
    timestep1 = 0.75
    timestep2 = 1.25
    topo = {
        'external': ('external', ),
        'internal': ('internal', ),
    }
    composite = Composite({
        'processes': {
            'process1': ToyTransport({'time_step': timestep1}),
            'process2': ToyTransport({'time_step': timestep2}),
        },
        'topology': {
            'process1': topo,
            'process2': topo,
        }
    })
    initial_state = {
        'external': {
            'GLC': 100
        },
    }
    sim = Engine(
        processes=composite.processes,
        topology=composite.topology,
        initial_state=initial_state)

    # run_for should advance global_time by exactly time_interval per call
    time = 0.0
    while sim.global_time < total_time:
        sim.run_for(time_interval)
        time += time_interval
        assert sim.global_time == time

        # check that the front has advanced correctly: each process sits
        # at the last multiple of its own time step <= the global time
        front = sim.front
        for path, advance in front.items():
            expected_time = 0.0
            if path[0] == 'process1':
                expected_time = int(time / timestep1) * timestep1
            elif path[0] == 'process2':
                expected_time = int(time / timestep2) * timestep2
            assert advance['time'] == expected_time, \
                f"front time {advance['time']} " \
                f"is not expected {expected_time}"

    # make all the processes complete
    sim.complete()
    final_time = time
    assert sim.global_time == final_time
    front = sim.front
    for path, advance in front.items():
        assert advance['time'] == sim.global_time, \
            f"process at path {path} did not complete"
def test_parallel() -> None:
    """Run the proton composite with processes running in parallel."""
    parallel_network = _make_proton(parallel=True)
    sim = Engine(**parallel_network)
    # log the configuration before and after the run, plus a divided copy
    log.debug(pf(sim.state.get_config(True)))
    sim.update(10.0)
    log.debug(pf(sim.state.get_config(True)))
    log.debug(pf(sim.state.divide_value()))
    # shut down the parallel process workers
    sim.end()
def test_glob_schema() -> None:
    """Glob schemas must work regardless of process declaration order."""
    composite = get_toy_transport_in_env_composite()
    experiment = Engine(
        processes=composite.processes,
        topology=composite.topology)
    experiment.update(10)

    # declare processes in reverse order and re-run with the same topology
    reversed_processes = {
        'environment': ToyEnvironment(),
        'agents': {'0': {'transport': ToyTransport()}},
    }
    experiment_reverse = Engine(
        processes=reversed_processes,
        topology=composite.topology)
    experiment_reverse.update(10)
def composite_in_experiment(
        composite: Composite,
        settings: Optional[Dict[str, Any]] = None,
        initial_state: Optional[Dict[str, Any]] = None,
) -> Engine:
    """Put a Composite in an Engine.

    Args:
        composite: the :term:`Composite` object.
        settings: a dictionary of options, including composite_config
            for configuring the composite. Additional keywords include
            timeline, environment, and outer_path.
        initial_state: initial state that overrides the defaults.

    Returns:
        an :term:`Engine`.
    """
    # FIX: parameters defaulting to None are now annotated Optional[...]
    settings = settings or {}
    initial_state = initial_state or {}
    processes = composite['processes']
    topology = composite['topology']

    timeline = settings.get('timeline', None)
    if timeline is not None:
        add_timeline(processes, topology, timeline)
        # run long enough to reach the final timeline event
        all_times = [t[0] for t in timeline['timeline']]
        settings['total_time'] = max(all_times)

    environment = settings.get('environment', None)
    if environment is not None:
        add_environment(processes, topology, environment)

    # initialize the experiment, passing through recognized settings
    experiment_config = {
        'processes': processes,
        'topology': topology,
        'initial_state': initial_state,
    }
    for key, setting in settings.items():
        if key in experiment_config_keys:
            experiment_config[key] = setting
    return Engine(**experiment_config)
def glucose_phosphorylation_experiment(config=None):
    """Build an Engine for the injected glucose-phosphorylation model."""
    # overlay the caller's config on the defaults
    config = {
        'injected_glc_phosphorylation': {},
        'emitter': {
            'type': 'timeseries',
        },
        'initial_state': {},
        **(config or {}),
    }

    compartment = InjectedGlcPhosphorylation(
        config['injected_glc_phosphorylation'])
    network = compartment.generate()
    return Engine(
        processes=network['processes'],
        topology=network['topology'],
        emitter=config['emitter'],
        initial_state=config['initial_state'],
    )
def compose_experiment(
    hierarchy: Dict[str, Any],
    settings: Optional[Dict[str, Any]] = None,
    initial_state: Optional[Dict[str, Any]] = None,
) -> Engine:
    """Make an experiment with arbitrarily embedded compartments.

    Args:
        hierarchy: an embedded dictionary mapping the desired topology
            of nodes, with composers declared under a global
            COMPOSER_KEY that maps to a dictionary with 'type',
            'config', and 'topology' for the processes in the Composer.
            Composers include lone processes.
        settings: experiment configuration settings.
        initial_state: the initial state.

    Returns:
        The experiment.
    """
    # make the hierarchy
    composite = initialize_hierarchy(hierarchy)

    experiment_config: Dict[str, Any] = {
        'processes': composite['processes'],
        'topology': composite['topology'],
        'initial_state': initial_state or {},
    }
    # pass through any recognized engine settings
    for key, value in (settings or {}).items():
        if key in experiment_config_keys:
            experiment_config[key] = value
    return Engine(**experiment_config)
def test_profiler() -> None:
    """Run two processes under profile=True and check their runtimes.

    NOTE(review): the runtime windows below imply ProcessA spends
    ~0.6-0.7s and ProcessB ~0.3-0.4s total in next_update over 3
    updates — confirm against the process definitions in
    test_profiler.py.
    """
    engine = Engine(
        processes={
            'processA': ProcessA(),
            'processB': ProcessB(),
        },
        topology={
            'processA': {},
            'processB': {},
        },
        profile=True,
    )
    engine.update(3)
    engine.end()
    assert engine.stats is not None
    stats = engine.stats.strip_dirs()
    # pstats keys are (filename, line number, function name); the line
    # numbers are hard-coded to the next_update definitions in
    # test_profiler.py and must be kept in sync with that file.
    # Index [3] of a stats entry is the cumulative time.
    process_a_runtime = stats.stats[  # type: ignore
        ('test_profiler.py', 17, 'next_update')][3]
    process_b_runtime = stats.stats[  # type: ignore
        ('test_profiler.py', 32, 'next_update')][3]
    # windows are wide enough to tolerate scheduling noise
    assert 0.6 <= process_a_runtime <= 0.7
    assert 0.3 <= process_b_runtime <= 0.4
class ModelProfiler:
    """Profile Bioscrape-COBRA composites"""

    # model complexity
    n_agents = 1
    experiment_time = DEFAULT_EXPERIMENT_TIME
    parallel = False
    reuse_processes = False
    stochastic = False
    division = False
    spatial = False
    emit_step = 1

    # initialize
    composite = None
    experiment = None
    initial_state = None

    def set_parameters(
            self,
            n_agents=None,
            experiment_time=None,
            parallel=None,
            reuse_processes=None,
            emit_step=None,
            stochastic=None,
            division=None,
            spatial=None,
    ):
        """Override profiling parameters; None leaves a value unchanged.

        BUG FIX: the original used ``value or self.attr`` for most
        parameters, which made it impossible to set a flag back to
        False (or a number to 0) once it had been enabled. Explicit
        ``is not None`` checks fix that while keeping the same
        behavior for all non-falsy arguments.
        """
        if n_agents is not None:
            self.n_agents = n_agents
        if experiment_time is not None:
            self.experiment_time = experiment_time
        if parallel is not None:
            self.parallel = parallel
        if reuse_processes is not None:
            self.reuse_processes = reuse_processes
        if emit_step is not None:
            self.emit_step = emit_step
        if stochastic is not None:
            self.stochastic = stochastic
        if division is not None:
            self.division = division
        if spatial is not None:
            self.spatial = spatial

    def _generate_composite(self, **kwargs):
        """Build the Bioscrape-COBRA composite and its initial state."""
        initial_agent_states = [{
            'rates': {
                'k_leak': 0.005  # less leak -> less spontaneous expression
            }
        }]

        self.composite, _, self.initial_state = get_bioscrape_cobra_composite(
            n_agents=self.n_agents,
            initial_agent_states=initial_agent_states,
            stochastic=self.stochastic,
            division=self.division,
            spatial=self.spatial,
            initial_glucose=1e1,
            initial_lactose=5e1,
            depth=0.5,
            diffusion_rate=2e-2,
            jitter_force=1e-5,
            bounds=[30, 30],
            n_bins=[30, 30],
            sbml_file=STOCHASTIC_FILE if self.stochastic
            else DETERMINISTIC_FILE,
            parallel=self.parallel,
            reuse_processes=self.reuse_processes,
        )

    def _initialize_experiment(self, **kwargs):
        """Create the Engine from the generated composite."""
        self.experiment = Engine(
            processes=self.composite['processes'],
            topology=self.composite['topology'],
            initial_state=self.initial_state,
            **kwargs)

    def _run_experiment(self, **kwargs):
        """Advance the experiment by kwargs['experiment_time'] and end it."""
        self.experiment.update(kwargs['experiment_time'])
        self.experiment.end()

    def _get_emitter_data(self, **kwargs):
        """Return the raw emitted data."""
        _ = kwargs
        return self.experiment.emitter.get_data()

    def _get_emitter_timeseries(self, **kwargs):
        """Return the emitted data as a timeseries."""
        _ = kwargs
        return self.experiment.emitter.get_timeseries()

    def _profile_method(self, method, **kwargs):
        """Profile one simulation step.

        Args:
            method: the simulation step, e.g. ``self._run_experiment``.

        Returns:
            the ``pstats.Stats`` for the profiled call.
        """
        profiler = cProfile.Profile()
        profiler.enable()
        method(**kwargs)
        profiler.disable()
        return pstats.Stats(profiler)

    def run_profile(self):
        """Profile each stage of an experiment in sequence."""
        print('GENERATE COMPOSITE')
        self._profile_method(self._generate_composite)

        print('INITIALIZE EXPERIMENT')
        self._profile_method(self._initialize_experiment)

        print('RUN EXPERIMENT')
        self._profile_method(
            self._run_experiment, experiment_time=self.experiment_time)

        print('GET EMITTER DATA')
        self._profile_method(self._get_emitter_data)

    def profile_communication_latency(self):
        """Split simulation runtime into process vs. store update time.

        Returns:
            ``(process_update_time, store_update_time)`` in seconds.
        """
        self._generate_composite()
        self._initialize_experiment(display_info=False)

        # profile the experiment
        stats = self._profile_method(
            self._run_experiment,
            experiment_time=self.experiment_time,
        )

        # total time spent inside process next_update calls
        next_update_amount = ("next_update", )
        _, stats_list = stats.get_print_list(next_update_amount)
        process_update_time = 0
        for s in stats_list:
            # index [3] of a stats entry is the cumulative time
            process_update_time += stats.stats[s][3]

        # everything else is attributed to store updates
        experiment_time = stats.total_tt
        store_update_time = experiment_time - process_update_time

        return process_update_time, store_update_time
def _initialize_experiment(self, **kwargs):
    """Build the Engine from the stored composite and initial state."""
    composite = self.composite
    self.experiment = Engine(
        processes=composite['processes'],
        topology=composite['topology'],
        initial_state=self.initial_state,
        **kwargs)
def emit_control() -> None:
    """Exercise the store_emit option for toggling emits by path."""
    run_time = 5
    composer = PoQo({})

    # turn on emits everywhere (the root path () covers the whole tree)
    sim = Engine(composite=composer.generate(), store_emit={'on': [()]})
    sim.update(run_time)
    data = sim.emitter.get_data()
    assert data[run_time]['bbb'] != {}, 'this emit should be on'
    print(pf(data))

    # turn off emits everywhere
    sim = Engine(composite=composer.generate(), store_emit={'off': [()]})
    sim.update(run_time)
    data = sim.emitter.get_data()
    assert data[run_time]['bbb'] == {}, 'this emit should be off'
    print(pf(data))

    # selectively turn on emits under ('bbb', 'e2') only
    sim = Engine(
        composite=composer.generate(),
        store_emit={
            'on': [(
                'bbb',
                'e2',
            )],
        })
    sim.update(run_time)
    data = sim.emitter.get_data()
    assert data[run_time]['bbb']['e2'] != {}, 'this emit should be on'
    assert data[run_time]['ccc'] == {}, 'this emit should be off'
    print(pf(data))

    # store_emit must also accept None
    sim = Engine(
        composite=composer.generate(),
        store_emit={
            'on': None,
        })
    sim.update(run_time)
def test_hyperdivision(profile: bool = True) -> None:
    """Stress test: many dividing agents sharing one environment.

    Builds a ToyDivider agent per id with a randomly chosen division
    threshold, merges in a ToyEnvironment that views all agents through
    a glob ('*') topology, runs 10 time units, and asserts the
    population grew. With profile=True, also checks that view_values
    stays a small fraction of total runtime.
    """
    total_time = 10
    n_agents = 100
    division_thresholds = [3, 4, 5, 6, 7]  # what values of x triggers division?

    # initialize agent composer
    agent_composer = ToyDivider()

    # make the composite: one ToyDivider per agent id
    composite = Composite()
    agent_ids = [str(agent_idx) for agent_idx in range(n_agents)]
    for agent_id in agent_ids:
        divider_config = {
            'divider': {
                'x_division_threshold': random.choice(division_thresholds),
            }
        }
        agent_composite = agent_composer.generate(
            config={
                'agent_id': agent_id,
                **divider_config,
            },
            path=('agents', agent_id))
        composite.merge(agent_composite)

    # add an environment that views every agent's external GLC
    environment_process: Processes = {'environment': ToyEnvironment()}
    environment_topology: Topology = {
        'environment': {
            'agents': {
                '_path': ('agents', ),
                '*': {
                    'external': ('external', 'GLC')
                }
            },
        }
    }

    # combine the environment and agent
    composite.merge(
        processes=environment_process,
        topology=environment_topology,
    )

    # make the sim, run the sim, retrieve the data
    experiment = Engine(
        processes=composite.processes,
        steps=composite.steps,
        flow=composite.flow,
        topology=composite.topology,
        profile=profile,
    )
    experiment.update(total_time)
    experiment.end()
    data = experiment.emitter.get_data()

    print(f"n agents initial: {n_agents}")
    print(f"n agents final: {len(data[total_time]['agents'].keys())}")
    # at least one division must have occurred
    assert len(data[total_time]['agents'].keys()) > n_agents

    if profile:
        stats = experiment.stats
        stats.strip_dirs().sort_stats(  # type: ignore
            'cumulative', 'cumtime').print_stats(20)

        # make sure view_values is fast: its cumulative time (entry
        # index [3]) must stay under 10% of the total runtime
        stats_view_values = stats.get_print_list(  # type: ignore
            ('view_values', ))[1]
        view_values_times = stats.stats[  # type: ignore
            stats_view_values[0]][3]
        total_runtime = stats.total_tt  # type: ignore
        assert view_values_times < 0.1 * total_runtime
def test_timescales() -> None:
    """Couple a slow (3.0s timestep) and a fast (0.3s) process on one store."""

    class Slow(Process):
        """Updates 'base' once every 3 seconds."""
        name = 'slow'
        defaults = {'timestep': 3.0}

        def __init__(self, config: Optional[dict] = None) -> None:
            super().__init__(config)

        def ports_schema(self) -> Schema:
            return {'state': {'base': {'_default': 1.0}}}

        def next_update(
                self, timestep: Union[float, int], states: State) -> Update:
            base = states['state']['base']
            next_base = timestep * base * 0.1
            return {'state': {'base': next_base}}

    class Fast(Process):
        """Updates 'motion' from the shared 'base' every 0.3 seconds."""
        name = 'fast'
        defaults = {'timestep': 0.3}

        def __init__(self, config: Optional[dict] = None) -> None:
            super().__init__(config)

        def ports_schema(self) -> Schema:
            return {
                'state': {
                    'base': {
                        '_default': 1.0
                    },
                    'motion': {
                        '_default': 0.0
                    }
                }
            }

        def next_update(
                self, timestep: Union[float, int], states: State) -> Update:
            base = states['state']['base']
            motion = timestep * base * 0.001
            return {'state': {'motion': motion}}

    # both processes share the same 'state' store
    processes = {'slow': Slow(), 'fast': Fast()}
    states = {'state': {'base': 1.0, 'motion': 0.0}}
    topology: Topology = {
        'slow': {
            'state': ('state', )
        },
        'fast': {
            'state': ('state', )
        }
    }
    # 'null' emitter: nothing is recorded; this test only checks it runs
    emitter = {'type': 'null'}
    experiment = Engine(
        processes=processes,
        topology=topology,
        emitter=emitter,
        initial_state=states)
    experiment.update(10.0)
def test_2_store_1_port() -> None:
    """
    Split one port of a processes into two stores
    """

    class OnePort(Process):
        """Writes to two variables through a single port 'A'."""
        name = 'one_port'

        def ports_schema(self) -> Schema:
            return {
                'A': {
                    'a': {'_default': 0, '_emit': True},
                    'b': {'_default': 0, '_emit': True},
                }
            }

        def next_update(
                self, timestep: Union[float, int], states: State) -> Update:
            return {'A': {'a': 1, 'b': 2}}

    class SplitPort(Composer):
        """splits OnePort's ports into two stores"""
        name = 'split_port_composer'

        def generate_processes(
                self, config: Optional[dict]) -> Dict[str, Any]:
            return {'one_port': OnePort({})}

        def generate_topology(self, config: Optional[dict]) -> Topology:
            # variable 'a' goes to internal/a, while 'b' goes to external/a
            return {
                'one_port': {
                    'A': {
                        'a': ('internal', 'a'),
                        'b': ('external', 'a'),
                    }
                }
            }

    # run the experiment
    network = SplitPort({}).generate()
    experiment = Engine(
        processes=network['processes'],
        topology=network['topology'],
    )
    experiment.update(2)

    # 'a' gains +1 per step internally; 'b' adds +2 per step to external/a
    actual = experiment.emitter.get_timeseries()
    expected = {
        'external': {'a': [0, 2, 4]},
        'internal': {'a': [0, 1, 2]},
        'time': [0.0, 1.0, 2.0],
    }
    assert actual == expected
def test_units() -> None:
    """Emit and retrieve stores holding pint unit Quantities."""

    class UnitsMicrometer(Process):
        """Writes micrometer quantities, a string, and a plain int."""
        name = 'units_micrometer'

        def ports_schema(self) -> Schema:
            return {
                'A': {
                    'a': {
                        '_default': 0 * units.um,
                        '_emit': True
                    },
                    'b': {
                        '_default': 'string b',
                        '_emit': True,
                    },
                    'c': {
                        '_default': 0,
                        '_emit': True,
                    }
                }
            }

        def next_update(
                self, timestep: Union[float, int], states: State) -> Update:
            return {
                'A': {
                    'a': 1 * units.um,
                    'c': 1,
                }
            }

    class UnitsMillimeter(Process):
        """Writes millimeter quantities into the same store."""
        name = 'units_millimeter'

        def ports_schema(self) -> Schema:
            # 'a' deliberately declares no default here; the default
            # comes from UnitsMicrometer's schema for the shared store
            return {
                'A': {
                    'a': {
                        # '_default': 0 * units.mm,
                        '_emit': True
                    }
                }
            }

        def next_update(
                self, timestep: Union[float, int], states: State) -> Update:
            return {'A': {'a': 1 * units.mm}}

    class MultiUnits(Composer):
        """Wires both unit processes to the shared 'aaa' store."""
        name = 'multi_units_composer'

        def generate_processes(
                self, config: Optional[dict]) -> Dict[str, Any]:
            return {
                'units_micrometer': UnitsMicrometer({}),
                'units_millimeter': UnitsMillimeter({})
            }

        def generate_topology(self, config: Optional[dict]) -> Topology:
            return {
                'units_micrometer': {
                    'A': ('aaa', )
                },
                'units_millimeter': {
                    'A': ('aaa', )
                }
            }

    # run experiment
    multi_unit = MultiUnits({})
    network = multi_unit.generate()
    exp = Engine(**{
        'processes': network['processes'],
        'topology': network['topology']
    })
    exp.update(5)

    # exercise each emitter retrieval mode in turn
    timeseries = exp.emitter.get_timeseries()
    print('TIMESERIES')
    pp(timeseries)

    data = exp.emitter.get_data()
    print('DATA')
    pp(data)

    data_deserialized = exp.emitter.get_data_deserialized()
    print('DESERIALIZED')
    pp(data_deserialized)

    data_unitless = exp.emitter.get_data_unitless()
    print('UNITLESS')
    pp(data_unitless)

    # query a subset of paths from the emitter
    query = [('aaa', 'a'), ('aaa', 'c')]
    query_data = exp.emitter.get_data(query)
    print('QUERY DATA')
    pp(query_data)
def test_runtime_order() -> None:
    """Check the execution order of processes, steps, and derivers.

    Every component appends its name to a shared execution log; the log
    is then consumed group by group, where tuples assert an exact order
    and sets assert unordered (concurrent) groups.
    """

    class RuntimeOrderProcess(Process):
        """Logs its configured name on each next_update."""

        def ports_schema(self) -> Schema:
            return {'store': {'var': {'_default': 0}}}

        def next_update(self, timestep: float, states: State) -> Update:
            _ = states
            self.parameters['execution_log'].append(self.name)
            return {}

    class RuntimeOrderStep(Step):
        """Logs its configured name on each next_update."""

        def ports_schema(self) -> Schema:
            return {'store': {'var': {'_default': 0}}}

        def next_update(self, timestep: float, states: State) -> Update:
            _ = states
            self.parameters['execution_log'].append(self.name)
            return {}

    class RuntimeOrderDeriver(Deriver):
        """Logs its configured name on each next_update."""

        def ports_schema(self) -> Schema:
            return {'store': {'var': {'_default': 0}}}

        def next_update(self, timestep: float, states: State) -> Update:
            _ = states
            self.parameters['execution_log'].append(self.name)
            return {}

    class RuntimeOrderComposer(Composer):
        """Two processes (1s and 2s steps), a deriver, and three steps."""

        def generate_processes(
                self, config: Optional[dict]) -> Dict[str, Any]:
            config = cast(dict, config or {})
            proc1 = RuntimeOrderProcess({
                'name': 'process1',
                'time_step': 1,
                'execution_log': config['execution_log'],
            })
            proc2 = RuntimeOrderProcess({
                'name': 'process2',
                'time_step': 2,
                'execution_log': config['execution_log'],
            })
            deriver = RuntimeOrderDeriver({
                'name': 'deriver',
                'execution_log': config['execution_log'],
            })
            return {
                'p1': proc1,
                'p2': proc2,
                'd': deriver,
            }

        def generate_steps(self, config: Optional[dict]) -> Steps:
            config = config or {}
            step1 = RuntimeOrderStep({
                'name': 'step1',
                'execution_log': config['execution_log'],
            })
            step2 = RuntimeOrderStep({
                'name': 'step2',
                'execution_log': config['execution_log'],
            })
            step3 = RuntimeOrderStep({
                'name': 'step3',
                'execution_log': config['execution_log'],
            })
            return {
                's1': step1,
                's2': step2,
                's3': step3,
            }

        def generate_flow(self, config: Optional[dict]) -> Steps:
            config = config or {}
            # step1 runs first; steps 2 and 3 both depend on step1
            return {
                's1': [],
                's2': [('s1', )],
                's3': [('s1', )],
            }

        def generate_topology(self, config: Optional[dict]) -> Topology:
            return {
                'p1': {
                    'store': ('store', ),
                },
                'p2': {
                    'store': ('store', ),
                },
                'd': {
                    'store': ('store', ),
                },
                's1': {
                    'store': ('store', ),
                },
                's2': {
                    'store': ('store', ),
                },
                's3': {
                    'store': ('store', ),
                },
            }

    execution_log: List[str] = []
    composer = RuntimeOrderComposer()
    composite = composer.generate({'execution_log': execution_log})
    experiment = Engine(
        processes=composite.processes,
        steps=composite.steps,
        flow=composite.flow,
        topology=composite.topology,
    )
    experiment.update(4)
    # tuples are exact-order groups; sets are unordered concurrent groups
    expected_log = [
        ('deriver', 'step1'), {'step2', 'step3'},
        {'process1', 'process2'},
        ('deriver', 'step1'), {'step2', 'step3'},
        {'process1'},
        ('deriver', 'step1'), {'step2', 'step3'},
        {'process1', 'process2'},
        ('deriver', 'step1'), {'step2', 'step3'},
        {'process1'},
        ('deriver', 'step1'), {'step2', 'step3'},
    ]
    # consume the log group by group and compare
    for expected_group in expected_log:
        num = len(expected_group)
        group = execution_log[0:num]
        execution_log = execution_log[num:]
        if isinstance(expected_group, tuple):
            assert tuple(group) == expected_group
        elif isinstance(expected_group, set):
            assert set(group) == expected_group
def test_custom_divider() -> None:
    """ToyDividerProcess has a custom `split_divider`.

    The agent's x grows by 1 per step with 2x = 2 * x. After x exceeds
    the threshold of 3, the agent divides: each daughter starts with
    half of the mother's x, and daughter ids append one digit to the
    mother's id ('1' -> '10' and '11').
    """
    agent_id = '1'
    composer = ToyDivider({
        'agent_id': agent_id,
        'divider': {
            'x_division_threshold': 3,
        }
    })
    composite = composer.generate(path=('agents', agent_id))

    experiment = Engine(
        processes=composite.processes,
        steps=composite.steps,
        flow=composite.flow,
        topology=composite.topology,
    )
    experiment.update(8)
    data = experiment.emitter.get_data()

    # divisions appear at t=5 (one agent -> two) and t=8 (two -> four)
    expected_data = {
        0.0: {'agents': {'1': {'variable': {'x': 0, '2x': 0}}}},
        1.0: {'agents': {'1': {'variable': {'x': 1, '2x': 2}}}},
        2.0: {'agents': {'1': {'variable': {'x': 2, '2x': 4}}}},
        3.0: {'agents': {'1': {'variable': {'x': 3, '2x': 6}}}},
        4.0: {'agents': {'1': {'variable': {'x': 4, '2x': 8}}}},
        5.0: {
            'agents': {
                '10': {'variable': {'x': 2, '2x': 4}},
                '11': {'variable': {'x': 2, '2x': 4}}
            }
        },
        6.0: {
            'agents': {
                '10': {'variable': {'x': 3, '2x': 6}},
                '11': {'variable': {'x': 3, '2x': 6}}
            }
        },
        7.0: {
            'agents': {
                '10': {'variable': {'x': 4, '2x': 8}},
                '11': {'variable': {'x': 4, '2x': 8}}
            }
        },
        8.0: {
            'agents': {
                '100': {'variable': {'x': 2, '2x': 4}},
                '101': {'variable': {'x': 2, '2x': 4}},
                '110': {'variable': {'x': 2, '2x': 4}},
                '111': {'variable': {'x': 2, '2x': 4}}
            }
        }
    }
    assert data == expected_data