Example #1
    def test_get_sim_config(self):
        self.assertEqual(SimulationConfig(5.),
                         SimulationEngine._get_sim_config(time_max=5.))

        config_dict = dict(time_max=5., time_init=2.)
        self.assertEqual(
            SimulationConfig(5., 2.),
            SimulationEngine._get_sim_config(config_dict=config_dict))

        with self.assertRaisesRegex(
                SimulatorError,
                'time_max, sim_config, or config_dict must be provided'):
            SimulationEngine._get_sim_config()

        config_dict = dict(time_init=2.)
        with self.assertRaisesRegex(
                SimulatorError,
                'at most 1 of time_max, sim_config, or config_dict'):
            SimulationEngine._get_sim_config(100, config_dict=config_dict)

        simulation_config = SimulationConfig(10)
        with self.assertRaisesRegex(
                SimulatorError,
                'sim_config is not provided, sim_config= is probably needed'):
            SimulationEngine._get_sim_config(simulation_config)

        config_dict = dict(time_init=2.)
        with self.assertRaisesRegex(
                SimulatorError, 'time_max must be provided in config_dict'):
            SimulationEngine._get_sim_config(config_dict=config_dict)
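
The test above exercises the three mutually exclusive ways a simulation run can be configured. A minimal sketch of the corresponding simulate() calls, assuming simulator is an initialized SimulationEngine that already contains at least one simulation object:

    # pass exactly one of time_max, sim_config, or config_dict
    simulator.simulate(5.)                                            # bare time_max
    simulator.simulate(sim_config=SimulationConfig(5., 2.))           # a SimulationConfig
    simulator.simulate(config_dict=dict(time_max=5., time_init=2.))   # a plain dict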
Example #2
    def __init__(self, model, wc_sim_config, options=None):
        """
        Args:
            model (:obj:`Model`): the model being simulated
            wc_sim_config (:obj:`WCSimulationConfig`): a whole-cell simulation configuration
            options (:obj:`dict`, optional): options for submodels, keyed by submodel class name
        """
        # initialize simulation infrastructure
        self.simulation = SimulationEngine()
        self.random_state = RandomStateManager.instance()

        # create simulation attributes
        self.model = model
        self.wc_sim_config = wc_sim_config
        self.options = options

        self._skipped_submodels = self.prepare_skipped_submodels()

        # a model without submodels cannot be simulated
        submodel_ids = set(
            [submodel.id for submodel in self.model.get_submodels()])
        if not submodel_ids - self.skipped_submodels():
            raise MultialgorithmError(
                f"model {self.model.id} cannot be simulated because it contains"
                f" no submodels")
Example #3
    def test_profiling(self):
        existing_levels = self.suspend_logging(self.log_names)
        simulation_engine = SimulationEngine()
        num_sim_objs = 20
        self.prep_simulation(simulation_engine, num_sim_objs)
        end_sim_time = 200
        expected_text = [
            'function calls', 'Ordered by: internal time',
            'filename:lineno(function)'
        ]
        sim_config_dict = dict(time_max=end_sim_time,
                               output_dir=self.out_dir,
                               profile=True)
        stats = simulation_engine.simulate(
            config_dict=sim_config_dict).profile_stats
        self.assertTrue(isinstance(stats, pstats.Stats))
        with open(self.measurements_pathname, 'r') as measurements_file:
            measurements = measurements_file.read()
        for text in expected_text:
            self.assertIn(text, measurements)

        sim_config_dict = dict(time_max=end_sim_time, profile=True)
        with CaptureOutput(relay=False) as capturer:
            stats = simulation_engine.simulate(
                config_dict=sim_config_dict).profile_stats
            for text in expected_text:
                self.assertIn(text, capturer.get_text())
        self.assertTrue(isinstance(stats, pstats.Stats))
        self.restore_logging_levels(self.log_names, existing_levels)
Example #4
    def test_performance(self):
        existing_levels = self.suspend_logging(self.log_names)
        simulation_engine = SimulationEngine()
        end_sim_time = 100
        num_sim_objs = 4
        max_num_profile_objects = 300
        max_num_sim_objs = 5000
        print()
        print(
            f"Performance test of cyclical messaging network: end simulation time: {end_sim_time}"
        )
        unprofiled_perf = [
            "\n#sim obs\t# events\trun time (s)\tevents/s".expandtabs(15)
        ]

        while num_sim_objs < max_num_sim_objs:

            # measure execution time
            self.prep_simulation(simulation_engine, num_sim_objs)
            start_time = time.process_time()
            num_events = simulation_engine.simulate(end_sim_time).num_events
            run_time = time.process_time() - start_time
            self.assertEqual(num_sim_objs * end_sim_time, num_events)
            unprofiled_perf.append("{}\t{}\t{:8.3f}\t{:8.0f}".format(
                num_sim_objs, num_events, run_time,
                num_events / run_time).expandtabs(15))

            # profile
            if num_sim_objs < max_num_profile_objects:
                self.prep_simulation(simulation_engine, num_sim_objs)
                out_file = os.path.join(
                    self.out_dir, "profile_out_{}.out".format(num_sim_objs))
                locals = {
                    'simulation_engine': simulation_engine,
                    'end_sim_time': end_sim_time
                }
                cProfile.runctx(
                    'num_events = simulation_engine.simulate(end_sim_time)',
                    {},
                    locals,
                    filename=out_file)
                profile = pstats.Stats(out_file)
                print(f"Profile for {num_sim_objs} simulation objects:")
                profile.strip_dirs().sort_stats('cumulative').print_stats(20)

            num_sim_objs *= 4

        self.restore_logging_levels(self.log_names, existing_levels)
        performance_log = os.path.join(os.path.dirname(__file__), 'results',
                                       'perf_results',
                                       'de_sim_performance_log.txt')
        with open(performance_log, 'a') as perf_log:
            today = datetime.today().strftime('%Y-%m-%d')
            print(f'Performance summary on {today}', end='', file=perf_log)
            print("\n".join(unprofiled_perf), file=perf_log)
            print(file=perf_log)

        print(f'Performance summary, written to {performance_log}')
        print("\n".join(unprofiled_perf))
Example #5
 def make_sim_w_skeleton_submodel(self, lang_submodel, behavior):
     self.simulator = SimulationEngine()
     # concatenate tuples in fn call for Py 2.7: see https://stackoverflow.com/a/12830036
     skeleton_submodel = SkeletonSubmodel(
         *(make_dynamic_submodel_params(self.model, lang_submodel) + (behavior,)))
     self.simulator.add_object(skeleton_submodel)
     self.simulator.initialize()
     return skeleton_submodel
Example #6
 def setUp(self):
     # create simulator
     self.simulator = SimulationEngine()
     self.out_dir = tempfile.mkdtemp()
     self.log_names = ['de_sim.debug.file', 'de_sim.plot.file']
     measurements_file = core.get_config()['de_sim']['measurements_file']
     self.measurements_pathname = os.path.join(self.out_dir,
                                               measurements_file)
Example #7
    def setUp(self):
        self.checkpoint_dir = tempfile.mkdtemp()

        self.simulator = SimulationEngine()
        self.a = 4
        self.b = 3
        self.state = SharedValue(self.b)
        self.update_period = 3
        self.updating_obj = PeriodicLinearUpdatingSimuObj(
            'self.updating_obj', self.update_period, self.state, self.a,
            self.b)
        self.checkpoint_period = 11
Example #8
 def test_progress_bar(self):
     simulator = SimulationEngine()
     simulator.add_object(PeriodicSimulationObject('name', 1))
     simulator.initialize()
     print('\nTesting progress bar:', file=sys.stderr)
     sys.stderr.flush()
     with CaptureOutput(relay=True) as capturer:
         try:
             time_max = 10
             config_dict = dict(time_max=time_max, progress=True)
             self.assertEqual(
                 simulator.simulate(config_dict=config_dict).num_events,
                 time_max + 1)
             self.assertTrue(f"/{time_max}" in capturer.get_text())
             self.assertTrue("time_max" in capturer.get_text())
         except ValueError as e:
             if str(e) == 'I/O operation on closed file':
                 pass
                 # This ValueError is raised because progressbar expects sys.stderr to remain open
                 # for an extended time period but karr_lab_build_utils run-tests has closed it.
                 # Since SimulationProgressBar works and passes tests under naked pytest, and
                 # progressbar does not want to address the conflict over sys.stderr
                 # (see https://github.com/WoLpH/python-progressbar/issues/202) we let this
                 # test pass under karr_lab_build_utils.
             else:
                 self.fail('test_progress failed for unknown reason')
Example #9
    def test_reproducibility(self):
        self.simulator = SimulationEngine()

        # comprehensive reproducibility test:
        # test whether the simulation engine deterministically delivers events to objects
        # run a simulation in which sim objects execute multiple concurrent events that contain
        # messages with different types, and messages that have the same type and different contents
        # test all types of event and message sorting
        num_sim_objs = TestSimulationReproducibility.NUM_SIM_OBJS
        for i in range(num_sim_objs):
            obj_num = i + 1
            self.simulator.add_object(
                ReproducibleTestSimulationObject(obj_name(obj_num), obj_num,
                                                 num_sim_objs))
        self.simulator.initialize()
        self.simulator.simulate(30)
        for sim_obj in self.simulator.get_objects():
            self.assertEqual(0, sim_obj.delicates_before_hello)
            self.assertEqual(0, sim_obj.disordered_delicates)
Example #10
    def test_non_zero_time_init(self):

        for time_init in [-3, 2]:
            simulator = SimulationEngine()
            time_max = 5
            period = 1
            pso = SpecialPeriodicSimulationObject('pso', period, time_init)
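            # with period = 1, events are expected at time_init, time_init + 1, ..., time_max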
            expected_times = list(
                np.linspace(time_init, time_max, time_max - time_init + 1))
            simulator.add_objects([pso])
            simulator.initialize()
            simulation_config = SimulationConfig(time_max, time_init)
            simulator.simulate(sim_config=simulation_config)
            self.assertEqual(expected_times, pso.times)
Example #11
    def main(args):

        # create a simulator
        simulator = SimulationEngine()

        # create a simulation object and add it to the simulation
        simulator.add_object(
            MinimalSimulationObject('minimal_sim_obj', args.delay))

        # run the simulation
        simulator.initialize()
        num_events = simulator.simulate(args.time_max)
        return (num_events)
Example #12
    def main(args):

        # create a simulator
        simulator = SimulationEngine()

        # create simulation objects, and send each one an initial event message to self
        for obj_id in range(args.num_phold_procs):
            phold_obj = PholdSimulationObject(obj_name(obj_id), args)
            simulator.add_object(phold_obj)

        # run the simulation
        simulator.initialize()
        event_num = simulator.simulate(args.time_max).num_events
        sys.stderr.write("Executed {} events.\n".format(event_num))
        return event_num
Example #13
    def main(sir_class, time_max, seed, **sir_args):

        # create a simulator
        simulator = SimulationEngine()

        # create a SIR instance
        sir = sir_class(**sir_args)
        simulator.add_object(sir)

        # initialize simulation, which sends the SIR instance an initial event message
        simulator.initialize()

        # run the simulation
        event_num = simulator.simulate(time_max).num_events
        print("Executed {} events.\n".format(event_num))
        return sir
Example #14
    def test_TemplatePeriodicSimulationObject(self):

        simulator = SimulationEngine()
        time_max = 5
        expected = []

        # int period
        period = 1
        pso_1 = SpecialPeriodicSimulationObject('pso_1', period)
        expected.append(np.linspace(0, time_max, time_max + 1))

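        # fractional period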
        period = .1
        pso_2 = SpecialPeriodicSimulationObject('pso_2', period)
        expected.append([t / 10 for t in range(time_max * 10 + 1)])

        psos = [pso_1, pso_2]
        simulator.add_objects(psos)
        simulator.initialize()
        simulator.simulate(time_max)

        for pso, expect in zip(psos, expected):
            for pso_time, expect_time in zip(pso.times, expect):
                self.assertTrue(math.isclose(pso_time, expect_time))
Example #15
class TestSkeletonSubmodel(unittest.TestCase):

    def setUp(self):
        warnings.simplefilter("ignore")
        self.MODEL_FILENAME = os.path.join(os.path.dirname(__file__), 'fixtures',
                                           'test_submodel_no_shared_species.xlsx')
        self.model = Reader().run(self.MODEL_FILENAME)[Model][0]
        prepare_model(self.model)

    def make_sim_w_skeleton_submodel(self, lang_submodel, behavior):
        self.simulator = SimulationEngine()
        # concatenate tuples in fn call for Py 2.7: see https://stackoverflow.com/a/12830036
        skeleton_submodel = SkeletonSubmodel(
            *(make_dynamic_submodel_params(self.model, lang_submodel) + (behavior,)))
        self.simulator.add_object(skeleton_submodel)
        self.simulator.initialize()
        return skeleton_submodel

    def test_skeleton_submodel(self):
        behavior = {SkeletonSubmodel.INTER_REACTION_TIME: 2}
        lang_submodel = self.model.get_submodels()[0]
        for conc in self.model.distribution_init_concentrations:
            conc.std = 0

        time_max = 100
        skeleton_submodel = self.make_sim_w_skeleton_submodel(lang_submodel, behavior)
        self.assertEqual(self.simulator.simulate(time_max),
                         time_max / behavior[SkeletonSubmodel.INTER_REACTION_TIME])

        behavior = {SkeletonSubmodel.INTER_REACTION_TIME: 2,
                    SkeletonSubmodel.REACTION_TO_EXECUTE: 0}    # reaction #0 makes one more 'species_1[c]'
        skeleton_submodel = self.make_sim_w_skeleton_submodel(lang_submodel, behavior)
        interval = 10
        pop_before = skeleton_submodel.local_species_population.read_one(0, 'species_1[c]')
        for time_max in range(interval, 5 * interval, interval):
            self.simulator.simulate(time_max)
            pop_after = skeleton_submodel.local_species_population.read_one(time_max, 'species_1[c]')
            delta = pop_after - pop_before
            self.assertEqual(delta, interval / behavior[SkeletonSubmodel.INTER_REACTION_TIME])
            pop_before = pop_after
Example #16
class TestCheckpointSimulationObjects(unittest.TestCase):
    def setUp(self):
        self.checkpoint_dir = tempfile.mkdtemp()

        self.simulator = SimulationEngine()
        self.a = 4
        self.b = 3
        self.state = SharedValue(self.b)
        self.update_period = 3
        self.updating_obj = PeriodicLinearUpdatingSimuObj(
            'self.updating_obj', self.update_period, self.state, self.a,
            self.b)
        self.checkpoint_period = 11

    def tearDown(self):
        shutil.rmtree(self.checkpoint_dir)

    def test_abstract_checkpoint_simulation_object(self):
        '''
        Run a simulation with a subclass of AbstractCheckpointSimulationObject and another object.
        Take checkpoints and test them.
        '''

        checkpoints = []
        checkpointing_obj = PeriodicCheckpointSimuObj('checkpointing_obj',
                                                      self.checkpoint_period,
                                                      self.state, checkpoints)
        self.simulator.add_objects([self.updating_obj, checkpointing_obj])
        self.simulator.initialize()
        run_time = 100
        self.simulator.run(run_time)
        checkpointing_obj.create_checkpoint()
        for i in range(1 + int(run_time / self.checkpoint_period)):
            time, value = checkpoints[i]
            self.assertEqual(time, i * self.checkpoint_period)
            # updating_obj sets the shared value to a * time + b, at the instants 0, update_period, 2 * update_period, ...
            # checkpointing_obj samples the value at times unsynchronized with updating_obj
            # therefore, for 0<a, the sampled values are at most a * update_period less than the line a * time + b
            linear_prediction = self.a * self.checkpoint_period * i + self.b
            self.assertTrue(
                linear_prediction -
                self.a * self.update_period <= value <= linear_prediction)

    def test_checkpoint_simulation_object(self):
        # Run a simulation with a CheckpointSimulationObject and another object.
        # Take checkpoints and test them.

        # prepare
        checkpointing_obj = CheckpointSimulationObject('checkpointing_obj',
                                                       self.checkpoint_period,
                                                       self.checkpoint_dir,
                                                       self.state)
        self.simulator.add_objects([self.updating_obj, checkpointing_obj])
        self.simulator.initialize()

        def endpoints(duration, period):
            # the number of time points in 0, period, 2 * period, ..., int(duration / period) * period
            quotient = duration / period
            return int(quotient) + 1

        # run
        run_time = 22
        expected_num_events = endpoints(run_time,
                                        self.update_period) + endpoints(
                                            run_time, self.checkpoint_period)
        num_events = self.simulator.run(run_time).num_events

        # check results
        self.assertEqual(expected_num_events, num_events)
        expected_checkpoint_times = [
            float(t) for t in range(
                0,
                self.checkpoint_period *
                int(run_time / self.checkpoint_period) +
                1, self.checkpoint_period)
        ]
        access_checkpoints = AccessCheckpoints(self.checkpoint_dir)
        checkpoints = access_checkpoints.list_checkpoints()
        self.assertEqual(expected_checkpoint_times, checkpoints)
        checkpoint = access_checkpoints.get_checkpoint()
        self.assertEqual(checkpoint,
                         access_checkpoints.get_checkpoint(time=run_time))

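        # as in the previous test, each checkpointed value lies within a * update_period below the line a * time + b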
        for i in range(1 + int(run_time / self.checkpoint_period)):
            time = i * self.checkpoint_period
            state_value = access_checkpoints.get_checkpoint(time=time).state
            max_value = self.a * self.checkpoint_period * i + self.b
            self.assertTrue(
                max_value -
                self.a * self.update_period <= state_value <= max_value)

    def test_checkpoint_simulation_object_exception(self):
        with self.assertRaises(SimulatorError):
            PeriodicCheckpointSimuObj('', 0, None, None)
Example #17
    def test_simulation_stop_condition(self):
        simulator = SimulationEngine()
        # 1 event/sec:
        simulator.add_object(PeriodicSimulationObject('name', 1))
        simulator.initialize()
        time_max = 10
        # execute to time <= time_max, with 1st event at time = 1
        self.assertEqual(simulator.simulate(time_max).num_events, time_max + 1)

        __stop_cond_end = 3

        def stop_cond_eg(time):
            return __stop_cond_end <= time

        simulator = SimulationEngine()
        simulator.add_object(PeriodicSimulationObject('name', 1))
        simulator.initialize()
        sim_config = SimulationConfig(time_max)
        sim_config.stop_condition = stop_cond_eg
        # because the simulation is executing one event / sec, the number of events should equal the stop time plus 1
        self.assertEqual(
            simulator.simulate(sim_config=sim_config).num_events,
            __stop_cond_end + 1)
Example #18
class MultialgorithmSimulation(object):
    """ Initialize a multialgorithm simulation from a language model and run-time parameters

    Create a simulation from a model described by a `wc_lang` `Model`.

    Attributes:
        model (:obj:`Model`): a model description
        args (:obj:`dict`): parameters for the simulation; if `results_dir` is an entry in `args`,
            then `checkpoint_period` must also be included
        simulation (:obj:`SimulationEngine`): the initialized simulation
        checkpointing_sim_obj (:obj:`MultialgorithmicCheckpointingSimObj`): the checkpointing object;
            `None` if absent
        random_state (:obj:`numpy.random.RandomState`): a random state
        init_populations (:obj:`dict` from species id to population): the initial populations of
            species, derived from the specification in `model`
        local_species_population (:obj:`LocalSpeciesPopulation`): a shared species population for the
            multialgorithm simulation
        dynamic_model (:obj:`DynamicModel`): the dynamic state of a model being simulated
        temp_dynamic_compartments (:obj:`dict`): the simulation's `DynamicCompartment`s, one for each
            compartment in `model`; temporary attribute used until :obj:`DynamicModel` is made
        _skipped_submodels (:obj:`set` of :obj:`str`): submodels that won't be run, identified by their ids
    """
    def __init__(self, model, wc_sim_config, options=None):
        """
        Args:
            model (:obj:`Model`): the model being simulated
            wc_sim_config (:obj:`WCSimulationConfig`): a whole-cell simulation configuration
            options (:obj:`dict`, optional): options for submodels, keyed by submodel class name
        """
        # initialize simulation infrastructure
        self.simulation = SimulationEngine()
        self.random_state = RandomStateManager.instance()

        # create simulation attributes
        self.model = model
        self.wc_sim_config = wc_sim_config
        self.options = options

        self._skipped_submodels = self.prepare_skipped_submodels()

        # a model without submodels cannot be simulated
        submodel_ids = set(
            [submodel.id for submodel in self.model.get_submodels()])
        if not submodel_ids - self.skipped_submodels():
            raise MultialgorithmError(
                f"model {self.model.id} cannot be simulated because it contains"
                f" no submodels")

    def prepare_skipped_submodels(self):
        """ Prepare the IDs of the submodels that will not be run

        Returns:
            :obj:`set` of :obj:`str`: the IDs of submodels that will not run
        """
        if self.wc_sim_config.submodels_to_skip:
            submodels_to_skip = set(self.wc_sim_config.submodels_to_skip)
            submodel_ids = set(
                [submodel.id for submodel in self.model.get_submodels()])
            if submodels_to_skip - submodel_ids:
                raise MultialgorithmError(
                    f"'submodels_to_skip' contains submodels that aren't in the model: "
                    f"{submodels_to_skip - submodel_ids}")
            return submodels_to_skip
        else:
            return set()

    def skipped_submodels(self):
        """ IDs of the submodels that will not be run

        Returns:
            :obj:`set` of :obj:`str`: the IDs of submodels that will not be run
        """
        return self._skipped_submodels

    def build_simulation(self):
        """ Prepare a multialgorithm simulation

        Returns:
            :obj:`tuple` of (`SimulationEngine`, `DynamicModel`): an initialized simulation and its
                dynamic model
        """
        self.set_simultaneous_execution_priorities()
        self.initialize_components()
        self.initialize_infrastructure()
        return (self.simulation, self.dynamic_model)

    def initialize_components(self):
        """ Initialize the biological components of a simulation
        """
        self.create_dynamic_compartments()
        self.init_species_pop_from_distribution()
        self.local_species_population = self.make_local_species_population()
        self.prepare_dynamic_compartments()

    def initialize_infrastructure(self):
        """ Initialize the infrastructure of a simulation
        """
        self.dynamic_model = DynamicModel(self.model,
                                          self.local_species_population,
                                          self.temp_dynamic_compartments)
        self.temp_dynamic_compartments = None
        for comp in self.dynamic_model.dynamic_compartments.values():
            comp.dynamic_model = self.dynamic_model
        if self.wc_sim_config.de_simulation_config.output_dir is not None:
            self.checkpointing_sim_obj = self.create_multialgorithm_checkpointing(
                self.wc_sim_config.de_simulation_config.output_dir,
                self.wc_sim_config.checkpoint_period)
        self.dynamic_model.dynamic_submodels = self.create_dynamic_submodels()

    def molecular_weights_for_species(self, species=None):
        """ Obtain the molecular weights for species with specified ids

        A weight of `NaN` is returned for species whose species_types do not have a structure.

        Args:
            species (:obj:`iterator`, optional): an iterator over species ids; if not provided,
                obtain weights for all species in the model

        Returns:
            :obj:`dict`: species id -> molecular weight
        """
        species_weights = {}
        species = species or [
            species.id for species in self.model.get_species()
        ]
        for species_id in species:
            species_type_id, _ = ModelUtilities.parse_species_id(species_id)
            species_type = self.model.species_types.get_one(id=species_type_id)
            if species_type.structure:
                species_weights[
                    species_id] = species_type.structure.molecular_weight
            else:
                species_weights[species_id] = float('nan')
        return species_weights

    def init_species_pop_from_distribution(self):
        """ Initialize the species populations

        Uses the dynamic compartment volume previously sampled from its distribution
        """
        self.init_populations = {}
        for species in self.model.get_species():
            dynamic_compartment = self.temp_dynamic_compartments[
                species.compartment.id]
            self.init_populations[
                species.
                id] = ModelUtilities.sample_copy_num_from_concentration(
                    species, dynamic_compartment.init_volume,
                    self.random_state)

    def get_dynamic_compartments(self, submodel):
        """ Get the :obj:`DynamicCompartment`\ s for Submodel `submodel`

        Args:
            submodel (:obj:`Submodel`): the `wc_lang` submodel being compiled into a `DynamicSubmodel`

        Returns:
            :obj:`dict`: mapping: compartment id -> `DynamicCompartment` for the
                `DynamicCompartment`(s) that a new `DynamicSubmodel` needs
        """
        dynamic_compartments = {}
        for comp in submodel.get_children(kind='submodel', __type=Compartment):
            dynamic_compartments[
                comp.id] = self.dynamic_model.dynamic_compartments[comp.id]
        return dynamic_compartments

    def create_dynamic_compartments(self):
        """ Create the :obj:`DynamicCompartment`\ s for this simulation
        """
        # create DynamicCompartments
        self.temp_dynamic_compartments = {}
        for compartment in self.model.get_compartments():
            self.temp_dynamic_compartments[
                compartment.id] = DynamicCompartment(None, self.random_state,
                                                     compartment)

    def prepare_dynamic_compartments(self):
        """ Prepare the :obj:`DynamicCompartment`\ s for this simulation
        """
        for dynamic_compartment in self.temp_dynamic_compartments.values():
            dynamic_compartment.initialize_mass_and_density(
                self.local_species_population)

    def make_local_species_population(self, retain_history=True):
        """ Create a :obj:`LocalSpeciesPopulation` that contains all the species in a model

        Instantiate a :obj:`LocalSpeciesPopulation` as the centralized store of a model's species population.

        Args:
            retain_history (:obj:`bool`, optional): whether the :obj:`LocalSpeciesPopulation` should
                retain species population history

        Returns:
            :obj:`LocalSpeciesPopulation`: a :obj:`LocalSpeciesPopulation` for the model
        """
        molecular_weights = self.molecular_weights_for_species()

        # Species used by continuous time submodels (like DFBA and ODE) need initial population slopes
        # which indicate that the species is modeled by a continuous time submodel.
        # TODO(Arthur): support non-zero initial population slopes; calculate them with initial runs of dFBA and ODE submodels
        init_pop_slopes = {}
        for submodel in self.model.get_submodels():
            if submodel.id in self.skipped_submodels():
                continue
            if are_terms_equivalent(submodel.framework, onto['WC:ordinary_differential_equations']) or \
                    are_terms_equivalent(submodel.framework, onto['WC:dynamic_flux_balance_analysis']):
                for species in submodel.get_children(kind='submodel',
                                                     __type=Species):
                    init_pop_slopes[species.id] = 0.0

        return LocalSpeciesPopulation(
            'LSP_' + self.model.id,
            self.init_populations,
            molecular_weights,
            initial_population_slopes=init_pop_slopes,
            random_state=self.random_state,
            retain_history=retain_history)

    def set_simultaneous_execution_priorities(self):
        """ Assign simultaneous execution priorities for all simulation objects and submodels
        """
        # Simulation objects and submodels executing at the same simulation time run in this order:
        SimObjClassPriority.assign_decreasing_priority([
            SsaSubmodel, NrmSubmodel, DsaSubmodel, DfbaSubmodel, OdeSubmodel,
            MultialgorithmicCheckpointingSimObj
        ])

    def create_multialgorithm_checkpointing(self, checkpoints_dir,
                                            checkpoint_period):
        """ Create a multialgorithm checkpointing object for this simulation

        Args:
            checkpoints_dir (:obj:`str`): the directory that will contain checkpoints
            checkpoint_period (:obj:`float`): interval between checkpoints, in simulated seconds

        Returns:
            :obj:`MultialgorithmicCheckpointingSimObj`: the checkpointing object
        """
        multialgorithm_checkpointing_sim_obj = MultialgorithmicCheckpointingSimObj(
            CHECKPOINTING_SIM_OBJ, checkpoint_period, checkpoints_dir,
            self.local_species_population, self.dynamic_model, self)

        # add the multialgorithm checkpointing object to the simulation
        self.simulation.add_object(multialgorithm_checkpointing_sim_obj)
        return multialgorithm_checkpointing_sim_obj

    def create_dynamic_submodels(self):
        """ Create dynamic submodels that access shared species

        Returns:
            :obj:`list`: list of the simulation's `DynamicSubmodel`\ s

        Raises:
            :obj:`MultialgorithmError`: if a submodel cannot be created
        """
        def get_options(self, submodel_class_name):
            if self.options is not None and submodel_class_name in self.options:
                return self.options[submodel_class_name]
            else:
                return {}

        # make the simulation's submodels
        simulation_submodels = {}
        for lang_submodel in self.model.get_submodels():

            if lang_submodel.id in self.skipped_submodels():
                continue

            # don't create a submodel with no reactions
            if not lang_submodel.reactions:
                warnings.warn(
                    f"not creating submodel '{lang_submodel.id}': no reactions provided",
                    MultialgorithmWarning)
                continue

            if are_terms_equivalent(
                    lang_submodel.framework,
                    onto['WC:stochastic_simulation_algorithm']):
                simulation_submodel = SsaSubmodel(
                    lang_submodel.id, self.dynamic_model,
                    list(lang_submodel.reactions),
                    lang_submodel.get_children(kind='submodel',
                                               __type=Species),
                    self.get_dynamic_compartments(lang_submodel),
                    self.local_species_population,
                    **get_options(self, 'SsaSubmodel'))

            elif are_terms_equivalent(lang_submodel.framework,
                                      onto['WC:next_reaction_method']):
                simulation_submodel = NrmSubmodel(
                    lang_submodel.id, self.dynamic_model,
                    list(lang_submodel.reactions),
                    lang_submodel.get_children(kind='submodel',
                                               __type=Species),
                    self.get_dynamic_compartments(lang_submodel),
                    self.local_species_population,
                    **get_options(self, 'NrmSubmodel'))

            elif are_terms_equivalent(
                    lang_submodel.framework,
                    onto['WC:dynamic_flux_balance_analysis']):
                # TODO(Arthur): make DFBA submodels work
                simulation_submodel = DfbaSubmodel(
                    lang_submodel.id, self.dynamic_model,
                    list(lang_submodel.reactions),
                    lang_submodel.get_children(kind='submodel',
                                               __type=Species),
                    self.get_dynamic_compartments(lang_submodel),
                    self.local_species_population,
                    self.wc_sim_config.dfba_time_step,
                    **get_options(self, 'DfbaSubmodel'))

            elif are_terms_equivalent(
                    lang_submodel.framework,
                    onto['WC:ordinary_differential_equations']):
                simulation_submodel = OdeSubmodel(
                    lang_submodel.id, self.dynamic_model,
                    list(lang_submodel.reactions),
                    lang_submodel.get_children(kind='submodel',
                                               __type=Species),
                    self.get_dynamic_compartments(lang_submodel),
                    self.local_species_population,
                    self.wc_sim_config.ode_time_step,
                    **get_options(self, 'OdeSubmodel'))

            elif are_terms_equivalent(
                    lang_submodel.framework,
                    onto['WC:deterministic_simulation_algorithm']):
                # a deterministic simulation algorithm, used for testing
                simulation_submodel = DsaSubmodel(
                    lang_submodel.id, self.dynamic_model,
                    list(lang_submodel.reactions),
                    lang_submodel.get_children(kind='submodel',
                                               __type=Species),
                    self.get_dynamic_compartments(lang_submodel),
                    self.local_species_population,
                    **get_options(self, 'DsaSubmodel'))

            else:
                raise MultialgorithmError(
                    f"Unsupported lang_submodel framework '{lang_submodel.framework}'"
                )

            simulation_submodels[simulation_submodel.id] = simulation_submodel

            # add the submodel to the simulation
            self.simulation.add_object(simulation_submodel)

        return simulation_submodels

    def __str__(self):
        """ Provide a readable representation of this `MultialgorithmSimulation`

        Returns:
            :obj:`str`: a readable representation of this `MultialgorithmSimulation`
        """
        return obj_to_str(self, [
            'args', 'simulation', 'dynamic_compartments', 'dynamic_model',
            'init_populations', 'local_species_population', 'model',
            'checkpointing_sim_obj', 'simulation_submodels'
        ])
Example #19
class TestAggregateDistributedProps(unittest.TestCase):
    def setUp(self):
        self.PERIOD = 10
        self.property_name = 'test_prop'
        self.aggregate_distributed_props = AggregateDistributedProps(
            'aggregate_distributed_props')
        self.simulator = SimulationEngine()

    def make_properties_and_providers(self, num_properties, num_providers,
                                      period, num_periods,
                                      value_hist_generator):
        '''Create the components of distributed property collection: `PropertyProvider`s,
        `DistributedProperty`s, and `PropertyRequestor`s
        '''

        # set up objects for testing
        expected_value_hist = {
            time: val
            for time, val in value_hist_generator()
        }

        property_providers = []
        for prop_num in range(num_properties):
            for prov_num in range(num_providers):
                property_providers.append(
                    PropertyProvider(
                        'property_{}_provider_{}'.format(prop_num, prov_num),
                        {'property_{}'.format(prop_num): expected_value_hist}))

        properties = []
        requestors = []
        for prop_num in range(num_properties):
            property_name = 'property_{}'.format(prop_num)
            properties.append(
                DistributedProperty(
                    property_name, period,
                    property_providers[prop_num *
                                       num_providers:(prop_num + 1) *
                                       num_providers], sum_values))

            # since the aggregation function is sum, expect distributed property to be
            # (number of providers)*value
            expected_requestor_value_history = {
                time: num_providers * val
                for time, val in expected_value_hist.items()
            }
            requestors.append(
                PropertyRequestor('property_requestor_{}'.format(prop_num),
                                  property_name, period,
                                  self.aggregate_distributed_props,
                                  expected_requestor_value_history, self))

        return (properties, expected_value_hist, property_providers,
                requestors)

    def test_aggregate_distributed_props(self):

        # test multiple concurrent properties over multiple time periods
        # these values are entirely arbitrary
        NUM_PROPERTIES = 3
        NUM_PROVIDERS = 4
        NUM_PERIODS = 6
        PERIOD = 5
        OFFSET = 3
        RATE = 2

        def value_hist_generator():
            # generates time,value tuples for a history
            # values are a linear function of time
            for i in range(NUM_PERIODS):
                yield PERIOD * i, OFFSET + RATE * i

        # create and register test SimulationObjects
        (props, expected_value_hist, providers,
         requestors) = self.make_properties_and_providers(
             NUM_PROPERTIES, NUM_PROVIDERS, PERIOD, NUM_PERIODS,
             value_hist_generator)

        self.simulator.add_objects(providers + requestors +
                                   [self.aggregate_distributed_props])

        for property in props:
            self.aggregate_distributed_props.add_property(property)

        # send initial events
        self.simulator.initialize()
        self.simulator.simulate((NUM_PERIODS - 1) * PERIOD)

    def test_aggregate_distributed_props_errors1(self):
        '''
        send the simulator a bad message
        simulate & catch error
        '''
        self.simulator.add_object(self.aggregate_distributed_props)

        self.aggregate_distributed_props.send_event(
            0, self.aggregate_distributed_props,
            message_types.GetHistoricalProperty('no_such_property_name', 0))

        self.simulator.initialize()
        with self.assertRaises(ValueError) as context:
            self.simulator.simulate(10)
        self.assertIn('Error: unknown distributed property',
                      str(context.exception))

    def test_aggregate_distributed_props_errors2(self):
        '''
        send the simulator a bad message
        simulate & catch error
        '''
        self.simulator.add_object(self.aggregate_distributed_props)

        prop_name = 'prop_name'
        period = 3
        distributed_property = DistributedProperty(prop_name, period, [],
                                                   sum_values)
        self.aggregate_distributed_props.add_property(distributed_property)

        self.aggregate_distributed_props.send_event(
            2, self.aggregate_distributed_props,
            message_types.GetHistoricalProperty(prop_name, 1))

        self.simulator.initialize()
        with self.assertRaises(ValueError) as context:
            self.simulator.simulate(10)
        self.assertIn(
            "Error: distributed property 'prop_name' not available for time",
            str(context.exception))
Example #20
 def setUp(self):
     self.PERIOD = 10
     self.property_name = 'test_prop'
     self.aggregate_distributed_props = AggregateDistributedProps(
         'aggregate_distributed_props')
     self.simulator = SimulationEngine()
Example #21
class TestSimulationEngine(unittest.TestCase):
    def setUp(self):
        # create simulator
        self.simulator = SimulationEngine()
        self.out_dir = tempfile.mkdtemp()
        self.log_names = ['de_sim.debug.file', 'de_sim.plot.file']
        measurements_file = core.get_config()['de_sim']['measurements_file']
        self.measurements_pathname = os.path.join(self.out_dir,
                                                  measurements_file)

    def tearDown(self):
        shutil.rmtree(self.out_dir)

    def make_one_object_simulation(self):
        self.simulator.reset()
        obj = ExampleSimulationObject(obj_name(1))
        self.simulator.add_object(obj)
        self.assertEqual(self.simulator.get_object(obj.name), obj)
        self.simulator.initialize()

    def test_get_sim_config(self):
        self.assertEqual(SimulationConfig(5.),
                         SimulationEngine._get_sim_config(time_max=5.))

        config_dict = dict(time_max=5., time_init=2.)
        self.assertEqual(
            SimulationConfig(5., 2.),
            SimulationEngine._get_sim_config(config_dict=config_dict))

        with self.assertRaisesRegex(
                SimulatorError,
                'time_max, sim_config, or config_dict must be provided'):
            SimulationEngine._get_sim_config()

        config_dict = dict(time_init=2.)
        with self.assertRaisesRegex(
                SimulatorError,
                'at most 1 of time_max, sim_config, or config_dict'):
            SimulationEngine._get_sim_config(100, config_dict=config_dict)

        simulation_config = SimulationConfig(10)
        with self.assertRaisesRegex(
                SimulatorError,
                'sim_config is not provided, sim_config= is probably needed'):
            SimulationEngine._get_sim_config(simulation_config)

        config_dict = dict(time_init=2.)
        with self.assertRaisesRegex(
                SimulatorError, 'time_max must be provided in config_dict'):
            SimulationEngine._get_sim_config(config_dict=config_dict)

    def test_simulate_and_run(self):
        for operation in ['simulate', 'run']:
            self.make_one_object_simulation()
            expr = f'self.simulator.{operation}(5.0).num_events'
            self.assertEqual(eval(expr), 3)

    def test_one_object_simulation_neg_endtime(self):
        obj = ExampleSimulationObject(obj_name(1))
        self.simulator.add_object(obj)
        self.simulator.initialize()
        config_dict = dict(time_max=-1, time_init=-2)
        self.assertEqual(
            self.simulator.simulate(config_dict=config_dict).num_events, 0)

    def test_simulation_engine_exceptions(self):
        obj = ExampleSimulationObject(obj_name(1))
        with self.assertRaisesRegex(
                SimulatorError,
                f"cannot delete simulation object '{obj.name}'"):
            self.simulator.delete_object(obj)

        no_such_obj_name = 'no such object'
        with self.assertRaisesRegex(
                SimulatorError,
                f"cannot get simulation object '{no_such_obj_name}'"):
            self.simulator.get_object(no_such_obj_name)

        with self.assertRaisesRegex(SimulatorError,
                                    'Simulation has not been initialized'):
            self.simulator.simulate(5.0)

        self.simulator.initialize()
        with self.assertRaisesRegex(SimulatorError,
                                    'Simulation has no objects'):
            self.simulator.simulate(5.0)

        self.simulator.add_object(obj)
        with self.assertRaisesRegex(SimulatorError,
                                    'Simulation has no initial events'):
            self.simulator.simulate(5.0)

        simulator = SimulationEngine()
        simulator.add_object(BasicExampleSimulationObject('test'))
        simulator.initialize()
        # start time = 2
        simulation_config = SimulationConfig(5, 2)
        with self.assertRaisesRegex(
                SimulatorError,
                'first event .* is earlier than the start time'):
            simulator.simulate(sim_config=simulation_config)

        with self.assertRaisesRegex(
                SimulatorError, f"cannot add simulation object '{obj.name}'"):
            self.simulator.add_object(obj)

        self.simulator.delete_object(obj)
        try:
            self.simulator.add_object(obj)
        except Exception:
            self.fail('should be able to add object after delete')

        self.simulator.reset()
        self.simulator.initialize()
        obj = ExampleSimulationObject(obj_name(2))
        self.simulator.add_object(obj)
        obj.time += 1
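        # the object's clock is now ahead of the event scheduled at time 0, so executing it must raise an error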
        event_queue = self.simulator.event_queue
        event_queue.schedule_event(0, 0, obj, obj, InitMsg())
        with self.assertRaisesRegex(SimulatorError,
                                    "but event time .* < object time"):
            self.simulator.simulate(5.0)

        with self.assertRaisesRegex(SimulatorError,
                                    'Simulation has already been initialized'):
            self.simulator.initialize()

    def test_simulation_end(self):
        self.simulator.add_object(BasicExampleSimulationObject('name'))
        self.simulator.initialize()
        # TODO(Arthur): test that the "No events remain" message is logged
        self.simulator.simulate(5.0)

    def test_simulation_stop_condition(self):
        simulator = SimulationEngine()
        # 1 event/sec:
        simulator.add_object(PeriodicSimulationObject('name', 1))
        simulator.initialize()
        time_max = 10
        # execute to time <= time_max, with 1st event at time = 1
        self.assertEqual(simulator.simulate(time_max).num_events, time_max + 1)

        __stop_cond_end = 3

        def stop_cond_eg(time):
            return __stop_cond_end <= time

        simulator = SimulationEngine()
        simulator.add_object(PeriodicSimulationObject('name', 1))
        simulator.initialize()
        sim_config = SimulationConfig(time_max)
        sim_config.stop_condition = stop_cond_eg
        # because the simulation is executing one event / sec, the number of events should equal the stop time plus 1
        self.assertEqual(
            simulator.simulate(sim_config=sim_config).num_events,
            __stop_cond_end + 1)

    def test_progress_bar(self):
        simulator = SimulationEngine()
        simulator.add_object(PeriodicSimulationObject('name', 1))
        simulator.initialize()
        print('\nTesting progress bar:', file=sys.stderr)
        sys.stderr.flush()
        with CaptureOutput(relay=True) as capturer:
            try:
                time_max = 10
                config_dict = dict(time_max=time_max, progress=True)
                self.assertEqual(
                    simulator.simulate(config_dict=config_dict).num_events,
                    time_max + 1)
                self.assertTrue(f"/{time_max}" in capturer.get_text())
                self.assertTrue("time_max" in capturer.get_text())
            except ValueError as e:
                if str(e) == 'I/O operation on closed file':
                    pass
                    # This ValueError is raised because progressbar expects sys.stderr to remain open
                    # for an extended time period but karr_lab_build_utils run-tests has closed it.
                    # Since SimulationProgressBar works and passes tests under naked pytest, and
                    # progressbar does not want to address the conflict over sys.stderr
                    # (see https://github.com/WoLpH/python-progressbar/issues/202) we let this
                    # test pass under karr_lab_build_utils.
                else:
                    self.fail('test_progress failed for unknown reason')

    def test_multi_object_simulation_and_reset(self):
        for i in range(1, 4):
            obj = ExampleSimulationObject(obj_name(i))
            self.simulator.add_object(obj)
        self.simulator.initialize()
        self.assertEqual(self.simulator.simulate(5.0).num_events, 9)

        event_count_lines = self.simulator.provide_event_counts().split(
            '\n')[1:]
        for idx, line in enumerate(event_count_lines):
            self.assertIn('3', line)
            self.assertIn('ExampleSimulationObject', line)
            self.assertIn(obj_name(idx + 1), line)

        self.simulator.reset()
        self.assertEqual(len(self.simulator.simulation_objects), 0)

    def test_multi_interacting_object_simulation(self):
        sim_objects = [
            InteractingSimulationObject(obj_name(i)) for i in range(1, 3)
        ]
        self.simulator.add_objects(sim_objects)
        self.simulator.initialize()
        self.assertEqual(self.simulator.simulate(2.5).num_events, 4)

    def make_cyclical_messaging_network_sim(self, simulator, num_objs):
        # make simulation with cyclical messaging network
        sim_objects = [
            CyclicalMessagesSimulationObject(obj_name(i), i, num_objs, self)
            for i in range(num_objs)
        ]
        simulator.add_objects(sim_objects)

    def test_cyclical_messaging_network(self):
        # test event times at simulation objects; this test should succeed with any
        # natural number for num_objs and any non-negative value of time_max
        self.make_cyclical_messaging_network_sim(self.simulator, 10)
        self.simulator.initialize()
        self.assertTrue(0 < self.simulator.simulate(20).num_events)

    def test_message_queues(self):
        warnings.simplefilter("ignore")

        # test with an empty event queue

        class InactiveSimulationObject(ApplicationSimulationObject):
            def __init__(self):
                SimulationObject.__init__(self, 'inactive')

            def send_initial_events(self):
                pass

            def get_state(self):
                pass

            event_handlers = []

            messages_sent = [InitMsg]

        self.make_cyclical_messaging_network_sim(self.simulator, 4)
        self.simulator.add_object(InactiveSimulationObject())
        self.simulator.initialize()
        queues = self.simulator.message_queues()
        for sim_obj_name in self.simulator.simulation_objects:
            self.assertIn(sim_obj_name, queues)

        # test with self.time initialized
        self.simulator.simulate(5)
        queues = self.simulator.message_queues()
        for sim_obj_name in self.simulator.simulation_objects:
            self.assertIn(sim_obj_name, queues)

    def test_metadata_collection(self):
        self.make_one_object_simulation()
        time_max = 5
        config_dict = dict(time_max=time_max, output_dir=self.out_dir)
        self.simulator.run(config_dict=config_dict)
        sim_metadata = SimulationMetadata.read_dataclass(self.out_dir)
        self.assertIsInstance(sim_metadata, SimulationMetadata)
        self.assertEqual(sim_metadata.simulation_config.time_max, time_max)
        self.assertGreaterEqual(sim_metadata.run.run_time, 0)

        # provide AuthorMetadata
        self.simulator.reset()
        self.make_one_object_simulation()
        output_dir = tempfile.mkdtemp(dir=self.out_dir)
        config_dict = dict(time_max=time_max, output_dir=output_dir)
        author_name = 'Joe'
        author_metadata = AuthorMetadata(name=author_name)
        self.simulator.run(config_dict=config_dict,
                           author_metadata=author_metadata)
        sim_metadata = SimulationMetadata.read_dataclass(output_dir)
        self.assertEqual(sim_metadata.author.name, author_name)

        # no output_dir
        self.simulator.reset()
        self.make_one_object_simulation()
        self.simulator.run(5.0)
        self.assertIsInstance(self.simulator.sim_metadata, SimulationMetadata)

        @contextlib.contextmanager
        def working_directory(path):
            """ A context manager that temporarilyt changes the working directory

            Args:
                path (:obj:`str`): the temporary working directory
            See http://code.activestate.com/recipes/576620-changedirectory-context-manager/#c2
            """
            prev_cwd = os.getcwd()
            os.chdir(path)
            yield
            os.chdir(prev_cwd)

        # current directory not a git repo
        new_dir = tempfile.mkdtemp(dir=self.out_dir)
        with working_directory(new_dir) as cd:
            self.simulator.reset()
            self.make_one_object_simulation()
            self.simulator.run(5.0)
        self.assertTrue(self.simulator.sim_metadata.simulator_repo is None)

    ### test simulation performance ### # noqa: E266
    def prep_simulation(self, simulation_engine, num_sim_objs):
        simulation_engine.reset()
        self.make_cyclical_messaging_network_sim(simulation_engine,
                                                 num_sim_objs)
        simulation_engine.initialize()

    def suspend_logging(self, log_names, new_level=LogLevel.exception):
        # cannot use environment variable(s) to modify logging because logging2.Logger() as used
        # by LoggerConfigurator().from_dict() simply reuses existing logs whose names don't change
        # instead, modify the levels of existing logs
        # get all existing levels
        existing_levels = {}  # map from log name -> handler name -> level
        for log_name in log_names:
            existing_levels[log_name] = {}
            existing_log = LogRegister.get_logger(name=log_name)
            for handler in existing_log.handlers:
                existing_levels[log_name][handler.name] = handler.min_level
        # set levels to new level
        for log_name in log_names:
            existing_log = LogRegister.get_logger(name=log_name)
            for handler in existing_log.handlers:
                handler.min_level = new_level
        return existing_levels

    def restore_logging_levels(self, log_names, existing_levels):
        for log_name in log_names:
            existing_log = LogRegister.get_logger(name=log_name)
            for handler in existing_log.handlers:
                handler.min_level = existing_levels[log_name][handler.name]

    def test_suspend_restore_logging(self):
        debug_logs = core.get_debug_logs()

        existing_levels = self.suspend_logging(self.log_names)
        # suspended
        for log_name in self.log_names:
            fast_logger = FastLogger(debug_logs.get_log(log_name), 'debug')
            self.assertEqual(fast_logger.get_level(), LogLevel.exception)

        self.restore_logging_levels(self.log_names, existing_levels)
        level_by_logger = {}
        for logger, handler_levels in existing_levels.items():
            min_level = LogLevel.exception
            for handler, level in handler_levels.items():
                if level < min_level:
                    min_level = level
            level_by_logger[logger] = min_level

        # restored
        for log_name in self.log_names:
            fast_logger = FastLogger(debug_logs.get_log(log_name), 'debug')
            self.assertEqual(fast_logger.get_level(),
                             level_by_logger[log_name])

    # @unittest.skip("takes 3 to 5 min.")
    def test_performance(self):
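        """ Measure the performance of cyclical messaging networks of increasing size

        Execution times are appended to a performance log, and runs with fewer than
        `max_num_profile_objects` simulation objects are also profiled with `cProfile`.
        """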
        existing_levels = self.suspend_logging(self.log_names)
        simulation_engine = SimulationEngine()
        end_sim_time = 100
        num_sim_objs = 4
        max_num_profile_objects = 300
        max_num_sim_objs = 5000
        print()
        print(
            f"Performance test of cyclical messaging network: end simulation time: {end_sim_time}"
        )
        unprofiled_perf = [
            "\n#sim obs\t# events\trun time (s)\tevents/s".expandtabs(15)
        ]

        while num_sim_objs < max_num_sim_objs:

            # measure execution time
            self.prep_simulation(simulation_engine, num_sim_objs)
            start_time = time.process_time()
            num_events = simulation_engine.simulate(end_sim_time).num_events
            run_time = time.process_time() - start_time
            self.assertEqual(num_sim_objs * end_sim_time, num_events)
            unprofiled_perf.append("{}\t{}\t{:8.3f}\t{:8.0f}".format(
                num_sim_objs, num_events, run_time,
                num_events / run_time).expandtabs(15))

            # profile
            if num_sim_objs < max_num_profile_objects:
                self.prep_simulation(simulation_engine, num_sim_objs)
                out_file = os.path.join(
                    self.out_dir, "profile_out_{}.out".format(num_sim_objs))
                # avoid shadowing the builtin locals()
                runctx_locals = {
                    'simulation_engine': simulation_engine,
                    'end_sim_time': end_sim_time
                }
                cProfile.runctx(
                    'num_events = simulation_engine.simulate(end_sim_time)',
                    {},
                    runctx_locals,
                    filename=out_file)
                profile = pstats.Stats(out_file)
                print(f"Profile for {num_sim_objs} simulation objects:")
                profile.strip_dirs().sort_stats('cumulative').print_stats(20)

            num_sim_objs *= 4

        self.restore_logging_levels(self.log_names, existing_levels)
        performance_log = os.path.join(os.path.dirname(__file__), 'results',
                                       'perf_results',
                                       'de_sim_performance_log.txt')
        with open(performance_log, 'a') as perf_log:
            today = datetime.today().strftime('%Y-%m-%d')
            print(f'Performance summary on {today}', end='', file=perf_log)
            print("\n".join(unprofiled_perf), file=perf_log)
            print(file=perf_log)

        print(f'Performance summary, written to {performance_log}')
        print("\n".join(unprofiled_perf))

    def test_profiling(self):
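        """ Check that simulating with `profile=True` returns profile statistics as a `pstats.Stats` instance """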
        existing_levels = self.suspend_logging(self.log_names)
        simulation_engine = SimulationEngine()
        num_sim_objs = 20
        self.prep_simulation(simulation_engine, num_sim_objs)
        end_sim_time = 200
        expected_text = [
            'function calls', 'Ordered by: internal time',
            'filename:lineno(function)'
        ]
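        # with an output directory, profile statistics are also written to the measurements file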
        sim_config_dict = dict(time_max=end_sim_time,
                               output_dir=self.out_dir,
                               profile=True)
        stats = simulation_engine.simulate(
            config_dict=sim_config_dict).profile_stats
        self.assertIsInstance(stats, pstats.Stats)
        with open(self.measurements_pathname, 'r') as measurements_file:
            measurements = measurements_file.read()
        for text in expected_text:
            self.assertIn(text, measurements)

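        # without an output directory, profile statistics are written to stdout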
        sim_config_dict = dict(time_max=end_sim_time, profile=True)
        with CaptureOutput(relay=False) as capturer:
            stats = simulation_engine.simulate(
                config_dict=sim_config_dict).profile_stats
            for text in expected_text:
                self.assertIn(text, capturer.get_text())
        self.assertIsInstance(stats, pstats.Stats)
        self.restore_logging_levels(self.log_names, existing_levels)

    def test_mem_use_measurement(self):
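        """ Check that `object_memory_change_interval` produces memory use measurements """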
        self.make_one_object_simulation()
        time_max = 20
        config_dict = dict(time_max=time_max,
                           output_dir=self.out_dir,
                           object_memory_change_interval=10)
        self.simulator.simulate(config_dict=config_dict)
        expected_text = [
            'Memory use changes by SummaryTracker', '# objects', 'float'
        ]
        with open(self.measurements_pathname, 'r') as measurements_file:
            measurements = measurements_file.read()
        for text in expected_text:
            self.assertIn(text, measurements)

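        # without an output directory, memory use measurements are written to stdout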
        self.make_one_object_simulation()
        with CaptureOutput(relay=False) as capturer:
            config_dict = dict(time_max=time_max,
                               object_memory_change_interval=10)
            self.simulator.simulate(config_dict=config_dict)
            for text in expected_text:
                self.assertIn(text, capturer.get_text())
Example No. 22
0
    def test_simulation_engine_exceptions(self):
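        """ Check the `SimulatorError` exceptions raised by `SimulationEngine` methods """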
        obj = ExampleSimulationObject(obj_name(1))
        with self.assertRaisesRegex(
                SimulatorError,
                f"cannot delete simulation object '{obj.name}'"):
            self.simulator.delete_object(obj)

        no_such_obj_name = 'no such object'
        with self.assertRaisesRegex(
                SimulatorError,
                f"cannot get simulation object '{no_such_obj_name}'"):
            self.simulator.get_object(no_such_obj_name)

        with self.assertRaisesRegex(SimulatorError,
                                    'Simulation has not been initialized'):
            self.simulator.simulate(5.0)

        self.simulator.initialize()
        with self.assertRaisesRegex(SimulatorError,
                                    'Simulation has no objects'):
            self.simulator.simulate(5.0)

        self.simulator.add_object(obj)
        with self.assertRaisesRegex(SimulatorError,
                                    'Simulation has no initial events'):
            self.simulator.simulate(5.0)

        simulator = SimulationEngine()
        simulator.add_object(BasicExampleSimulationObject('test'))
        simulator.initialize()
        # start time = 2
        simulation_config = SimulationConfig(5, 2)
        with self.assertRaisesRegex(
                SimulatorError,
                'first event .* is earlier than the start time'):
            simulator.simulate(sim_config=simulation_config)

        with self.assertRaisesRegex(
                SimulatorError, f"cannot add simulation object '{obj.name}'"):
            self.simulator.add_object(obj)

        self.simulator.delete_object(obj)
        try:
            self.simulator.add_object(obj)
        except Exception:
            self.fail('should be able to add object after delete')

        self.simulator.reset()
        self.simulator.initialize()
        obj = ExampleSimulationObject(obj_name(2))
        self.simulator.add_object(obj)
        obj.time += 1
        event_queue = self.simulator.event_queue
        event_queue.schedule_event(0, 0, obj, obj, InitMsg())
        with self.assertRaisesRegex(SimulatorError,
                                    "but event time .* < object time"):
            self.simulator.simulate(5.0)

        with self.assertRaisesRegex(SimulatorError,
                                    'Simulation has already been initialized'):
            self.simulator.initialize()