def __init__(
        self,
        function: Callable,
        configuration: Union[str, Mapping, None] = None,
        scope: Union[Scope, str] = None,
        safe: bool = True,
        db: Database = None,
        name: str = 'EMAT',
        metamodel_id=None,
):
    if scope is None:
        raise ValueError('must give scope')

    # Derive a default name from the function if none was given.
    # Note: use `function` directly here; `self.function` is not set
    # until WorkbenchModel.__init__ runs at the end of this method.
    if name == 'EMAT':
        try:
            _name = function.__name__
        except AttributeError:
            pass
        else:
            if _name.isalnum():
                name = _name
            elif _name.replace("_", "").replace(" ", "").isalnum():
                name = _name.replace("_", "").replace(" ", "")

    AbstractCoreModel.__init__(self, configuration, scope, safe, db,
                               metamodel_id=metamodel_id)

    self.archive_path = self.config.get('archive_path', None)
    if self.archive_path is not None:
        os.makedirs(self.archive_path, exist_ok=True)

    # If no archive path is given, a temporary directory is created.
    # All archive files will be lost when this model is deleted.
    if self.archive_path is None:
        import tempfile
        self._temp_archive = tempfile.TemporaryDirectory()
        self.archive_path = self._temp_archive.name

    WorkbenchModel.__init__(self, name, function)
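# A minimal usage sketch for the constructor above. Assumptions: the class
# is TMIP-EMAT's PythonCoreModel and a scope YAML file exists at the path
# shown; the import path and model function are illustrative, not taken
# from the excerpt.
from emat import Scope
from emat.model.core_python import PythonCoreModel  # assumed import path

def lake_sim(b=0.42, q=2.0, **kwargs):
    # hypothetical core-model function returning named outputs
    return {'max_P': b * q}

scope = Scope('lake_scope.yaml')  # a scope is required; None raises ValueError
model = PythonCoreModel(lake_sim, scope=scope)  # name defaults to 'lake_sim'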
def test_determine_parameters(self):
    function = mock.Mock()
    model_a = Model("A", function)
    model_a.uncertainties = [
        RealParameter('a', 0, 1),
        RealParameter('b', 0, 1),
    ]

    function = mock.Mock()
    model_b = Model("B", function)
    model_b.uncertainties = [
        RealParameter('b', 0, 1),
        RealParameter('c', 0, 1),
    ]

    models = [model_a, model_b]

    parameters = determine_parameters(models, 'uncertainties', union=True)
    for model in models:
        for unc in model.uncertainties:
            self.assertIn(unc.name, parameters.keys())

    parameters = determine_parameters(models, 'uncertainties', union=False)
    self.assertIn('b', parameters.keys())
    self.assertNotIn('c', parameters.keys())
    self.assertNotIn('a', parameters.keys())
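# The union/intersection semantics exercised by the test above can be
# sketched as follows. This is a simplified illustration, not the
# workbench's actual determine_parameters implementation.
def determine_parameters_sketch(models, attribute, union=True):
    named = {}   # parameter name -> parameter object
    counts = {}  # parameter name -> number of models defining it
    for model in models:
        for param in getattr(model, attribute):
            named[param.name] = param
            counts[param.name] = counts.get(param.name, 0) + 1
    if union:
        return named  # every parameter defined by any model ('a', 'b', 'c')
    # intersection: only parameters shared by all models ('b')
    return {name: p for name, p in named.items() if counts[name] == len(models)}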
def test_setup_working_directories(self, mock_os):
    function = mock.Mock()
    mock_msi = Model('test', function)
    msis = {mock_msi.name: mock_msi}

    mock_client = mock.create_autospec(ipyparallel.Client)
    mock_client.ids = [0, 1]  # pretend we have two engines

    mock_view = mock.create_autospec(
        ipyparallel.client.view.View)  # @UndefinedVariable
    mock_client.__getitem__.return_value = mock_view

    ema.setup_working_directories(mock_client, msis)
def test_initialize_engines(self):
    function = mock.Mock()
    mock_msi = Model('test', function)
    msis = {mock_msi.name: mock_msi}

    mock_client = mock.create_autospec(ipyparallel.Client)
    mock_client.ids = [0, 1]  # pretend we have two engines

    mock_view = mock.create_autospec(
        ipyparallel.client.view.View)  # @UndefinedVariable
    mock_client.__getitem__.return_value = mock_view

    cwd = '.'
    ema.initialize_engines(mock_client, msis, cwd)

    mock_view.apply_sync.assert_any_call(ema._initialize_engine, 0,
                                         msis, cwd)
    mock_view.apply_sync.assert_any_call(ema._initialize_engine, 1,
                                         msis, cwd)
def test_copy_wds_for_msis(self, mock_os, mock_shutil):
    mock_os.path.join.return_value = '.'

    function = mock.Mock()
    mock_msi = Model('test', function)
    msis = {mock_msi.name: mock_msi}

    engine_id = 0
    engine = ema.Engine(engine_id, msis)
    engine.root_dir = '/dir_name'

    dirs_to_copy = ['/test']
    wd_by_msi = {'/test': [mock_msi.name]}
    engine.copy_wds_for_msis(dirs_to_copy, wd_by_msi)

    # assert_called_once_with, not called_once_with: the latter is a
    # silent no-op attribute access on a Mock and never asserts anything
    mock_os.path.basename.assert_called_once_with(dirs_to_copy[0])
    mock_os.path.join.assert_called_once_with('/dir_name', dirs_to_copy[0])
    mock_shutil.copytree.assert_called_once_with('/test', '.')
    self.assertEqual('.', mock_msi.working_directory)
def test_run_experiment(self):
    function = mock.Mock()
    mock_msi = Model('test', function)

    mock_runner = mock.create_autospec(experiment_runner.ExperimentRunner)

    msis = [mock_msi]
    engine_id = 0
    engine = ema.Engine(engine_id, msis, '.')
    engine.runner = mock_runner

    experiment = {'a': 1}
    engine.run_experiment(experiment)
    mock_runner.run_experiment.assert_called_once_with(experiment)

    # an EMAError raised by the runner propagates unchanged
    mock_runner.run_experiment.side_effect = EMAError
    self.assertRaises(EMAError, engine.run_experiment, experiment)

    # any other exception is wrapped in an EMAParallelError
    mock_runner.run_experiment.side_effect = Exception
    self.assertRaises(EMAParallelError, engine.run_experiment, experiment)
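# The last two assertions pin down an error-translation contract, sketched
# below as a hypothetical simplification (not the actual Engine source):
# EMAError passes through, anything else is wrapped in EMAParallelError.
from ema_workbench.util.ema_exceptions import EMAError, EMAParallelError

def run_experiment_sketch(runner, experiment):
    try:
        return runner.run_experiment(experiment)
    except EMAError:
        raise  # domain errors propagate unchanged to the caller
    except Exception as e:
        # wrap everything else so the parallel layer can report it
        raise EMAParallelError(str(e)) from e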
    # (fragment: tail of an objective function that loops over outcomes)
        if entry == 'max_P':
            minimize = 1
        outcome = outcomes[entry]
        mean_performance = minimize * np.mean(outcome)
        std_performance = np.std(outcome)
        objs.append(float(mean_performance))
        objs.append(float(std_performance))
    return objs


if __name__ == '__main__':
    ema_logging.log_to_stderr(ema_logging.INFO)

    # instantiate the model
    model = Model('lakeproblem', function=lake_problem)

    # specify uncertainties
    model.uncertainties = [RealParameter("b", 0.1, 0.45),
                           RealParameter("q", 2.0, 4.5),
                           RealParameter("mean", 0.01, 0.05),
                           RealParameter("stdev", 0.001, 0.005),
                           RealParameter("delta", 0.93, 0.99)]

    # specify outcomes
    model.outcomes = [ScalarOutcome("max_P"),
                      ScalarOutcome("utility"),
                      ScalarOutcome("inertia"),
                      ScalarOutcome("reliability")]

    # override some of the defaults of the model
    model.constants = [Constant('alpha', 0.41),
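# A script like this typically continues by running experiments over the
# uncertainty space. A sketch using the standard ema_workbench evaluator
# API; the scenario count is illustrative.
from ema_workbench import SequentialEvaluator

with SequentialEvaluator(model) as evaluator:
    experiments, outcomes = evaluator.perform_experiments(scenarios=100)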