def test_get_invalid_module(setup_case):
    """Requesting an analysis module that does not exist must raise KeyError."""
    res_config = setup_case("local/mini_ert", "mini_config")
    enkf_main = EnKFMain(res_config)
    smoother = ESUpdate(enkf_main)
    with pytest.raises(KeyError, match="No such module:STD_ENKF_XXX"):
        smoother.getModule("STD_ENKF_XXX")
def test_update(self):
    """Smoother update from default_0 into target must change every
    SNAKE_OIL_PARAM gen_kw value of realization 0."""
    config = self.createTestPath("local/snake_oil/snake_oil.ert")
    with ErtTestContext("update_test", config) as context:
        ert = context.getErt()
        es_update = ESUpdate(ert)
        fsm = ert.getEnkfFsManager()
        sim_fs = fsm.getFileSystem("default_0")
        target_fs = fsm.getFileSystem("target")
        # ensemble_smoother_update only takes the two file systems; the
        # BoolVector mask previously built here was never used, so it has
        # been removed.
        run_context = ErtRunContext.ensemble_smoother_update(sim_fs, target_fs)
        es_update.smootherUpdate(run_context)
        conf = ert.ensembleConfig()["SNAKE_OIL_PARAM"]
        sim_node = EnkfNode(conf)
        target_node = EnkfNode(conf)
        node_id = NodeId(0, 0)
        sim_node.load(sim_fs, node_id)
        target_node.load(target_fs, node_id)
        sim_gen_kw = sim_node.asGenKw()
        target_gen_kw = target_node.asGenKw()
        # Test that an update has actually taken place
        for index in range(len(sim_gen_kw)):
            self.assertNotEqual(sim_gen_kw[index], target_gen_kw[index])
def run(self):
    # GUI action: run an ensemble-smoother analysis from the source case
    # into the target case selected in the run widget; warn and abort the
    # dialog on failure.
    target = self._run_widget.target_case()
    source = self._run_widget.source_case()
    ert = ERT.ert
    fs_manager = ert.getEnkfFsManager()
    es_update = ESUpdate(ert)
    target_fs = fs_manager.getFileSystem(target)
    source_fs = fs_manager.getFileSystem(source)
    run_context = ErtRunContext.ensemble_smoother_update(
        source_fs,
        target_fs,
    )
    success = es_update.smootherUpdate(run_context)
    if not success:
        # Analysis failed: show a warning and keep the dialog open.
        msg = QMessageBox()
        msg.setIcon(QMessageBox.Warning)
        msg.setWindowTitle("Run Analysis")
        msg.setText("Unable to run analysis for case '%s'." % source)
        msg.setStandardButtons(QMessageBox.Ok)
        msg.exec_()
        return
    # Success: notify listeners that ERT state changed and close the dialog.
    ERT.ertChanged.emit()
    self._dialog.accept()
def update(self, line):
    # Shell command: ensemble-smoother update from the current case into
    # the case named in `line`, running PRE_UPDATE/POST_UPDATE workflows
    # around the analysis.
    arguments = splitArguments(line)
    if len(arguments) == 1:
        case_name = arguments[0]
        ert = self.ert()
        fs_manager = ert.getEnkfFsManager()
        ert.getEnkfSimulationRunner().runWorkflows(HookRuntime.PRE_UPDATE)
        es_update = ESUpdate(ert)
        target_fs = fs_manager.getFileSystem(case_name)
        source_fs = fs_manager.getCurrentFileSystem()
        model_config = ert.getModelConfig()
        runpath_fmt = model_config.getRunpathFormat()
        subst_list = ert.getDataKW()
        # Mask enabling all realizations in the ensemble.
        mask = BoolVector(default_value=True, initial_size=ert.getEnsembleSize())
        # NOTE(review): this ensemble_smoother() call passes six arguments;
        # other call sites in this file also pass a jobname format — confirm
        # against the installed ErtRunContext signature.
        run_context = ErtRunContext.ensemble_smoother(
            source_fs, target_fs, mask, runpath_fmt, subst_list, 0
        )
        success = es_update.smootherUpdate(run_context)
        if not success:
            self.lastCommandFailed("Unable to perform update")
        ert.getEnkfSimulationRunner().runWorkflows(HookRuntime.POST_UPDATE)
    else:
        self.lastCommandFailed(
            "Expected one argument: <target_fs> received: '%s'" % line
        )
def test_update_code1(self):
    """Row-scaled local update: attach a GaussianDecay row scaling to PORO
    and run a smoother update into the 'target' file system."""
    with ErtTestContext("row_scaling", self.config_file) as tc:
        main = tc.getErt()
        # Build a local update configuration from scratch.
        local_config = main.getLocalConfig()
        local_config.clear()
        obs = local_config.createObsdata("OBSSET_LOCAL")
        obs.addNode("WBHP0")
        obs.addNode("WWCT0")
        ministep = local_config.createMinistep("MINISTEP_LOCAL")
        ministep.addActiveData("PORO")
        ministep.attachObsset(obs)
        updatestep = local_config.getUpdatestep()
        updatestep.attachMinistep(ministep)
        row_scaling = ministep.row_scaling("PORO")
        ens_config = main.ensembleConfig()
        poro_config = ens_config["PORO"]
        field_config = poro_config.getFieldModelConfig()
        # -------------------------------------------------------------------------------------
        # Scale the update strength with distance from a reference cell.
        grid = main.eclConfig().getGrid()
        obs_pos = grid.get_xyz(ijk=(5, 5, 1))
        length_scale = (2, 1, 0.50)
        row_scaling.assign(
            field_config.get_data_size(), GaussianDecay(obs_pos, length_scale, grid)
        )
        # -------------------------------------------------------------------------------------
        init_fs = init_data(main)
        target_fs = main.getEnkfFsManager().getFileSystem("target")
        es_update = ESUpdate(main)
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, target_fs)
        es_update.smootherUpdate(run_context)
def update(self, line):
    # Shell command: ensemble-smoother update from the current case into
    # the case named in `line`, running PRE_UPDATE/POST_UPDATE workflows
    # around the analysis.
    arguments = splitArguments(line)
    if len(arguments) == 1:
        case_name = arguments[0]
        ert = self.ert()
        fs_manager = ert.getEnkfFsManager()
        ert.getEnkfSimulationRunner().runWorkflows(HookRuntime.PRE_UPDATE)
        es_update = ESUpdate(ert)
        target_fs = fs_manager.getFileSystem(case_name)
        source_fs = fs_manager.getCurrentFileSystem()
        model_config = ert.getModelConfig()
        runpath_fmt = model_config.getRunpathFormat()
        subst_list = ert.getDataKW()
        # Mask enabling all realizations in the ensemble.
        mask = BoolVector(default_value=True, initial_size=ert.getEnsembleSize())
        # NOTE(review): six-argument ensemble_smoother() call; other call
        # sites also pass a jobname format — confirm against the installed
        # ErtRunContext signature.
        run_context = ErtRunContext.ensemble_smoother(
            source_fs, target_fs, mask, runpath_fmt, subst_list, 0)
        success = es_update.smootherUpdate(run_context)
        if not success:
            self.lastCommandFailed("Unable to perform update")
        ert.getEnkfSimulationRunner().runWorkflows(HookRuntime.POST_UPDATE)
    else:
        self.lastCommandFailed(
            "Expected one argument: <target_fs> received: '%s'" % line)
def test_localization(self):
    """Localized smoother update: only the indices listed in the active
    list of SNAKE_OIL_PARAM may change; all others must stay equal."""
    config = self.createTestPath("local/snake_oil/snake_oil.ert")
    with ErtTestContext("localization_test", config) as context:
        ert = context.getErt()
        es_update = ESUpdate(ert)
        fsm = ert.getEnkfFsManager()
        sim_fs = fsm.getFileSystem("default_0")
        target_fs = fsm.getFileSystem("target")
        # perform localization
        localized_idxs = (1, 2)
        local_config = ert.getLocalConfig()
        local_config.clear()
        dataset = local_config.createDataset("DATASET_SCALAR_LOCA")
        dataset.addNode("SNAKE_OIL_PARAM")
        active_list = dataset.getActiveList("SNAKE_OIL_PARAM")
        for i in localized_idxs:
            active_list.addActiveIndex(i)
        obs = local_config.createObsdata("OBSSET_LOCA")
        obs.addNode("WOPR_OP1_72")
        ministep = local_config.createMinistep("MINISTEP_LOCA")
        ministep.attachDataset(dataset)
        ministep.attachObsset(obs)
        updatestep = local_config.getUpdatestep()
        updatestep.attachMinistep(ministep)
        # Run ensemble smoother
        mask = BoolVector(initial_size=ert.getEnsembleSize(), default_value=True)
        model_config = ert.getModelConfig()
        path_fmt = model_config.getRunpathFormat()
        jobname_fmt = model_config.getJobnameFormat()
        subst_list = None
        run_context = ErtRunContext.ensemble_smoother(
            sim_fs, target_fs, mask, path_fmt, jobname_fmt, subst_list, 0
        )
        es_update.smootherUpdate(run_context)
        conf = ert.ensembleConfig()["SNAKE_OIL_PARAM"]
        sim_node = EnkfNode(conf)
        target_node = EnkfNode(conf)
        node_id = NodeId(0, 0)
        sim_node.load(sim_fs, node_id)
        target_node.load(target_fs, node_id)
        sim_gen_kw = sim_node.asGenKw()
        target_gen_kw = target_node.asGenKw()
        # Test that the localized values has been updated
        for i in localized_idxs:
            self.assertNotEqual(sim_gen_kw[i], target_gen_kw[i])
        # test that all the other values are left unchanged
        non_localized_idxs = (
            x for x in range(len(sim_gen_kw)) if x not in localized_idxs
        )
        for i in non_localized_idxs:
            self.assertEqual(sim_gen_kw[i], target_gen_kw[i])
def test_localization(setup_case, expected_target_gen_kw):
    """
    Note that this is now a snapshot test, so there is no guarantee that the
    snapshots are correct, they are just documenting the current behavior.
    """
    res_config = setup_case("local/snake_oil", "snake_oil.ert")
    ert = EnKFMain(res_config)
    es_update = ESUpdate(ert)
    fsm = ert.getEnkfFsManager()
    sim_fs = fsm.getFileSystem("default_0")
    target_fs = fsm.getFileSystem("target")
    # perform localization: only indices 1 and 2 of SNAKE_OIL_PARAM are
    # made active for the update.
    localized_idxs = (1, 2)
    local_config = ert.getLocalConfig()
    local_config.clear()
    obs = local_config.createObsdata("OBSSET_LOCA")
    obs.addNode("WOPR_OP1_72")
    ministep = local_config.createMinistep("MINISTEP_LOCA")
    ministep.addActiveData("SNAKE_OIL_PARAM")  # replace dataset.addNode()
    active_list = ministep.getActiveList("SNAKE_OIL_PARAM")
    for i in localized_idxs:
        active_list.addActiveIndex(i)
    ministep.attachObsset(obs)
    updatestep = local_config.getUpdatestep()
    updatestep.attachMinistep(ministep)
    # Run ensemble smoother
    mask = [True] * ert.getEnsembleSize()
    model_config = ert.getModelConfig()
    path_fmt = model_config.getRunpathFormat()
    jobname_fmt = model_config.getJobnameFormat()
    subst_list = None
    run_context = ErtRunContext.ensemble_smoother(
        sim_fs, target_fs, mask, path_fmt, jobname_fmt, subst_list, 0
    )
    es_update.smootherUpdate(run_context)
    conf = ert.ensembleConfig()["SNAKE_OIL_PARAM"]
    sim_node = EnkfNode(conf)
    target_node = EnkfNode(conf)
    node_id = NodeId(0, 0)
    sim_node.load(sim_fs, node_id)
    target_node.load(target_fs, node_id)
    sim_gen_kw = list(sim_node.asGenKw())
    target_gen_kw = list(target_node.asGenKw())
    # Test that the localized values has been updated
    assert sim_gen_kw[1:3] != target_gen_kw[1:3]
    # test that all the other values are left unchanged
    assert sim_gen_kw[3:] == target_gen_kw[3:]
    assert sim_gen_kw[0] == target_gen_kw[0]
    assert target_gen_kw == pytest.approx(expected_target_gen_kw)
def test_create(self):
    """Module lookup on ESUpdate: unknown modules are reported missing and
    raise KeyError; a known module is returned without error."""
    config = self.createTestPath("local/mini_ert/mini_config")
    with ErtTestContext("python/enkf/data/mini_ert_simulated", config) as context:
        smoother = ESUpdate(context.getErt())
        self.assertFalse(smoother.hasModule("NO_NOT_THIS_MODULE"))
        with self.assertRaises(KeyError):
            smoother.getModule("STD_ENKF_XXX")
        smoother.getModule("STD_ENKF")
def test_update_workflow(self):
    """Run ROW_SCALING_WORKFLOW1, then perform a smoother update into the
    'target' file system."""
    with ErtTestContext("row_scaling", self.config_file) as tc:
        ert = tc.getErt()
        workflow = ert.getWorkflowList()["ROW_SCALING_WORKFLOW1"]
        self.assertTrue(workflow.run(ert))
        source_fs = init_data(ert)
        target_fs = ert.getEnkfFsManager().getFileSystem("target")
        context = ErtRunContext.ensemble_smoother_update(source_fs, target_fs)
        ESUpdate(ert).smootherUpdate(context)
def analyse(ert, target, source):
    """Runs analysis using target and source cases. Returns whether
    or not the analysis was successful."""
    fs_manager = ert.getEnkfFsManager()
    es_update = ESUpdate(ert)
    target_fs = fs_manager.getFileSystem(target)
    source_fs = fs_manager.getFileSystem(source)
    run_context = ErtRunContext.ensemble_smoother_update(
        source_fs,
        target_fs,
    )
    # The docstring promises a success flag, but the result of
    # smootherUpdate() was previously discarded (the function returned
    # None); propagate it to the caller.
    return es_update.smootherUpdate(run_context)
def __init__(self, config, strict=True, verbose=True):
    """ Initializes an instance of EnkfMain.

    Note: @config ought to be the ResConfig instance holding the
    configuration. It also accepts that config is the name of a
    configuration file, this is however deprecated.
    """
    res_config = self._init_res_config(config)
    if res_config is None:
        raise TypeError(
            "Failed to construct EnKFMain instance due to invalid res_config.")
    c_ptr = self._alloc(res_config, strict, verbose)
    if c_ptr:
        super(EnKFMain, self).__init__(c_ptr)
    else:
        raise ValueError(
            'Failed to construct EnKFMain instance from config %s.' % res_config)
    # NOTE(review): res_config is None already raised TypeError above, so
    # this `config is None` branch looks unreachable when
    # _init_res_config(None) returns None — confirm against
    # _init_res_config's behavior.
    if config is None:
        self.__simulation_runner = None
        self.__fs_manager = None
        self.__es_update = None
    else:
        # Lazily-used helper facades, all bound to this EnKFMain instance.
        self.__simulation_runner = EnkfSimulationRunner(self)
        self.__fs_manager = EnkfFsManager(self)
        self.__es_update = ESUpdate(self)
    self.__key_manager = KeyManager(self)
def test_update_report(setup_case, snapshot):
    """
    Note that this is now a snapshot test, so there is no guarantee that the
    snapshots are correct, they are just documenting the current behavior.
    """
    ert = EnKFMain(setup_case("local/snake_oil", "snake_oil.ert"))
    smoother = ESUpdate(ert)
    manager = ert.getEnkfFsManager()
    prior_fs = manager.getFileSystem("default_0")
    posterior_fs = manager.getFileSystem("target")
    context = ErtRunContext.ensemble_smoother_update(prior_fs, posterior_fs)
    smoother.smootherUpdate(context)
    # Compare the deprecated update log against the stored snapshot.
    log_file = Path(ert.analysisConfig().get_log_path()) / "deprecated"
    snapshot.assert_match(log_file.read_text("utf-8"), "update_log")
def _run_ministep(ert, obs_group, data_parameters, prior_name, target_name, output_path): local_config = ert.getLocalConfig() # Reset internal local config structure, in order to make your own local_config.clear() # A ministep is used to link betwen data and observations. # Make more ministeps to condition different groups together ministep = local_config.createMinistep("MINISTEP") # Add all dataset to localize data_all = local_config.createDataset("DATASET") for data in data_parameters: data_all.addNode(data) # Add all obs to be used in this updating scheme obsdata = local_config.createObsdata("OBS") for obs in obs_group: obsdata.addNode(obs) # Attach the created dataset and obsset to the ministep ministep.attachDataset(data_all) ministep.attachObsset(obsdata) # Then attach the ministep to the update step local_config.getUpdatestep().attachMinistep(ministep) # Perform update analysis ert.analysisConfig().set_log_path(output_path) run_context = ErtRunContext.ensemble_smoother_update( ert.getEnkfFsManager().getFileSystem(prior_name), ert.getEnkfFsManager().getFileSystem(target_name), ) ESUpdate(ert).smootherUpdate(run_context)
def __init__(self, model_config, res_config=None, strict=True, verbose=True):
    # Construct an EnKFMain from a model configuration file and/or an
    # already-parsed ResConfig.  If no ResConfig is supplied, one is parsed
    # from the config file.
    if model_config is not None and not isfile(model_config):
        raise IOError('No such configuration file "%s".' % model_config)
    if res_config is None:
        res_config = ResConfig(model_config)
        # Tie the ResConfig's C reference lifetime to this instance.
        res_config.convertToCReference(self)
    if res_config is None or not isinstance(res_config, ResConfig):
        raise TypeError(
            "Failed to construct EnKFMain instance due to invalid res_config.")
    c_ptr = self._alloc(model_config, res_config, strict, verbose)
    if c_ptr:
        super(EnKFMain, self).__init__(c_ptr)
    else:
        raise ValueError(
            'Failed to construct EnKFMain instance from config %s.' % model_config)
    # The model_config argument can be None; the only reason to
    # allow that possibility is to be able to test that the
    # site-config loads correctly.
    if model_config is None:
        self.__simulation_runner = None
        self.__fs_manager = None
        self.__es_update = None
    else:
        self.__simulation_runner = EnkfSimulationRunner(self)
        self.__fs_manager = EnkfFsManager(self)
        self.__es_update = ESUpdate(self)
    self.__key_manager = KeyManager(self)
def test_large_case(self):
    """Row-scaled local update on a grid large enough to exceed the default
    matrix size in enkf_main_update()."""
    # NOTE(review): the line breaks inside this config string were
    # reconstructed from a whitespace-mangled source — verify against the
    # original file.
    with open("config", "w") as fp:
        fp.write(
            """NUM_REALIZATIONS 10
GRID CASE.EGRID
FIELD PORO PARAMETER poro.grdecl INIT_FILES:fields/poro%d.grdecl
SUMMARY WBHP
OBS_CONFIG observations.txt
TIME_MAP timemap.txt
"""
        )
    for f in ["timemap.txt", "observations.txt"]:
        src_file = self.createTestPath(os.path.join("local/row_scaling", f))
        shutil.copy(src_file, "./")
    # The grid size must be greater than 250000 (the default matrix size in
    # enkf_main_update())
    grid = EclGridGenerator.create_rectangular((70, 70, 70), (1, 1, 1))
    grid.save_EGRID("CASE.EGRID")
    res_config = ResConfig(user_config_file="config")
    main = EnKFMain(res_config)
    init_fs = init_data(main)
    # Configure the local updates
    local_config = main.getLocalConfig()
    local_config.clear()
    local_data = local_config.createDataset("LOCAL")
    local_data.addNode("PORO")
    obs = local_config.createObsdata("OBSSET_LOCAL")
    obs.addNode("WBHP0")
    ministep = local_config.createMinistep("MINISTEP_LOCAL")
    ministep.attachDataset(local_data)
    ministep.attachObsset(obs)
    updatestep = local_config.getUpdatestep()
    updatestep.attachMinistep(ministep)
    # Apply the row scaling
    row_scaling = local_data.row_scaling("PORO")
    ens_config = main.ensembleConfig()
    poro_config = ens_config["PORO"]
    field_config = poro_config.getFieldModelConfig()
    grid = main.eclConfig().getGrid()
    row_scaling.assign(field_config.get_data_size(), ScalingTest(grid))
    es_update = ESUpdate(main)
    update_fs = main.getEnkfFsManager().getFileSystem("target2")
    run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs)
    es_update.smootherUpdate(run_context)
def test_snapshot_alpha(setup_case, alpha, expected):
    """
    Note that this is now a snapshot test, so there is no guarantee that the
    snapshots are correct, they are just documenting the current behavior.
    """
    res_config = setup_case("local/snake_oil", "snake_oil.ert")
    # Overwrite the observations with three FOPR observations whose
    # standard deviations differ so the alpha outlier filtering has
    # something to distinguish.
    # NOTE(review): the layout of this observation string was reconstructed
    # from a whitespace-mangled source — verify against the original file.
    obs_file = Path("observations") / "observations.txt"
    with obs_file.open(mode="w") as fin:
        fin.write(
            """
SUMMARY_OBSERVATION LOW_STD
{
    VALUE = 10;
    ERROR = 0.1;
    DATE = 2015-06-23;
    KEY = FOPR;
};
SUMMARY_OBSERVATION HIGH_STD
{
    VALUE = 10;
    ERROR = 1.0;
    DATE = 2015-06-23;
    KEY = FOPR;
};
SUMMARY_OBSERVATION EXTREMELY_HIGH_STD
{
    VALUE = 10;
    ERROR = 10.0;
    DATE = 2015-06-23;
    KEY = FOPR;
};
"""
        )
    ert = EnKFMain(res_config)
    es_update = ESUpdate(ert)
    ert.analysisConfig().selectModule("IES_ENKF")
    fsm = ert.getEnkfFsManager()
    sim_fs = fsm.getFileSystem("default_0")
    target_fs = fsm.getFileSystem("target")
    run_context = ErtRunContext.ensemble_smoother_update(sim_fs, target_fs)
    ert.analysisConfig().setEnkfAlpha(alpha)
    es_update.smootherUpdate(run_context)
    result_snapshot = ert.update_snapshots[run_context.get_id()]
    assert result_snapshot.alpha == alpha
    assert result_snapshot.ministep_snapshots["ALL_ACTIVE"].obs_status == expected
def test_update(setup_case, module, expected_gen_kw):
    """
    Note that this is now a snapshot test, so there is no guarantee that the
    snapshots are correct, they are just documenting the current behavior.
    """
    res_config = setup_case("local/snake_oil", "snake_oil.ert")
    ert = EnKFMain(res_config)
    es_update = ESUpdate(ert)
    # Select the parametrized analysis module before updating.
    ert.analysisConfig().selectModule(module)
    fsm = ert.getEnkfFsManager()
    sim_fs = fsm.getFileSystem("default_0")
    target_fs = fsm.getFileSystem("target")
    run_context = ErtRunContext.ensemble_smoother_update(sim_fs, target_fs)
    es_update.smootherUpdate(run_context)
    conf = ert.ensembleConfig()["SNAKE_OIL_PARAM"]
    sim_node = EnkfNode(conf)
    target_node = EnkfNode(conf)
    node_id = NodeId(0, 0)
    sim_node.load(sim_fs, node_id)
    target_node.load(target_fs, node_id)
    sim_gen_kw = list(sim_node.asGenKw())
    target_gen_kw = list(target_node.asGenKw())
    # The update must have changed the parameters; the prior is pinned to
    # the known sampled values, the posterior to the per-module snapshot.
    assert sim_gen_kw != target_gen_kw
    assert sim_gen_kw == pytest.approx(
        [
            -1.3035319087841115,
            0.8222709205428339,
            -1.1400029486153482,
            0.7477534046493867,
            -0.10400064074767973,
            -1.7223242794585338,
            0.0761604027734105,
            0.4039137216428462,
            0.10001691562080614,
            0.09549338450036506,
        ]
    )
    assert target_gen_kw == pytest.approx(expected_gen_kw)
def _init_from_real_enkf_main(self, real_enkf_main):
    # Wrap an existing enkf_main C object as a reference (is_reference=True,
    # so no ownership is taken); the real instance is kept as parent so it
    # outlives this wrapper.
    super(EnKFMain, self).__init__(
        real_enkf_main.from_param(real_enkf_main).value,
        parent=real_enkf_main,
        is_reference=True,
    )
    # Helper facades bound to this wrapper instance.
    self.__simulation_runner = EnkfSimulationRunner(self)
    self.__fs_manager = EnkfFsManager(self)
    self.__es_update = ESUpdate(self)
def _ensemble_smoother_run(ert, target_case):
    # Full ensemble-smoother cycle: run the prior ensemble, perform the
    # smoother update into `target_case` (with PRE/POST_UPDATE workflows),
    # switch to the updated case, and re-run the ensemble on it.
    source_fs, target_fs = setup_fs(ert, target_case)
    model_config = ert.getModelConfig()
    subst_list = ert.getDataKW()
    # All realizations enabled for the prior run.
    mask = BoolVector(default_value=True, initial_size=ert.getEnsembleSize())
    prior_context = ErtRunContext.ensemble_smoother(
        sim_fs=source_fs,
        target_fs=target_fs,
        mask=mask,
        path_fmt=model_config.getRunpathFormat(),
        jobname_fmt=model_config.getJobnameFormat(),
        subst_list=subst_list,
        itr=0)
    sim_runner = ert.getEnkfSimulationRunner()
    _run_ensemble_experiment(ert, prior_context, sim_runner)
    sim_runner.runWorkflows(HookRuntime.PRE_UPDATE)
    es_update = ESUpdate(ert)
    success = es_update.smootherUpdate(prior_context)
    if not success:
        raise AssertionError("Analysis of simulation failed!")
    sim_runner.runWorkflows(HookRuntime.POST_UPDATE)
    ert.getEnkfFsManager().switchFileSystem(prior_context.get_target_fs())
    # Re-run only realizations that both have data and are initialized.
    sim_fs = prior_context.get_target_fs()
    state = (RealizationStateEnum.STATE_HAS_DATA
             | RealizationStateEnum.STATE_INITIALIZED)
    mask = sim_fs.getStateMap().createMask(state)
    rerun_context = ErtRunContext.ensemble_smoother(
        sim_fs=sim_fs,
        target_fs=None,
        mask=mask,
        path_fmt=model_config.getRunpathFormat(),
        jobname_fmt=model_config.getJobnameFormat(),
        subst_list=subst_list,
        itr=1)
    _run_ensemble_experiment(ert, rerun_context, sim_runner)
def test_attach_obs_data_to_ministep(self):
    """After a default (ALL_ACTIVE) smoother update, the last ministep must
    expose one active observation block per expected observation key."""
    config = self.createTestPath("local/snake_oil/snake_oil.ert")
    expected_keys = {
        "WPR_DIFF_1",
        "WOPR_OP1_108",
        "FOPR",
        "WOPR_OP1_144",
        "WOPR_OP1_190",
        "WOPR_OP1_9",
        "WOPR_OP1_36",
        "WOPR_OP1_72",
    }
    with ErtTestContext("obs_data_ministep_test", config) as context:
        ert = context.getErt()
        es_update = ESUpdate(ert)
        fsm = ert.getEnkfFsManager()
        sim_fs = fsm.getFileSystem("default_0")
        target_fs = fsm.getFileSystem("target")
        run_context = ErtRunContext.ensemble_smoother_update(
            sim_fs, target_fs)
        es_update.smootherUpdate(run_context)
        update_step = ert.getLocalConfig().getUpdatestep()
        # Inspect the last ministep of the update step.
        ministep = update_step[len(update_step) - 1]
        obs_data = ministep.get_obs_data()
        self.assertEqual(len(expected_keys), obs_data.get_num_blocks())
        observed_obs_keys = set()
        for block_num in range(obs_data.get_num_blocks()):
            block = obs_data.get_block(block_num)
            obs_key = block.get_obs_key()
            observed_obs_keys.add(obs_key)
            # Every element of every block must be active.
            for i in range(len(block)):
                self.assertTrue(block.is_active(i))
        self.assertSetEqual(expected_keys, observed_obs_keys)
def run(self):
    # GUI action: run an ensemble-smoother analysis from the source case
    # into the target case selected in the run widget; warn and abort the
    # dialog on failure.
    target = self._run_widget.target_case()
    source = self._run_widget.source_case()
    ert = ERT.ert
    fs_manager = ert.getEnkfFsManager()
    es_update = ESUpdate(ert)
    target_fs = fs_manager.getFileSystem(target)
    source_fs = fs_manager.getFileSystem(source)
    # smootherUpdate takes an ErtRunContext (as every other call site in
    # this code base does), not the two file systems directly.
    run_context = ErtRunContext.ensemble_smoother_update(
        source_fs,
        target_fs,
    )
    success = es_update.smootherUpdate(run_context)
    if not success:
        msg = QMessageBox()
        msg.setIcon(QMessageBox.Warning)
        msg.setWindowTitle("Run Analysis")
        msg.setText("Unable to run analysis for case '%s'." % source)
        msg.setStandardButtons(QMessageBox.Ok)
        msg.exec_()
        return
    ERT.ertChanged.emit()
    self._dialog.accept()
def update(self, line):
    # Shell command: ensemble-smoother update from the current case into
    # the case named in `line`, running PRE_UPDATE/POST_UPDATE workflows
    # around the analysis.
    arguments = splitArguments(line)
    if len(arguments) == 1:
        case_name = arguments[0]
        ert = self.ert()
        fs_manager = ert.getEnkfFsManager()
        ert.getEnkfSimulationRunner().runWorkflows(HookRuntime.PRE_UPDATE)
        es_update = ESUpdate(ert)
        target_fs = fs_manager.getFileSystem(case_name)
        source_fs = fs_manager.getCurrentFileSystem()
        # smootherUpdate takes an ErtRunContext (as every other call site
        # in this code base does), not the two file systems directly.
        run_context = ErtRunContext.ensemble_smoother_update(
            source_fs, target_fs)
        success = es_update.smootherUpdate(run_context)
        if not success:
            self.lastCommandFailed("Unable to perform update")
        ert.getEnkfSimulationRunner().runWorkflows(HookRuntime.POST_UPDATE)
    else:
        self.lastCommandFailed(
            "Expected one argument: <target_fs> received: '%s'" % line)
def test_row_scaling_using_assign_vector(self):
    """Compare an unscaled smoother update with one where a row-scaling
    vector is assigned to PORO; both updates use the same RNG state so the
    difference stems from the scaling alone."""
    random_seed = "ABCDEFGHIJK0123456"
    with ErtTestContext("row_scaling", self.config_file) as tc:
        main = tc.getErt()
        init_fs = init_data(main)
        update_fs1 = main.getEnkfFsManager().getFileSystem("target1")
        # The first smoother update without row scaling
        es_update = ESUpdate(main)
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs1)
        rng = main.rng()
        rng.setState(random_seed)
        es_update.smootherUpdate(run_context)
        # Configure the local updates
        local_config = main.getLocalConfig()
        local_config.clear()
        local_data = local_config.createDataset("LOCAL")
        local_data.addNode("PORO")
        obs = local_config.createObsdata("OBSSET_LOCAL")
        obs.addNode("WWCT0")
        obs.addNode("WBHP0")
        ministep = local_config.createMinistep("MINISTEP_LOCAL")
        ministep.attachDataset(local_data)
        ministep.attachObsset(obs)
        updatestep = local_config.getUpdatestep()
        updatestep.attachMinistep(ministep)
        # Apply the row scaling
        row_scaling = local_data.row_scaling("PORO")
        ens_config = main.ensembleConfig()
        poro_config = ens_config["PORO"]
        field_config = poro_config.getFieldModelConfig()
        grid = main.eclConfig().getGrid()
        scaling = ScalingTest(grid)
        # Precompute one scaling factor per field cell and hand the whole
        # vector over in a single call.
        scaling_vector = np.ndarray(
            [field_config.get_data_size()], dtype=np.float32
        )
        for i in range(field_config.get_data_size()):
            scaling_vector[i] = scaling(i)
        row_scaling.assign_vector(scaling_vector)
        # Second update with row scaling
        update_fs2 = main.getEnkfFsManager().getFileSystem("target2")
        es_update = ESUpdate(main)
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs2)
        rng.setState(random_seed)
        es_update.smootherUpdate(run_context)
        # Fetch the three values initial, update without row scaling and
        # update with row scaling and verify that the row scaling has been
        # correctly applied.
        init_node = EnkfNode(poro_config)
        update_node1 = EnkfNode(poro_config)
        update_node2 = EnkfNode(poro_config)
        for iens in range(main.getEnsembleSize()):
            node_id = NodeId(0, iens)
            init_node.load(init_fs, node_id)
            update_node1.load(update_fs1, node_id)
            update_node2.load(update_fs2, node_id)
            assert_field_update(
                grid,
                init_node.asField(),
                update_node1.asField(),
                update_node2.asField(),
            )
def test_get_invalid_module(minimal_config):
    """getModule must raise KeyError for a module name that does not exist."""
    main = EnKFMain(minimal_config)
    smoother = ESUpdate(main)
    with pytest.raises(KeyError, match="No such module:STD_ENKF_XXX"):
        smoother.getModule("STD_ENKF_XXX")
def test_has_module(module, expected, minimal_config):
    """hasModule must report exactly the parametrized availability."""
    smoother = ESUpdate(EnKFMain(minimal_config))
    assert smoother.hasModule(module) is expected
def test_get_module(module, minimal_config):
    """Looking up a known module must not raise."""
    smoother = ESUpdate(EnKFMain(minimal_config))
    smoother.getModule(module)
def test_has_module(setup_case, module, expected):
    """hasModule must report exactly the parametrized availability."""
    config = setup_case("local/mini_ert", "mini_config")
    smoother = ESUpdate(EnKFMain(config))
    assert smoother.hasModule(module) is expected
def test_reuse_ALL_ACTIVE(self):
    """Run three smoother updates — global, local with a copied ALL_OBS
    obsset, and local split over two ministeps — and compare the resulting
    PORO fields cell by cell."""
    random_seed = "ABCDEFGHIJK0123456"
    with ErtTestContext("row_scaling", self.config_file) as tc:
        main = tc.getErt()
        grid = main.eclConfig().getGrid()
        init_fs = init_data(main)
        es_update = ESUpdate(main)
        update_fs1 = main.getEnkfFsManager().getFileSystem("target1")
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs1)
        rng = main.rng()
        rng.setState(random_seed)
        # Normal update without any local configuration
        es_update.smootherUpdate(run_context)
        local_config = main.getLocalConfig()
        local_config.clear_active()
        # Copying an unknown observation set must fail.
        with self.assertRaises(KeyError):
            obs_data = local_config.copyObsdata("NO_SUCH_OBS", "my_obs")
        local_data = local_config.createDataset("LOCAL")
        local_data.addNode("PORO")
        obs_data = local_config.copyObsdata("ALL_OBS", "my_obs")
        ministep = local_config.createMinistep("MINISTEP_LOCAL")
        ministep.attachDataset(local_data)
        ministep.attachObsset(obs_data)
        updatestep = local_config.getUpdatestep()
        updatestep.attachMinistep(ministep)
        update_fs2 = main.getEnkfFsManager().getFileSystem("target2")
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs2)
        rng.setState(random_seed)
        # Local update with reused ALL_OBS observation configuration
        es_update.smootherUpdate(run_context)
        # Move WBHP0 out of the first ministep into a second one.
        del obs_data["WBHP0"]
        ministep2 = local_config.createMinistep("MINISTEP_LOCAL2")
        obs_data2 = local_config.createObsdata("OBSDATA2")
        obs_data2.addNode("WBHP0")
        ministep2.attachDataset(local_data)
        ministep2.attachObsset(obs_data2)
        updatestep.attachMinistep(ministep2)
        update_fs3 = main.getEnkfFsManager().getFileSystem("target3")
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs3)
        # Local update with two ministeps - where one observation has been removed from the first
        es_update.smootherUpdate(run_context)
        ens_config = main.ensembleConfig()
        poro_config = ens_config["PORO"]
        update_node1 = EnkfNode(poro_config)
        update_node2 = EnkfNode(poro_config)
        update_node3 = EnkfNode(poro_config)
        for iens in range(main.getEnsembleSize()):
            node_id = NodeId(0, iens)
            update_node1.load(update_fs1, node_id)
            update_node2.load(update_fs2, node_id)
            update_node3.load(update_fs3, node_id)
            field1 = update_node1.asField()
            field2 = update_node2.asField()
            field3 = update_node3.asField()
            for k in range(grid.nz):
                for j in range(grid.ny):
                    for i in range(grid.nx):
                        # Global update and copied-ALL_OBS update must agree
                        # exactly.
                        assert field1.ijk_get_double(
                            i, j, k
                        ) == field2.ijk_get_double(i, j, k)
                        f1 = field1.ijk_get_double(i, j, k)
                        f3 = field3.ijk_get_double(i, j, k)
                        # Due to the randomness in the sampling process,
                        # which becomes different when the update steps is
                        # split in two ministeps we can not enforce
                        # equality here.
                        diff = abs(f1 - f3)
                        assert diff < 0.01
def test_2ministep(self):
    """Two row-scaled ministeps selecting layers 0 and 1 of PORO; the
    updated field must differ from the initial one."""
    with ErtTestContext("row_scaling", self.config_file) as tc:
        main = tc.getErt()
        init_fs = init_data(main)
        update_fs1 = main.getEnkfFsManager().getFileSystem("target1")
        # The first smoother update without row scaling
        es_update = ESUpdate(main)
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs1)
        rng = main.rng()
        es_update.smootherUpdate(run_context)
        # Configure the local updates
        local_config = main.getLocalConfig()
        local_config.clear()
        obs = local_config.createObsdata("OBSSET_LOCAL")
        obs.addNode("WBHP0")
        # Two ministeps, each with its own PORO dataset and row scaling,
        # sharing the same observation set.
        ministep1 = local_config.createMinistep("MINISTEP1")
        local_data1 = local_config.createDataset("LOCAL1")
        local_data1.addNode("PORO")
        row_scaling1 = local_data1.row_scaling("PORO")
        ministep1.attachDataset(local_data1)
        ministep1.attachObsset(obs)
        ministep2 = local_config.createMinistep("MINISTEP2")
        local_data2 = local_config.createDataset("LOCAL2")
        local_data2.addNode("PORO")
        row_scaling2 = local_data2.row_scaling("PORO")
        ministep2.attachDataset(local_data2)
        ministep2.attachObsset(obs)
        updatestep = local_config.getUpdatestep()
        updatestep.attachMinistep(ministep1)
        updatestep.attachMinistep(ministep2)
        # Apply the row scaling
        ens_config = main.ensembleConfig()
        poro_config = ens_config["PORO"]
        field_config = poro_config.getFieldModelConfig()
        grid = main.eclConfig().getGrid()
        row_scaling1.assign(field_config.get_data_size(), SelectLayer(0, grid))
        row_scaling2.assign(field_config.get_data_size(), SelectLayer(1, grid))
        update_fs2 = main.getEnkfFsManager().getFileSystem("target2")
        es_update = ESUpdate(main)
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs2)
        es_update.smootherUpdate(run_context)
        init_node = EnkfNode(poro_config)
        node1 = EnkfNode(poro_config)
        node2 = EnkfNode(poro_config)
        for iens in range(main.getEnsembleSize()):
            node_id = NodeId(0, iens)
            init_node.load(init_fs, node_id)
            node1.load(update_fs1, node_id)
            node2.load(update_fs2, node_id)
            init_field = init_node.asField()
            field1 = node1.asField()
            field2 = node2.asField()
            for iv, v1, v2 in zip(init_field, field1, field2):
                assert iv != v1
def test_get_module(setup_case, module):
    """Looking up a known module must not raise."""
    config = setup_case("local/mini_ert", "mini_config")
    smoother = ESUpdate(EnKFMain(config))
    smoother.getModule(module)