def test_update(self):
    """Smoke test that an ensemble smoother update changes the parameters.

    Runs a smoother update from case ``default_0`` into case ``target`` and
    verifies that every GEN_KW value of SNAKE_OIL_PARAM for realization 0
    differs between the source and target cases.
    """
    config = self.createTestPath("local/snake_oil/snake_oil.ert")
    with ErtTestContext("update_test", config) as context:
        ert = context.getErt()
        es_update = ESUpdate(ert)
        fsm = ert.getEnkfFsManager()
        sim_fs = fsm.getFileSystem("default_0")
        target_fs = fsm.getFileSystem("target")
        # Fix: the original built a BoolVector realization mask here that was
        # never passed to anything; the dead allocation has been removed
        # (removing it cannot change behavior since the value was unused).
        run_context = ErtRunContext.ensemble_smoother_update(sim_fs, target_fs)
        es_update.smootherUpdate(run_context)

        # Load realization 0 of SNAKE_OIL_PARAM from both cases.
        conf = ert.ensembleConfig()["SNAKE_OIL_PARAM"]
        sim_node = EnkfNode(conf)
        target_node = EnkfNode(conf)
        node_id = NodeId(0, 0)
        sim_node.load(sim_fs, node_id)
        target_node.load(target_fs, node_id)
        sim_gen_kw = sim_node.asGenKw()
        target_gen_kw = target_node.asGenKw()

        # Test that an update has actually taken place
        for index in range(len(sim_gen_kw)):
            self.assertNotEqual(sim_gen_kw[index], target_gen_kw[index])
def run(self):
    """Run a manual smoother update from the widget's source case into its
    target case; on failure show a warning dialog and keep the dialog open."""
    # Read the case names selected in the run widget.
    target_name = self._run_widget.target_case()
    source_name = self._run_widget.source_case()

    ert = ERT.ert
    fs_manager = ert.getEnkfFsManager()
    smoother = ESUpdate(ert)
    target_fs = fs_manager.getFileSystem(target_name)
    source_fs = fs_manager.getFileSystem(source_name)

    update_context = ErtRunContext.ensemble_smoother_update(
        source_fs,
        target_fs,
    )
    if not smoother.smootherUpdate(update_context):
        # Analysis failed: tell the user which source case was involved.
        box = QMessageBox()
        box.setIcon(QMessageBox.Warning)
        box.setWindowTitle("Run Analysis")
        box.setText("Unable to run analysis for case '%s'." % source_name)
        box.setStandardButtons(QMessageBox.Ok)
        box.exec_()
        return

    # Success: notify listeners and close the dialog.
    ERT.ertChanged.emit()
    self._dialog.accept()
def test_update_code1(self):
    """Run a local smoother update on PORO with Gaussian-decay row scaling
    configured programmatically."""
    with ErtTestContext("row_scaling", self.config_file) as tc:
        main = tc.getErt()

        # Build a local config: one obs-set (two wells), one ministep on PORO.
        local_config = main.getLocalConfig()
        local_config.clear()
        obs = local_config.createObsdata("OBSSET_LOCAL")
        for obs_key in ("WBHP0", "WWCT0"):
            obs.addNode(obs_key)
        ministep = local_config.createMinistep("MINISTEP_LOCAL")
        ministep.addActiveData("PORO")
        ministep.attachObsset(obs)
        local_config.getUpdatestep().attachMinistep(ministep)

        # Attach Gaussian-decay row scaling centred on cell (5, 5, 1).
        row_scaling = ministep.row_scaling("PORO")
        field_config = main.ensembleConfig()["PORO"].getFieldModelConfig()
        grid = main.eclConfig().getGrid()
        obs_pos = grid.get_xyz(ijk=(5, 5, 1))
        length_scale = (2, 1, 0.50)
        row_scaling.assign(
            field_config.get_data_size(),
            GaussianDecay(obs_pos, length_scale, grid),
        )

        # Run the smoother update into a fresh target case.
        init_fs = init_data(main)
        target_fs = main.getEnkfFsManager().getFileSystem("target")
        es_update = ESUpdate(main)
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, target_fs)
        es_update.smootherUpdate(run_context)
def _run_ministep(ert, obs_group, data_parameters, prior_name, target_name, output_path):
    """Configure a single local-analysis ministep and run a smoother update.

    Args:
        ert: An EnKFMain-like instance.
        obs_group: Iterable of observation keys to condition on.
        data_parameters: Iterable of parameter node keys to update.
        prior_name: Name of the case holding the prior ensemble.
        target_name: Name of the case the update is written to.
        output_path: Directory for the analysis update log.
    """
    # Reset internal local config structure, in order to make your own.
    local_config = ert.getLocalConfig()
    local_config.clear()

    # A ministep links data nodes with observations; multiple ministeps can
    # condition different groups separately.
    ministep = local_config.createMinistep("MINISTEP")

    # Register every parameter node to be updated.
    dataset = local_config.createDataset("DATASET")
    for parameter in data_parameters:
        dataset.addNode(parameter)

    # Register every observation used by this updating scheme.
    obsdata = local_config.createObsdata("OBS")
    for observation in obs_group:
        obsdata.addNode(observation)

    # Wire dataset and obs-set into the ministep, then the ministep into the
    # global update step.
    ministep.attachDataset(dataset)
    ministep.attachObsset(obsdata)
    local_config.getUpdatestep().attachMinistep(ministep)

    # Perform the update analysis, logging to output_path.
    ert.analysisConfig().set_log_path(output_path)
    fs_manager = ert.getEnkfFsManager()
    run_context = ErtRunContext.ensemble_smoother_update(
        fs_manager.getFileSystem(prior_name),
        fs_manager.getFileSystem(target_name),
    )
    ESUpdate(ert).smootherUpdate(run_context)
def test_update_workflow(self):
    """ROW_SCALING_WORKFLOW1 must run successfully, after which a smoother
    update into the 'target' case completes."""
    with ErtTestContext("row_scaling", self.config_file) as tc:
        main = tc.getErt()

        # Run the row-scaling workflow defined in the config.
        workflow = main.getWorkflowList()["ROW_SCALING_WORKFLOW1"]
        self.assertTrue(workflow.run(main))

        # Smoother update from the initialized case into 'target'.
        init_fs = init_data(main)
        target_fs = main.getEnkfFsManager().getFileSystem("target")
        smoother = ESUpdate(main)
        context = ErtRunContext.ensemble_smoother_update(init_fs, target_fs)
        smoother.smootherUpdate(context)
def analyse(ert, target, source):
    """Run a smoother analysis from *source* into *target*.

    Args:
        ert: An EnKFMain-like instance.
        target: Name of the case the updated ensemble is written to.
        source: Name of the case the prior ensemble is read from.

    Returns:
        Whether or not the analysis was successful (the boolean result of
        ``smootherUpdate``).
    """
    fs_manager = ert.getEnkfFsManager()
    es_update = ESUpdate(ert)
    target_fs = fs_manager.getFileSystem(target)
    source_fs = fs_manager.getFileSystem(source)
    run_context = ErtRunContext.ensemble_smoother_update(
        source_fs,
        target_fs,
    )
    # Bug fix: the success flag was previously dropped, so callers could
    # never observe a failed analysis despite the documented contract.
    return es_update.smootherUpdate(run_context)
def test_update_report(setup_case, snapshot):
    """
    Note that this is now a snapshot test, so there is no guarantee that the
    snapshots are correct, they are just documenting the current behavior.
    """
    res_config = setup_case("local/snake_oil", "snake_oil.ert")
    ert = EnKFMain(res_config)
    smoother = ESUpdate(ert)

    fsm = ert.getEnkfFsManager()
    context = ErtRunContext.ensemble_smoother_update(
        fsm.getFileSystem("default_0"), fsm.getFileSystem("target")
    )
    smoother.smootherUpdate(context)

    # The update writes its log beneath the configured analysis log path.
    log_file = Path(ert.analysisConfig().get_log_path()) / "deprecated"
    snapshot.assert_match(log_file.read_text("utf-8"), "update_log")
def test_snapshot_alpha(setup_case, alpha, expected):
    """
    Note that this is now a snapshot test, so there is no guarantee that the
    snapshots are correct, they are just documenting the current behavior.
    """
    res_config = setup_case("local/snake_oil", "snake_oil.ert")
    obs_file = Path("observations") / "observations.txt"
    # Overwrite the case's observation file with three FOPR observations that
    # differ only in their reported standard deviation (ERROR), so the alpha
    # outlier filter classifies them differently.
    # NOTE(review): line breaks inside this literal were reconstructed after a
    # whitespace-mangling paste; confirm against the original observation file
    # format before relying on exact byte content.
    with obs_file.open(mode="w") as fin:
        fin.write(
            """
SUMMARY_OBSERVATION LOW_STD
{
VALUE = 10;
ERROR = 0.1;
DATE = 2015-06-23;
KEY = FOPR;
};
SUMMARY_OBSERVATION HIGH_STD
{
VALUE = 10;
ERROR = 1.0;
DATE = 2015-06-23;
KEY = FOPR;
};
SUMMARY_OBSERVATION EXTREMELY_HIGH_STD
{
VALUE = 10;
ERROR = 10.0;
DATE = 2015-06-23;
KEY = FOPR;
};
"""
        )
    ert = EnKFMain(res_config)
    es_update = ESUpdate(ert)
    ert.analysisConfig().selectModule("IES_ENKF")
    fsm = ert.getEnkfFsManager()
    sim_fs = fsm.getFileSystem("default_0")
    target_fs = fsm.getFileSystem("target")
    run_context = ErtRunContext.ensemble_smoother_update(sim_fs, target_fs)
    # Parametrized alpha steers which observations are marked outliers.
    ert.analysisConfig().setEnkfAlpha(alpha)
    es_update.smootherUpdate(run_context)
    # Inspect the snapshot recorded for this specific update run.
    result_snapshot = ert.update_snapshots[run_context.get_id()]
    assert result_snapshot.alpha == alpha
    assert result_snapshot.ministep_snapshots["ALL_ACTIVE"].obs_status == expected
def test_large_case(self):
    """Row-scaled local update on a grid large enough to exceed the default
    matrix size in enkf_main_update()."""
    # Minimal ERT config for a 10-member ensemble on a generated grid.
    # NOTE(review): line breaks inside this literal were reconstructed (one
    # ERT config keyword per line) after a whitespace-mangling paste; confirm
    # against the original before relying on exact byte content.
    with open("config", "w") as fp:
        fp.write(
            """NUM_REALIZATIONS 10
GRID CASE.EGRID
FIELD PORO PARAMETER poro.grdecl INIT_FILES:fields/poro%d.grdecl
SUMMARY WBHP
OBS_CONFIG observations.txt
TIME_MAP timemap.txt
"""
        )
    # Reuse timemap/observations fixtures from the row_scaling test data.
    for f in ["timemap.txt", "observations.txt"]:
        src_file = self.createTestPath(os.path.join("local/row_scaling", f))
        shutil.copy(src_file, "./")
    # The grid size must be greater than 250000 (the default matrix size in
    # enkf_main_update())
    grid = EclGridGenerator.create_rectangular((70, 70, 70), (1, 1, 1))
    grid.save_EGRID("CASE.EGRID")
    res_config = ResConfig(user_config_file="config")
    main = EnKFMain(res_config)
    init_fs = init_data(main)

    # Configure the local updates
    local_config = main.getLocalConfig()
    local_config.clear()
    local_data = local_config.createDataset("LOCAL")
    local_data.addNode("PORO")
    obs = local_config.createObsdata("OBSSET_LOCAL")
    obs.addNode("WBHP0")
    ministep = local_config.createMinistep("MINISTEP_LOCAL")
    ministep.attachDataset(local_data)
    ministep.attachObsset(obs)
    updatestep = local_config.getUpdatestep()
    updatestep.attachMinistep(ministep)

    # Apply the row scaling
    row_scaling = local_data.row_scaling("PORO")
    ens_config = main.ensembleConfig()
    poro_config = ens_config["PORO"]
    field_config = poro_config.getFieldModelConfig()
    grid = main.eclConfig().getGrid()
    row_scaling.assign(field_config.get_data_size(), ScalingTest(grid))

    # Run the smoother update into a fresh target case; the test passes if
    # the update completes on the large grid.
    es_update = ESUpdate(main)
    update_fs = main.getEnkfFsManager().getFileSystem("target2")
    run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs)
    es_update.smootherUpdate(run_context)
def test_update(setup_case, module, expected_gen_kw):
    """
    Note that this is now a snapshot test, so there is no guarantee that the
    snapshots are correct, they are just documenting the current behavior.
    """
    res_config = setup_case("local/snake_oil", "snake_oil.ert")
    ert = EnKFMain(res_config)
    es_update = ESUpdate(ert)
    ert.analysisConfig().selectModule(module)

    fsm = ert.getEnkfFsManager()
    sim_fs = fsm.getFileSystem("default_0")
    target_fs = fsm.getFileSystem("target")
    es_update.smootherUpdate(
        ErtRunContext.ensemble_smoother_update(sim_fs, target_fs)
    )

    # Load SNAKE_OIL_PARAM for realization 0 from both cases.
    conf = ert.ensembleConfig()["SNAKE_OIL_PARAM"]
    node_id = NodeId(0, 0)
    sim_node = EnkfNode(conf)
    sim_node.load(sim_fs, node_id)
    target_node = EnkfNode(conf)
    target_node.load(target_fs, node_id)
    sim_gen_kw = list(sim_node.asGenKw())
    target_gen_kw = list(target_node.asGenKw())

    # The update must have changed the parameters ...
    assert sim_gen_kw != target_gen_kw
    # ... while the prior stays pinned to the originally sampled values.
    assert sim_gen_kw == pytest.approx(
        [
            -1.3035319087841115,
            0.8222709205428339,
            -1.1400029486153482,
            0.7477534046493867,
            -0.10400064074767973,
            -1.7223242794585338,
            0.0761604027734105,
            0.4039137216428462,
            0.10001691562080614,
            0.09549338450036506,
        ]
    )
    assert target_gen_kw == pytest.approx(expected_gen_kw)
def test_attach_obs_data_to_ministep(self):
    """After a default (all-active) smoother update, the final ministep must
    hold one fully-active observation block per snake_oil observation key."""
    config = self.createTestPath("local/snake_oil/snake_oil.ert")
    expected_keys = {
        "WPR_DIFF_1",
        "WOPR_OP1_108",
        "FOPR",
        "WOPR_OP1_144",
        "WOPR_OP1_190",
        "WOPR_OP1_9",
        "WOPR_OP1_36",
        "WOPR_OP1_72",
    }
    with ErtTestContext("obs_data_ministep_test", config) as context:
        ert = context.getErt()
        es_update = ESUpdate(ert)
        fsm = ert.getEnkfFsManager()
        sim_fs = fsm.getFileSystem("default_0")
        target_fs = fsm.getFileSystem("target")
        context_obj = ErtRunContext.ensemble_smoother_update(sim_fs, target_fs)
        es_update.smootherUpdate(context_obj)

        # Inspect the last ministep of the update step that just ran.
        update_step = ert.getLocalConfig().getUpdatestep()
        ministep = update_step[len(update_step) - 1]
        obs_data = ministep.get_obs_data()
        self.assertEqual(len(expected_keys), obs_data.get_num_blocks())

        observed_obs_keys = set()
        for block_num in range(obs_data.get_num_blocks()):
            block = obs_data.get_block(block_num)
            observed_obs_keys.add(block.get_obs_key())
            # Every element of every block must be active.
            for i in range(len(block)):
                self.assertTrue(block.is_active(i))
        self.assertSetEqual(expected_keys, observed_obs_keys)
def run(self):
    """Run the smoother analysis selected in the run widget; pop a warning
    dialog and keep this dialog open if the analysis fails."""
    target_case = self._run_widget.target_case()
    source_case = self._run_widget.source_case()

    ert = ERT.ert
    manager = ert.getEnkfFsManager()
    smoother = ESUpdate(ert)
    target_fs = manager.getFileSystem(target_case)
    source_fs = manager.getFileSystem(source_case)

    context = ErtRunContext.ensemble_smoother_update(
        source_fs,
        target_fs,
    )
    succeeded = smoother.smootherUpdate(context)
    if not succeeded:
        # Report the failure, naming the source case.
        warning = QMessageBox()
        warning.setIcon(QMessageBox.Warning)
        warning.setWindowTitle("Run Analysis")
        warning.setText("Unable to run analysis for case '%s'." % source_case)
        warning.setStandardButtons(QMessageBox.Ok)
        warning.exec_()
        return

    ERT.ertChanged.emit()
    self._dialog.accept()
def test_reuse_ALL_ACTIVE(self):
    """Compare three smoother updates of the PORO field:

    1. a plain global update (no local config),
    2. a local update reusing a copy of the built-in ALL_OBS set,
    3. a two-ministep local update where WBHP0 is moved to its own ministep.

    (1) and (2) must agree exactly; (3) is only required to be close.
    """
    random_seed = "ABCDEFGHIJK0123456"
    with ErtTestContext("row_scaling", self.config_file) as tc:
        main = tc.getErt()
        grid = main.eclConfig().getGrid()
        init_fs = init_data(main)
        es_update = ESUpdate(main)
        update_fs1 = main.getEnkfFsManager().getFileSystem("target1")
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs1)
        rng = main.rng()
        # Pin the rng state so updates (1) and (2) sample identical noise.
        rng.setState(random_seed)
        # Normal update without any local configuration
        es_update.smootherUpdate(run_context)

        local_config = main.getLocalConfig()
        local_config.clear_active()
        # Copying from a non-existing obs-set must raise KeyError.
        with self.assertRaises(KeyError):
            obs_data = local_config.copyObsdata("NO_SUCH_OBS", "my_obs")
        local_data = local_config.createDataset("LOCAL")
        local_data.addNode("PORO")
        obs_data = local_config.copyObsdata("ALL_OBS", "my_obs")
        ministep = local_config.createMinistep("MINISTEP_LOCAL")
        ministep.attachDataset(local_data)
        ministep.attachObsset(obs_data)
        updatestep = local_config.getUpdatestep()
        updatestep.attachMinistep(ministep)
        update_fs2 = main.getEnkfFsManager().getFileSystem("target2")
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs2)
        rng.setState(random_seed)
        # Local update with reused ALL_OBS observation configuration
        es_update.smootherUpdate(run_context)

        # Split the observations: remove WBHP0 from the copied set and give
        # it a second ministep of its own.
        del obs_data["WBHP0"]
        ministep2 = local_config.createMinistep("MINISTEP_LOCAL2")
        obs_data2 = local_config.createObsdata("OBSDATA2")
        obs_data2.addNode("WBHP0")
        ministep2.attachDataset(local_data)
        ministep2.attachObsset(obs_data2)
        updatestep.attachMinistep(ministep2)
        update_fs3 = main.getEnkfFsManager().getFileSystem("target3")
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs3)
        # Local update with two ministeps - where one observation has been removed from the first
        # NOTE(review): the rng state is deliberately NOT reset here; the
        # tolerance check below accounts for the differing noise.
        es_update.smootherUpdate(run_context)

        # Compare the updated PORO fields cell by cell across all realizations.
        ens_config = main.ensembleConfig()
        poro_config = ens_config["PORO"]
        update_node1 = EnkfNode(poro_config)
        update_node2 = EnkfNode(poro_config)
        update_node3 = EnkfNode(poro_config)
        for iens in range(main.getEnsembleSize()):
            node_id = NodeId(0, iens)
            update_node1.load(update_fs1, node_id)
            update_node2.load(update_fs2, node_id)
            update_node3.load(update_fs3, node_id)
            field1 = update_node1.asField()
            field2 = update_node2.asField()
            field3 = update_node3.asField()
            for k in range(grid.nz):
                for j in range(grid.ny):
                    for i in range(grid.nx):
                        # Global update and ALL_OBS-local update: exact match.
                        assert field1.ijk_get_double(
                            i, j, k
                        ) == field2.ijk_get_double(i, j, k)
                        f1 = field1.ijk_get_double(i, j, k)
                        f3 = field3.ijk_get_double(i, j, k)
                        # Due to the randomness in the sampling process,
                        # which becomes different when the update steps is
                        # split in two ministeps we can not enforce
                        # equality here.
                        diff = abs(f1 - f3)
                        assert diff < 0.01
def test_2ministep(self):
    """Smoother update split into two row-scaled ministeps (layer 0 and
    layer 1 of PORO) must still change the initial field."""
    with ErtTestContext("row_scaling", self.config_file) as tc:
        main = tc.getErt()
        init_fs = init_data(main)
        update_fs1 = main.getEnkfFsManager().getFileSystem("target1")

        # The first smoother update without row scaling
        es_update = ESUpdate(main)
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs1)
        # NOTE(review): rng is fetched but never used -- possibly a leftover
        # from a seeded variant of this test; confirm before removing.
        rng = main.rng()
        es_update.smootherUpdate(run_context)

        # Configure the local updates: one shared obs-set, two ministeps each
        # updating PORO through its own row-scaling object.
        local_config = main.getLocalConfig()
        local_config.clear()
        obs = local_config.createObsdata("OBSSET_LOCAL")
        obs.addNode("WBHP0")
        ministep1 = local_config.createMinistep("MINISTEP1")
        local_data1 = local_config.createDataset("LOCAL1")
        local_data1.addNode("PORO")
        row_scaling1 = local_data1.row_scaling("PORO")
        ministep1.attachDataset(local_data1)
        ministep1.attachObsset(obs)
        ministep2 = local_config.createMinistep("MINISTEP2")
        local_data2 = local_config.createDataset("LOCAL2")
        local_data2.addNode("PORO")
        row_scaling2 = local_data2.row_scaling("PORO")
        ministep2.attachDataset(local_data2)
        ministep2.attachObsset(obs)
        updatestep = local_config.getUpdatestep()
        updatestep.attachMinistep(ministep1)
        updatestep.attachMinistep(ministep2)

        # Apply the row scaling: ministep1 selects grid layer 0, ministep2
        # selects layer 1.
        ens_config = main.ensembleConfig()
        poro_config = ens_config["PORO"]
        field_config = poro_config.getFieldModelConfig()
        grid = main.eclConfig().getGrid()
        row_scaling1.assign(field_config.get_data_size(), SelectLayer(0, grid))
        row_scaling2.assign(field_config.get_data_size(), SelectLayer(1, grid))

        # Second update, now with the two row-scaled ministeps.
        update_fs2 = main.getEnkfFsManager().getFileSystem("target2")
        es_update = ESUpdate(main)
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs2)
        es_update.smootherUpdate(run_context)

        # Compare initial field against both updates for every realization.
        init_node = EnkfNode(poro_config)
        node1 = EnkfNode(poro_config)
        node2 = EnkfNode(poro_config)
        for iens in range(main.getEnsembleSize()):
            node_id = NodeId(0, iens)
            init_node.load(init_fs, node_id)
            node1.load(update_fs1, node_id)
            node2.load(update_fs2, node_id)
            init_field = init_node.asField()
            field1 = node1.asField()
            field2 = node2.asField()
            for iv, v1, v2 in zip(init_field, field1, field2):
                # NOTE(review): only the no-scaling update (v1) is asserted
                # against the initial value; field2/v2 is loaded but never
                # checked -- possibly a missing assertion. Confirm intent.
                assert iv != v1
def test_row_scaling_using_assign_vector(self):
    """Row scaling supplied as a pre-computed numpy float32 vector via
    assign_vector() must yield the expected field update (checked by
    assert_field_update against the unscaled update)."""
    random_seed = "ABCDEFGHIJK0123456"
    with ErtTestContext("row_scaling", self.config_file) as tc:
        main = tc.getErt()
        init_fs = init_data(main)
        update_fs1 = main.getEnkfFsManager().getFileSystem("target1")

        # The first smoother update without row scaling
        es_update = ESUpdate(main)
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs1)
        rng = main.rng()
        # Fixed seed so both updates draw identical noise.
        rng.setState(random_seed)
        es_update.smootherUpdate(run_context)

        # Configure the local updates
        local_config = main.getLocalConfig()
        local_config.clear()
        local_data = local_config.createDataset("LOCAL")
        local_data.addNode("PORO")
        obs = local_config.createObsdata("OBSSET_LOCAL")
        obs.addNode("WWCT0")
        obs.addNode("WBHP0")
        ministep = local_config.createMinistep("MINISTEP_LOCAL")
        ministep.attachDataset(local_data)
        ministep.attachObsset(obs)
        updatestep = local_config.getUpdatestep()
        updatestep.attachMinistep(ministep)

        # Apply the row scaling
        row_scaling = local_data.row_scaling("PORO")
        ens_config = main.ensembleConfig()
        poro_config = ens_config["PORO"]
        field_config = poro_config.getFieldModelConfig()
        grid = main.eclConfig().getGrid()
        scaling = ScalingTest(grid)
        # np.ndarray() allocates uninitialized memory; every element is
        # filled in the loop below before use.
        scaling_vector = np.ndarray(
            [field_config.get_data_size()], dtype=np.float32
        )
        for i in range(field_config.get_data_size()):
            scaling_vector[i] = scaling(i)
        row_scaling.assign_vector(scaling_vector)

        # Second update with row scaling
        update_fs2 = main.getEnkfFsManager().getFileSystem("target2")
        es_update = ESUpdate(main)
        run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs2)
        rng.setState(random_seed)
        es_update.smootherUpdate(run_context)

        # Fetch the three values initial, update without row scaling and
        # update with row scaling and verify that the row scaling has been
        # correctly applied.
        init_node = EnkfNode(poro_config)
        update_node1 = EnkfNode(poro_config)
        update_node2 = EnkfNode(poro_config)
        for iens in range(main.getEnsembleSize()):
            node_id = NodeId(0, iens)
            init_node.load(init_fs, node_id)
            update_node1.load(update_fs1, node_id)
            update_node2.load(update_fs2, node_id)
            assert_field_update(
                grid,
                init_node.asField(),
                update_node1.asField(),
                update_node2.asField(),
            )