def loadObservationData(ert: EnKFMain, case_name, keys=None):
    """Load summary observation values and standard deviations as a DataFrame.

    The frame is indexed by observation time; for each observed summary key
    there is a value column ``<key>`` and a deviation column ``STD_<key>``.

    @type ert: EnKFMain
    @type case_name: str  (currently unused; kept for interface compatibility)
    @type keys: list of str
    @rtype: DataFrame
    """
    observations = ert.getObservations()
    history_length = ert.getHistoryLength()
    dates = [
        observations.getObservationTime(index).datetime()
        for index in range(1, history_length + 1)
    ]
    summary_keys = SummaryObservationCollector.getAllObservationKeys(ert)
    if keys is not None:
        summary_keys = [
            key for key in keys if key in summary_keys
        ]  # ignore keys that don't exist
    columns = summary_keys
    std_columns = ["STD_%s" % key for key in summary_keys]
    df = DataFrame(index=dates, columns=columns + std_columns)
    for key in summary_keys:
        observation_keys = ert.ensembleConfig().getNode(key).getObservationKeys()
        for obs_key in observation_keys:
            observation_data = observations[obs_key]
            for index in range(0, history_length + 1):
                if observation_data.isActive(index):
                    obs_time = observations.getObservationTime(index).datetime()
                    node = observation_data.getNode(index)
                    # BUGFIX: the original chained assignment
                    # ``df[key][obs_time] = value`` writes into a temporary
                    # Series copy and the value can be silently discarded by
                    # pandas; ``.loc`` assigns on the frame itself (and
                    # enlarges the index if report step 0 is active, which is
                    # not part of ``dates``).
                    df.loc[obs_time, key] = node.getValue()
                    df.loc[obs_time, "STD_%s" % key] = node.getStandardDeviation()
    return df
def test_with_enkf_fs(self):
    """Summary keys added to an EnkfFs survive umount and show up both in the
    reopened summary key set and in the ensemble config."""
    expected_keys = ("FOPT", "WWCT", "WOPR")
    config_file = self.createTestPath("Statoil/config/with_data/config")
    with TestAreaContext("enkf/summary_key_set/enkf_fs", store_area=True) as work_area:
        work_area.copy_parent_content(config_file)
        # Register the keys on a directly-opened storage and flush to disk.
        fs = EnkfFs("storage/default")
        key_set = fs.getSummaryKeySet()
        for summary_key in expected_keys:
            key_set.addSummaryKey(summary_key)
        fs.umount()
        # Reopen through a full EnKFMain and verify persistence.
        ert = EnKFMain(ResConfig("config"))
        reopened_fs = ert.getEnkfFsManager().getCurrentFileSystem()
        reopened_key_set = reopened_fs.getSummaryKeySet()
        for summary_key in expected_keys:
            self.assertTrue(summary_key in reopened_key_set)
        ensemble_config = ert.ensembleConfig()
        for summary_key in expected_keys:
            self.assertTrue(summary_key in ensemble_config)
        # A key that was never added must not appear.
        self.assertFalse("TCPU" in ensemble_config)
def loadAllGenKwData(ert: EnKFMain, case_name, keys=None, realization_index=None):
    """Collect GEN_KW parameter values for a case into a DataFrame.

    Rows are realizations (index named "Realization"), columns are GEN_KW keys.

    @type ert: EnKFMain
    @type case_name: str
    @type keys: list of str
    @rtype: DataFrame
    """
    fs = ert.getEnkfFsManager().getFileSystem(case_name)
    active_realizations = GenKwCollector.createActiveList(ert, fs)
    if realization_index is not None:
        if realization_index not in active_realizations:
            raise IndexError(f"No such realization ({realization_index})")
        active_realizations = [realization_index]
    requested_keys = ert.getKeyManager().genKwKeys()
    if keys is not None:
        # Silently drop requested keys the ensemble does not define.
        requested_keys = [key for key in keys if key in requested_keys]
    value_array = _lib.enkf_fs_keyword_data.keyword_data_get_realizations(
        ert.ensembleConfig(), fs, requested_keys, active_realizations
    )
    frame = DataFrame(
        data=value_array, index=active_realizations, columns=requested_keys
    )
    frame.index.name = "Realization"
    return frame
def loadGenData(ert: EnKFMain, case_name, key, report_step, realization_index=None):
    """Load GEN_DATA values for one key at one report step.

    @type ert: EnKFMain
    @type case_name: str
    @type key: str
    @type report_step: int
    @rtype: DataFrame

    In the returned dataframe the realisation index runs along the rows,
    and the gen_data element index runs vertically along the columns.
    """
    fs = ert.getEnkfFsManager().getFileSystem(case_name)
    realizations = fs.realizationList(RealizationStateEnum.STATE_HAS_DATA)
    # BUGFIX: use ``is not None`` instead of a bare truthiness test so that
    # realization_index=0 (a perfectly valid realization) is honoured rather
    # than silently ignored; this also matches the sibling loaders
    # (loadAllGenKwData / loadAllSummaryData).
    if realization_index is not None:
        if realization_index not in realizations:
            raise IndexError(f"No such realization {realization_index}")
        realizations = IntVector.active_list(str(realization_index))
    config_node = ert.ensembleConfig().getNode(key)
    config_node.getModelConfig()
    ensemble_data = EnsemblePlotGenData(config_node, fs, report_step)
    data_array = ensemble_data.getRealizations(realizations)
    realizations = numpy.array(realizations)
    return DataFrame(data=data_array, columns=realizations)
def test_localization(setup_case, expected_target_gen_kw):
    """
    Note that this is now a snapshot test, so there is no guarantee that the
    snapshots are correct, they are just documenting the current behavior.
    """
    res_config = setup_case("local/snake_oil", "snake_oil.ert")
    ert = EnKFMain(res_config)
    es_update = ESUpdate(ert)
    fs_manager = ert.getEnkfFsManager()
    sim_fs = fs_manager.getFileSystem("default_0")
    target_fs = fs_manager.getFileSystem("target")

    # perform localization: restrict the update to two parameter indices.
    active_indices = (1, 2)
    local_config = ert.getLocalConfig()
    local_config.clear()
    obs_set = local_config.createObsdata("OBSSET_LOCA")
    obs_set.addNode("WOPR_OP1_72")
    ministep = local_config.createMinistep("MINISTEP_LOCA")
    ministep.addActiveData("SNAKE_OIL_PARAM")  # replace dataset.addNode()
    active_list = ministep.getActiveList("SNAKE_OIL_PARAM")
    for idx in active_indices:
        active_list.addActiveIndex(idx)
    ministep.attachObsset(obs_set)
    local_config.getUpdatestep().attachMinistep(ministep)

    # Run ensemble smoother over all realizations.
    mask = [True] * ert.getEnsembleSize()
    model_config = ert.getModelConfig()
    run_context = ErtRunContext.ensemble_smoother(
        sim_fs,
        target_fs,
        mask,
        model_config.getRunpathFormat(),
        model_config.getJobnameFormat(),
        None,  # subst_list
        0,
    )
    es_update.smootherUpdate(run_context)

    # Load realization 0 from both filesystems for comparison.
    param_config = ert.ensembleConfig()["SNAKE_OIL_PARAM"]
    sim_node = EnkfNode(param_config)
    target_node = EnkfNode(param_config)
    node_id = NodeId(0, 0)
    sim_node.load(sim_fs, node_id)
    target_node.load(target_fs, node_id)
    sim_gen_kw = list(sim_node.asGenKw())
    target_gen_kw = list(target_node.asGenKw())

    # Test that the localized values has been updated
    assert sim_gen_kw[1:3] != target_gen_kw[1:3]
    # test that all the other values are left unchanged
    assert sim_gen_kw[3:] == target_gen_kw[3:]
    assert sim_gen_kw[0] == target_gen_kw[0]
    assert target_gen_kw == pytest.approx(expected_target_gen_kw)
def test_large_case(self):
    # Exercise row-scaled local updates on a grid larger than the default
    # matrix size, so the update is split into multiple chunks.
    with open("config", "w") as fp:
        fp.write(
            """NUM_REALIZATIONS 10
GRID CASE.EGRID
FIELD PORO PARAMETER poro.grdecl INIT_FILES:fields/poro%d.grdecl
SUMMARY WBHP
OBS_CONFIG observations.txt
TIME_MAP timemap.txt
"""
        )
    # Observation and time-map fixtures are copied in from the test data area.
    for f in ["timemap.txt", "observations.txt"]:
        src_file = self.createTestPath(os.path.join("local/row_scaling", f))
        shutil.copy(src_file, "./")
    # The grid size must be greater than 250000 (the default matrix size in
    # enkf_main_update())
    grid = EclGridGenerator.create_rectangular((70, 70, 70), (1, 1, 1))
    grid.save_EGRID("CASE.EGRID")
    res_config = ResConfig(user_config_file="config")
    main = EnKFMain(res_config)
    init_fs = init_data(main)
    # Configure the local updates
    local_config = main.getLocalConfig()
    local_config.clear()
    local_data = local_config.createDataset("LOCAL")
    local_data.addNode("PORO")
    obs = local_config.createObsdata("OBSSET_LOCAL")
    obs.addNode("WBHP0")
    ministep = local_config.createMinistep("MINISTEP_LOCAL")
    ministep.attachDataset(local_data)
    ministep.attachObsset(obs)
    updatestep = local_config.getUpdatestep()
    updatestep.attachMinistep(ministep)
    # Apply the row scaling
    row_scaling = local_data.row_scaling("PORO")
    ens_config = main.ensembleConfig()
    poro_config = ens_config["PORO"]
    field_config = poro_config.getFieldModelConfig()
    # NOTE(review): ScalingTest presumably maps grid cells to scaling
    # factors — its definition is outside this chunk; confirm before relying
    # on specific scaling values.
    grid = main.eclConfig().getGrid()
    row_scaling.assign(field_config.get_data_size(), ScalingTest(grid))
    # Run the smoother update into a separate target filesystem; the test
    # passes if the chunked, row-scaled update completes without error.
    es_update = ESUpdate(main)
    update_fs = main.getEnkfFsManager().getFileSystem("target2")
    run_context = ErtRunContext.ensemble_smoother_update(init_fs, update_fs)
    es_update.smootherUpdate(run_context)
def test_update(setup_case, module, expected_gen_kw):
    """
    Note that this is now a snapshot test, so there is no guarantee that the
    snapshots are correct, they are just documenting the current behavior.
    """
    res_config = setup_case("local/snake_oil", "snake_oil.ert")
    ert = EnKFMain(res_config)
    es_update = ESUpdate(ert)
    ert.analysisConfig().selectModule(module)
    fs_manager = ert.getEnkfFsManager()
    sim_fs = fs_manager.getFileSystem("default_0")
    target_fs = fs_manager.getFileSystem("target")

    run_context = ErtRunContext.ensemble_smoother_update(sim_fs, target_fs)
    es_update.smootherUpdate(run_context)

    # Load realization 0 of SNAKE_OIL_PARAM from prior and posterior storage.
    param_config = ert.ensembleConfig()["SNAKE_OIL_PARAM"]
    sim_node = EnkfNode(param_config)
    target_node = EnkfNode(param_config)
    node_id = NodeId(0, 0)
    sim_node.load(sim_fs, node_id)
    target_node.load(target_fs, node_id)
    sim_gen_kw = list(sim_node.asGenKw())
    target_gen_kw = list(target_node.asGenKw())

    # The update must have changed the parameters...
    assert sim_gen_kw != target_gen_kw
    # ...while the prior itself stays at its snapshot values.
    assert sim_gen_kw == pytest.approx(
        [
            -1.3035319087841115,
            0.8222709205428339,
            -1.1400029486153482,
            0.7477534046493867,
            -0.10400064074767973,
            -1.7223242794585338,
            0.0761604027734105,
            0.4039137216428462,
            0.10001691562080614,
            0.09549338450036506,
        ]
    )
    assert target_gen_kw == pytest.approx(expected_gen_kw)
def loadAllSummaryData(ert: EnKFMain, case_name, keys=None, realization_index=None):
    """Collect summary vectors for a case into a (Realization, Date) frame.

    @type ert: EnKFMain
    @type case_name: str
    @type keys: list of str
    @rtype: DataFrame
    """
    fs = ert.getEnkfFsManager().getFileSystem(case_name)
    time_map = fs.getTimeMap()
    # Report step 0 is skipped; dates start at step 1.
    dates = [time_map[step].datetime() for step in range(1, len(time_map))]
    active_realizations = SummaryCollector.createActiveList(ert, fs)
    if realization_index is not None:
        if realization_index not in active_realizations:
            raise IndexError(f"No such realization {realization_index}")
        active_realizations = [realization_index]
    requested_keys = ert.getKeyManager().summaryKeys()
    if keys is not None:
        # Silently drop requested keys the ensemble does not define.
        requested_keys = [key for key in keys if key in requested_keys]
    raw_data = _lib.enkf_fs_summary_data.get_summary_data(
        ert.ensembleConfig(), fs, requested_keys, active_realizations, len(dates)
    )
    row_index = MultiIndex.from_product(
        [active_realizations, dates], names=["Realization", "Date"]
    )
    return DataFrame(data=raw_data, index=row_index, columns=requested_keys)
def summaryKeyHasObservations(cls, ert: EnKFMain, key):
    """Return True when the ensemble node for *key* has observation keys."""
    observation_keys = ert.ensembleConfig().getNode(key).getObservationKeys()
    return len(observation_keys) > 0
# and submit the simulation. path_fmt = "/tmp/run%d" arg_list = [ RunArg.createEnsembleExperimentRunArg(fs, iens, path_fmt % iens) for iens in range(ert.getEnsembleSize()) ] for arg in arg_list: ert.createRunPath( arg ) ert.submitSimulation( arg ) while True: print("Waiting:%d Running:%d Complete:%d/%d" % (queue_manager.getNumWaiting( ), queue_manager.getNumRunning( ) , queue_manager.getNumSuccess() , queue_manager.getNumFailed( ))) if not queue_manager.isRunning( ): break time.sleep( 5 ) ens_config = ert.ensembleConfig( ) data_config = ens_config["SNAKE_OIL_OPR_DIFF"] param_config = ens_config["SNAKE_OIL_PARAM"] for iens in range(ert.getEnsembleSize( )): data_id = NodeId( realization_number = iens, report_step = 199 ) enkf_node1 = EnkfNode( data_config ) enkf_node1.load( fs , data_id ) gen_data = enkf_node1.asGenData( ) data = gen_data.getData( ) param_id = NodeId( realization_number = iens, report_step = 0 ) enkf_node2 = EnkfNode( param_config )
for iens in range(ert.getEnsembleSize()) ] for arg in arg_list: ert.createRunPath(arg) ert.submitSimulation(arg) while True: print("Waiting:%d Running:%d Complete:%d/%d" % (queue_manager.getNumWaiting(), queue_manager.getNumRunning(), queue_manager.getNumSuccess(), queue_manager.getNumFailed())) if not queue_manager.isRunning(): break time.sleep(5) ens_config = ert.ensembleConfig() data_config = ens_config["SNAKE_OIL_OPR_DIFF"] param_config = ens_config["SNAKE_OIL_PARAM"] for iens in range(ert.getEnsembleSize()): data_id = NodeId(realization_number=iens, report_step=199) enkf_node1 = EnkfNode(data_config) enkf_node1.load(fs, data_id) gen_data = enkf_node1.asGenData() data = gen_data.getData() param_id = NodeId(realization_number=iens, report_step=0) enkf_node2 = EnkfNode(param_config) enkf_node2.load(fs, param_id) gen_kw = enkf_node2.asGenKw()