def test_gen_data_collector(self):
    """Check GEN_DATA observation-key lookup against the mini_ert test case.

    Report steps 1-3 of the PERLIN key each map to a GEN_PERLIN_<n>
    observation; an unconfigured step or an unknown data key yields None.
    """
    config = self.createTestPath("local/mini_ert/mini_config")
    with ErtTestContext(
        "python/enkf/export/gen_data_observation_collector", config
    ) as context:
        ert = context.getErt()

        # Configured report steps resolve to their observation keys.
        for step, expected in (
            (1, "GEN_PERLIN_1"),
            (2, "GEN_PERLIN_2"),
            (3, "GEN_PERLIN_3"),
        ):
            found = GenDataObservationCollector.getObservationKeyForDataKey(
                ert, "PERLIN", step
            )
            self.assertEqual(found, expected)

        # Step 4 has no observation configured.
        self.assertIsNone(
            GenDataObservationCollector.getObservationKeyForDataKey(
                ert, "PERLIN", 4
            )
        )

        # An unknown data key also resolves to None.
        self.assertIsNone(
            GenDataObservationCollector.getObservationKeyForDataKey(
                ert, "PERLINk", 1
            )
        )
def test_gen_data_report_steps():
    """Set up a minimal on-disk ERT case and map GEN_DATA report steps to observations.

    Writes a one-realization config with a single GEN_DATA key (RESPONSE)
    observed at report steps 0 and 1, then asserts that
    getObservationKeyForDataKey resolves each step to the right observation
    and returns None for unconfigured steps or unknown keys.
    """
    # Minimal ERT configuration: one GEN_DATA key over report steps 0 and 1.
    with open("config_file.ert", "w") as fout:
        fout.write(
            dedent(
                """
                NUM_REALIZATIONS 1
                OBS_CONFIG observations
                TIME_MAP time_map
                GEN_DATA RESPONSE RESULT_FILE:result_%d.out REPORT_STEPS:0,1 INPUT_FORMAT:ASCII
                """
            )
        )

    # Supporting fixture files: one observation per report step, plus a time map.
    for fname, content in (
        ("obs_data_0.txt", "1.0 0.1"),
        ("obs_data_1.txt", "2.0 0.1"),
        ("time_map", "2014-09-10\n2017-02-05"),
    ):
        with open(fname, "w") as fout:
            fout.write(content)

    with open("observations", "w") as fout:
        fout.write(
            dedent(
                """
                GENERAL_OBSERVATION OBS_0 {
                    DATA       = RESPONSE;
                    INDEX_LIST = 0;
                    RESTART    = 0;
                    OBS_FILE   = obs_data_0.txt;
                };
                GENERAL_OBSERVATION OBS_1 {
                    DATA       = RESPONSE;
                    INDEX_LIST = 0;
                    RESTART    = 1;
                    OBS_FILE   = obs_data_1.txt;
                };
                """
            )
        )

    ert = EnKFMain(ResConfig("config_file.ert"))

    # Each configured report step resolves to its observation key.
    assert (
        GenDataObservationCollector.getObservationKeyForDataKey(ert, "RESPONSE", 0)
        == "OBS_0"
    )
    assert (
        GenDataObservationCollector.getObservationKeyForDataKey(ert, "RESPONSE", 1)
        == "OBS_1"
    )

    # Unconfigured step and unknown key both resolve to None.
    assert (
        GenDataObservationCollector.getObservationKeyForDataKey(ert, "RESPONSE", 2)
        is None
    )
    assert (
        GenDataObservationCollector.getObservationKeyForDataKey(ert, "NOT_A_KEY", 0)
        is None
    )
def gatherGenDataObservationData(ert, case, key_with_report_step):
    """Load the observation values and standard deviations for one GEN_DATA key.

    ``key_with_report_step`` has the form ``"<key>@<report_step>"``.  Columns
    are renamed from the observation key to the requested data key so callers
    can join against response data directly.

    :rtype: pandas.DataFrame
    """
    data_key, step_text = key_with_report_step.split("@", 1)
    report_step = int(step_text)

    obs_key = GenDataObservationCollector.getObservationKeyForDataKey(
        ert, data_key, report_step
    )
    if obs_key is None:
        # No observation configured for this key/step: empty frame.
        return DataFrame().dropna()

    frame = GenDataObservationCollector.loadGenDataObservations(ert, case, obs_key)
    renamed = frame.rename(
        columns={
            obs_key: key_with_report_step,
            "STD_" + obs_key: "STD_" + key_with_report_step,
        }
    )
    # Drop rows with NaN so only complete observations remain.
    return renamed.dropna()
def observation_keys(self, key):
    """Return the observation keys attached to *key*.

    GEN_DATA keys may embed a report step as ``"<key>@<step>"`` (defaulting
    to step 0) and yield at most one observation key.  Summary keys take
    their observation keys from the ensemble config node.  Any other key
    has no observations.
    """
    key_manager = self._enkf_main.getKeyManager()

    if key_manager.isGenDataKey(key):
        parts = key.split("@")
        name = parts[0]
        # Report step defaults to 0 when no "@<step>" suffix is present.
        report_step = int(parts[1]) if len(parts) > 1 else 0

        obs_key = GenDataObservationCollector.getObservationKeyForDataKey(
            self._enkf_main, name, report_step
        )
        return [] if obs_key is None else [obs_key]

    if key_manager.isSummaryKey(key):
        node = self._enkf_main.ensembleConfig().getNode(key)
        return [str(k) for k in node.getObservationKeys()]

    return []
def test_gen_data_collector(self):
    """Check GEN_DATA observation lookup and data loading on the mini_ert case.

    Report steps 1-3 of PERLIN map to GEN_PERLIN_<n>; unconfigured steps and
    unknown keys resolve to None.  Loading a configured observation returns
    its value/std columns; loading an unconfigured one raises KeyError.
    """
    config = self.createTestPath("local/mini_ert/mini_config")
    with ErtTestContext(
        "python/enkf/export/gen_data_observation_collector", config
    ) as context:
        ert = context.getErt()

        # Configured report steps resolve to their observation keys.
        for step, expected in (
            (1, "GEN_PERLIN_1"),
            (2, "GEN_PERLIN_2"),
            (3, "GEN_PERLIN_3"),
        ):
            found = GenDataObservationCollector.getObservationKeyForDataKey(
                ert, "PERLIN", step
            )
            self.assertEqual(found, expected)

        # Step 4 has no observation configured.
        self.assertIsNone(
            GenDataObservationCollector.getObservationKeyForDataKey(
                ert, "PERLIN", 4
            )
        )

        # An unknown data key also resolves to None.
        self.assertIsNone(
            GenDataObservationCollector.getObservationKeyForDataKey(
                ert, "PERLINk", 1
            )
        )

        # Loading a configured observation yields value and std columns.
        data = GenDataObservationCollector.loadGenDataObservations(
            ert, "default", "GEN_PERLIN_1"
        )
        self.assertFloatEqual(data["GEN_PERLIN_1"][0], -0.616789)
        self.assertFloatEqual(data["STD_GEN_PERLIN_1"][0], 0.2)

        # Loading an unconfigured observation key raises KeyError.
        with self.assertRaises(KeyError):
            GenDataObservationCollector.loadGenDataObservations(
                ert, "default", "GEN_PERLIN_4"
            )
def test_ahmanalysis_run_field(test_data_root, grid_prop):
    """Run AhmAnalysisJob on a case with scalar and FIELD parameters.

    Builds the snake_oil field case with per-realization PERMX/PORO grdecl
    files, runs the job, and asserts that the delta-field CSV is produced
    with the expected column count and that FIELD_PERMX appears (non-empty)
    in the resulting KS matrix.
    """
    # Copy the snake_oil case into a scratch directory and enter it.
    test_data_dir = os.path.join(test_data_root, "snake_oil")
    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    # Generate per-realization field parameter files on a small grid.
    os.makedirs("fields")
    grid = EclGridGenerator.createRectangular((10, 12, 5), (1, 1, 1))
    n_cells = grid.getGlobalSize()
    for iens in range(10):
        grid_prop("PERMX", 10, n_cells, "fields/permx%d.grdecl" % iens)
        grid_prop("PORO", 0.2, n_cells, "fields/poro%d.grdecl" % iens)

    res_config = ResConfig("snake_oil_field.ert")
    res_config.convertToCReference(None)
    ert = EnKFMain(res_config)

    ahmanalysis.AhmAnalysisJob(ert).run(prior_name="default")

    # The job should have generated the delta field parameter report.
    obs_keys = (
        GenDataObservationCollector.getAllObservationKeys(ert)
        + SummaryObservationCollector.getAllObservationKeys(ert)
    )
    output_deltafield = os.path.join(
        "storage",
        "snake_oil_field",
        "reports",
        "default",
        "AhmAnalysisJob",
        "delta_fieldPERMX.csv",
    )
    assert os.path.isfile(output_deltafield)

    delta_df = pd.read_csv(output_deltafield, index_col=0)
    assert len(delta_df.columns) == 8 + (len(obs_keys) * 2) + 1

    # The field parameter must be present and non-empty in the KS matrix.
    output_ks = output_deltafield.replace("delta_fieldPERMX.csv", "ks.csv")
    ks_df = pd.read_csv(output_ks, index_col=0)
    assert not ks_df.empty
    assert "FIELD_PERMX" in ks_df.index.tolist()
    check_empty = ks_df.loc[["FIELD_PERMX"], :].isnull().all(axis=1)
    assert not check_empty["FIELD_PERMX"]
key, report_step = key.split("@", 1) report_step = int(report_step) try: data = GenDataCollector.loadGenData(ert, case, key, report_step) except ValueError: data = DataFrame() return data.dropna() # removes all rows that has a NaN @staticmethod def gatherGenDataObservationData(ert, case, key_with_report_step): """ :rtype: pandas.DataFrame """ key, report_step = key_with_report_step.split("@", 1) report_step = int(report_step) obs_key = GenDataObservationCollector.getObservationKeyForDataKey( ert, key, report_step) if obs_key is not None: obs_data = GenDataObservationCollector.loadGenDataObservations( ert, case, obs_key) columns = { obs_key: key_with_report_step, "STD_%s" % obs_key: "STD_%s" % key_with_report_step } obs_data = obs_data.rename(columns=columns) else: obs_data = DataFrame() return obs_data.dropna() @staticmethod