Exemple #1
0
def test_main_entry_point_summary_data_calc():
    """Scaling two summary keys leaves WOPR_OP1_108's std scaling at sqrt(1)."""
    config = {
        "CALCULATE_KEYS": {
            "keys": [{"key": "WOPR_OP1_108"}, {"key": "WOPR_OP1_144"}]
        }
    }

    source_dir = os.path.join(TEST_DATA_DIR, "local", "snake_oil")
    shutil.copytree(source_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))
    vector = ert.getObservations()["WOPR_OP1_108"]

    # Scalings start out untouched.
    for i, obs_node in enumerate(vector):
        assert obs_node.getStdScaling(i) == 1.0

    CorrelatedObservationsScalingJob(ert).run(config)

    # For this configuration the factor works out to sqrt(1.0) == 1.0.
    for i, obs_node in enumerate(vector):
        assert obs_node.getStdScaling(i) == np.sqrt(1.0)
 def loadObservationData(ert: EnKFMain, case_name, keys=None):
     """Collect summary observation values and standard deviations.

     Builds a DataFrame indexed by the observation dates, with one column
     per summary key plus a matching "STD_<key>" column for each.

     @type ert: EnKFMain
     @type case_name: str
     @type keys: list of str
     @rtype: DataFrame

     NOTE(review): ``case_name`` is accepted but never referenced in the
     body -- presumably kept for interface symmetry; confirm with callers.
     """
     observations = ert.getObservations()
     history_length = ert.getHistoryLength()
     # Index dates cover report steps 1..history_length inclusive.
     dates = [
         observations.getObservationTime(index).datetime()
         for index in range(1, history_length + 1)
     ]
     summary_keys = SummaryObservationCollector.getAllObservationKeys(ert)
     if keys is not None:
         summary_keys = [key for key in keys if key in summary_keys
                         ]  # ignore keys that don't exist
     columns = summary_keys
     std_columns = ["STD_%s" % key for key in summary_keys]
     df = DataFrame(index=dates, columns=columns + std_columns)
     for key in summary_keys:
         observation_keys = ert.ensembleConfig().getNode(
             key).getObservationKeys()
         for obs_key in observation_keys:
             observation_data = observations[obs_key]
             # NOTE(review): this loop starts at step 0 while the frame's
             # date index starts at step 1 -- an active observation at
             # step 0 would target a label missing from the index.
             # Confirm whether step 0 can ever be active here.
             for index in range(0, history_length + 1):
                 if observation_data.isActive(index):
                     obs_time = observations.getObservationTime(
                         index).datetime()
                     node = observation_data.getNode(index)
                     value = node.getValue()
                     std = node.getStandardDeviation()
                     # Chained indexing (df[col][row]) writes through an
                     # intermediate Series; modern pandas prefers
                     # df.at[row, col]. Left as-is to avoid behaviour
                     # change.
                     df[key][obs_time] = value
                     df["STD_%s" % key][obs_time] = std
     return df
Exemple #3
0
def test_create_observation_vectors(setup_ert):
    """Only keys named in UPDATE_KEYS end up in the generated event list."""
    raw_config = {
        "CALCULATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]},
        "UPDATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]},
    }
    suite = configsuite.ConfigSuite(
        raw_config,
        job_config._CORRELATED_OBSERVATIONS_SCHEMA,
        deduce_required=True,
    )

    ert = EnKFMain(setup_ert)
    observations = ert.getObservations()

    events = create_active_lists(observations, suite.snapshot.UPDATE_KEYS.keys)
    event_keys = {event.key for event in events}

    assert "WPR_DIFF_1" in event_keys
    assert "SNAKE_OIL_WPR_DIFF" not in event_keys
Exemple #4
0
def test_add_observation_vectors(test_data_root):
    """create_active_lists emits events only for keys listed in UPDATE_KEYS."""
    config = configsuite.ConfigSuite(
        {"UPDATE_KEYS": {"keys": [{"key": "WOPR_OP1_108"}]}},
        job_config._CORRELATED_OBSERVATIONS_SCHEMA,
        deduce_required=True,
    )

    shutil.copytree(os.path.join(test_data_root, "snake_oil"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))

    events = create_active_lists(
        ert.getObservations(), config.snapshot.UPDATE_KEYS.keys
    )
    event_keys = [event.key for event in events]

    assert "WOPR_OP1_108" in event_keys
    assert "WOPR_OP1_144" not in event_keys
def test_add_observation_vectors():
    """build_schema variant: only configured keys produce active-list events."""
    config = configsuite.ConfigSuite(
        {"UPDATE_KEYS": {"keys": [{"key": "WOPR_OP1_108"}]}},
        job_config.build_schema(),
    )

    shutil.copytree(
        os.path.join(TEST_DATA_DIR, "local", "snake_oil_field"), "test_data"
    )
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))

    events = job._create_active_lists(
        ert.getObservations(), config.snapshot.UPDATE_KEYS.keys
    )
    event_keys = [event.key for event in events]

    assert "WOPR_OP1_108" in event_keys
    assert "WOPR_OP1_144" not in event_keys
def test_main_entry_point_block_and_summary_data_calc():
    """Mixing a summary key (FOPT) and a block key (RFT3) scales RFT3 by sqrt(64)."""
    arguments = {
        "CALCULATE_KEYS": {"keys": [{"key": "FOPT"}, {"key": "RFT3"}]}
    }

    shutil.copytree(
        os.path.join(TEST_DATA_DIR, "Equinor", "config", "with_RFT"), "test_data"
    )
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("config"))
    facade = LibresFacade(ert)
    rft_vector = ert.getObservations()["RFT3"]

    for i, node in enumerate(rft_vector):
        assert node.getStdScaling(i) == 1.0

    scaling_job.scaling_job(facade, arguments)

    for i, node in enumerate(rft_vector):
        assert node.getStdScaling(i) == np.sqrt(64)
def test_compare_different_jobs():
    """The legacy STD_SCALE job and scaling_job agree on WPR_DIFF_1."""
    arguments = {"CALCULATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]}}

    shutil.copytree(os.path.join(TEST_DATA_DIR, "local", "snake_oil"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))
    facade = LibresFacade(ert)
    vector = ert.getObservations()["WPR_DIFF_1"]

    assert_obs_vector(vector, 1.0)

    legacy_job = ert.getWorkflowList().getJob("STD_SCALE_CORRELATED_OBS")
    legacy_job.run(ert, ["WPR_DIFF_1"])
    # Result of old job:
    assert_obs_vector(vector, np.sqrt(4 / 2))

    scaling_job.scaling_job(facade, arguments)
    # Result of new job with no sub-indexing:
    assert_obs_vector(vector, np.sqrt(4 / 2))
Exemple #8
0
def test_misfit_preprocessor_with_scaling(test_data_root):
    """custom_scale workflow scales the expected FOPR clusters."""
    shutil.copytree(os.path.join(test_data_root, "snake_oil"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))

    workflow_config = {
        "workflow": {
            "type": "custom_scale",
            "clustering": {"fcluster": {"threshold": 1.0}},
        }
    }
    config_file = "my_config_file.yaml"
    with open(config_file, "w") as f:
        yaml.dump(workflow_config, f)

    misfit_preprocessor.MisfitPreprocessorJob(ert).run(config_file)

    fopr = ert.getObservations()["FOPR"]
    # assert that this arbitrarily chosen cluster gets scaled as expected
    for idx in range(13, 21):
        assert fopr.getNode(idx).getStdScaling() == 2.8284271247461903

    for idx in range(38, 45):
        assert fopr.getNode(idx).getStdScaling() == 2.6457513110645907
Exemple #9
0
def test_main_entry_point_summary_data_update():
    """UPDATE_KEYS restricts which indices of WWCT:OP_2 receive scaling."""
    update_indices = [1, 2, 3, 4, 5]
    cos_config = {
        "CALCULATE_KEYS": {"keys": [{"key": "WWCT:OP_1"}, {"key": "WWCT:OP_2"}]},
        "UPDATE_KEYS": {"keys": [{"key": "WWCT:OP_2", "index": update_indices}]},
    }

    shutil.copytree(
        os.path.join(TEST_DATA_DIR, "Equinor", "config", "obs_testing"), "test_data"
    )
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("config"))
    observations = ert.getObservations()

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    # Only the listed indices of WWCT:OP_2 are scaled.
    for i, node in enumerate(observations["WWCT:OP_2"]):
        expected = np.sqrt(61.0 * 2.0) if i in update_indices else 1.0
        assert node.getStdScaling(i) == expected

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    # WWCT:OP_1 is not in UPDATE_KEYS, so it stays unscaled.
    for i, node in enumerate(observations["WWCT:OP_1"]):
        assert node.getStdScaling(i) == 1.0
Exemple #10
0
    def loadAllMisfitData(ert: EnKFMain, case_name) -> DataFrame:
        """Build a realizations-by-misfit-keys DataFrame for *case_name*.

        Rows are the active realization numbers; columns are the misfit
        keys, the last of which accumulates the sum of all per-vector
        misfits.

        @type ert: EnKFMain
        @type case_name: str
        @rtype: DataFrame
        """
        fs = ert.getEnkfFsManager().getFileSystem(case_name)

        realizations = MisfitCollector.createActiveList(ert, fs)
        misfit_keys = ert.getKeyManager().misfitKeys(sort_keys=False)
        # The final key collects the running sum of all misfits.
        misfit_sum_index = len(misfit_keys) - 1

        misfit_array = numpy.empty(shape=(len(misfit_keys), len(realizations)),
                                   dtype=numpy.float64)
        misfit_array.fill(numpy.nan)
        # The sum row must start at 0 (not NaN) so += accumulates.
        misfit_array[misfit_sum_index] = 0.0

        # NOTE(review): assumes ert.getObservations() enumerates in the same
        # order as misfit_keys[:-1] -- confirm against KeyManager.
        for column_index, obs_vector in enumerate(ert.getObservations()):

            for realization_index, realization_number in enumerate(
                    realizations):
                misfit = obs_vector.getTotalChi2(fs, realization_number)

                misfit_array[column_index][realization_index] = misfit
                misfit_array[misfit_sum_index][realization_index] += misfit

        misfit_data = DataFrame(data=numpy.transpose(misfit_array),
                                index=realizations,
                                columns=misfit_keys)
        misfit_data.index.name = "Realization"

        return misfit_data
def test_installed_python_version_of_enkf_scaling_job(setup_ert, monkeypatch):
    """Run the installed scaling workflow job plain, then with sub-indices."""
    ert = EnKFMain(setup_ert)
    vector = ert.getObservations()["WPR_DIFF_1"]

    assert_obs_vector(vector, 1.0)

    scaling_config = {"CALCULATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]}}
    with open("job_config.yml", "w") as fout:
        yaml.dump(scaling_config, fout)

    workflows = ert.getWorkflowList()
    workflows.addJob(
        "CORRELATE_OBSERVATIONS_SCALING",
        os.path.join(get_job_dir(), "CORRELATED_OBSERVATIONS_SCALING"),
    )
    installed_job = workflows.getJob("CORRELATE_OBSERVATIONS_SCALING")
    installed_job.run(ert, ["job_config.yml"])

    assert_obs_vector(vector, np.sqrt(4.0 / 2.0))

    # Re-run with an index restriction on the calculate key.
    scaling_config["CALCULATE_KEYS"]["keys"][0]["index"] = [400, 800, 1200]
    with open("job_config.yml", "w") as fout:
        yaml.dump(scaling_config, fout)
    installed_job.run(ert, ["job_config.yml"])

    assert_obs_vector(
        vector,
        np.sqrt(4.0 / 2.0),
        index_list=[0, 1, 2],
        val_2=np.sqrt(3.0 / 2.0),
    )
def test_create_observation_vectors(setup_ert):
    """build_schema variant: events are created for configured keys only."""
    raw_config = {
        "CALCULATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]},
        "UPDATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]},
    }
    config = configsuite.ConfigSuite(raw_config, job_config.build_schema())

    ert = EnKFMain(setup_ert)
    observations = ert.getObservations()

    events = job._create_active_lists(
        observations, config.snapshot.UPDATE_KEYS.keys
    )
    event_keys = [event.key for event in events]

    assert "WPR_DIFF_1" in event_keys
    assert "SNAKE_OIL_WPR_DIFF" not in event_keys
Exemple #13
0
def test_misfit_preprocessor_with_scaling(test_data_root):
    """spearman_correlation clustering scales the chosen FOPR cluster."""
    shutil.copytree(os.path.join(test_data_root, "snake_oil"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))

    preprocessor_config = {
        "clustering": {
            "method": "spearman_correlation",
            "spearman_correlation": {"fcluster": {"t": 1.0}},
        }
    }
    config_file = "my_config_file.yaml"
    with open(config_file, "w") as f:
        yaml.dump(preprocessor_config, f)

    misfit_preprocessor.MisfitPreprocessorJob(ert).run(config_file)

    fopr = ert.getObservations()["FOPR"]
    # assert that this arbitrarily chosen cluster gets scaled as expected
    for idx in range(13, 21):
        assert fopr.getNode(idx).getStdScaling() == 2.8284271247461903
Exemple #14
0
def test_compare_different_jobs(test_data_root):
    """Legacy job and CorrelatedObservationsScalingJob agree on WPR_DIFF_1."""
    cos_config = {"CALCULATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]}}

    shutil.copytree(os.path.join(test_data_root, "snake_oil"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))
    vector = ert.getObservations()["WPR_DIFF_1"]

    assert_obs_vector(vector, 1.0)

    legacy_job = ert.getWorkflowList().getJob("STD_SCALE_CORRELATED_OBS")
    legacy_job.run(ert, ["WPR_DIFF_1"])
    # Result of old job:
    assert_obs_vector(vector, np.sqrt(4 / 2))

    CorrelatedObservationsScalingJob(ert).run(cos_config)
    # Result of new job with no sub-indexing:
    assert_obs_vector(vector, np.sqrt(4 / 2))
Exemple #15
0
def test_main_entry_point_gen_data():
    """Gen-data scaling honours UPDATE_KEYS sub-indices and publishes reports."""
    cos_config = {
        "CALCULATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]},
        "UPDATE_KEYS": {"keys": [{"key": "WPR_DIFF_1", "index": [400, 800]}]},
    }

    shutil.copytree(os.path.join(TEST_DATA_DIR, "local", "snake_oil"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    vector = ert.getObservations()["WPR_DIFF_1"]
    assert_obs_vector(vector, 1.0, [0, 1], np.sqrt(4 / 2))

    # Restrict the calculate key to a sub-index set and run again.
    cos_config["CALCULATE_KEYS"]["keys"][0]["index"] = [400, 800, 1200]
    CorrelatedObservationsScalingJob(ert).run(cos_config)
    assert_obs_vector(vector, 1.0, [0, 1], np.sqrt(3.0 / 2.0))

    report_dir = (
        "storage/snake_oil/ensemble/reports/CorrelatedObservationsScalingJob"
    )
    # Assert that data was published correctly
    with open(report_dir + "/svd.json") as f:
        assert json.load(f) == pytest.approx(
            (6.531760256452532, 2.0045135017540487, 1.1768827000026516), 0.1
        )

    with open(report_dir + "/scale_factor.json") as f:
        assert json.load(f) == pytest.approx(1.224744871391589, 0.1)
 def getAllObservationKeys(ert: EnKFMain):
     """Return every GEN_OBS-typed observation key registered in *ert*.

     @type ert: EnKFMain
     @rtype: list of str
     """
     typed_keys = ert.getObservations().getTypedKeylist(
         EnkfObservationImplementationType.GEN_OBS)
     return list(typed_keys)
Exemple #17
0
def test_old_enkf_scaling_job(setup_ert):
    """The legacy STD_SCALE_CORRELATED_OBS job scales WPR_DIFF_1 by sqrt(2)."""
    ert = EnKFMain(setup_ert)
    vector = ert.getObservations()["WPR_DIFF_1"]

    assert_obs_vector(vector, 1.0)

    legacy_job = ert.getWorkflowList().getJob("STD_SCALE_CORRELATED_OBS")
    legacy_job.run(ert, ["WPR_DIFF_1"])

    assert_obs_vector(vector, np.sqrt(4.0 / 2.0))
Exemple #18
0
def test_scaling(test_data_root):
    """SpearmanCorrelationJob with -t 1.0 scales the chosen FOPR cluster."""
    shutil.copytree(os.path.join(test_data_root, "snake_oil"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))

    sc.SpearmanCorrelationJob(ert).run("-t", "1.0")

    fopr = ert.getObservations()["FOPR"]
    # assert that this arbitrarily chosen cluster gets scaled as expected
    for idx in range(13, 21):
        assert fopr.getNode(idx).getStdScaling() == 2.8284271247461903
def test_main_entry_point_summary_data_calc():
    """scaling_job on two summary keys, with and without index filters."""
    arguments = {
        "CALCULATE_KEYS": {
            "keys": [{"key": "WOPR_OP1_108"}, {"key": "WOPR_OP1_144"}]
        }
    }

    shutil.copytree(os.path.join(TEST_DATA_DIR, "local", "snake_oil"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))
    facade = LibresFacade(ert)
    vector = ert.getObservations()["WOPR_OP1_108"]

    for i, node in enumerate(vector):
        assert node.getStdScaling(i) == 1.0

    scaling_job.scaling_job(facade, arguments)

    for i, node in enumerate(vector):
        assert node.getStdScaling(i) == np.sqrt((2.0 * 6.0) / 2.0)

    for key_config in arguments["CALCULATE_KEYS"]["keys"]:
        key_config["index"] = [1, 2, 3]

    with pytest.raises(ValueError):  # Will give an empty data set
        scaling_job.scaling_job(facade, arguments)

    active_indices = [8, 35, 71]
    for key_config in arguments["CALCULATE_KEYS"]["keys"]:
        key_config["index"] = active_indices
    scaling_job.scaling_job(facade, arguments)

    for i, node in enumerate(vector):
        if i in active_indices:
            assert node.getStdScaling(i) == np.sqrt((2.0 * 6.0) / 1.0)
        else:
            assert node.getStdScaling(i) == np.sqrt((2.0 * 6.0) / 2.0)
    def getObservationKeyForDataKey(ert: EnKFMain, data_key, data_report_step):
        """Return the observation key whose GEN_OBS vector matches *data_key*
        at *data_report_step*, or None when no vector matches.

        If several vectors match, the last one wins (original behaviour).

        @type ert: EnKFMain
        @rtype: str
        """
        observation_key = None

        enkf_obs = ert.getObservations()
        for obs_vector in enkf_obs:
            # Bug fix: the original tested the truthiness of the enum member
            # itself (`if EnkfObservationImplementationType.GEN_OBS:`), which
            # is always true. Compare the vector's implementation type.
            if (obs_vector.getImplementationType()
                    == EnkfObservationImplementationType.GEN_OBS):
                report_step = obs_vector.firstActiveStep()
                key = obs_vector.getDataKey()

                if key == data_key and report_step == data_report_step:
                    observation_key = obs_vector.getObservationKey()

        return observation_key
Exemple #21
0
def test_main_entry_point_sum_data_update(setup_ert, monkeypatch):
    """A mocked scaling factor of 1.23 is applied to every node of the key."""
    cos_config = {"CALCULATE_KEYS": {"keys": [{"key": "WOPR_OP1_108"}]}}

    ert = EnKFMain(setup_ert)
    vector = ert.getObservations()["WOPR_OP1_108"]

    for i, node in enumerate(vector):
        assert node.getStdScaling(i) == 1.0

    monkeypatch.setattr(
        cos.ObservationScaleFactor, "get_scaling_factor", MagicMock(return_value=1.23)
    )
    CorrelatedObservationsScalingJob(ert).run(cos_config)

    for i, node in enumerate(vector):
        assert node.getStdScaling(i) == 1.23
Exemple #22
0
def test_main_entry_point_summary_data_calc(setup_ert, monkeypatch):
    """Scaling both summary keys leaves WOPR_OP1_108's std scaling untouched."""
    cos_config = {
        "CALCULATE_KEYS": {"keys": [{"key": "WOPR_OP1_108"}, {"key": "WOPR_OP1_144"}]}
    }

    ert = EnKFMain(setup_ert)
    vector = ert.getObservations()["WOPR_OP1_108"]

    for i, node in enumerate(vector):
        assert node.getStdScaling(i) == 1.0

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    for i, node in enumerate(vector):
        assert node.getStdScaling(i) == 1.0
Exemple #23
0
def test_validate_failed_realizations(test_data_root):
    """
    Config has several failed realisations
    """
    shutil.copytree(
        os.path.join(test_data_root, "failed_runs_in_storage"), "test_data"
    )
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("mini_fail_config"))

    assert keys_with_data(
        ert.getObservations(),
        ["GEN_PERLIN_1"],
        ert.getEnsembleSize(),
        ert.getEnkfFsManager().getCurrentFileSystem(),
    ) == ["GEN_PERLIN_1"]
Exemple #24
0
def test_validate_no_realizations(test_data_root):
    """
    Ensemble has not run
    """
    shutil.copytree(os.path.join(test_data_root, "poly_normal"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("poly.ert"))

    assert (
        keys_with_data(
            ert.getObservations(),
            ["POLY_OBS"],
            ert.getEnsembleSize(),
            ert.getEnkfFsManager().getCurrentFileSystem(),
        )
        == []
    )
def test_main_entry_point_gen_data():
    """scaling_job on gen-data honours UPDATE_KEYS sub-indices."""
    arguments = {
        "CALCULATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]},
        "UPDATE_KEYS": {"keys": [{"key": "WPR_DIFF_1", "index": [400, 800]}]},
    }

    shutil.copytree(os.path.join(TEST_DATA_DIR, "local", "snake_oil"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))
    facade = LibresFacade(ert)

    scaling_job.scaling_job(facade, arguments)

    vector = ert.getObservations()["WPR_DIFF_1"]
    assert_obs_vector(vector, 1.0, [0, 1], np.sqrt(4 / 2))

    # Restrict the calculate key to a sub-index set and run again.
    arguments["CALCULATE_KEYS"]["keys"][0]["index"] = [400, 800, 1200]
    scaling_job.scaling_job(facade, arguments)
    assert_obs_vector(vector, 1.0, [0, 1], np.sqrt(3.0 / 2.0))
Exemple #26
0
def test_main_entry_point_block_data_calc():
    """Block-data key RFT3 gets a scaling factor of exactly 2.0."""
    cos_config = {"CALCULATE_KEYS": {"keys": [{"key": "RFT3"}]}}

    shutil.copytree(
        os.path.join(TEST_DATA_DIR, "Equinor", "config", "with_RFT"), "test_data"
    )
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("config"))
    vector = ert.getObservations()["RFT3"]

    for i, node in enumerate(vector):
        assert node.getStdScaling(i) == 1.0

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    for i, node in enumerate(vector):
        assert node.getStdScaling(i) == 2.0
Exemple #27
0
def test_compare_different_jobs(setup_ert):
    """Legacy job and CorrelatedObservationsScalingJob agree on WPR_DIFF_1."""
    cos_config = {"CALCULATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]}}

    ert = EnKFMain(setup_ert)
    vector = ert.getObservations()["WPR_DIFF_1"]

    assert_obs_vector(vector, 1.0)

    legacy_job = ert.getWorkflowList().getJob("STD_SCALE_CORRELATED_OBS")
    legacy_job.run(ert, ["WPR_DIFF_1"])
    # Result of old job:
    assert_obs_vector(vector, np.sqrt(4 / 2))

    CorrelatedObservationsScalingJob(ert).run(cos_config)
    # Result of new job with no sub-indexing:
    assert_obs_vector(vector, np.sqrt(4 / 2))
Exemple #28
0
def test_main_entry_point_gen_data():
    """CorrelatedObservationsScalingJob honours UPDATE_KEYS sub-indices."""
    cos_config = {
        "CALCULATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]},
        "UPDATE_KEYS": {"keys": [{"key": "WPR_DIFF_1", "index": [400, 800]}]},
    }

    shutil.copytree(os.path.join(TEST_DATA_DIR, "local", "snake_oil"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    vector = ert.getObservations()["WPR_DIFF_1"]
    assert_obs_vector(vector, 1.0, [0, 1], np.sqrt(4 / 2))

    # Restrict the calculate key to a sub-index set and run again.
    cos_config["CALCULATE_KEYS"]["keys"][0]["index"] = [400, 800, 1200]
    CorrelatedObservationsScalingJob(ert).run(cos_config)
    assert_obs_vector(vector, 1.0, [0, 1], np.sqrt(3.0 / 2.0))
Exemple #29
0
def test_main_entry_point_shielded_data(setup_ert, monkeypatch):
    """Only the indices listed in CALCULATE_KEYS receive the mocked factor."""
    shielded = [1, 2, 3, 4, 5]
    cos_config = {
        "CALCULATE_KEYS": {"keys": [{"key": "FOPR", "index": shielded}]}
    }

    ert = EnKFMain(setup_ert)
    vector = ert.getObservations()["FOPR"]

    for i, node in enumerate(vector):
        assert node.getStdScaling(i) == 1.0

    monkeypatch.setattr(
        cos.ObservationScaleFactor, "get_scaling_factor", MagicMock(return_value=1.23)
    )
    CorrelatedObservationsScalingJob(ert).run(cos_config)

    for i, node in enumerate(vector):
        expected = 1.23 if i in shielded else 1.0
        assert node.getStdScaling(i) == expected, f"index: {i}"
Exemple #30
0
def test_misfit_preprocessor_all_obs(test_data_root, monkeypatch):
    """With the scale factor mocked, every node of every key reads 1.234."""
    from unittest.mock import MagicMock
    from semeio.workflows.correlated_observations_scaling import cos

    shutil.copytree(os.path.join(test_data_root, "snake_oil"), "test_data")
    os.chdir(os.path.join("test_data"))

    ert = EnKFMain(ResConfig("snake_oil.ert"))

    monkeypatch.setattr(
        cos.ObservationScaleFactor,
        "get_scaling_factor",
        MagicMock(return_value=1.234),
    )

    misfit_preprocessor.MisfitPreprocessorJob(ert).run()

    observation_keys = [
        "FOPR",
        "WOPR_OP1_9",
        "WOPR_OP1_36",
        "WOPR_OP1_72",
        "WOPR_OP1_108",
        "WOPR_OP1_144",
        "WOPR_OP1_190",
        "WPR_DIFF_1",
    ]
    obs = ert.getObservations()
    for key in observation_keys:
        for index, node in enumerate(obs[key]):
            assert node.getStdScaling(index) == 1.234, f"{index}, {key}"