Example #1
def test_main_entry_point_gen_data():
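    """Scale the WPR_DIFF_1 GEN_DATA observations and verify that only the
    indices listed under UPDATE_KEYS get the new scaling factor, and that
    the svd and scale_factor reports are written with the expected values.
    """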
    cos_config = {
        "CALCULATE_KEYS": {
            "keys": [{
                "key": "WPR_DIFF_1"
            }]
        },
        "UPDATE_KEYS": {
            "keys": [{
                "key": "WPR_DIFF_1",
                "index": [400, 800]
            }]
        },
    }

    test_data_dir = os.path.join(TEST_DATA_DIR, "local", "snake_oil")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")

    ert = EnKFMain(res_config)

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    obs = ert.getObservations()
    obs_vector = obs["WPR_DIFF_1"]

    assert_obs_vector(obs_vector, 1.0, [0, 1], np.sqrt(4 / 2))

    cos_config["CALCULATE_KEYS"]["keys"][0].update({"index": [400, 800, 1200]})

    CorrelatedObservationsScalingJob(ert).run(cos_config)
    assert_obs_vector(
        obs_vector,
        1.0,
        [0, 1],
        np.sqrt(3.0 / 2.0),
    )

    svd_file = (
        "storage/snake_oil/ensemble/reports/CorrelatedObservationsScalingJob/svd.json"
    )
    # Assert that data was published correctly
    with open(svd_file) as f:
        reported_svd = json.load(f)
        assert reported_svd == pytest.approx((
            6.531760256452532,
            2.0045135017540487,
            1.1768827000026516,
        ), 0.1)

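    # Assert that the scale factor was reported correctly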
    scale_file = ("storage/snake_oil/ensemble/reports/"
                  "CorrelatedObservationsScalingJob/scale_factor.json")
    with open(scale_file) as f:
        reported_scalefactor = json.load(f)
        assert reported_scalefactor == pytest.approx(1.224744871391589, 0.1)
Example #2
def test_compare_different_jobs():
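    """Verify that the legacy STD_SCALE_CORRELATED_OBS workflow job and
    CorrelatedObservationsScalingJob produce the same scaling factor when
    no sub-indexing is configured.
    """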
    cos_config = {"CALCULATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]}}

    test_data_dir = os.path.join(TEST_DATA_DIR, "local", "snake_oil")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")

    ert = EnKFMain(res_config)
    obs = ert.getObservations()
    obs_vector = obs["WPR_DIFF_1"]

    assert_obs_vector(obs_vector, 1.0)

    job = ert.getWorkflowList().getJob("STD_SCALE_CORRELATED_OBS")
    job.run(ert, ["WPR_DIFF_1"])

    # Result of old job:
    assert_obs_vector(obs_vector, np.sqrt(4 / 2))

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    # Result of new job with no sub-indexing:
    assert_obs_vector(obs_vector, np.sqrt(4 / 2))
Example #3
def test_main_entry_point_block_and_summary_data_calc():
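    """Calculate a common scaling factor across a summary key (FOPT) and a
    block observation key (RFT3) and verify that the RFT3 nodes are rescaled.
    """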
    cos_config = {
        "CALCULATE_KEYS": {
            "keys": [{
                "key": "FOPT"
            }, {
                "key": "RFT3"
            }]
        }
    }

    test_data_dir = os.path.join(TEST_DATA_DIR, "Equinor", "config",
                                 "with_RFT")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("config")
    ert = EnKFMain(res_config)
    obs = ert.getObservations()

    obs_vector = obs["RFT3"]

    for index, node in enumerate(obs_vector):
        assert node.getStdScaling(index) == 1.0

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    for index, node in enumerate(obs_vector):
        assert node.getStdScaling(index) == np.sqrt(64)
Example #4
def test_main_entry_point_summary_data_calc():
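    """Scale the WOPR_OP1_108 and WOPR_OP1_144 summary observations together
    and verify that the scaling factor remains sqrt(1.0).
    """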
    cos_config = {
        "CALCULATE_KEYS": {
            "keys": [{
                "key": "WOPR_OP1_108"
            }, {
                "key": "WOPR_OP1_144"
            }]
        }
    }

    test_data_dir = os.path.join(TEST_DATA_DIR, "local", "snake_oil")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")

    ert = EnKFMain(res_config)
    obs = ert.getObservations()

    obs_vector = obs["WOPR_OP1_108"]

    for index, node in enumerate(obs_vector):
        assert node.getStdScaling(index) == 1.0

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    for index, node in enumerate(obs_vector):
        assert node.getStdScaling(index) == np.sqrt(1.0)
Example #5
def test_main_entry_point_summary_data_update():
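    """Calculate scaling over WWCT:OP_1 and WWCT:OP_2, but only update the
    indices of WWCT:OP_2 listed under UPDATE_KEYS; all other nodes must keep
    a scaling of 1.0.
    """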
    cos_config = {
        "CALCULATE_KEYS": {
            "keys": [{
                "key": "WWCT:OP_1"
            }, {
                "key": "WWCT:OP_2"
            }]
        },
        "UPDATE_KEYS": {
            "keys": [{
                "key": "WWCT:OP_2",
                "index": [1, 2, 3, 4, 5]
            }]
        },
    }

    test_data_dir = os.path.join(TEST_DATA_DIR, "Equinor", "config",
                                 "obs_testing")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("config")
    ert = EnKFMain(res_config)
    obs = ert.getObservations()
    obs_vector = obs["WWCT:OP_2"]

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    for index, node in enumerate(obs_vector):
        if index in cos_config["UPDATE_KEYS"]["keys"][0]["index"]:
            assert node.getStdScaling(index) == np.sqrt(61.0 * 2.0)
        else:
            assert node.getStdScaling(index) == 1.0

    obs_vector = obs["WWCT:OP_1"]

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    for index, node in enumerate(obs_vector):
        assert node.getStdScaling(index) == 1.0
Example #6
    def run(self, *args):
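        """Build MeasuredData for all observation keys, run spearman_job with
        the parsed threshold to obtain scaling configurations, and apply
        CorrelatedObservationsScalingJob to them unless a dry run was
        requested. Empty datasets are silently ignored.
        """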
        facade = LibresFacade(self.ert())

        obs_keys = [
            facade.get_observation_key(nr)
            for nr, _ in enumerate(facade.get_observations())
        ]
        measured_data = MeasuredData(facade, obs_keys)

        parser = spearman_job_parser()
        args = parser.parse_args(args)

        scaling_configs = spearman_job(measured_data, args.threshold)

        if not args.dry_run:
            try:
                CorrelatedObservationsScalingJob(
                    self.ert()).run(scaling_configs)
            except EmptyDatasetException:
                pass
Example #7
    def run(self, *args):
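        """Run the misfit preprocessor to produce scaling configurations,
        merge in the scaling parameters derived from the config and the
        measured data, and apply CorrelatedObservationsScalingJob to the
        result; EmptyDatasetException is silently ignored.
        """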
        config_record = _fetch_config_record(args)
        measured_record = _load_measured_record(self.ert())
        scaling_configs = misfit_preprocessor.run(
            misfit_preprocessor_config=config_record,
            measured_data=measured_record,
            reporter=self.reporter,
        )

        # The execution of COS should be moved into misfit_preprocessor.run
        # when COS no longer depends on self.ert to run.
        scaling_params = _fetch_scaling_parameters(config_record,
                                                   measured_record)
        for scaling_config in scaling_configs:
            scaling_config["CALCULATE_KEYS"].update(scaling_params)

        try:
            CorrelatedObservationsScalingJob(self.ert()).run(scaling_configs)
        except EmptyDatasetException:
            pass