Example #1
import os

from fmu.ensemble import Observations, ScratchRealization


def test_smry():
    """Test the support for smry observations, these are
    observations relating to summary data, but where
    the observed values are specified in yaml, not through
    *H summary variables"""

    if "__file__" in globals():
        # Ease copying test code into interactive sessions
        testdir = os.path.dirname(os.path.abspath(__file__))
    else:
        testdir = os.path.abspath(".")

    obs = Observations(
        testdir
        + "/data/testensemble-reek001/"
        + "/share/observations/"
        + "observations.yml"
    )
    real = ScratchRealization(
        testdir + "/data/testensemble-reek001/" + "realization-0/iter-0/"
    )

    # Compute the mismatch from this particular observation set to the
    # loaded realization.
    mismatch = obs.mismatch(real)

    assert len(mismatch) == 21  # later: implement counting in the obs object
    assert mismatch.L1.sum() > 0
    assert mismatch.L2.sum() > 0
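
For context, the observation set does not have to come from a yaml file on disk: the Observations constructor also accepts a dict with the same layout, as Example #4 below shows. A minimal sketch of a smry entry, where the vector name, date and values are made up for illustration:

import datetime

from fmu.ensemble import Observations

# Illustrative smry observation; the layout mirrors the dicts used in
# Example #4, but the vector name, date and values here are invented.
obs = Observations({
    "smry": [{
        "key": "FOPT",
        "observations": [
            {"date": datetime.date(2001, 1, 1), "value": 1.0e6, "error": 1.0e5}
        ],
    }]
})
mismatch = obs.mismatch(real)  # 'real' as loaded above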
Example #2
import os

import pytest

from fmu.ensemble import ScratchRealization

# ecl2df is an optional dependency; the flag below is presumably set at module
# level in the original test file, roughly like this:
try:
    import ecl2df

    HAVE_ECL2DF = True
except ImportError:
    HAVE_ECL2DF = False


def test_ecl2df_real():
    """Check that we can utilize ecl2df on single realizations"""

    if not HAVE_ECL2DF:
        pytest.skip("ecl2df not installed")

    if "__file__" in globals():
        # Ease copying test code into interactive sessions
        testdir = os.path.dirname(os.path.abspath(__file__))
    else:
        testdir = os.path.abspath(".")
    realdir = os.path.join(testdir, "data/testensemble-reek001", "realization-0/iter-0")
    real = ScratchRealization(realdir)

    eclfiles = real.get_eclfiles()
    assert isinstance(eclfiles, ecl2df.EclFiles)
    compdat_df = ecl2df.compdat.df(eclfiles)
    assert not compdat_df.empty
    assert "KH" in compdat_df
Example #3
    def get_volumetric_rates(self,
                             column_keys=None,
                             time_index=None,
                             time_unit=None):
        """Compute volumetric rates from cumulative summary vectors

        Column names that do not refer to cumulative summary
        vectors are silently ignored.

        A dataframe is returned with volumetric rates, that is, rate
        values that sum up to the cumulative version. The trailing
        'T' in the column name is replaced with 'R'; if you ask for
        FOPT, you will get FOPR in the returned dataframe.

        Rates in the returned dataframe are valid **forwards** in time,
        as opposed to rates coming directly from the Eclipse simulator,
        which are valid backwards in time.

        If time_unit is set, the rates will be scaled to represent
        daily, monthly or yearly rates. These will sum up to the
        cumulative as long as you multiply by the correct number
        of days, months or years between each consecutive date index.
        Month lengths and leap years are handled correctly.

        The returned dataframe is indexed by DATE.

        Args:
            column_keys: str or list of strings, cumulative summary vectors
            time_index: str or list of datetimes
            time_unit: str or None. If None, the rates returned will
                be the difference in cumulative between each included
                time step (where the time interval can vary arbitrarily).
                If set to 'days', 'months' or 'years', the rates will
                be scaled to represent a daily, monthly or yearly rate that
                is compatible with the date index and the cumulative data.

        """
        from fmu.ensemble import ScratchRealization

        return ScratchRealization._get_volumetric_rates(
            self, column_keys, time_index, time_unit)
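
A short usage sketch of the method documented above, assuming a ScratchRealization with summary data available; the FOPT vector and the time settings are only illustrative:

from fmu.ensemble import ScratchRealization

# Hypothetical realization path following the test data layout.
real = ScratchRealization("data/testensemble-reek001/realization-0/iter-0")

# Daily-scaled oil rates computed from the cumulative FOPT vector on a
# monthly date index; the column comes back renamed to FOPR.
rates = real.get_volumetric_rates(
    column_keys="FOPT", time_index="monthly", time_unit="days"
)
print(rates["FOPR"].head())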
Example #4
import datetime
import os

import numpy as np
import pandas as pd
import yaml

from fmu.ensemble import Observations, ScratchRealization


def test_real_mismatch():
    """Test calculation of mismatch from the observation set to a
    realization"""
    if "__file__" in globals():
        # Ease copying test code into interactive sessions
        testdir = os.path.dirname(os.path.abspath(__file__))
    else:
        testdir = os.path.abspath(".")

    real = ScratchRealization(testdir + "/data/testensemble-reek001/" +
                              "realization-0/iter-0/")

    real.load_smry()
    real.load_txt("outputs.txt")
    real.load_scalar("npv.txt")

    obs = Observations({
        "txt": [{
            "localpath": "parameters.txt",
            "key": "FWL",
            "value": 1702
        }]
    })
    realmis = obs.mismatch(real)

    # Check layout of returned data
    assert isinstance(realmis, pd.DataFrame)
    assert len(realmis) == 1
    assert "REAL" not in realmis.columns  # should only be there for ensembles.
    assert "OBSTYPE" in realmis.columns
    assert "OBSKEY" in realmis.columns
    assert "DATE" not in realmis.columns  # date is not relevant
    assert "MISMATCH" in realmis.columns
    assert "L1" in realmis.columns
    assert "L2" in realmis.columns

    # Check actually computed values, there should only be one row with data:
    assert realmis.loc[0, "OBSTYPE"] == "txt"
    assert realmis.loc[0, "OBSKEY"] == "parameters.txt/FWL"
    assert realmis.loc[0, "MISMATCH"] == -2
    assert realmis.loc[0, "SIGN"] == -1
    assert realmis.loc[0, "L1"] == 2
    assert realmis.loc[0, "L2"] == 4

    # Another observation set:
    obs2 = Observations({
        "txt": [
            {
                "localpath": "parameters.txt",
                "key": "RMS_SEED",
                "value": 600000000
            },
            {
                "localpath": "outputs.txt",
                "key": "top_structure",
                "value": 3200
            },
        ],
        "scalar": [{
            "key": "npv.txt",
            "value": 3400
        }],
    })
    realmis2 = obs2.mismatch(real)
    assert len(realmis2) == 3
    assert "parameters.txt/RMS_SEED" in realmis2["OBSKEY"].values
    assert "outputs.txt/top_structure" in realmis2["OBSKEY"].values
    assert "npv.txt" in realmis2["OBSKEY"].values

    # assert much more!

    # Test that we can write the observations to yaml
    # and verify that the exported yaml can be reimported
    # and yield the same result
    obs2r = Observations(yaml.full_load(obs2.to_yaml()))
    realmis2r = obs2r.mismatch(real)
    assert np.all(np.sort(realmis2["MISMATCH"].values) ==
                  np.sort(realmis2r["MISMATCH"].values))

    # Test use of allocated values:
    obs3 = Observations({"smryh": [{"key": "FOPT", "histvec": "FOPTH"}]})
    fopt_mis = obs3.mismatch(real)
    assert fopt_mis.loc[0, "OBSTYPE"] == "smryh"
    assert fopt_mis.loc[0, "OBSKEY"] == "FOPT"
    assert fopt_mis.loc[0, "L1"] > 0
    assert fopt_mis.loc[0, "L1"] != fopt_mis.loc[0, "L2"]

    # Test mismatch where some data is missing:
    obs4 = Observations({"smryh": [{"key": "FOOBAR", "histvec": "FOOBARH"}]})
    mis_mis = obs4.mismatch(real)
    assert mis_mis.empty

    # This test fails, the consistency check is not implemented.
    # obs_bogus = Observations({'smryh': [{'keddy': 'FOOBAR',
    #                               'histdddvec': 'FOOBARH'}]})
    # mis_mis = obs_bogus.mismatch(real)
    # assert mis_mis.empty

    obs_bogus_scalar = Observations(
        {"scalar": [{
            "key": "nonexistingnpv.txt",
            "value": 3400
        }]})
    # (a warning should be logged)
    assert obs_bogus_scalar.mismatch(real).empty

    obs_bogus_param = Observations({
        "txt": [{
            "localpath": "bogusparameters.txt",
            "key": "RMS_SEED",
            "value": 600000000,
        }]
    })
    # (a warning should be logged)
    assert obs_bogus_param.mismatch(real).empty

    obs_bogus_param = Observations({
        "txt": [{
            "localpath": "parameters.txt",
            "key": "RMS_SEEEEEEED",
            "value": 600000000,
        }]
    })
    # (a warning should be logged)
    assert obs_bogus_param.mismatch(real).empty

    # Non-existing summary key:
    obs_bogus_smry = Observations({
        "smry": [{
            "key":
            "WBP4:OP_XXXXX",
            "observations": [{
                "date": datetime.date(2001, 1, 1),
                "error": 4,
                "value": 251
            }],
        }]
    })
    assert obs_bogus_smry.mismatch(real).empty
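
The comment above about the REAL column hints at ensemble-level usage: the same Observations object can be matched against a whole ensemble, and the returned dataframe then carries one row per realization per observation unit, including a REAL column. A hedged sketch, with the ensemble name and glob pattern chosen to match the test data layout:

from fmu.ensemble import Observations, ScratchEnsemble

ens = ScratchEnsemble(
    "reektest", "data/testensemble-reek001/realization-*/iter-0"
)
ens.load_smry()  # smryh observations need summary data loaded

obs = Observations({"smryh": [{"key": "FOPT", "histvec": "FOPTH"}]})

# One row per realization per observation unit, with a REAL column.
ens_mis = obs.mismatch(ens)
print(ens_mis[["REAL", "OBSKEY", "L1", "L2"]].head())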