Example 1
def test_regsurf_metadata_with_timedata(fmurun_w_casemetadata, rmsglobalconfig,
                                        regsurf):
    """Export the regular surface to file with correct metadata/name/timedata."""

    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,
        verbosity="INFO",
    )

    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300101, "moni"], [20100203, "base"]],
        verbosity="INFO",
    )
    assert meta1["data"]["time"]["t0"]["value"] == "2010-02-03T00:00:00"
    assert meta1["data"]["time"]["t0"]["label"] == "base"
    assert meta1["data"]["time"]["t1"]["value"] == "2030-01-01T00:00:00"
    assert meta1["data"]["time"]["t1"]["label"] == "moni"

    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300123, "one"]],
        verbosity="INFO",
    )

    assert meta1["data"]["time"]["t0"]["value"] == "2030-01-23T00:00:00"
    assert meta1["data"]["time"]["t0"]["label"] == "one"
    assert meta1["data"]["time"].get("t1", None) is None

    logger.debug(prettyprint_dict(meta1))
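All of these snippets rely on the same module-level setup, which is omitted from each example. The sketch below shows one plausible set of imports, assuming the fixtures (rmssetup, fmurun_w_casemetadata, rmsrun_fmu_w_casemetadata, rmsglobalconfig, regsurf, points, cube, grid, gridproperty, polygons, dataframe, arrowtable) come from the test suite's conftest.py; the exact import paths for prettyprint_dict, utils and ValidationError are assumptions inferred from how the names are used, not something the examples themselves show.

# Minimal sketch of the module-level setup assumed by the examples; exact
# import paths are assumptions, and the fixtures are provided by conftest.py.
import logging
import os

import pandas as pd
import pytest
import xtgeo

import fmu.dataio as dataio                      # assumed import alias for fmu-dataio
from fmu.dataio import _utils as utils           # assumed location of the utils helpers
from fmu.dataio._utils import prettyprint_dict   # assumed helper import
from pydantic import ValidationError             # assumption; may come from fmu.dataio itself depending on version

logger = logging.getLogger(__name__)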
Example 2
def test_regsurf_metadata_with_timedata(rmssetup, rmsglobalconfig, regsurf):
    """Export a regular surface to file with correct metadata and name and timedata."""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    edata = dataio.ExportData(
        config=rmsglobalconfig,
        verbosity="INFO",
    )  # read from global config
    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300101, "moni"], [20100203, "base"]],
        verbosity="INFO",
    )
    assert meta1["data"]["time"]["t0"]["value"] == "2010-02-03T00:00:00"
    assert meta1["data"]["time"]["t0"]["label"] == "base"
    assert meta1["data"]["time"]["t1"]["value"] == "2030-01-01T00:00:00"
    assert meta1["data"]["time"]["t1"]["label"] == "moni"

    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300123, "one"]],
        verbosity="INFO",
    )

    assert meta1["data"]["time"]["t0"]["value"] == "2030-01-23T00:00:00"
    assert meta1["data"]["time"]["t0"]["label"] == "one"
    assert meta1["data"]["time"].get("t1", None) is None

    logger.info(prettyprint_dict(meta1))
Example 3
def test_points_export_file_set_name_xtgeoheaders(fmurun_w_casemetadata,
                                                  rmsglobalconfig, points):
    """Export the points to file with correct metadata and name but here xtgeo var."""

    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    dataio.ExportData.points_fformat = "csv"
    edata = dataio.ExportData(config=rmsglobalconfig,
                              verbosity="INFO")  # read from global config
    edata.points_fformat = "csv|xtgeo"  # override

    output = edata.export(points, name="TopVolantiz")
    logger.info("Output is %s", output)

    assert str(output) == str((
        edata._rootpath /
        "realization-0/iter-0/share/results/points/topvolantiz.csv").resolve())

    thefile = pd.read_csv(
        edata._rootpath /
        "realization-0/iter-0/share/results/points/topvolantiz.csv")
    assert thefile.columns[0] == "X_UTME"

    dataio.ExportData.points_fformat = "csv"
Example 4
def test_regsurf_generate_metadata_change_content_invalid(
        rmsglobalconfig, regsurf):
    """As above but change an invalid name of key in the generate_metadata"""
    edata = dataio.ExportData(
        config=rmsglobalconfig)  # read from global config

    with pytest.raises(ValidationError):
        _ = edata.generate_metadata(regsurf, blablabla="time")
Example 5
def test_cube_export_file_set_name(rmssetup, rmsglobalconfig, cube):
    """Export the cube to file with correct metadata and name."""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    edata = dataio.ExportData(
        config=rmsglobalconfig)  # read from global config

    output = edata.export(cube, name="MyCube")
    logger.info("Output is %s", output)

    assert str(output) == str(
        (edata._rootpath / "share/results/cubes/mycube.segy").resolve())
Example 6
def test_polys_export_file_set_name(rmssetup, rmsglobalconfig, polygons):
    """Export the polygon to file with correct metadata and name."""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    edata = dataio.ExportData(
        config=rmsglobalconfig)  # read from global config

    output = edata.export(polygons, name="TopVolantis")
    logger.info("Output is %s", output)

    assert str(output) == str(
        (edata._rootpath / "share/results/polygons/topvolantis.csv").resolve())
Example 7
def test_regsurf_export_file_set_name(rmssetup, rmsglobalconfig, regsurf):
    """Export the regular surface to file with correct metadata and name."""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    edata = dataio.ExportData(
        config=rmsglobalconfig)  # read from global config

    output = edata.export(regsurf, name="TopVolantis")
    logger.info("Output is %s", output)

    assert str(output) == str(
        (edata._rootpath / "share/results/maps/topvolantis.gri").resolve())
Example 8
def test_grid_export_file_set_name(rmssetup, rmsglobalconfig, grid):
    """Export the grid to file with correct metadata and name."""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    edata = dataio.ExportData(
        config=rmsglobalconfig)  # read from global config

    output = edata.export(grid, name="MyGrid")
    logger.info("Output is %s", output)

    assert str(output) == str(
        (edata._rootpath / "share/results/grids/mygrid.roff").resolve())
Example 9
def test_regsurf_generate_metadata_change_content(rmssetup, rmsglobalconfig,
                                                  regsurf):
    """As above but change a key in the generate_metadata"""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    edata = dataio.ExportData(
        config=rmsglobalconfig)  # read from global config

    meta1 = edata.generate_metadata(regsurf)
    meta2 = edata.generate_metadata(regsurf, content="time")

    assert meta1["data"]["content"] == "depth"
    assert meta2["data"]["content"] == "time"
Example 10
def test_dataframe_export_file_set_name(rmssetup, rmsglobalconfig, dataframe):
    """Export the dataframe to file with correct metadata and name."""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    edata = dataio.ExportData(
        config=rmsglobalconfig)  # read from global config

    output = edata.export(dataframe, name="MyDataframe")
    logger.info("Output is %s", output)

    assert str(output) == str(
        (edata._rootpath / "share/results/tables/mydataframe.csv").resolve())

    metaout = dataio.read_metadata(output)
    assert metaout["data"]["spec"]["columns"] == ["COL1", "COL2"]
Example 11
def test_regsurf_generate_metadata(rmssetup, rmsglobalconfig, regsurf):
    """Test generating metadata for a surface pretend inside RMS"""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    logger.debug(prettyprint_dict(rmsglobalconfig["access"]))

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
    )
    logger.info("Inside RMS status now %s", dataio.ExportData._inside_rms)

    edata.generate_metadata(regsurf)
    assert str(edata._pwd) == str(rmssetup)
    assert str(edata._rootpath.resolve()) == str(
        rmssetup.parent.parent.resolve())
Example 12
def test_regsurf_aggregated_diffdata(fmurun_w_casemetadata, rmsglobalconfig, regsurf):
    """Test surfaces, where input is diffdata."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)

    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
    )

    aggs = []
    # create "forward" files
    for i in range(10):
        use_regsurf = regsurf.copy()
        use_regsurf.values += float(i)
        expfile = edata.export(
            use_regsurf,
            name="mymap_" + str(i),
            realization=i,
            timedata=[[20300201], [19990204]],
        )
        aggs.append(expfile)

    # next task is to do an aggregation, and now the metadata already exists
    # per input element, which shall be re-used
    surfs = xtgeo.Surfaces()
    metas = []
    for mapfile in aggs:
        surf = xtgeo.surface_from_file(mapfile)
        meta = dataio.read_metadata(mapfile)

        metas.append(meta)
        surfs.append([surf])

    aggregated = surfs.statistics()
    logger.info("Aggr. mean is %s", aggregated["mean"].values.mean())  # shall be 1238.5

    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd",
        verbosity="INFO",
        aggregation_id="789politipoliti",
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.info("New metadata:\n%s", utils.prettyprint_dict(newmeta))
Example 13
def test_regsurf_generate_metadata_incl_jobs(fmurun_w_casemetadata,
                                             rmsglobalconfig, regsurf):
    """As above but now with jobs.json stuff included via class variable flag."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    dataio.ExportData.include_ert2jobs = True

    edata = dataio.ExportData(
        config=rmsglobalconfig,
        verbosity="INFO",
    )

    meta = edata.generate_metadata(regsurf)
    assert meta["fmu"]["realization"]["jobs"]["umask"] == "0002"

    dataio.ExportData.include_ert2jobs = False
Example 14
def test_gridproperty_export_file_set_name(fmurun_w_casemetadata,
                                           rmsglobalconfig, gridproperty):
    """Export the gridprop to file with correct metadata and name."""

    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig)  # read from global config

    output = edata.export(gridproperty, name="MyGridProperty")
    logger.info("Output is %s", output)

    assert str(output) == str(
        (edata._rootpath /
         "realization-0/iter-0/share/results/grids/mygridproperty.roff"
         ).resolve())
Example 15
def test_regsurf_generate_metadata(fmurun_w_casemetadata, rmsglobalconfig,
                                   regsurf):
    """Test generating metadata for a surface pretend ERT2 job"""
    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,
        verbosity="INFO",
    )

    meta = edata.generate_metadata(regsurf)
    assert str(edata._pwd) == str(fmurun_w_casemetadata)
    assert str(edata._rootpath.resolve()) == str(
        fmurun_w_casemetadata.parent.parent.resolve())
    assert meta["file"]["relative_path"].startswith(
        "realization-0/iter-0/share")
    assert "jobs" not in meta["fmu"]["realization"]
Example 16
def test_pyarrow_export_file_set_name(rmssetup, rmsglobalconfig, arrowtable):
    """Export the arrow to file with correct metadata and name."""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    edata = dataio.ExportData(
        config=rmsglobalconfig)  # read from global config

    if arrowtable:  # is None if PyArrow package is not present
        output = edata.export(arrowtable, name="MyArrowtable")
        logger.info("Output is %s", output)

        assert str(output) == str(
            (edata._rootpath /
             "share/results/tables/myarrowtable.arrow").resolve())

        metaout = dataio.read_metadata(output)
        assert metaout["data"]["spec"]["columns"] == ["COL1", "COL2"]
Example 17
def test_regsurf_export_file_fmurun(rmsrun_fmu_w_casemetadata, rmsglobalconfig,
                                    regsurf):
    """Being in RMS and in an active FMU ERT2 run with case metadata present.

    Export the regular surface to file with correct metadata and name.
    """

    logger.info("Active folder is %s", rmsrun_fmu_w_casemetadata)
    os.chdir(rmsrun_fmu_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,
        verbosity="INFO",
        workflow="My test workflow",
        unit="myunit",
    )  # read from global config

    assert edata.unit == "myunit"

    # generating metadata without export is possible
    themeta = edata.generate_metadata(
        regsurf,
        unit="furlongs",  # intentional override
    )
    assert themeta["data"]["unit"] == "furlongs"
    logger.debug("Metadata: \n%s", prettyprint_dict(themeta))

    # doing actual export with a few overrides
    output = edata.export(
        regsurf,
        name="TopVolantis",
        access_ssdl={
            "access_level": "restricted",
            "rep_include": False
        },
        unit="forthnite",  # intentional override
    )
    logger.info("Output is %s", output)

    assert edata._metadata["access"]["ssdl"]["access_level"] == "restricted"
    assert edata._metadata["data"]["unit"] == "forthnite"
Example 18
def test_regsurf_case_observation(fmurun_w_casemetadata, rmsglobalconfig,
                                  regsurf):
    """Test generating pre-realization surfaces."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)

    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
        fmu_context="case",
        name="mymap",
        is_observation=True,
    )

    metadata = edata.generate_metadata(regsurf)
    logger.debug("\n%s", utils.prettyprint_dict(metadata))
    assert ("ertrun1/share/observation/maps/mymap.gri"
            in metadata["file"]["absolute_path"])

    exp = edata.export(regsurf)
    assert "ertrun1/share/observation/maps/mymap.gri" in exp
Example 19
def test_points_export_file_set_name(fmurun_w_casemetadata, rmsglobalconfig,
                                     points):
    """Export the points to file with correct metadata and name."""

    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig)  # read from global config

    output = edata.export(points, name="TopVolantis")
    logger.info("Output is %s", output)

    assert str(output) == str((
        edata._rootpath /
        "realization-0/iter-0/share/results/points/topvolantis.csv").resolve())

    thefile = pd.read_csv(
        edata._rootpath /
        "realization-0/iter-0/share/results/points/topvolantis.csv")
    assert thefile.columns[0] == "X"
Example 20
def test_regsurf_case_observation_w_symlinks(fmurun_w_casemetadata,
                                             rmsglobalconfig, regsurf):
    """Generating case level surface, with symlinks on realization folders."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)

    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
        fmu_context="case_symlink_realization",
        name="mymap",
        is_observation=True,
    )

    with pytest.raises(NotImplementedError):
        metadata = edata.generate_metadata(regsurf)
        logger.debug("\n%s", utils.prettyprint_dict(metadata))
        assert ("ertrun1/share/observation/maps/mymap.gri"
                in metadata["file"]["absolute_path"])

        exp = edata.export(regsurf)
        assert "ertrun1/share/observation/maps/mymap.gri" in exp
Example 21
def test_regsurf_metadata_with_timedata_legacy(rmssetup, rmsglobalconfig,
                                               regsurf):
    """Export the regular surface to file with correct metadata timedata, legacy ver."""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    dataio.ExportData.legacy_time_format = True
    edata = dataio.ExportData(
        config=rmsglobalconfig,
        verbosity="INFO",
    )  # read from global config
    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300101, "moni"], [20100203, "base"]],
        verbosity="INFO",
    )
    logger.info(prettyprint_dict(meta1))

    assert meta1["data"]["time"][1]["value"] == "2010-02-03T00:00:00"
    assert meta1["data"]["time"][1]["label"] == "base"
    assert meta1["data"]["time"][0]["value"] == "2030-01-01T00:00:00"
    assert meta1["data"]["time"][0]["label"] == "moni"

    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300123, "one"]],
        verbosity="INFO",
    )

    assert meta1["data"]["time"][0]["value"] == "2030-01-23T00:00:00"
    assert meta1["data"]["time"][0]["label"] == "one"

    assert len(meta1["data"]["time"]) == 1

    dataio.ExportData.legacy_time_format = False
Example 22
def test_regsurf_aggregated(fmurun_w_casemetadata, rmsglobalconfig, regsurf):
    """Test generating aggragated metadata for a surface, where input has metadata."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)

    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
        verbosity="INFO",
    )

    aggs = []
    # create "forward" files
    for i in range(1):  # TODO! 10
        use_regsurf = regsurf.copy()
        use_regsurf.values += float(i)
        expfile = edata.export(use_regsurf, name="mymap_" + str(i), realization=i)
        aggs.append(expfile)

    # next task is to do an aggregation, and now the metadata already exists
    # per input element, which shall be re-used
    surfs = xtgeo.Surfaces()
    metas = []
    for mapfile in aggs:
        surf = xtgeo.surface_from_file(mapfile)
        meta = dataio.read_metadata(mapfile)
        print(utils.prettyprint_dict(meta))

        metas.append(meta)
        surfs.append([surf])

    aggregated = surfs.statistics()
    logger.info("Aggr. mean is %s", aggregated["mean"].values.mean())  # shall be 1238.5

    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd",
        verbosity="INFO",
        aggregation_id="1234",
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.debug("New metadata:\n%s", utils.prettyprint_dict(newmeta))
    assert newmeta["fmu"]["aggregation"]["id"] == "1234"

    # let aggregation input True generate hash
    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd2",
        verbosity="INFO",
        aggregation_id=True,
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.debug("New metadata:\n%s", utils.prettyprint_dict(newmeta))
    assert newmeta["fmu"]["aggregation"]["id"] != "1234"
    assert newmeta["fmu"]["aggregation"]["id"] is not True

    # let aggregation input None generate a missing key
    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd2",
        verbosity="INFO",
        aggregation_id=None,
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.debug("New metadata:\n%s", utils.prettyprint_dict(newmeta))
    assert "id" not in newmeta["fmu"]["aggregation"]