def test_inicase_generate_case_metadata_exists_but_force(
    fmurun_w_casemetadata, globalconfig2
):
    """Re-generate case metadata with force=True when metadata already exists."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    casemetafolder = fmurun_w_casemetadata.parent.parent
    old_metafile = casemetafolder / "share/metadata/fmu_case.yml"
    with open(old_metafile, "r", encoding="utf-8") as stream:
        old_content = yaml.safe_load(stream)

    icase = InitializeCase(globalconfig2, verbosity="INFO")
    icase.export(
        rootfolder=casemetafolder,
        force=True,
        casename="ertrun1",
        caseuser="******",
        description="My curious case",
        restart_from="Jurassic era",
    )

    new_metafile = casemetafolder / "share/metadata/fmu_case.yml"
    with open(new_metafile, "r", encoding="utf-8") as stream:
        new_content = yaml.safe_load(stream)

    logger.debug("\n%s\n", prettyprint_dict(old_content))
    logger.debug("\n%s\n", prettyprint_dict(new_content))

    assert old_content["class"] == new_content["class"]
    assert old_content["fmu"]["case"]["uuid"] != new_content["fmu"]["case"]["uuid"]


def test_regsurf_metadata_with_timedata(rmssetup, rmsglobalconfig, regsurf):
    """Export a regular surface to file with correct metadata, name and timedata."""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
        verbosity="INFO",
    )

    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300101, "moni"], [20100203, "base"]],
        verbosity="INFO",
    )
    assert meta1["data"]["time"]["t0"]["value"] == "2010-02-03T00:00:00"
    assert meta1["data"]["time"]["t0"]["label"] == "base"
    assert meta1["data"]["time"]["t1"]["value"] == "2030-01-01T00:00:00"
    assert meta1["data"]["time"]["t1"]["label"] == "moni"

    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300123, "one"]],
        verbosity="INFO",
    )
    assert meta1["data"]["time"]["t0"]["value"] == "2030-01-23T00:00:00"
    assert meta1["data"]["time"]["t0"]["label"] == "one"
    assert meta1["data"]["time"].get("t1", None) is None

    logger.info(prettyprint_dict(meta1))


def test_regsurf_metadata_with_timedata(
    fmurun_w_casemetadata, rmsglobalconfig, regsurf
):
    """Export the regular surface to file with correct metadata/name/timedata."""
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,
        verbosity="INFO",
    )

    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300101, "moni"], [20100203, "base"]],
        verbosity="INFO",
    )
    assert meta1["data"]["time"]["t0"]["value"] == "2010-02-03T00:00:00"
    assert meta1["data"]["time"]["t0"]["label"] == "base"
    assert meta1["data"]["time"]["t1"]["value"] == "2030-01-01T00:00:00"
    assert meta1["data"]["time"]["t1"]["label"] == "moni"

    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300123, "one"]],
        verbosity="INFO",
    )
    assert meta1["data"]["time"]["t0"]["value"] == "2030-01-23T00:00:00"
    assert meta1["data"]["time"]["t0"]["label"] == "one"
    assert meta1["data"]["time"].get("t1", None) is None

    logger.debug(prettyprint_dict(meta1))


def test_generate_full_metadata(regsurf, edataobj2):
    """Generate the full metadata block for an xtgeo surface."""
    mymeta = _MetaData(regsurf, edataobj2)

    metadata_result = mymeta.generate_export_metadata(
        skip_null=False  # want to have None values present in the result
    )

    logger.debug("\n%s", prettyprint_dict(metadata_result))

    # check some samples
    assert (
        metadata_result["masterdata"]["smda"]["country"][0]["identifier"] == "Norway"
    )
    assert metadata_result["access"]["ssdl"]["access_level"] == "internal"
    assert metadata_result["data"]["unit"] == "m"


def test_regsurf_generate_metadata(rmssetup, rmsglobalconfig, regsurf):
    """Test generating metadata for a surface, pretending to be inside RMS."""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)
    logger.debug(prettyprint_dict(rmsglobalconfig["access"]))

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
    )
    logger.info("Inside RMS status now %s", dataio.ExportData._inside_rms)

    edata.generate_metadata(regsurf)
    assert str(edata._pwd) == str(rmssetup)
    assert str(edata._rootpath.resolve()) == str(rmssetup.parent.parent.resolve())


def test_regsurf_aggregated_diffdata(
    fmurun_w_casemetadata, rmsglobalconfig, regsurf
):
    """Test surfaces, where input is diffdata."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
    )

    aggs = []
    # create "forward" files
    for i in range(10):
        use_regsurf = regsurf.copy()
        use_regsurf.values += float(i)
        expfile = edata.export(
            use_regsurf,
            name="mymap_" + str(i),
            realization=i,
            timedata=[[20300201], [19990204]],
        )
        aggs.append(expfile)

    # next task is to do an aggregation; the metadata already exists per input
    # element and shall be re-used
    surfs = xtgeo.Surfaces()
    metas = []
    for mapfile in aggs:
        surf = xtgeo.surface_from_file(mapfile)
        meta = dataio.read_metadata(mapfile)
        metas.append(meta)
        surfs.append([surf])

    aggregated = surfs.statistics()
    # aggregated mean shall be 1238.5
    logger.info("Aggr. mean is %s", aggregated["mean"].values.mean())

    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd",
        verbosity="INFO",
        aggregation_id="789politipoliti",
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.info("New metadata:\n%s", utils.prettyprint_dict(newmeta))


def test_regsurf_export_file_fmurun(
    rmsrun_fmu_w_casemetadata, rmsglobalconfig, regsurf
):
    """Being in RMS and in an active FMU ERT2 run with case metadata present.

    Export the regular surface to file with correct metadata and name.
    """
    logger.info("Active folder is %s", rmsrun_fmu_w_casemetadata)
    os.chdir(rmsrun_fmu_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
        verbosity="INFO",
        workflow="My test workflow",
        unit="myunit",
    )
    assert edata.unit == "myunit"

    # generating metadata without export is possible
    themeta = edata.generate_metadata(
        regsurf,
        unit="furlongs",  # intentional override
    )
    assert themeta["data"]["unit"] == "furlongs"
    logger.debug("Metadata: \n%s", prettyprint_dict(themeta))

    # doing actual export with a few overrides
    output = edata.export(
        regsurf,
        name="TopVolantis",
        access_ssdl={"access_level": "restricted", "rep_include": False},
        unit="forthnite",  # intentional override
    )
    logger.info("Output is %s", output)

    assert edata._metadata["access"]["ssdl"]["access_level"] == "restricted"
    assert edata._metadata["data"]["unit"] == "forthnite"


def test_regsurf_case_observation(fmurun_w_casemetadata, rmsglobalconfig, regsurf):
    """Test generating pre-realization surfaces."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
        fmu_context="case",
        name="mymap",
        is_observation=True,
    )
    metadata = edata.generate_metadata(regsurf)
    logger.debug("\n%s", utils.prettyprint_dict(metadata))

    assert (
        "ertrun1/share/observation/maps/mymap.gri"
        in metadata["file"]["absolute_path"]
    )

    exp = edata.export(regsurf)
    assert "ertrun1/share/observation/maps/mymap.gri" in exp


def test_regsurf_case_observation_w_symlinks(
    fmurun_w_casemetadata, rmsglobalconfig, regsurf
):
    """Generate a case level surface, with symlinks on realization folders."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
        fmu_context="case_symlink_realization",
        name="mymap",
        is_observation=True,
    )

    # this context is not implemented yet; the steps below are kept inside the
    # pytest.raises block and will not run once the exception is raised
    with pytest.raises(NotImplementedError):
        metadata = edata.generate_metadata(regsurf)
        logger.debug("\n%s", utils.prettyprint_dict(metadata))

        assert (
            "ertrun1/share/observation/maps/mymap.gri"
            in metadata["file"]["absolute_path"]
        )

        exp = edata.export(regsurf)
        assert "ertrun1/share/observation/maps/mymap.gri" in exp


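# Sketch, not part of the original tests: the context-manager form of
# ``pytest.raises`` can also capture the raised exception for logging. The test
# name below is hypothetical; the setup mirrors the test above, and nothing is
# asserted about the exception message because its wording is not known here.
def test_case_symlink_context_raises_sketch(
    fmurun_w_casemetadata, rmsglobalconfig, regsurf
):
    """Hypothetical variant of the test above that logs the raised exception."""
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,
        fmu_context="case_symlink_realization",
        name="mymap",
        is_observation=True,
    )
    with pytest.raises(NotImplementedError) as excinfo:
        edata.generate_metadata(regsurf)
        edata.export(regsurf)
    logger.debug("Raised: %s", excinfo.value)

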
def test_regsurf_metadata_with_timedata_legacy(rmssetup, rmsglobalconfig, regsurf):
    """Export the regular surface to file with correct metadata/timedata, legacy version."""
    logger.info("Active folder is %s", rmssetup)
    os.chdir(rmssetup)

    dataio.ExportData.legacy_time_format = True
    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
        verbosity="INFO",
    )

    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300101, "moni"], [20100203, "base"]],
        verbosity="INFO",
    )
    logger.info(prettyprint_dict(meta1))

    assert meta1["data"]["time"][1]["value"] == "2010-02-03T00:00:00"
    assert meta1["data"]["time"][1]["label"] == "base"
    assert meta1["data"]["time"][0]["value"] == "2030-01-01T00:00:00"
    assert meta1["data"]["time"][0]["label"] == "moni"

    meta1 = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300123, "one"]],
        verbosity="INFO",
    )
    assert meta1["data"]["time"][0]["value"] == "2030-01-23T00:00:00"
    assert meta1["data"]["time"][0]["label"] == "one"
    assert len(meta1["data"]["time"]) == 1

    dataio.ExportData.legacy_time_format = False


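# Sketch, not part of the original tests: ``legacy_time_format`` is a class
# attribute, so if an assertion in the test above fails it is left as True and
# may leak into later tests. pytest's built-in ``monkeypatch`` fixture restores
# the attribute on teardown; the test name below is hypothetical and the
# assertions simply mirror the ones above.
def test_regsurf_metadata_with_timedata_legacy_isolated(
    rmssetup, rmsglobalconfig, regsurf, monkeypatch
):
    """Same as the legacy test above, but without leaking the class attribute."""
    os.chdir(rmssetup)
    monkeypatch.setattr(dataio.ExportData, "legacy_time_format", True)

    edata = dataio.ExportData(config=rmsglobalconfig, verbosity="INFO")
    meta = edata.generate_metadata(
        regsurf,
        name="TopVolantis",
        timedata=[[20300123, "one"]],
        verbosity="INFO",
    )
    assert meta["data"]["time"][0]["label"] == "one"
    # no manual reset needed; monkeypatch reverts legacy_time_format afterwards

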
def test_regsurf_aggregated(fmurun_w_casemetadata, rmsglobalconfig, regsurf):
    """Test generating aggregated metadata for a surface, where input has metadata."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
        verbosity="INFO",
    )

    aggs = []
    # create "forward" files
    for i in range(1):  # TODO! 10
        use_regsurf = regsurf.copy()
        use_regsurf.values += float(i)
        expfile = edata.export(use_regsurf, name="mymap_" + str(i), realization=i)
        aggs.append(expfile)

    # next task is to do an aggregation; the metadata already exists per input
    # element and shall be re-used
    surfs = xtgeo.Surfaces()
    metas = []
    for mapfile in aggs:
        surf = xtgeo.surface_from_file(mapfile)
        meta = dataio.read_metadata(mapfile)
        print(utils.prettyprint_dict(meta))
        metas.append(meta)
        surfs.append([surf])

    aggregated = surfs.statistics()
    # aggregated mean shall be 1238.5
    logger.info("Aggr. mean is %s", aggregated["mean"].values.mean())

    # an explicit aggregation_id shall be used verbatim
    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd",
        verbosity="INFO",
        aggregation_id="1234",
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.debug("New metadata:\n%s", utils.prettyprint_dict(newmeta))
    assert newmeta["fmu"]["aggregation"]["id"] == "1234"

    # let aggregation_id input True generate a hash
    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd2",
        verbosity="INFO",
        aggregation_id=True,
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.debug("New metadata:\n%s", utils.prettyprint_dict(newmeta))
    assert newmeta["fmu"]["aggregation"]["id"] != "1234"
    assert newmeta["fmu"]["aggregation"]["id"] is not True

    # let aggregation_id input None produce a missing "id" key
    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd2",
        verbosity="INFO",
        aggregation_id=None,
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.debug("New metadata:\n%s", utils.prettyprint_dict(newmeta))
    assert "id" not in newmeta["fmu"]["aggregation"]