def fixture_edataobj2(globalconfig2):
    """Combined globalconfig2 and other settings; NB for internal unit testing"""
    # All keyword settings gathered in one place before instantiation.
    export_settings = dict(
        config=globalconfig2,
        name="TopVolantis",
        unit="m",
        tagname="mytag",
        parent="",
        timedata=[[20330105, "moni"], [19990102, "base"]],
        is_prediction=True,
        is_observation=False,
        forcefolder=None,
        subfolder="",
        fmu_context="realization",
    )
    exportobj = dio.ExportData(**export_settings)

    # Instance-level overrides: binary surface format, no folder handling,
    # and anchor both root and working paths at the current directory.
    exportobj.surface_fformat = "irap_binary"
    exportobj.createfolder = False
    exportobj.verifyfolder = False
    exportobj.legacy_time_format = False
    exportobj._rootpath = Path(".")
    exportobj._pwd = Path(".")

    logger.info("Ran %s", inspect.currentframe().f_code.co_name)
    return exportobj
def export_geogrid_parameters():
    """Export geogrid associated parameters based on user defined lists."""
    print("Write grid properties...")
    for prop_name in PROPS_SEISMIC + PROPS_OTHER:
        source = (FOLDER / (GFILE + "_" + prop_name)).with_suffix(".roff")
        gridprop = xtgeo.gridproperty_from_file(source)

        exporter = dataio.ExportData(
            name=prop_name,
            # parent={"name": GNAME},
            config=CFG,
            content="depth",
            unit="m",
            vertical_domain={"depth": "msl"},
            timedata=None,
            is_prediction=True,
            is_observation=False,
            verbosity=VERBOSITY,
            workflow="rms property model",
        )
        result = exporter.export(gridprop)
        print(f"Stored {prop_name} as {result}")
def export_faultlines():
    """Return faultlines as both dataframe and original (xyz)"""
    exporter = dataio.ExportData(
        config=CFG,
        content="depth",
        unit="m",
        vertical_domain={"depth": "msl"},
        timedata=None,
        is_prediction=True,
        is_observation=False,
        tagname="faultlines",
        verbosity="INFO",
        workflow="rms structural model",
    )

    for horizon in HORISONNAMES:
        # RMS version for reading polygons from a project:
        # poly = xtgeo.polygons_from_roxar(project, hname, RMS_POL_CATEGORY)
        # File version:
        polyfile = (FILEROOT / horizon.lower()).with_suffix(".pol")
        poly = xtgeo.polygons_from_file(polyfile)
        poly.name = horizon

        # Export both csv (keeping xtgeo column names) and irap text format.
        # The difference between "csv" and "csv|xtgeo" is that the latter keeps
        # xtgeo column names as-is while "csv" forces column names to "X Y Z ID"
        for fmt in ("csv|xtgeo", "irap_ascii"):
            exporter.polygons_fformat = fmt
            exporter.export(poly, verbosity="WARNING")
def test_fmuprovider_no_provider(testroot, globalconfig1):
    """Testing the FmuProvider basics where no ERT context is found from folder tree."""
    os.chdir(testroot)
    edata = dio.ExportData(fmu_context="realization", config=globalconfig1)

    provider = _FmuProvider(edata)
    provider.detect_provider()

    # Outside an ERT run tree, nothing FMU-related should be detected.
    assert provider.is_fmurun is False
    assert provider.case_name is None
def test_metadata_populate_masterdata_is_empty(globalconfig1):
    """Testing the masterdata part, first with no settings."""
    edata = dio.ExportData(config=globalconfig1)
    # Remove masterdata from the config to force the missing-masterdata path.
    del edata.config["masterdata"]

    meta = _MetaData("dummy", edata)
    with pytest.warns(UserWarning):
        meta._populate_meta_masterdata()
    assert meta.meta_masterdata is None
def test_fmuprovider_ert2_provider(fmurun, globalconfig1):
    """Testing the FmuProvider for an ERT2 case"""
    os.chdir(fmurun)
    edata = dio.ExportData(fmu_context="realization", config=globalconfig1)
    edata._rootpath = fmurun

    provider = _FmuProvider(edata)
    provider.detect_provider()

    # The fixture's folder tree encodes the case and realization identity.
    assert provider.case_name == "ertrun1"
    assert provider.real_name == "realization-0"
    assert provider.real_id == 0
def fixture_edataobj1(globalconfig1):
    """Combined globalconfig and settings to instance, for internal testing"""
    logger.info("Establish edataobj1")
    exportobj = dio.ExportData(
        config=globalconfig1,
        name="TopWhatever",
        content="depth",
        tagname="mytag",
        is_observation=False,
    )
    # Binary surface format, and skip folder creation/verification in tests.
    exportobj.surface_fformat = "irap_binary"
    exportobj.createfolder = False
    exportobj.verifyfolder = False

    logger.info(
        "Ran %s returning %s", inspect.currentframe().f_code.co_name, type(exportobj)
    )
    return exportobj
def export_geogrid_geometry():
    """Export the geogrid geometry (roff file) through dataio."""
    gridfile = (FOLDER / GFILE).with_suffix(".roff")
    grid = xtgeo.grid_from_file(gridfile)

    exporter = dataio.ExportData(
        config=CFG,
        name=GNAME,
        content="depth",
        unit="m",
        vertical_domain={"depth": "msl"},
        timedata=None,
        is_prediction=True,
        is_observation=False,
        tagname="",
        verbosity=VERBOSITY,
        workflow="rms structural model",
    )
    result = exporter.export(grid)
    print(f"Stored grid as {result}")
def main():
    """Exporting maps from clipboard"""
    surf = xtgeo.surface_from_file(INPUT_FOLDER / FILE)
    print(f"Average value of map is {surf.values.mean()}")

    exporter = dataio.ExportData(
        config=CFG,
        name="noname_here",
        unit="fraction",
        vertical_domain={"depth": "msl"},
        content="property",
        timedata=None,
        is_prediction=True,
        is_observation=False,
        tagname="average_poro",
        workflow="rms property model",
    )
    # The 'name' keyword passed to export() overrides the instance name above.
    outfile = exporter.export(surf, name="all")
    print(f"File name is {outfile}")
def test_metadata_populate_change_access_ok(globalconfig1):
    """Testing the access part, now with ok config and a change in access."""
    edata = dio.ExportData(
        config=globalconfig1,
        access_ssdl={"access_level": "paranoid", "rep_include": False},
    )
    meta = _MetaData("dummy", edata)
    meta._populate_meta_access()

    # The ssdl override must be reflected while the asset part stays intact.
    expected = {
        "asset": {"name": "Test"},
        "ssdl": {"access_level": "paranoid", "rep_include": False},
    }
    assert meta.meta_access == expected
def main():
    """Exporting maps from clipboard"""

    def _last_match(table, text):
        # Sequential scan where the LAST matching pattern wins, mirroring
        # the original loop semantics (no break on first hit).
        found = "unset"
        for pattern, value in table.items():
            if pattern in text:
                found = value
        return found

    for gridfile in INPUT_FOLDER.glob("*.gri"):
        surf = xtgeo.surface_from_file(gridfile)
        lowered = str(gridfile).lower()
        attribute = _last_match(TRANSLATE, lowered)
        name = _last_match(NAMETRANSLATE, lowered)

        exporter = dataio.ExportData(
            config=CFG,
            name=name,
            unit="fraction",
            content={"property": {"attribute": attribute, "is_discrete": False}},
            vertical_domain={"depth": "msl"},
            timedata=None,
            is_prediction=True,
            is_observation=False,
            tagname="average_" + attribute,
            verbosity="INFO",
            workflow="rms property model",
        )
        outfile = exporter.export(surf)
        print(f"File name is {outfile}")