def extractdf():
    """Extract dataframe from Eclipse case"""
    # Geometry-only read: no INIT/UNRST properties attached at this point.
    geom_only = xtgeo.grid_from_file(GRIDFILEROOT.with_suffix(".EGRID"))
    frame = geom_only.dataframe()  # hence no property columns here
    print(frame)

    # Full Eclipse run read; EGRID, INIT and UNRST are located automatically.
    run_grid = xtgeo.grid_from_file(
        GRIDFILEROOT,
        fformat="eclipserun",
        initprops=INITS,
        restartprops=RESTARTS,
        restartdates=MYDATES,
    )

    # Property values for all cells, via the GridProperties instance.
    frame = run_grid.gridprops.dataframe()
    print(frame)

    # All cells with ijk and xyz; here a grid key input is required.
    frame = run_grid.dataframe()
    print(frame)  # all cells is the default

    # Restrict to active cells only, then dump to CSV.
    frame = run_grid.dataframe(activeonly=True)
    print(frame)
    frame.to_csv(TMPDIR / "reek_sim.csv")
def show_stats():
    """Get statistics for one realisation, poro/perm filtered on facies.

    But note that values here are unweighted as total volume is not present.
    """
    grd = xtgeo.grid_from_file(GRIDFILE)

    # The facies property acts as the filter for the statistics below.
    facies = xtgeo.gridproperty_from_file(FACIESFILE, name=FACIES, grid=grd)
    print(f"Facies codes are: {facies.codes}")

    for prop_name in PROPS:
        source = ojn(EXPATH1, ROOT + "--" + prop_name + EXT)
        prop = xtgeo.gridproperty_from_file(
            source, name="geogrid--" + prop_name, grid=grd
        )
        print(f"Working with {prop.name}")

        # Per-facies statistics first, then the unfiltered numbers.
        for code, label in facies.codes.items():
            selected = prop.values[facies.values == code]
            avg = selected.mean()
            std = selected.std()
            print(
                f"For property {prop_name} in facies {label}, avg is {avg:10.3f} and "
                f"stddev is {std:9.3f}"
            )

        avg = prop.values.mean()
        std = prop.values.std()
        print(
            f"For property {prop_name} in ALL facies, avg is {avg:10.3f} and "
            f"stddev is {std:9.3f}"
        )
def fixture_create_project():
    """Create a tmp RMS project for testing, populate with basic data.

    After the yield command, the teardown phase will remove the tmp RMS project.
    """
    prj1 = str(PRJ)

    print("\n******** Setup RMS project!\n")
    # Remove any leftover project from a previous (possibly aborted) run
    if isdir(prj1):
        print("Remove existing project! (1)")
        shutil.rmtree(prj1)

    project = roxar.Project.create()
    rox = xtgeo.RoxUtils(project)
    print("Roxar version is", rox.roxversion)
    print("RMS version is", rox.rmsversion(rox.roxversion))
    assert "1." in rox.roxversion

    # populate with well data; the well with "XP_with" in its file name gets
    # a special name so repeated-section handling can be tested
    for wfile in WELLS1:
        wobj = xtgeo.well_from_file(WELLSFOLDER1 / wfile)
        if "XP_with" in wfile:
            wobj.name = "OP2_w_repeat"
        wobj.to_roxar(project, wobj.name, logrun="log", trajectory="My trajectory")

    # populate with cube data
    cube = xtgeo.cube_from_file(CUBEDATA1)
    cube.to_roxar(project, CUBENAME1, domain="depth")

    # populate with surface data; horizons are created before writing
    rox.create_horizons_category(SURFCAT1)
    for num, name in enumerate(SURFNAMES1):
        srf = xtgeo.surface_from_file(SURFTOPS1[num])
        project.horizons.create(name, roxar.HorizonType.interpreted)
        srf.to_roxar(project, name, SURFCAT1)

    # populate with grid and props
    grd = xtgeo.grid_from_file(GRIDDATA1)
    grd.to_roxar(project, GRIDNAME1)
    por = xtgeo.gridproperty_from_file(PORODATA1, name=PORONAME1)
    por.to_roxar(project, GRIDNAME1, PORONAME1)
    zon = xtgeo.gridproperty_from_file(ZONEDATA1, name=ZONENAME1)
    # discrete zone values stored as uint8 before writing to Roxar
    zon.values = zon.values.astype(np.uint8)
    zon.to_roxar(project, GRIDNAME1, ZONENAME1)

    # save project (an initial version and a work version) and exit
    project.save_as(prj1)
    project.close()

    yield project

    # teardown: remove the tmp project created above
    print("\n******* Teardown RMS project!\n")
    if isdir(prj1):
        print("Remove existing project! (1)")
        shutil.rmtree(prj1)
def make_map():
    """Make a map of poro or perm in lowermost K layer of the grid"""
    grd = xtgeo.grid_from_file(GNAMEROOT + ".EGRID")
    # Attach PORO so it appears as a column in the grid dataframe below.
    _ = xtgeo.gridproperty_from_file(GNAMEROOT + ".INIT", name="PORO", grid=grd)
    dframe = grd.dataframe()

    # Template surface derived from the grid geometry.
    template = xtgeo.RegularSurface()
    template.from_grid3d(grd)

    # Keep the bottom layer only.
    bottom = dframe["KZ"].max()
    dframe = dframe[dframe["KZ"] == bottom].reset_index()

    # Reduce to the three columns (X Y Z) a Points dataframe expects.
    dframe = dframe[["X_UTME", "Y_UTMN", "PORO"]].copy()
    pts = xtgeo.Points()
    pts.zname = "PORO"
    pts.dataframe = dframe

    # Grid the points onto the template surface, then show it.
    template.gridding(pts)
    template.quickplot()  # optional visual check
def loop_for_compute(
    config: dict, sinfo: ScreenInfo, _dryrun: bool = False
) -> EnsembleWellProps:
    """Collect for computing the ensemble statistics.

    Args:
        config: The input configuration dictionary
        sinfo: Messages to screen instance
        _dryrun: For testing, skipping computation
    """
    cfg = ConfigData(config)
    wcase = WellCase(
        xtgeo.well_from_file(cfg.wellfile, lognames=cfg.lognames),
        cfg.mdlog,
        cfg.mdranges,
        cfg.welldelta,
    )
    # grd is carried across iterations so the geometry can be reused
    grd = None

    sinfo.oprint("Loop data over realizations...")
    used_realizations = []
    for real in cfg.reals:
        sinfo.oprint(f"Realization no. {real}")
        realiterpath = cfg.root / f"realization-{real}" / cfg.itera

        if not isinstance(grd, xtgeo.Grid) or not cfg.gridreuse:
            # one may choose to reuse grid if not structural uncertainty
            sinfo.oprint(f"Read grid geometry for realization {real}")
            gpath = realiterpath / cfg.gridfilestub
            try:
                grd = xtgeo.grid_from_file(gpath)
            except OSError:
                # unreadable grid: drop this realization entirely
                sinfo.oprint(f"Not able to read grid {gpath}, skip realization...")
                continue
            # stale cell-index logs must go before re-deriving ijk on new grid
            wcase.well.delete_logs(GCELLNAMES)
            wcase.well.make_ijk_from_grid(grd)

        for propcase in cfg.proplist:
            proppath = realiterpath / propcase.filestub
            sinfo.oprint(f"Read: {proppath}...")
            try:
                theprop = xtgeo.gridproperty_from_file(proppath)
            except OSError:
                # NOTE(review): this `continue` skips the current property
                # only, not the whole realization as the message suggests
                sinfo.oprint(
                    f"Not able to read property {propcase.name} from {proppath}, "
                    "skip realization..."
                )
                continue
            theprop.geometry = grd
            if _dryrun is False:
                run_compute(real, wcase.well, propcase, theprop)
        used_realizations.append(real)

    # cell-index logs were only needed while sampling; remove before return
    sinfo.xprint("Delete logs referring to cells...")
    wcase.well.delete_logs(GCELLNAMES)

    return EnsembleWellProps(wcase.well, used_realizations, cfg, sinfo)
def test_import_dualperm_grid_soil():
    """Test grid with flag for dual perm setup (will also mean dual poro also)"""
    grd = xtgeo.grid_from_file(DUALFILE2.with_suffix(".EGRID"))
    grd._dualactnum.to_file("TMP/dualact.roff")

    def _restart(name, fracture):
        # Shortcut for reading one UNRST property at the fixed test date.
        return xtgeo.gridproperty_from_file(
            DUALFILE2.with_suffix(".UNRST"),
            grid=grd,
            name=name,
            date=20170121,
            fracture=fracture,
        )

    # matrix saturations
    sgas = _restart("SGAS", False)
    sgas.describe()
    tsetup.assert_almostequal(sgas.values[3, 0, 0], 0.0, 0.001)
    tsetup.assert_almostequal(sgas.values[0, 1, 0], 0.0, 0.001)

    soil = _restart("SOIL", False)
    soil.describe()
    tsetup.assert_almostequal(soil.values[3, 0, 0], 0.44525, 0.001)
    tsetup.assert_almostequal(soil.values[0, 1, 0], 0.0, 0.001)
    tsetup.assert_almostequal(soil.values[3, 2, 0], 0.0, 0.0001)

    # fracture saturations
    sgas = _restart("SGAS", True)
    tsetup.assert_almostequal(sgas.values[3, 0, 0], 0.0, 0.001)
    tsetup.assert_almostequal(sgas.values[0, 1, 0], 0.0, 0.0001)

    soil = _restart("SOIL", True)
    tsetup.assert_almostequal(soil.values[3, 0, 0], 0.0, 0.001)
    tsetup.assert_almostequal(soil.values[0, 1, 0], 0.011741, 0.0001)
    tsetup.assert_almostequal(soil.values[3, 2, 0], 0.11676, 0.0001)
def test_import_dualporo_grid():
    """Test grid with flag for dual porosity setup, oil water"""
    grd = xtgeo.grid_from_file(DUALFILE1.with_suffix(".EGRID"))
    assert grd.dualporo is True
    assert grd.dualperm is False
    assert grd.dimensions == (5, 3, 1)

    def _init_prop(name, fracture=False):
        # Read one INIT property (matrix by default).
        return xtgeo.gridproperty_from_file(
            DUALFILE1.with_suffix(".INIT"), grid=grd, name=name, fracture=fracture
        )

    def _restart_prop(name, fracture):
        # Read one UNRST property at the fixed test date.
        return xtgeo.gridproperty_from_file(
            DUALFILE1.with_suffix(".UNRST"),
            grid=grd,
            name=name,
            date=20170121,
            fracture=fracture,
        )

    # matrix porosity
    poro = _init_prop("PORO")
    tsetup.assert_almostequal(poro.values[0, 0, 0], 0.1, 0.001)
    tsetup.assert_almostequal(poro.values[1, 1, 0], 0.16, 0.001)
    tsetup.assert_almostequal(poro.values[4, 2, 0], 0.24, 0.001)
    assert poro.name == "POROM"
    poro.describe()

    # fracture porosity
    poro = _init_prop("PORO", fracture=True)
    tsetup.assert_almostequal(poro.values[0, 0, 0], 0.25, 0.001)
    tsetup.assert_almostequal(poro.values[4, 2, 0], 0.39, 0.001)
    assert poro.name == "POROF"
    poro.describe()

    # water saturation, matrix then fracture
    swat = _restart_prop("SWAT", False)
    swat.describe()
    tsetup.assert_almostequal(swat.values[0, 0, 0], 0.60924, 0.001)

    swat = _restart_prop("SWAT", True)
    swat.describe()
    tsetup.assert_almostequal(swat.values[0, 0, 0], 0.989687, 0.001)
    swat.to_file("TMP/swat.roff")
def all_init_as_csv():
    """Get dataframes, print as CSV."""
    print(f"Loading Eclipse data {GRIDFILEROOT}")
    grd = xtgeo.grid_from_file(GRIDFILEROOT, fformat="eclipserun", initprops=INITPROPS)

    print("Get dataframes...")
    frame = grd.dataframe(activeonly=True)
    print(frame.head())

    print("Filter out columns with constant values...")
    # Keep only columns whose variance is not (close to) zero.
    varying = ~np.isclose(0, frame.var())
    frame = frame.iloc[:, varying]
    print(frame.head())

    print("Write to file...")
    frame.to_csv(TMPDIR / "mycsvdump.csv", index=False)
def test_create_project():
    """Create a tmp RMS project for testing, populate with basic data"""
    prj1 = PRJ
    prj2 = PRJ + "_initial"

    # wipe leftovers from earlier runs
    for count, prj in enumerate((prj1, prj2), start=1):
        if isdir(prj):
            print("Remove existing project! ({})".format(count))
            shutil.rmtree(prj)

    project = roxar.Project.create()
    rox = xtgeo.RoxUtils(project)
    print("Roxar version is", rox.roxversion)
    print("RMS version is", rox.rmsversion(rox.roxversion))
    assert "1." in rox.roxversion

    # cube data
    cube = xtgeo.cube_from_file(CUBEDATA1)
    cube.to_roxar(project, CUBENAME1, domain="depth")

    # surface data; horizons are created before writing
    rox.create_horizons_category(SURFCAT1)
    for num, name in enumerate(SURFNAMES1):
        srf = xtgeo.surface_from_file(SURFTOPS1[num])
        project.horizons.create(name, roxar.HorizonType.interpreted)
        srf.to_roxar(project, name, SURFCAT1)

    # grid and one grid property
    grd = xtgeo.grid_from_file(GRIDDATA1)
    grd.to_roxar(project, GRIDNAME1)
    por = xtgeo.gridproperty_from_file(PORODATA1, name=PORONAME1)
    por.to_roxar(project, GRIDNAME1, PORONAME1)

    # well data is postponed

    # save both an initial version and a work version, then exit
    project.save_as(prj1)
    project.save_as(prj2)
    project.close()
def cropper():
    """Do a cropping of a 3D grid"""
    # pylint: disable=too-many-locals
    grd = xtgeo.grid_from_file(GRIDFILEROOT, fformat="eclipserun", initprops=INITPROPS)
    print(grd.props)

    # split the grid into 4 quadrants (full layer range kept)
    ncol, nrow, nlay = grd.ncol, grd.nrow, grd.nlay
    half_col = int(ncol / 2)
    half_row = int(nrow / 2)

    print(f"Original grid dimensions are {ncol} {nrow} {nlay}")
    print(f"Crop ranges are {half_col} {half_row} {nlay}")

    col_windows = [(1, half_col), (half_col + 1, ncol)]
    row_windows = [(1, half_row), (half_row + 1, nrow)]

    for cwin in col_windows:
        c1, c2 = cwin
        for rwin in row_windows:
            r1, r2 = rwin
            suffix = f"_{c1}-{c2}_{r1}-{r2}"

            piece = grd.copy()
            piece.crop(cwin, rwin, (1, nlay), props="all")

            # save grid and all cropped properties as ROFF files
            piece.to_file(TMPDIR / ("grid" + suffix + ".roff"))
            for prop in piece.props:
                print(f"{prop.name} for {cwin} .. {rwin}")
                outname = (prop.name + suffix + ".roff").lower()
                prop.to_file(TMPDIR / outname)
def export_geogrid_geometry():
    """Export the geogrid geometry through fmu dataio."""
    grdfile = (FOLDER / GFILE).with_suffix(".roff")
    grd = xtgeo.grid_from_file(grdfile)

    exporter = dataio.ExportData(
        config=CFG,
        name=GNAME,
        content="depth",
        unit="m",
        vertical_domain={"depth": "msl"},
        timedata=None,
        is_prediction=True,
        is_observation=False,
        tagname="",
        verbosity=VERBOSITY,
        workflow="rms structural model",
    )

    outfile = exporter.export(grd)
    print(f"Stored grid as {outfile}")
def sum_stats():
    """Accumulate numpies for all realisations and then do stats.

    This will be quite memory intensive, and memory consumption will
    increase linearly.
    """
    collected = {}

    for irel in range(NRUN):
        # load as Eclipse run; this will look for EGRID, INIT, UNRST
        print(f"Loading realization no {irel}")
        grd = xtgeo.grid_from_file(
            GRIDFILEROOT,
            fformat="eclipserun",
            initprops=INITPROPS,
            restartprops=RESTARTPROPS,
            restartdates=RDATES,
        )

        for prop in grd.props:
            # perturb values slightly to mimic ensemble variability
            shift = irel * 0.001 if prop.name == "PORO" else irel * 1
            prop.values += shift
            collected.setdefault(prop.name, []).append(prop.values1d)

    # statistics over the realization axis for porosity
    porovalues = npma.vstack(collected["PORO"])
    poromeanarray = porovalues.mean(axis=0)
    porostdarray = porovalues.std(axis=0)
    print(poromeanarray)
    print(poromeanarray.mean())
    print(porostdarray)
    print(porostdarray.mean())
    return poromeanarray.mean()
def sum_running_stats():
    """Find avg per realisation and do a cumulative rolling mean.

    Memory consumption shall be very low.
    """
    for irel in range(NRUN):
        # load as Eclipse run; this will look for EGRID, INIT, UNRST
        print("Loading realization no {}".format(irel))
        grd = xtgeo.grid_from_file(
            GRIDFILEROOT,
            fformat="eclipserun",
            restartprops=RESTARTPROPS,
            restartdates=RDATES,
            initprops=INITPROPS,
        )

        # nnum = number of realizations seen so far (1-based)
        nnum = float(irel + 1)
        for prop in grd.props:
            if prop.name == "PORO":
                prop.values += irel * 0.001  # mimic variability aka ensembles
            else:
                prop.values += irel * 1  # just to mimic variability

            if prop.name == "PORO":
                if irel == 0:
                    # first realization seeds the running mean directly
                    pcum = prop.values1d
                else:
                    # running mean update: the old mean is downweighted by
                    # (nnum-1)/nnum and this realization contributes 1/nnum
                    pavg = prop.values1d / nnum
                    pcum = pcum * (nnum - 1) / nnum
                    pcum = npma.vstack([pcum, pavg])
                    pcum = pcum.sum(axis=0)

    # find the averages:
    print(pcum)
    print(pcum.mean())
    return pcum.mean()
def slice_a_grid():
    """Slice a 3D grid property with maps (looping)"""
    expath1 = pathlib.Path("../../xtgeo-testdata/3dgrids/reek")
    expath2 = pathlib.Path("../../xtgeo-testdata/surfaces/reek/1")

    gridfileroot = expath1 / "REEK"
    surfacefile = expath2 / "midreek_rota.gri"

    initprops = ["PORO", "PERMX"]
    grd = xtgeo.grid_from_file(gridfileroot, fformat="eclipserun", initprops=initprops)

    # template surface for the sampling, refined for a nicer result
    surf = xtgeo.surface_from_file(surfacefile)
    surf.refine(2)  # NB takes more time then

    for depth in (1700, 1720, 1740):
        print(f"Slice is {depth}")
        for prp in grd.props:
            sconst = surf.copy()
            sconst.values = depth  # constant-depth surface
            print(f"Work with {prp.name}, slice at {depth}")
            sconst.slice_grid3d(grd, prp)
            sconst.to_file(TMPDIR / f"{prp.name}_{depth}.gri")
            pngfile = TMPDIR / f"{prp.name}_{depth}.png"
            if "SKIP_PLOT" not in os.environ:
                sconst.quickplot(filename=pngfile)
def test_compute_some_props(configdata):
    """Test the actual compute of one well on one realization."""
    cfg = ensemble_well_props.ConfigData(configdata)
    wcase = ensemble_well_props.WellCase(
        xtgeo.well_from_file(WELLNAME2, lognames=cfg.lognames),
        cfg.mdlog,
        cfg.mdranges,
    )
    grd = xtgeo.grid_from_file(GFILE1)
    wcase.well.make_ijk_from_grid(grd)

    # run the compute for each test property against the same grid
    for num, propfile in enumerate([FACIESFILE1, POROFILE1]):
        prop = xtgeo.gridproperty_from_file(propfile)
        prop.geometry = grd
        ensemble_well_props.run_compute(0, wcase.well, cfg.proplist[num], prop)

    assert "Facies_r0" in wcase.well.dataframe
    assert wcase.well.dataframe["PHIT_r0"].mean() == pytest.approx(0.171533, abs=0.001)
def load_grid(gridpath: str) -> xtgeo.Grid:
    """Read a 3D grid from file; thin wrapper around xtgeo.grid_from_file."""
    return xtgeo.grid_from_file(gridpath)
def ecldiff2roff_main(
    eclroot: str,
    prop: str,
    diffdates: Union[str, List[Tuple[str, str]]],
    outputfilebase: str = "eclgrid",
    sep: str = "--",
    datesep: str = "_",
    datefmt: str = "YYYYMMDD",
) -> None:
    """Main function for ecldiff2roff, taking positional and named arguments.

    Arguments correspond to argparse documentation
    """
    if not diffdates:
        logger.warning("No dates given. Nothing to do")
        return
    # a string form of diffdates is parsed into a list of date pairs
    if isinstance(diffdates, str):
        diffdates = parse_diff_dates(diffdates)

    # collect the union of all dates so the restart data is read only once
    alldates: Set[str]
    alldates = set()
    for date_pair in diffdates:
        alldates = alldates.union(set(date_pair))

    ecl_grid = xtgeo.grid_from_file(
        eclroot, fformat="eclipserun", restartprops=[prop], restartdates=alldates
    )
    logger.info("Loaded UNRST data at %s dates from %s", len(alldates), eclroot)

    # mapping from the user-facing date format names to strftime patterns
    supp_datefmts = {"YYYYMMDD": "%Y%m%d", "YYYY-MM-DD": "%Y-%m-%d"}
    if datefmt not in supp_datefmts:
        raise ValueError(f"Requested dateformat not supported {datefmt}")

    for date_pair in diffdates:
        # restart properties are named "<prop>_<date>" by xtgeo
        prop1 = ecl_grid.get_prop_by_name(f"{prop}_{date_pair[0]}")
        if prop1 is None or prop1.values is None:
            raise ValueError(f"Could not extract {prop} at date {date_pair[0]}")
        prop2 = ecl_grid.get_prop_by_name(f"{prop}_{date_pair[1]}")
        if prop2 is None or prop2.values is None:
            raise ValueError(f"Could not extract {prop} at date {date_pair[1]}")
        logger.info(
            "Computing difference for property %s between dates %s and %s",
            prop,
            str(date_pair[0]),
            str(date_pair[1]),
        )
        # difference is first date minus second date
        diffprop = prop1.copy()
        diffprop.values = prop1.values - prop2.values
        # output name: e.g. "pressure--20200101_20210101" with defaults
        diffpropname = (
            prop.lower()
            + sep
            + dateutil.parser.parse(date_pair[0]).strftime(supp_datefmts[datefmt])
            + datesep
            + dateutil.parser.parse(date_pair[1]).strftime(supp_datefmts[datefmt])
        )
        filename = outputfilebase + sep + diffpropname + ".roff"
        logger.info("Writing to file %s", filename)
        diffprop.to_file(filename, name=diffpropname)
def extract_grid_zone_tops(
    project: Optional[_roxar.Project] = None,
    well_list: Optional[list] = None,
    logrun: str = "log",
    trajectory: str = "Drilled trajectory",
    gridzonelog: Optional[str] = None,
    mdlogname: Optional[str] = None,
    grid: Optional[str] = None,
    zone_param: Optional[str] = None,
    alias_file: Optional[str] = None,
    rms_name: str = "RMS_WELL_NAME",
    ecl_name: str = "ECLIPSE_WELL_NAME",
) -> pd.DataFrame:
    """
    Function for extracting top and base from gridzones, both in TVD and MD.
    A pandas dataframe will be returned.

    Users can either input a pre-generated gridzonelog or a grid and a zone
    parameter for computing the gridzonelog. The function works both inside
    RMS and outside with file input. If input from files, and a MD log is not
    present in the well a quasi md log will be computed and used.
    """
    # FIX: was "False if gridzonelog is None else True" (anti-idiom)
    use_gridzonelog = gridzonelog is not None

    if not use_gridzonelog:
        if grid is not None and zone_param is not None:
            if project is not None:
                mygrid = xtgeo.grid_from_roxar(project, grid)
                gridzones = xtgeo.gridproperty_from_roxar(project, grid, zone_param)
            else:
                mygrid = xtgeo.grid_from_file(grid)
                gridzones = xtgeo.gridproperty_from_file(zone_param, grid=mygrid)
            gridzones.name = "Zone"
        else:
            # FIX: error message was missing the closing quote on 'zone_param'
            raise ValueError("Specify either 'gridzonelog' or 'grid' and 'zone_param'")

    dfs = []
    if well_list is None:
        well_list = []

    for well in well_list:
        try:
            if project is not None:
                xtg_well = xtgeo.well_from_roxar(
                    project,
                    str(well),
                    trajectory=trajectory,
                    logrun=logrun,
                    inclmd=True,
                )
            else:
                xtg_well = xtgeo.well_from_file(str(well), mdlogname=mdlogname)
                # quasi md log will be computed
                xtg_well.geometrics()
        except (ValueError, KeyError):
            # unreadable or incomplete well: silently skipped by design
            continue

        # if no gridzonelog create one from the zone parameter
        if not use_gridzonelog:
            xtg_well.get_gridproperties(gridzones, mygrid)
            gridzonelog = "Zone_model"

        # well never enters the grid: nothing to pick
        if xtg_well.dataframe[gridzonelog].isnull().values.all():
            continue

        # Set gridzonelog as zonelog and extract zonation tops from it
        xtg_well.zonelogname = gridzonelog
        dframe = xtg_well.get_zonation_points(top_prefix="", use_undef=True)
        dframe.rename(
            columns={
                "Z_TVDSS": "TOP_TVD",
                xtg_well.mdlogname: "TOP_MD",
                "Zone": "ZONE_CODE",
                "WellName": "WELL",
            },
            inplace=True,
        )
        # find deepest point in well while in grid
        df_max = (
            xtg_well.dataframe[["Z_TVDSS", xtg_well.mdlogname, gridzonelog]]
            .dropna()
            .sort_values(by=xtg_well.mdlogname)
        )
        # create base picks also; the last zone is closed by the deepest
        # in-grid sample found above
        dframe["BASE_TVD"] = dframe["TOP_TVD"].shift(-1)
        dframe["BASE_MD"] = dframe["TOP_MD"].shift(-1)
        dframe.at[dframe.index[-1], "BASE_TVD"] = df_max.iloc[-1]["Z_TVDSS"]
        dframe.at[dframe.index[-1], "BASE_MD"] = df_max.iloc[-1][xtg_well.mdlogname]

        # adjust zone values to get correct zone information
        dframe["ZONE_CODE"] = shift_zone_values(dframe["ZONE_CODE"].values.copy())
        dframe["ZONE"] = (
            dframe["ZONE_CODE"]
            .map(xtg_well.get_logrecord(xtg_well.zonelogname))
            .fillna("Outside")
        )
        dfs.append(dframe.drop(columns=["TopName", "Q_INCL", "Q_AZI"]))

    df = pd.concat(dfs)

    # optionally translate RMS well names to Eclipse well names
    if alias_file is not None:
        well_dict = make_alias_dict(alias_file, rms_name, ecl_name)
        df["WELL"] = df["WELL"].replace(well_dict)
    return df
def load_grid(gridpath: str) -> xtgeo.Grid:
    """Read a 3D grid from file; thin wrapper around xtgeo.grid_from_file."""
    return xtgeo.grid_from_file(gridpath)
def test_import_dualperm_grid():
    """Test grid with flag for dual perm setup (hence dual poro also) water/oil"""
    grd = xtgeo.grid_from_file(DUALFILE2 + ".EGRID")
    assert grd.dualporo is True
    assert grd.dualperm is True
    assert grd.dimensions == (5, 3, 1)
    grd.to_file(os.path.join(TMPDIR, "dual2.roff"))

    # matrix porosity
    poro = xtgeo.gridproperty_from_file(DUALFILE2 + ".INIT", grid=grd, name="PORO")
    print(poro.values)
    tsetup.assert_almostequal(poro.values[0, 0, 0], 0.1, 0.001)
    tsetup.assert_almostequal(poro.values[1, 1, 0], 0.16, 0.001)
    tsetup.assert_almostequal(poro.values[4, 2, 0], 0.24, 0.001)
    assert poro.name == "POROM"
    poro.describe()

    # fracture porosity
    poro = xtgeo.gridproperty_from_file(
        DUALFILE2 + ".INIT", grid=grd, name="PORO", fracture=True
    )
    tsetup.assert_almostequal(poro.values[0, 0, 0], 0.25, 0.001)
    tsetup.assert_almostequal(poro.values[3, 0, 0], 0.0, 0.001)
    tsetup.assert_almostequal(poro.values[4, 2, 0], 0.39, 0.001)
    assert poro.name == "POROF"
    poro.describe()

    # matrix permeability
    perm = xtgeo.gridproperty_from_file(DUALFILE2 + ".INIT", grid=grd, name="PERMX")
    tsetup.assert_almostequal(perm.values[0, 0, 0], 100.0, 0.001)
    tsetup.assert_almostequal(perm.values[3, 0, 0], 100.0, 0.001)
    tsetup.assert_almostequal(perm.values[0, 1, 0], 0.0, 0.001)
    tsetup.assert_almostequal(perm.values[4, 2, 0], 100, 0.001)
    assert perm.name == "PERMXM"
    perm.to_file(os.path.join(TMPDIR, "dual2_permxm.roff"))

    # fracture permeability
    perm = xtgeo.gridproperty_from_file(
        DUALFILE2 + ".INIT", grid=grd, name="PERMX", fracture=True
    )
    tsetup.assert_almostequal(perm.values[0, 0, 0], 100.0, 0.001)
    tsetup.assert_almostequal(perm.values[3, 0, 0], 0.0, 0.001)
    tsetup.assert_almostequal(perm.values[0, 1, 0], 100.0, 0.001)
    tsetup.assert_almostequal(perm.values[4, 2, 0], 100, 0.001)
    assert perm.name == "PERMXF"
    perm.to_file(os.path.join(TMPDIR, "dual2_permxf.roff"))

    # restart saturations, matrix
    swat = xtgeo.gridproperty_from_file(
        DUALFILE2 + ".UNRST", grid=grd, name="SWAT", date=20170121, fracture=False
    )
    tsetup.assert_almostequal(swat.values[3, 0, 0], 0.55475, 0.001)

    soil = xtgeo.gridproperty_from_file(
        DUALFILE2 + ".UNRST", grid=grd, name="SOIL", date=20170121, fracture=False
    )
    print(soil.values)
    tsetup.assert_almostequal(soil.values[3, 0, 0], 0.44525, 0.001)
    tsetup.assert_almostequal(soil.values[0, 1, 0], 0.0, 0.001)
    assert np.ma.is_masked(soil.values[1, 2, 0])
    tsetup.assert_almostequal(soil.values[3, 2, 0], 0.0, 0.001)
    tsetup.assert_almostequal(soil.values[4, 2, 0], 0.41271, 0.001)

    # restart saturations, fracture
    swat = xtgeo.gridproperty_from_file(
        DUALFILE2 + ".UNRST", grid=grd, name="SWAT", date=20170121, fracture=True
    )
    swat.describe()
    assert "SWATF" in swat.name
    tsetup.assert_almostequal(swat.values[3, 0, 0], 0.0, 0.001)
    # CONSISTENCY FIX: write under TMPDIR like every other output in this
    # test (was hardcoded "TMP/swat.roff")
    swat.to_file(os.path.join(TMPDIR, "swat.roff"))
def test_import_dualperm_grid_sgas():
    """Test grid with flag for dual perm/poro setup gas/water"""
    grd = xtgeo.grid_from_file(DUALFILE3 + ".EGRID")

    # matrix gas saturation
    sgas = xtgeo.gridproperty_from_file(
        DUALFILE3 + ".UNRST", grid=grd, name="SGAS", date=20170121, fracture=False
    )
    sgas.describe()
    tsetup.assert_almostequal(sgas.values[3, 0, 0], 0.06639, 0.001)
    tsetup.assert_almostequal(sgas.values[0, 1, 0], 0.0, 0.001)
    tsetup.assert_almostequal(sgas.values[4, 2, 0], 0.10696, 0.001)
    # BUGFIX: was `assert "SGASM in sgas.name"` — asserting a non-empty
    # string literal, which is always true; now checks the property name
    assert "SGASM" in sgas.name

    # matrix water saturation
    swat = xtgeo.gridproperty_from_file(
        DUALFILE3 + ".UNRST", grid=grd, name="SWAT", date=20170121, fracture=False
    )
    swat.describe()
    tsetup.assert_almostequal(swat.values[3, 0, 0], 0.93361, 0.001)
    tsetup.assert_almostequal(swat.values[0, 1, 0], 0.0, 0.001)
    tsetup.assert_almostequal(swat.values[4, 2, 0], 0.89304, 0.001)
    # BUGFIX: same vacuous-assert pattern as above, now a real check
    assert "SWATM" in swat.name

    # shall be not soil actually
    soil = xtgeo.gridproperty_from_file(
        DUALFILE3 + ".UNRST", grid=grd, name="SOIL", date=20170121, fracture=False
    )
    soil.describe()
    tsetup.assert_almostequal(soil.values[3, 0, 0], 0.0, 0.001)
    tsetup.assert_almostequal(soil.values[0, 1, 0], 0.0, 0.001)
    assert "SOILM" in soil.name

    # fractures
    sgas = xtgeo.gridproperty_from_file(
        DUALFILE3 + ".UNRST", grid=grd, name="SGAS", date=20170121, fracture=True
    )
    sgas.describe()
    tsetup.assert_almostequal(sgas.values[3, 0, 0], 0.0, 0.001)
    tsetup.assert_almostequal(sgas.values[0, 1, 0], 0.0018198, 0.001)
    tsetup.assert_almostequal(sgas.values[4, 2, 0], 0.17841, 0.001)
    assert "SGASF" in sgas.name

    swat = xtgeo.gridproperty_from_file(
        DUALFILE3 + ".UNRST", grid=grd, name="SWAT", date=20170121, fracture=True
    )
    swat.describe()
    tsetup.assert_almostequal(swat.values[3, 0, 0], 0.0, 0.001)
    tsetup.assert_almostequal(swat.values[0, 1, 0], 0.99818, 0.001)
    tsetup.assert_almostequal(swat.values[4, 2, 0], 0.82159, 0.001)
    assert "SWATF" in swat.name

    # shall be not soil actually
    soil = xtgeo.gridproperty_from_file(
        DUALFILE3 + ".UNRST", grid=grd, name="SOIL", date=20170121, fracture=True
    )
    soil.describe()
    tsetup.assert_almostequal(soil.values[3, 0, 0], 0.0, 0.001)
    tsetup.assert_almostequal(soil.values[0, 1, 0], 0.0, 0.001)
    assert "SOILF" in soil.name