def _render_surface(
    surfacepath,
    surface_type,
    gridparameter,
    color_values,
    hillshade,
):
    """Build a Leaflet surface layer; for "attribute" mode, slice the surface
    through a 3D grid parameter and clip colors to the selected value range."""
    surface = xtgeo.surface_from_file(get_path(surfacepath))
    clip_min = None
    clip_max = None
    if surface_type == "attribute":
        # color_values may be None/empty when no range has been picked yet
        if color_values:
            clip_min = color_values[0]
            clip_max = color_values[1]
        grid = load_grid(get_path(self.gridfile))
        parameter = load_grid_parameter(grid, get_path(gridparameter))
        surface.slice_grid3d(grid, parameter)
    layer = SurfaceLeafletModel(
        surface,
        name="surface",
        clip_min=clip_min,
        clip_max=clip_max,
        apply_shading=hillshade.get("value", False),
    ).layer
    return [layer]
def _render_fence(coords, gridparameter, surfacepath, color_values, colorscale):
    """Render a heatmap of grid-parameter values sampled along a polyline,
    with the selected surface drawn as an intersection line."""
    if not coords:
        raise PreventUpdate
    grid = load_grid(get_path(self.gridfile))
    parameter = load_grid_parameter(grid, get_path(gridparameter))
    fence = get_fencespec(coords)
    hmin, hmax, vmin, vmax, values = grid.get_randomline(
        fence, parameter, zincrement=0.5
    )
    surface = xtgeo.surface_from_file(get_path(surfacepath))
    surface_line = get_surface_fence(fence, surface)
    surface_name = self.surfacenames[self.surfacefiles.index(surfacepath)]
    return make_heatmap(
        values,
        s_arr=surface_line,
        theme=self.plotly_theme,
        s_name=surface_name,
        colorscale=colorscale,
        xmin=hmin,
        xmax=hmax,
        ymin=vmin,
        ymax=vmax,
        zmin=color_values[0],
        zmax=color_values[1],
        xaxis_title="Distance along polyline",
        yaxis_title=self.zunit,
    )
def _render_fence(coords, cubepath, surfacepath, color_values, colorscale):
    """Render a heatmap of seismic cube values sampled along a polyline,
    with the selected surface drawn as an intersection line."""
    if not coords:
        raise PreventUpdate
    cube = load_cube_data(get_path(cubepath))
    fence = get_fencespec(coords)
    hmin, hmax, vmin, vmax, values = cube.get_randomline(fence)
    surface = xtgeo.surface_from_file(get_path(surfacepath))
    surface_line = get_surface_fence(fence, surface)
    surface_name = self.surfacenames[self.surfacefiles.index(surfacepath)]
    return make_heatmap(
        values,
        s_arr=surface_line,
        theme=self.plotly_theme,
        s_name=surface_name,
        colorscale=colorscale,
        xmin=hmin,
        xmax=hmax,
        ymin=vmin,
        ymax=vmax,
        zmin=color_values[0],
        zmax=color_values[1],
        xaxis_title="Distance along polyline",
        yaxis_title=self.zunit,
    )
def sum_running_stats_bytestream():
    """Find avg per realisation and do a cumulative rolling mean.

    Memory consumption shall be very low: each realization's surface is read
    from an in-memory byte stream and folded into a running mean, so only one
    surface (plus the accumulator) is held at any time.
    """
    for irel in range(NRUN):
        # NOTE(review): reads the same irap_binary surface file each pass via a
        # BytesIO stream to mimic per-realization input without disk fan-out
        print("Loading realization no {}".format(irel))

        with open(EXPATH1, "rb") as myfile:
            stream = io.BytesIO(myfile.read())

        srf = xtgeo.surface_from_file(stream, fformat="irap_binary")

        nnum = float(irel + 1)
        srf.values += irel * 1  # just to mimic variability between realizations

        # rolling mean: new_mean = old_mean * (n-1)/n + new_values / n,
        # expressed as a masked-array vstack followed by a column sum
        if irel == 0:
            pcum = srf.values1d
        else:
            pavg = srf.values1d / nnum
            pcum = pcum * (nnum - 1) / nnum
            pcum = npma.vstack([pcum, pavg])
            pcum = pcum.sum(axis=0)

    # find the averages:
    print(pcum)
    print(pcum.mean())
    return pcum.mean()
def get_realization_surface(
    self,
    name: str,
    attribute: str,
    realization: int,
    date: Optional[str] = None,
) -> xtgeo.RegularSurface:
    """Return a Xtgeo surface instance of a single realization surface.

    Looks up the surface table by name/attribute/realization (and date when
    given). Returns a dummy 1x1 surface when nothing matches; when several
    rows match, warns and returns the first.
    """
    # NOTE: the original built unused `columns`/`column_values` lists here;
    # the filtering is done entirely by _filter_surface_table, so they were
    # dead code and have been removed.
    df = self._filter_surface_table(
        name=name, attribute=attribute, date=date, realizations=[int(realization)]
    )
    if len(df.index) == 0:
        warnings.warn(
            f"No surface found for name: {name}, attribute: {attribute}, date: {date}, "
            f"realization: {realization}"
        )
        # 1's as input is required to construct a minimal valid surface
        return xtgeo.RegularSurface(ncol=1, nrow=1, xinc=1, yinc=1)
    if len(df.index) > 1:
        warnings.warn(
            f"Multiple surfaces found for name: {name}, attribute: {attribute}, date: {date}, "
            f"realization: {realization}. Returning first surface"
        )
    return xtgeo.surface_from_file(get_stored_surface_path(df.iloc[0]["path"]))
def _get_observed_surface(
    self, address: ObservedSurfaceAddress
) -> Optional[xtgeo.RegularSurface]:
    """Return a Xtgeo surface instance for an observed surface.

    Locates candidate files for the address; returns None when nothing is
    found, warns and uses the first file when several match.
    """
    timer = PerfTimer()

    surf_fns: List[str] = self._locate_observed_surfaces(
        attribute=address.attribute,
        name=address.name,
        datestr=address.datestr if address.datestr is not None else "",
    )

    if len(surf_fns) == 0:
        LOGGER.warning(f"No observed surface found for {address}")
        return None
    if len(surf_fns) > 1:
        # Fixed: the two adjacent literals previously concatenated without a
        # separating space ("...{address}Returning first surface.")
        LOGGER.warning(
            f"Multiple observed surfaces found for: {address} "
            "Returning first surface."
        )

    surf = xtgeo.surface_from_file(surf_fns[0])
    # Fixed: message previously said "simulated" in this observed-surface loader
    LOGGER.debug(f"Loaded observed surface in: {timer.elapsed_s():.2f}s")
    return surf
def fixture_create_project():
    """Create a tmp RMS project for testing, populate with basic data.

    After the yield command, the teardown phase will remove the tmp RMS project.
    """
    prj1 = str(PRJ)

    print("\n******** Setup RMS project!\n")
    # remove any leftover project from a previous (possibly aborted) run
    if isdir(prj1):
        print("Remove existing project! (1)")
        shutil.rmtree(prj1)

    project = roxar.Project.create()
    rox = xtgeo.RoxUtils(project)
    print("Roxar version is", rox.roxversion)
    print("RMS version is", rox.rmsversion(rox.roxversion))
    assert "1." in rox.roxversion

    # populate with well data; one well is renamed to mark its repeated section
    for wfile in WELLS1:
        wobj = xtgeo.well_from_file(WELLSFOLDER1 / wfile)
        if "XP_with" in wfile:
            wobj.name = "OP2_w_repeat"
        wobj.to_roxar(project, wobj.name, logrun="log", trajectory="My trajectory")

    # populate with cube data
    cube = xtgeo.cube_from_file(CUBEDATA1)
    cube.to_roxar(project, CUBENAME1, domain="depth")

    # populate with surface data
    rox.create_horizons_category(SURFCAT1)
    for num, name in enumerate(SURFNAMES1):
        srf = xtgeo.surface_from_file(SURFTOPS1[num])
        project.horizons.create(name, roxar.HorizonType.interpreted)
        srf.to_roxar(project, name, SURFCAT1)

    # populate with grid and props
    grd = xtgeo.grid_from_file(GRIDDATA1)
    grd.to_roxar(project, GRIDNAME1)
    por = xtgeo.gridproperty_from_file(PORODATA1, name=PORONAME1)
    por.to_roxar(project, GRIDNAME1, PORONAME1)
    zon = xtgeo.gridproperty_from_file(ZONEDATA1, name=ZONENAME1)
    # zone property must be discrete; cast to uint8 before storing
    zon.values = zon.values.astype(np.uint8)
    zon.to_roxar(project, GRIDNAME1, ZONENAME1)

    # save the project and hand it to the test
    project.save_as(prj1)
    project.close()

    yield project

    print("\n******* Teardown RMS project!\n")
    if isdir(prj1):
        print("Remove existing project! (1)")
        shutil.rmtree(prj1)
def _get_regsurff(i):
    """Load the shared test surface from file; *i* only tags the log lines."""
    logger.info("Start %s", i)
    surface_file = TESTFILE
    logger.info("File is %s", surface_file)
    surface = xtgeo.surface_from_file(surface_file)
    logger.info("End %s", i)
    return surface
def surface_from_zone_prop(
    surface_table: pd.DataFrame, zone: str, prop: str, ensemble: str, stype: str
) -> xtgeo.RegularSurface:
    """Load the surface matching zone/prop/ensemble/statistic from the table.

    Falls back to an undefined surface when there is no match or the match
    is ambiguous (more than one distinct path).
    """
    mask = (
        (surface_table["zone"] == zone)
        & (surface_table["prop"] == prop)
        & (surface_table["ensemble"] == ensemble)
        & (surface_table["statistic"] == stype)
    )
    matches = surface_table[mask]
    unique_paths = matches["path"].unique()
    if matches.empty or len(unique_paths) > 1:
        return make_undefined_surface()
    surface_path = get_path(pathlib.Path(unique_paths[0]))
    return xtgeo.surface_from_file(surface_path.resolve())
def _get_regsurfi(i):
    """Load the shared test surface via an in-memory byte stream; *i* tags logs."""
    logger.info("Start %s", i)
    surface_file = TESTFILE
    with open(surface_file, "rb") as fin:
        buffer = io.BytesIO(fin.read())
    logger.info("File is %s", surface_file)
    surface = xtgeo.surface_from_file(buffer, fformat="irap_binary")
    logger.info("End %s", i)
    return surface
def append(self, slist):
    """Append surfaces from either a list of RegularSurface objects, a list
    of files, or a mix; unreadable file entries are skipped with a warning."""
    for entry in slist:
        if isinstance(entry, xtgeo.RegularSurface):
            self._surfaces.append(entry)
            continue
        try:
            loaded = xtgeo.surface_from_file(entry, fformat="guess")
        except OSError:
            xtg.warnuser("Cannot read as file, skip: {}".format(entry))
        else:
            self._surfaces.append(loaded)
def surface_to_json(surfacepath: Path) -> str:
    """Serialize an irap_binary surface file to a JSON string of its geometry
    and values (masked nodes become NaN)."""
    surface = xtgeo.surface_from_file(str(surfacepath), fformat="irap_binary")
    payload = {
        "ncol": surface.ncol,
        "nrow": surface.nrow,
        "xori": surface.xori,
        "yori": surface.yori,
        "rotation": surface.rotation,
        "xinc": surface.xinc,
        "yinc": surface.yinc,
        # copy first so filling the mask does not mutate the surface itself
        "values": surface.values.copy().filled(np.nan).tolist(),
    }
    return json.dumps(payload)
def fetch(
    self, address: StatisticalSurfaceAddress
) -> Optional[xtgeo.RegularSurface]:
    """Fetch a cached statistical surface, or None on a cache miss.

    Any failure to read the file (missing, corrupt, wrong format) is treated
    as a miss. Narrowed from a bare ``except:`` so that ``KeyboardInterrupt``
    and ``SystemExit`` are no longer swallowed.
    """
    full_surf_path = self.cache_dir / _compose_stat_surf_file_name(
        address, FILE_EXTENSION
    )
    try:
        return xtgeo.surface_from_file(full_surf_path, fformat=FILE_FORMAT_READ)
    except Exception:  # any read failure is simply a cache miss
        return None
def first_surface_geometry(self) -> Dict:
    """Return extent/origin/grid-layout attributes of the first surface in
    the surface table as a plain dict."""
    surface = xtgeo.surface_from_file(
        get_stored_surface_path(self._surface_table.iloc[0]["path"])
    )
    attributes = (
        "xmin",
        "xmax",
        "ymin",
        "ymax",
        "xori",
        "yori",
        "ncol",
        "nrow",
        "xinc",
        "yinc",
    )
    return {attr: getattr(surface, attr) for attr in attributes}
def surface_from_zone_prop(
    parent, zone: str, prop: str, ensemble: str, stype: str
) -> xtgeo.RegularSurface:
    """Load the statistical surface for the given zone/prop/ensemble.

    Falls back to a constant-zero surface when the file is missing or
    unreadable. Fixed: return annotation previously said ``dict`` although
    the function returns an ``xtgeo.RegularSurface`` on every path (matching
    the sibling implementation elsewhere in the project).
    """
    path = get_surface_path(
        ens_path=parent.surface_folders[ensemble],
        statistic=stype,
        zone=zone,
        prop=prop,
    )
    try:
        return xtgeo.surface_from_file(path.resolve())
    except OSError:
        surf = xtgeo.RegularSurface()
        surf.values = 0
        return surf
def test_regsurf_aggregated_diffdata(fmurun_w_casemetadata, rmsglobalconfig, regsurf):
    """Test surfaces, where input is diffdata."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
    )

    aggs = []
    # create "forward" files: ten realizations, each shifted by its index so
    # the aggregated mean is predictable
    for i in range(10):
        use_regsurf = regsurf.copy()
        use_regsurf.values += float(i)
        expfile = edata.export(
            use_regsurf,
            name="mymap_" + str(i),
            realization=i,
            timedata=[[20300201], [19990204]],
        )
        aggs.append(expfile)

    # next task is to do an aggregation, and now the metadata already exists
    # per input element which shall be re-used
    surfs = xtgeo.Surfaces()
    metas = []
    for mapfile in aggs:
        surf = xtgeo.surface_from_file(mapfile)
        meta = dataio.read_metadata(mapfile)
        metas.append(meta)
        surfs.append([surf])

    aggregated = surfs.statistics()
    logger.info("Aggr. mean is %s", aggregated["mean"].values.mean())  # shall be 1238.5

    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd",
        verbosity="INFO",
        aggregation_id="789politipoliti",
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.info("New metadata:\n%s", utils.prettyprint_dict(newmeta))
def surface_from_zone_prop(
    parent: "PropertyStatistics", zone: str, prop: str, ensemble: str, stype: str
) -> xtgeo.RegularSurface:
    """Load the statistical surface for zone/prop/ensemble, falling back to a
    constant-zero surface when the file cannot be read."""
    if not isinstance(parent.surface_folders, dict):
        raise TypeError("parent.surface_folders must be of type dict")
    surface_path = get_surface_path(
        ens_path=parent.surface_folders[ensemble],
        statistic=stype,
        zone=zone,
        prop=prop,
    )
    try:
        return xtgeo.surface_from_file(surface_path.resolve())
    except OSError:
        fallback = xtgeo.RegularSurface()
        fallback.values = 0
        return fallback
def test_create_project():
    """Create a tmp RMS project for testing, populate with basic data."""
    prj1 = PRJ
    prj2 = PRJ + "_initial"

    # start from a clean slate: drop leftovers from earlier runs
    for prj in (prj1, prj2):
        if isdir(prj):
            print(
                "Remove existing project! (1)" if prj == prj1
                else "Remove existing project! (2)"
            )
            shutil.rmtree(prj)

    project = roxar.Project.create()
    rox = xtgeo.RoxUtils(project)
    print("Roxar version is", rox.roxversion)
    print("RMS version is", rox.rmsversion(rox.roxversion))
    assert "1." in rox.roxversion

    # populate with cube data
    cube = xtgeo.cube_from_file(CUBEDATA1)
    cube.to_roxar(project, CUBENAME1, domain="depth")

    # populate with surface data
    rox.create_horizons_category(SURFCAT1)
    for idx, horizon_name in enumerate(SURFNAMES1):
        srf = xtgeo.surface_from_file(SURFTOPS1[idx])
        project.horizons.create(horizon_name, roxar.HorizonType.interpreted)
        srf.to_roxar(project, horizon_name, SURFCAT1)

    # populate with grid and props
    grd = xtgeo.grid_from_file(GRIDDATA1)
    grd.to_roxar(project, GRIDNAME1)
    por = xtgeo.gridproperty_from_file(PORODATA1, name=PORONAME1)
    por.to_roxar(project, GRIDNAME1, PORONAME1)

    # populate with well data (postponed)

    # save project (both an initial version and a work version) and exit
    project.save_as(prj1)
    project.save_as(prj2)
    project.close()
def test_dataframe_simple():
    """Get a pandas Dataframe object"""
    surf = xtgeo.RegularSurface(TESTSET1)
    frame = surf.dataframe(ijcolumns=True, order="C", activeonly=True)
    tsetup.assert_almostequal(frame["X_UTME"][2], 465956.274, 0.01)

    surf = xtgeo.surface_from_file(TESTSET2)
    frame = surf.dataframe()
    tsetup.assert_almostequal(frame["X_UTME"][2], 461582.562498, 0.01)

    # coarsening shifts node positions, hence the different expected value
    surf.coarsen(2)
    frame = surf.dataframe()
    tsetup.assert_almostequal(frame["X_UTME"][2], 461577.5575, 0.01)
def slice_a_cube_with_surface():
    """Slice a seismic cube with a surface on OW dat/map format"""
    cubefile = EXPATH1 / "ib_test_cube2.segy"
    surfacefile = EXPATH2 / "h1.dat"

    cube = xtgeo.cube_from_file(cubefile)

    # import map/dat surface using cube as template (inline/xline must match)
    surface = xtgeo.surface_from_file(surfacefile, fformat="ijxyz", template=cube)

    # sample cube values to the surface (replacing current depth values),
    # then export the result
    surface.slice_cube(cube, sampling="trilinear")
    surface.to_file(TMPDIR / "slice.dat", fformat="ijxyz")
def test_integration_values(values_flag, expected_result):
    """Read a small inline zmap grid and check geometry and (optional) values.

    NOTE(review): the zmap literal below is line-oriented; the line breaks were
    reconstructed from the zmap plus format (header records, '@' terminator,
    then one value per line) — confirm against the original file.
    """
    result = xtgeo.surface_from_file(
        StringIO(
            """! Example 2x2 grid
@zmap_example.zmap HEADER , GRID, 5
15, -99999.0000, , 4, 1
2, 2, 1.0000, 2.0000, 1.0000, 2.0000
0.0000, 0.0000, 0.0000
@
+ Grid data starts after this line
1.0000
2.0000
3.0000
4.0000
"""
        ),
        fformat="zmap",
        values=values_flag,
    )
    assert result.xmax == 2.0
    assert result.ymax == 2.0
    assert result.xinc == 1.0
    assert result.yinc == 1.0
    assert list(result.values.data.flatten()) == expected_result
def test_bytesio_string_encoded():
    """Test a case where the string is encoded, then decoded"""
    with open(TESTSET1, "rb") as fin:
        raw = io.BytesIO(fin.read()).read()

    # this mimics data from a browser that are base64 encoded
    encoded = base64.urlsafe_b64encode(raw).decode("utf-8")
    assert isinstance(encoded, str)

    # now decode this and read the surface back from the byte stream
    decoded = base64.urlsafe_b64decode(encoded)
    assert isinstance(decoded, bytes)

    stream = io.BytesIO(decoded)
    xsurf = xtgeo.surface_from_file(stream, fformat="irap_binary")
    assert xsurf.ncol == 554
    assert xsurf.nrow == 451
def main(): """Exporting maps from clipboard""" surf = xtgeo.surface_from_file(INPUT_FOLDER / FILE) print(f"Average value of map is {surf.values.mean()}") ed = dataio.ExportData( config=CFG, name="noname_here", unit="fraction", vertical_domain={"depth": "msl"}, content="property", timedata=None, is_prediction=True, is_observation=False, tagname="average_poro", workflow="rms property model", ) fname = ed.export(surf, name="all") # note that 'name' here will be used print(f"File name is {fname}")
def _render_surface(surfacepath, surface_type, cubepath, color_values, hillshade):
    """Build a Leaflet surface layer; for "attribute" mode, slice the surface
    through the seismic cube and clip colors to the selected value range."""
    surface = xtgeo.surface_from_file(get_path(surfacepath))
    clip_min = None
    clip_max = None
    if surface_type == "attribute":
        # color_values may be None/empty when no range has been picked yet
        if color_values:
            clip_min = color_values[0]
            clip_max = color_values[1]
        cube = load_cube_data(get_path(cubepath))
        surface.slice_cube(cube)
    layer = SurfaceLeafletModel(
        surface,
        name="surface",
        clip_min=clip_min,
        clip_max=clip_max,
        apply_shading=hillshade.get("value", False),
    ).layer
    return [layer]
def attribute_around_surface_symmetric():
    """Get attribute around a surface (symmetric window)"""
    cubefile = EXPATH1 / "ib_test_cube2.segy"
    surfacefile = EXPATH2 / "h1.dat"

    cube = xtgeo.cube_from_file(cubefile)
    surface = xtgeo.surface_from_file(surfacefile, fformat="ijxyz", template=cube)

    # compute the requested attributes within +/- zrange around the surface
    attrs = ["max", "mean"]
    attr_maps = surface.slice_cube_window(
        cube, attribute=attrs, sampling="trilinear", zrange=10.0
    )

    for attr, attr_map in attr_maps.items():
        attr_map.to_file(
            TMPDIR / ("myfile_symmetric_" + attr + ".dat"), fformat="ijxyz"
        )
def _render_fence(coords, gridparameter, surfacepath, color_values, colorscale):
    """Render a heatmap of grid-parameter values along a polyline, dropping
    inconsistent subgrid metadata (xtgeo 2.15.2 export bug) with a warning."""
    if not coords:
        raise PreventUpdate
    grid = load_grid(get_path(self.gridfile))
    if grid.subgrids is not None:
        subgrid_layers = sum(len(subgrid) for subgrid in grid.subgrids.values())
        if subgrid_layers != grid.nlay:
            warnings.warn(
                (
                    f"Subgrid information in {self.gridfile} does not correspond "
                    "with number of grid layers. Subgrid information will be removed. "
                    "This is a bug in xtgeo==2.15.2 for grids exported from RMS using Xtgeo. "
                    "Export the grid with xtgeo>2.15.2 to remove this warning. "
                ),
                FutureWarning,
            )
            grid.subgrids = None
    parameter = load_grid_parameter(grid, get_path(gridparameter))
    fence = get_fencespec(coords)
    hmin, hmax, vmin, vmax, values = grid.get_randomline(
        fence, parameter, zincrement=0.5
    )
    surface = xtgeo.surface_from_file(get_path(surfacepath))
    surface_line = get_surface_fence(fence, surface)
    surface_name = self.surfacenames[self.surfacefiles.index(surfacepath)]
    return make_heatmap(
        values,
        s_arr=surface_line,
        theme=self.plotly_theme,
        s_name=surface_name,
        colorscale=colorscale,
        xmin=hmin,
        xmax=hmax,
        ymin=vmin,
        ymax=vmax,
        zmin=color_values[0],
        zmax=color_values[1],
        xaxis_title="Distance along polyline",
        yaxis_title=self.zunit,
    )
def attribute_around_surface_asymmetric():
    """Get attribute around a surface (asymmetric window)"""
    cubefile = EXPATH1 / "ib_test_cube2.segy"
    surfacefile = EXPATH2 / "h1.dat"

    above = 10
    below = 20

    cube = xtgeo.cube_from_file(cubefile)
    surface = xtgeo.surface_from_file(surfacefile, fformat="ijxyz", template=cube)

    # instead of using zrange, make temporary shifted copies of the surface
    # that define the asymmetric window boundaries
    upper = surface.copy()
    lower = surface.copy()
    upper.values -= above
    lower.values += below

    if DEBUG:
        upper.describe()
        lower.describe()

    attr_maps = surface.slice_cube_window(
        cube, attribute="all", sampling="trilinear", zsurf=upper, other=lower
    )

    for attr, attr_map in attr_maps.items():
        if DEBUG:
            attr_map.describe()
        attr_map.to_file(
            TMPDIR / ("myfile_asymmetric_" + attr + ".dat"), fformat="ijxyz"
        )
def slice_a_grid():
    """Slice a 3D grid property with maps (looping)"""
    expath1 = pathlib.Path("../../xtgeo-testdata/3dgrids/reek")
    expath2 = pathlib.Path("../../xtgeo-testdata/surfaces/reek/1")

    gridfileroot = expath1 / "REEK"
    surfacefile = expath2 / "midreek_rota.gri"

    initprops = ["PORO", "PERMX"]
    grd = xtgeo.grid_from_file(gridfileroot, fformat="eclipserun", initprops=initprops)

    # read a surface, which is used for "template"
    template = xtgeo.surface_from_file(surfacefile)
    template.refine(2)  # make finer for nicer sampling (NB takes time then)

    for depth in (1700, 1720, 1740):
        print("Slice is {}".format(depth))
        for prp in grd.props:
            print("Work with {}, slice at {}".format(prp.name, depth))

            slice_surf = template.copy()
            slice_surf.values = depth  # constant-depth surface for this slice
            slice_surf.slice_grid3d(grd, prp)

            slice_surf.to_file(TMPDIR / "{}_{}.gri".format(prp.name, depth))
            pngfile = TMPDIR / ("{}_{}.png".format(prp.name, depth))
            if "SKIP_PLOT" not in os.environ:
                slice_surf.quickplot(filename=pngfile)
def main(): """Exporting maps from clipboard""" files = INPUT_FOLDER.glob("*.gri") for file in files: surf = xtgeo.surface_from_file(file) attribute = "unset" for pattern, attr in TRANSLATE.items(): if pattern in str(file).lower(): attribute = attr name = "unset" for pattern, attr in NAMETRANSLATE.items(): if pattern in str(file).lower(): name = attr ed = dataio.ExportData( config=CFG, name=name, unit="fraction", content={ "property": { "attribute": attribute, "is_discrete": False } }, vertical_domain={"depth": "msl"}, timedata=None, is_prediction=True, is_observation=False, tagname="average_" + attribute, verbosity="INFO", workflow="rms property model", ) fname = ed.export(surf) print(f"File name is {fname}")
def calculate_statistical_surface(
    self,
    name: str,
    attribute: str,
    calculation: Optional[str] = "Mean",
    date: Optional[str] = None,
    realizations: Optional[List[int]] = None,
) -> xtgeo.RegularSurface:
    """Returns a Xtgeo surface instance for a calculated surface"""
    df = self._filter_surface_table(
        name=name, attribute=attribute, date=date, realizations=realizations
    )
    paths = sorted(list(df["path"]))
    # When portable, check if the surface has been stored; if not, calculate
    try:
        surface_stream = save_statistical_surface(paths, calculation)
    except OSError:
        surface_stream = save_statistical_surface_no_store(paths, calculation)
    return xtgeo.surface_from_file(surface_stream, fformat="irap_binary")