def test_wellcase_class():
    """Test WellCase data class."""
    well = ensemble_well_props.WellCase(
        xtgeo.well_from_file(WELLNAME1),
        "MDLog",
        [[2200, 2300], [2350, 2400]],
    )
    wobj = well.well
    assert isinstance(wobj, xtgeo.Well)
    print(wobj.dataframe)
    assert wobj.dataframe["Poro"].mean() == pytest.approx(0.200907, abs=0.001)
    assert wobj.nrow == 74

    well = ensemble_well_props.WellCase(
        xtgeo.well_from_file(WELLNAME2, lognames=["MDepth", "PHIT"]),
        "MDepth",
        [[1653, 1670], [1680, 1698]],
        delta=1,
    )
    wobj = well.well
    print(wobj.dataframe)
    assert wobj.dataframe["PHIT"].mean() == pytest.approx(0.176936, abs=0.001)
    assert wobj.nrow == 35
def read_wells(self, wells, welltype="wells", settings=False, reuse=None):
    """Read wells from files or from an RMS project."""
    settings = settings if settings else {}
    reuse = reuse if reuse else []  # guard: membership test below fails on None

    wellist = self._well_preparations(wells)

    CMN.print_info("Reading wells...")
    xtg_wells = []
    if "wells" in reuse:
        reused_wells, wellist = self._reuse_wells(wellist, welltype)
        xtg_wells = reused_wells

    for well in wellist:
        try:
            if welltype == "wells":
                mywell = (
                    xtgeo.well_from_file(
                        well, lognames=settings.get("lognames", "all")
                    )
                    if self._project is None
                    else xtgeo.well_from_roxar(
                        project=self._project,
                        name=well,
                        lognames=settings.get("lognames", "all"),
                        logrun=wells.get("logrun", "log"),
                        trajectory=wells.get("trajectory", "Drilled trajectory"),
                    )
                )
            else:
                mywell = (
                    xtgeo.blockedwell_from_file(well)
                    if self._project is None
                    else xtgeo.blockedwell_from_roxar(
                        project=self._project,
                        gname=wells.get("grid", "Geogrid"),
                        bwname=wells.get("bwname", "BW"),
                        wname=well,
                        lognames=settings.get("lognames", "all"),
                    )
                )
            xtg_wells.append(mywell)
            self._xtgdata[welltype][well] = mywell
            CMN.print_debug(well)
        except ValueError as verr:
            print(f"Could not read well {well}: {verr}")

    CMN.print_debug(f"All valid welldata: {xtg_wells}")

    for mywell in xtg_wells:
        if "depthrange" in settings and settings["depthrange"] is not None:
            tmin, tmax = settings["depthrange"]
            mywell.limit_tvd(tmin, tmax)
        if "rescale" in settings and settings["rescale"] is not None:
            mywell.rescale(settings["rescale"])

    if xtg_wells:
        if welltype == "wells":
            self._wells = xtgeo.Wells()
            self._wells.wells = xtg_wells
        else:
            self._bwells = xtgeo.BlockedWells()
            self._bwells.wells = xtg_wells
    else:
        raise RuntimeError("No wells read, wrong settings?")
def fixture_create_project():
    """Create a tmp RMS project for testing, populate with basic data.

    After the yield command, the teardown phase will remove the tmp RMS project.
    """
    prj1 = str(PRJ)

    print("\n******** Setup RMS project!\n")
    if isdir(prj1):
        print("Remove existing project! (1)")
        shutil.rmtree(prj1)

    project = roxar.Project.create()

    rox = xtgeo.RoxUtils(project)
    print("Roxar version is", rox.roxversion)
    print("RMS version is", rox.rmsversion(rox.roxversion))
    assert "1." in rox.roxversion

    for wfile in WELLS1:
        wobj = xtgeo.well_from_file(WELLSFOLDER1 / wfile)
        if "XP_with" in wfile:
            wobj.name = "OP2_w_repeat"

        wobj.to_roxar(project, wobj.name, logrun="log", trajectory="My trajectory")

    # populate with cube data
    cube = xtgeo.cube_from_file(CUBEDATA1)
    cube.to_roxar(project, CUBENAME1, domain="depth")

    # populate with surface data
    rox.create_horizons_category(SURFCAT1)
    for num, name in enumerate(SURFNAMES1):
        srf = xtgeo.surface_from_file(SURFTOPS1[num])
        project.horizons.create(name, roxar.HorizonType.interpreted)
        srf.to_roxar(project, name, SURFCAT1)

    # populate with grid and props
    grd = xtgeo.grid_from_file(GRIDDATA1)
    grd.to_roxar(project, GRIDNAME1)
    por = xtgeo.gridproperty_from_file(PORODATA1, name=PORONAME1)
    por.to_roxar(project, GRIDNAME1, PORONAME1)
    zon = xtgeo.gridproperty_from_file(ZONEDATA1, name=ZONENAME1)
    zon.values = zon.values.astype(np.uint8)
    zon.to_roxar(project, GRIDNAME1, ZONENAME1)

    # save project (both an initial version and a work version) and exit
    project.save_as(prj1)
    project.close()

    yield project

    print("\n******* Teardown RMS project!\n")

    if isdir(prj1):
        print("Remove existing project! (1)")
        shutil.rmtree(prj1)
def loop_for_compute(
    config: dict, sinfo: ScreenInfo, _dryrun: bool = False
) -> EnsembleWellProps:
    """Collect data for computing the ensemble statistics.

    Args:
        config: The input configuration dictionary
        sinfo: Messages to screen instance
        _dryrun: For testing, skipping computation
    """
    cfg = ConfigData(config)

    wcase = WellCase(
        xtgeo.well_from_file(cfg.wellfile, lognames=cfg.lognames),
        cfg.mdlog,
        cfg.mdranges,
        cfg.welldelta,
    )

    grd = None

    sinfo.oprint("Loop data over realizations...")
    used_realizations = []
    for real in cfg.reals:
        sinfo.oprint(f"Realization no. {real}")
        realiterpath = cfg.root / f"realization-{real}" / cfg.itera

        if not isinstance(grd, xtgeo.Grid) or not cfg.gridreuse:
            # one may choose to reuse the grid if there is no structural uncertainty
            sinfo.oprint(f"Read grid geometry for realization {real}")
            gpath = realiterpath / cfg.gridfilestub
            try:
                grd = xtgeo.grid_from_file(gpath)
            except OSError:
                sinfo.oprint(f"Not able to read grid {gpath}, skip realization...")
                continue

        wcase.well.delete_logs(GCELLNAMES)
        wcase.well.make_ijk_from_grid(grd)

        for propcase in cfg.proplist:
            proppath = realiterpath / propcase.filestub
            sinfo.oprint(f"Read: {proppath}...")
            try:
                theprop = xtgeo.gridproperty_from_file(proppath)
            except OSError:
                sinfo.oprint(
                    f"Not able to read property {propcase.name} from {proppath}, "
                    "skip realization..."
                )
                continue

            theprop.geometry = grd
            if _dryrun is False:
                run_compute(real, wcase.well, propcase, theprop)

        used_realizations.append(real)

    sinfo.xprint("Delete logs referring to cells...")
    wcase.well.delete_logs(GCELLNAMES)

    return EnsembleWellProps(wcase.well, used_realizations, cfg, sinfo)
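# Usage sketch for loop_for_compute (not from the source): it assumes PyYAML is
# available, that the configuration lives in a YAML file whose keys match what
# ConfigData expects, and that ScreenInfo can be instantiated without arguments.
# The file name "ensemble_well_props_config.yml" is a hypothetical placeholder.
import yaml

with open("ensemble_well_props_config.yml", encoding="utf-8") as stream:
    example_config = yaml.safe_load(stream)

# _dryrun=True skips the actual property computation but still loops the ensemble
ensres = loop_for_compute(example_config, ScreenInfo(), _dryrun=True)
print(type(ensres))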
def load_well(
    wfile: Union[str, Path],
    zonelogname: Optional[str] = None,
    mdlogname: Optional[str] = None,
) -> xtgeo.Well:
    return xtgeo.well_from_file(
        wfile=wfile, zonelogname=zonelogname, mdlogname=mdlogname
    )
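# Minimal usage sketch for the load_well wrapper above. The well file name and
# the log names are hypothetical placeholders, not taken from the source data.
example_well = load_well("OP_1.rmswell", zonelogname="Zonelog", mdlogname="MD")
print(example_well.name, example_well.nrow)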
def test_load_well():
    well_file = "55_33-A-1.w"
    well_file = Path(os.path.join(test_folder, data_folder, well_folder, well_file))
    xtgeo_well_A1 = xtgeo.well_from_file(well_file, mdlogname="MD")
    well_A1 = load_well(well_file)
    assert well_A1.name == xtgeo_well_A1.name
def write_backing_store(
    storage_dir: Path,
    storage_key: str,
    well_file_names: List[str],
    md_logname: Optional[str],
) -> None:
    timer = PerfTimer()

    # All data for this provider will be stored inside a sub-directory
    # given by the storage key
    provider_dir = storage_dir / storage_key
    LOGGER.debug(f"Writing well backing store to: {provider_dir}")
    provider_dir.mkdir(parents=True, exist_ok=True)

    inventory_dict: Dict[str, dict] = {}

    LOGGER.debug(f"Writing {len(well_file_names)} wells into backing store...")

    timer.lap_s()
    for file_name in well_file_names:
        well = xtgeo.well_from_file(wfile=file_name, mdlogname=md_logname)
        if well.mdlogname is None:
            try:
                well.geometrics()
            except ValueError:
                LOGGER.debug(f"Ignoring {well.name} as MD cannot be calculated")
                continue

        print("well.mdlogname=", well.mdlogname)

        well_name = well.name
        rel_path = f"{well_name}.rmswell"
        # rel_path = f"{well_name}.hdf"

        dst_file = provider_dir / rel_path
        print("dst_file=", dst_file)
        well.to_file(wfile=dst_file, fformat="rmswell")
        # well.to_hdf(wfile=dst_file)

        inventory_dict[well_name] = {
            INV_KEY_REL_PATH: rel_path,
            INV_KEY_MD_LOGNAME: well.mdlogname,
        }

    et_copy_s = timer.lap_s()

    json_fn = provider_dir / "inventory.json"
    with open(json_fn, "w") as file:
        json.dump(inventory_dict, file)

    LOGGER.debug(
        f"Wrote well backing store in: {timer.elapsed_s():.2f}s ("
        f"copy={et_copy_s:.2f}s)"
    )
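# Usage sketch for write_backing_store. The storage directory, storage key and
# well-file names below are hypothetical placeholders. Wells are written as
# rmswell files under <storage_dir>/<storage_key>/ together with an
# inventory.json describing them.
from pathlib import Path

write_backing_store(
    storage_dir=Path("./backing_store"),
    storage_key="well_provider",
    well_file_names=["OP_1.rmswell", "OP_2.rmswell"],
    md_logname="MD",
)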
def get_well_xtgeo_obj(self, well_name: str) -> xtgeo.Well:
    well_entry = self._inventory.get(well_name)
    if not well_entry:
        raise ValueError(f"Requested well name {well_name} not found")

    rel_fn = well_entry[INV_KEY_REL_PATH]
    md_logname = well_entry[INV_KEY_MD_LOGNAME]

    full_file_name = self._provider_dir / rel_fn
    well = xtgeo.well_from_file(
        wfile=full_file_name, fformat="rmswell", mdlogname=md_logname
    )
    return well
def test_load_all_wells():
    well_file = "55_33-A-1.w"
    well_file = Path(os.path.join(test_folder, data_folder, well_folder, well_file))
    xtgeo_well_A1 = xtgeo.well_from_file(well_file, mdlogname="MD")
    well_A1 = load_well(well_file)

    wellbore_info = "wellbore_info.csv"
    wellbore_info = Path(
        os.path.join(test_folder, data_folder, well_folder, wellbore_info)
    )
    all_wells_info = read_csv(csv_file=wellbore_info)
    all_wells_df = load_all_wells(all_wells_info)
    well_A1_df = all_wells_df[all_wells_df["WELLBORE_NAME"] == "55/33-A-1"]

    assert well_A1.dataframe["X_UTME"].all() == well_A1_df["X_UTME"].all()
def load_well(
    wfile: Union[str, Path],
    zonelogname: Optional[str] = None,
    mdlogname: Optional[str] = None,
    lognames: Optional[List[str]] = None,
) -> xtgeo.Well:
    lognames = [] if not lognames else lognames
    if zonelogname is not None and zonelogname not in lognames:
        lognames.append(zonelogname)
    if mdlogname is not None and mdlogname not in lognames:
        lognames.append(mdlogname)

    well = xtgeo.well_from_file(
        wfile=wfile, zonelogname=zonelogname, mdlogname=mdlogname, lognames=lognames
    )
    # Create a relative XYLENGTH vector (0.0 where well starts)
    well.create_relative_hlen()
    return well
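# Usage sketch for this load_well variant (hypothetical file and log names): the
# zonelog and MD log are appended to 'lognames' so only the requested curves are
# loaded, and a relative horizontal-length log is added to the well dataframe.
example_well = load_well(
    "OP_1.rmswell",
    zonelogname="Zonelog",
    mdlogname="MD",
    lognames=["PHIT"],
)
print(example_well.lognames)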
def test_compute_some_props(configdata):
    """Test the actual compute of one well on one realization."""
    cfg = ensemble_well_props.ConfigData(configdata)

    wcase = ensemble_well_props.WellCase(
        xtgeo.well_from_file(WELLNAME2, lognames=cfg.lognames), cfg.mdlog, cfg.mdranges
    )

    grd = xtgeo.grid_from_file(GFILE1)
    wcase.well.make_ijk_from_grid(grd)

    myprops = [FACIESFILE1, POROFILE1]

    for ncount, pcase in enumerate(myprops):
        prop = xtgeo.gridproperty_from_file(pcase)
        prop.geometry = grd
        ensemble_well_props.run_compute(0, wcase.well, cfg.proplist[ncount], prop)

    assert "Facies_r0" in wcase.well.dataframe
    assert wcase.well.dataframe["PHIT_r0"].mean() == pytest.approx(0.171533, abs=0.001)
def __init__(
    self,
    app: Dash,
    webviz_settings: WebvizSettings,
    basedir: Path,
    planned_wells_dir: Path = None,
):
    super().__init__()
    self.plotly_theme = webviz_settings.theme.plotly_theme
    self.uid = uuid4()
    WEBVIZ_ASSETS.add(
        Path(webviz_subsurface.__file__).parent / "_assets" / "css" / "modal.css"
    )
    self.set_callbacks(app)

    self.basedir = basedir
    self.planned_wells_dir = planned_wells_dir
    self.modelfile_path = basedir / "model_file.xml"
    self.modelfile = get_path(self.modelfile_path)
    self.surfaces = load_surfaces(basedir, self.modelfile_path)
    self.planned_wellfiles = (
        json.load(find_files(planned_wells_dir, "*.txt"))
        if planned_wells_dir
        else None
    )
    self.wellfiles = json.load(find_files(basedir / "input" / "welldata", "*.txt"))
    self.wellfiles = [str(get_path(Path(w))) for w in self.wellfiles]
    self.allfiles = json.load(find_files(basedir))
    self.allfiles.append(self.modelfile_path)
    # guard: planned_wellfiles is None when no planned_wells_dir is given
    if self.planned_wellfiles is not None:
        self.allfiles += self.planned_wellfiles
        self.planned_wellfiles = [
            str(get_path(Path(w))) for w in self.planned_wellfiles
        ]

    self.surface_attributes = {}
    for i, surface in enumerate(self.surfaces):
        self.surface_attributes[surface["name"]] = {
            "color": get_color(i),
            "order": i,
            "name": surface["name"],
            "topofzone": surface["topofzone"],
            "surface": surface["d_"],
            "surface_de": surface["de_"],
            "surface_dt": surface["dt_"],
            "surface_dr": surface["dr_"],
            "surface_dte": surface["dte_"],
        }

    self.surfacenames = [surface["name"] for surface in self.surfaces]

    # Log files
    zonation_status_file = get_zonation_status(basedir)
    well_points_file = get_well_points(basedir)
    zonelog_name = get_zonelog_name(self.modelfile)
    self.xsec = HuvXsection(
        self.surface_attributes,
        zonation_status_file,
        well_points_file,
        zonelog_name,
    )
    target_points_file = get_target_points(basedir)
    self.df_well_target_points = FilterTable(target_points_file, well_points_file)

    # Wellfiles and planned wells
    self.planned_wells = {}
    if planned_wells_dir is not None:
        self.planned_wells = {
            wf: xtgeo.well_from_file(wfile=wf) for wf in self.planned_wellfiles
        }
    self.wells = {wf: xtgeo.well_from_file(wfile=wf) for wf in self.wellfiles}

    # Store current layers
    self.state = {"switch": False}
    self.layers_state = []
def extract_grid_zone_tops(
    project: Optional[_roxar.Project] = None,
    well_list: Optional[list] = None,
    logrun: str = "log",
    trajectory: str = "Drilled trajectory",
    gridzonelog: str = None,
    mdlogname: str = None,
    grid: str = None,
    zone_param: str = None,
    alias_file: str = None,
    rms_name: str = "RMS_WELL_NAME",
    ecl_name: str = "ECLIPSE_WELL_NAME",
) -> pd.DataFrame:
    """Extract top and base picks from grid zones, both in TVD and MD.

    A pandas dataframe will be returned.

    Users can either input a pre-generated gridzonelog, or a grid and a zone
    parameter for computing the gridzonelog. The function works both inside RMS
    and outside with file input. With file input, if an MD log is not present
    in the well, a quasi MD log will be computed and used.
    """
    use_gridzonelog = False if gridzonelog is None else True

    if not use_gridzonelog:
        if grid is not None and zone_param is not None:
            if project is not None:
                mygrid = xtgeo.grid_from_roxar(project, grid)
                gridzones = xtgeo.gridproperty_from_roxar(project, grid, zone_param)
            else:
                mygrid = xtgeo.grid_from_file(grid)
                gridzones = xtgeo.gridproperty_from_file(zone_param, grid=mygrid)
            gridzones.name = "Zone"
        else:
            raise ValueError("Specify either 'gridzonelog' or 'grid' and 'zone_param'")

    dfs = []
    if well_list is None:
        well_list = []
    for well in well_list:
        try:
            if project is not None:
                xtg_well = xtgeo.well_from_roxar(
                    project,
                    str(well),
                    trajectory=trajectory,
                    logrun=logrun,
                    inclmd=True,
                )
            else:
                xtg_well = xtgeo.well_from_file(str(well), mdlogname=mdlogname)
                # a quasi MD log will be computed
                xtg_well.geometrics()
        except (ValueError, KeyError):
            continue

        # if no gridzonelog, create one from the zone parameter
        if not use_gridzonelog:
            xtg_well.get_gridproperties(gridzones, mygrid)
            gridzonelog = "Zone_model"

        if xtg_well.dataframe[gridzonelog].isnull().values.all():
            continue

        # Set gridzonelog as zonelog and extract zonation tops from it
        xtg_well.zonelogname = gridzonelog
        dframe = xtg_well.get_zonation_points(top_prefix="", use_undef=True)
        dframe.rename(
            columns={
                "Z_TVDSS": "TOP_TVD",
                xtg_well.mdlogname: "TOP_MD",
                "Zone": "ZONE_CODE",
                "WellName": "WELL",
            },
            inplace=True,
        )
        # find the deepest point in the well while inside the grid
        df_max = (
            xtg_well.dataframe[["Z_TVDSS", xtg_well.mdlogname, gridzonelog]]
            .dropna()
            .sort_values(by=xtg_well.mdlogname)
        )
        # create base picks also
        dframe["BASE_TVD"] = dframe["TOP_TVD"].shift(-1)
        dframe["BASE_MD"] = dframe["TOP_MD"].shift(-1)
        dframe.at[dframe.index[-1], "BASE_TVD"] = df_max.iloc[-1]["Z_TVDSS"]
        dframe.at[dframe.index[-1], "BASE_MD"] = df_max.iloc[-1][xtg_well.mdlogname]

        # adjust zone values to get correct zone information
        dframe["ZONE_CODE"] = shift_zone_values(dframe["ZONE_CODE"].values.copy())
        dframe["ZONE"] = (
            dframe["ZONE_CODE"]
            .map(xtg_well.get_logrecord(xtg_well.zonelogname))
            .fillna("Outside")
        )
        dfs.append(dframe.drop(columns=["TopName", "Q_INCL", "Q_AZI"]))

    df = pd.concat(dfs)

    if alias_file is not None:
        well_dict = make_alias_dict(alias_file, rms_name, ecl_name)
        df["WELL"] = df["WELL"].replace(well_dict)
    return df
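# Usage sketch for extract_grid_zone_tops outside RMS (all file names are
# hypothetical placeholders): the grid zone log is computed from a grid file and
# a zone parameter file, and tops/bases per well are returned as a dataframe.
df_tops = extract_grid_zone_tops(
    well_list=["OP_1.rmswell", "OP_2.rmswell"],
    mdlogname="MD",
    grid="geogrid.roff",
    zone_param="geogrid--zone.roff",
)
print(df_tops[["WELL", "ZONE", "TOP_MD", "BASE_MD"]].head())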
def load_well(well_path):
    """Return a well object (xtgeo) for a given file (RMS ascii format)."""
    return xtgeo.well_from_file(well_path, mdlogname="MD")