def import_irap_binary(self, mfile, values=True):
    """Import Irap binary format."""
    ifile = xtgeo._XTGeoCFile(mfile)
    logger.info("Enter function %s", __name__)
    logger.info("File handle %s", ifile.fhandle)

    # read with mode 0, to get mx my and other metadata
    (
        ier,
        self._ncol,
        self._nrow,
        _ndef,
        self._xori,
        self._yori,
        self._xinc,
        self._yinc,
        self._rotation,
        val,
    ) = _cxtgeo.surf_import_irap_bin(ifile.fhandle, 0, 1, 0)

    if ier != 0:
        ifile.close()
        raise RuntimeError("Error in reading Irap binary file")

    self._yflip = 1
    if self._yinc < 0.0:
        self._yinc *= -1
        self._yflip = -1

    self._filesrc = mfile

    self._ilines = np.array(range(1, self._ncol + 1), dtype=np.int32)
    self._xlines = np.array(range(1, self._nrow + 1), dtype=np.int32)

    # lazy loading, not reading the arrays
    if not values:
        self._values = None
        ifile.close()
        return

    nval = self._ncol * self._nrow
    xlist = _cxtgeo.surf_import_irap_bin(ifile.fhandle, 1, nval, 0)
    if xlist[0] != 0:
        ifile.close()
        raise RuntimeError("Problem in {}, code {}".format(__name__, xlist[0]))

    val = xlist[-1]

    val = np.reshape(val, (self._ncol, self._nrow), order="C")

    val = ma.masked_greater(val, xtgeo.UNDEF_LIMIT)

    if np.isnan(val).any():
        logger.info("NaN values are found, will mask...")
        val = ma.masked_invalid(val)

    self._values = val

    ifile.close()

def _export_irap_binary_cxtgeotest(self, mfile):
    """Export to Irap RMS binary format. TEST SWIG FLAT"""
    fout = xtgeo._XTGeoCFile(mfile, mode="wb")

    print(self.values.mask.astype(np.uint8).mean())

    ier = _cxtgeo.surf_export_irap_bin_test(
        fout.fhandle,
        self._ncol,
        self._nrow,
        self._xori,
        self._yori,
        self._xinc,
        self._yflip * self._yinc,
        self._rotation,
        self.values.data,
        self.values.mask,
    )

    if ier != 0:
        raise RuntimeError("Export to Irap Binary went wrong, code is {}".format(ier))

    fout.close()

def import_ijxyz_ascii_tmpl(self, mfile, template):
    """Import OW/DSG IJXYZ ascii format, with a Cube or RegularSurface
    instance as template."""
    fin = xtgeo._XTGeoCFile(mfile)

    if isinstance(template, (xtgeo.cube.Cube, xtgeo.surface.RegularSurface)):
        logger.info("OK template")
    else:
        raise ValueError("Template is of wrong type: {}".format(type(template)))

    nxy = template.ncol * template.nrow
    _iok, val = _cxtgeo.surf_import_ijxyz_tmpl(
        fin.fhandle, template.ilines, template.xlines, nxy, 0
    )

    val = ma.masked_greater(val, xtgeo.UNDEF_LIMIT)

    self._xori = template.xori
    self._xinc = template.xinc
    self._yori = template.yori
    self._yinc = template.yinc
    self._ncol = template.ncol
    self._nrow = template.nrow
    self._rotation = template.rotation
    self._yflip = template.yflip

    self._values = val.reshape((self._ncol, self._nrow))

    self._filesrc = mfile
    self._ilines = template._ilines.copy()
    self._xlines = template._xlines.copy()

    fin.close()

def export_ijxyz_ascii(self, mfile):
    """Export to DSG IJXYZ ascii format."""
    fout = xtgeo._XTGeoCFile(mfile, mode="wb")

    vals = self.get_values1d(fill_value=xtgeo.UNDEF)

    ier = _cxtgeo.surf_export_ijxyz(
        fout.fhandle,
        self._ncol,
        self._nrow,
        self._xori,
        self._yori,
        self._xinc,
        self._yinc,
        self._rotation,
        self._yflip,
        self._ilines,
        self._xlines,
        vals,
        0,
    )

    if ier != 0:
        raise RuntimeError("Export to IJXYZ format went wrong, code is {}".format(ier))

    fout.close()

def import_bgrdecl_prop(self, pfile, name="unknown", grid=None):
    """Import property from binary files with GRDECL layout"""
    local_fhandle = False
    fhandle = pfile
    if isinstance(pfile, str):
        local_fhandle = True
        pfile = xtgeo._XTGeoCFile(pfile)
        fhandle = pfile.fhandle

    # scan file for properties; these have similar binary format as e.g. EGRID
    logger.info("Make kwlist by scanning")
    kwlist = utils.scan_keywords(
        fhandle, fformat="xecl", maxkeys=1000, dataframe=False, dates=False
    )
    bpos = {}
    bpos[name] = -1

    for kwitem in kwlist:
        kwname, kwtype, kwlen, kwbyte = kwitem
        logger.info("KWITEM: %s", kwitem)
        if name == kwname:
            bpos[name] = kwbyte
            break

    if bpos[name] == -1:
        raise xtgeo.KeywordNotFoundError(
            "Cannot find property name {} in file {}".format(name, pfile)
        )

    self._ncol = grid.ncol
    self._nrow = grid.nrow
    self._nlay = grid.nlay

    values = _eclbin.eclbin_record(fhandle, kwname, kwlen, kwtype, kwbyte)
    if kwtype == "INTE":
        self._isdiscrete = True

        # make the code list
        uniq = np.unique(values).tolist()
        codes = dict(zip(uniq, uniq))
        codes = {key: str(val) for key, val in codes.items()}  # val: strings
        self.codes = codes

    else:
        self._isdiscrete = False
        values = values.astype(np.float64)  # cast REAL (float32) to float64
        self.codes = {}

    # property arrays from binary GRDECL will be for all cells, but they
    # are in Fortran order, so need to convert...

    actnum = grid.get_actnum().values
    allvalues = values.reshape(self.dimensions, order="F")
    allvalues = np.asanyarray(allvalues, order="C")
    allvalues = ma.masked_where(actnum < 1, allvalues)
    self.values = allvalues
    self._name = name

    if local_fhandle and not pfile.close(cond=local_fhandle):
        raise RuntimeError("Error in file handling; cannot close file")

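# Illustrative only: the "local_fhandle" pattern used in import_bgrdecl_prop
# above (and in scan_dates, scan_keywords, import_ecl_bgrdecl and others below)
# extracted into a hedged helper sketch. The helper name _open_maybe_local is
# hypothetical and not part of xtgeo; it only shows the idea of accepting
# either a path string or an already-open handle.
def _open_maybe_local(pfile):
    """Return (fhandle, cfile, local_fhandle) for a path or an open handle.

    If local_fhandle is True the caller owns the file and should close it
    with cfile.close(cond=local_fhandle) when done.
    """
    if isinstance(pfile, str):
        cfile = xtgeo._XTGeoCFile(pfile)
        return cfile.fhandle, cfile, True
    # already an open handle managed by the caller; do not close it here
    return pfile, None, False
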
def export_irap_ascii(self, mfile):
    """Export to Irap RMS ascii format."""
    fout = xtgeo._XTGeoCFile(mfile, mode="wb")

    vals = self.get_values1d(fill_value=xtgeo.UNDEF)

    ier = _cxtgeo.surf_export_irap_ascii(
        fout.fhandle,
        self._ncol,
        self._nrow,
        self._xori,
        self._yori,
        self._xinc,
        self._yflip * self._yinc,
        self._rotation,
        vals,
        0,
    )
    if ier != 0:
        raise RuntimeError("Export to Irap Ascii went wrong, code is {}".format(ier))

    del vals

    fout.close()

def export_irap_ascii(self, mfile):
    """Export to Irap RMS ascii format."""
    fout = xtgeo._XTGeoCFile(mfile, mode="wb")

    zmin = self.values.min()
    zmax = self.values.max()

    vals = self.get_values1d(fill_value=xtgeo.UNDEF)
    logger.debug("SHAPE %s %s", vals.shape, vals.dtype)

    ier = _cxtgeo.surf_export_irap_ascii(
        fout.fhandle,
        self._ncol,
        self._nrow,
        self._xori,
        self._yori,
        self._xinc,
        self._yflip * self._yinc,
        self._rotation,
        vals,
        zmin,
        zmax,
        0,
    )
    if ier != 0:
        raise RuntimeError("Export to Irap Ascii went wrong, code is {}".format(ier))

    del vals

    fout.close()

def _import_roff_v2(self, pfile, name):
    """Import ROFF format, version 2 (improved version)"""

    # This routine does a scan for all keywords first. Then it grabs
    # the relevant data by only reading relevant portions of the input file

    pfile = xtgeo._XTGeoCFile(pfile)

    kwords = utils.scan_keywords(pfile.fhandle, fformat="roff")

    for kwd in kwords:
        logger.info(kwd)

    # byteswap:
    byteswap = _rkwquery(pfile.fhandle, kwords, "filedata!byteswaptest", -1)

    ncol = _rkwquery(pfile.fhandle, kwords, "dimensions!nX", byteswap)
    nrow = _rkwquery(pfile.fhandle, kwords, "dimensions!nY", byteswap)
    nlay = _rkwquery(pfile.fhandle, kwords, "dimensions!nZ", byteswap)
    logger.info("Dimensions in ROFF file %s %s %s", ncol, nrow, nlay)

    # get the actual parameter:
    vals = _rarraykwquery(
        pfile.fhandle, kwords, "parameter!name!" + name, byteswap, ncol, nrow, nlay
    )

    self._values = vals
    self._name = name

    pfile.close()

def test_xtgeocfile():
    gfile = xtgeo._XTGeoCFile(TESTFILE)

    assert isinstance(gfile, xtgeo._XTGeoCFile)

    assert "Swig" in str(gfile.fhandle)

    assert gfile.close() is True

def test_xtgeocfile():
    """Test basic system file io etc functions"""

    gfile = xtgeo._XTGeoCFile(TESTFILE)
    xfile = xtgeo._XTGeoCFile(TESTNOEXISTFILE)
    yfile = xtgeo._XTGeoCFile(TESTNOEXISTFOLDER)
    gfolder = xtgeo._XTGeoCFile(TESTFOLDER)

    assert isinstance(gfile, xtgeo._XTGeoCFile)
    assert isinstance(gfile._file, pathlib.Path)
    assert gfile._memstream is False
    assert gfile._mode == "rb"
    assert gfile._delete_after is False
    assert gfile.name == os.path.abspath(TESTFILE)
    assert xfile.name == os.path.abspath(TESTNOEXISTFILE)

    # exists, check_*
    assert gfile.exists() is True
    assert gfolder.exists() is True
    assert xfile.exists() is False

    assert gfile.check_file() is True
    assert xfile.check_file() is False
    assert yfile.check_file() is False

    with pytest.raises(IOError):
        xfile.check_file(raiseerror=IOError)

    assert gfile.check_folder() is True
    assert xfile.check_folder() is True
    assert yfile.check_folder() is False

    with pytest.raises(IOError):
        yfile.check_folder(raiseerror=IOError)

    assert "Swig" in str(gfile.fhandle)
    assert gfile.close() is True

    # extensions:
    stem, suff = gfile.splitext(lower=False)
    assert stem == "REEK"
    assert suff == "EGRID"

def test_xtgeocfile_bytesio():
    with open(TESTFILE, "rb") as fin:
        stream = io.BytesIO(fin.read())

    gfile = xtgeo._XTGeoCFile(stream)

    assert isinstance(gfile, xtgeo._XTGeoCFile)

    assert "Swig" in str(gfile.fhandle)

    assert gfile.close() is True

def import_petromod_binary(self, mfile, values=True):
    """Import Petromod binary format."""
    ifile = xtgeo._XTGeoCFile(mfile)

    logger.info("Enter function %s", __name__)

    # read with mode 0, to get mx my and other metadata
    dsc, dummy = _cxtgeo.surf_import_petromod_bin(ifile.fhandle, 0, 0.0, 0, 0, 0)

    fields = dsc.split(",")

    for field in fields:
        key, value = field.split("=")
        if key == "GridNoX":
            self._ncol = int(value)
        if key == "GridNoY":
            self._nrow = int(value)
        if key == "OriginX":
            self._xori = float(value)
        if key == "OriginY":
            self._yori = float(value)
        if key == "RotationOriginX":
            rota_xori = float(value)
        if key == "RotationOriginY":
            rota_yori = float(value)
        if key == "GridStepX":
            self._xinc = float(value)
        if key == "GridStepY":
            self._yinc = float(value)
        if key == "RotationAngle":
            self._rotation = float(value)
        if key == "Undefined":
            undef = float(value)

    if self._rotation != 0.0 and (rota_xori != self._xori or rota_yori != self._yori):
        xtg.warnuser("Rotation origin and data origin do not match")

    # reread file for map values
    dsc, values = _cxtgeo.surf_import_petromod_bin(
        ifile.fhandle, 1, undef, self._ncol, self._nrow, self._ncol * self._nrow
    )

    values = np.ma.masked_greater(values, xtgeo.UNDEF_LIMIT)

    values = values.reshape(self._ncol, self._nrow)

    self.values = values
    self.filesrc = mfile

    ifile.close()

def export_petromod_binary(self, mfile, pmd_dataunits):
    """Export to petromod binary format."""
    validunits = False
    if isinstance(pmd_dataunits, tuple) and len(pmd_dataunits) == 2:
        unitd, unitz = pmd_dataunits
        if isinstance(unitd, int) and isinstance(unitz, int):
            if unitd in PMD_DATAUNITDISTANCE.keys() and unitz in PMD_DATAUNITZ.keys():
                validunits = True

            if unitd <= 0 or unitz <= 0:
                raise ValueError("Values for pmd_dataunits cannot be negative!")

    if not validunits:
        import warnings

        warnings.warn(
            "Format or values for pmd_dataunits out of range: Pair should be in "
            "ranges {} and {}".format(PMD_DATAUNITDISTANCE, PMD_DATAUNITZ),
            UserWarning,
        )

    undef = 99999

    fout = xtgeo._XTGeoCFile(mfile, mode="wb")

    dsc = "Content=Map,"
    dsc += "DataUnitDistance={},".format(unitd)
    dsc += "DataUnitZ={},".format(unitz)
    dsc += "GridNoX={},".format(self.ncol)
    dsc += "GridNoY={},".format(self.nrow)
    dsc += "GridStepX={},".format(self.xinc)
    dsc += "GridStepY={},".format(self.yinc)
    dsc += "MapType=GridMap,"
    dsc += "OriginX={},".format(self.xori)
    dsc += "OriginY={},".format(self.yori)
    dsc += "RotationAngle={},".format(self.rotation)
    dsc += "RotationOriginX={},".format(self.xori)
    dsc += "RotationOriginY={},".format(self.yori)
    dsc += "Undefined={},".format(undef)
    dsc += "Version=1.0"

    values = np.ma.filled(self.values1d, fill_value=undef)

    _cxtgeo.surf_export_petromod_bin(
        fout.fhandle,
        dsc,
        values,
    )

    fout.close()

def import_ijxyz_ascii(self, mfile):  # pylint: disable=too-many-locals
    """Import OW/DSG IJXYZ ascii format."""
    # import of seismic column system on the form:
    # 2588 1179 476782.2897888889 6564025.6954 1000.0
    # 2588 1180 476776.7181777778 6564014.5058 1000.0
    logger.debug("Read data from file... (scan for dimensions)")

    fin = xtgeo._XTGeoCFile(mfile)

    xlist = _cxtgeo.surf_import_ijxyz(fin.fhandle, 0, 1, 1, 1, 0)

    ier, ncol, nrow, _ndef, xori, yori, xinc, yinc, rot, iln, xln, val, yflip = xlist

    if ier != 0:
        fin.close()
        raise RuntimeError("Import from C is wrong...")

    # now real read mode
    xlist = _cxtgeo.surf_import_ijxyz(fin.fhandle, 1, ncol, nrow, ncol * nrow, 0)

    ier, ncol, nrow, _ndef, xori, yori, xinc, yinc, rot, iln, xln, val, yflip = xlist

    if ier != 0:
        fin.close()
        raise RuntimeError("Import from C is wrong...")

    logger.info(xlist)

    val = ma.masked_greater(val, _cxtgeo.UNDEF_LIMIT)

    self._xori = xori
    self._xinc = xinc
    self._yori = yori
    self._yinc = yinc
    self._ncol = ncol
    self._nrow = nrow
    self._rotation = rot
    self._yflip = yflip

    self._values = val.reshape((self._ncol, self._nrow))

    self._filesrc = mfile
    self._ilines = iln
    self._xlines = xln

    fin.close()

def import_irap_ascii(self, mfile):
    """Import Irap ascii format."""
    # version using swig type mapping
    logger.debug("Enter function...")

    ifile = xtgeo._XTGeoCFile(mfile)

    # read with mode 0, scan to get mx my
    xlist = _cxtgeo.surf_import_irap_ascii(ifile.fhandle, 0, 1, 0)
    nvn = xlist[1] * xlist[2]  # mx * my
    xlist = _cxtgeo.surf_import_irap_ascii(ifile.fhandle, 1, nvn, 0)

    ier, ncol, nrow, _ndef, xori, yori, xinc, yinc, rot, val = xlist

    if ier != 0:
        ifile.close()
        raise RuntimeError("Problem in {}, code {}".format(__name__, ier))

    val = np.reshape(val, (ncol, nrow), order="C")

    val = ma.masked_greater(val, xtgeo.UNDEF_LIMIT)

    if np.isnan(val).any():
        logger.info("NaN values are found, will mask...")
        val = ma.masked_invalid(val)

    yflip = 1
    if yinc < 0.0:
        yinc = yinc * -1
        yflip = -1

    self._ncol = ncol
    self._nrow = nrow
    self._xori = xori
    self._yori = yori
    self._xinc = xinc
    self._yinc = yinc
    self._yflip = yflip
    self._rotation = rot

    self._values = val

    self._filesrc = mfile
    self._ilines = np.array(range(1, ncol + 1), dtype=np.int32)
    self._xlines = np.array(range(1, nrow + 1), dtype=np.int32)

    ifile.close()

def scan_dates(pfile, maxdates=1000, dataframe=False):
    """Quick scan dates in a simulation restart file.

    Cf. grid_properties.py description
    """
    seq = _cxtgeo.new_intarray(maxdates)
    day = _cxtgeo.new_intarray(maxdates)
    mon = _cxtgeo.new_intarray(maxdates)
    yer = _cxtgeo.new_intarray(maxdates)

    local_fhandle = False
    fhandle = pfile
    if isinstance(pfile, str):
        pfile = xtgeo._XTGeoCFile(pfile)
        fhandle = pfile.fhandle
        local_fhandle = True

    nstat = _cxtgeo.grd3d_ecl_tsteps(fhandle, seq, day, mon, yer, maxdates, XTGDEBUG)

    if local_fhandle:
        pfile.close(cond=local_fhandle)

    sq = []
    da = []
    for i in range(nstat):
        sq.append(_cxtgeo.intarray_getitem(seq, i))
        dday = _cxtgeo.intarray_getitem(day, i)
        dmon = _cxtgeo.intarray_getitem(mon, i)
        dyer = _cxtgeo.intarray_getitem(yer, i)
        date = "{0:4}{1:02}{2:02}".format(dyer, dmon, dday)
        da.append(int(date))

    for item in [seq, day, mon, yer]:
        _cxtgeo.delete_intarray(item)

    zdates = list(zip(sq, da))  # list for PY3

    if dataframe:
        cols = ["SEQNUM", "DATE"]
        df = pd.DataFrame.from_records(zdates, columns=cols)
        return df

    return zdates

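# Illustrative only: a hedged usage sketch for scan_dates() above. The restart
# file name is hypothetical; the function accepts a path string or an open
# file handle and returns (SEQNUM, DATE) pairs, or a dataframe when
# dataframe=True.
#
#   zdates = scan_dates("REEK.UNRST")                    # e.g. [(0, 20010101), ...]
#   dfdates = scan_dates("REEK.UNRST", dataframe=True)   # columns SEQNUM, DATE
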
def export_zmap_ascii(self, mfile):
    """Export to ZMAP ascii format (non-rotated)."""

    # zmap can only deal with non-rotated formats; hence make a copy
    # of the instance and derotate that prior to export, so that the
    # original instance is unchanged

    fout = xtgeo._XTGeoCFile(mfile, mode="wb")

    scopy = self.copy()

    if abs(scopy.rotation) > 1.0e-20:
        scopy.unrotate()

    zmin = scopy.values.min()
    zmax = scopy.values.max()

    yinc = scopy._yinc * scopy._yflip

    vals = scopy.get_values1d(order="F", asmasked=False, fill_value=xtgeo.UNDEF)

    ier = _cxtgeo.surf_export_zmap_ascii(
        fout.fhandle,
        scopy._ncol,
        scopy._nrow,
        scopy._xori,
        scopy._yori,
        scopy._xinc,
        yinc,
        vals,
        zmin,
        zmax,
        0,
    )
    if ier != 0:
        raise RuntimeError("Export to ZMAP Ascii went wrong, code is {}".format(ier))

    del scopy

    fout.close()

def export_storm_binary(self, mfile):
    """Export to Storm binary format (non-rotated)."""

    # storm can only deal with non-rotated formats; hence make a copy
    # of the instance and derotate that prior to export, so that the
    # original instance is unchanged

    fout = xtgeo._XTGeoCFile(mfile, mode="wb")

    scopy = self.copy()

    if abs(scopy.rotation) > 1.0e-20:
        scopy.unrotate()

    zmin = scopy.values.min()
    zmax = scopy.values.max()

    yinc = scopy._yinc * scopy._yflip

    ier = _cxtgeo.surf_export_storm_bin(
        fout.fhandle,
        scopy._ncol,
        scopy._nrow,
        scopy._xori,
        scopy._yori,
        scopy._xinc,
        yinc,
        scopy.get_zval(),
        zmin,
        zmax,
        0,
    )
    if ier != 0:
        raise RuntimeError("Export to Storm binary went wrong, code is {}".format(ier))

    del scopy

    fout.close()

def scan_keywords(pfile, fformat="xecl", maxkeys=100000, dataframe=False, dates=False):
    """Quick scan of keywords in Eclipse binary restart/init/... file,
    or ROFF binary files.

    Cf. grid_properties.py description
    """
    local_fhandle = False
    fhandle = pfile
    if isinstance(pfile, str):
        pfile = xtgeo._XTGeoCFile(pfile)
        local_fhandle = True
        fhandle = pfile.fhandle

    if fformat == "xecl":
        if dates:
            data = _scan_ecl_keywords_w_dates(
                fhandle, maxkeys=maxkeys, dataframe=dataframe
            )
        else:
            data = _scan_ecl_keywords(fhandle, maxkeys=maxkeys, dataframe=dataframe)

    else:
        data = _scan_roff_keywords(fhandle, maxkeys=maxkeys, dataframe=dataframe)

    if local_fhandle:
        pfile.close(cond=local_fhandle)

    return data

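# Illustrative only: a hedged usage sketch for scan_keywords() above. The file
# names are hypothetical. With dataframe=False each entry is a
# (keyword, type, length, bytepos) tuple; with dates=True a date column is
# added, which is how import_ecl_output below consumes the result.
#
#   kwlist = scan_keywords("REEK.EGRID", fformat="xecl")
#   kwdf = scan_keywords("REEK.UNRST", fformat="xecl", dataframe=True, dates=True)
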
def _export_irap_binary_cxtgeo(self, mfile):
    """Export to Irap RMS binary format."""
    fout = xtgeo._XTGeoCFile(mfile, mode="wb")

    vals = self.get_values1d(fill_value=UNDEF_MAP_IRAPB, order="F")
    ier = _cxtgeo.surf_export_irap_bin(
        fout.fhandle,
        self._ncol,
        self._nrow,
        self._xori,
        self._yori,
        self._xinc,
        self._yflip * self._yinc,
        self._rotation,
        vals,
        0,
    )

    if ier != 0:
        raise RuntimeError("Export to Irap Binary went wrong, code is {}".format(ier))

    fout.close()

def import_ecl_egrid(self, gfile):
    """Import, private to this routine."""
    eclfile = xtgeo._XTGeoCFile(gfile)

    # scan file for property
    logger.info("Make kwlist by scanning")
    kwlist = utils.scan_keywords(
        eclfile.fhandle, fformat="xecl", maxkeys=1000, dataframe=False, dates=False
    )
    bpos = {}
    for name in ("COORD", "ZCORN", "ACTNUM", "MAPAXES"):
        bpos[name] = -1  # initially

    self._dualporo = False
    for kwitem in kwlist:
        kwname, kwtype, kwlen, kwbyte = kwitem
        if kwname == "FILEHEAD":
            # read FILEHEAD record:
            filehead = eclbin_record(
                eclfile.fhandle, "FILEHEAD", kwlen, kwtype, kwbyte
            )
            dualp = filehead[5].tolist()
            logger.info("Dual porosity flag is %s", dualp)
            if dualp == 1:
                self._dualporo = True
                self._dualperm = False
            elif dualp == 2:
                self._dualporo = True
                self._dualperm = True
        elif kwname == "GRIDHEAD":
            # read GRIDHEAD record:
            gridhead = eclbin_record(
                eclfile.fhandle, "GRIDHEAD", kwlen, kwtype, kwbyte
            )
            ncol, nrow, nlay = gridhead[1:4].tolist()
            logger.info("%s %s %s", ncol, nrow, nlay)
        elif kwname in ("COORD", "ZCORN", "ACTNUM"):
            bpos[kwname] = kwbyte
        elif kwname == "MAPAXES":  # not always present
            bpos[kwname] = kwbyte

    self._ncol = ncol
    self._nrow = nrow
    self._nlay = nlay

    logger.info("Grid dimensions in EGRID file: %s %s %s", ncol, nrow, nlay)

    # allocate dimensions:
    ntot = self._ncol * self._nrow * self._nlay
    ncoord = (self._ncol + 1) * (self._nrow + 1) * 2 * 3
    nzcorn = self._ncol * self._nrow * (self._nlay + 1) * 4

    self._p_coord_v = _cxtgeo.new_doublearray(ncoord)
    self._p_zcorn_v = _cxtgeo.new_doublearray(nzcorn)
    self._p_actnum_v = _cxtgeo.new_intarray(ntot)
    p_nact = _cxtgeo.new_longpointer()

    option = 0
    if self._dualporo:
        option = 1

    ier = _cxtgeo.grd3d_imp_ecl_egrid(
        eclfile.fhandle,
        self._ncol,
        self._nrow,
        self._nlay,
        bpos["MAPAXES"],
        bpos["COORD"],
        bpos["ZCORN"],
        bpos["ACTNUM"],
        self._p_coord_v,
        self._p_zcorn_v,
        self._p_actnum_v,
        p_nact,
        option,
    )

    if ier == -1:
        raise RuntimeError("Error code -1 from _cxtgeo.grd3d_imp_ecl_egrid")

    self._nactive = _cxtgeo.longpointer_value(p_nact)

    # in case of DUAL PORO/PERM ACTNUM will be 0..3; need to convert
    if self._dualporo:
        self._dualactnum = self.get_actnum(name="DUALACTNUM")
        acttmp = self._dualactnum.copy()
        acttmp.values[acttmp.values >= 1] = 1
        self.set_actnum(acttmp)

    eclfile.close()

def import_ecl_bgrdecl(self, gfile):
    """Import binary files with GRDECL layout"""
    local_fhandle = False
    fhandle = gfile
    if isinstance(gfile, str):
        local_fhandle = True
        gfile = xtgeo._XTGeoCFile(gfile)
        fhandle = gfile.fhandle

    # scan file for properties; these have similar binary format as e.g. EGRID
    logger.info("Make kwlist by scanning")
    kwlist = utils.scan_keywords(
        fhandle, fformat="xecl", maxkeys=1000, dataframe=False, dates=False
    )
    bpos = {}
    needkwlist = ["SPECGRID", "COORD", "ZCORN", "ACTNUM"]
    optkwlist = ["MAPAXES"]
    for name in needkwlist + optkwlist:
        bpos[name] = -1  # initially

    for kwitem in kwlist:
        kwname, kwtype, kwlen, kwbyte = kwitem
        if kwname == "SPECGRID":
            # read grid geometry record:
            specgrid = eclbin_record(fhandle, "SPECGRID", kwlen, kwtype, kwbyte)
            ncol, nrow, nlay = specgrid[0:3].tolist()
            logger.info("%s %s %s", ncol, nrow, nlay)
        elif kwname in needkwlist:
            bpos[kwname] = kwbyte
        elif kwname == "MAPAXES":  # not always present
            bpos[kwname] = kwbyte

    self._ncol = ncol
    self._nrow = nrow
    self._nlay = nlay

    logger.info("Grid dimensions in binary GRDECL file: %s %s %s", ncol, nrow, nlay)

    # allocate dimensions:
    ntot = self._ncol * self._nrow * self._nlay
    ncoord = (self._ncol + 1) * (self._nrow + 1) * 2 * 3
    nzcorn = self._ncol * self._nrow * (self._nlay + 1) * 4

    self._p_coord_v = _cxtgeo.new_doublearray(ncoord)
    self._p_zcorn_v = _cxtgeo.new_doublearray(nzcorn)
    self._p_actnum_v = _cxtgeo.new_intarray(ntot)
    p_nact = _cxtgeo.new_longpointer()

    ier = _cxtgeo.grd3d_imp_ecl_egrid(
        fhandle,
        self._ncol,
        self._nrow,
        self._nlay,
        bpos["MAPAXES"],
        bpos["COORD"],
        bpos["ZCORN"],
        bpos["ACTNUM"],
        self._p_coord_v,
        self._p_zcorn_v,
        self._p_actnum_v,
        p_nact,
        0,
    )
    if ier == -1:
        raise RuntimeError("Error code -1 from _cxtgeo.grd3d_imp_ecl_egrid")

    self._nactive = _cxtgeo.longpointer_value(p_nact)

    if local_fhandle:
        gfile.close(cond=local_fhandle)

def import_ecl_output(props, pfile, names=None, dates=None, grid=None, namestyle=0):
    # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """Import a set of properties from an Eclipse binary output file into *props*."""
    logger.debug("'namestyle' is %s (not in use)", namestyle)

    if not grid:
        raise ValueError("Grid Geometry object is missing")

    if not names:
        raise ValueError("Name list is empty (None)")

    local_fhandle = False
    fhandle = pfile
    if not isinstance(pfile, xtgeo._XTGeoCFile):
        pfile = xtgeo._XTGeoCFile(pfile)
        fhandle = pfile.fhandle
        local_fhandle = True

    # scan valid keywords
    kwlist = utils.scan_keywords(
        fhandle, fformat="xecl", maxkeys=100000, dataframe=True, dates=True
    )

    kwxlist = list(kwlist.itertuples(index=False, name=None))

    usenames = list()

    if names == "all":
        nact = grid.nactive
        ntot = grid.ntotal

        for kw in kwxlist:
            kwname, _tmp1, nlen, _bs, _date = kw
            if nlen in (nact, ntot):
                usenames.append(kwname)
    else:
        usenames = list(names)

    logger.info("NAMES are %s", usenames)

    lookfornames = list(set(usenames))

    possiblekw = []
    for name in lookfornames:
        namefound = False
        for kwitem in kwxlist:
            possiblekw.append(kwitem[0])
            if name == kwitem[0]:
                namefound = True
        if not namefound:
            if name in ("SOIL", "SGAS", "SWAT"):
                pass  # check for sat's later; may be derived based on fluid system
            else:
                raise ValueError(
                    "Keyword {} not found. Possible list: {}".format(name, possiblekw)
                )

    # check valid dates, and remove invalid entries (allowing that user
    # can be a bit sloppy on DATES)

    validdates = [None]
    if dates:
        dlist = utils.scan_dates(fhandle)

        validdates = []
        alldates = []
        for date in dates:
            for ditem in dlist:
                alldates.append(str(ditem[1]))
                if str(date) == str(ditem[1]):
                    validdates.append(date)

        if not validdates:
            msg = "No valid dates given (dates: {} vs {})".format(dates, alldates)
            xtg.error(msg)
            raise ValueError(msg)

        if len(dates) > len(validdates):
            invalidddates = list(set(dates).difference(validdates))
            msg = (
                "In file {}: Some dates not found: {}, but will continue "
                "with dates: {}".format(pfile, invalidddates, validdates)
            )
            xtg.warn(msg)
            # raise DateNotFoundError(msg)

    use2names = list(usenames)  # to make copy

    logger.info("Use names: %s", use2names)
    logger.info("Valid dates: %s", validdates)

    # now import each property
    firstproperty = True

    for date in validdates:
        # xprop = dict()
        # soil_ok = False

        for name in use2names:
            logger.info("Get %s", name)
            if date is None:
                date = None
                propname = name
                etype = 1
            else:
                propname = name + "_" + str(date)
                etype = 5

            prop = GridProperty()

            # use a private GridProperty function here, for convenience
            # (since filehandle)
            _gridprop_import_eclrun.import_eclbinary(
                prop,
                fhandle,
                name=name,
                date=date,
                grid=grid,
                etype=etype,
                _kwlist=kwlist,
            )
            if firstproperty:
                ncol = prop.ncol
                nrow = prop.nrow
                nlay = prop.nlay
                firstproperty = False

            logger.info("Appended property %s", propname)
            props._names.append(propname)
            props._props.append(prop)

    props._ncol = ncol
    props._nrow = nrow
    props._nlay = nlay

    if validdates[0] != 0:
        props._dates = validdates

    if local_fhandle:
        pfile.close()

def import_ecl_grdecl(self, gfile):
    """Import grid from ASCII Eclipse GRDECL format (via a temporary cleaned file)."""
    # make a temporary file
    fds, tmpfile = mkstemp(prefix="tmpxtgeo")
    os.close(fds)

    with open(gfile) as oldfile, open(tmpfile, "w") as newfile:
        for line in oldfile:
            if not (re.search(r"^--", line) or re.search(r"^\s+$", line)):
                newfile.write(line)

    # find ncol nrow nlay
    mylist = []
    found = False
    with open(tmpfile) as xfile:
        for line in xfile:
            if found:
                logger.info(line)
                mylist = line.split()
                break
            if re.search(r"^SPECGRID", line):
                found = True

    if not found:
        logger.error("SPECGRID not found. Nothing imported!")
        return

    self._ncol, self._nrow, self._nlay = (
        int(mylist[0]),
        int(mylist[1]),
        int(mylist[2]),
    )

    logger.info(
        "NX NY NZ in grdecl file: %s %s %s", self._ncol, self._nrow, self._nlay
    )

    ncoord, nzcorn, ntot = self.vectordimensions

    logger.info("Reading...")

    self._coordsv = np.zeros(ncoord, dtype=np.float64)
    self._zcornsv = np.zeros(nzcorn, dtype=np.float64)
    self._actnumsv = np.zeros(ntot, dtype=np.int32)
    ptr_num_act = _cxtgeo.new_intpointer()

    eclfile = xtgeo._XTGeoCFile(tmpfile)
    _cxtgeo.grd3d_import_grdecl(
        eclfile.fhandle,
        self._ncol,
        self._nrow,
        self._nlay,
        self._coordsv,
        self._zcornsv,
        self._actnumsv,
        ptr_num_act,
    )

    # close and remove tmpfile
    eclfile.close()
    os.remove(tmpfile)

    nact = _cxtgeo.intpointer_value(ptr_num_act)

    logger.info("Number of active cells: %s", nact)
    self._subgrids = None

def import_eclbinary(
    self, pfile, name=None, etype=1, date=None, grid=None, fracture=False, _kwlist=None
):
    # if pfile is a file, then the file is opened/closed here; otherwise, the
    # "outer" routine must handle that
    local_fhandle = False
    fhandle = pfile
    if isinstance(pfile, str):
        local_fhandle = True
        pfile = xtgeo._XTGeoCFile(pfile)
        fhandle = pfile.fhandle

    status = 0

    logger.info("Import ECL binary, name requested is %s", name)

    # scan file for properties byte positions etc
    if _kwlist is None:
        logger.info("Make kwlist, scan keywords")
        kwlist = utils.scan_keywords(
            fhandle, fformat="xecl", maxkeys=100000, dataframe=True, dates=True
        )
    else:
        kwlist = _kwlist

    metadata = _import_eclbinary_meta(self, fhandle, kwlist, etype, date, grid)

    date = metadata["DATE"]

    # Importing phases is a challenge. It depends on the fluid system and the
    # simulator; e.g. typically in a 3 phase system, only SGAS and SWAT are
    # given while SOIL must be computed, if E100. E300 and IX may behave
    # differently...

    if name == "SGAS":
        status = _import_sgas(self, fhandle, kwlist, metadata, grid, date, fracture)

    elif name == "SOIL":
        status = _import_soil(self, fhandle, kwlist, metadata, grid, date, fracture)

    elif name == "SWAT":
        status = _import_swat(self, fhandle, kwlist, metadata, grid, date, fracture)

    if status == 0:
        name = name.replace("{__}", "")
        logger.info("Importing %s", name)
        _import_eclbinary_checks1(self, grid)

        kwname, kwlen, kwtype, kwbyte = _import_eclbinary_checks2(
            kwlist, name, etype, date
        )

        if grid._dualporo:  # _dualporo shall always be True if _dualperm is True
            _import_eclbinary_dualporo(
                self,
                grid,
                fhandle,
                kwname,
                kwlen,
                kwtype,
                kwbyte,
                name,
                date,
                etype,
                fracture,
            )
        else:
            _import_eclbinary_prop(
                self, grid, fhandle, kwname, kwlen, kwtype, kwbyte, name, date, etype
            )

    if local_fhandle and not pfile.close(cond=local_fhandle):
        raise RuntimeError("Error in closing file handle for binary Eclipse file")

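# Illustrative only: a hedged sketch of how import_eclbinary() above is driven
# from import_ecl_output(): etype=1 is used for properties without a date
# (INIT-style), and etype=5 for restart properties keyed by a YYYYMMDD date.
# The file path, grid object and property instance below are hypothetical.
#
#   prop = GridProperty()
#   import_eclbinary(prop, "REEK.UNRST", name="SWAT", etype=5, date=20010101,
#                    grid=grd)
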