def _get_files_as_regularsurfaces_multiprocess(option=1):
    """Load regular surfaces in parallel with a process pool.

    Spawns one worker per thread slot; ``option == 1`` uses the
    file-based reader, any other value the index-based reader.
    Workers that raise are logged and skipped.
    """
    loaded = []
    with concurrent.futures.ProcessPoolExecutor(max_workers=NTHREAD) as executor:
        reader = _get_regsurff if option == 1 else _get_regsurfi
        pending = {executor.submit(reader, num): num for num in range(NTHREAD)}
        for task in concurrent.futures.as_completed(pending):
            try:
                result = task.result()
            except Exception as exc:  # pylint: disable=broad-except
                logger.error("Error: %s", exc)
            else:
                loaded.append(result)
    return xtgeo.Surfaces(loaded)
def test_create_init_filelist():
    """Create simple Surfaces instance, initiate with a list of files."""
    surfaces = xtgeo.Surfaces([TESTSET1A, TESTSET1B])
    assert isinstance(surfaces, xtgeo.Surfaces)
    assert isinstance(surfaces.surfaces[0], xtgeo.RegularSurface)
def test_more_statistics():
    """Find the mean etc measures of the surfaces"""
    base = xtgeo.RegularSurface(TESTSET1A)
    base.values *= 0.0
    bmean = base.values.mean()

    # build 101 constant maps ranging from 0 to 100
    stack = [base]
    for level in range(1, 101):
        shifted = base.copy()
        shifted.values += float(level)
        stack.append(shifted)

    res = xtgeo.Surfaces(stack).statistics()

    # theoretical stdev over 101 samples, N-1 in the denominator
    spread = math.sqrt(sum((float(k) - 50.0) ** 2 for k in range(101)) / 100.0)

    tsetup.assert_almostequal(res["mean"].values.mean(), bmean + 50.0, 0.0001)
    tsetup.assert_almostequal(res["std"].values.mean(), spread, 0.0001)
def test_create_init_objectlist():
    """Create simple Surfaces instance, initiate with a list of objects."""
    upper = xtgeo.RegularSurface(TESTSET1A)
    lower = xtgeo.RegularSurface(TESTSET1B)
    collection = xtgeo.Surfaces([upper, lower])
    assert isinstance(collection, xtgeo.Surfaces)
    assert isinstance(collection.surfaces[0], xtgeo.RegularSurface)
def test_create_init_mixlist():
    """Create simple Surfaces instance, initiate with a list of files"""
    upper = xtgeo.RegularSurface(TESTSET1A)
    # mixed input: an object followed by a file path
    collection = xtgeo.Surfaces([upper, TESTSET1B])
    assert isinstance(collection, xtgeo.Surfaces)
    assert isinstance(collection.surfaces[0], xtgeo.RegularSurface)
def _create_statistical_surface( self, address: StatisticalSurfaceAddress ) -> Optional[xtgeo.RegularSurface]: surf_fns: List[str] = self._locate_simulated_surfaces( attribute=address.attribute, name=address.name, datestr=address.datestr if address.datestr is not None else "", realizations=address.realizations, ) if len(surf_fns) == 0: LOGGER.warning(f"No input surfaces found for statistical surface {address}") return None timer = PerfTimer() surfaces = xtgeo.Surfaces(surf_fns) et_load_s = timer.lap_s() surf_count = len(surfaces.surfaces) if surf_count == 0: LOGGER.warning( f"Could not load input surfaces for statistical surface {address}" ) return None # print("########################################################") # first_surf = surfaces.surfaces[0] # for surf in surfaces.surfaces: # print( # surf.dimensions, # surf.xinc, # surf.yinc, # surf.xori, # surf.yori, # surf.rotation, # surf.filesrc, # ) # print("########################################################") # Suppress numpy warnings when surfaces have undefined z-values with warnings.catch_warnings(): warnings.filterwarnings("ignore", "All-NaN slice encountered") warnings.filterwarnings("ignore", "Mean of empty slice") warnings.filterwarnings("ignore", "Degrees of freedom <= 0 for slice") stat_surface = _calc_statistic_across_surfaces(address.statistic, surfaces) et_calc_s = timer.lap_s() LOGGER.debug( f"Created statistical surface in: {timer.elapsed_s():.2f}s (" f"load={et_load_s:.2f}s, calc={et_calc_s:.2f}s), " f"[#surfaces={surf_count}, stat={address.statistic}, " f"attr={address.attribute}, name={address.name}, date={address.datestr}]" ) return stat_surface
def test_statistics():
    """Find the mean etc measures of the surfaces."""
    collection = xtgeo.Surfaces([TESTSET1A, TESTSET1B])
    res = collection.statistics()

    res["mean"].to_file(join(TMPD, "surf_mean.gri"))
    res["std"].to_file(join(TMPD, "surf_std.gri"))

    assert_almostequal(res["mean"].values.mean(), 1720.5029, 0.0001)
    assert_almostequal(res["std"].values.min(), 3.7039, 0.0001)
def test_create():
    """Create simple Surfaces instance."""
    logger.info("Simple case...")

    upper = xtgeo.RegularSurface(TESTSET1A)
    lower = xtgeo.RegularSurface(TESTSET1B)

    collection = xtgeo.Surfaces()
    collection.surfaces = [upper, lower]
    collection.describe()

    assert isinstance(collection, xtgeo.Surfaces)
    assert isinstance(collection.surfaces[0], xtgeo.RegularSurface)
def test_get_surfaces_from_3dgrid():
    """Create surfaces from a 3D grid."""
    grid = xtgeo.Grid(TESTSETG1)
    surfs = xtgeo.Surfaces()
    surfs.from_grid3d(grid, rfactor=2)
    surfs.describe()

    deepest = surfs.surfaces[-1]
    assert_almostequal(deepest.values.mean(), 1742.28, 0.04)
    assert_almostequal(deepest.values.min(), 1589.58, 0.04)
    assert_almostequal(deepest.values.max(), 1977.29, 0.04)
    assert_almostequal(surfs.surfaces[0].values.mean(), 1697.02, 0.04)

    for srf in surfs.surfaces:
        srf.to_file(join(TMPD, srf.name + ".gri"))
def save_surface(fns, statistic) -> io.BytesIO:
    """Compute the named statistic over the input surfaces and return it
    serialized as JSON bytes; an empty/unknown request yields a default
    RegularSurface."""
    surfaces = xtgeo.Surfaces(fns)
    known = ["Mean", "StdDev", "Min", "Max", "P10", "P90"]
    if surfaces.surfaces and statistic in known:
        # Suppress numpy warnings when surfaces have undefined z-values
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", "All-NaN slice encountered")
            warnings.filterwarnings("ignore", "Mean of empty slice")
            warnings.filterwarnings("ignore", "Degrees of freedom <= 0 for slice")
            surface = calculate_statistic(surfaces, statistic)
    else:
        surface = xtgeo.RegularSurface()
    return io.BytesIO(surface_to_json(surface).encode())
def test_more_statistics():
    """Find the mean etc measures of the surfaces."""
    base = xtgeo.RegularSurface(TESTSET1A)
    base.values *= 0.0
    bmean = base.values.mean()

    # build 101 constant maps ranging from 0 to 100
    stack = [base]
    for level in range(1, 101):
        shifted = base.copy()
        shifted.values += float(level)
        stack.append(shifted)

    res = xtgeo.Surfaces(stack).statistics()

    # theoretical stdev over 101 samples, N-1 in the denominator
    spread = math.sqrt(sum((float(k) - 50.0) ** 2 for k in range(101)) / 100.0)

    assert_almostequal(res["mean"].values.mean(), bmean + 50.0, 0.0001)
    assert_almostequal(res["std"].values.mean(), spread, 0.0001)

    # percentile statistics on a stack of identical constant surfaces
    small = xtgeo.RegularSurface()
    so2 = xtgeo.Surfaces()
    for _ in range(10):
        bumped = small.copy()
        bumped.values += 8.76543
        so2.append([bumped])

    res2 = so2.statistics(percentiles=[10, 50])
    assert res2["p10"].values.mean() == pytest.approx(16.408287142, 0.001)
def test_regsurf_aggregated_diffdata(fmurun_w_casemetadata, rmsglobalconfig, regsurf):
    """Test surfaces, where input is diffdata."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
    )

    # create "forward" files
    exported_files = []
    for real in range(10):
        work = regsurf.copy()
        work.values += float(real)
        exported_files.append(
            edata.export(
                work,
                name="mymap_" + str(real),
                realization=real,
                timedata=[[20300201], [19990204]],
            )
        )

    # next task is to do an aggradation, and now the metadata already exists
    # per input element which shall be re-used
    surfs = xtgeo.Surfaces()
    metas = []
    for mapfile in exported_files:
        loaded = xtgeo.surface_from_file(mapfile)
        meta = dataio.read_metadata(mapfile)
        metas.append(meta)
        surfs.append([loaded])

    aggregated = surfs.statistics()
    logger.info("Aggr. mean is %s", aggregated["mean"].values.mean())  # shall be 1238.5

    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd",
        verbosity="INFO",
        aggregation_id="789politipoliti",
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.info("New metadata:\n%s", utils.prettyprint_dict(newmeta))
def test_surfaces_apply():
    """Apply numpy reductions across a stack of shifted surfaces."""
    base = xtgeo.RegularSurface(TESTSET1A)
    base.describe()
    base.values *= 0.0
    bmean = base.values.mean()

    stack = [base]
    for level in range(1, 101):
        shifted = base.copy()
        shifted.values += float(level)
        stack.append(shifted)

    so = xtgeo.Surfaces(stack)

    mean_surf = so.apply(np.nanmean)
    tsetup.assert_almostequal(mean_surf.values.mean(), bmean + 50.0, 0.0001)

    p10_surf = so.apply(np.nanpercentile, 10, axis=0, interpolation="nearest")
    tsetup.assert_almostequal(p10_surf.values.mean(), bmean + 10.0, 0.0001)
def save_statistical_surface(fns: List[str], calculation: str) -> io.BytesIO:
    """Wrapper function to store a calculated surface as BytesIO"""
    surfaces = xtgeo.Surfaces(fns)
    supported = ["Mean", "StdDev", "Min", "Max", "P10", "P90"]
    if surfaces.surfaces and calculation in supported:
        # Suppress numpy warnings when surfaces have undefined z-values
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", "All-NaN slice encountered")
            warnings.filterwarnings("ignore", "Mean of empty slice")
            warnings.filterwarnings("ignore", "Degrees of freedom <= 0 for slice")
            surface = get_statistical_surface(surfaces, calculation)
    else:
        # 1's as input is required
        surface = xtgeo.RegularSurface(ncol=1, nrow=1, xinc=1, yinc=1)
    return io.BytesIO(surface_to_json(surface).encode())
def save_surface(fns, statistic) -> io.BytesIO:
    """Reduce the input surfaces with the named numpy statistic and return
    the result serialized as JSON bytes; an empty input or unknown statistic
    yields a default RegularSurface."""
    surfaces = xtgeo.Surfaces(fns)
    # dispatch table: statistic name -> (reducer, extra positional args)
    reducers = {
        "Mean": (np.nanmean, ()),
        "StdDev": (np.nanstd, ()),
        "Min": (np.nanmin, ()),
        "Max": (np.nanmax, ()),
        "P10": (np.nanpercentile, (10,)),
        "P90": (np.nanpercentile, (90,)),
    }
    if surfaces.surfaces and statistic in reducers:
        func, extra = reducers[statistic]
        surface = surfaces.apply(func, *extra, axis=0)
    else:
        surface = xtgeo.RegularSurface()
    return io.BytesIO(surface_to_json(surface).encode())
def save_statistical_surface_no_store(
    fns: List[str], calculation: Optional[str] = "Mean"
) -> io.BytesIO:
    """Wrapper function to store a calculated surface as BytesIO"""
    stored_paths = [get_stored_surface_path(fn) for fn in fns]
    surfaces = xtgeo.Surfaces(stored_paths)
    supported = ["Mean", "StdDev", "Min", "Max", "P10", "P90"]
    if surfaces.surfaces and calculation in supported:
        # Suppress numpy warnings when surfaces have undefined z-values
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", "All-NaN slice encountered")
            warnings.filterwarnings("ignore", "Mean of empty slice")
            warnings.filterwarnings("ignore", "Degrees of freedom <= 0 for slice")
            surface = get_statistical_surface(surfaces, calculation)
    else:
        # 1's as input is required
        surface = xtgeo.RegularSurface(ncol=1, nrow=1, xinc=1, yinc=1)
    stream = io.BytesIO()
    surface.to_file(stream, fformat="irap_binary")
    return stream
def get_surfaces(fns: List[str]) -> xtgeo.Surfaces:
    """Build a Surfaces collection from the given file names."""
    surface_collection = xtgeo.Surfaces(fns)
    return surface_collection
def test_regsurf_aggregated(fmurun_w_casemetadata, rmsglobalconfig, regsurf):
    """Test generating aggragated metadata for a surface, where input has metadata."""
    logger.info("Active folder is %s", fmurun_w_casemetadata)
    os.chdir(fmurun_w_casemetadata)

    edata = dataio.ExportData(
        config=rmsglobalconfig,  # read from global config
        verbosity="INFO",
    )

    # create "forward" files
    exported_files = []
    for real in range(1):  # TODO! 10
        work = regsurf.copy()
        work.values += float(real)
        exported_files.append(
            edata.export(work, name="mymap_" + str(real), realization=real)
        )

    # next task is to do an aggradation, and now the metadata already exists
    # per input element which shall be re-used
    surfs = xtgeo.Surfaces()
    metas = []
    for mapfile in exported_files:
        loaded = xtgeo.surface_from_file(mapfile)
        meta = dataio.read_metadata(mapfile)
        print(utils.prettyprint_dict(meta))
        metas.append(meta)
        surfs.append([loaded])

    aggregated = surfs.statistics()
    logger.info("Aggr. mean is %s", aggregated["mean"].values.mean())  # shall be 1238.5

    # an explicit aggregation id shall be carried through unchanged
    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd",
        verbosity="INFO",
        aggregation_id="1234",
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.debug("New metadata:\n%s", utils.prettyprint_dict(newmeta))
    assert newmeta["fmu"]["aggregation"]["id"] == "1234"

    # let aggregation input True generate hash
    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd2",
        verbosity="INFO",
        aggregation_id=True,
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.debug("New metadata:\n%s", utils.prettyprint_dict(newmeta))
    assert newmeta["fmu"]["aggregation"]["id"] != "1234"
    assert newmeta["fmu"]["aggregation"]["id"] is not True

    # let aggregation input None generate a missing key
    aggdata = dataio.AggregatedData(
        configs=metas,
        operation="mean",
        name="myaggrd2",
        verbosity="INFO",
        aggregation_id=None,
    )
    newmeta = aggdata.generate_aggregation_metadata(aggregated["mean"])
    logger.debug("New metadata:\n%s", utils.prettyprint_dict(newmeta))
    assert "id" not in newmeta["fmu"]["aggregation"]