Example #1
def test_filedescriptors():
    """Test that filedescriptors are properly closed"""

    fd_dir = Path("/proc/") / str(os.getpid()) / "fd"
    if not fd_dir.exists():
        print("Counting file descriptors on non-Linux not supported")
        return

    pre_fd_count = len(list(fd_dir.glob("*")))

    eclfiles = EclFiles(EIGHTCELLS)
    # No opened files yet:
    assert len(list(fd_dir.glob("*"))) == pre_fd_count

    eclfiles.close()
    # No change, no files to close:
    assert len(list(fd_dir.glob("*"))) == pre_fd_count

    eclfiles.get_egrid()
    # This should not leave any file descriptor open
    assert len(list(fd_dir.glob("*"))) == pre_fd_count

    eclfiles.get_initfile()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._initfile is not None
    eclfiles.close()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._initfile is None

    eclfiles.get_rstfile()
    # Automatically closed by libecl
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._rstfile is not None
    eclfiles.close()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._rstfile is None

    eclfiles.get_eclsum()
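    # The summary reader appears to hold one file descriptor open until close()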
    assert len(list(fd_dir.glob("*"))) == pre_fd_count + 1
    eclfiles.close()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count

    eclfiles.get_egridfile()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._egridfile is not None
    eclfiles.close()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._egridfile is None

    eclfiles.get_rftfile()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._rftfile is not None
    eclfiles.close()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._rftfile is None

    eclfiles.get_ecldeck()
    # This should not leave any file descriptor open
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
Example #2
def check_applicability(eclfiles: ecl2df.EclFiles) -> None:
    """Check that the input is relevant for usage with check_swatinit. This
    function may raise exceptions, SystemExit or only give warnings"""

    deck = eclfiles.get_ecldeck()

    init = eclfiles.get_initfile()
    if (
        "SWATINIT" not in [initheader[0] for initheader in init.headers]
        and "SWATINIT" not in deck
    ):
        logger.warning(
            "INIT-file/deck does not have SWATINIT, this tool has limited use."
        )

    if "RPTRST" not in deck:
        logger.warning(
            "RPTRST not found in DATA-file, UNRST file is expected to be missing"
        )

    try:
        eclfiles.get_rstfile()
    except FileNotFoundError as exception:
        if "UNIFOUT" not in deck:
            sys.exit(
                "Only unified RESTARTs are supported. Add UNIFOUT to your DATA file."
            )
        logger.error(str(exception))
        sys.exit(
            "No UNRST file found. This is required to get the initial water saturation"
        )
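
A minimal usage sketch for check_applicability; the DATA-file path below is hypothetical:

import ecl2df

eclfiles = ecl2df.EclFiles("MYMODEL.DATA")  # hypothetical Eclipse DATA file
check_applicability(eclfiles)  # warns, or exits via SystemExit, if prerequisites are missing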
Example #3
def pvt_main(args) -> None:
    """Entry-point for module, for command line utility for Eclipse to CSV"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    logger.info("Parsed %s", args.DATAFILE)
    deck = eclfiles.get_ecldeck()
    if "TABDIMS" in deck:
        # Things are easier when a full deck with correct TABDIMS
        # is supplied:
        pvt_df = df(deck, keywords=args.keywords)
    else:
        # When TABDIMS is not present, the code will try to infer
        # the number of saturation functions; this is necessarily
        # more error-prone, and it needs a string as input.
        stringdeck = Path(args.DATAFILE).read_text()
        pvt_df = df(stringdeck, keywords=args.keywords)
    if "PVTNUM" in pvt_df and "KEYWORD" in pvt_df:
        pvtnums = str(len(pvt_df["PVTNUM"].unique()))
        keywords = str(pvt_df["KEYWORD"].unique())
    else:
        pvtnums = "-"
        keywords = "-"
    common.write_dframe_stdout_file(
        pvt_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Unique PVTNUMs: {pvtnums}, PVT keywords: {keywords}",
    )
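
A minimal sketch of calling pvt_main programmatically; the namespace attributes (DATAFILE, keywords, output, verbose) are inferred from the function body and may not match the actual command-line parser:

import argparse

args = argparse.Namespace(
    DATAFILE="MYMODEL.DATA",  # hypothetical Eclipse DATA file
    keywords=None,  # let the module select all supported PVT keywords
    output="pvt.csv",
    verbose=True,
)
pvt_main(args)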
Example #4
def test_comp2df():
    """Test that dataframes are produced"""
    eclfiles = EclFiles(EIGHTCELLS)
    compdfs = compdat.deck2dfs(eclfiles.get_ecldeck())

    assert not compdfs["COMPDAT"].empty
    assert not compdfs["WELSEGS"].empty
    assert not compdfs["COMPSEGS"].empty
    assert not compdfs["COMPDAT"].columns.empty
Example #5
def test_comp2df():
    """Test that dataframes are produced"""
    eclfiles = EclFiles(DATAFILE)
    compdfs = compdat.deck2dfs(eclfiles.get_ecldeck())

    assert not compdfs["COMPDAT"].empty
    assert compdfs["WELSEGS"].empty  # REEK demo does not include multisegment wells
    assert compdfs["COMPSEGS"].empty
    assert len(compdfs["COMPDAT"].columns)
Example #6
def faults_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args))
    eclfiles = EclFiles(args.DATAFILE)
    deck = eclfiles.get_ecldeck()
    faults_df = df(deck)
    write_dframe_stdout_file(
        faults_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Wrote to {args.output}",
    )
Example #7
def wcon_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    deck = eclfiles.get_ecldeck()
    wcon_df = df(deck)
    write_dframe_stdout_file(
        wcon_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr="Wrote to {}".format(args.output),
    )
Example #8
def gruptree_main(args) -> None:
    """Entry-point for module, for command line utility."""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    if not args.output and not args.prettyprint:
        print("Nothing to do. Set --output or --prettyprint")
        sys.exit(0)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(eclfiles.get_ecldeck(), startdate=args.startdate)
    if args.prettyprint:
        if "DATE" in dframe:
            print(prettyprint(dframe))
        else:
            logger.warning("No tree data to prettyprint")
    elif args.output:
        write_dframe_stdout_file(dframe, args.output, index=False, caller_logger=logger)
Example #9
def ppcwmax_gridvector(eclfiles: ecl2df.EclFiles) -> pd.Series:
    """Generate a vector of PPCWMAX data pr cell

    PPCWMAX is per SATNUM in the input deck

    Args:
        eclfiles

    Returns:
        pd.Series, indexed according to ecl2df.grid.df(eclfiles)
    """

    satnum_df = ecl2df.grid.df(eclfiles, vectors="SATNUM")
    deck = eclfiles.get_ecldeck()
    for satnum in satnum_df["SATNUM"].unique():
        ppcwmax = deck["PPCWMAX"][satnum - 1][0].get_raw_data_list()[0]
        satnum_df.loc[satnum_df["SATNUM"] == satnum, "PPCWMAX"] = ppcwmax
    return satnum_df["PPCWMAX"]
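
A minimal sketch of attaching the result to a grid dataframe, relying on the docstring's statement that the series is indexed according to ecl2df.grid.df(eclfiles); the DATA-file path is hypothetical:

import ecl2df

eclfiles = ecl2df.EclFiles("MYMODEL.DATA")  # hypothetical Eclipse DATA file
grid_df = ecl2df.grid.df(eclfiles, vectors="SATNUM")
grid_df["PPCWMAX"] = ppcwmax_gridvector(eclfiles)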
Example #10
def faults_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    deck = eclfiles.get_ecldeck()
    faults_df = df(deck)
    if faults_df.empty:
        logger.warning("Empty FAULT data, not written to disk!")
        return
    write_dframe_stdout_file(
        faults_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr="Wrote to {}".format(args.output),
    )
Example #11
def gruptree_main(args) -> None:
    """Entry-point for module, for command line utility."""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    if not args.output and not args.prettyprint:
        print("Nothing to do. Set --output or --prettyprint")
        sys.exit(0)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(eclfiles.get_ecldeck(), startdate=args.startdate)
    if args.prettyprint:
        if "DATE" in dframe:
            print(prettyprint(dframe))
        else:
            logger.warning("No tree data to prettyprint")
    elif args.output:
        write_dframe_stdout_file(dframe,
                                 args.output,
                                 index=False,
                                 caller_logger=logger)
Example #12
def gruptree_main(args) -> None:
    """Entry-point for module, for command line utility."""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    if not args.output and not args.prettyprint:
        print("Nothing to do. Set --output or --prettyprint")
        sys.exit(0)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(eclfiles.get_ecldeck(), startdate=args.startdate)
    if args.prettyprint:
        if "DATE" in dframe:
            for date in dframe["DATE"].dropna().unique():
                print("Date: " + str(date.astype("M8[D]")))
                for tree in edge_dataframe2dict(dframe[dframe["DATE"] == date]):
                    print(tree_from_dict(tree))
                print("")
        else:
            logger.warning("No tree data to prettyprint")
    if dframe.empty:
        logger.error("Empty GRUPTREE dataframe, not written to disk!")
    elif args.output:
        write_dframe_stdout_file(dframe, args.output, index=False, caller_logger=logger)
Example #13
def make_qc_gridframe(eclfiles: ecl2df.EclFiles) -> pd.DataFrame:
    """Construct a dataframe with needed information for swatinit qc from an Eclipse run.

    Makes a dataframe with one row for each active cell. Information from
    satfunc and equil merged in.
    """

    grid_df = ecl2df.grid.df(
        eclfiles,
        vectors=[
            # All of these are required.
            "FIPNUM",
            "EQLNUM",
            "SATNUM",
            "SWATINIT",  # Not outputted by OPM-flow, injected below
            "SWAT",
            "PORO",
            "PERMX",
            "NTG",
            "PRESSURE",
            "PCW",
            "PPCW",
            "SWL",
            "SWLPC",
            "SWU",
        ],
        rstdates="first",
    )

    # Circumvent bug in ecl2df that will pick SWL from both INIT and restart file:
    grid_df = grid_df.loc[:, ~grid_df.columns.duplicated()]

    # Merge in PPCWMAX from the deck, it is not reported in binary output files:
    if "PPCWMAX" in eclfiles.get_ecldeck():
        grid_df["PPCWMAX"] = ppcwmax_gridvector(eclfiles)

    # This will be unnecessary from ecl2df 0.13.0:
    grid_df = grid_df.where(grid_df > -1e20 + 1e13)

    if "SWL" not in grid_df:
        logger.warning("SWL not found in model. Using SWL=0.")
        logger.warning("Consider adding FILLEPS to the PROPS section")
        grid_df["SWL"] = 0.0

    deck = eclfiles.get_ecldeck()
    if "SWATINIT" in deck:
        swatinit_deckdata = deck["SWATINIT"][0][0].get_raw_data_list()
        # This list includes non-active cells, we must map via GLOBAL_INDEX:
        # GLOBAL_INDEX is 0-indexed.
        grid_df["SWATINIT_DECK"] = pd.Series(swatinit_deckdata)[
            grid_df["GLOBAL_INDEX"].astype(int).tolist()
        ].values

    if "SWATINIT" not in grid_df:
        # OPM-flow does not include SWATINIT in the INIT file.
        grid_df.rename({"SWATINIT_DECK": "SWATINIT"}, axis="columns", inplace=True)
    elif "SWATINIT_DECK" in grid_df:
        # (If SWATINIT is supplied as binary data in the Eclipse deck, the
        # code above is not able to extract it)
        if not np.isclose(
            (grid_df["SWATINIT_DECK"] - grid_df["SWATINIT"]).abs().max(), 0, atol=1e-7
        ):
            logger.warning("SWATINIT from INIT was not close to SWATINIT  from deck")
        else:
            del grid_df["SWATINIT_DECK"]  # This is not needed

    # Exposed to issues with endpoint scaling in peculiar decks:
    satfunc_df = ecl2df.satfunc.df(eclfiles)

    # Merge in the input pcmax pr. satnum for each cell:
    grid_df = merge_pc_max(grid_df, satfunc_df)

    grid_df = merge_equil(grid_df, ecl2df.equil.df(eclfiles, keywords=["EQUIL"]))

    grid_df = augment_grid_frame_qc_vectors(grid_df)

    if "PPCW" not in grid_df:
        grid_df["PPCW"] = np.nan

    if "SWATINIT" in grid_df:
        grid_df["QC_FLAG"] = qc_flag(grid_df)

    # Above the gas-oil contact, the computed capillary pressure will
    # be p_gas - p_water, but at cells truncated by SWL, the code
    # will give p_oil - p_water. Delete these inconsistent capillary
    # pressures by ignoring PC scaling whenever SWL has been truncated:
    if "QC_FLAG" in grid_df:
        grid_df.loc[grid_df["QC_FLAG"] == __SWL_TRUNC__, "PC_SCALING"] = np.nan

    if "PC_SCALING" in grid_df:
        grid_df["PC"] = compute_pc(grid_df, satfunc_df)

    return grid_df
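
A minimal end-to-end sketch of the swatinit QC workflow sketched above, assuming a hypothetical DATA-file path:

import ecl2df

eclfiles = ecl2df.EclFiles("MYMODEL.DATA")  # hypothetical Eclipse run
check_applicability(eclfiles)
qc_frame = make_qc_gridframe(eclfiles)
if "QC_FLAG" in qc_frame:  # present only when SWATINIT was found
    print(qc_frame["QC_FLAG"].value_counts())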