Example #1
def pillars_main(args) -> None:
    """This is the command line API"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args))

    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(
        eclfiles,
        region=args.region,
        rstdates=args.rstdates,
        soilcutoff=args.soilcutoff,
        sgascutoff=args.sgascutoff,
        swatcutoff=args.swatcutoff,
        stackdates=args.stackdates,
    )
    groupbies = []
    aggregators = {
        key: AGGREGATORS[key.split("@")[0]]
        for key in dframe if key.split("@")[0] in AGGREGATORS
    }
    if args.region and args.group:
        groupbies.append(args.region)
    if args.stackdates and args.group:
        groupbies.append("DATE")
    if groupbies:
        dframe = dframe.groupby(groupbies).agg(aggregators).reset_index()
    elif args.group:
        dframe = dframe.drop("PILLAR", axis=1).mean().to_frame().transpose()
    dframe["PORO"] = dframe["PORV"] / dframe["VOLUME"]
    common.write_dframe_stdout_file(dframe,
                                    args.output,
                                    index=False,
                                    caller_logger=logger)
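The groupby/aggregation step above only aggregates columns it recognizes, after stripping any "@<date>" suffix from the column name. Below is a minimal, self-contained pandas sketch of that pattern; the column names and the aggregator mapping are illustrative assumptions, not the actual contents of ecl2df.pillars.AGGREGATORS.

import pandas as pd

AGGREGATORS = {"PORV": "sum", "VOLUME": "sum", "SWAT": "mean"}  # assumed mapping
dframe = pd.DataFrame(
    {
        "PILLAR": ["1-1", "1-1", "1-2"],
        "PORV": [10.0, 20.0, 5.0],
        "VOLUME": [50.0, 80.0, 25.0],
        "SWAT@2000-01-01": [0.2, 0.4, 0.9],
    }
)
# Strip any "@<date>" suffix before looking up the aggregator, as in pillars_main:
aggregators = {
    key: AGGREGATORS[key.split("@")[0]]
    for key in dframe
    if key.split("@")[0] in AGGREGATORS
}
print(dframe.groupby("PILLAR").agg(aggregators).reset_index())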
Example #2
def get_completion_list(ECL_DATA_file_name):
    """
    Create a datafram of unrolled well completions

    Args:
    Input DATA file name

    Returns:
    Tuple:
    List of unique well names
    List of completions associated to well names
    """

    ECL_file = EclFiles(ECL_DATA_file_name)
    compdat_df = compdat.df(ECL_file)

    # Convert from Eclipse's 1-based indexing to zero-based
    compdat_df[["I", "J", "K1", "K2"]] = compdat_df[["I", "J", "K1", "K2"]] - 1

    # Create (I, J, K1) tuples identifying each completion cell
    compdat_df["IJK"] = compdat_df[["I", "J", "K1"]].apply(tuple, axis=1)

    well_list = compdat_df["WELL"].unique().tolist()
    completion_list = []
    for well in well_list:
        completion_list.append(
            compdat_df["IJK"].loc[compdat_df["WELL"] == well].to_list())

    return completion_list, well_list
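A hedged usage sketch for the function above; the DATA file path is a hypothetical placeholder. Note that the completions are returned before the well names.

completions, wells = get_completion_list("MODEL.DATA")  # hypothetical path
for well, ijk_tuples in zip(wells, completions):
    print(well, ijk_tuples[:3])  # first few zero-based (I, J, K1) tuples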
Example #3
def pvt_main(args) -> None:
    """Entry-point for module, for command line utility for Eclipse to CSV"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    logger.info("Parsed %s", args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    if "TABDIMS" in deck:
        # Things are easier when a full deck with correct TABDIMS
        # is supplied:
        pvt_df = df(deck, keywords=args.keywords)
    else:
        # When TABDIMS is not present, the code will try to infer
        # the number of PVT tables; this is necessarily more
        # error-prone, and it needs a string as input.
        stringdeck = Path(args.DATAFILE).read_text()
        pvt_df = df(stringdeck, keywords=args.keywords)
    if "PVTNUM" in pvt_df and "KEYWORD" in pvt_df:
        pvtnums = str(len(pvt_df["PVTNUM"].unique()))
        keywords = str(pvt_df["KEYWORD"].unique())
    else:
        pvtnums = "-"
        keywords = "-"
    common.write_dframe_stdout_file(
        pvt_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Unique PVTNUMs: {pvtnums}, PVT keywords: {keywords}",
    )
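A minimal usage sketch of the same branching logic outside the command line entry point, assuming ecl2df is installed and "MODEL.DATA" is a hypothetical Eclipse DATA file:

from pathlib import Path

from ecl2df import pvt
from ecl2df.eclfiles import EclFiles

deck = EclFiles("MODEL.DATA").get_ecldeck()
if "TABDIMS" in deck:
    pvt_df = pvt.df(deck)  # table dimensions are known from the deck
else:
    # Fall back to string input so table counts can be inferred:
    pvt_df = pvt.df(Path("MODEL.DATA").read_text())
print(pvt_df.head())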
Example #4
def pillars_main(args) -> None:
    """This is the command line API"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(
        eclfiles,
        region=args.region,
        rstdates=args.rstdates,
        soilcutoff=args.soilcutoff,
        sgascutoff=args.sgascutoff,
        swatcutoff=args.swatcutoff,
        stackdates=args.stackdates,
    )
    groupbies = []
    aggregators = {
        key: AGGREGATORS[key.split("@")[0]]
        for key in dframe if key.split("@")[0] in AGGREGATORS
    }
    if args.region and args.group:
        groupbies.append(args.region)
    if args.stackdates and args.group:
        groupbies.append("DATE")
    if groupbies:
        dframe = dframe.groupby(groupbies).agg(aggregators).reset_index()
    elif args.group:
        dframe = dframe.mean().to_frame().transpose()
    dframe["PORO"] = dframe["PORV"] / dframe["VOLUME"]
    common.write_dframe_stdout_file(dframe,
                                    args.output,
                                    index=False,
                                    caller_logger=logger)
Example #5
def test_filedescriptors():
    """Test that filedescriptors are properly closed"""

    fd_dir = Path("/proc/") / str(os.getpid()) / "fd"
    if not fd_dir.exists():
        print("Counting file descriptors on non-Linux not supported")
        return

    pre_fd_count = len(list(fd_dir.glob("*")))

    eclfiles = EclFiles(EIGHTCELLS)
    # No opened files yet:
    assert len(list(fd_dir.glob("*"))) == pre_fd_count

    eclfiles.close()
    # No change, no files to close:
    assert len(list(fd_dir.glob("*"))) == pre_fd_count

    eclfiles.get_egrid()
    # This should not leave any file descriptor open
    assert len(list(fd_dir.glob("*"))) == pre_fd_count

    eclfiles.get_initfile()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._initfile is not None
    eclfiles.close()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._initfile is None

    eclfiles.get_rstfile()
    # Automatically closed by libecl
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._rstfile is not None
    eclfiles.close()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._rstfile is None

    eclfiles.get_eclsum()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count + 1
    eclfiles.close()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count

    eclfiles.get_egridfile()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._egridfile is not None
    eclfiles.close()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._egridfile is None

    eclfiles.get_rftfile()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._rftfile is not None
    eclfiles.close()
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
    assert eclfiles._rftfile is None

    eclfiles.get_ecldeck()
    # This should not leave any file descriptor open
    assert len(list(fd_dir.glob("*"))) == pre_fd_count
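The test above counts entries in /proc/<pid>/fd before and after each accessor. A small helper distilled from that pattern could look like the sketch below; it is Linux-only, and the helper name is ours, not part of ecl2df.

import os
from pathlib import Path


def count_open_fds() -> int:
    """Return the number of open file descriptors for this process (Linux only)."""
    fd_dir = Path("/proc") / str(os.getpid()) / "fd"
    if not fd_dir.exists():
        raise OSError("Counting file descriptors is only supported on Linux")
    return len(list(fd_dir.glob("*")))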
Example #6
def test_df():
    """Test main dataframe API, only testing that something comes out"""
    eclfiles = EclFiles(EIGHTCELLS)
    compdat_df = compdat.df(eclfiles)
    assert not compdat_df.empty
    assert "ZONE" in compdat_df
    assert "K1" in compdat_df
    assert "WELL" in compdat_df
Example #7
def test_comp2df():
    """Test that dataframes are produced"""
    eclfiles = EclFiles(DATAFILE)
    compdfs = compdat.deck2dfs(eclfiles.get_ecldeck())

    assert not compdfs["COMPDAT"].empty
    assert compdfs["WELSEGS"].empty  # REEK demo does not include multisegment wells
    assert compdfs["COMPSEGS"].empty
    assert len(compdfs["COMPDAT"].columns)
Example #8
def test_comp2df():
    """Test that dataframes are produced"""
    eclfiles = EclFiles(EIGHTCELLS)
    compdfs = compdat.deck2dfs(eclfiles.get_ecldeck())

    assert not compdfs["COMPDAT"].empty
    assert not compdfs["WELSEGS"].empty
    assert not compdfs["COMPSEGS"].empty
    assert not compdfs["COMPDAT"].columns.empty
Example #9
def nnc_main(args):
    """Command line access point from main() or from ecl2csv via subparser"""
    if args.verbose:
        logger.setLevel(logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    nncdf = df(eclfiles, coords=args.coords, pillars=args.pillars)
    if nncdf.empty:
        logger.warning("Empty NNC dataframe being written to disk!")
    nncdf.to_csv(args.output, index=False)
    print("Wrote to " + args.output)
Example #10
def nnc_main(args) -> None:
    """Command line access point from main() or from ecl2csv via subparser"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    nncdf = df(eclfiles, coords=args.coords, pillars=args.pillars)
    write_dframe_stdout_file(
        nncdf,
        args.output,
        index=False,
        caller_logger=logger,
        logstr="Wrote to {}".format(args.output),
    )
Example #11
def nnc_main(args) -> None:
    """Command line access point from main() or from ecl2csv via subparser"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args))
    eclfiles = EclFiles(args.DATAFILE)
    nncdf = df(eclfiles, coords=args.coords, pillars=args.pillars)
    write_dframe_stdout_file(
        nncdf,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Wrote to {args.output}",
    )
Example #12
File: faults.py  Project: lindjoha/ecl2df
def faults_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args))
    eclfiles = EclFiles(args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    faults_df = df(deck)
    write_dframe_stdout_file(
        faults_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Wrote to {args.output}",
    )
Example #13
def wcon_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    wcon_df = df(deck)
    write_dframe_stdout_file(
        wcon_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr="Wrote to {}".format(args.output),
    )
Example #14
def fipreports_main(args) -> None:
    """Command line API"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    if args.PRTFILE.endswith(".PRT"):
        prtfile = args.PRTFILE
    else:
        prtfile = EclFiles(args.PRTFILE).get_prtfilename()
    dframe = df(prtfile, args.fipname)
    write_dframe_stdout_file(dframe,
                             args.output,
                             index=False,
                             caller_logger=logger)
Example #15
def gruptree_main(args) -> None:
    """Entry-point for module, for command line utility."""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    if not args.output and not args.prettyprint:
        print("Nothing to do. Set --output or --prettyprint")
        sys.exit(0)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(eclfiles.get_ecldeck(), startdate=args.startdate)
    if args.prettyprint:
        if "DATE" in dframe:
            print(prettyprint(dframe))
        else:
            logger.warning("No tree data to prettyprint")
    elif args.output:
        write_dframe_stdout_file(dframe, args.output, index=False, caller_logger=logger)
Example #16
def get_ecl_unit_system(ensemble_path: str) -> Optional[str]:
    """Returns the unit system of an eclipse deck. The options are \
    METRIC, FIELD, LAB and PVT-M.

    If none of these are found, the function returns METRIC

    If no eclipse DATA file is found in the ensemble path, it
    returns None
    """
    datafile = get_ecl_datafile(ensemble_path)
    if datafile is None:
        return None
    ecl_deck = EclFiles(datafile).get_ecldeck()
    for keyword in ecl_deck:
        if keyword.name in ["METRIC", "FIELD", "LAB", "PVT-M"]:
            return keyword.name
    return "METRIC"
Example #17
def faults_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    faults_df = df(deck)
    if faults_df.empty:
        logger.warning("Empty FAULT data, not written to disk!")
        return
    write_dframe_stdout_file(
        faults_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr="Wrote to {}".format(args.output),
    )
Example #18
def gruptree_main(args) -> None:
    """Entry-point for module, for command line utility."""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    if not args.output and not args.prettyprint:
        print("Nothing to do. Set --output or --prettyprint")
        sys.exit(0)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(eclfiles.get_ecldeck(), startdate=args.startdate)
    if args.prettyprint:
        if "DATE" in dframe:
            print(prettyprint(dframe))
        else:
            logger.warning("No tree data to prettyprint")
    elif args.output:
        write_dframe_stdout_file(dframe,
                                 args.output,
                                 index=False,
                                 caller_logger=logger)
Example #19
def test_initmerging():
    """Test that we can ask for INIT vectors to be merged into the data"""
    eclfiles = EclFiles(REEK)
    noinit_df = compdat.df(eclfiles)
    df = compdat.df(eclfiles, initvectors=[])
    assert isinstance(df, pd.DataFrame)
    assert not df.empty

    df = compdat.df(eclfiles, initvectors=["FIPNUM", "EQLNUM", "SATNUM"])
    assert "FIPNUM" in df
    assert "EQLNUM" in df
    assert "SATNUM" in df
    assert len(df) == len(noinit_df)

    df = compdat.df(eclfiles, initvectors="FIPNUM")
    assert "FIPNUM" in df
    assert len(df) == len(noinit_df)

    with pytest.raises(AssertionError):
        compdat.df(eclfiles, initvectors=2)
Example #20
def gruptree_main(args) -> None:
    """Entry-point for module, for command line utility."""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    if not args.output and not args.prettyprint:
        print("Nothing to do. Set --output or --prettyprint")
        sys.exit(0)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(eclfiles.get_ecldeck(), startdate=args.startdate)
    if args.prettyprint:
        if "DATE" in dframe:
            for date in dframe["DATE"].dropna().unique():
                print("Date: " + str(date.astype("M8[D]")))
                for tree in edge_dataframe2dict(dframe[dframe["DATE"] == date]):
                    print(tree_from_dict(tree))
                print("")
        else:
            logger.warning("No tree data to prettyprint")
    if dframe.empty:
        logger.error("Empty GRUPTREE dataframe, not written to disk!")
    elif args.output:
        write_dframe_stdout_file(dframe, args.output, index=False, caller_logger=logger)
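The prettyprint branch above truncates each numpy datetime64 to day precision via astype("M8[D]"). A minimal sketch of just that formatting step, with an assumed date value:

import numpy as np

date = np.datetime64("2021-03-01T00:00:00")
print("Date: " + str(date.astype("M8[D]")))  # -> Date: 2021-03-01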