Example #1
def equil_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args))
    eclfiles = EclFiles(args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    if "EQLDIMS" in deck:
        # Things are easier when a full deck with (correct) EQLDIMS
        # is supplied:
        equil_df = df(deck, keywords=args.keywords)
    else:
        # This might be an include file for which we have to infer/guess
        # EQLDIMS. Then we send it to df() as a string
        equil_df = df(Path(args.DATAFILE).read_text())

    if "EQLNUM" in equil_df and "KEYWORD" in equil_df:
        eqlnums = str(len(equil_df["EQLNUM"].unique()))
        keywords = str(equil_df["KEYWORD"].unique())
    else:
        eqlnums = "-"
        keywords = "-"
    common.write_dframe_stdout_file(
        equil_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Unique EQLNUMs: {eqlnums}, keywords: {keywords}",
    )
Example #2
def pvt_main(args):
    """Entry-point for module, for command line utility for Eclipse to CSV"""
    if args.verbose:
        logger.setLevel(logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    logger.info("Parsed %s", args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    if "TABDIMS" in deck:
        # Things are easier when a full deck with correct TABDIMS
        # is supplied:
        pvt_df = df(deck, keywords=args.keywords)
    else:
        # When TABDIMS is not present, the code will try to infer
        # the number of saturation functions, this is necessarily
        # more error-prone, and it needs a string as input.
        stringdeck = "".join(open(args.DATAFILE).readlines())
        pvt_df = df(stringdeck, keywords=args.keywords)
    if not pvt_df.empty:
        common.write_dframe_stdout_file(
            pvt_df,
            args.output,
            index=False,
            caller_logger=logger,
            logstr=(
                "Unique PVTNUMs: {}, PVT keywords: {}".format(
                    str(len(pvt_df["PVTNUM"].unique())), str(pvt_df["KEYWORD"].unique())
                )
            ),
        )
    else:
        logger.error("Empty PVT data, not written to disk")
Example #3
def pillars_main(args) -> None:
    """This is the command line API"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(
        eclfiles,
        region=args.region,
        rstdates=args.rstdates,
        soilcutoff=args.soilcutoff,
        sgascutoff=args.sgascutoff,
        swatcutoff=args.swatcutoff,
        stackdates=args.stackdates,
    )
    groupbies = []
    aggregators = {
        key: AGGREGATORS[key.split("@")[0]]
        for key in dframe if key.split("@")[0] in AGGREGATORS
    }
    if args.region and args.group:
        groupbies.append(args.region)
    if args.stackdates and args.group:
        groupbies.append("DATE")
    if groupbies:
        dframe = dframe.groupby(groupbies).agg(aggregators).reset_index()
    elif args.group:
        dframe = dframe.mean().to_frame().transpose()
    dframe["PORO"] = dframe["PORV"] / dframe["VOLUME"]
    common.write_dframe_stdout_file(dframe,
                                    args.output,
                                    index=False,
                                    caller_logger=logger)
Example #4
def equil_main(args):
    """Read from disk and write CSV back to disk"""
    if args.verbose:
        logger.setLevel(logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    if "EQLDIMS" in deck:
        # Things are easier when a full deck with (correct) EQLDIMS
        # is supplied:
        equil_df = df(deck)
    else:
        # This might be an include file for which we have to infer/guess
        # EQLDIMS. Then we send it to df() as a string
        equil_df = df("".join(open(args.DATAFILE).readlines()))
    if not equil_df.empty:
        common.write_dframe_stdout_file(
            equil_df,
            args.output,
            index=False,
            caller_logger=logger,
            logstr=("Unique EQLNUMs: {}, keywords: {}".format(
                str(len(equil_df["EQLNUM"].unique())),
                str(equil_df["KEYWORD"].unique()),
            )),
        )
    else:
        logger.error("Empty EQUIL-data, not written to disk!")
Example #5
def pvt_main(args) -> None:
    """Entry-point for module, for command line utility for Eclipse to CSV"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    logger.info("Parsed %s", args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    if "TABDIMS" in deck:
        # Things are easier when a full deck with correct TABDIMS
        # is supplied:
        pvt_df = df(deck, keywords=args.keywords)
    else:
        # When TABDIMS is not present, the code will try to infer
        # the number of saturation functions, this is necessarily
        # more error-prone, and it needs a string as input.
        stringdeck = Path(args.DATAFILE).read_text()
        pvt_df = df(stringdeck, keywords=args.keywords)
    if "PVTNUM" in pvt_df and "KEYWORD" in pvt_df:
        pvtnums = str(len(pvt_df["PVTNUM"].unique()))
        keywords = str(pvt_df["KEYWORD"].unique())
    else:
        pvtnums = "-"
        keywords = "-"
    common.write_dframe_stdout_file(
        pvt_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Unique PVTNUMs: {pvtnums}, PVT keywords: {keywords}",
    )
Example #6
def pillars_main(args) -> None:
    """This is the command line API"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args))

    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(
        eclfiles,
        region=args.region,
        rstdates=args.rstdates,
        soilcutoff=args.soilcutoff,
        sgascutoff=args.sgascutoff,
        swatcutoff=args.swatcutoff,
        stackdates=args.stackdates,
    )
    groupbies = []
    aggregators = {
        key: AGGREGATORS[key.split("@")[0]]
        for key in dframe if key.split("@")[0] in AGGREGATORS
    }
    if args.region and args.group:
        groupbies.append(args.region)
    if args.stackdates and args.group:
        groupbies.append("DATE")
    if groupbies:
        dframe = dframe.groupby(groupbies).agg(aggregators).reset_index()
    elif args.group:
        dframe = dframe.drop("PILLAR", axis=1).mean().to_frame().transpose()
    dframe["PORO"] = dframe["PORV"] / dframe["VOLUME"]
    common.write_dframe_stdout_file(dframe,
                                    args.output,
                                    index=False,
                                    caller_logger=logger)
Example #7
def compdat_main(args):
    """Entry-point for module, for command line utility"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    compdat_df = df(eclfiles, initvectors=args.initvectors)
    if compdat_df.empty:
        logger.warning("Empty COMPDAT data being written to disk!")
    write_dframe_stdout_file(compdat_df,
                             args.output,
                             index=False,
                             caller_logger=logger)
Example #8
def nnc_main(args) -> None:
    """Command line access point from main() or from ecl2csv via subparser"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    nncdf = df(eclfiles, coords=args.coords, pillars=args.pillars)
    write_dframe_stdout_file(
        nncdf,
        args.output,
        index=False,
        caller_logger=logger,
        logstr="Wrote to {}".format(args.output),
    )
Example #9
def nnc_main(args) -> None:
    """Command line access point from main() or from ecl2csv via subparser"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args))
    eclfiles = EclFiles(args.DATAFILE)
    nncdf = df(eclfiles, coords=args.coords, pillars=args.pillars)
    write_dframe_stdout_file(
        nncdf,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Wrote to {args.output}",
    )
Example #10
def wcon_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    wcon_df = df(deck)
    write_dframe_stdout_file(
        wcon_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr="Wrote to {}".format(args.output),
    )
Example #11
def faults_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args))
    eclfiles = EclFiles(args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    faults_df = df(deck)
    write_dframe_stdout_file(
        faults_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Wrote to {args.output}",
    )
Example #12
def grid_main(args):
    """This is the command line API"""
    if args.verbose:
        logger.setLevel(logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    grid_df = df(
        eclfiles,
        vectors=args.vectors,
        rstdates=args.rstdates,
        dropconstants=args.dropconstants,
        stackdates=args.stackdates,
    )
    common.write_dframe_stdout_file(
        grid_df, args.output, index=False, caller_logger=logger
    )
Example #13
def fipreports_main(args) -> None:
    """Command line API"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    if args.PRTFILE.endswith(".PRT"):
        prtfile = args.PRTFILE
    else:
        prtfile = EclFiles(args.PRTFILE).get_prtfilename()
    dframe = df(prtfile, args.fipname)
    write_dframe_stdout_file(dframe,
                             args.output,
                             index=False,
                             caller_logger=logger)
Example #14
def gruptree_main(args) -> None:
    """Entry-point for module, for command line utility."""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    if not args.output and not args.prettyprint:
        print("Nothing to do. Set --output or --prettyprint")
        sys.exit(0)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(eclfiles.get_ecldeck(), startdate=args.startdate)
    if args.prettyprint:
        if "DATE" in dframe:
            print(prettyprint(dframe))
        else:
            logger.warning("No tree data to prettyprint")
    elif args.output:
        write_dframe_stdout_file(dframe, args.output, index=False, caller_logger=logger)
Example #15
def trans_main(args):
    """This is the command line API"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    trans_df = df(
        eclfiles,
        vectors=args.vectors,
        boundaryfilter=args.boundaryfilter,
        onlykdir=args.onlyk,
        onlyijdir=args.onlyij,
        coords=args.coords,
        group=args.group,
        addnnc=args.nnc,
    )

    write_dframe_stdout_file(trans_df, args.output, index=False, caller_logger=logger)
Example #16
def faults_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    eclfiles = EclFiles(args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    faults_df = df(deck)
    if faults_df.empty:
        logger.warning("Empty FAULT data, not written to disk!")
        return
    write_dframe_stdout_file(
        faults_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr="Wrote to {}".format(args.output),
    )
Example #17
def trans_main(args):
    """This is the command line API"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    eclfiles = EclFiles(args.DATAFILE)
    trans_df = df(
        eclfiles,
        vectors=args.vectors,
        boundaryfilter=args.boundaryfilter,
        onlykdir=args.onlyk,
        onlyijdir=args.onlyij,
        coords=args.coords,
        group=args.group,
        addnnc=args.nnc,
    )

    write_dframe_stdout_file(trans_df, args.output, index=False, caller_logger=logger)
Example #18
def grid_main(args) -> None:
    """This is the command line API"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args))
    eclfiles = EclFiles(args.DATAFILE)
    grid_df = df(
        eclfiles,
        vectors=args.vectors,
        rstdates=args.rstdates,
        dropconstants=args.dropconstants,
        stackdates=args.stackdates,
    )
    if args.arrow:
        grid_df = _df2pyarrow(grid_df)
    common.write_dframe_stdout_file(grid_df,
                                    args.output,
                                    index=False,
                                    caller_logger=logger)
Example #19
def gruptree_main(args) -> None:
    """Entry-point for module, for command line utility."""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    if not args.output and not args.prettyprint:
        print("Nothing to do. Set --output or --prettyprint")
        sys.exit(0)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(eclfiles.get_ecldeck(), startdate=args.startdate)
    if args.prettyprint:
        if "DATE" in dframe:
            print(prettyprint(dframe))
        else:
            logger.warning("No tree data to prettyprint")
    elif args.output:
        write_dframe_stdout_file(dframe,
                                 args.output,
                                 index=False,
                                 caller_logger=logger)
Example #20
def gruptree_main(args) -> None:
    """Entry-point for module, for command line utility."""
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    if not args.output and not args.prettyprint:
        print("Nothing to do. Set --output or --prettyprint")
        sys.exit(0)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(eclfiles.get_ecldeck(), startdate=args.startdate)
    if args.prettyprint:
        if "DATE" in dframe:
            for date in dframe["DATE"].dropna().unique():
                print("Date: " + str(date.astype("M8[D]")))
                for tree in edge_dataframe2dict(dframe[dframe["DATE"] == date]):
                    print(tree_from_dict(tree))
                print("")
        else:
            logger.warning("No tree data to prettyprint")
    if dframe.empty:
        logger.error("Empty GRUPTREE dataframe, not written to disk!")
    elif args.output:
        write_dframe_stdout_file(dframe, args.output, index=False, caller_logger=logger)
Example #21
def test_write_dframe_file(tmpdir):
    """Test that we can write dataframes to files."""
    tmpdir.chdir()
    dframe = pd.DataFrame([{"foo": "bar"}])
    common.write_dframe_stdout_file(dframe, "foo.csv")
    pd.testing.assert_frame_equal(pd.read_csv("foo.csv"), dframe)
Example #22
def test_write_dframe_stdout(capsys):
    """Test that we can write dataframes to stdout."""
    dframe = pd.DataFrame([{"foo": "bar"}])
    common.write_dframe_stdout_file(dframe, common.MAGIC_STDOUT)
    assert "foo\nbar" in capsys.readouterr().out