def test_repeated_logger_construction(capsys):
    """If we repeatedly call getLogger(), ensure handlers are not added on top"""
    log = ecl2df.getLogger_ecl2csv("nodouble")
    log = ecl2df.getLogger_ecl2csv("nodouble")
    log.warning("Don't repeat me")
    # A duplicated handler would emit the message twice:
    assert capsys.readouterr().out.count("Don't repeat me") == 1
def pvt_main(args) -> None:
    """Entry-point for module, for command line utility for Eclipse to CSV"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    eclfiles = EclFiles(args.DATAFILE)
    logger.info("Parsed %s", args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    if "TABDIMS" in deck:
        # A full deck with correct TABDIMS makes table extraction easy:
        dframe = df(deck, keywords=args.keywords)
    else:
        # Without TABDIMS the code must infer the number of saturation
        # functions; this is more error-prone and requires string input.
        dframe = df(
            Path(args.DATAFILE).read_text(encoding="utf-8"), keywords=args.keywords
        )
    if "PVTNUM" in dframe and "KEYWORD" in dframe:
        pvtnums = str(len(dframe["PVTNUM"].unique()))
        keywords = str(dframe["KEYWORD"].unique())
    else:
        pvtnums = "-"
        keywords = "-"
    common.write_dframe_stdout_file(
        dframe,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Unique PVTNUMs: {pvtnums}, PVT keywords: {keywords}",
    )
def pillars_main(args) -> None:
    """This is the command line API"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(
        eclfiles,
        region=args.region,
        rstdates=args.rstdates,
        soilcutoff=args.soilcutoff,
        sgascutoff=args.sgascutoff,
        swatcutoff=args.swatcutoff,
        stackdates=args.stackdates,
    )
    # Pick an aggregator for every column that has one registered;
    # the "@"-suffix (date) is stripped before lookup:
    aggregators = {
        col: AGGREGATORS[col.split("@")[0]]
        for col in dframe
        if col.split("@")[0] in AGGREGATORS
    }
    groupbies = []
    if args.group:
        if args.region:
            groupbies.append(args.region)
        if args.stackdates:
            groupbies.append("DATE")
    if groupbies:
        dframe = dframe.groupby(groupbies).agg(aggregators).reset_index()
    elif args.group:
        # Group everything into a single row; PORO must then be
        # recomputed from the aggregated PORV and VOLUME:
        dframe = dframe.drop("PILLAR", axis=1).mean().to_frame().transpose()
        dframe["PORO"] = dframe["PORV"] / dframe["VOLUME"]
    common.write_dframe_stdout_file(
        dframe, args.output, index=False, caller_logger=logger
    )
def satfunc_main(args) -> None:
    """Entry-point for module, for command line utility"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    eclfiles = EclFiles(args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    if "TABDIMS" in deck:
        # A full deck with (correct) TABDIMS makes parsing straightforward:
        dframe = df(eclfiles, keywords=args.keywords)
    else:
        # Possibly an include file; TABDIMS must then be inferred/guessed,
        # and df() needs the deck as a string:
        stringdeck = Path(args.DATAFILE).read_text(encoding="utf-8")
        dframe = df(stringdeck, keywords=args.keywords)
    if "SATNUM" in dframe and "KEYWORD" in dframe:
        satnums = str(len(dframe["SATNUM"].unique()))
        keywords = str(dframe["KEYWORD"].unique())
    else:
        satnums = "-"
        keywords = "-"
    write_dframe_stdout_file(
        dframe,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Unique SATNUMs: {satnums}, saturation keywords: {keywords}",
    )
def equil_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    eclfiles = EclFiles(args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    if "EQLDIMS" in deck:
        # Things are easier when a full deck with (correct) EQLDIMS
        # is supplied:
        equil_df = df(deck, keywords=args.keywords)
    else:
        # This might be an include file for which we have to infer/guess
        # EQLDIMS. Then we send it to df() as a string.
        # Fix: pass the keyword filter and an explicit encoding also in
        # this branch, consistent with pvt_main()/satfunc_main():
        equil_df = df(
            Path(args.DATAFILE).read_text(encoding="utf-8"), keywords=args.keywords
        )
    if "EQLNUM" in equil_df and "KEYWORD" in equil_df:
        eqlnums = str(len(equil_df["EQLNUM"].unique()))
        keywords = str(equil_df["KEYWORD"].unique())
    else:
        eqlnums = "-"
        keywords = "-"
    common.write_dframe_stdout_file(
        equil_df,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Unique EQLNUMs: {eqlnums}, keywords: {keywords}",
    )
def summary_main(args) -> None:
    """Read summary data from disk and write CSV back to disk"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    # Accept the DATA, UNSMRY or SMSPEC filename; strip down to the basename:
    eclbase = args.DATAFILE
    for suffix in (".DATA", ".UNSMRY", ".SMSPEC"):
        eclbase = eclbase.replace(suffix, "")
    eclfiles = EclFiles(eclbase)
    sum_df = df(
        eclfiles,
        time_index=args.time_index,
        column_keys=args.column_keys,
        start_date=args.start_date,
        end_date=args.end_date,
        params=args.params,
        paramfile=args.paramfile,
        datetime=False,
    )
    if args.arrow:
        sum_df = _df2pyarrow(sum_df)
    write_dframe_stdout_file(sum_df, args.output, index=True, caller_logger=logger)
def test_default_logger_levels_and_split(capsys):
    """Verify that the intended usage of this logger have expected results"""

    # Default configuration: INFO/DEBUG suppressed, WARNING to stdout,
    # ERROR to stderr:
    splitlogger = ecl2df.getLogger_ecl2csv("test_levels_split")

    splitlogger.debug("This DEBUG-text is not to be seen")
    captured = capsys.readouterr()
    assert "DEBUG-text" not in captured.out
    assert "DEBUG-text" not in captured.err

    splitlogger.info("This INFO-text is not to be seen by default")
    captured = capsys.readouterr()
    assert "INFO-text" not in captured.out

    splitlogger.warning("This WARNING-text is to be seen in stdout")
    captured = capsys.readouterr()
    assert "WARNING-text" in captured.out
    assert "WARNING-text" not in captured.err

    splitlogger.error("This ERROR-text should only be in stderr")
    captured = capsys.readouterr()
    assert "ERROR-text" not in captured.out
    assert "ERROR-text" in captured.err

    # If output is written to stdout, all logs should go to stderr:
    nosplit_logger = ecl2df.getLogger_ecl2csv(
        "test_levels_nosplit", args_dict={"output": "-", "debug": True}
    )

    nosplit_logger.debug("This DEBUG-text is to be seen in stderr")
    captured = capsys.readouterr()
    assert "DEBUG-text" not in captured.out
    assert "DEBUG-text" in captured.err

    # (message typo fixed: was "to be seen by in stderr")
    nosplit_logger.info("This INFO-text is to be seen in stderr")
    captured = capsys.readouterr()
    assert "INFO-text" not in captured.out
    assert "INFO-text" in captured.err

    nosplit_logger.warning("This WARNING-text is to be seen in stderr")
    captured = capsys.readouterr()
    assert "WARNING-text" not in captured.out
    assert "WARNING-text" in captured.err

    nosplit_logger.error("This ERROR-text should only be in stderr")
    captured = capsys.readouterr()
    assert "ERROR-text" not in captured.out
    assert "ERROR-text" in captured.err
def equil_reverse_main(args) -> None:
    """Entry-point for module, for command line utility for CSV to Eclipse"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    dframe = pd.read_csv(args.csvfile)
    logger.info("Parsed %s", args.csvfile)
    # Render Eclipse include-file text from the dataframe and emit it:
    common.write_inc_stdout_file(df2ecl(dframe, keywords=args.keywords), args.output)
def wellconnstatus_main(args) -> None:
    """Entry-point for module, for command line utility"""
    # Added "-> None" return annotation for consistency with the
    # other command line entry points in this package.
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    eclfiles = EclFiles(args.DATAFILE)
    wellconnstatus_df = df(eclfiles)
    write_dframe_stdout_file(
        wellconnstatus_df, args.output, index=False, caller_logger=logger
    )
def compdat_main(args) -> None:
    """Entry-point for module, for command line utility"""
    # Added "-> None" return annotation for consistency with the
    # other command line entry points in this package.
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    eclfiles = EclFiles(args.DATAFILE)
    compdat_df = df(eclfiles, initvectors=args.initvectors)
    write_dframe_stdout_file(compdat_df, args.output, index=False, caller_logger=logger)
def nnc_main(args) -> None:
    """Command line access point from main() or from ecl2csv via subparser"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    eclfiles = EclFiles(args.DATAFILE)
    nncdf = df(eclfiles, coords=args.coords, pillars=args.pillars)
    # write_dframe_stdout_file() handles both file output and stdout ("-").
    # The former trailing nncdf.to_csv(args.output, ...) call was removed:
    # it rewrote the same CSV a second time, and would create a file
    # literally named "-" when output is stdout.
    write_dframe_stdout_file(
        nncdf,
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Wrote to {args.output}",
    )
def faults_main(args) -> None:
    """Read from disk and write CSV back to disk"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    eclfiles = EclFiles(args.DATAFILE)
    if eclfiles:
        deck = eclfiles.get_ecldeck()
    # Extract FAULTS data and dump as CSV (file or stdout):
    write_dframe_stdout_file(
        df(deck),
        args.output,
        index=False,
        caller_logger=logger,
        logstr=f"Wrote to {args.output}",
    )
def gruptree_main(args) -> None:
    """Entry-point for module, for command line utility."""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    # Bail out early when the user asked for nothing:
    if not (args.output or args.prettyprint):
        print("Nothing to do. Set --output or --prettyprint")
        sys.exit(0)
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(eclfiles.get_ecldeck(), startdate=args.startdate)
    if args.prettyprint:
        if "DATE" in dframe:
            print(prettyprint(dframe))
        else:
            logger.warning("No tree data to prettyprint")
    elif args.output:
        write_dframe_stdout_file(dframe, args.output, index=False, caller_logger=logger)
def grid_main(args) -> None:
    """This is the command line API"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    eclfiles = EclFiles(args.DATAFILE)
    dframe = df(
        eclfiles,
        vectors=args.vectors,
        rstdates=args.rstdates,
        dropconstants=args.dropconstants,
        stackdates=args.stackdates,
    )
    if args.arrow:
        # Convert to a pyarrow table before writing:
        dframe = _df2pyarrow(dframe)
    common.write_dframe_stdout_file(dframe, args.output, index=False, caller_logger=logger)
def trans_main(args) -> None:
    """This is the command line API"""
    # Added "-> None" return annotation for consistency with the
    # other command line entry points in this package.
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    eclfiles = EclFiles(args.DATAFILE)
    trans_df = df(
        eclfiles,
        vectors=args.vectors,
        boundaryfilter=args.boundaryfilter,
        onlykdir=args.onlyk,
        onlyijdir=args.onlyij,
        coords=args.coords,
        group=args.group,
        addnnc=args.nnc,
    )
    write_dframe_stdout_file(trans_df, args.output, index=False, caller_logger=logger)
def rft_main(args) -> None:
    """Entry-point for module, for command line utility"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    datafile = args.DATAFILE
    if datafile.endswith(".RFT"):
        # The RFT file itself is also accepted; map it to the DATA file:
        datafile = datafile.replace(".RFT", "") + ".DATA"
    eclfiles = EclFiles(datafile)
    rft_df = df(eclfiles, wellname=args.wellname, date=args.date)
    if rft_df.empty:
        if args.wellname is None and args.date is None:
            logger.error("No data found. Bug?")
        else:
            logger.warning("No data. Check your well and/or date filter")
        return
    write_dframe_stdout_file(rft_df, args.output, index=False, caller_logger=logger)
def summary_reverse_main(args) -> None:
    """Entry point for usage with "csv2ecl summary" on the command line"""
    logger = getLogger_ecl2csv(  # pylint: disable=redefined-outer-name
        __name__, vars(args)
    )
    dframe = pd.read_csv(args.csvfile)
    logger.info("Parsed %s", args.csvfile)

    outputpath = Path(args.output)
    eclsum = df2eclsum(dframe, outputpath.name)

    # EclSum.fwrite() can only write to current directory, so temporarily
    # change into the target directory and always restore afterwards:
    origdir = os.getcwd()
    try:
        os.chdir(outputpath.parent)
        EclSum.fwrite(eclsum)
    finally:
        os.chdir(origdir)

    logger.info("Wrote to %s and %s", args.output + ".UNSMRY", args.output + ".SMSPEC")