def test_conda_env(tmpdir):
    conda_prefix = os.path.join(tmpdir, "conda")
    pmp_share_path = os.path.join(conda_prefix, "share", "pmp")

    os.makedirs(pmp_share_path)

    with mock.patch.dict(os.environ, {"CONDA_PREFIX": conda_prefix}):
        path = resources.resource_path()

    assert path == os.path.join(tmpdir, "conda", "share", "pmp")
def load_path_as_file_obj(name):
    """Returns a File object for the file named name."""
    egg_pth = resources.resource_path()
    file_path = os.path.join(egg_pth, name)
    opened_file = None
    try:
        opened_file = open(file_path)
    except IOError:
        logging.getLogger("pcmdi_metrics").error("%s could not be loaded!" % file_path)
    except Exception:
        logging.getLogger("pcmdi_metrics").error(
            "Unexpected error while opening file: %s" % str(sys.exc_info()[0])
        )
    return opened_file
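# Hedged usage sketch (not part of the original module): read a packaged text
# resource through load_path_as_file_obj and tolerate a missing file. The
# "disclaimer.txt" name matches the resource read elsewhere in this package.
def _example_read_disclaimer():
    fobj = load_path_as_file_obj("disclaimer.txt")
    if fobj is None:
        return ""
    with fobj:
        return fobj.read()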
def test_conda_env_no_exist(resource_filename, getcwd, tmpdir):
    # Fix issue when tests are run against an installed package
    resource_filename.side_effect = Exception()

    conda_prefix = os.path.join(tmpdir, "conda")
    getcwd_path = os.path.join(tmpdir, "share", "pmp")

    os.makedirs(getcwd_path)

    getcwd.return_value = tmpdir

    with mock.patch.dict(os.environ, {"CONDA_PREFIX": conda_prefix}):
        path = resources.resource_path()

    assert path == os.path.join(tmpdir, "share", "pmp")
def monsoon_wang_runner(args):
    # args = P.parse_args(sys.argv[1:])
    modpath = genutil.StringConstructor(args.test_data_path)
    modpath.variable = args.modvar
    outpathdata = args.results_dir
    if isinstance(args.modnames, str):
        mods = eval(args.modnames)
    else:
        mods = args.modnames

    json_filename = args.outnamejson

    if json_filename == "CMIP_MME":
        json_filename = "/MPI_" + args.mip + "_" + args.experiment

    # VAR IS FIXED TO BE PRECIP FOR CALCULATING MONSOON PRECIPITATION INDICES
    var = args.modvar
    thr = args.threshold
    sig_digits = ".3f"

    # Get flag for CMEC output
    cmec = args.cmec

    #########################################
    # PMP monthly default PR obs
    cdms2.axis.longitude_aliases.append("longitude_prclim_mpd")
    cdms2.axis.latitude_aliases.append("latitude_prclim_mpd")
    fobs = cdms2.open(args.reference_data_path)
    dobs_orig = fobs(args.obsvar)
    fobs.close()

    obsgrid = dobs_orig.getGrid()

    ########################################
    # FCN TO COMPUTE GLOBAL ANNUAL RANGE AND MONSOON PRECIP INDEX
    annrange_obs, mpi_obs = mpd(dobs_orig)

    #########################################
    # SETUP WHERE TO OUTPUT RESULTING DATA (netcdf)
    nout = os.path.join(
        outpathdata, "_".join([args.experiment, args.mip, "wang-monsoon"])
    )
    try:
        os.makedirs(nout)
    except BaseException:
        pass

    # SETUP WHERE TO OUTPUT RESULTS (json)
    jout = outpathdata
    try:
        os.makedirs(jout)
    except BaseException:
        pass

    gmods = []  # "Got" these MODS
    for i, mod in enumerate(mods):
        modpath.model = mod
        for k in modpath.keys():
            try:
                val = getattr(args, k)
            except Exception:
                continue
            if not isinstance(val, (list, tuple)):
                setattr(modpath, k, val)
            else:
                setattr(modpath, k, val[i])
        l1 = modpath()
        if os.path.isfile(l1):
            gmods.append(mod)

    if len(gmods) == 0:
        raise RuntimeError("No model file found!")

    #########################################
    egg_pth = resources.resource_path()
    globals = {}
    locals = {}
    exec(
        compile(
            open(os.path.join(egg_pth, "default_regions.py")).read(),
            os.path.join(egg_pth, "default_regions.py"),
            "exec",
        ),
        globals,
        locals,
    )
    regions_specs = locals["regions_specs"]

    doms = ["AllMW", "AllM", "NAMM", "SAMM", "NAFM", "SAFM", "ASM", "AUSM"]

    mpi_stats_dic = {}
    for i, mod in enumerate(gmods):
        modpath.model = mod
        for k in modpath.keys():
            try:
                val = getattr(args, k)
            except Exception:
                continue
            if not isinstance(val, (list, tuple)):
                setattr(modpath, k, val)
            else:
                setattr(modpath, k, val[i])
        modelFile = modpath()

        mpi_stats_dic[mod] = {}

        print(
            "******************************************************************************************"
        )
        print(modelFile)
        f = cdms2.open(modelFile)
        d_orig = f(var)

        annrange_mod, mpi_mod = mpd(d_orig)
        annrange_mod = annrange_mod.regrid(
            obsgrid, regridTool="regrid2", regridMethod="conserve", mkCyclic=True
        )
        mpi_mod = mpi_mod.regrid(
            obsgrid, regridTool="regrid2", regridMethod="conserve", mkCyclic=True
        )

        for dom in doms:
            mpi_stats_dic[mod][dom] = {}

            reg_sel = regions_specs[dom]["domain"]

            mpi_obs_reg = mpi_obs(reg_sel)
            mpi_obs_reg_sd = float(statistics.std(mpi_obs_reg, axis="xy"))
            mpi_mod_reg = mpi_mod(reg_sel)

            cor = float(statistics.correlation(mpi_mod_reg, mpi_obs_reg, axis="xy"))
            rms = float(statistics.rms(mpi_mod_reg, mpi_obs_reg, axis="xy"))
            rmsn = rms / mpi_obs_reg_sd

            # DOMAIN SELECTED FROM GLOBAL ANNUAL RANGE FOR MODS AND OBS
            annrange_mod_dom = annrange_mod(reg_sel)
            annrange_obs_dom = annrange_obs(reg_sel)

            # SKILL SCORES
            #  HIT/(HIT + MISSED + FALSE ALARMS)
            hit, missed, falarm, score, hitmap, missmap, falarmmap = mpi_skill_scores(
                annrange_mod_dom, annrange_obs_dom, thr
            )

            # POPULATE DICTIONARY FOR JSON FILES
            mpi_stats_dic[mod][dom] = {}
            mpi_stats_dic[mod][dom]["cor"] = format(cor, sig_digits)
            mpi_stats_dic[mod][dom]["rmsn"] = format(rmsn, sig_digits)
            mpi_stats_dic[mod][dom]["threat_score"] = format(score, sig_digits)

            # SAVE ANNRANGE AND HIT MISS AND FALSE ALARM FOR EACH MOD DOM
            fm = os.path.join(nout, "_".join([mod, dom, "wang-monsoon.nc"]))
            g = cdms2.open(fm, "w")
            g.write(annrange_mod_dom)
            g.write(hitmap, dtype=numpy.int32)
            g.write(missmap, dtype=numpy.int32)
            g.write(falarmmap, dtype=numpy.int32)
            g.close()
        f.close()

    # OUTPUT METRICS TO JSON FILE
    OUT = pcmdi_metrics.io.base.Base(os.path.abspath(jout), json_filename)

    disclaimer = open(os.path.join(egg_pth, "disclaimer.txt")).read()
    metrics_dictionary = collections.OrderedDict()
    metrics_dictionary["DISCLAIMER"] = disclaimer
    metrics_dictionary["REFERENCE"] = (
        "The statistics in this file are based on"
        + " Wang, B., Kim, HJ., Kikuchi, K. et al. "
        + "Clim Dyn (2011) 37: 941. doi:10.1007/s00382-010-0877-0"
    )
    metrics_dictionary["RESULTS"] = mpi_stats_dic  # collections.OrderedDict()

    OUT.var = var
    OUT.write(
        metrics_dictionary,
        json_structure=["model", "domain", "statistic"],
        indent=4,
        separators=(",", ": "),
    )
    if cmec:
        print("Writing cmec file")
        OUT.write_cmec(indent=4, separators=(",", ": "))
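# Minimal numpy sketch (illustrative, not the package implementation) of the
# threat-score idea used above; the real values come from mpi_skill_scores,
# which compares model and observed annual-range fields against a threshold.
def _example_threat_score():
    import numpy as np

    annrange_mod_dom = np.array([3.0, 1.0, 4.0, 0.5])  # hypothetical model field
    annrange_obs_dom = np.array([2.5, 0.2, 4.5, 3.0])  # hypothetical obs field
    thr = 2.0

    mod_hit = annrange_mod_dom > thr
    obs_hit = annrange_obs_dom > thr
    hit = int(np.sum(mod_hit & obs_hit))
    missed = int(np.sum(~mod_hit & obs_hit))
    falarm = int(np.sum(mod_hit & ~obs_hit))
    # Threat score = HIT / (HIT + MISSED + FALSE ALARMS)
    return hit / float(hit + missed + falarm)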
def main():
    def compute(param):
        template = populateStringConstructor(args.filename_template, args)
        template.variable = param.varname
        template.month = param.monthname
        fnameRoot = param.fileName
        reverted = template.reverse(os.path.basename(fnameRoot))
        model = reverted["model"]
        print("Specifying latitude / longitude domain of interest ...")
        datanameID = "diurnalstd"  # Short ID name of output data
        latrange = (param.args.lat1, param.args.lat2)
        lonrange = (param.args.lon1, param.args.lon2)
        region = cdutil.region.domain(latitude=latrange, longitude=lonrange)
        if param.args.region_name == "":
            region_name = "{:g}_{:g}&{:g}_{:g}".format(*(latrange + lonrange))
        else:
            region_name = param.args.region_name
        print("Reading %s ..." % fnameRoot)
        reverted = template.reverse(os.path.basename(fnameRoot))
        model = reverted["model"]
        try:
            f = cdms2.open(fnameRoot)
            x = f(datanameID, region)
            units = x.units
            print(" Shape =", x.shape)
            print("Finding RMS area-average ...")
            x = x * x
            x = cdutil.averager(x, weights="unweighted")
            x = cdutil.averager(x, axis="xy")
            x = numpy.ma.sqrt(x)
            print(
                "For %8s in %s, average variance of hourly values = (%5.2f %s)^2"
                % (model, monthname, x, units)
            )
            f.close()
        except Exception as err:
            print("Failed model %s with error: %s" % (model, err))
            x = 1.0e20
        return model, region, {region_name: x}

    P.add_argument(
        "-j",
        "--outnamejson",
        type=str,
        dest="outnamejson",
        default="pr_%(month)_%(firstyear)-%(lastyear)_std_of_hourlymeans.json",
        help="Output name for jsons",
    )
    P.add_argument("--lat1", type=float, default=-50.0, help="First latitude")
    P.add_argument("--lat2", type=float, default=50.0, help="Last latitude")
    P.add_argument("--lon1", type=float, default=0.0, help="First longitude")
    P.add_argument("--lon2", type=float, default=360.0, help="Last longitude")
    P.add_argument(
        "--region_name",
        type=str,
        default="TRMM",
        help="name for the region of interest",
    )
    P.add_argument(
        "-t",
        "--filename_template",
        default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_diurnal_std.nc",
    )
    P.add_argument("--model", default="*")
    P.add_argument(
        "--cmec",
        dest="cmec",
        action="store_true",
        default=False,
        help="Use to save metrics in CMEC JSON format",
    )
    P.add_argument(
        "--no_cmec",
        dest="cmec",
        action="store_false",
        default=False,
        help="Use to disable saving metrics in CMEC JSON format",
    )

    args = P.get_parameter()
    month = args.month
    monthname = monthname_d[month]
    startyear = args.firstyear  # noqa: F841
    finalyear = args.lastyear  # noqa: F841
    cmec = args.cmec

    template = populateStringConstructor(args.filename_template, args)
    template.month = monthname

    print("TEMPLATE NAME:", template())

    print("Specifying latitude / longitude domain of interest ...")
    # TRMM (observed) domain:
    latrange = (args.lat1, args.lat2)
    lonrange = (args.lon1, args.lon2)
    region = cdutil.region.domain(latitude=latrange, longitude=lonrange)

    # Amazon basin:
    # latrange = (-15.0, -5.0)
    # lonrange = (285.0, 295.0)

    print("Preparing to write output to JSON file ...")
    if not os.path.exists(args.results_dir):
        os.makedirs(args.results_dir)
    jsonFile = populateStringConstructor(args.outnamejson, args)
    jsonFile.month = monthname

    jsonname = os.path.join(os.path.abspath(args.results_dir), jsonFile())

    if not os.path.exists(jsonname) or args.append is False:
        print("Initializing dictionary of statistical results ...")
        stats_dic = {}
        metrics_dictionary = collections.OrderedDict()
    else:
        with open(jsonname) as f:
            metrics_dictionary = json.load(f)
            stats_dic = metrics_dictionary["RESULTS"]

    OUT = pcmdi_metrics.io.base.Base(os.path.abspath(args.results_dir), jsonFile())
    egg_pth = resources.resource_path()
    disclaimer = open(os.path.join(egg_pth, "disclaimer.txt")).read()
    metrics_dictionary["DISCLAIMER"] = disclaimer
    metrics_dictionary["REFERENCE"] = (
        "The statistics in this file are based on Trenberth, Zhang & Gehne, "
        "J Hydromet. 2017"
    )

    files = glob.glob(os.path.join(args.modpath, template()))
    print(files)

    params = [INPUT(args, name, template) for name in files]
    print("PARAMS:", params)

    results = cdp.cdp_run.multiprocess(compute, params, num_workers=args.num_workers)

    for r in results:
        m, region, res = r
        if r[0] not in stats_dic:
            stats_dic[m] = res
        else:
            stats_dic[m].update(res)

    print("Writing output to JSON file ...")
    metrics_dictionary["RESULTS"] = stats_dic
    rgmsk = metrics_dictionary.get("RegionalMasking", {})
    nm = list(res.keys())[0]
    region.id = nm
    rgmsk[nm] = {"id": nm, "domain": region}
    metrics_dictionary["RegionalMasking"] = rgmsk
    OUT.write(
        metrics_dictionary,
        json_structure=["model", "domain"],
        indent=4,
        separators=(",", ": "),
    )
    if cmec:
        print("Writing cmec file")
        OUT.write_cmec(indent=4, separators=(",", ": "))
    print("done")
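# Hedged numpy sketch (illustrative only) of the "RMS area-average" computed in
# compute() above: square the standard-deviation field, average, then take the
# square root. The real driver uses cdutil.averager (unweighted over the leading
# axis, then over latitude/longitude).
def _example_rms_area_average():
    import numpy as np

    x = np.array([[1.0, 2.0], [3.0, 4.0]])  # hypothetical std-of-hourly-means field
    return float(np.sqrt(np.mean(x * x)))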
    tree,
    variability_metrics_to_json,
    write_nc_output,
)

# To avoid below error
# OpenBLAS blas_thread_init: pthread_create failed for thread XX of 96: Resource temporarily unavailable
os.environ["OPENBLAS_NUM_THREADS"] = "1"

# Must be done before any CDAT library is called.
# https://github.com/CDAT/cdat/issues/2213
if "UVCDAT_ANONYMOUS_LOG" not in os.environ:
    os.environ["UVCDAT_ANONYMOUS_LOG"] = "no"

regions_specs = {}
egg_pth = resources.resource_path()
exec(
    compile(
        open(os.path.join(egg_pth, "default_regions.py")).read(),
        os.path.join(egg_pth, "default_regions.py"),
        "exec",
    )
)

# =================================================
# Collect user defined options
# -------------------------------------------------
P = pcmdi_metrics.driver.pmp_parser.PMPParser(
    description="Runs PCMDI Modes of Variability Computations",
    formatter_class=RawTextHelpFormatter,
)
P = AddParserArgument(P)
def test_pkg_resources(resource_filename, parse, tmpdir):
    resource_filename.return_value = str(tmpdir)

    path = resources.resource_path()

    assert path == str(tmpdir)
def main():
    P.add_argument(
        "-j",
        "--outnamejson",
        type=str,
        dest="outnamejson",
        default="pr_%(month)_%(firstyear)-%(lastyear)_savg_DiurnalFourier.json",
        help="Output name for jsons",
    )
    P.add_argument("--lat1", type=float, default=-50.0, help="First latitude")
    P.add_argument("--lat2", type=float, default=50.0, help="Last latitude")
    P.add_argument("--lon1", type=float, default=0.0, help="First longitude")
    P.add_argument("--lon2", type=float, default=360.0, help="Last longitude")
    P.add_argument(
        "--region_name",
        type=str,
        default="TRMM",
        help="name for the region of interest",
    )
    P.add_argument(
        "-t",
        "--filename_template",
        default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_S.nc",
        help="template for getting at amplitude files",
    )
    P.add_argument(
        "--filename_template_tS",
        default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_tS.nc",
        help="template for phase files",
    )
    P.add_argument(
        "--filename_template_sftlf",
        default="cmip5.%(model).%(experiment).r0i0p0.fx.atm.fx.sftlf.%(version).latestX.xml",
        help="template for sftlf file names",
    )
    P.add_argument("--model", default="*")
    P.add_argument(
        "--cmec",
        dest="cmec",
        action="store_true",
        default=False,
        help="Use to save metrics in CMEC JSON format",
    )
    P.add_argument(
        "--no_cmec",
        dest="cmec",
        action="store_false",
        default=False,
        help="Use to disable saving metrics in CMEC JSON format",
    )

    args = P.get_parameter()
    month = args.month
    monthname = monthname_d[month]
    startyear = args.firstyear
    finalyear = args.lastyear
    years = "%s-%s" % (startyear, finalyear)  # noqa: F841
    cmec = args.cmec

    print("Specifying latitude / longitude domain of interest ...")
    # TRMM (observed) domain:
    latrange = (args.lat1, args.lat2)
    lonrange = (args.lon1, args.lon2)
    region = cdutil.region.domain(latitude=latrange, longitude=lonrange)
    if args.region_name == "":
        region_name = "{:g}_{:g}&{:g}_{:g}".format(*(latrange + lonrange))
    else:
        region_name = args.region_name

    # Amazon basin:
    # latrange = (-15.0, -5.0)
    # lonrange = (285.0, 295.0)

    # Functions to convert phase between angle-in-radians and hours, for
    # either a 12- or 24-hour clock, i.e. for clocktype = 12 or 24:
    def hrs_to_rad(hours, clocktype):
        import MV2

        return 2 * MV2.pi * hours / clocktype

    def rad_to_hrs(phase, clocktype):
        import MV2

        return phase * clocktype / 2 / MV2.pi

    def vectoravg(hr1, hr2, clocktype):
        "Function to test vector-averaging of two time values:"
        import MV2

        sin_avg = (
            MV2.sin(hrs_to_rad(hr1, clocktype)) + MV2.sin(hrs_to_rad(hr2, clocktype))
        ) / 2
        cos_avg = (
            MV2.cos(hrs_to_rad(hr1, clocktype)) + MV2.cos(hrs_to_rad(hr2, clocktype))
        ) / 2
        return rad_to_hrs(MV2.arctan2(sin_avg, cos_avg), clocktype)

    def spacevavg(tvarb1, tvarb2, sftlf, model):
        """
        Given a "root filename" and month/year specifications, vector-average
        lat/lon arrays in an (amplitude, phase) pair of input data files.
        Each input data file contains diurnal (24h), semidiurnal (12h) and
        terdiurnal (8h) Fourier harmonic components of the composite mean
        day/night cycle.

        Vector-averaging means we consider the input data to be readings on an
        8-, 12- or 24-hour clock and separately average the Cartesian components
        (called "cosine" and "sine" below). Then the averaged components are
        combined back into amplitude and phase values and returned.

        Space-averaging is done globally, as well as separately for land and
        ocean areas.
""" glolf = cdutil.averager(sftlf, axis="xy") print(" Global mean land fraction = %5.3f" % glolf) outD = {} # Output dictionary to be returned by this function harmonics = [1, 2, 3] for harmonic in harmonics: ampl = tvarb1[harmonic - 1] tmax = tvarb2[harmonic - 1] # print ampl[:, :] # print tmax[:, :] clocktype = 24 / harmonic cosine = MV2.cos(hrs_to_rad(tmax, clocktype)) * ampl # X-component sine = MV2.sin(hrs_to_rad(tmax, clocktype)) * ampl # Y-component print("Area-averaging globally, over land only, and over ocean only ...") # Average Cartesian components ... cos_avg_glo = cdutil.averager(cosine, axis="xy") sin_avg_glo = cdutil.averager(sine, axis="xy") cos_avg_lnd = cdutil.averager(cosine * sftlf, axis="xy") sin_avg_lnd = cdutil.averager(sine * sftlf, axis="xy") cos_avg_ocn = cos_avg_glo - cos_avg_lnd sin_avg_ocn = sin_avg_glo - sin_avg_lnd # ... normalized by land-sea fraction: cos_avg_lnd /= glolf sin_avg_lnd /= glolf cos_avg_ocn /= 1 - glolf sin_avg_ocn /= 1 - glolf # Amplitude and phase: # * 86400 Convert kg/m2/s -> mm/d? amp_avg_glo = MV2.sqrt(sin_avg_glo ** 2 + cos_avg_glo ** 2) # * 86400 Convert kg/m2/s -> mm/d? amp_avg_lnd = MV2.sqrt(sin_avg_lnd ** 2 + cos_avg_lnd ** 2) # * 86400 Convert kg/m2/s -> mm/d? amp_avg_ocn = MV2.sqrt(sin_avg_ocn ** 2 + cos_avg_ocn ** 2) pha_avg_glo = MV2.remainder( rad_to_hrs(MV2.arctan2(sin_avg_glo, cos_avg_glo), clocktype), clocktype ) pha_avg_lnd = MV2.remainder( rad_to_hrs(MV2.arctan2(sin_avg_lnd, cos_avg_lnd), clocktype), clocktype ) pha_avg_ocn = MV2.remainder( rad_to_hrs(MV2.arctan2(sin_avg_ocn, cos_avg_ocn), clocktype), clocktype ) if "CMCC-CM" in model: # print '** Correcting erroneous time recording in ', rootfname pha_avg_lnd -= 3.0 pha_avg_lnd = MV2.remainder(pha_avg_lnd, clocktype) elif "BNU-ESM" in model or "CCSM4" in model or "CNRM-CM5" in model: # print '** Correcting erroneous time recording in ', rootfname pha_avg_lnd -= 1.5 pha_avg_lnd = MV2.remainder(pha_avg_lnd, clocktype) print( "Converting singleton transient variables to plain floating-point numbers ..." 
            )
            amp_avg_glo = float(amp_avg_glo)
            pha_avg_glo = float(pha_avg_glo)
            amp_avg_lnd = float(amp_avg_lnd)
            pha_avg_lnd = float(pha_avg_lnd)
            amp_avg_ocn = float(amp_avg_ocn)
            pha_avg_ocn = float(pha_avg_ocn)
            print(
                "%s %s-harmonic amplitude, phase = %7.3f mm/d, %7.3f hrsLST averaged globally"
                % (monthname, harmonic, amp_avg_glo, pha_avg_glo)
            )
            print(
                "%s %s-harmonic amplitude, phase = %7.3f mm/d, %7.3f hrsLST averaged over land"
                % (monthname, harmonic, amp_avg_lnd, pha_avg_lnd)
            )
            print(
                "%s %s-harmonic amplitude, phase = %7.3f mm/d, %7.3f hrsLST averaged over ocean"
                % (monthname, harmonic, amp_avg_ocn, pha_avg_ocn)
            )

            # Sub-dictionaries, one for each harmonic component:
            outD["harmonic" + str(harmonic)] = {}
            outD["harmonic" + str(harmonic)]["amp_avg_lnd"] = amp_avg_lnd
            outD["harmonic" + str(harmonic)]["pha_avg_lnd"] = pha_avg_lnd
            outD["harmonic" + str(harmonic)]["amp_avg_ocn"] = amp_avg_ocn
            outD["harmonic" + str(harmonic)]["pha_avg_ocn"] = pha_avg_ocn
        return outD

    print("Preparing to write output to JSON file ...")
    if not os.path.exists(args.results_dir):
        os.makedirs(args.results_dir)
    jsonFile = populateStringConstructor(args.outnamejson, args)
    jsonFile.month = monthname

    jsonname = os.path.join(os.path.abspath(args.results_dir), jsonFile())

    if not os.path.exists(jsonname) or args.append is False:
        print("Initializing dictionary of statistical results ...")
        stats_dic = {}
        metrics_dictionary = collections.OrderedDict()
    else:
        with open(jsonname) as f:
            metrics_dictionary = json.load(f)
            stats_dic = metrics_dictionary["RESULTS"]

    OUT = pcmdi_metrics.io.base.Base(
        os.path.abspath(args.results_dir), os.path.basename(jsonname)
    )
    egg_pth = resources.resource_path()
    disclaimer = open(os.path.join(egg_pth, "disclaimer.txt")).read()
    metrics_dictionary["DISCLAIMER"] = disclaimer
    metrics_dictionary[
        "REFERENCE"
    ] = "The statistics in this file are based on Covey et al., J Climate 2016"

    # Accumulate output from each model (or observed) data source in the
    # Python dictionary.
    template_S = populateStringConstructor(args.filename_template, args)
    template_S.month = monthname
    template_tS = populateStringConstructor(args.filename_template_tS, args)
    template_tS.month = monthname
    template_sftlf = populateStringConstructor(args.filename_template_sftlf, args)
    template_sftlf.month = monthname

    print("TEMPLATE:", template_S())
    files_S = glob.glob(os.path.join(args.modpath, template_S()))
    print(files_S)
    for file_S in files_S:
        print("Reading Amplitude from %s ..." % file_S)
        reverted = template_S.reverse(os.path.basename(file_S))
        model = reverted["model"]
        try:
            template_tS.model = model
            template_sftlf.model = model
            S = cdms2.open(file_S)("S", region)
            print(
                "Reading Phase from %s ..." % os.path.join(args.modpath, template_tS())
            )
            tS = cdms2.open(os.path.join(args.modpath, template_tS()))("tS", region)
            print(
                "Reading sftlf from %s ..."
                % os.path.join(args.modpath, template_sftlf())
            )
            try:
                sftlf_fnm = glob.glob(os.path.join(args.modpath, template_sftlf()))[0]
                sftlf = cdms2.open(sftlf_fnm)("sftlf", region) / 100.0
            except BaseException as err:
                print("Failed reading sftlf from file (error was: %s)" % err)
                print("Creating one for you")
                sftlf = cdutil.generateLandSeaMask(S.getGrid())

            if model not in stats_dic:
                stats_dic[model] = {region_name: spacevavg(S, tS, sftlf, model)}
            else:
                stats_dic[model].update({region_name: spacevavg(S, tS, sftlf, model)})
            print(stats_dic)
        except Exception as err:
            print("Failed for model %s with error %s" % (model, err))

    # Write output to JSON file.
metrics_dictionary["RESULTS"] = stats_dic rgmsk = metrics_dictionary.get("RegionalMasking", {}) nm = region_name region.id = nm rgmsk[nm] = {"id": nm, "domain": region} metrics_dictionary["RegionalMasking"] = rgmsk OUT.write( metrics_dictionary, json_structure=["model", "domain", "harmonic", "statistic"], indent=4, separators=(",", ": "), ) if cmec: print("Writing cmec file") OUT.write_cmec(indent=4, separators=(",", ": ")) print("done")
def path_to_default_args():
    """Returns path to Default Common Input Arguments in package egg."""
    egg_pth = resources.resource_path()
    file_path = os.path.join(egg_pth, "DefArgsCIA.json")
    return file_path
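# Hedged usage sketch (not part of the original module): load the packaged
# default-argument definitions as a Python dictionary.
def _example_load_default_args():
    import json

    with open(path_to_default_args()) as f:
        return json.load(f)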