def compute(param):
    template = populateStringConstructor(args.filename_template, args)
    template.variable = param.varname
    template.month = param.monthname
    fnameRoot = param.fileName
    reverted = template.reverse(os.path.basename(fnameRoot))
    model = reverted["model"]
    print('Specifying latitude / longitude domain of interest ...')
    datanameID = 'diurnalstd'  # Short ID name of output data
    latrange = (param.args.lat1, param.args.lat2)
    lonrange = (param.args.lon1, param.args.lon2)
    region = cdutil.region.domain(latitude=latrange, longitude=lonrange)
    if param.args.region_name == "":
        region_name = "{:g}_{:g}&{:g}_{:g}".format(*(latrange + lonrange))
    else:
        region_name = param.args.region_name
    print('Reading %s ...' % fnameRoot)
    reverted = template.reverse(os.path.basename(fnameRoot))
    model = reverted["model"]
    try:
        f = cdms2.open(fnameRoot)
        x = f(datanameID, region)
        units = x.units
        print('  Shape =', x.shape)
        print('Finding RMS area-average ...')
        x = x * x
        x = cdutil.averager(x, weights='unweighted')
        x = cdutil.averager(x, axis='xy')
        x = numpy.ma.sqrt(x)
        print('For %8s in %s, average variance of hourly values = (%5.2f %s)^2' % (model, monthname, x, units))
        f.close()
    except Exception as err:
        print("Failed model %s with error: %s" % (model, err))
        x = 1.e20
    return model, region, {region_name: x}
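
A minimal sketch of how a compute() function like the one above is typically driven, following the multiprocess pattern used in the later examples in this listing (INPUT, args, template and cdp are assumed to be provided by the surrounding pcmdi_metrics script):

files = glob.glob(os.path.join(args.modpath, template()))    # one input file per model
params = [INPUT(args, name, template) for name in files]     # per-file parameter objects
results = cdp.cdp_run.multiprocess(compute, params, num_workers=args.num_workers)
for model, region, res in results:                           # merge per-model results
    stats_dic.setdefault(model, {}).update(res)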
Example No. 3
def compute(param):
    template = populateStringConstructor(args.filename_template, args)
    template.variable = param.varname
    template.month = param.monthname
    fnameRoot = param.fileName
    reverted = template.reverse(os.path.basename(fnameRoot))
    model = reverted["model"]
    print('Specifying latitude / longitude domain of interest ...')
    datanameID = 'diurnalmean'  # Short ID name of output data
    latrange = (param.args.lat1, param.args.lat2)
    lonrange = (param.args.lon1, param.args.lon2)
    region = cdutil.region.domain(latitude=latrange, longitude=lonrange)
    if param.args.region_name == "":
        region_name = "{:g}_{:g}&{:g}_{:g}".format(*(latrange + lonrange))
    else:
        region_name = param.args.region_name
    print('Reading %s ...' % fnameRoot)
    try:
        f = cdms2.open(fnameRoot)
        x = f(datanameID, region)
        units = x.units
        print('  Shape =', x.shape)

        print('Finding standard deviation over first dimension (time of day) ...')
        x = genutil.statistics.std(x)
        print('  Shape =', x.shape)

        print('Finding r.m.s. average over 2nd-3rd dimensions (area) ...')
        x = x * x
        x = cdutil.averager(x, axis='xy')
        x = cdms2.MV2.sqrt(x)

        print('For %8s in %s, average variance of hourly values = (%5.2f %s)^2' % (model, monthname, x, units))
        f.close()
    except Exception as err:
        print("Failed model %s with error" % (err))
        x = 1.e20
    return model, region, {region_name: float(x)}
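
The last three steps above implement a root-mean-square area average: square the field, area-average the squares, then take the square root. A quick check of that sequence with plain numpy on a toy 2x2 field (illustrative only; cdutil.averager additionally applies grid-cell area weights):

import numpy
toy = numpy.array([[1.0, 2.0], [3.0, 4.0]])
rms = numpy.sqrt((toy ** 2).mean())
print(rms)  # ~2.74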
Example No. 5
def main():
    P.add_argument(
        "-j",
        "--outnamejson",
        type=str,
        dest='outnamejson',
        default='pr_%(month)_%(firstyear)-%(lastyear)_savg_DiurnalFourier.json',
        help="Output name for jsons")

    P.add_argument("--lat1", type=float, default=-50., help="First latitude")
    P.add_argument("--lat2", type=float, default=50., help="Last latitude")
    P.add_argument("--lon1", type=float, default=0., help="First longitude")
    P.add_argument("--lon2", type=float, default=360., help="Last longitude")
    P.add_argument("--region_name",
                   type=str,
                   default="TRMM",
                   help="name for the region of interest")

    P.add_argument(
        "-t",
        "--filename_template",
        default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_S.nc",
        help="template for getting at amplitude files")
    P.add_argument(
        "--filename_template_tS",
        default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_tS.nc",
        help="template for phase files")
    P.add_argument(
        "--filename_template_sftlf",
        default=
        "cmip5.%(model).%(experiment).r0i0p0.fx.atm.fx.sftlf.%(version).latestX.xml",
        help="template for sftlf file names")
    P.add_argument("--model", default="*")

    args = P.get_parameter()
    month = args.month
    monthname = monthname_d[month]
    startyear = args.firstyear
    finalyear = args.lastyear
    years = "%s-%s" % (startyear, finalyear)  # noqa: F841

    print('Specifying latitude / longitude domain of interest ...')
    # TRMM (observed) domain:
    latrange = (args.lat1, args.lat2)
    lonrange = (args.lon1, args.lon2)

    region = cdutil.region.domain(latitude=latrange, longitude=lonrange)

    if args.region_name == "":
        region_name = "{:g}_{:g}&{:g}_{:g}".format(*(latrange + lonrange))
    else:
        region_name = args.region_name

    # Amazon basin:
    # latrange = (-15.0,  -5.0)
    # lonrange = (285.0, 295.0)

    # Functions to convert phase between angle-in-radians and hours, for
    # either a 12- or 24-hour clock, i.e. for clocktype = 12 or 24:

    def hrs_to_rad(hours, clocktype):
        import MV2
        return 2 * MV2.pi * hours / clocktype

    def rad_to_hrs(phase, clocktype):
        import MV2
        return phase * clocktype / 2 / MV2.pi

    def vectoravg(hr1, hr2, clocktype):
        'Function to test vector-averaging of two time values:'
        import MV2

        sin_avg = (MV2.sin(hrs_to_rad(hr1, clocktype)) +
                   MV2.sin(hrs_to_rad(hr2, clocktype))) / 2
        cos_avg = (MV2.cos(hrs_to_rad(hr1, clocktype)) +
                   MV2.cos(hrs_to_rad(hr2, clocktype))) / 2
        return rad_to_hrs(MV2.arctan2(sin_avg, cos_avg), clocktype)
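
    # Worked example (illustrative values, not part of the metric): on a
    # 24-hour clock, 23:00 and 01:00 straddle midnight, so a naive mean gives
    # 12.0, which is wrong.  The vector average above maps each time onto the
    # unit circle first: the sine components average to 0, the cosine
    # components to ~0.966, and arctan2(0, 0.966) = 0 rad, so
    # vectoravg(23., 1., 24) correctly returns 0.0 (midnight).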

    def spacevavg(tvarb1, tvarb2, sftlf, model):
        '''
        Given a "root filename" and month/year specifications, vector-average lat/lon arrays in an (amplitude, phase)
        pair of input data files. Each input data file contains diurnal (24h), semidiurnal (12h) and terdiurnal (8h)
        Fourier harmonic components of the composite mean day/night cycle.

        Vector-averaging means we consider the input data to be readings on an 8-, 12- or 24-hour clock and separately
        average the Cartesian components (called "cosine" and "sine" below). Then the averaged components are combined
        back into amplitude and phase values and returned.

        Space-averaging is done globally, as well as separately for land and ocean areas.
        '''

        glolf = cdutil.averager(sftlf, axis='xy')
        print('  Global mean land fraction = %5.3f' % glolf)
        outD = {}  # Output dictionary to be returned by this function
        harmonics = [1, 2, 3]
        for harmonic in harmonics:
            ampl = tvarb1[harmonic - 1]
            tmax = tvarb2[harmonic - 1]
            # print ampl[:, :]
            # print tmax[:, :]
            clocktype = 24 / harmonic
            cosine = MV2.cos(hrs_to_rad(tmax, clocktype)) * ampl  # X-component
            sine = MV2.sin(hrs_to_rad(tmax, clocktype)) * ampl  # Y-component

            print(
                'Area-averaging globally, over land only, and over ocean only ...'
            )
            # Average Cartesian components ...
            cos_avg_glo = cdutil.averager(cosine, axis='xy')
            sin_avg_glo = cdutil.averager(sine, axis='xy')
            cos_avg_lnd = cdutil.averager(cosine * sftlf, axis='xy')
            sin_avg_lnd = cdutil.averager(sine * sftlf, axis='xy')
            cos_avg_ocn = cos_avg_glo - cos_avg_lnd
            sin_avg_ocn = sin_avg_glo - sin_avg_lnd
            # ... normalized by land-sea fraction:
            cos_avg_lnd /= glolf
            sin_avg_lnd /= glolf
            cos_avg_ocn /= (1 - glolf)
            sin_avg_ocn /= (1 - glolf)
            # Amplitude and phase:
            # * 86400 Convert kg/m2/s -> mm/d?
            amp_avg_glo = MV2.sqrt(sin_avg_glo**2 + cos_avg_glo**2)
            # * 86400 Convert kg/m2/s -> mm/d?
            amp_avg_lnd = MV2.sqrt(sin_avg_lnd**2 + cos_avg_lnd**2)
            # * 86400 Convert kg/m2/s -> mm/d?
            amp_avg_ocn = MV2.sqrt(sin_avg_ocn**2 + cos_avg_ocn**2)
            pha_avg_glo = MV2.remainder(
                rad_to_hrs(MV2.arctan2(sin_avg_glo, cos_avg_glo), clocktype),
                clocktype)
            pha_avg_lnd = MV2.remainder(
                rad_to_hrs(MV2.arctan2(sin_avg_lnd, cos_avg_lnd), clocktype),
                clocktype)
            pha_avg_ocn = MV2.remainder(
                rad_to_hrs(MV2.arctan2(sin_avg_ocn, cos_avg_ocn), clocktype),
                clocktype)
            if 'CMCC-CM' in model:
                # print '** Correcting erroneous time recording in ', rootfname
                pha_avg_lnd -= 3.0
                pha_avg_lnd = MV2.remainder(pha_avg_lnd, clocktype)
            elif 'BNU-ESM' in model or 'CCSM4' in model or 'CNRM-CM5' in model:
                # print '** Correcting erroneous time recording in ', rootfname
                pha_avg_lnd -= 1.5
                pha_avg_lnd = MV2.remainder(pha_avg_lnd, clocktype)
            print(
                'Converting singleton transient variables to plain floating-point numbers ...'
            )
            amp_avg_glo = float(amp_avg_glo)
            pha_avg_glo = float(pha_avg_glo)
            amp_avg_lnd = float(amp_avg_lnd)
            pha_avg_lnd = float(pha_avg_lnd)
            amp_avg_ocn = float(amp_avg_ocn)
            pha_avg_ocn = float(pha_avg_ocn)
            print(
                '%s %s-harmonic amplitude, phase = %7.3f mm/d, %7.3f hrsLST averaged globally'
                % (monthname, harmonic, amp_avg_glo, pha_avg_glo))
            print(
                '%s %s-harmonic amplitude, phase = %7.3f mm/d, %7.3f hrsLST averaged over land'
                % (monthname, harmonic, amp_avg_lnd, pha_avg_lnd))
            print(
                '%s %s-harmonic amplitude, phase = %7.3f mm/d, %7.3f hrsLST averaged over ocean'
                % (monthname, harmonic, amp_avg_ocn, pha_avg_ocn))
            # Sub-dictionaries, one for each harmonic component:
            outD['harmonic' + str(harmonic)] = {}
            outD['harmonic' + str(harmonic)]['amp_avg_lnd'] = amp_avg_lnd
            outD['harmonic' + str(harmonic)]['pha_avg_lnd'] = pha_avg_lnd
            outD['harmonic' + str(harmonic)]['amp_avg_ocn'] = amp_avg_ocn
            outD['harmonic' + str(harmonic)]['pha_avg_ocn'] = pha_avg_ocn
        return outD
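
    # Illustrative shape of the dictionary returned by spacevavg (numbers are
    # placeholders): one sub-dictionary per Fourier harmonic, e.g.
    #   {'harmonic1': {'amp_avg_lnd': 2.3, 'pha_avg_lnd': 15.1,
    #                  'amp_avg_ocn': 1.1, 'pha_avg_ocn': 4.7},
    #    'harmonic2': {...}, 'harmonic3': {...}}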

    print('Preparing to write output to JSON file ...')
    if not os.path.exists(args.results_dir):
        os.makedirs(args.results_dir)
    jsonFile = populateStringConstructor(args.outnamejson, args)
    jsonFile.month = monthname

    jsonname = os.path.join(os.path.abspath(args.results_dir), jsonFile())

    if not os.path.exists(jsonname) or args.append is False:
        print('Initializing dictionary of statistical results ...')
        stats_dic = {}
        metrics_dictionary = collections.OrderedDict()
    else:
        with open(jsonname) as f:
            metrics_dictionary = json.load(f)
            stats_dic = metrics_dictionary["RESULTS"]

    OUT = pcmdi_metrics.io.base.Base(os.path.abspath(args.results_dir),
                                     os.path.basename(jsonname))
    try:
        egg_pth = pkg_resources.resource_filename(
            pkg_resources.Requirement.parse("pcmdi_metrics"), "share/pmp")
    except Exception:
        # python 2 seems to fail when run in the home directory of the source?
        egg_pth = os.path.join(os.getcwd(), "share", "pmp")
    disclaimer = open(os.path.join(egg_pth, "disclaimer.txt")).read()
    metrics_dictionary["DISCLAIMER"] = disclaimer
    metrics_dictionary[
        "REFERENCE"] = "The statistics in this file are based on Covey et al., J Climate 2016"

    # Accumulate output from each model (or observed) data source in the
    # Python dictionary.
    template_S = populateStringConstructor(args.filename_template, args)
    template_S.month = monthname
    template_tS = populateStringConstructor(args.filename_template_tS, args)
    template_tS.month = monthname
    template_sftlf = populateStringConstructor(args.filename_template_sftlf,
                                               args)
    template_sftlf.month = monthname

    print("TEMPLATE:", template_S())
    files_S = glob.glob(os.path.join(args.modpath, template_S()))
    print(files_S)
    for file_S in files_S:
        print('Reading Amplitude from %s ...' % file_S)
        reverted = template_S.reverse(os.path.basename(file_S))
        model = reverted["model"]
        try:
            template_tS.model = model
            template_sftlf.model = model
            S = cdms2.open(file_S)("S", region)
            print('Reading Phase from %s ...' %
                  os.path.join(args.modpath, template_tS()))
            tS = cdms2.open(os.path.join(args.modpath, template_tS()))("tS",
                                                                       region)
            print('Reading sftlf from %s ...' %
                  os.path.join(args.modpath, template_sftlf()))
            try:
                sftlf_fnm = glob.glob(
                    os.path.join(args.modpath, template_sftlf()))[0]
                sftlf = cdms2.open(sftlf_fnm)("sftlf", region) / 100.
            except BaseException as err:
                print('Failed reading sftlf from file (error was: %s)' % err)
                print('Creating one for you')
                sftlf = cdutil.generateLandSeaMask(S.getGrid())

            if model not in stats_dic:
                stats_dic[model] = {
                    region_name: spacevavg(S, tS, sftlf, model)
                }
            else:
                stats_dic[model].update(
                    {region_name: spacevavg(S, tS, sftlf, model)})
            print(stats_dic)
        except Exception as err:
            print("Failed for model %s with error %s" % (model, err))

    # Write output to JSON file.
    metrics_dictionary["RESULTS"] = stats_dic
    rgmsk = metrics_dictionary.get("RegionalMasking", {})
    nm = region_name
    region.id = nm
    rgmsk[nm] = {"id": nm, "domain": region}
    metrics_dictionary["RegionalMasking"] = rgmsk
    OUT.write(metrics_dictionary,
              json_structure=["model", "domain", "harmonic", "statistic"],
              indent=4,
              separators=(',', ': '))
    print('done')
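
The JSON written above nests results as model -> domain -> harmonic -> statistic (the json_structure passed to OUT.write), so its "RESULTS" block looks roughly like this (model name and numbers are placeholders):

# "RESULTS": {
#     "<model>": {
#         "TRMM": {
#             "harmonic1": {"amp_avg_lnd": 2.31, "pha_avg_lnd": 15.2,
#                           "amp_avg_ocn": 1.12, "pha_avg_ocn": 4.8},
#             "harmonic2": {...},
#             "harmonic3": {...}}}}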
               default="TRMM",
               help="name for the region of interest")

P.add_argument(
    "-t",
    "--filename_template",
    default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_diurnal_avg.nc")
P.add_argument("--model", default="*")

args = P.get_parameter()
month = args.month
monthname = monthname_d[month]
startyear = args.firstyear
finalyear = args.lastyear

template = populateStringConstructor(args.filename_template, args)
template.month = monthname

print("TEMPLATE NAME:", template())

print('Specifying latitude / longitude domain of interest ...')
# TRMM (observed) domain:
latrange = (args.lat1, args.lat2)
lonrange = (args.lon1, args.lon2)

region = cdutil.region.domain(latitude=latrange, longitude=lonrange)

# Amazon basin:
# latrange = (-15.0,  -5.0)
# lonrange = (285.0, 295.0)
P.add_argument("--lon1", type=float, default=0., help="First longitude")
P.add_argument("--lon2", type=float, default=360., help="Last longitude")
P.add_argument("--region_name", type=str, default="TRMM",
               help="name for the region of interest")

P.add_argument("-t", "--filename_template",
               default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_diurnal_std.nc")
P.add_argument("--model", default="*")

args = P.get_parameter()
month = args.month
monthname = monthname_d[month]
startyear = args.firstyear
finalyear = args.lastyear

template = populateStringConstructor(args.filename_template, args)
template.month = monthname

print("TEMPLATE NAME:", template())

print('Specifying latitude / longitude domain of interest ...')
# TRMM (observed) domain:
latrange = (args.lat1, args.lat2)
lonrange = (args.lon1, args.lon2)

region = cdutil.region.domain(latitude=latrange, longitude=lonrange)

# Amazon basin:
# latrange = (-15.0,  -5.0)
# lonrange = (285.0, 295.0)
Example No. 8
def compute(params):
    fileName = params.fileName
    startyear = params.args.firstyear
    finalyear = params.args.lastyear
    month = params.args.month
    monthname = params.monthname
    varbname = params.varname
    template = populateStringConstructor(args.filename_template, args)
    template.variable = varbname

    reverted = template.reverse(os.path.basename(fileName))
    dataname = reverted["model"]
    if dataname not in args.skip:
        try:
            print('Data source:', dataname)
            print('Opening %s ...' % fileName)
            f = cdms2.open(fileName)
            iYear = 0
            dmean = None
            for year in range(startyear, finalyear + 1):
                print('Year %s:' % year)
                startTime = cdtime.comptime(year, month)
                # Last possible second to get all tpoints
                finishtime = startTime.add(1, cdtime.Month).add(
                    -1, cdtime.Minute)
                print('Reading %s from %s for time interval %s to %s ...' % (
                    varbname, fileName, startTime, finishtime))
                # Transient variable stores data for current year's month.
                tvarb = f(varbname, time=(startTime, finishtime, "ccn"))
                # *HARD-CODES conversion from kg/m2/sec to mm/day.
                tvarb *= 86400
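                # 1 kg of water over 1 m^2 is a layer 1 mm deep, so multiplying
                # a kg m-2 s-1 flux by 86400 s/day gives mm/day.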
                # The following tasks need to be done only once, extracting
                # metadata from first-year file:
                tc = tvarb.getTime().asComponentTime()
                current = tc[0]
                while current.month == month:
                    end = cdtime.comptime(current.year, current.month,
                                          current.day).add(1, cdtime.Day)
                    sub = tvarb(time=(current, end, "con"))
                    # Assumes first dimension of input ("axis#0") is time
                    tmp = numpy.ma.average(sub, axis=0)
                    sh = list(tmp.shape)
                    sh.insert(0, 1)
                    if dmean is None:
                        dmean = tmp.reshape(sh)
                    else:
                        dmean = numpy.ma.concatenate((dmean, tmp.reshape(sh)),
                                                     axis=0)
                    current = end
                iYear += 1
            f.close()
            stdvalues = cdms2.MV2.array(genutil.statistics.std(dmean))
            stdvalues.setAxis(0, tvarb.getLatitude())
            stdvalues.setAxis(1, tvarb.getLongitude())
            stdvalues.id = 'dailySD'
            # Standard deviation has same units as mean.
            stdvalues.units = "mm/d"
            stdoutfile = ('%s_%s_%s_%s-%s_std_of_dailymeans.nc') % (
                varbname, dataname, monthname, str(startyear), str(finalyear))
        except Exception as err:
            print "Failed for model: %s with error: %s" % (dataname, err)
    if not os.path.exists(args.output_directory):
        os.makedirs(args.output_directory)
    g = cdms2.open(os.path.join(args.output_directory, stdoutfile), 'w')
    g.write(stdvalues)
    g.close()
        print('%s %s-harmonic amplitude, phase = %7.3f mm/d, %7.3f hrsLST averaged over land'
              % (monthname, harmonic, amp_avg_lnd, pha_avg_lnd))
        print('%s %s-harmonic amplitude, phase = %7.3f mm/d, %7.3f hrsLST averaged over ocean'
              % (monthname, harmonic, amp_avg_ocn, pha_avg_ocn))
        # Sub-dictionaries, one for each harmonic component:
        outD['harmonic' + str(harmonic)] = {}
        outD['harmonic' + str(harmonic)]['amp_avg_lnd'] = amp_avg_lnd
        outD['harmonic' + str(harmonic)]['pha_avg_lnd'] = pha_avg_lnd
        outD['harmonic' + str(harmonic)]['amp_avg_ocn'] = amp_avg_ocn
        outD['harmonic' + str(harmonic)]['pha_avg_ocn'] = pha_avg_ocn
    return outD


print('Preparing to write output to JSON file ...')
if not os.path.exists(args.results_dir):
    os.makedirs(args.results_dir)
jsonFile = populateStringConstructor(args.outnamejson, args)
jsonFile.month = monthname

jsonname = os.path.join(os.path.abspath(args.results_dir), jsonFile())

if not os.path.exists(jsonname) or args.append is False:
    print('Initializing dictionary of statistical results ...')
    stats_dic = {}
    metrics_dictionary = collections.OrderedDict()
else:
    with open(jsonname) as f:
        metrics_dictionary = json.load(f)
        stats_dic = metrics_dictionary["RESULTS"]

OUT = pcmdi_metrics.io.base.Base(os.path.abspath(args.results_dir),
                                 os.path.basename(jsonname))
Example No. 10
def main():
    def compute(param):
        template = populateStringConstructor(args.filename_template, args)
        template.variable = param.varname
        template.month = param.monthname
        fnameRoot = param.fileName
        reverted = template.reverse(os.path.basename(fnameRoot))
        model = reverted["model"]
        print("Specifying latitude / longitude domain of interest ...")
        datanameID = "diurnalstd"  # Short ID name of output data
        latrange = (param.args.lat1, param.args.lat2)
        lonrange = (param.args.lon1, param.args.lon2)
        region = cdutil.region.domain(latitude=latrange, longitude=lonrange)
        if param.args.region_name == "":
            region_name = "{:g}_{:g}&{:g}_{:g}".format(*(latrange + lonrange))
        else:
            region_name = param.args.region_name
        print("Reading %s ..." % fnameRoot)
        reverted = template.reverse(os.path.basename(fnameRoot))
        model = reverted["model"]
        try:
            f = cdms2.open(fnameRoot)
            x = f(datanameID, region)
            units = x.units
            print("  Shape =", x.shape)
            print("Finding RMS area-average ...")
            x = x * x
            x = cdutil.averager(x, weights="unweighted")
            x = cdutil.averager(x, axis="xy")
            x = numpy.ma.sqrt(x)
            print(
                "For %8s in %s, average variance of hourly values = (%5.2f %s)^2"
                % (model, monthname, x, units))
            f.close()
        except Exception as err:
            print("Failed model %s with error: %s" % (model, err))
            x = 1.0e20
        return model, region, {region_name: x}
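
    # compute() returns a (model, region, {region_name: value}) tuple; the
    # results loop further below merges these per-model entries into stats_dic,
    # and the returned region is reused for the "RegionalMasking" record.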

    P.add_argument(
        "-j",
        "--outnamejson",
        type=str,
        dest="outnamejson",
        default="pr_%(month)_%(firstyear)-%(lastyear)_std_of_hourlymeans.json",
        help="Output name for jsons",
    )

    P.add_argument("--lat1", type=float, default=-50.0, help="First latitude")
    P.add_argument("--lat2", type=float, default=50.0, help="Last latitude")
    P.add_argument("--lon1", type=float, default=0.0, help="First longitude")
    P.add_argument("--lon2", type=float, default=360.0, help="Last longitude")
    P.add_argument(
        "--region_name",
        type=str,
        default="TRMM",
        help="name for the region of interest",
    )

    P.add_argument(
        "-t",
        "--filename_template",
        default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_diurnal_std.nc",
    )
    P.add_argument("--model", default="*")
    P.add_argument(
        "--cmec",
        dest="cmec",
        action="store_true",
        default=False,
        help="Use to save metrics in CMEC JSON format",
    )
    P.add_argument(
        "--no_cmec",
        dest="cmec",
        action="store_false",
        default=False,
        help="Use to disable saving metrics in CMEC JSON format",
    )

    args = P.get_parameter()
    month = args.month
    monthname = monthname_d[month]
    startyear = args.firstyear  # noqa: F841
    finalyear = args.lastyear  # noqa: F841
    cmec = args.cmec

    template = populateStringConstructor(args.filename_template, args)
    template.month = monthname

    print("TEMPLATE NAME:", template())

    print("Specifying latitude / longitude domain of interest ...")
    # TRMM (observed) domain:
    latrange = (args.lat1, args.lat2)
    lonrange = (args.lon1, args.lon2)

    region = cdutil.region.domain(latitude=latrange, longitude=lonrange)

    # Amazon basin:
    # latrange = (-15.0,  -5.0)
    # lonrange = (285.0, 295.0)

    print("Preparing to write output to JSON file ...")
    if not os.path.exists(args.results_dir):
        os.makedirs(args.results_dir)
    jsonFile = populateStringConstructor(args.outnamejson, args)
    jsonFile.month = monthname

    jsonname = os.path.join(os.path.abspath(args.results_dir), jsonFile())

    if not os.path.exists(jsonname) or args.append is False:
        print("Initializing dictionary of statistical results ...")
        stats_dic = {}
        metrics_dictionary = collections.OrderedDict()
    else:
        with open(jsonname) as f:
            metrics_dictionary = json.load(f)
            stats_dic = metrics_dictionary["RESULTS"]

    OUT = pcmdi_metrics.io.base.Base(os.path.abspath(args.results_dir),
                                     jsonFile())
    egg_pth = resources.resource_path()
    disclaimer = open(os.path.join(egg_pth, "disclaimer.txt")).read()
    metrics_dictionary["DISCLAIMER"] = disclaimer
    metrics_dictionary["REFERENCE"] = (
        "The statistics in this file are based on Trenberth, Zhang & Gehne, "
        "J Hydromet. 2017")

    files = glob.glob(os.path.join(args.modpath, template()))
    print(files)

    params = [INPUT(args, name, template) for name in files]
    print("PARAMS:", params)

    results = cdp.cdp_run.multiprocess(compute,
                                       params,
                                       num_workers=args.num_workers)

    for r in results:
        m, region, res = r
        if r[0] not in stats_dic:
            stats_dic[m] = res
        else:
            stats_dic[m].update(res)

    print("Writing output to JSON file ...")
    metrics_dictionary["RESULTS"] = stats_dic
    rgmsk = metrics_dictionary.get("RegionalMasking", {})
    nm = list(res.keys())[0]
    region.id = nm
    rgmsk[nm] = {"id": nm, "domain": region}
    metrics_dictionary["RegionalMasking"] = rgmsk
    OUT.write(
        metrics_dictionary,
        json_structure=["model", "domain"],
        indent=4,
        separators=(",", ": "),
    )
    if cmec:
        print("Writing cmec file")
        OUT.write_cmec(indent=4, separators=(",", ": "))
    print("done")
        print('%s %s-harmonic amplitude, phase = %7.3f mm/d, %7.3f hrsLST averaged over land'
              % (monthname, harmonic, amp_avg_lnd, pha_avg_lnd))
        print('%s %s-harmonic amplitude, phase = %7.3f mm/d, %7.3f hrsLST averaged over ocean'
              % (monthname, harmonic, amp_avg_ocn, pha_avg_ocn))
        # Sub-dictionaries, one for each harmonic component:
        outD['harmonic' + str(harmonic)] = {}
        outD['harmonic' + str(harmonic)]['amp_avg_lnd'] = amp_avg_lnd
        outD['harmonic' + str(harmonic)]['pha_avg_lnd'] = pha_avg_lnd
        outD['harmonic' + str(harmonic)]['amp_avg_ocn'] = amp_avg_ocn
        outD['harmonic' + str(harmonic)]['pha_avg_ocn'] = pha_avg_ocn
    return outD


print('Preparing to write output to JSON file ...')
if not os.path.exists(args.output_directory):
    os.makedirs(args.output_directory)
jsonFile = populateStringConstructor(args.outnamejson, args)
jsonFile.month = monthname

jsonname = os.path.join(os.path.abspath(args.output_directory), jsonFile())

if not os.path.exists(jsonname) or args.append is False:
    print('Initializing dictionary of statistical results ...')
    stats_dic = {}
    metrics_dictionary = collections.OrderedDict()
else:
    with open(jsonname) as f:
        metrics_dictionary = json.load(f)
        stats_dic = metrics_dictionary["RESULTS"]

OUT = pcmdi_metrics.io.base.Base(os.path.abspath(args.output_directory),
                                 os.path.basename(jsonname))
def compute(params):
    fileName = params.fileName
    month = params.args.month
    monthname = params.monthname
    varbname = params.varname
    template = populateStringConstructor(args.filename_template, args)
    template.variable = varbname
    # Units on output (*may be converted below from the units of input*)
    outunits = 'mm/d'
    startime = 1.5  # GMT value for starting time-of-day
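    # 01:30 GMT is the first of the 3-hourly CMIP5 timepoints (01:30, 04:30,
    # ..., 22:30); see the CMIP5 timing notes in a later example in this listing.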

    reverted = template.reverse(os.path.basename(fileName))
    dataname = reverted["model"]
    if dataname not in args.skip:
        try:
            print('Data source:', dataname)
            print('Opening %s ...' % fileName)
            f = cdms2.open(fileName)

            # Composite-mean and composite-s.d diurnal cycle for month and year(s):
            iYear = 0
            for year in range(args.firstyear, args.lastyear + 1):
                print('Year %s:' % year)
                startTime = cdtime.comptime(year, month, 1, 1, 30)
                # Last possible second to get all tpoints
                finishtime = startTime.add(1, cdtime.Month).add(
                    -1.5, cdtime.Hour).add(.1, cdtime.Second)
                print('Reading %s from %s for time interval %s to %s ...' %
                      (varbname, fileName, startTime, finishtime))
                # Transient variable stores data for current year's month.
                tvarb = f(varbname, time=(startTime, finishtime))
                # *HARD-CODES conversion from kg/m2/sec to mm/day.
                tvarb *= 86400
                print('Shape:', tvarb.shape)
                # The following tasks need to be done only once, extracting
                # metadata from first-year file:
                if year == args.firstyear:
                    tc = tvarb.getTime().asComponentTime()
                    day1 = cdtime.comptime(tc[0].year, tc[0].month)
                    firstday = tvarb(time=(day1, day1.add(1, cdtime.Day),
                                           "con"))
                    dimensions = firstday.shape
                    # print '  Shape = ', dimensions
                    # Number of time points in the selected month for one year
                    N = dimensions[0]
                    nlats = dimensions[1]
                    nlons = dimensions[2]
                    deltaH = 24. / N
                    dayspermo = tvarb.shape[0] // N
                    print(
                        '  %d timepoints per day, %d hr intervals between timepoints'
                        % (N, deltaH))
                    comptime = firstday.getTime()
                    modellons = tvarb.getLongitude()
                    modellats = tvarb.getLatitude()
                    # Longitude values are needed later to compute Local Solar
                    # Times.
                    lons = modellons[:]
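                    # Earth rotates 360 degrees in 24 h, so each 15 degrees of
                    # longitude shifts local time by one hour; the loop below
                    # uses LST = (GMT hour + lon/15) mod 24.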
                    print('  Creating temporary storage and output fields ...')
                    # Sorts tvarb into separate GMTs for one year
                    tvslice = MV2.zeros((N, dayspermo, nlats, nlons))
                    # Concatenates tvslice over all years
                    concatenation = MV2.zeros(
                        (N, dayspermo * nYears, nlats, nlons))
                    LSTs = MV2.zeros((N, nlats, nlons))
                    for iGMT in range(N):
                        hour = iGMT * deltaH + startime
                        print(
                            '  Computing Local Standard Times for GMT %5.2f ...'
                            % hour)
                        for j in range(nlats):
                            for k in range(nlons):
                                LSTs[iGMT, j, k] = (hour + lons[k] / 15) % 24
                for iGMT in range(N):
                    hour = iGMT * deltaH + startime
                    print('  Choosing timepoints with GMT %5.2f ...' % hour)
                    # Transient-variable slice: every Nth tpoint gets all of
                    # the current GMT's tpoints for current year:
                    tvslice[iGMT] = tvarb[iGMT:tvarb.shape[0]:N]
                    concatenation[iGMT, iYear * dayspermo:(iYear + 1) *
                                  dayspermo] = tvslice[iGMT]
                iYear += 1
            f.close()

            # For each GMT, take mean and standard deviation over all years for
            # the chosen month:
            avgvalues = MV2.zeros((N, nlats, nlons))
            stdvalues = MV2.zeros((N, nlats, nlons))
            for iGMT in range(N):
                hour = iGMT * deltaH + startime
                print(
                    'Computing mean and standard deviation over all GMT %5.2f timepoints ...'
                    % hour)
                # Assumes first dimension of input ("axis#0") is time
                avgvalues[iGMT] = MV2.average(concatenation[iGMT], axis=0)
                stdvalues[iGMT] = genutil.statistics.std(concatenation[iGMT])
            avgvalues.id = 'diurnalmean'
            stdvalues.id = 'diurnalstd'
            LSTs.id = 'LST'
            avgvalues.units = outunits
            # Standard deviation has same units as mean (not so for
            # higher-moment stats).
            stdvalues.units = outunits
            LSTs.units = 'hr'
            LSTs.longname = 'Local Solar Time'
            avgvalues.setAxis(0, comptime)
            avgvalues.setAxis(1, modellats)
            avgvalues.setAxis(2, modellons)
            stdvalues.setAxis(0, comptime)
            stdvalues.setAxis(1, modellats)
            stdvalues.setAxis(2, modellons)
            LSTs.setAxis(0, comptime)
            LSTs.setAxis(1, modellats)
            LSTs.setAxis(2, modellons)
            avgoutfile = ('%s_%s_%s_%s-%s_diurnal_avg.nc') % (
                varbname, dataname, monthname, str(
                    args.firstyear), str(args.lastyear))
            stdoutfile = ('%s_%s_%s_%s-%s_diurnal_std.nc') % (
                varbname, dataname, monthname, str(
                    args.firstyear), str(args.lastyear))
            LSToutfile = ('%s_%s_LocalSolarTimes.nc' % (varbname, dataname))
            if not os.path.exists(args.results_dir):
                os.makedirs(args.results_dir)
            f = cdms2.open(os.path.join(args.results_dir, avgoutfile), 'w')
            g = cdms2.open(os.path.join(args.results_dir, stdoutfile), 'w')
            h = cdms2.open(os.path.join(args.results_dir, LSToutfile), 'w')
            f.write(avgvalues)
            g.write(stdvalues)
            h.write(LSTs)
            f.close()
            g.close()
            h.close()
        except Exception as err:
            print("Failed for model %s with erro: %s" % (dataname, err))
Example No. 13
def main():
    def compute(params):
        fileName = params.fileName
        month = params.args.month
        monthname = params.monthname
        varbname = params.varname
        template = populateStringConstructor(args.filename_template, args)
        template.variable = varbname
        # Units on output (*may be converted below from the units of input*)
        outunits = "mm/d"
        startime = 1.5  # GMT value for starting time-of-day

        dataname = params.args.model
        if dataname is None or dataname.find("*") != -1:
            # model not passed or passed as *
            reverted = template.reverse(os.path.basename(fileName))
            print("REVERYING", reverted, dataname)
            dataname = reverted["model"]
        if dataname not in args.skip:
            try:
                print("Data source:", dataname)
                print("Opening %s ..." % fileName)
                f = cdms2.open(fileName)

                # Composite-mean and composite-s.d diurnal cycle for month and year(s):
                iYear = 0
                for year in range(args.firstyear, args.lastyear + 1):
                    print("Year %s:" % year)
                    startTime = cdtime.comptime(year, month)
                    # Last possible second to get all tpoints
                    finishtime = startTime.add(1, cdtime.Month).add(-1, cdtime.Minute)
                    print(
                        "Reading %s from %s for time interval %s to %s ..."
                        % (varbname, fileName, startTime, finishtime)
                    )
                    # Transient variable stores data for current year's month.
                    tvarb = f(varbname, time=(startTime, finishtime))
                    # *HARD-CODES conversion from kg/m2/sec to mm/day.
                    tvarb *= 86400
                    print("Shape:", tvarb.shape)
                    # The following tasks need to be done only once, extracting
                    # metadata from first-year file:
                    if year == args.firstyear:
                        tc = tvarb.getTime().asComponentTime()
                        print("DATA FROM:", tc[0], "to", tc[-1])
                        day1 = cdtime.comptime(tc[0].year, tc[0].month)
                        day1 = tc[0]
                        firstday = tvarb(time=(day1, day1.add(1.0, cdtime.Day), "con"))
                        dimensions = firstday.shape
                        print("  Shape = ", dimensions)
                        # Number of time points in the selected month for one year
                        N = dimensions[0]
                        nlats = dimensions[1]
                        nlons = dimensions[2]
                        deltaH = 24.0 / N
                        dayspermo = tvarb.shape[0] // N
                        print(
                            "  %d timepoints per day, %d hr intervals between timepoints"
                            % (N, deltaH)
                        )
                        comptime = firstday.getTime()
                        modellons = tvarb.getLongitude()
                        modellats = tvarb.getLatitude()
                        # Longitude values are needed later to compute Local Solar
                        # Times.
                        lons = modellons[:]
                        print("  Creating temporary storage and output fields ...")
                        # Sorts tvarb into separate GMTs for one year
                        tvslice = MV2.zeros((N, dayspermo, nlats, nlons))
                        # Concatenates tvslice over all years
                        concatenation = MV2.zeros((N, dayspermo * nYears, nlats, nlons))
                        LSTs = MV2.zeros((N, nlats, nlons))
                        for iGMT in range(N):
                            hour = iGMT * deltaH + startime
                            print(
                                "  Computing Local Standard Times for GMT %5.2f ..."
                                % hour
                            )
                            for j in range(nlats):
                                for k in range(nlons):
                                    LSTs[iGMT, j, k] = (hour + lons[k] / 15) % 24
                    for iGMT in range(N):
                        hour = iGMT * deltaH + startime
                        print("  Choosing timepoints with GMT %5.2f ..." % hour)
                        print("days per mo :", dayspermo)
                        # Transient-variable slice: every Nth tpoint gets all of
                        # the current GMT's tpoints for current year:
                        tvslice[iGMT] = tvarb[iGMT::N]
                        concatenation[
                            iGMT, iYear * dayspermo : (iYear + 1) * dayspermo
                        ] = tvslice[iGMT]
                    iYear += 1
                f.close()

                # For each GMT, take mean and standard deviation over all years for
                # the chosen month:
                avgvalues = MV2.zeros((N, nlats, nlons))
                stdvalues = MV2.zeros((N, nlats, nlons))
                for iGMT in range(N):
                    hour = iGMT * deltaH + startime
                    print(
                        "Computing mean and standard deviation over all GMT %5.2f timepoints ..."
                        % hour
                    )
                    # Assumes first dimension of input ("axis#0") is time
                    avgvalues[iGMT] = MV2.average(concatenation[iGMT], axis=0)
                    stdvalues[iGMT] = genutil.statistics.std(concatenation[iGMT])
                avgvalues.id = "diurnalmean"
                stdvalues.id = "diurnalstd"
                LSTs.id = "LST"
                avgvalues.units = outunits
                # Standard deviation has same units as mean (not so for
                # higher-moment stats).
                stdvalues.units = outunits
                LSTs.units = "hr"
                LSTs.longname = "Local Solar Time"
                avgvalues.setAxis(0, comptime)
                avgvalues.setAxis(1, modellats)
                avgvalues.setAxis(2, modellons)
                stdvalues.setAxis(0, comptime)
                stdvalues.setAxis(1, modellats)
                stdvalues.setAxis(2, modellons)
                LSTs.setAxis(0, comptime)
                LSTs.setAxis(1, modellats)
                LSTs.setAxis(2, modellons)
                avgoutfile = ("%s_%s_%s_%s-%s_diurnal_avg.nc") % (
                    varbname,
                    dataname,
                    monthname,
                    str(args.firstyear),
                    str(args.lastyear),
                )
                stdoutfile = ("%s_%s_%s_%s-%s_diurnal_std.nc") % (
                    varbname,
                    dataname,
                    monthname,
                    str(args.firstyear),
                    str(args.lastyear),
                )
                LSToutfile = "%s_%s_LocalSolarTimes.nc" % (varbname, dataname)
                if not os.path.exists(args.results_dir):
                    os.makedirs(args.results_dir)
                f = cdms2.open(os.path.join(args.results_dir, avgoutfile), "w")
                g = cdms2.open(os.path.join(args.results_dir, stdoutfile), "w")
                h = cdms2.open(os.path.join(args.results_dir, LSToutfile), "w")
                f.write(avgvalues)
                g.write(stdvalues)
                h.write(LSTs)
                f.close()
                g.close()
                h.close()
            except Exception as err:
                print("Failed for model %s with erro: %s" % (dataname, err))

    print("done")
    args = P.get_parameter()

    month = args.month  # noqa: F841
    monthname = monthname_d[args.month]  # noqa: F841

    # -------------------------------------HARD-CODED INPUT (add to command line later?):

    # These models have been processed already (or tried and found wanting,
    # e.g. problematic time coordinates):
    skipMe = args.skip  # noqa: F841

    # Choose only one ensemble member per model, with the following ensemble-member code (for definitions, see
    # http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf):

    # NOTE--These models do not supply 3hr data from the 'r1i1p1' ensemble member,
    #       but do supply it from other ensemble members:
    #       bcc-csm1-1 (3hr data is from r2i1p1)
    #       CCSM4      (3hr data is from r6i1p1)
    #       GFDL-CM3   (3hr data is from r2i1p1, r3i1p1, r4i1p1, r5i1p1)
    #       GISS-E2-H  (3hr data is from r6i1p1, r6i1p3)
    #       GISS-E2-R  (3hr data is from r6i1p2)

    varbname = "pr"

    #           Note that CMIP5 specifications designate (01:30, 04:30, 07:30, ..., 22:30) GMT for 3hr flux fields, but
    # *WARNING* some GMT timepoints are actually (0, 3, 6,..., 21) in submitted CMIP5 data, despite character strings in
    #           file names (and time axis metadata) to the contrary. See CMIP5 documentation and errata! Overrides to
    #           correct these problems are given below:
    # startGMT =  '0:0:0.0' # Include 00Z as a possible starting time, to accommodate (0, 3, 6,..., 21)GMT in the input
    # data.
    # startime = -1.5 # Subtract 1.5h from (0, 3, 6,..., 21)GMT input data. This is needed for BNU-ESM, CCSM4 and
    # CNRM-CM5.
    # startime = -3.0 # Subtract 3.0h from (0, 3, 6,..., 21)GMT input
    # data. This is needed for CMCC-CM.

    # -------------------------------------

    nYears = args.lastyear - args.firstyear + 1

    template = populateStringConstructor(args.filename_template, args)
    template.variable = varbname

    print("TEMPLATE:", template())
    fileList = glob.glob(os.path.join(args.modpath, template()))
    print("FILES:", fileList)
    params = [INPUT(args, name, template) for name in fileList]
    print("PARAMS:", params)
    cdp.cdp_run.multiprocess(compute, params, num_workers=args.num_workers)
Example No. 14
               help="longitudes")
P.add_argument(
    "-A",
    "--outnameasc",
    type=str,
    dest='outnameasc',
    default='pr_%(month)_%(firstyear)-%(lastyear)_fourierDiurnalGridPoints.asc',
    help="Output name for ascs")
args = P.get_parameter()
month = args.month
monthname = monthname_d[month]
startyear = args.firstyear
finalyear = args.lastyear
yearrange = "%s-%s" % (startyear, finalyear)

template = populateStringConstructor(args.filename_template, args)
template.month = monthname
template_std = populateStringConstructor(args.filename_template_std, args)
template_std.month = monthname
template_LST = populateStringConstructor(args.filename_template_LST, args)
template_LST.month = monthname

LSTfiles = glob.glob(os.path.join(args.modroot, template_LST()))
print("LSTFILES:", LSTfiles)
print("TMPL", template_LST())

ascFile = populateStringConstructor(args.outnameasc, args)
ascFile.month = monthname
ascname = os.path.join(os.path.abspath(args.output_directory), ascFile())

if not os.path.exists(os.path.dirname(ascname)):
    os.makedirs(os.path.dirname(ascname))
def compute(params):
    fileName = params.fileName
    month = params.args.month
    monthname = params.monthname
    varbname = params.varname
    template = populateStringConstructor(args.filename_template, args)
    template.variable = varbname
    # Units on output (*may be converted below from the units of input*)
    outunits = 'mm/d'
    startime = 1.5  # GMT value for starting time-of-day

    reverted = template.reverse(os.path.basename(fileName))
    dataname = reverted["model"]
    if dataname not in args.skip:
        try:
            print('Data source:', dataname)
            print('Opening %s ...' % fileName)
            f = cdms2.open(fileName)

            # Composite-mean and composite-s.d diurnal cycle for month and year(s):
            iYear = 0
            for year in range(args.firstyear, args.lastyear + 1):
                print('Year %s:' % year)
                startTime = cdtime.comptime(year, month, 1, 1, 30)
                # Last possible second to get all tpoints
                finishtime = startTime.add(
                    1, cdtime.Month).add(-1.5, cdtime.Hour).add(.1, cdtime.Second)
                print('Reading %s from %s for time interval %s to %s ...' % (varbname, fileName, startTime, finishtime))
                # Transient variable stores data for current year's month.
                tvarb = f(varbname, time=(startTime, finishtime))
                # *HARD-CODES conversion from kg/m2/sec to mm/day.
                tvarb *= 86400
                print('Shape:', tvarb.shape)
                # The following tasks need to be done only once, extracting
                # metadata from first-year file:
                if year == args.firstyear:
                    tc = tvarb.getTime().asComponentTime()
                    day1 = cdtime.comptime(tc[0].year, tc[0].month)
                    firstday = tvarb(time=(day1, day1.add(1, cdtime.Day), "con"))
                    dimensions = firstday.shape
                    # print '  Shape = ', dimensions
                    # Number of time points in the selected month for one year
                    N = dimensions[0]
                    nlats = dimensions[1]
                    nlons = dimensions[2]
                    deltaH = 24. / N
                    dayspermo = tvarb.shape[0] // N
                    print('  %d timepoints per day, %d hr intervals between timepoints' % (N, deltaH))
                    comptime = firstday.getTime()
                    modellons = tvarb.getLongitude()
                    modellats = tvarb.getLatitude()
                    # Longitude values are needed later to compute Local Solar
                    # Times.
                    lons = modellons[:]
                    print('  Creating temporary storage and output fields ...')
                    # Sorts tvarb into separate GMTs for one year
                    tvslice = MV2.zeros((N, dayspermo, nlats, nlons))
                    # Concatenates tvslice over all years
                    concatenation = MV2.zeros(
                        (N, dayspermo * nYears, nlats, nlons))
                    LSTs = MV2.zeros((N, nlats, nlons))
                    for iGMT in range(N):
                        hour = iGMT * deltaH + startime
                        print('  Computing Local Standard Times for GMT %5.2f ...' % hour)
                        for j in range(nlats):
                            for k in range(nlons):
                                LSTs[iGMT, j, k] = (hour + lons[k] / 15) % 24
                for iGMT in range(N):
                    hour = iGMT * deltaH + startime
                    print('  Choosing timepoints with GMT %5.2f ...' % hour)
                    # Transient-variable slice: every Nth tpoint gets all of
                    # the current GMT's tpoints for current year:
                    tvslice[iGMT] = tvarb[iGMT:tvarb.shape[0]:N]
                    concatenation[iGMT, iYear * dayspermo:(iYear + 1) *
                                  dayspermo] = tvslice[iGMT]
                iYear += 1
            f.close()

            # For each GMT, take mean and standard deviation over all years for
            # the chosen month:
            avgvalues = MV2.zeros((N, nlats, nlons))
            stdvalues = MV2.zeros((N, nlats, nlons))
            for iGMT in range(N):
                hour = iGMT * deltaH + startime
                print('Computing mean and standard deviation over all GMT %5.2f timepoints ...' % hour)
                # Assumes first dimension of input ("axis#0") is time
                avgvalues[iGMT] = MV2.average(concatenation[iGMT], axis=0)
                stdvalues[iGMT] = genutil.statistics.std(concatenation[iGMT])
            avgvalues.id = 'diurnalmean'
            stdvalues.id = 'diurnalstd'
            LSTs.id = 'LST'
            avgvalues.units = outunits
            # Standard deviation has same units as mean (not so for
            # higher-moment stats).
            stdvalues.units = outunits
            LSTs.units = 'hr'
            LSTs.longname = 'Local Solar Time'
            avgvalues.setAxis(0, comptime)
            avgvalues.setAxis(1, modellats)
            avgvalues.setAxis(2, modellons)
            stdvalues.setAxis(0, comptime)
            stdvalues.setAxis(1, modellats)
            stdvalues.setAxis(2, modellons)
            LSTs.setAxis(0, comptime)
            LSTs.setAxis(1, modellats)
            LSTs.setAxis(2, modellons)
            avgoutfile = ('%s_%s_%s_%s-%s_diurnal_avg.nc') % (
                varbname, dataname, monthname, str(args.firstyear), str(args.lastyear))
            stdoutfile = ('%s_%s_%s_%s-%s_diurnal_std.nc') % (
                varbname, dataname, monthname, str(args.firstyear), str(args.lastyear))
            LSToutfile = ('%s_%s_LocalSolarTimes.nc' % (varbname, dataname))
            if not os.path.exists(args.results_dir):
                os.makedirs(args.results_dir)
            f = cdms2.open(os.path.join(args.results_dir, avgoutfile), 'w')
            g = cdms2.open(os.path.join(args.results_dir, stdoutfile), 'w')
            h = cdms2.open(os.path.join(args.results_dir, LSToutfile), 'w')
            f.write(avgvalues)
            g.write(stdvalues)
            h.write(LSTs)
            f.close()
            g.close()
            h.close()
        except Exception as err:
            print("Failed for model %s with erro: %s" % (dataname, err))
Example No. 16
def main():
    P.add_argument(
        "-t",
        "--filename_template",
        default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_diurnal_avg.nc",
        help="template for file names containing diurnal average",
    )
    P.add_argument("--model", default="*")
    P.add_argument(
        "--filename_template_LST",
        default="pr_%(model)_LocalSolarTimes.nc",
        help="template for file names point to Local Solar Time Files",
    )
    P.add_argument(
        "--filename_template_std",
        default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_diurnal_std.nc",
        help="template for file names containing diurnal std",
    )
    P.add_argument(
        "-l",
        "--lats",
        nargs="*",
        default=[31.125, 31.125, 36.4, 5.125, 45.125, 45.125],
        help="latitudes",
    )
    P.add_argument(
        "-L",
        "--lons",
        nargs="*",
        default=[-83.125, 111.145, -97.5, 147.145, -169.145, -35.145],
        help="longitudes",
    )
    P.add_argument(
        "-A",
        "--outnameasc",
        type=str,
        dest="outnameasc",
        default=
        "pr_%(month)_%(firstyear)-%(lastyear)_fourierDiurnalGridPoints.asc",
        help="Output name for ascs",
    )
    args = P.get_parameter()
    month = args.month
    monthname = monthname_d[month]
    startyear = args.firstyear
    finalyear = args.lastyear
    yearrange = "%s-%s" % (startyear, finalyear)  # noqa: F841

    template = populateStringConstructor(args.filename_template, args)
    template.month = monthname
    template_std = populateStringConstructor(args.filename_template_std, args)
    template_std.month = monthname
    template_LST = populateStringConstructor(args.filename_template_LST, args)
    template_LST.month = monthname

    LSTfiles = glob.glob(os.path.join(args.modpath, template_LST()))
    print("LSTFILES:", LSTfiles)
    print("TMPL", template_LST())

    ascFile = populateStringConstructor(args.outnameasc, args)
    ascFile.month = monthname
    ascname = os.path.join(os.path.abspath(args.results_dir), ascFile())

    if not os.path.exists(os.path.dirname(ascname)):
        os.makedirs(os.path.dirname(ascname))
    fasc = open(ascname, "w")

    gridptlats = [float(x) for x in args.lats]
    gridptlons = [float(x) for x in args.lons]
    nGridPoints = len(gridptlats)
    assert len(gridptlons) == nGridPoints

    # gridptlats = [-29.125, -5.125,   45.125,  45.125]
    # gridptlons = [-57.125, 75.125, -169.145, -35.145]
    # Gridpoints for JULY    samples in Figure 4 of Covey et al., JClimate 29: 4461 (2016):
    # nGridPoints = 6
    # gridptlats = [ 31.125,  31.125,  36.4,   5.125,   45.125,  45.125]
    # gridptlons = [-83.125, 111.145, -97.5, 147.145, -169.145, -35.145]

    N = 8  # Number of timepoints in a 24-hour cycle
    for LSTfile in LSTfiles:
        print("Reading %s ..." % LSTfile, os.path.basename(LSTfile), file=fasc)
        print("Reading %s ..." % LSTfile, os.path.basename(LSTfile), file=fasc)
        reverted = template_LST.reverse(os.path.basename(LSTfile))
        model = reverted["model"]
        print("====================", file=fasc)
        print(model, file=fasc)
        print("====================", file=fasc)
        template.model = model
        avgfile = template()
        template_std.model = model
        stdfile = template_std()
        print("Reading time series of mean diurnal cycle ...", file=fasc)
        f = cdms2.open(LSTfile)
        g = cdms2.open(os.path.join(args.modpath, avgfile))
        h = cdms2.open(os.path.join(args.modpath, stdfile))
        LSTs = f("LST")
        print("Input shapes: ", LSTs.shape, file=fasc)

        modellats = LSTs.getLatitude()
        modellons = LSTs.getLongitude()
        latbounds = modellats.getBounds()  # noqa: F841
        lonbounds = modellons.getBounds()  # noqa: F841

        # Gridpoints selected above may be offset slightly from points in full
        # grid ...
        closestlats = MV2.zeros(nGridPoints)
        closestlons = MV2.zeros(nGridPoints)
        pointLSTs = MV2.zeros((nGridPoints, N))
        avgvalues = MV2.zeros((nGridPoints, N))
        stdvalues = MV2.zeros((nGridPoints, N))
        # ... in which case, just pick the closest full-grid point:
        for i in range(nGridPoints):
            print(
                "   (lat, lon) = (%8.3f, %8.3f)" %
                (gridptlats[i], gridptlons[i]),
                file=fasc,
            )
            closestlats[i] = gridptlats[i]
            closestlons[i] = gridptlons[i] % 360
            print(
                "   Closest (lat, lon) for gridpoint = (%8.3f, %8.3f)" %
                (closestlats[i], closestlons[i]),
                file=fasc,
            )
            # Time series for selected grid point:
            avgvalues[i] = g(
                "diurnalmean",
                lat=(closestlats[i], closestlats[i], "cob"),
                lon=(closestlons[i], closestlons[i], "cob"),
                squeeze=1,
            )
            stdvalues[i] = h(
                "diurnalstd",
                lat=(closestlats[i], closestlats[i], "cob"),
                lon=(closestlons[i], closestlons[i], "cob"),
                squeeze=1,
            )
            pointLSTs[i] = f(
                "LST",
                lat=(closestlats[i], closestlats[i], "cob"),
                lon=(closestlons[i], closestlons[i], "cob"),
                squeeze=1,
            )
            print(" ", file=fasc)
        f.close()
        g.close()
        h.close()
        # Print results for input to Mathematica.
        if monthname == "Jan":
            # In printed output, numbers for January data follow 0-5 for July data,
            # hence begin with 6.
            deltaI = 6
        else:
            deltaI = 0
        prefix = args.modpath
        for i in range(nGridPoints):
            print(
                "For gridpoint %d at %5.1f deg latitude, %6.1f deg longitude ..."
                % (i, gridptlats[i], gridptlons[i]),
                file=fasc,
            )
            print("   Local Solar Times are:", file=fasc)
            print((prefix + "LST%d = {") % (i + deltaI), file=fasc)
            print(N * "%5.3f, " % tuple(pointLSTs[i]), end="", file=fasc)
            print("};", file=fasc)
            print("   Mean values for each time-of-day are:", file=fasc)
            print((prefix + "mean%d = {") % (i + deltaI), file=fasc)
            print(N * "%5.3f, " % tuple(avgvalues[i]), end="", file=fasc)
            print("};", file=fasc)
            print("   Standard deviations for each time-of-day are:",
                  file=fasc)
            print((prefix + "std%d = {") % (i + deltaI), file=fasc)
            print(N * "%6.4f, " % tuple(stdvalues[i]), end="", file=fasc)
            print("};", file=fasc)
            print(" ", file=fasc)

        # Take fast Fourier transform of the overall multi-year mean diurnal cycle.
        print("**************   ", avgvalues[0][0], file=fasc)
        cycmean, maxvalue, tmax = fastFT(avgvalues, pointLSTs)
        print("**************   ", avgvalues[0][0], file=fasc)
        # Print Fourier harmonics:
        for i in range(nGridPoints):
            print(
                "For gridpoint %d at %5.1f deg latitude, %6.1f deg longitude ..."
                % (i, gridptlats[i], gridptlons[i]),
                file=fasc,
            )
            print("  Mean value over cycle = %6.2f" % cycmean[i], file=fasc)
            print(
                "  Diurnal     maximum   = %6.2f at %6.2f hr Local Solar Time."
                % (maxvalue[i, 0], tmax[i, 0] % 24),
                file=fasc,
            )
            print(
                "  Semidiurnal maximum   = %6.2f at %6.2f hr Local Solar Time."
                % (maxvalue[i, 1], tmax[i, 1] % 24),
                file=fasc,
            )
            print(
                "  Terdiurnal  maximum   = %6.2f at %6.2f hr Local Solar Time."
                % (maxvalue[i, 2], tmax[i, 2] % 24),
                file=fasc,
            )

    print("Results sent to:", ascname)
Ejemplo n.º 17
# *WARNING* some GMT timepoints are actually (0, 3, 6,..., 21) in submitted CMIP5 data, despite character strings in
#           file names (and time axis metadata) to the contrary. See CMIP5 documentation and errata! Overrides to
#           correct these problems are given below:
# Include 00Z as a possible starting time, to accommodate (0, 3, 6,..., 21)GMT
# in the input data.
# startime = -1.5  # Subtract 1.5h from (0, 3, 6,..., 21)GMT input data.
#                    This is needed for BNU-ESM, CCSM4 and CNRM-CM5.
# Subtract 1.5h from (0, 3, 6,..., 21)GMT input data. This is needed for
# CMCC-CM.

# -------------------------------------

monthname = monthname_d[month]
nYears = finalyear - startyear + 1
# Character strings for starting and ending day/GMT (*HARD-CODES
# particular GMT timepoints*):
# *WARNING* GMT timepoints are actually (0, 3, 6,..., 21) in the original TRMM/Obs4MIPs data, despite character strings
# in file names (and time axis metadata). See CMIP5 documentation and
# errata!

template = populateStringConstructor(args.filename_template, args)
template.variable = varbname

fileList = glob.glob(os.path.join(directory, template()))
print "FILES:", fileList

params = [INPUT(args, name, template) for name in fileList]
print "PARAMS:", params

cdp.cdp_run.multiprocess(compute, params, num_workers=args.num_workers)
# *WARNING* some GMT timepoints are actually (0, 3, 6,..., 21) in submitted CMIP5 data, despite character strings in
#           file names (and time axis metadata) to the contrary. See CMIP5 documentation and errata! Overrides to
#           correct these problems are given below:
# Include 00Z as a possible starting time, to accommodate (0, 3, 6,..., 21)GMT
# in the input data.
# startime = -1.5  # Subtract 1.5h from (0, 3, 6,..., 21)GMT input data.
#                    This is needed for BNU-ESM, CCSM4 and CNRM-CM5.
# Subtract 1.5h from (0, 3, 6,..., 21)GMT input data. This is needed for
# CMCC-CM.

# -------------------------------------

monthname = monthname_d[month]
nYears = finalyear - startyear + 1
# Character strings for starting and ending day/GMT (*HARD-CODES
# particular GMT timepoints*):
# *WARNING* GMT timepoints are actually (0, 3, 6,..., 21) in the original TRMM/Obs4MIPs data, despite character strings
# in file names (and time axis metadata). See CMIP5 documentation and
# errata!


template = populateStringConstructor(args.filename_template, args)
template.variable = varbname

fileList = glob.glob(os.path.join(directory, template()))
print("FILES:", fileList)

params = [INPUT(args, name, template) for name in fileList]
print("PARAMS:", params)

cdp.cdp_run.multiprocess(compute, params, num_workers=args.num_workers)
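cdp.cdp_run.multiprocess, as used above, applies compute() to each INPUT parameter object across num_workers worker processes. A rough standard-library stand-in for that dispatch pattern (an approximation only; the CDP routine's ordering and error handling may differ, and the worker and params below are placeholders):

# Standard-library approximation of the parallel dispatch above
# (placeholder worker and params; not the cdp.cdp_run implementation).
import multiprocessing


def compute(param):
    # Stand-in for the real compute(), which opens the model file described
    # by `param` and returns its diurnal statistics.
    return param * param


if __name__ == "__main__":
    params = [1, 2, 3, 4]
    with multiprocessing.Pool(processes=2) as pool:
        results = pool.map(compute, params)
    print(results)   # [1, 4, 9, 16]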
Ejemplo n.º 19
def main():
    def compute(param):
        template = populateStringConstructor(args.filename_template, args)
        template.variable = param.varname
        template.month = param.monthname
        fnameRoot = param.fileName
        reverted = template.reverse(os.path.basename(fnameRoot))
        model = reverted["model"]
        print('Specifying latitude / longitude domain of interest ...')
        datanameID = 'diurnalmean'  # Short ID name of output data
        latrange = (param.args.lat1, param.args.lat2)
        lonrange = (param.args.lon1, param.args.lon2)
        region = cdutil.region.domain(latitude=latrange, longitude=lonrange)
        if param.args.region_name == "":
            region_name = "{:g}_{:g}&{:g}_{:g}".format(*(latrange + lonrange))
        else:
            region_name = param.args.region_name
        print('Reading %s ...' % fnameRoot)
        try:
            f = cdms2.open(fnameRoot)
            x = f(datanameID, region)
            units = x.units
            print('  Shape =', x.shape)

            print(
                'Finding standard deviation over first dimension (time of day) ...'
            )
            x = genutil.statistics.std(x)
            print('  Shape =', x.shape)

            print('Finding r.m.s. average over 2nd-3rd dimensions (area) ...')
            x = x * x
            x = cdutil.averager(x, axis='xy')
            x = cdms2.MV2.sqrt(x)

            print(
                'For %8s in %s, average variance of hourly values = (%5.2f %s)^2'
                % (model, monthname, x, units))
            f.close()
        except Exception as err:
            print("Failed model %s with error" % (err))
            x = 1.e20
        return model, region, {region_name: float(x)}

    P.add_argument(
        "-j",
        "--outnamejson",
        type=str,
        dest='outnamejson',
        default=
        'pr_%(month)_%(firstyear)-%(lastyear)_std_of_meandiurnalcyc.json',
        help="Output name for jsons")

    P.add_argument("--lat1", type=float, default=-50., help="First latitude")
    P.add_argument("--lat2", type=float, default=50., help="Last latitude")
    P.add_argument("--lon1", type=float, default=0., help="First longitude")
    P.add_argument("--lon2", type=float, default=360., help="Last longitude")
    P.add_argument("--region_name",
                   type=str,
                   default="TRMM",
                   help="name for the region of interest")

    P.add_argument(
        "-t",
        "--filename_template",
        default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_diurnal_avg.nc")
    P.add_argument("--model", default="*")

    args = P.get_parameter()
    month = args.month
    monthname = monthname_d[month]
    startyear = args.firstyear  # noqa: F841
    finalyear = args.lastyear  # noqa: F841

    template = populateStringConstructor(args.filename_template, args)
    template.month = monthname

    print("TEMPLATE NAME:", template())

    print('Specifying latitude / longitude domain of interest ...')
    # TRMM (observed) domain:
    latrange = (args.lat1, args.lat2)
    lonrange = (args.lon1, args.lon2)

    region = cdutil.region.domain(latitude=latrange, longitude=lonrange)

    # Amazon basin:
    # latrange = (-15.0,  -5.0)
    # lonrange = (285.0, 295.0)

    print('Preparing to write output to JSON file ...')
    if not os.path.exists(args.results_dir):
        os.makedirs(args.results_dir)
    jsonFile = populateStringConstructor(args.outnamejson, args)
    jsonFile.month = monthname

    jsonname = os.path.join(os.path.abspath(args.results_dir), jsonFile())

    if not os.path.exists(jsonname) or args.append is False:
        print('Initializing dictionary of statistical results ...')
        stats_dic = {}
        metrics_dictionary = collections.OrderedDict()
    else:
        with open(jsonname) as f:
            metrics_dictionary = json.load(f)
            print("LOADE WITH KEYS:", list(metrics_dictionary.keys()))
            stats_dic = metrics_dictionary["RESULTS"]

    OUT = pcmdi_metrics.io.base.Base(os.path.abspath(args.results_dir),
                                     jsonFile())
    try:
        egg_pth = pkg_resources.resource_filename(
            pkg_resources.Requirement.parse("pcmdi_metrics"), "share/pmp")
    except Exception:
        # Python 2 seems to fail when run in the home directory of the source?
        egg_pth = os.path.join(os.getcwd(), "share", "pmp")
    disclaimer = open(os.path.join(egg_pth, "disclaimer.txt")).read()
    metrics_dictionary["DISCLAIMER"] = disclaimer
    metrics_dictionary["REFERENCE"] = (
        "The statistics in this file are based on Trenberth, Zhang & Gehne, "
        "J Hydromet. 2017")

    files = glob.glob(os.path.join(args.modpath, template()))
    print(files)

    params = [INPUT(args, name, template) for name in files]
    print("PARAMS:", params)

    results = cdp.cdp_run.multiprocess(compute,
                                       params,
                                       num_workers=args.num_workers)

    for r in results:
        m, region, res = r
        if r[0] not in stats_dic:
            stats_dic[m] = res
        else:
            stats_dic[m].update(res)

    print('Writing output to JSON file ...')
    metrics_dictionary["RESULTS"] = stats_dic
    print("KEYS AT END:", list(metrics_dictionary.keys()))
    rgmsk = metrics_dictionary.get("RegionalMasking", {})
    print("REG MASK:", rgmsk)
    nm = list(res.keys())[0]
    region.id = nm
    rgmsk[nm] = {"id": nm, "domain": region}
    metrics_dictionary["RegionalMasking"] = rgmsk
    OUT.write(metrics_dictionary,
              json_structure=["model", "domain"],
              indent=4,
              separators=(',', ': '))
    print('done')
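The metric produced by compute() above is the standard deviation of the mean diurnal cycle over its time-of-day axis, reduced to one number by taking the square root of its area-averaged square. A numpy sketch of that reduction; the cosine-latitude weights stand in for the grid-aware weighting that cdutil.averager performs on real data:

# Numpy sketch of the statistic in compute() above (illustrative weights).
import numpy as np

ntimes, nlats, nlons = 8, 10, 20
lats = np.linspace(-45.0, 45.0, nlats)
x = np.random.rand(ntimes, nlats, nlons)      # fake mean-diurnal-cycle field

std_map = x.std(axis=0)                       # std over time of day
weights = np.cos(np.deg2rad(lats))[:, None] * np.ones((nlats, nlons))
rms = np.sqrt(np.average(std_map ** 2, weights=weights))
print("area-RMS of diurnal-cycle std = %5.2f" % rms)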
def compute(params):
    fileName = params.fileName
    startyear = params.args.firstyear
    finalyear = params.args.lastyear
    month = params.args.month
    monthname = params.monthname
    varbname = params.varname
    template = populateStringConstructor(args.filename_template, args)
    template.variable = varbname

    reverted = template.reverse(os.path.basename(fileName))
    dataname = reverted["model"]
    if dataname not in args.skip:
        try:
            print('Data source:', dataname)
            print('Opening %s ...' % fileName)
            f = cdms2.open(fileName)
            iYear = 0
            dmean = None
            for year in range(startyear, finalyear + 1):
                print('Year %s:' % year)
                startTime = cdtime.comptime(year, month)
                # Last possible second to get all tpoints
                finishtime = startTime.add(
                    1, cdtime.Month).add(-1, cdtime.Minute)
                print('Reading %s from %s for time interval %s to %s ...' % (varbname, fileName, startTime, finishtime))
                # Transient variable stores data for current year's month.
                tvarb = f(varbname, time=(startTime, finishtime, "ccn"))
                # *HARD-CODES conversion from kg/m2/sec to mm/day.
                tvarb *= 86400
                # The following tasks need to be done only once, extracting
                # metadata from first-year file:
                tc = tvarb.getTime().asComponentTime()
                current = tc[0]
                while current.month == month:
                    end = cdtime.comptime(
                        current.year,
                        current.month,
                        current.day).add(
                        1,
                        cdtime.Day)
                    sub = tvarb(time=(current, end, "con"))
                    # Assumes first dimension of input ("axis#0") is time
                    tmp = numpy.ma.average(sub, axis=0)
                    sh = list(tmp.shape)
                    sh.insert(0, 1)
                    if dmean is None:
                        dmean = tmp.reshape(sh)
                    else:
                        dmean = numpy.ma.concatenate(
                            (dmean, tmp.reshape(sh)), axis=0)
                    current = end
                iYear += 1
            f.close()
            stdvalues = cdms2.MV2.array(genutil.statistics.std(dmean))
            stdvalues.setAxis(0, tvarb.getLatitude())
            stdvalues.setAxis(1, tvarb.getLongitude())
            stdvalues.id = 'dailySD'
            # Standard deviation has same units as mean.
            stdvalues.units = "mm/d"
            stdoutfile = ('%s_%s_%s_%s-%s_std_of_dailymeans.nc') % (varbname, dataname,
                                                                    monthname, str(startyear), str(finalyear))
        except Exception as err:
            print("Failed for model: %s with error: %s" % (dataname, err))
    if not os.path.exists(args.results_dir):
        os.makedirs(args.results_dir)
    g = cdms2.open(os.path.join(args.results_dir, stdoutfile), 'w')
    g.write(stdvalues)
    g.close()
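compute() above builds one daily-mean field for every day of the chosen month across all years and then takes the standard deviation over those days. A compact numpy sketch of that compositing; the 3-hourly sampling, day count and unit-conversion factor mirror the code above, while the array contents are fake:

# Numpy sketch of the daily-mean compositing and 'dailySD' statistic above.
import numpy as np

steps_per_day, ndays, nlats, nlons = 8, 62, 4, 4    # e.g. two Julys of 3h data
pr = np.random.rand(steps_per_day * ndays, nlats, nlons)   # fake kg m-2 s-1
pr = pr * 86400.0                                   # convert to mm/day

daily_means = pr.reshape(ndays, steps_per_day, nlats, nlons).mean(axis=1)
std_of_daily_means = daily_means.std(axis=0)        # analogue of 'dailySD'
print(std_of_daily_means.shape)                     # (4, 4)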
Ejemplo n.º 21
def main():
    P.add_argument(
        "-t",
        "--filename_template",
        default="pr_%(model)_%(month)_%(firstyear)-%(lastyear)_diurnal_avg.nc",
        help="template for file names containing diurnal average",
    )
    P.add_argument("--model", default="*")
    P.add_argument(
        "--filename_template_LST",
        default="pr_%(model)_LocalSolarTimes.nc",
        help="template for file names point to Local Solar Time Files",
    )

    args = P.get_parameter()
    month = args.month
    monthname = monthname_d[month]
    startyear = args.firstyear
    finalyear = args.lastyear
    yearrange = "%s-%s" % (startyear, finalyear)

    template = populateStringConstructor(args.filename_template, args)
    template.month = monthname
    template_LST = populateStringConstructor(args.filename_template_LST, args)
    template_LST.month = monthname

    LSTfiles = glob.glob(os.path.join(args.modpath, template_LST()))

    print("modpath ", args.modpath)
    print("filename_template ", args.filename_template)
    print("filename_template_LST ", args.filename_template_LST)

    print("LSTFILES:", LSTfiles)
    print("TMPL", template_LST())
    for LSTfile in LSTfiles:
        print("Reading %s ..." % LSTfile, os.path.basename(LSTfile))
        reverted = template_LST.reverse(os.path.basename(LSTfile))
        model = reverted["model"]
        print("====================")
        print(model)
        print("====================")
        template.model = model
        avgfile = template()
        print("Reading time series of mean diurnal cycle ...")
        f = cdms2.open(LSTfile)
        g = cdms2.open(os.path.join(args.modpath, avgfile))
        LSTs = f("LST")
        avgs = g("diurnalmean")
        print("Input shapes: ", LSTs.shape, avgs.shape)

        print("Getting latitude and longitude coordinates.")
        # Any file with grid info will do, so use Local Standard Times file:
        modellats = LSTs.getLatitude()
        modellons = LSTs.getLongitude()

        f.close()
        g.close()

        print("Taking fast Fourier transform of the mean diurnal cycle ...")
        cycmean, maxvalue, tmax = fastAllGridFT(avgs, LSTs)
        print("  Output:")
        print("    cycmean", cycmean.shape)
        print("    maxvalue", maxvalue.shape)
        print("    tmax", tmax.shape)

        print('"Re-decorating" Fourier harmonics with grid info, etc., ...')
        cycmean = MV2.array(cycmean)
        maxvalue = MV2.array(maxvalue)
        tmax = MV2.array(tmax)

        cycmean.setAxis(0, modellats)
        cycmean.setAxis(1, modellons)
        cycmean.id = "tmean"
        cycmean.units = "mm / day"

        maxvalue.setAxis(1, modellats)
        maxvalue.setAxis(2, modellons)
        maxvalue.id = "S"
        maxvalue.units = "mm / day"

        tmax.setAxis(1, modellats)
        tmax.setAxis(2, modellons)
        tmax.id = "tS"
        tmax.units = "GMT"

        print("... and writing to netCDF.")
        f = cdms2.open(
            os.path.join(
                args.results_dir,
                "pr_" + model + "_" + monthname + "_" + yearrange +
                "_tmean.nc",
            ),
            "w",
        )
        g = cdms2.open(
            os.path.join(
                args.results_dir,
                "pr_" + model + "_" + monthname + "_" + yearrange + "_S.nc",
            ),
            "w",
        )
        h = cdms2.open(
            os.path.join(
                args.results_dir,
                "pr_" + model + "_" + monthname + "_" + yearrange + "_tS.nc",
            ),
            "w",
        )
        f.write(cycmean)
        g.write(maxvalue)
        h.write(tmax)
        f.close()
        g.close()
        h.close()
Ejemplo n.º 22
def main():
    def compute(params):
        fileName = params.fileName
        startyear = params.args.firstyear
        finalyear = params.args.lastyear
        month = params.args.month
        monthname = params.monthname
        varbname = params.varname
        template = populateStringConstructor(args.filename_template, args)
        template.variable = varbname

        dataname = params.args.model
        if dataname is None or dataname.find("*") != -1:
            # model not passed or passed as *
            reverted = template.reverse(os.path.basename(fileName))
            dataname = reverted["model"]
        print('Data source:', dataname)
        print('Opening %s ...' % fileName)
        if dataname not in args.skip:
            try:
                print('Data source:', dataname)
                print('Opening %s ...' % fileName)
                f = cdms2.open(fileName)
                iYear = 0
                dmean = None
                for year in range(startyear, finalyear + 1):
                    print('Year %s:' % year)
                    startTime = cdtime.comptime(year, month)
                    # Last possible second to get all tpoints
                    finishtime = startTime.add(1, cdtime.Month).add(
                        -1, cdtime.Minute)
                    print('Reading %s from %s for time interval %s to %s ...' %
                          (varbname, fileName, startTime, finishtime))
                    # Transient variable stores data for current year's month.
                    tvarb = f(varbname, time=(startTime, finishtime, "ccn"))
                    # *HARD-CODES conversion from kg/m2/sec to mm/day.
                    tvarb *= 86400
                    # The following tasks need to be done only once, extracting
                    # metadata from first-year file:
                    tc = tvarb.getTime().asComponentTime()
                    current = tc[0]
                    while current.month == month:
                        end = cdtime.comptime(current.year, current.month,
                                              current.day).add(1, cdtime.Day)
                        sub = tvarb(time=(current, end, "con"))
                        # Assumes first dimension of input ("axis#0") is time
                        tmp = numpy.ma.average(sub, axis=0)
                        sh = list(tmp.shape)
                        sh.insert(0, 1)
                        if dmean is None:
                            dmean = tmp.reshape(sh)
                        else:
                            dmean = numpy.ma.concatenate(
                                (dmean, tmp.reshape(sh)), axis=0)
                        current = end
                    iYear += 1
                f.close()
                stdvalues = cdms2.MV2.array(genutil.statistics.std(dmean))
                stdvalues.setAxis(0, tvarb.getLatitude())
                stdvalues.setAxis(1, tvarb.getLongitude())
                stdvalues.id = 'dailySD'
                # Standard deviation has same units as mean.
                stdvalues.units = "mm/d"
                stdoutfile = ('%s_%s_%s_%s-%s_std_of_dailymeans.nc') % (
                    varbname, dataname, monthname, str(startyear),
                    str(finalyear))
            except Exception as err:
                print("Failed for model: %s with error: %s" % (dataname, err))
        if not os.path.exists(args.results_dir):
            os.makedirs(args.results_dir)
        g = cdms2.open(os.path.join(args.results_dir, stdoutfile), 'w')
        g.write(stdvalues)
        g.close()

    args = P.get_parameter()
    month = args.month
    startyear = args.firstyear
    finalyear = args.lastyear
    directory = args.modpath  # Input  directory for model data
    # These models have been processed already (or tried and found wanting,
    # e.g. problematic time coordinates):
    skipMe = args.skip
    print("SKIPPING:", skipMe)

    # Choose only one ensemble member per model, with the following ensemble-member code (for definitions, see
    # http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf):

    # NOTE--These models do not supply 3hr data from the 'r1i1p1' ensemble member,
    #       but do supply it from other ensemble members:
    #       bcc-csm1-1 (3hr data is from r2i1p1)
    #       CCSM4      (3hr data is from r6i1p1)
    #       GFDL-CM3   (3hr data is from r2i1p1, r3i1p1, r4i1p1, r5i1p1)
    #       GISS-E2-H  (3hr data is from r6i1p1, r6i1p3)
    #       GISS-E2-R  (3hr data is from r6i1p2)

    varbname = "pr"

    #           Note that CMIP5 specifications designate (01:30, 04:30, 07:30, ..., 22:30) GMT for 3hr flux fields, but
    # *WARNING* some GMT timepoints are actually (0, 3, 6,..., 21) in submitted CMIP5 data, despite character strings in
    #           file names (and time axis metadata) to the contrary. See CMIP5 documentation and errata! Overrides to
    #           correct these problems are given below:
    # Include 00Z as a possible starting time, to accommodate (0, 3, 6,..., 21)GMT
    # in the input data.
    # startime = -1.5  # Subtract 1.5h from (0, 3, 6,..., 21)GMT input data.
    #                    This is needed for BNU-ESM, CCSM4 and CNRM-CM5.
    # Subtract 1.5h from (0, 3, 6,..., 21)GMT input data. This is needed for
    # CMCC-CM.

    # -------------------------------------

    monthname = monthname_d[month]  # noqa: F841
    nYears = finalyear - startyear + 1  # noqa: F841
    # Character strings for starting and ending day/GMT (*HARD-CODES
    # particular GMT timepoints*):
    # *WARNING* GMT timepoints are actually (0, 3, 6,..., 21) in the original TRMM/Obs4MIPs data, despite character
    # strings in file names (and time axis metadata). See CMIP5 documentation and
    # errata!

    template = populateStringConstructor(args.filename_template, args)
    template.variable = varbname

    fileList = glob.glob(os.path.join(directory, template()))
    print("FILES:", fileList)

    params = [INPUT(args, name, template) for name in fileList]
    print("PARAMS:", params)

    cdp.cdp_run.multiprocess(compute, params, num_workers=args.num_workers)