Example #1
# Assumed imports; taper, lag1_autocorrelation, and rednoise are helper
# functions defined elsewhere in the source module.
import numpy as np
import MV2
from scipy import signal
def Get_SegmentAveraged_PowerSpectrum_and_RedNoise(d,
                                                   SegmentLength,
                                                   TaperingRatio,
                                                   SegmentOverlapping=False):
    seg_starting_i = []
    segments = []
    freqs_segs = 0
    psd_segs = 0
    rn_segs = 0
    num_segs = 0
    r1_segs = []  # Lag-1 autocorrelation

    if SegmentOverlapping:
        jump = SegmentLength // 2  # integer division so range() gets an int step
    else:
        jump = SegmentLength

    for i in range(0, len(d), jump):
        ie = i + SegmentLength
        if ie <= len(d):
            seg_starting_i.append(i)   # record segment start index
            seg_starting_i.append(ie)  # and its end index, despite the list's name

            d_i = d[i:ie].copy()
            # Tapering
            d_i = taper(d_i, TaperingRatio)
            segments.append([range(i, ie), d_i])

            # Power spectrum
            freqs_i, psd_i = signal.welch(np.array(d_i),
                                          nperseg=len(d_i),
                                          noverlap=0,
                                          window='boxcar')

            # Red noise
            r1_i = lag1_autocorrelation(d_i)
            rn_i = rednoise(psd_i, len(freqs_i), r1_i)
            r1_segs.append(float(r1_i))

            # Collect power spectrum and red noise of each segment to average later
            freqs_segs = MV2.add(freqs_i, freqs_segs)
            psd_segs = MV2.add(psd_i, psd_segs)
            rn_segs = MV2.add(rn_i, rn_segs)

            # Count number of segments to be used for averaging
            num_segs += 1

            print('segment (num)', i, ie, '(', num_segs, ')')

    freqs_avg = MV2.divide(freqs_segs, num_segs)
    psd_avg = MV2.divide(psd_segs, num_segs)
    rn_avg = MV2.divide(rn_segs, num_segs)
    r1_avg = sum(r1_segs) / float(len(r1_segs))

    return (segments, np.array(freqs_avg), np.array(psd_avg),
            np.array(rn_avg), r1_avg)
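
A minimal usage sketch for the function above, with a synthetic AR(1) series
standing in for real data; it assumes the helpers taper, lag1_autocorrelation,
and rednoise are available in scope:

import numpy as np

# Synthetic red-noise (AR(1)) series as a stand-in for a real time series
rng = np.random.default_rng(0)
ts = np.zeros(600)
for t in range(1, len(ts)):
    ts[t] = 0.7 * ts[t - 1] + rng.standard_normal()

segments, freqs, psd, rn, r1 = \
    Get_SegmentAveraged_PowerSpectrum_and_RedNoise(
        ts, SegmentLength=120, TaperingRatio=0.1, SegmentOverlapping=True)
print('segment-averaged lag-1 autocorrelation:', r1)
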
Example #2
# Assumed import; compute_season is defined elsewhere in the source module.
import MV2
def mpd(data):
    """Monsoon precipitation intensity and annual range calculation

           .. describe:: Input

               *  data

                   * Assumes climatology array with 12 times step first one January

   """
    months_length = [
        31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.
    ]
    mjjas = compute_season(data, [4, 5, 6, 7, 8], months_length)
    ndjfm = compute_season(data, [10, 11, 0, 1, 2], months_length)
    ann = compute_season(data, list(range(12)), months_length)

    annrange = MV2.subtract(mjjas, ndjfm)

    lat = annrange.getAxis(0)
    i, e = lat.mapInterval((-91, 0, 'con'))
    if i > e:  # reversed latitudes
        tmp = i + 1
        i = e + 1
        e = tmp

    annrange[slice(i, e)] = -annrange[slice(i, e)]
    annrange.id = data.id + "_ar"
    annrange.longname = "annual range"

    mpi = MV2.divide(annrange, ann)
    mpi.id = data.id + "_int"
    mpi.longname = "intensity"

    return annrange, mpi
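
A minimal call sketch, assuming 'pr_clim.nc' (a hypothetical file) holds a
12-step, January-first precipitation climatology and that compute_season is
importable:

import cdms2

f = cdms2.open('pr_clim.nc')  # hypothetical climatology file
pr = f('pr')                  # shape (12, lat, lon)
annrange, mpi = mpd(pr)
print(annrange.id, mpi.id)    # 'pr_ar', 'pr_int'
f.close()
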
Example #3
# Assumed import for this snippet.
import MV2
def sperber_metrics(d, region, debug=False):
    """ d: input, 1d array of cumulative pentad time series """
    # Convert accumulation to fractional accumulation; normalize by sum
    d_sum = d[-1]
    # Normalize
    frac_accum = MV2.divide(d, d_sum)
    # Stat 1: Onset
    onset_index = next(i for i, v in enumerate(frac_accum) if v >= 0.2)
    # Stat 2: Decay
    if region == 'GoG':
        decay_threshold = 0.6
    else:
        decay_threshold = 0.8
    decay_index = next(i for i, v in enumerate(frac_accum)
                       if v >= decay_threshold)
    # Stat 3: Slope
    slope = (frac_accum[decay_index] - frac_accum[onset_index]) \
        / float(decay_index - onset_index)
    # Stat 4: Duration
    duration = decay_index - onset_index + 1
    # Calculation done; return results as a dict
    return {
        'frac_accum': frac_accum,
        'onset_index': onset_index,
        'decay_index': decay_index,
        'slope': slope,
        'duration': duration
    }
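
A self-contained sketch with a synthetic cumulative pentad series (73
pentads); the region name 'AIR' is a placeholder for any non-'GoG' region:

import numpy as np
import MV2

rain = np.random.default_rng(1).random(73)  # synthetic pentad rainfall
cum_rain = MV2.array(np.cumsum(rain))
result = sperber_metrics(cum_rain, region='AIR')
print(result['onset_index'], result['decay_index'], result['duration'])
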
Example #4
# Assumed imports for this snippet.
import cdutil
import genutil
import MV2
def gain_pcs_fraction(full_field, eof_pattern, pcs, debug=False):
    """
    NOTE: This function is designed for getting fraction of variace obtained by
          pseudo pcs
    Input: (dimension x, y, t should be identical for above inputs)
    - full_field (t,y,x)
    - eof_pattern (y,x)
    - pcs (t)
    Output:
    - fraction: cdms2 array but for 1 single number which is float.
                Preserve cdms2 array type for netCDF recording.
                fraction of explained variance
    """
    # 1) Get total variance ---
    variance_total = genutil.statistics.variance(full_field, axis="t")
    variance_total_area_ave = cdutil.averager(
        variance_total, axis="xy", weights="weighted"
    )
    # 2) Get variance for pseudo pattern ---
    # 2-1) Reconstruct field based on pseudo pattern
    if debug:
        print("from gain_pcs_fraction:")
        print("full_field.shape (before grower): ", full_field.shape)
        print("eof_pattern.shape (before grower): ", eof_pattern.shape)
    # Extend eof_pattern (add a time dimension, copying the same 2-D values to every time step)
    reconstructed_field = genutil.grower(full_field, eof_pattern)[
        1
    ]  # Matching dimension (add time axis)
    for t in range(0, len(pcs)):
        reconstructed_field[t] = MV2.multiply(reconstructed_field[t], pcs[t])
    # 2-2) Get variance of reconstructed field
    variance_partial = genutil.statistics.variance(reconstructed_field, axis="t")
    variance_partial_area_ave = cdutil.averager(
        variance_partial, axis="xy", weights="weighted"
    )
    # 3) Calculate fraction ---
    fraction = MV2.divide(variance_partial_area_ave, variance_total_area_ave)
    # debugging
    if debug:
        print("full_field.shape (after grower): ", full_field.shape)
        print("reconstructed_field.shape: ", reconstructed_field.shape)
        print("variance_partial_area_ave: ", variance_partial_area_ave)
        print("variance_total_area_ave: ", variance_total_area_ave)
        print("fraction: ", fraction)
        print("from gain_pcs_fraction done")
    # return result
    return fraction
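
A hedged call sketch; ts is assumed to be a (t, y, x) cdms2 variable, and
eof1/pc1 are placeholders for an EOF pattern and principal component series
obtained from a prior analysis (e.g., with the eofs package):

fraction = gain_pcs_fraction(ts, eof1, pc1, debug=False)
print('fraction of explained variance:', float(fraction))
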
Example #6
# Assumed imports for this snippet.
import numpy as np
import genutil
import MV2
def normalize_by_median(stat_xy):
    """ 
    NOTE:
    Input
    - stat_xy: cdms2 MV2 2D array with proper axes decorated, values to visualize.
    Output
    - stat_xy: stat_xy after normalized by median of each row
    """
    # Get median
    median = genutil.statistics.median(stat_xy, axis=1)[0]
    # Match shapes
    stat_xy, median = genutil.grower(stat_xy, median)
    # Normalize by median value
    median = np.array(median)
    stat_xy_normalized = MV2.divide(MV2.subtract(stat_xy, median), median)
    # Decorate axes
    stat_xy_normalized.setAxisList(stat_xy.getAxisList())
    stat_xy_normalized.id = stat_xy.id
    stat_xy = stat_xy_normalized
    return stat_xy
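
A short usage sketch, assuming stat_xy is a 2-D cdms2 MV2 array (e.g.,
metric values per model per region):

stat_xy_norm = normalize_by_median(stat_xy)
# Each value is now (value - row_median) / row_median,
# so 0 marks the row median.
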
Example #8
# Assumed imports; helpers such as subSliceSegment, unit_conversion,
# Remove_dailySeasonalCycle, interp2commonGrid, get_daily_ano_segment,
# space_time_spectrum, generate_axes_and_decorate, output_power_spectra,
# calculate_ewr, write_netcdf_output, plot_power, and debug_chk_plot are
# defined elsewhere in the source package.
import os
import numpy as np
import cdms2
import cdtime
import MV2
def mjo_metric_ewr_calculation(mip, model, exp, run, debug, plot, nc_out,
                               cmmGrid, degX, UnitsAdjust, inputfile, var,
                               startYear, endYear, segmentLength, outdir):

    # Open file to read daily dataset
    if debug:
        print('debug: open file')
    f = cdms2.open(inputfile)
    d = f[var]
    tim = d.getTime()
    comTim = tim.asComponentTime()

    # Get starting and ending year and month
    if debug:
        print('debug: check time')
    first_time = comTim[0]
    last_time = comTim[-1]

    # Adjust years to consider only when continuous NDJFMA is available
    if first_time > cdtime.comptime(startYear, 11, 1):
        startYear += 1
    if last_time < cdtime.comptime(endYear, 4, 30):
        endYear -= 1

    # Number of grids for 2d fft input
    NL = len(d.getLongitude())  # number of grid points in x-axis (longitude)
    if cmmGrid:
        NL = int(360 / degX)
    NT = segmentLength  # number of time steps per segment (needs to be an even number)

    if debug:
        endYear = startYear + 2
        print('debug: startYear, endYear:', startYear, endYear)
        print('debug: NL, NT:', NL, NT)

    #
    # Get daily climatology on each grid, then remove it to get anomaly
    #
    numYear = endYear - startYear
    mon = 11
    day = 1
    # Store each year's segment in a dictionary: segment[year]
    segment = {}
    segment_ano = {}
    daSeaCyc = MV2.zeros((NT, d.shape[1], d.shape[2]), MV2.float)
    for year in range(startYear, endYear):
        print(year)
        segment[year] = subSliceSegment(d, year, mon, day, NT)
        # units conversion
        segment[year] = unit_conversion(segment[year], UnitsAdjust)
        # Get climatology of daily seasonal cycle
        daSeaCyc = MV2.add(MV2.divide(segment[year], float(numYear)), daSeaCyc)
    # Remove daily seasonal cycle from each segment
    if numYear > 1:
        for year in range(startYear, endYear):
            segment_ano[year] = Remove_dailySeasonalCycle(
                segment[year], daSeaCyc)

    #
    # Space-time power spectra
    #
    """
    Handle each segment (i.e. each year) separately.
    1. Get daily time series (3D: time and spatial 2D)
    2. Meridionally average (2D: time and spatial, i.e., longitude)
    3. Get anomaly by removing time mean of the segment
    4. Perform a 2-D FFT to get power.
    Then get multi-year averaged power after the year loop.
    """
    # Define array for archiving power from each year segment
    Power = np.zeros((numYear, NT + 1, NL + 1), float)  # 'np.float' is removed in modern numpy

    # Year loop for space-time spectrum calculation
    if debug:
        print('debug: year loop start')
    for n, year in enumerate(range(startYear, endYear)):
        print('chk: year:', year)
        d_seg = segment_ano[year]
        # Regrid: interpolation to common grid
        if cmmGrid:
            d_seg = interp2commonGrid(d_seg, degX, debug=debug)
        # Subregion, meridional average, and remove segment time mean
        d_seg_x_ano = get_daily_ano_segment(d_seg)
        # Compute space-time spectrum
        if debug:
            print('debug: compute space-time spectrum')
        Power[n, :, :] = space_time_spectrum(d_seg_x_ano)

    # Multi-year averaged power
    Power = np.average(Power, axis=0)
    # Generates axes for the decoration
    Power, ff, ss = generate_axes_and_decorate(Power, NT, NL)
    # Output for wavenumber-frequency power spectra
    OEE = output_power_spectra(NL, NT, Power, ff, ss)

    # E/W ratio
    ewr, eastPower, westPower = calculate_ewr(OEE)
    print('ewr: ', ewr)
    print('east power: ', eastPower)
    print('west power: ', westPower)

    # Output
    output_filename = "{}_{}_{}_{}_{}_{}-{}".format(mip, model, exp, run,
                                                    'mjo', startYear, endYear)
    if cmmGrid:
        output_filename += '_cmmGrid'

    # NetCDF output
    if nc_out:
        if not os.path.exists(outdir(output_type='diagnostic_results')):
            os.makedirs(outdir(output_type='diagnostic_results'))
        fout = os.path.join(outdir(output_type='diagnostic_results'),
                            output_filename)
        write_netcdf_output(OEE, fout)

    # Plot
    if plot:
        if not os.path.exists(outdir(output_type='graphics')):
            os.makedirs(outdir(output_type='graphics'))
        fout = os.path.join(outdir(output_type='graphics'), output_filename)
        title = (mip.upper() + ': ' + model + ' (' + run + ') \n'
                 + var.capitalize() + ', NDJFMA '
                 + str(startYear) + '-' + str(endYear))
        if cmmGrid:
            title += ', common grid (2.5x2.5deg)'
        plot_power(OEE, title, fout, ewr)

    # Output to JSON
    metrics_result = {}
    metrics_result['east_power'] = eastPower
    metrics_result['west_power'] = westPower
    metrics_result['east_west_power_ratio'] = ewr
    metrics_result['analysis_time_window_start_year'] = startYear
    metrics_result['analysis_time_window_end_year'] = endYear

    # Debug checking plot
    if debug and plot:
        debug_chk_plot(d_seg_x_ano, Power, OEE, segment[year], daSeaCyc,
                       segment_ano[year])

    f.close()
    return metrics_result
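
A hedged call sketch for the function above; the UnitsAdjust tuple format,
the input file name, and the outdir callable are assumptions based on how
they are used inside:

result = mjo_metric_ewr_calculation(
    'cmip6', 'MyModel', 'historical', 'r1i1p1f1',
    debug=False, plot=True, nc_out=True,
    cmmGrid=True, degX=2.5,
    UnitsAdjust=(True, 'multiply', 86400.),   # hypothetical format
    inputfile='pr_day_MyModel.nc', var='pr',  # hypothetical file
    startYear=1985, endYear=2004, segmentLength=180,
    outdir=lambda output_type: os.path.join('.', output_type))
print(result['east_west_power_ratio'])
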
Example #10
# Assumed imports for this snippet.
import genutil
import MV2
def model_land_only(model, model_timeseries, lf, debug=False):
    # -------------------------------------------------
    # Mask out over ocean grid
    # - - - - - - - - - - - - - - - - - - - - - - - - -
    if debug:
        print('debug: plot for beforeMask start')
        import vcs
        x = vcs.init()
        x.plot(model_timeseries)
        x.png('_'.join(['test', model, 'beforeMask.png']))
        print('debug: plot for beforeMask done')

    # Check the land fraction variable to see if it meets the criteria
    # (0 for ocean, 100 for land, no missing value)
    lat_c = lf.getAxis(0)
    lon_c = lf.getAxis(1)
    lf_id = lf.id

    lf = MV2.array(lf.filled(0.))

    lf.setAxis(0, lat_c)
    lf.setAxis(1, lon_c)
    lf.id = lf_id

    if float(MV2.max(lf)) == 1.:
        lf = MV2.multiply(lf, 100.)

    # Matching dimension
    if debug:
        print('debug: match dimension in model_land_only')
    model_timeseries, lf_timeConst = genutil.grower(model_timeseries, lf)

    # Conserve axes
    time_c = model_timeseries.getAxis(0)
    lat_c2 = model_timeseries.getAxis(1)
    lon_c2 = model_timeseries.getAxis(2)

    opt1 = False

    if opt1:  # Masking out partial ocean grids as well
        # Mask out ocean even if fractional (leave only pure land grids)
        model_timeseries_masked = MV2.masked_where(
            lf_timeConst < 100, model_timeseries)
    else:  # Mask out only full ocean grid & use weighting for partial ocean grid
        model_timeseries_masked = MV2.masked_where(
            lf_timeConst == 0, model_timeseries)  # mask out pure ocean grids
        if model == 'EC-EARTH':
            # Mask out grids with less than 90% land for models that count
            # rivers as part of the land-sea fraction. So far only 'EC-EARTH' does.
            model_timeseries_masked = MV2.masked_where(
                lf_timeConst < 90, model_timeseries)
        lf2 = MV2.divide(lf, 100.)
        model_timeseries, lf2_timeConst = genutil.grower(
            model_timeseries, lf2)  # Matching dimension
        model_timeseries_masked = MV2.multiply(
            model_timeseries_masked, lf2_timeConst)  # use land fraction as weighting

    # Make sure to have consistent axes
    model_timeseries_masked.setAxis(0, time_c)
    model_timeseries_masked.setAxis(1, lat_c2)
    model_timeseries_masked.setAxis(2, lon_c2)

    if debug:
        x.clear()
        x.plot(model_timeseries_masked)
        x.png('_'.join(['test', model, 'afterMask.png']))
        x.close()
        print('debug: plot for afterMask done')

    return model_timeseries_masked
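
A hedged call sketch; model_ts is assumed to be a (time, lat, lon) cdms2
variable on the same grid as the land-fraction field:

import cdms2

f_lf = cdms2.open('sftlf_MyModel.nc')  # hypothetical land-fraction file
lf = f_lf('sftlf')                     # 0-100 (or 0-1) land fraction
ts_land = model_land_only('MyModel', model_ts, lf)
f_lf.close()
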
Example #11
# Assumed imports for this snippet.
import base64
import os
import sys
import tempfile
import numpy as np
import vcs
import genutil
import MV2
import pcmdi_metrics.graphics.portraits
def generate_portrait(stat_xy, imgName):
    # Get median
    median = genutil.statistics.median(stat_xy, axis=1)[0]
    print(median)
    print(median.shape)
    # Match shapes
    stat_xy, median = genutil.grower(stat_xy, median)
    print(stat_xy.shape)
    print(median.shape)
    # Normalize by median value
    median = np.array(median)
    stat_xy_normalized = MV2.divide(MV2.subtract(stat_xy, median), median)
    print(stat_xy_normalized.shape)
    stat_xy_normalized.setAxisList(stat_xy.getAxisList())

    #
    # Plotting
    #
    # Set up VCS Canvas
    class VCSAddonsNotebook(object):
        def __init__(self, x):
            self.x = x

        def _repr_png_(self):
            fnm = tempfile.mktemp() + ".png"
            self.x.png(fnm)
            encoded = base64.b64encode(open(fnm, "rb").read())
            return encoded

        def __call__(self):
            return self

    # VCS Canvas
    x = vcs.init(bg=True, geometry=(2600, 800))
    show = VCSAddonsNotebook(x)
    # Load our "pretty" colormap
    x.scriptrun(
        os.path.join(sys.prefix, "share", "pmp", "graphics", 'vcs',
                     'portraits.scr'))
    # Set up Portrait Plot
    P = pcmdi_metrics.graphics.portraits.Portrait()
    xax = [t + ' ' for t in stat_xy_normalized.getAxis(1)[:]]
    yax = [t + ' ' for t in stat_xy_normalized.getAxis(0)[:]]
    # Preprocessing step to "decorate" the axis
    P.decorate(stat_xy_normalized, yax, xax)
    #
    # Customize
    #
    SET = P.PLOT_SETTINGS
    # Viewport on the Canvas
    SET.x1 = .05
    SET.x2 = .88
    SET.y1 = .25
    SET.y2 = .9
    # Both X (horizontal) and Y (vertical) ticks
    # Text table
    SET.tictable = vcs.createtexttable()
    SET.tictable.color = "black"
    # X (bottom) ticks
    tictextsize = 9
    # Text Orientation
    SET.xticorientation = vcs.createtextorientation()
    SET.xticorientation.angle = -90
    SET.xticorientation.halign = "right"
    SET.xticorientation.height = tictextsize
    # Y (vertical) ticks
    SET.yticorientation = vcs.createtextorientation()
    SET.yticorientation.angle = 0
    SET.yticorientation.halign = "right"
    SET.yticorientation.height = tictextsize
    # We can turn off the "grid"
    SET.draw_mesh = "y"
    # Control color for missing
    SET.missing_color = "grey"
    # Tick length
    SET.xtic1.y1 = 0
    SET.xtic1.y2 = 0
    # Timestamp
    SET.time_stamp = None
    # Colormap
    SET.colormap = "bl_to_darkred"
    # level to use
    SET.levels = [-.5, -.4, -.3, -.2, -.1, 0, .1, .2, .3, .4, .5]
    SET.levels.insert(0, -1.e20)
    SET.levels.append(1.e20)
    # colors to use
    SET.fillareacolors = vcs.getcolors(SET.levels,
                                       split=0,
                                       colors=range(16, 240))
    # Plot
    P.plot(stat_xy_normalized, x=x)
    # Save
    x.png(imgName + '.png')
    #
    # Annotated Plot
    #
    Annotated = False
    if Annotated:
        x.clear()
        SET.values.show = True
        SET.values.array = stat_xy
        P.plot(stat_xy_normalized, x=x, bg=0)
        x.png(imgName + '_annotated.png')
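
A call sketch, assuming stat_xy is a 2-D cdms2 variable whose axes carry
string coordinate labels (as the xax/yax construction above expects):

generate_portrait(stat_xy, 'portrait_test')  # writes portrait_test.png
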
Example #12
# Fragment of a larger plotting script; names such as figfilename, mode,
# debug, NormalizePCts, calcSTD, SegmentLengthRatio, TaperingRatio,
# SegmentOverlapping, PowerSpectrumAnalysis, PlotOn_xlog, outdir, and
# obs_data come from the enclosing scope (assumed imports: os, cdms2,
# cdutil, MV2).
model = figfilename.split('_')[5]
run = figfilename.split('_')[7]
title = mode + ', ' + model + ' (' + run + ')'

if debug:
    PlotOn = (run == 'r1i1p1')

f = cdms2.open(ncfile)
d = f(varname_pc)
f.close()

if NormalizePCts:
    d = MV2.divide(d, float(calcSTD(d)))

d_avg = cdutil.averager(d, axis='0')
print('series mean: ', d_avg)

SegmentLength = int(len(d) * SegmentLengthRatio)

freqs, psd, rn, siglevel, r1, hpf, psd_max = PowerSpectrumAnalysis(
    d, SegmentLength,
    TaperingRatio=TaperingRatio,
    SegmentOverlapping=SegmentOverlapping,
    debug=debug)

# Plot
if PlotOn_xlog:
    figfile = os.path.join(outdir, mode + '_' + obs_data,
                           figfilename + '_xlog.png')
Example #13
# Assumed imports; this fragment comes from a larger conversion script in
# which inputVarName, inputFilePathbgn, inputFilePathend, and inputFileName
# are defined earlier.
import cdms2 as cdm
import cdtime
import cdutil
import MV2
outputVarName = ['pr']
outputUnits = ['kg m-2 s-1']

### BETTER IF THE USER DOES NOT CHANGE ANYTHING BELOW THIS LINE...
for fi in range(len(inputVarName)):
    print(fi, inputVarName[fi])
    inputFilePath = inputFilePathbgn + inputFilePathend
    #%% Process variable (with time axis)
    # Open and read input netcdf file
    f = cdm.open(inputFilePath + inputFileName[fi])
    dtmp = f(inputVarName[fi])
    d = MV2.where(MV2.greater(dtmp, -10000.), dtmp, 1.e20)
    d.missing = 1.e20

    cdutil.times.setTimeBoundsMonthly(d)
    d = MV2.divide(d, 86400.)  # CONVERT mm/day to kg m-2 s-1
    lat = d.getLatitude()
    lon = d.getLongitude()
    print(d.shape)
    # time = d.getTime()  # Assumes the variable is named 'time'; in the demo file it is 'months'
    time = d.getAxis(0)
    # Rather use a file dimension-based load statement

    # Deal with problematic "months since" calendar/time axis
    time_bounds = time.getBounds()
    # time_bounds[:, 0] = time[:]
    # time_bounds[:-1, 1] = time[1:]
    # time_bounds[-1, 1] = time_bounds[-1, 0] + 1
    # time.setBounds(time_bounds)
    # del(time_bounds)  # Cleanup
    for i in range(40):
        yr = 1979 + i

        print(fi, inputVarName[fi])
        inputFilePath = inputFilePathbgn + inputFilePathend[fi]
        #%% Process variable (with time axis)
        # Open and read input netcdf file
        f = cdm.open(inputFilePath + inputFileName[fi])
        # d1 = f(inputVarName[fi], time = (cdtime.comptime(yr,0,),cdtime.comptime(yr,12)),longitude=(6,10),latitude=(6,10))
        d1 = f(inputVarName[fi],
               time=(cdtime.comptime(yr, 0), cdtime.comptime(yr, 12)))

        if inputVarName[fi] == 'z':
            d1 = MV2.divide(d1, 9.81)  # geopotential to geopotential height

        plev1 = d1.getLevel()

        plev1[:] = plev1[:] * 100.
        plev1 = cdm.createAxis(plev1, id='plev')
        plev1.designateLevel()
        plev1.axis = 'Z'
        plev1.long_name = 'pressure'
        plev1.positive = 'down'
        plev1.realtopology = 'linear'
        plev1.standard_name = 'air_pressure'
        plev1.units = 'Pa'

        lat = d1.getLatitude()
        lon = d1.getLongitude()
Example #16
# Assumed imports for this snippet; Eof comes from the eofs package
# (cdms2 interface).
import cdms2 as cdms
import cdtime
import cdutil
import MV2
from eofs.cdms import Eof
# Open file ---
data_path = '/clim_obs/obs/atm/mo/psl/ERAINT/psl_ERAINT_198901-200911.nc'  ## Put your file here
f = cdms.open(data_path)

# Set time period ---
start_year = 1980
end_year = 2000
start_time = cdtime.comptime(start_year)
end_time = cdtime.comptime(end_year)

# Load variable ---
d = f('psl',
      time=(start_time, end_time),
      longitude=(-180, 180),
      latitude=(20, 90))  # Provide proper variable name
d = MV2.divide(d, 100.)  # Pa to hPa
d.units = 'hPa'

# Get DJF seasonal mean time series ---
d_DJF = cdutil.DJF(d)

# EOF (take only first variance mode...) ---
solver = Eof(d_DJF, weights='area')
eof = solver.eofsAsCovariance(neofs=1)
pc = solver.pcs(npcs=1, pcscaling=1)  # pcscaling=1: scaled to unit variance
# (divided by the square-root of their eigenvalue)
frac = solver.varianceFraction()

# Sign control if needed ---
eof = eof * -1
pc = pc * -1
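
A possible continuation, printing the leading mode's variance fraction and
saving the results to a hypothetical output file (names follow the snippet
above):

print('EOF1 explained variance: %.1f %%' % (float(frac[0]) * 100.))

fo = cdms.open('psl_NAM_eof1_pc1.nc', 'w')  # hypothetical output file
fo.write(eof, id='eof1')
fo.write(pc, id='pc1')
fo.close()
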