Example #1
0
def linear_regression_on_globe_for_teleconnection(pc,
                                                  model_timeseries,
                                                  stdv_pc,
                                                  RmDomainMean,
                                                  EofScaling,
                                                  debug=False):
    """
    Reconstruct the first EOF mode as a global field for teleconnection use.

    Regresses the model time series onto the principal component so the
    resulting pattern ("eof_lr") covers the whole globe; over the EOF
    subdomain it has been confirmed identical to the EOF pattern itself.

    Returns the tuple (eof_lr, slope, intercept).
    """
    if debug:
        print('pc.shape, timeseries.shape:', pc.shape, model_timeseries.shape)

    # Per-grid-point regression of the time series on the PC gives the
    # extended (global) map used for teleconnection analysis.
    slope, intercept = linear_regression(pc, model_timeseries)

    # The slope is rescaled by the PC's standard deviation except in the one
    # case where EOF scaling is requested while the domain mean is retained.
    if not RmDomainMean and EofScaling:
        eof_lr = MV2.add(slope, intercept)
    else:
        eof_lr = MV2.add(MV2.multiply(slope, stdv_pc), intercept)

    debug_print('linear regression done', debug)

    return eof_lr, slope, intercept
Example #2
0
def Get_SegmentAveraged_PowerSpectrum_and_RedNoise(d,
                                                   SegmentLength,
                                                   TaperingRatio,
                                                   SegmentOverlapping=False):
    """
    Split *d* into fixed-length segments, compute a tapered power spectrum
    and red-noise fit for each, and return their segment averages.

    Parameters
    ----------
    d : 1-D time series (sliceable, supports len() and .copy())
    SegmentLength : int
        Number of time steps per segment; trailing samples that do not fill
        a whole segment are dropped.
    TaperingRatio : float
        Ratio handed to taper() to window each segment before the FFT.
    SegmentOverlapping : bool, optional
        When True, consecutive segments overlap by half a segment length.

    Returns
    -------
    tuple
        (segments, freqs_avg, psd_avg, rn_avg, r1_avg) where the middle
        three are numpy arrays averaged over all segments and r1_avg is the
        mean lag-1 autocorrelation.
    """
    segments = []
    freqs_segs = 0
    psd_segs = 0
    rn_segs = 0
    num_segs = 0
    r1_segs = []  # Lag-1 autocorrelation of each segment

    # Stride between segment start indices.  Integer division is required:
    # "/" yields a float in Python 3 and range() below would raise TypeError.
    if SegmentOverlapping:
        jump = SegmentLength // 2
    else:
        jump = SegmentLength

    for i in range(0, len(d), jump):
        ie = i + SegmentLength
        if ie <= len(d):
            d_i = d[i:ie].copy()
            # Tapering (windowing) to reduce spectral leakage at segment edges
            d_i = taper(d_i, TaperingRatio)
            segments.append([range(i, ie), d_i])

            # Power spectrum of the whole (already tapered) segment; boxcar
            # window because tapering was applied above.
            freqs_i, psd_i = signal.welch(np.array(d_i),
                                          nperseg=len(d_i),
                                          noverlap=0,
                                          window='boxcar')

            # Red noise background from the segment's lag-1 autocorrelation
            r1_i = lag1_autocorrelation(d_i)
            rn_i = rednoise(psd_i, len(freqs_i), r1_i)
            r1_segs.append(float(r1_i))

            # Collect power spectrum and red noise of each segment to average later
            freqs_segs = MV2.add(freqs_i, freqs_segs)
            psd_segs = MV2.add(psd_i, psd_segs)
            rn_segs = MV2.add(rn_i, rn_segs)

            # Count number of segments to be used for averaging
            num_segs += 1

            # print() function: the original Python-2 print statement is a
            # syntax error under Python 3.
            print('segment (num)', i, ie, '(', num_segs, ')')

    freqs_avg = MV2.divide(freqs_segs, num_segs)
    psd_avg = MV2.divide(psd_segs, num_segs)
    rn_avg = MV2.divide(rn_segs, num_segs)
    r1_avg = sum(r1_segs) / float(len(r1_segs))

    return segments, np.array(freqs_avg), np.array(psd_avg), np.array(
        rn_avg), r1_avg
Example #3
0
def main():
    """Demo driver: build random metric-by-model statistics and portrait-plot them.

    Creates a 10x30 random array on labeled cdms2 axes, plants and masks one
    missing value, optionally normalizes each row by its median, derives three
    shifted copies for the extra box triangles, and hands everything to
    plot_portrait().
    """
    # Prepare dummy data -- create random array for testing
    random_array = np.random.rand(10, 30)
    # Axes labeled per model (x) and per metric (y)
    X = cdms2.createAxis(['model_ ' + str(r) for r in list(range(0, 30))])
    Y = cdms2.createAxis(['metric_ ' + str(r) for r in list(range(0, 10))])
    stat_xy = MV2.array(random_array, axes=(Y, X), id='statistics')
    # Plant missing value, then mask it so it is excluded downstream
    stat_xy[5][5] = -1.e20
    stat_xy = MV2.masked_where(MV2.equal(stat_xy, -1.e20), stat_xy)
    # Annotate test
    # NOTE(review): stat_xy_annotate is never used below — presumably left
    # over from an annotation demo; confirm before removing.
    stat_xy_annotate = MV2.multiply(stat_xy, 2)
    # User options
    imgName = 'test_pp_random'
    plotTitle = 'test_pp_random'
    Normalize = True
    # Normalize rows by its median
    if Normalize:
        # Normalize by median value
        stat_xy = normalize_by_median(stat_xy)
        # Revise image file name
        imgName = imgName + '_normalized'
    # Colormap to be used
    colormap = "default"
    # Open-ended outer levels (+/-1e20) catch all out-of-range values
    clevels = [-1.e20, -.5, -.4, -.3, -.2, -.1, 0, .1, .2, .3, .4, .5, 1.e20]
    ccolors = vcs.getcolors(clevels, split=0, colors=range(16, 240))
    # Dummy data for additional triangles
    stat_xy_2 = normalize_by_median(MV2.add(stat_xy, 2))
    stat_xy_3 = normalize_by_median(MV2.add(stat_xy, 3))
    stat_xy_4 = normalize_by_median(MV2.add(stat_xy, 4))
    # Share the labeled axes so all four arrays align on the plot grid
    axes = stat_xy.getAxisList()
    stat_xy_2.setAxisList(axes)
    stat_xy_3.setAxisList(axes)
    stat_xy_4.setAxisList(axes)
    #
    # Portrait plot
    #
    plot_portrait(stat_xy,
                  imgName=imgName,
                  colormap=colormap,
                  clevels=clevels,
                  ccolors=ccolors,
                  num_box_partitioning=4,
                  stat_xy_2=stat_xy_2,
                  stat_xy_3=stat_xy_3,
                  stat_xy_4=stat_xy_4,
                  GridMeshLine=False)
# Mapping from input (source file) variable names to CMOR output names,
# with their output units and the CF "positive" attribute for flux fields.
inputVarName = ['t2m', 'sst', 'netflux', 'lhf', 'shf', 'taux', 'tauy']
outputVarName = ['tas', 'ts', 'hfns', 'hfls', 'hfss', 'tauu', 'tauv']
outputUnits = ['K', 'K', 'W m-2', 'W m-2', 'W m-2', 'Pa', 'Pa']
outpos = ['', '', 'up', 'up', 'up', 'down',
          'down']  # entries of a longer original variable list were trimmed here

### BETTER IF THE USER DOES NOT CHANGE ANYTHING BELOW THIS LINE...
for fi in range(len(inputVarName)):
    print(fi, inputVarName[fi])
    # NOTE(review): inputFilePathbgn, inputFilePathend, inputFileName and cdm
    # are defined elsewhere in the original script — confirm before running.
    inputFilePath = inputFilePathbgn + inputFilePathend
    #%% Process variable (with time axis)
    # Open and read input netcdf file
    f = cdm.open(inputFilePath + inputFileName[fi])
    d = f(inputVarName[fi])
    # Temperature-like fields: convert degrees Celsius to Kelvin
    if inputVarName[fi] in ['t2m', 'sst']: d = MV2.add(d, 273.15)
    # cdutil.times.setTimeBoundsMonthly(d)
    lat = d.getLatitude()
    lon = d.getLongitude()
    print(d.shape)
    #time = d.getTime() ; # Assumes variable is named 'time', for the demo file this is named 'months'
    time = d.getAxis(0)
    # Rather use a file dimension-based load statement

    # Deal with problematic "months since" calendar/time axis
    time_bounds = time.getBounds()
    d.positive = outpos[fi]

    # time_bounds[:,0] = time[:]
    # time_bounds[:-1,1] = time[1:]
    # time_bounds[-1,1] = time_bounds[-1,0]+1
Example #5
0
def mjo_metric_ewr_calculation(mip, model, exp, run, debug, plot, nc_out,
                               cmmGrid, degX, UnitsAdjust, inputfile, var,
                               startYear, endYear, segmentLength, outdir):
    """
    Compute the MJO east/west power ratio (EWR) metric from a daily dataset.

    Reads variable *var* from *inputfile*, builds NDJFMA segments for each
    year in [startYear, endYear), removes the daily seasonal cycle, computes
    a wavenumber-frequency power spectrum per segment, averages them, and
    derives the east/west power ratio.  Optionally writes NetCDF diagnostics
    and a plot via *outdir* (a callable keyed by output_type).

    Returns
    -------
    dict
        east_power, west_power, east_west_power_ratio, and the analysis
        time window start/end years.
    """

    # Open file to read daily dataset
    if debug:
        print('debug: open file')
    f = cdms2.open(inputfile)
    d = f[var]
    tim = d.getTime()
    comTim = tim.asComponentTime()

    # Get starting and ending year and month
    if debug:
        print('debug: check time')
    first_time = comTim[0]
    last_time = comTim[-1]

    # Adjust years to consider only when continous NDJFMA is available
    if first_time > cdtime.comptime(startYear, 11, 1):
        startYear += 1
    if last_time < cdtime.comptime(endYear, 4, 30):
        endYear -= 1

    # Number of grids for 2d fft input
    NL = len(d.getLongitude())  # number of grid in x-axis (longitude)
    if cmmGrid:
        NL = int(360 / degX)
    NT = segmentLength  # number of time step for each segment (need to be an even number)

    if debug:
        # Shorten the analysis window so debug runs stay fast
        endYear = startYear + 2
        print('debug: startYear, endYear:', startYear, endYear)
        print('debug: NL, NT:', NL, NT)

    #
    # Get daily climatology on each grid, then remove it to get anomaly
    #
    numYear = endYear - startYear
    mon = 11  # each segment starts November 1st (NDJFMA season)
    day = 1
    # Store each year's segment in a dictionary: segment[year]
    segment = {}
    segment_ano = {}
    daSeaCyc = MV2.zeros((NT, d.shape[1], d.shape[2]), MV2.float)
    for year in range(startYear, endYear):
        print(year)
        segment[year] = subSliceSegment(d, year, mon, day, NT)
        # units conversion
        segment[year] = unit_conversion(segment[year], UnitsAdjust)
        # Accumulate the multi-year mean daily seasonal cycle incrementally
        daSeaCyc = MV2.add(MV2.divide(segment[year], float(numYear)), daSeaCyc)
    # Remove daily seasonal cycle from each segment
    if numYear > 1:
        for year in range(startYear, endYear):
            segment_ano[year] = Remove_dailySeasonalCycle(
                segment[year], daSeaCyc)

    #
    # Space-time power spectra
    #
    """
    Handle each segment (i.e. each year) separately.
    1. Get daily time series (3D: time and spatial 2D)
    2. Meridionally average (2D: time and spatial, i.e., longitude)
    3. Get anomaly by removing time mean of the segment
    4. Proceed 2-D FFT to get power.
    Then get multi-year averaged power after the year loop.
    """
    # Define array for archiving power from each year segment.
    # Builtin float replaces np.float, which was removed in NumPy 1.24.
    Power = np.zeros((numYear, NT + 1, NL + 1), float)

    # Year loop for space-time spectrum calculation
    if debug:
        print('debug: year loop start')
    for n, year in enumerate(range(startYear, endYear)):
        print('chk: year:', year)
        d_seg = segment_ano[year]
        # Regrid: interpolation to common grid
        if cmmGrid:
            d_seg = interp2commonGrid(d_seg, degX, debug=debug)
        # Subregion, meridional average, and remove segment time mean
        d_seg_x_ano = get_daily_ano_segment(d_seg)
        # Compute space-time spectrum
        if debug:
            print('debug: compute space-time spectrum')
        Power[n, :, :] = space_time_spectrum(d_seg_x_ano)

    # Multi-year averaged power
    Power = np.average(Power, axis=0)
    # Generates axes for the decoration
    Power, ff, ss = generate_axes_and_decorate(Power, NT, NL)
    # Output for wavenumber-frequency power spectra
    OEE = output_power_spectra(NL, NT, Power, ff, ss)

    # E/W ratio
    ewr, eastPower, westPower = calculate_ewr(OEE)
    print('ewr: ', ewr)
    print('east power: ', eastPower)
    print('west power: ', westPower)

    # Output
    output_filename = "{}_{}_{}_{}_{}_{}-{}".format(mip, model, exp, run,
                                                    'mjo', startYear, endYear)
    if cmmGrid:
        output_filename += '_cmmGrid'

    # NetCDF output
    if nc_out:
        if not os.path.exists(outdir(output_type='diagnostic_results')):
            os.makedirs(outdir(output_type='diagnostic_results'))
        fout = os.path.join(outdir(output_type='diagnostic_results'),
                            output_filename)
        write_netcdf_output(OEE, fout)

    # Plot
    if plot:
        if not os.path.exists(outdir(output_type='graphics')):
            os.makedirs(outdir(output_type='graphics'))
        fout = os.path.join(outdir(output_type='graphics'), output_filename)
        title = (mip.upper() + ': ' + model + ' (' + run + ') \n'
                 + var.capitalize() + ', NDJFMA '
                 + str(startYear) + '-' + str(endYear))
        if cmmGrid:
            title += ', common grid (2.5x2.5deg)'
        plot_power(OEE, title, fout, ewr)

    # Output to JSON
    metrics_result = {}
    metrics_result['east_power'] = eastPower
    metrics_result['west_power'] = westPower
    metrics_result['east_west_power_ratio'] = ewr
    metrics_result['analysis_time_window_start_year'] = startYear
    metrics_result['analysis_time_window_end_year'] = endYear

    # Debug checking plot
    if debug and plot:
        debug_chk_plot(d_seg_x_ano, Power, OEE, segment[year], daSeaCyc,
                       segment_ano[year])

    f.close()
    return metrics_result
# NOTE(review): this fragment assumes stat_xy was created earlier (outside
# this excerpt) as a 2-D MV2 array on labeled axes — confirm against the
# original notebook.
# Plant missing value, then mask it so it is excluded downstream
stat_xy[5][5] = -1.e20
stat_xy = MV2.masked_where(MV2.equal(stat_xy, -1.e20), stat_xy)

# Normalize rows by its median
Normalize = True
if Normalize:
    # Normalize by median value
    stat_xy = normalize_by_median(stat_xy)

# Additional dummy data for annotate test
stat_xy_annotate = MV2.multiply(stat_xy, 2)

# Additional dummy data for additional triangles
stat_xy_2 = normalize_by_median(MV2.add(stat_xy, 2))
stat_xy_3 = normalize_by_median(MV2.add(stat_xy, 3))
stat_xy_4 = normalize_by_median(MV2.add(stat_xy, 4))
# Share the labeled axes so all arrays align on the plot grid
axes = stat_xy.getAxisList()
stat_xy_2.setAxisList(axes)
stat_xy_3.setAxisList(axes)
stat_xy_4.setAxisList(axes)


# ## Portrait plot generation

# ### Example 1

# In[7]:

# Colormap to be used